file_name large_stringlengths 4 140 | prefix large_stringlengths 0 39k | suffix large_stringlengths 0 36.1k | middle large_stringlengths 0 29.4k | fim_type large_stringclasses 4
values |
|---|---|---|---|---|
routes.js | /**
* Created by andyf on 4/14/2017.
*/
//------------------------------------------//
//------------------ROUTES------------------//
//------------------------------------------//
var multer = require("multer");
var express = require("express");
var app = express();
var multer = require("multer");
var mongoose = require("mongoose");
var createFile = require("create-file");
var jsonfile = require("jsonfile");
var fs = require("fs");
var db = mongoose.connect('mongodb://localhost/it410database');
// Upload Photos
var create = function(file){
createFile('app/image-info/' + file.originalname + "id.json",
'{"name": "' + file.originalname + '", "imageUrl":"img/' + file.originalname + '", "imageId":"' + file.originalname + 'id"}',
function(err) {
console.log("error");
}
);
var filePlace = __dirname + '/' + 'images.json';
var object = ',{"name": "' + file.originalname + '", "imageUrl":"img/' + file.originalname + '", "imageId":"' + file.originalname + 'id"}]'
var obj = jsonfile.readFileSync(filePlace);
console.log(obj);
var newObj = JSON.stringify(obj);
fs.unlinkSync(filePlace);
var brandNew = newObj.replace(']', object);
console.log(brandNew);
jsonfile.writeFileSync(filePlace, brandNew);
};
var storage = multer.diskStorage({
destination: function (req, file, callback) {
console.log(file, req.body);
callback(null, 'app/img');
},
filename: function (req, file, callback) {
create(file);
callback(null, file.originalname);
}
});
var upload = multer({ storage : storage}).single('userPhoto');
var User = require('./models/user');
module.exports = function(app, passport){
app.get('/authenticate', function(req, res){
res.render('authenticate.ejs');
});
app.post('/api/photo',function(req,res) {
console.log("got here", req.body);
upload(req, res, function(err) {
if (err) {
console.log(err);
return res.end("Error uploading file");
}
res.end("File has uploaded");
});
});
app.get('/', function(req, res){
res.sendfile('app/index.html');
});
app.get('/admin', function(req, res){
if(req.user) {
if(req.user.local.admin || req.user.google.admin) {
res.sendfile('app/admin.html');
}
else
res.send('You are not authorized to view this page </br> <a href="/authenticate">Return Home</a>');
}
else
res.render('authenticate.ejs');
});
app.get('/checkAdmin', function(req, res) {
if (req.user.admin){
res.sendfile('app/admin.html');
}
else {
res.sendfile('app/index.html');
}
});
app.get('/admin', function(req,res) {
res.sendfile('app/admin.html');
});
app.get('/users', function(req,res){
res.json(req.user);
// Find some documents
// User.find(function(err, docs) {
// console.log("Found the following records");
// console.dir(docs);
// res.json(req.user);
// });
});
app.get('/login', function(req, res){
res.render('login.ejs', { message: req.flash('loginMessage') });
});
app.post('/login', passport.authenticate('local-login', {
successRedirect: '/checkAdmin',
failureRedirect: '/login',
failureFlash: true
}));
app.get('/signup', function(req, res){
res.render('signup.ejs', { message: req.flash('signupMessage') });
});
app.post('/signup', passport.authenticate('local-signup', {
successRedirect: '/',
failureRedirect: '/signup',
failureFlash: true
}));
app.get('/profile', isLoggedIn, function(req, res){
res.render('profile.ejs', { user: req.user });
});
app.get('/auth/facebook', passport.authenticate('facebook', {scope: ['email']}));
app.get('/auth/facebook/callback',
passport.authenticate('facebook', { successRedirect: '/checkAdmin',
failureRedirect: '/' }));
app.get('/auth/google', passport.authenticate('google', {scope: ['profile', 'email']}));
app.get('/auth/google/callback',
passport.authenticate('google', { successRedirect: '/checkAdmin',
failureRedirect: '/' }));
app.get('/logout', function(req, res){
req.logout();
res.redirect('/authenticate');
})
};
function isLoggedIn(req, res, next) | {
if(req.isAuthenticated()){
return next();
}
res.redirect('/login');
} | identifier_body | |
routes.js | /**
* Created by andyf on 4/14/2017.
*/
//------------------------------------------//
//------------------ROUTES------------------//
//------------------------------------------//
var multer = require("multer");
var express = require("express");
var app = express();
var multer = require("multer");
var mongoose = require("mongoose");
var createFile = require("create-file");
var jsonfile = require("jsonfile");
var fs = require("fs");
var db = mongoose.connect('mongodb://localhost/it410database');
// Upload Photos
var create = function(file){
createFile('app/image-info/' + file.originalname + "id.json",
'{"name": "' + file.originalname + '", "imageUrl":"img/' + file.originalname + '", "imageId":"' + file.originalname + 'id"}',
function(err) {
console.log("error");
}
);
var filePlace = __dirname + '/' + 'images.json';
var object = ',{"name": "' + file.originalname + '", "imageUrl":"img/' + file.originalname + '", "imageId":"' + file.originalname + 'id"}]'
var obj = jsonfile.readFileSync(filePlace);
console.log(obj);
var newObj = JSON.stringify(obj);
fs.unlinkSync(filePlace);
var brandNew = newObj.replace(']', object);
console.log(brandNew);
jsonfile.writeFileSync(filePlace, brandNew);
};
var storage = multer.diskStorage({
destination: function (req, file, callback) {
console.log(file, req.body);
callback(null, 'app/img');
},
filename: function (req, file, callback) {
|
var upload = multer({ storage : storage}).single('userPhoto');
var User = require('./models/user');
module.exports = function(app, passport){
app.get('/authenticate', function(req, res){
res.render('authenticate.ejs');
});
app.post('/api/photo',function(req,res) {
console.log("got here", req.body);
upload(req, res, function(err) {
if (err) {
console.log(err);
return res.end("Error uploading file");
}
res.end("File has uploaded");
});
});
app.get('/', function(req, res){
res.sendfile('app/index.html');
});
app.get('/admin', function(req, res){
if(req.user) {
if(req.user.local.admin || req.user.google.admin) {
res.sendfile('app/admin.html');
}
else
res.send('You are not authorized to view this page </br> <a href="/authenticate">Return Home</a>');
}
else
res.render('authenticate.ejs');
});
app.get('/checkAdmin', function(req, res) {
if (req.user.admin){
res.sendfile('app/admin.html');
}
else {
res.sendfile('app/index.html');
}
});
app.get('/admin', function(req,res) {
res.sendfile('app/admin.html');
});
app.get('/users', function(req,res){
res.json(req.user);
// Find some documents
// User.find(function(err, docs) {
// console.log("Found the following records");
// console.dir(docs);
// res.json(req.user);
// });
});
app.get('/login', function(req, res){
res.render('login.ejs', { message: req.flash('loginMessage') });
});
app.post('/login', passport.authenticate('local-login', {
successRedirect: '/checkAdmin',
failureRedirect: '/login',
failureFlash: true
}));
app.get('/signup', function(req, res){
res.render('signup.ejs', { message: req.flash('signupMessage') });
});
app.post('/signup', passport.authenticate('local-signup', {
successRedirect: '/',
failureRedirect: '/signup',
failureFlash: true
}));
app.get('/profile', isLoggedIn, function(req, res){
res.render('profile.ejs', { user: req.user });
});
app.get('/auth/facebook', passport.authenticate('facebook', {scope: ['email']}));
app.get('/auth/facebook/callback',
passport.authenticate('facebook', { successRedirect: '/checkAdmin',
failureRedirect: '/' }));
app.get('/auth/google', passport.authenticate('google', {scope: ['profile', 'email']}));
app.get('/auth/google/callback',
passport.authenticate('google', { successRedirect: '/checkAdmin',
failureRedirect: '/' }));
app.get('/logout', function(req, res){
req.logout();
res.redirect('/authenticate');
})
};
function isLoggedIn(req, res, next) {
if(req.isAuthenticated()){
return next();
}
res.redirect('/login');
} | create(file);
callback(null, file.originalname);
}
});
| random_line_split |
action_support.js | /**
@module ember
@submodule ember-views
*/
import { inspect } from 'ember-utils';
import { Mixin, get, isNone, assert } from 'ember-metal';
import { MUTABLE_CELL } from '../compat/attrs';
function | (component, actionName) {
if (actionName && actionName[MUTABLE_CELL]) {
actionName = actionName.value;
}
assert(
'The default action was triggered on the component ' + component.toString() +
', but the action name (' + actionName + ') was not a string.',
isNone(actionName) || typeof actionName === 'string' || typeof actionName === 'function'
);
return actionName;
}
/**
@class ActionSupport
@namespace Ember
@private
*/
export default Mixin.create({
/**
Calls an action passed to a component.
For example a component for playing or pausing music may translate click events
into action notifications of "play" or "stop" depending on some internal state
of the component:
```javascript
// app/components/play-button.js
export default Ember.Component.extend({
click() {
if (this.get('isPlaying')) {
this.sendAction('play');
} else {
this.sendAction('stop');
}
}
});
```
The actions "play" and "stop" must be passed to this `play-button` component:
```handlebars
{{! app/templates/application.hbs }}
{{play-button play=(action "musicStarted") stop=(action "musicStopped")}}
```
When the component receives a browser `click` event it translate this
interaction into application-specific semantics ("play" or "stop") and
calls the specified action.
```javascript
// app/controller/application.js
export default Ember.Controller.extend({
actions: {
musicStarted() {
// called when the play button is clicked
// and the music started playing
},
musicStopped() {
// called when the play button is clicked
// and the music stopped playing
}
}
});
```
If no action is passed to `sendAction` a default name of "action"
is assumed.
```javascript
// app/components/next-button.js
export default Ember.Component.extend({
click() {
this.sendAction();
}
});
```
```handlebars
{{! app/templates/application.hbs }}
{{next-button action=(action "playNextSongInAlbum")}}
```
```javascript
// app/controllers/application.js
App.ApplicationController = Ember.Controller.extend({
actions: {
playNextSongInAlbum() {
...
}
}
});
```
@method sendAction
@param [action] {String} the action to call
@param [params] {*} arguments for the action
@public
*/
sendAction(action, ...contexts) {
let actionName;
// Send the default action
if (action === undefined) {
action = 'action';
}
actionName = get(this, `attrs.${action}`) || get(this, action);
actionName = validateAction(this, actionName);
// If no action name for that action could be found, just abort.
if (actionName === undefined) { return; }
if (typeof actionName === 'function') {
actionName(...contexts);
} else {
this.triggerAction({
action: actionName,
actionContext: contexts
});
}
},
send(actionName, ...args) {
let target;
let action = this.actions && this.actions[actionName];
if (action) {
let shouldBubble = action.apply(this, args) === true;
if (!shouldBubble) { return; }
}
target = get(this, 'target');
if (target) {
assert(
'The `target` for ' + this + ' (' + target +
') does not have a `send` method',
typeof target.send === 'function'
);
target.send(...arguments);
} else {
assert(`${inspect(this)} had no action handler for: ${actionName}`, action);
}
}
});
| validateAction | identifier_name |
action_support.js | /**
@module ember
@submodule ember-views
*/
import { inspect } from 'ember-utils';
import { Mixin, get, isNone, assert } from 'ember-metal';
import { MUTABLE_CELL } from '../compat/attrs';
function validateAction(component, actionName) {
if (actionName && actionName[MUTABLE_CELL]) {
actionName = actionName.value;
}
assert(
'The default action was triggered on the component ' + component.toString() +
', but the action name (' + actionName + ') was not a string.',
isNone(actionName) || typeof actionName === 'string' || typeof actionName === 'function'
);
return actionName;
}
/**
@class ActionSupport
@namespace Ember
@private
*/
export default Mixin.create({
/**
Calls an action passed to a component.
For example a component for playing or pausing music may translate click events
into action notifications of "play" or "stop" depending on some internal state
of the component:
```javascript
// app/components/play-button.js
export default Ember.Component.extend({
click() {
if (this.get('isPlaying')) {
this.sendAction('play');
} else {
this.sendAction('stop');
}
}
});
```
The actions "play" and "stop" must be passed to this `play-button` component:
```handlebars
{{! app/templates/application.hbs }}
{{play-button play=(action "musicStarted") stop=(action "musicStopped")}}
```
When the component receives a browser `click` event it translate this
interaction into application-specific semantics ("play" or "stop") and
calls the specified action.
```javascript
// app/controller/application.js
export default Ember.Controller.extend({
actions: {
musicStarted() {
// called when the play button is clicked
// and the music started playing
},
musicStopped() {
// called when the play button is clicked
// and the music stopped playing
}
}
});
```
If no action is passed to `sendAction` a default name of "action"
is assumed.
```javascript
// app/components/next-button.js
export default Ember.Component.extend({
click() {
this.sendAction();
}
});
```
```handlebars
{{! app/templates/application.hbs }}
{{next-button action=(action "playNextSongInAlbum")}}
```
```javascript
// app/controllers/application.js
App.ApplicationController = Ember.Controller.extend({
actions: {
playNextSongInAlbum() {
...
}
}
});
```
@method sendAction
@param [action] {String} the action to call
@param [params] {*} arguments for the action
@public
*/
sendAction(action, ...contexts) {
let actionName;
// Send the default action
if (action === undefined) {
action = 'action';
}
actionName = get(this, `attrs.${action}`) || get(this, action);
actionName = validateAction(this, actionName);
// If no action name for that action could be found, just abort.
if (actionName === undefined) { return; }
if (typeof actionName === 'function') {
actionName(...contexts);
} else {
this.triggerAction({
action: actionName,
actionContext: contexts
});
}
},
send(actionName, ...args) {
let target;
let action = this.actions && this.actions[actionName];
if (action) {
let shouldBubble = action.apply(this, args) === true;
if (!shouldBubble) { return; }
}
target = get(this, 'target');
if (target) | else {
assert(`${inspect(this)} had no action handler for: ${actionName}`, action);
}
}
});
| {
assert(
'The `target` for ' + this + ' (' + target +
') does not have a `send` method',
typeof target.send === 'function'
);
target.send(...arguments);
} | conditional_block |
action_support.js | /**
@module ember
@submodule ember-views
*/
import { inspect } from 'ember-utils';
import { Mixin, get, isNone, assert } from 'ember-metal';
import { MUTABLE_CELL } from '../compat/attrs';
function validateAction(component, actionName) {
if (actionName && actionName[MUTABLE_CELL]) {
actionName = actionName.value;
}
assert(
'The default action was triggered on the component ' + component.toString() +
', but the action name (' + actionName + ') was not a string.',
isNone(actionName) || typeof actionName === 'string' || typeof actionName === 'function'
);
return actionName;
}
/**
@class ActionSupport
@namespace Ember
@private
*/
export default Mixin.create({
/**
Calls an action passed to a component.
For example a component for playing or pausing music may translate click events
into action notifications of "play" or "stop" depending on some internal state
of the component:
```javascript
// app/components/play-button.js
export default Ember.Component.extend({
click() {
if (this.get('isPlaying')) {
this.sendAction('play');
} else {
this.sendAction('stop');
}
}
});
```
The actions "play" and "stop" must be passed to this `play-button` component:
```handlebars
{{! app/templates/application.hbs }}
{{play-button play=(action "musicStarted") stop=(action "musicStopped")}}
```
When the component receives a browser `click` event it translate this
interaction into application-specific semantics ("play" or "stop") and
calls the specified action.
```javascript
// app/controller/application.js
export default Ember.Controller.extend({
actions: {
musicStarted() {
// called when the play button is clicked
// and the music started playing
},
musicStopped() {
// called when the play button is clicked
// and the music stopped playing
}
}
});
```
If no action is passed to `sendAction` a default name of "action"
is assumed.
```javascript
// app/components/next-button.js
export default Ember.Component.extend({
click() {
this.sendAction();
}
});
```
```handlebars
{{! app/templates/application.hbs }}
{{next-button action=(action "playNextSongInAlbum")}}
``` | playNextSongInAlbum() {
...
}
}
});
```
@method sendAction
@param [action] {String} the action to call
@param [params] {*} arguments for the action
@public
*/
sendAction(action, ...contexts) {
let actionName;
// Send the default action
if (action === undefined) {
action = 'action';
}
actionName = get(this, `attrs.${action}`) || get(this, action);
actionName = validateAction(this, actionName);
// If no action name for that action could be found, just abort.
if (actionName === undefined) { return; }
if (typeof actionName === 'function') {
actionName(...contexts);
} else {
this.triggerAction({
action: actionName,
actionContext: contexts
});
}
},
send(actionName, ...args) {
let target;
let action = this.actions && this.actions[actionName];
if (action) {
let shouldBubble = action.apply(this, args) === true;
if (!shouldBubble) { return; }
}
target = get(this, 'target');
if (target) {
assert(
'The `target` for ' + this + ' (' + target +
') does not have a `send` method',
typeof target.send === 'function'
);
target.send(...arguments);
} else {
assert(`${inspect(this)} had no action handler for: ${actionName}`, action);
}
}
}); |
```javascript
// app/controllers/application.js
App.ApplicationController = Ember.Controller.extend({
actions: { | random_line_split |
action_support.js | /**
@module ember
@submodule ember-views
*/
import { inspect } from 'ember-utils';
import { Mixin, get, isNone, assert } from 'ember-metal';
import { MUTABLE_CELL } from '../compat/attrs';
function validateAction(component, actionName) |
/**
@class ActionSupport
@namespace Ember
@private
*/
export default Mixin.create({
/**
Calls an action passed to a component.
For example a component for playing or pausing music may translate click events
into action notifications of "play" or "stop" depending on some internal state
of the component:
```javascript
// app/components/play-button.js
export default Ember.Component.extend({
click() {
if (this.get('isPlaying')) {
this.sendAction('play');
} else {
this.sendAction('stop');
}
}
});
```
The actions "play" and "stop" must be passed to this `play-button` component:
```handlebars
{{! app/templates/application.hbs }}
{{play-button play=(action "musicStarted") stop=(action "musicStopped")}}
```
When the component receives a browser `click` event it translate this
interaction into application-specific semantics ("play" or "stop") and
calls the specified action.
```javascript
// app/controller/application.js
export default Ember.Controller.extend({
actions: {
musicStarted() {
// called when the play button is clicked
// and the music started playing
},
musicStopped() {
// called when the play button is clicked
// and the music stopped playing
}
}
});
```
If no action is passed to `sendAction` a default name of "action"
is assumed.
```javascript
// app/components/next-button.js
export default Ember.Component.extend({
click() {
this.sendAction();
}
});
```
```handlebars
{{! app/templates/application.hbs }}
{{next-button action=(action "playNextSongInAlbum")}}
```
```javascript
// app/controllers/application.js
App.ApplicationController = Ember.Controller.extend({
actions: {
playNextSongInAlbum() {
...
}
}
});
```
@method sendAction
@param [action] {String} the action to call
@param [params] {*} arguments for the action
@public
*/
sendAction(action, ...contexts) {
let actionName;
// Send the default action
if (action === undefined) {
action = 'action';
}
actionName = get(this, `attrs.${action}`) || get(this, action);
actionName = validateAction(this, actionName);
// If no action name for that action could be found, just abort.
if (actionName === undefined) { return; }
if (typeof actionName === 'function') {
actionName(...contexts);
} else {
this.triggerAction({
action: actionName,
actionContext: contexts
});
}
},
send(actionName, ...args) {
let target;
let action = this.actions && this.actions[actionName];
if (action) {
let shouldBubble = action.apply(this, args) === true;
if (!shouldBubble) { return; }
}
target = get(this, 'target');
if (target) {
assert(
'The `target` for ' + this + ' (' + target +
') does not have a `send` method',
typeof target.send === 'function'
);
target.send(...arguments);
} else {
assert(`${inspect(this)} had no action handler for: ${actionName}`, action);
}
}
});
| {
if (actionName && actionName[MUTABLE_CELL]) {
actionName = actionName.value;
}
assert(
'The default action was triggered on the component ' + component.toString() +
', but the action name (' + actionName + ') was not a string.',
isNone(actionName) || typeof actionName === 'string' || typeof actionName === 'function'
);
return actionName;
} | identifier_body |
app.js | //Express, Mongo & Environment specific imports
var express = require('express');
var morgan = require('morgan');
var serveStatic = require('serve-static');
var bodyParser = require('body-parser');
var cookieParser = require('cookie-parser');
var compression = require('compression');
var errorHandler = require('errorhandler');
var mongo = require('./api/config/db');
var env = require('./api/config/env');
// Controllers/Routes import
var BookController = require('./api/controller/BookController');
//MongoDB setup
mongo.createConnection(env.mongoUrl);
//Express setup
var app = express();
//Express middleware
app.use(morgan('short'));
app.use(serveStatic(__dirname + '/app'));
app.use(bodyParser.json());
app.use(bodyParser.urlencoded({ extended: false }));
app.use(cookieParser());
app.use(compression());
var environment = process.env.NODE_ENV || 'development';
if ('development' == environment) |
// Route definitions
app.get('/api/books', BookController.list);
app.get('/api/books/:id', BookController.show);
app.post('api/books', BookController.create);
app.put('/api/books/:id', BookController.update);
app.delete('/api/books/:id', BookController.remove);
var server = app.listen(3000, function () {
var host = server.address().address;
var port = server.address().port;
console.log('Books app listening at http://%s:%s', host, port);
console.log("Configured MongoDB URL: " + env.mongoUrl);
});
| {
app.use(errorHandler({ dumpExceptions: true, showStack: true }));
var ImportController = require('./api/controller/ImportController');
app.get('/import', ImportController.import);
app.get('/import/reset', ImportController.reset);
} | conditional_block |
app.js | //Express, Mongo & Environment specific imports
var express = require('express');
var morgan = require('morgan');
var serveStatic = require('serve-static');
var bodyParser = require('body-parser');
var cookieParser = require('cookie-parser');
var compression = require('compression');
var errorHandler = require('errorhandler');
var mongo = require('./api/config/db');
var env = require('./api/config/env');
// Controllers/Routes import
var BookController = require('./api/controller/BookController');
//MongoDB setup
mongo.createConnection(env.mongoUrl);
//Express setup
var app = express();
//Express middleware
app.use(morgan('short'));
app.use(serveStatic(__dirname + '/app'));
app.use(bodyParser.json());
app.use(bodyParser.urlencoded({ extended: false }));
app.use(cookieParser());
app.use(compression());
var environment = process.env.NODE_ENV || 'development';
if ('development' == environment) {
app.use(errorHandler({ dumpExceptions: true, showStack: true }));
var ImportController = require('./api/controller/ImportController');
app.get('/import', ImportController.import);
app.get('/import/reset', ImportController.reset);
}
| app.get('/api/books', BookController.list);
app.get('/api/books/:id', BookController.show);
app.post('api/books', BookController.create);
app.put('/api/books/:id', BookController.update);
app.delete('/api/books/:id', BookController.remove);
var server = app.listen(3000, function () {
var host = server.address().address;
var port = server.address().port;
console.log('Books app listening at http://%s:%s', host, port);
console.log("Configured MongoDB URL: " + env.mongoUrl);
}); |
// Route definitions | random_line_split |
cmp.rs | #![feature(core)]
extern crate core;
#[cfg(test)]
mod tests {
use core::cmp::Ordering::{self, Less, Equal, Greater};
// macro_rules! ord_impl {
// ($($t:ty)*) => ($(
// #[stable(feature = "rust1", since = "1.0.0")]
// impl Ord for $t {
// #[inline]
// fn cmp(&self, other: &$t) -> Ordering {
// if *self < *other { Less }
// else if *self > *other { Greater }
// else { Equal }
// }
// } | // }
// ord_impl! { char usize u8 u16 u32 u64 isize i8 i16 i32 i64 }
macro_rules! cmp_test {
($($t:ty)*) => ($({
let v1: $t = 68 as $t;
{
let result: Ordering = v1.cmp(&v1);
assert_eq!(result, Equal);
}
let v2: $t = 100 as $t;
{
let result: Ordering = v1.cmp(&v2);
assert_eq!(result, Less);
}
{
let result: Ordering = v2.cmp(&v1);
assert_eq!(result, Greater);
}
})*)
}
#[test]
fn cmp_test1() {
cmp_test! { char usize u8 u16 u32 u64 isize i8 i16 i32 i64 };
}
} | // )*) | random_line_split |
cmp.rs | #![feature(core)]
extern crate core;
#[cfg(test)]
mod tests {
use core::cmp::Ordering::{self, Less, Equal, Greater};
// macro_rules! ord_impl {
// ($($t:ty)*) => ($(
// #[stable(feature = "rust1", since = "1.0.0")]
// impl Ord for $t {
// #[inline]
// fn cmp(&self, other: &$t) -> Ordering {
// if *self < *other { Less }
// else if *self > *other { Greater }
// else { Equal }
// }
// }
// )*)
// }
// ord_impl! { char usize u8 u16 u32 u64 isize i8 i16 i32 i64 }
macro_rules! cmp_test {
($($t:ty)*) => ($({
let v1: $t = 68 as $t;
{
let result: Ordering = v1.cmp(&v1);
assert_eq!(result, Equal);
}
let v2: $t = 100 as $t;
{
let result: Ordering = v1.cmp(&v2);
assert_eq!(result, Less);
}
{
let result: Ordering = v2.cmp(&v1);
assert_eq!(result, Greater);
}
})*)
}
#[test]
fn cmp_test1() |
}
| {
cmp_test! { char usize u8 u16 u32 u64 isize i8 i16 i32 i64 };
} | identifier_body |
cmp.rs | #![feature(core)]
extern crate core;
#[cfg(test)]
mod tests {
use core::cmp::Ordering::{self, Less, Equal, Greater};
// macro_rules! ord_impl {
// ($($t:ty)*) => ($(
// #[stable(feature = "rust1", since = "1.0.0")]
// impl Ord for $t {
// #[inline]
// fn cmp(&self, other: &$t) -> Ordering {
// if *self < *other { Less }
// else if *self > *other { Greater }
// else { Equal }
// }
// }
// )*)
// }
// ord_impl! { char usize u8 u16 u32 u64 isize i8 i16 i32 i64 }
macro_rules! cmp_test {
($($t:ty)*) => ($({
let v1: $t = 68 as $t;
{
let result: Ordering = v1.cmp(&v1);
assert_eq!(result, Equal);
}
let v2: $t = 100 as $t;
{
let result: Ordering = v1.cmp(&v2);
assert_eq!(result, Less);
}
{
let result: Ordering = v2.cmp(&v1);
assert_eq!(result, Greater);
}
})*)
}
#[test]
fn | () {
cmp_test! { char usize u8 u16 u32 u64 isize i8 i16 i32 i64 };
}
}
| cmp_test1 | identifier_name |
fertility_rate_preprocess_gen_tmcf.py | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pandas as pd
import io
import csv
_DATA_URL = "https://ec.europa.eu/eurostat/estat-navtree-portlet-prod/BulkDownloadListing?file=data/demo_r_find3.tsv.gz"
_CLEANED_CSV = "./Eurostats_NUTS3_FRate_Age.csv"
_TMCF = "./Eurostats_NUTS3_FRate_Age.tmcf"
_OUTPUT_COLUMNS = [
'Date',
'GeoId',
'MeanMothersAge_BirthEvent',
'MedianMothersAge_BirthEvent',
'FertilityRate_Person_Female',
]
def translate_wide_to_long(data_url):
df = pd.read_csv(data_url, delimiter='\t')
assert df.head
header = list(df.columns.values)
years = header[1:]
# Pandas.melt() unpivots a DataFrame from wide format to long format.
df = pd.melt(df,
id_vars=header[0],
value_vars=years,
var_name='time',
value_name='value')
# Separate geo and unit columns.
new = df[header[0]].str.split(",", n=-1, expand=True)
df = df.join(
pd.DataFrame({
'geo': new[2],
'unit': new[1],
'indic_de': new[0]
}))
df["indicator_unit"] = df["indic_de"] + "_" + df["unit"]
df.drop(columns=[header[0]], inplace=True)
# Remove empty rows, clean values to have all digits.
df = df[df.value.str.contains('[0-9]')]
possible_flags = [' ', ':']
for flag in possible_flags:
df['value'] = df['value'].str.replace(flag, '')
df['value'] = pd.to_numeric(df['value'])
df = df.pivot_table(values='value',
index=['geo', 'time'],
columns=['indicator_unit'],
aggfunc='first').reset_index().rename_axis(None, axis=1)
return df
def | (df, cleaned_csv):
with open(cleaned_csv, 'w', newline='') as f_out:
writer = csv.DictWriter(f_out,
fieldnames=_OUTPUT_COLUMNS,
lineterminator='\n')
writer.writeheader()
for _, row in df.iterrows():
writer.writerow({
'Date': '%s' % (row['time'][:4]),
'GeoId': 'dcid:nuts/%s' % row['geo'],
'MeanMothersAge_BirthEvent': (row['AGEMOTH_YR']),
'MedianMothersAge_BirthEvent': (row['MEDAGEMOTH_YR']),
'FertilityRate_Person_Female': (row['TOTFERRT_NR']),
})
def get_template_mcf():
# Automate Template MCF generation since there are many Statistical Variables.
TEMPLATE_MCF_TEMPLATE = """
Node: E:Eurostats_NUTS3_FRate_Age->E{index}
typeOf: dcs:StatVarObservation
variableMeasured: dcs:{stat_var}
observationAbout: C:Eurostats_NUTS3_FRate_Age->GeoId
observationDate: C:Eurostats_NUTS3_FRate_Age->Date
value: C:Eurostats_NUTS3_FRate_Age->{stat_var}
measurementMethod: dcs:EurostatRegionalStatistics
"""
stat_vars = _OUTPUT_COLUMNS[2:]
with open(_TMCF, 'w', newline='') as f_out:
for i in range(len(stat_vars)):
f_out.write(
TEMPLATE_MCF_TEMPLATE.format_map({
'index': i,
'stat_var': _OUTPUT_COLUMNS[2:][i]
}))
if __name__ == "__main__":
preprocess(translate_wide_to_long(_DATA_URL), _CLEANED_CSV)
get_template_mcf()
| preprocess | identifier_name |
fertility_rate_preprocess_gen_tmcf.py | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pandas as pd
import io
import csv
_DATA_URL = "https://ec.europa.eu/eurostat/estat-navtree-portlet-prod/BulkDownloadListing?file=data/demo_r_find3.tsv.gz"
_CLEANED_CSV = "./Eurostats_NUTS3_FRate_Age.csv"
_TMCF = "./Eurostats_NUTS3_FRate_Age.tmcf"
_OUTPUT_COLUMNS = [
'Date',
'GeoId',
'MeanMothersAge_BirthEvent',
'MedianMothersAge_BirthEvent',
'FertilityRate_Person_Female',
]
def translate_wide_to_long(data_url):
df = pd.read_csv(data_url, delimiter='\t')
assert df.head
header = list(df.columns.values)
years = header[1:]
# Pandas.melt() unpivots a DataFrame from wide format to long format.
df = pd.melt(df,
id_vars=header[0],
value_vars=years,
var_name='time',
value_name='value')
# Separate geo and unit columns.
new = df[header[0]].str.split(",", n=-1, expand=True)
df = df.join(
pd.DataFrame({
'geo': new[2],
'unit': new[1],
'indic_de': new[0]
}))
df["indicator_unit"] = df["indic_de"] + "_" + df["unit"]
df.drop(columns=[header[0]], inplace=True)
# Remove empty rows, clean values to have all digits.
df = df[df.value.str.contains('[0-9]')]
possible_flags = [' ', ':']
for flag in possible_flags:
|
df['value'] = pd.to_numeric(df['value'])
df = df.pivot_table(values='value',
index=['geo', 'time'],
columns=['indicator_unit'],
aggfunc='first').reset_index().rename_axis(None, axis=1)
return df
def preprocess(df, cleaned_csv):
with open(cleaned_csv, 'w', newline='') as f_out:
writer = csv.DictWriter(f_out,
fieldnames=_OUTPUT_COLUMNS,
lineterminator='\n')
writer.writeheader()
for _, row in df.iterrows():
writer.writerow({
'Date': '%s' % (row['time'][:4]),
'GeoId': 'dcid:nuts/%s' % row['geo'],
'MeanMothersAge_BirthEvent': (row['AGEMOTH_YR']),
'MedianMothersAge_BirthEvent': (row['MEDAGEMOTH_YR']),
'FertilityRate_Person_Female': (row['TOTFERRT_NR']),
})
def get_template_mcf():
# Automate Template MCF generation since there are many Statistical Variables.
TEMPLATE_MCF_TEMPLATE = """
Node: E:Eurostats_NUTS3_FRate_Age->E{index}
typeOf: dcs:StatVarObservation
variableMeasured: dcs:{stat_var}
observationAbout: C:Eurostats_NUTS3_FRate_Age->GeoId
observationDate: C:Eurostats_NUTS3_FRate_Age->Date
value: C:Eurostats_NUTS3_FRate_Age->{stat_var}
measurementMethod: dcs:EurostatRegionalStatistics
"""
stat_vars = _OUTPUT_COLUMNS[2:]
with open(_TMCF, 'w', newline='') as f_out:
for i in range(len(stat_vars)):
f_out.write(
TEMPLATE_MCF_TEMPLATE.format_map({
'index': i,
'stat_var': _OUTPUT_COLUMNS[2:][i]
}))
if __name__ == "__main__":
preprocess(translate_wide_to_long(_DATA_URL), _CLEANED_CSV)
get_template_mcf()
| df['value'] = df['value'].str.replace(flag, '') | conditional_block |
fertility_rate_preprocess_gen_tmcf.py | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pandas as pd
import io
import csv
_DATA_URL = "https://ec.europa.eu/eurostat/estat-navtree-portlet-prod/BulkDownloadListing?file=data/demo_r_find3.tsv.gz"
_CLEANED_CSV = "./Eurostats_NUTS3_FRate_Age.csv"
_TMCF = "./Eurostats_NUTS3_FRate_Age.tmcf"
_OUTPUT_COLUMNS = [
'Date',
'GeoId',
'MeanMothersAge_BirthEvent',
'MedianMothersAge_BirthEvent',
'FertilityRate_Person_Female',
]
def translate_wide_to_long(data_url):
df = pd.read_csv(data_url, delimiter='\t')
assert df.head
header = list(df.columns.values)
years = header[1:]
# Pandas.melt() unpivots a DataFrame from wide format to long format.
df = pd.melt(df,
id_vars=header[0],
value_vars=years,
var_name='time',
value_name='value')
# Separate geo and unit columns.
new = df[header[0]].str.split(",", n=-1, expand=True)
df = df.join(
pd.DataFrame({
'geo': new[2],
'unit': new[1],
'indic_de': new[0]
}))
df["indicator_unit"] = df["indic_de"] + "_" + df["unit"]
df.drop(columns=[header[0]], inplace=True)
# Remove empty rows, clean values to have all digits.
df = df[df.value.str.contains('[0-9]')]
possible_flags = [' ', ':']
for flag in possible_flags:
df['value'] = df['value'].str.replace(flag, '')
df['value'] = pd.to_numeric(df['value'])
df = df.pivot_table(values='value',
index=['geo', 'time'],
columns=['indicator_unit'],
aggfunc='first').reset_index().rename_axis(None, axis=1)
return df
def preprocess(df, cleaned_csv):
|
def get_template_mcf():
# Automate Template MCF generation since there are many Statistical Variables.
TEMPLATE_MCF_TEMPLATE = """
Node: E:Eurostats_NUTS3_FRate_Age->E{index}
typeOf: dcs:StatVarObservation
variableMeasured: dcs:{stat_var}
observationAbout: C:Eurostats_NUTS3_FRate_Age->GeoId
observationDate: C:Eurostats_NUTS3_FRate_Age->Date
value: C:Eurostats_NUTS3_FRate_Age->{stat_var}
measurementMethod: dcs:EurostatRegionalStatistics
"""
stat_vars = _OUTPUT_COLUMNS[2:]
with open(_TMCF, 'w', newline='') as f_out:
for i in range(len(stat_vars)):
f_out.write(
TEMPLATE_MCF_TEMPLATE.format_map({
'index': i,
'stat_var': _OUTPUT_COLUMNS[2:][i]
}))
if __name__ == "__main__":
preprocess(translate_wide_to_long(_DATA_URL), _CLEANED_CSV)
get_template_mcf()
| with open(cleaned_csv, 'w', newline='') as f_out:
writer = csv.DictWriter(f_out,
fieldnames=_OUTPUT_COLUMNS,
lineterminator='\n')
writer.writeheader()
for _, row in df.iterrows():
writer.writerow({
'Date': '%s' % (row['time'][:4]),
'GeoId': 'dcid:nuts/%s' % row['geo'],
'MeanMothersAge_BirthEvent': (row['AGEMOTH_YR']),
'MedianMothersAge_BirthEvent': (row['MEDAGEMOTH_YR']),
'FertilityRate_Person_Female': (row['TOTFERRT_NR']),
}) | identifier_body |
fertility_rate_preprocess_gen_tmcf.py | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pandas as pd
import io
import csv
_DATA_URL = "https://ec.europa.eu/eurostat/estat-navtree-portlet-prod/BulkDownloadListing?file=data/demo_r_find3.tsv.gz"
_CLEANED_CSV = "./Eurostats_NUTS3_FRate_Age.csv"
_TMCF = "./Eurostats_NUTS3_FRate_Age.tmcf"
_OUTPUT_COLUMNS = [
'Date',
'GeoId',
'MeanMothersAge_BirthEvent',
'MedianMothersAge_BirthEvent',
'FertilityRate_Person_Female',
]
def translate_wide_to_long(data_url):
df = pd.read_csv(data_url, delimiter='\t')
assert df.head
header = list(df.columns.values)
years = header[1:]
# Pandas.melt() unpivots a DataFrame from wide format to long format.
df = pd.melt(df,
id_vars=header[0],
value_vars=years,
var_name='time',
value_name='value')
# Separate geo and unit columns.
new = df[header[0]].str.split(",", n=-1, expand=True)
df = df.join(
pd.DataFrame({
'geo': new[2],
'unit': new[1],
'indic_de': new[0]
}))
df["indicator_unit"] = df["indic_de"] + "_" + df["unit"]
df.drop(columns=[header[0]], inplace=True)
# Remove empty rows, clean values to have all digits.
df = df[df.value.str.contains('[0-9]')] | df = df.pivot_table(values='value',
index=['geo', 'time'],
columns=['indicator_unit'],
aggfunc='first').reset_index().rename_axis(None, axis=1)
return df
def preprocess(df, cleaned_csv):
with open(cleaned_csv, 'w', newline='') as f_out:
writer = csv.DictWriter(f_out,
fieldnames=_OUTPUT_COLUMNS,
lineterminator='\n')
writer.writeheader()
for _, row in df.iterrows():
writer.writerow({
'Date': '%s' % (row['time'][:4]),
'GeoId': 'dcid:nuts/%s' % row['geo'],
'MeanMothersAge_BirthEvent': (row['AGEMOTH_YR']),
'MedianMothersAge_BirthEvent': (row['MEDAGEMOTH_YR']),
'FertilityRate_Person_Female': (row['TOTFERRT_NR']),
})
def get_template_mcf():
# Automate Template MCF generation since there are many Statistical Variables.
TEMPLATE_MCF_TEMPLATE = """
Node: E:Eurostats_NUTS3_FRate_Age->E{index}
typeOf: dcs:StatVarObservation
variableMeasured: dcs:{stat_var}
observationAbout: C:Eurostats_NUTS3_FRate_Age->GeoId
observationDate: C:Eurostats_NUTS3_FRate_Age->Date
value: C:Eurostats_NUTS3_FRate_Age->{stat_var}
measurementMethod: dcs:EurostatRegionalStatistics
"""
stat_vars = _OUTPUT_COLUMNS[2:]
with open(_TMCF, 'w', newline='') as f_out:
for i in range(len(stat_vars)):
f_out.write(
TEMPLATE_MCF_TEMPLATE.format_map({
'index': i,
'stat_var': _OUTPUT_COLUMNS[2:][i]
}))
if __name__ == "__main__":
preprocess(translate_wide_to_long(_DATA_URL), _CLEANED_CSV)
get_template_mcf() | possible_flags = [' ', ':']
for flag in possible_flags:
df['value'] = df['value'].str.replace(flag, '')
df['value'] = pd.to_numeric(df['value']) | random_line_split |
MyProfileSettings.tsx | import { MenuItem } from "app/Components/MenuItem"
import { presentEmailComposer } from "app/NativeModules/presentEmailComposer"
import { navigate } from "app/navigation/navigate"
import { GlobalStore, useFeatureFlag } from "app/store/GlobalStore"
import { Button, Flex, Sans, Separator, Spacer, useColor } from "palette"
import React from "react"
import { Alert, ScrollView } from "react-native"
import { Tab } from "../Favorites/Favorites"
export const MyProfileSettings: React.FC<{}> = () => {
const showOrderHistory = useFeatureFlag("AREnableOrderHistoryOption")
const showSavedAddresses = useFeatureFlag("AREnableSavedAddresses")
const darkModeSupport = useFeatureFlag("ARDarkModeSupport")
const color = useColor()
const separatorColor = color("black5")
return (
<ScrollView>
<Sans size="8" mx="2" mt="6">
{"Settings"}
</Sans>
<Spacer mt={3} mb={2} />
<SectionHeading title="FAVORITES" />
<Spacer my={1} />
<MenuItem title="Saved Alerts" onPress={() => navigate("my-profile/saved-search-alerts")} />
<Separator my={1} borderColor={separatorColor} />
<MenuItem
title="Follows"
onPress={() =>
navigate("favorites", {
passProps: {
initialTab: Tab.artists,
},
})
}
/>
<Separator my={1} borderColor={separatorColor} />
<Spacer my={2} />
<SectionHeading title="ACCOUNT SETTINGS" /> | <Spacer my={1} />
<MenuItem title="Account" onPress={() => navigate("my-account")} />
<Separator my={1} borderColor={separatorColor} />
{!!showOrderHistory && (
<>
<MenuItem title="Order History" onPress={() => navigate("/orders")} />
<Separator my={1} borderColor={separatorColor} />
</>
)}
<MenuItem title="Payment" onPress={() => navigate("my-profile/payment")} />
<Separator my={1} borderColor={separatorColor} />
{!!darkModeSupport && (
<>
<MenuItem title="Dark Mode" onPress={() => navigate("settings/dark-mode")} />
<Separator my={1} borderColor={separatorColor} />
</>
)}
{!!showSavedAddresses && (
<>
<MenuItem
title="Saved Addresses"
onPress={() => navigate("my-profile/saved-addresses")}
/>
<Separator my={1} borderColor={separatorColor} />
</>
)}
<MenuItem
title="Push Notifications"
onPress={() => navigate("my-profile/push-notifications")}
/>
<Separator my={1} borderColor={separatorColor} />
<MenuItem
title="Send Feedback"
onPress={() => presentEmailComposer("support@artsy.net", "Feedback from the Artsy app")}
/>
<Separator my={1} borderColor={separatorColor} />
<MenuItem title="Personal Data Request" onPress={() => navigate("privacy-request")} />
<Separator my={1} borderColor={separatorColor} />
<MenuItem title="About" onPress={() => navigate("about")} />
<Separator my={1} borderColor={separatorColor} />
<Flex
flexDirection="row"
alignItems="center"
justifyContent="center"
py={7.5}
px="2"
pr="15px"
>
<Button variant="fillDark" haptic onPress={confirmLogout} block>
Log Out{" "}
</Button>
</Flex>
<Spacer mb={1} />
</ScrollView>
)
}
export const SectionHeading: React.FC<{ title: string }> = ({ title }) => (
<Sans size="3" color="black100" mb="1" mx="2">
{title}
</Sans>
)
export function confirmLogout() {
Alert.alert("Log out?", "Are you sure you want to log out?", [
{
text: "Cancel",
style: "cancel",
},
{
text: "Log out",
style: "destructive",
onPress: () => GlobalStore.actions.auth.signOut(),
},
])
} | random_line_split | |
api.js | (function (root, factory) {
if (typeof module === 'object' && module.exports) {
module.exports = factory(require('jquery'), require('../common/utility'));
} else {
root.api = factory(root.jQuery, root.utility);
}
}(this, function ($, util) {
// API
var self = {};
// Object -> Promise[[Entity]]
self.searchEntity = function(query){
return get('/search/entity', { num: 10, q: query })
.then(format)
.catch(handleError);
// [Entity] -> [Entity]
function format(results){
// stringify id & rename keys (`primary_type` -> `primary_ext`; `description` -> `blurb`)
return results.map(function(result) {
var _result = Object.assign({}, result, {
primary_ext: result.primary_ext || result.primary_type,
blurb: result.blurb || result.description,
id: String(result.id)
});
delete _result.primary_type;
delete _result.description;
return _result;
});
}
// Error -> []
function handleError(err){
console.error('API request error: ', err);
return [];
}
};
// [EntityWithoutId] -> Promise[[Entity]]
self.createEntities = function(entities){
return post('/entities/bulk', formatReq(entities))
.then(formatResp);
// [Entity] -> [Entity]
function formatReq(entities){
return {
data: entities.map(function(entity){
return {
type: "entities",
attributes: entity
};
})
};
};
// [Entity] -> [Entity]
function formatResp(resp){
// copy, but stringify id
return resp.data.map(function(datum){
return Object.assign(
datum.attributes,
{ id: String(datum.attributes.id)}
);
});
}
};
// Integer, [Integer] -> Promise[[ListEntity]]
self.addEntitiesToList = function(listId, entityIds, reference){
return post('/lists/'+listId+'/entities/bulk', formatReq(entityIds))
.then(formatResp);
function formatReq(entityIds){
return {
data: entityIds.map(function(id){
return { type: 'entities', id: id };
}).concat({
type: 'references',
attributes: reference
})
};
};
function formatResp(resp){
return resp.data.map(function(datum){
return util.stringifyValues(datum.attributes);
});
}
};
// String, Integer -> Promise
// helpers
function get(url, queryParams){
return fetch(url + qs(queryParams), {
headers: headers(),
method: 'get',
credentials: 'include' // use auth tokens stored in session cookies
}).then(jsonify);
}
function post(url, payload){
return fetch(url, {
headers: headers(),
method: 'post',
credentials: 'include', // use auth tokens stored in session cookies
body: JSON.stringify(payload)
}).then(jsonify);
};
function patch(url, payload){
return fetch(url, {
headers: headers(),
method: 'PATCH',
credentials: 'include', // use auth tokens stored in session cookies
body: JSON.stringify(payload)
}).then(function(response) {
if (response.body) {
return jsonify(response);
} else {
return response;
}
});
};
function | (){
return {
'Accept': 'application/json, text/plain, */*',
'Content-Type': 'application/json',
'Littlesis-Request-Type': 'API',
// TODO: retrieve this w/o JQuery
'X-CSRF-Token': $("meta[name='csrf-token']").attr("content") || ""
};
}
function qs(queryParams){
return '?' + $.param(queryParams);
}
// Response -> Promise[Error|JSON]
function jsonify(response){
return response
.json()
.then(function(json){
return json.errors ?
Promise.reject(json.errors[0].title) :
Promise.resolve(json);
});
}
return self;
}));
| headers | identifier_name |
api.js | (function (root, factory) {
if (typeof module === 'object' && module.exports) | else {
root.api = factory(root.jQuery, root.utility);
}
}(this, function ($, util) {
// API
var self = {};
// Object -> Promise[[Entity]]
self.searchEntity = function(query){
return get('/search/entity', { num: 10, q: query })
.then(format)
.catch(handleError);
// [Entity] -> [Entity]
function format(results){
// stringify id & rename keys (`primary_type` -> `primary_ext`; `description` -> `blurb`)
return results.map(function(result) {
var _result = Object.assign({}, result, {
primary_ext: result.primary_ext || result.primary_type,
blurb: result.blurb || result.description,
id: String(result.id)
});
delete _result.primary_type;
delete _result.description;
return _result;
});
}
// Error -> []
function handleError(err){
console.error('API request error: ', err);
return [];
}
};
// [EntityWithoutId] -> Promise[[Entity]]
self.createEntities = function(entities){
return post('/entities/bulk', formatReq(entities))
.then(formatResp);
// [Entity] -> [Entity]
function formatReq(entities){
return {
data: entities.map(function(entity){
return {
type: "entities",
attributes: entity
};
})
};
};
// [Entity] -> [Entity]
function formatResp(resp){
// copy, but stringify id
return resp.data.map(function(datum){
return Object.assign(
datum.attributes,
{ id: String(datum.attributes.id)}
);
});
}
};
// Integer, [Integer] -> Promise[[ListEntity]]
self.addEntitiesToList = function(listId, entityIds, reference){
return post('/lists/'+listId+'/entities/bulk', formatReq(entityIds))
.then(formatResp);
function formatReq(entityIds){
return {
data: entityIds.map(function(id){
return { type: 'entities', id: id };
}).concat({
type: 'references',
attributes: reference
})
};
};
function formatResp(resp){
return resp.data.map(function(datum){
return util.stringifyValues(datum.attributes);
});
}
};
// String, Integer -> Promise
// helpers
function get(url, queryParams){
return fetch(url + qs(queryParams), {
headers: headers(),
method: 'get',
credentials: 'include' // use auth tokens stored in session cookies
}).then(jsonify);
}
function post(url, payload){
return fetch(url, {
headers: headers(),
method: 'post',
credentials: 'include', // use auth tokens stored in session cookies
body: JSON.stringify(payload)
}).then(jsonify);
};
function patch(url, payload){
return fetch(url, {
headers: headers(),
method: 'PATCH',
credentials: 'include', // use auth tokens stored in session cookies
body: JSON.stringify(payload)
}).then(function(response) {
if (response.body) {
return jsonify(response);
} else {
return response;
}
});
};
function headers(){
return {
'Accept': 'application/json, text/plain, */*',
'Content-Type': 'application/json',
'Littlesis-Request-Type': 'API',
// TODO: retrieve this w/o JQuery
'X-CSRF-Token': $("meta[name='csrf-token']").attr("content") || ""
};
}
function qs(queryParams){
return '?' + $.param(queryParams);
}
// Response -> Promise[Error|JSON]
function jsonify(response){
return response
.json()
.then(function(json){
return json.errors ?
Promise.reject(json.errors[0].title) :
Promise.resolve(json);
});
}
return self;
}));
| {
module.exports = factory(require('jquery'), require('../common/utility'));
} | conditional_block |
api.js | (function (root, factory) {
if (typeof module === 'object' && module.exports) {
module.exports = factory(require('jquery'), require('../common/utility'));
} else {
root.api = factory(root.jQuery, root.utility);
}
}(this, function ($, util) {
// API
var self = {};
// Object -> Promise[[Entity]]
self.searchEntity = function(query){
return get('/search/entity', { num: 10, q: query })
.then(format)
.catch(handleError);
// [Entity] -> [Entity]
function format(results){
// stringify id & rename keys (`primary_type` -> `primary_ext`; `description` -> `blurb`)
return results.map(function(result) {
var _result = Object.assign({}, result, {
primary_ext: result.primary_ext || result.primary_type,
blurb: result.blurb || result.description,
id: String(result.id)
});
delete _result.primary_type;
delete _result.description;
return _result;
});
}
// Error -> []
function handleError(err){
console.error('API request error: ', err);
return [];
}
};
// [EntityWithoutId] -> Promise[[Entity]]
self.createEntities = function(entities){
return post('/entities/bulk', formatReq(entities))
.then(formatResp);
// [Entity] -> [Entity]
function formatReq(entities){
return {
data: entities.map(function(entity){
return {
type: "entities",
attributes: entity
};
})
};
};
// [Entity] -> [Entity]
function formatResp(resp){
// copy, but stringify id
return resp.data.map(function(datum){
return Object.assign(
datum.attributes,
{ id: String(datum.attributes.id)}
);
});
}
};
// Integer, [Integer] -> Promise[[ListEntity]]
self.addEntitiesToList = function(listId, entityIds, reference){
return post('/lists/'+listId+'/entities/bulk', formatReq(entityIds))
.then(formatResp);
function formatReq(entityIds){
return {
data: entityIds.map(function(id){
return { type: 'entities', id: id };
}).concat({
type: 'references',
attributes: reference
})
};
};
function formatResp(resp){
return resp.data.map(function(datum){
return util.stringifyValues(datum.attributes);
});
}
};
// String, Integer -> Promise
// helpers
function get(url, queryParams){ | return fetch(url + qs(queryParams), {
headers: headers(),
method: 'get',
credentials: 'include' // use auth tokens stored in session cookies
}).then(jsonify);
}
function post(url, payload){
return fetch(url, {
headers: headers(),
method: 'post',
credentials: 'include', // use auth tokens stored in session cookies
body: JSON.stringify(payload)
}).then(jsonify);
};
function patch(url, payload){
return fetch(url, {
headers: headers(),
method: 'PATCH',
credentials: 'include', // use auth tokens stored in session cookies
body: JSON.stringify(payload)
}).then(function(response) {
if (response.body) {
return jsonify(response);
} else {
return response;
}
});
};
function headers(){
return {
'Accept': 'application/json, text/plain, */*',
'Content-Type': 'application/json',
'Littlesis-Request-Type': 'API',
// TODO: retrieve this w/o JQuery
'X-CSRF-Token': $("meta[name='csrf-token']").attr("content") || ""
};
}
function qs(queryParams){
return '?' + $.param(queryParams);
}
// Response -> Promise[Error|JSON]
function jsonify(response){
return response
.json()
.then(function(json){
return json.errors ?
Promise.reject(json.errors[0].title) :
Promise.resolve(json);
});
}
return self;
})); | random_line_split | |
api.js | (function (root, factory) {
if (typeof module === 'object' && module.exports) {
module.exports = factory(require('jquery'), require('../common/utility'));
} else {
root.api = factory(root.jQuery, root.utility);
}
}(this, function ($, util) {
// API
var self = {};
// Object -> Promise[[Entity]]
self.searchEntity = function(query){
return get('/search/entity', { num: 10, q: query })
.then(format)
.catch(handleError);
// [Entity] -> [Entity]
function format(results){
// stringify id & rename keys (`primary_type` -> `primary_ext`; `description` -> `blurb`)
return results.map(function(result) {
var _result = Object.assign({}, result, {
primary_ext: result.primary_ext || result.primary_type,
blurb: result.blurb || result.description,
id: String(result.id)
});
delete _result.primary_type;
delete _result.description;
return _result;
});
}
// Error -> []
function handleError(err) |
};
// [EntityWithoutId] -> Promise[[Entity]]
self.createEntities = function(entities){
return post('/entities/bulk', formatReq(entities))
.then(formatResp);
// [Entity] -> [Entity]
function formatReq(entities){
return {
data: entities.map(function(entity){
return {
type: "entities",
attributes: entity
};
})
};
};
// [Entity] -> [Entity]
function formatResp(resp){
// copy, but stringify id
return resp.data.map(function(datum){
return Object.assign(
datum.attributes,
{ id: String(datum.attributes.id)}
);
});
}
};
// Integer, [Integer] -> Promise[[ListEntity]]
self.addEntitiesToList = function(listId, entityIds, reference){
return post('/lists/'+listId+'/entities/bulk', formatReq(entityIds))
.then(formatResp);
function formatReq(entityIds){
return {
data: entityIds.map(function(id){
return { type: 'entities', id: id };
}).concat({
type: 'references',
attributes: reference
})
};
};
function formatResp(resp){
return resp.data.map(function(datum){
return util.stringifyValues(datum.attributes);
});
}
};
// String, Integer -> Promise
// helpers
function get(url, queryParams){
return fetch(url + qs(queryParams), {
headers: headers(),
method: 'get',
credentials: 'include' // use auth tokens stored in session cookies
}).then(jsonify);
}
function post(url, payload){
return fetch(url, {
headers: headers(),
method: 'post',
credentials: 'include', // use auth tokens stored in session cookies
body: JSON.stringify(payload)
}).then(jsonify);
};
function patch(url, payload){
return fetch(url, {
headers: headers(),
method: 'PATCH',
credentials: 'include', // use auth tokens stored in session cookies
body: JSON.stringify(payload)
}).then(function(response) {
if (response.body) {
return jsonify(response);
} else {
return response;
}
});
};
function headers(){
return {
'Accept': 'application/json, text/plain, */*',
'Content-Type': 'application/json',
'Littlesis-Request-Type': 'API',
// TODO: retrieve this w/o JQuery
'X-CSRF-Token': $("meta[name='csrf-token']").attr("content") || ""
};
}
function qs(queryParams){
return '?' + $.param(queryParams);
}
// Response -> Promise[Error|JSON]
function jsonify(response){
return response
.json()
.then(function(json){
return json.errors ?
Promise.reject(json.errors[0].title) :
Promise.resolve(json);
});
}
return self;
}));
| {
console.error('API request error: ', err);
return [];
} | identifier_body |
mod.rs | extern crate chrono;
extern crate chrono_tz;
extern crate clap;
extern crate ical;
use std::fs::File;
use std::io::BufReader;
use self::chrono::{DateTime, TimeZone};
use self::chrono_tz::{Tz, UTC};
use self::clap::SubCommand;
use self::ical::IcalParser;
#[derive(Debug)]
enum LineInfo {
TextInfo {
name: String,
value: Option<String>,
},
DateTimeInfo {
name: String,
value: DateTime<UTC>,
}
}
pub fn entry(m: &SubCommand) -> i32 {
let fname = m.matches.value_of("filename").unwrap();
let file = match File::open(fname) {
Ok(f) => f,
Err(f) => {
println!("{}", f);
return 1;
}
}; |
let input = IcalParser::new(BufReader::new(file));
for line in input {
let line = line.unwrap();
for evt in line.events {
println!("EVENT");
for prop in evt.properties {
let parsedval: LineInfo = match prop.value {
Some(mut v) => {
let tz = if (&v).ends_with("Z") {
v.pop();
UTC
} else {
match prop.params {
Some(params) => {
for &(ref pk, ref pv) in ¶ms {
if pk == "TZID" {
match pv[0].parse::<Tz>() {
Ok(tz) => tz,
Err(_) => UTC,
}
}
}
UTC
}
None => UTC,
}
};
match tz.datetime_from_str(&v, "%Y%m%dT%H%M%S") {
Ok(v) => LineInfo::DateTimeInfo { name: prop.name, value: v },
Err(_) => LineInfo::TextInfo { name: prop.name, value: Some(v) },
}
}
None => LineInfo::TextInfo { name: prop.name, value: None },
};
println!("{:?}", parsedval);
}
}
}
0
} | random_line_split | |
mod.rs | extern crate chrono;
extern crate chrono_tz;
extern crate clap;
extern crate ical;
use std::fs::File;
use std::io::BufReader;
use self::chrono::{DateTime, TimeZone};
use self::chrono_tz::{Tz, UTC};
use self::clap::SubCommand;
use self::ical::IcalParser;
#[derive(Debug)]
enum | {
TextInfo {
name: String,
value: Option<String>,
},
DateTimeInfo {
name: String,
value: DateTime<UTC>,
}
}
pub fn entry(m: &SubCommand) -> i32 {
let fname = m.matches.value_of("filename").unwrap();
let file = match File::open(fname) {
Ok(f) => f,
Err(f) => {
println!("{}", f);
return 1;
}
};
let input = IcalParser::new(BufReader::new(file));
for line in input {
let line = line.unwrap();
for evt in line.events {
println!("EVENT");
for prop in evt.properties {
let parsedval: LineInfo = match prop.value {
Some(mut v) => {
let tz = if (&v).ends_with("Z") {
v.pop();
UTC
} else {
match prop.params {
Some(params) => {
for &(ref pk, ref pv) in ¶ms {
if pk == "TZID" {
match pv[0].parse::<Tz>() {
Ok(tz) => tz,
Err(_) => UTC,
}
}
}
UTC
}
None => UTC,
}
};
match tz.datetime_from_str(&v, "%Y%m%dT%H%M%S") {
Ok(v) => LineInfo::DateTimeInfo { name: prop.name, value: v },
Err(_) => LineInfo::TextInfo { name: prop.name, value: Some(v) },
}
}
None => LineInfo::TextInfo { name: prop.name, value: None },
};
println!("{:?}", parsedval);
}
}
}
0
}
| LineInfo | identifier_name |
security-check.py | #!/usr/bin/env python3
# Copyright (c) 2015-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
'''
Perform basic ELF security checks on a series of executables.
Exit status will be 0 if successful, and the program will be silent.
Otherwise the exit status will be 1 and it will log which executables failed which checks.
Needs `readelf` (for ELF) and `objdump` (for PE).
'''
import subprocess
import sys
import os
READELF_CMD = os.getenv('READELF', '/usr/bin/readelf')
OBJDUMP_CMD = os.getenv('OBJDUMP', '/usr/bin/objdump')
NONFATAL = {} # checks which are non-fatal for now but only generate a warning
def check_ELF_PIE(executable):
'''
Check for position independent executable (PIE), allowing for address space randomization.
'''
p = subprocess.Popen([READELF_CMD, '-h', '-W', executable], stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE, universal_newlines=True)
(stdout, stderr) = p.communicate()
if p.returncode:
raise IOError('Error opening file')
ok = False
for line in stdout.splitlines():
line = line.split()
if len(line)>=2 and line[0] == 'Type:' and line[1] == 'DYN':
ok = True
return ok
def get_ELF_program_headers(executable):
'''Return type and flags for ELF program headers'''
p = subprocess.Popen([READELF_CMD, '-l', '-W', executable], stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE, universal_newlines=True)
(stdout, stderr) = p.communicate()
if p.returncode:
raise IOError('Error opening file')
in_headers = False
count = 0
headers = []
for line in stdout.splitlines():
if line.startswith('Program Headers:'):
in_headers = True
if line == '':
in_headers = False
if in_headers:
if count == 1: # header line
ofs_typ = line.find('Type')
ofs_offset = line.find('Offset')
ofs_flags = line.find('Flg')
ofs_align = line.find('Align')
if ofs_typ == -1 or ofs_offset == -1 or ofs_flags == -1 or ofs_align == -1:
raise ValueError('Cannot parse elfread -lW output')
elif count > 1:
typ = line[ofs_typ:ofs_offset].rstrip()
flags = line[ofs_flags:ofs_align].rstrip()
headers.append((typ, flags))
count += 1
return headers
def check_ELF_NX(executable):
'''
Check that no sections are writable and executable (including the stack)
'''
have_wx = False
have_gnu_stack = False
for (typ, flags) in get_ELF_program_headers(executable):
if typ == 'GNU_STACK':
have_gnu_stack = True
if 'W' in flags and 'E' in flags: # section is both writable and executable
have_wx = True
return have_gnu_stack and not have_wx
def check_ELF_RELRO(executable):
'''
Check for read-only relocations.
GNU_RELRO program header must exist
Dynamic section must have BIND_NOW flag
'''
have_gnu_relro = False
for (typ, flags) in get_ELF_program_headers(executable):
# Note: not checking flags == 'R': here as linkers set the permission differently
# This does not affect security: the permission flags of the GNU_RELRO program header are ignored, the PT_LOAD header determines the effective permissions.
# However, the dynamic linker need to write to this area so these are RW.
# Glibc itself takes care of mprotecting this area R after relocations are finished.
# See also https://marc.info/?l=binutils&m=1498883354122353
if typ == 'GNU_RELRO':
have_gnu_relro = True
have_bindnow = False
p = subprocess.Popen([READELF_CMD, '-d', '-W', executable], stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE, universal_newlines=True)
(stdout, stderr) = p.communicate()
if p.returncode:
raise IOError('Error opening file')
for line in stdout.splitlines():
tokens = line.split()
if len(tokens)>1 and tokens[1] == '(BIND_NOW)' or (len(tokens)>2 and tokens[1] == '(FLAGS)' and 'BIND_NOW' in tokens[2:]):
have_bindnow = True
return have_gnu_relro and have_bindnow
def check_ELF_Canary(executable):
'''
Check for use of stack canary
'''
p = subprocess.Popen([READELF_CMD, '--dyn-syms', '-W', executable], stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE, universal_newlines=True)
(stdout, stderr) = p.communicate()
if p.returncode:
raise IOError('Error opening file')
ok = False
for line in stdout.splitlines():
if '__stack_chk_fail' in line:
ok = True
return ok
def get_PE_dll_characteristics(executable):
'''
Get PE DllCharacteristics bits.
Returns a tuple (arch,bits) where arch is 'i386:x86-64' or 'i386'
and bits is the DllCharacteristics value.
'''
p = subprocess.Popen([OBJDUMP_CMD, '-x', executable], stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE, universal_newlines=True)
(stdout, stderr) = p.communicate()
if p.returncode:
raise IOError('Error opening file')
arch = ''
bits = 0
for line in stdout.splitlines():
|
return (arch,bits)
IMAGE_DLL_CHARACTERISTICS_HIGH_ENTROPY_VA = 0x0020
IMAGE_DLL_CHARACTERISTICS_DYNAMIC_BASE = 0x0040
IMAGE_DLL_CHARACTERISTICS_NX_COMPAT = 0x0100
def check_PE_DYNAMIC_BASE(executable):
'''PIE: DllCharacteristics bit 0x40 signifies dynamicbase (ASLR)'''
(arch,bits) = get_PE_dll_characteristics(executable)
reqbits = IMAGE_DLL_CHARACTERISTICS_DYNAMIC_BASE
return (bits & reqbits) == reqbits
# On 64 bit, must support high-entropy 64-bit address space layout randomization in addition to DYNAMIC_BASE
# to have secure ASLR.
def check_PE_HIGH_ENTROPY_VA(executable):
'''PIE: DllCharacteristics bit 0x20 signifies high-entropy ASLR'''
(arch,bits) = get_PE_dll_characteristics(executable)
if arch == 'i386:x86-64':
reqbits = IMAGE_DLL_CHARACTERISTICS_HIGH_ENTROPY_VA
else: # Unnecessary on 32-bit
assert(arch == 'i386')
reqbits = 0
return (bits & reqbits) == reqbits
def check_PE_NX(executable):
'''NX: DllCharacteristics bit 0x100 signifies nxcompat (DEP)'''
(arch,bits) = get_PE_dll_characteristics(executable)
return (bits & IMAGE_DLL_CHARACTERISTICS_NX_COMPAT) == IMAGE_DLL_CHARACTERISTICS_NX_COMPAT
CHECKS = {
'ELF': [
('PIE', check_ELF_PIE),
('NX', check_ELF_NX),
('RELRO', check_ELF_RELRO),
('Canary', check_ELF_Canary)
],
'PE': [
('DYNAMIC_BASE', check_PE_DYNAMIC_BASE),
('HIGH_ENTROPY_VA', check_PE_HIGH_ENTROPY_VA),
('NX', check_PE_NX)
]
}
def identify_executable(executable):
with open(filename, 'rb') as f:
magic = f.read(4)
if magic.startswith(b'MZ'):
return 'PE'
elif magic.startswith(b'\x7fELF'):
return 'ELF'
return None
if __name__ == '__main__':
retval = 0
for filename in sys.argv[1:]:
try:
etype = identify_executable(filename)
if etype is None:
print('%s: unknown format' % filename)
retval = 1
continue
failed = []
warning = []
for (name, func) in CHECKS[etype]:
if not func(filename):
if name in NONFATAL:
warning.append(name)
else:
failed.append(name)
if failed:
print('%s: failed %s' % (filename, ' '.join(failed)))
retval = 1
if warning:
print('%s: warning %s' % (filename, ' '.join(warning)))
except IOError:
print('%s: cannot open' % filename)
retval = 1
sys.exit(retval)
| tokens = line.split()
if len(tokens)>=2 and tokens[0] == 'architecture:':
arch = tokens[1].rstrip(',')
if len(tokens)>=2 and tokens[0] == 'DllCharacteristics':
bits = int(tokens[1],16) | conditional_block |
security-check.py | #!/usr/bin/env python3
# Copyright (c) 2015-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
'''
Perform basic ELF security checks on a series of executables.
Exit status will be 0 if successful, and the program will be silent.
Otherwise the exit status will be 1 and it will log which executables failed which checks.
Needs `readelf` (for ELF) and `objdump` (for PE).
'''
import subprocess
import sys
import os
READELF_CMD = os.getenv('READELF', '/usr/bin/readelf')
OBJDUMP_CMD = os.getenv('OBJDUMP', '/usr/bin/objdump')
NONFATAL = {} # checks which are non-fatal for now but only generate a warning
def check_ELF_PIE(executable):
'''
Check for position independent executable (PIE), allowing for address space randomization.
'''
p = subprocess.Popen([READELF_CMD, '-h', '-W', executable], stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE, universal_newlines=True)
(stdout, stderr) = p.communicate()
if p.returncode:
raise IOError('Error opening file')
ok = False
for line in stdout.splitlines():
line = line.split()
if len(line)>=2 and line[0] == 'Type:' and line[1] == 'DYN':
ok = True
return ok
def get_ELF_program_headers(executable):
'''Return type and flags for ELF program headers'''
p = subprocess.Popen([READELF_CMD, '-l', '-W', executable], stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE, universal_newlines=True)
(stdout, stderr) = p.communicate()
if p.returncode:
raise IOError('Error opening file')
in_headers = False
count = 0
headers = []
for line in stdout.splitlines():
if line.startswith('Program Headers:'):
in_headers = True
if line == '':
in_headers = False
if in_headers:
if count == 1: # header line
ofs_typ = line.find('Type')
ofs_offset = line.find('Offset')
ofs_flags = line.find('Flg')
ofs_align = line.find('Align')
if ofs_typ == -1 or ofs_offset == -1 or ofs_flags == -1 or ofs_align == -1:
raise ValueError('Cannot parse elfread -lW output')
elif count > 1:
typ = line[ofs_typ:ofs_offset].rstrip()
flags = line[ofs_flags:ofs_align].rstrip()
headers.append((typ, flags))
count += 1
return headers
def check_ELF_NX(executable):
'''
Check that no sections are writable and executable (including the stack)
'''
have_wx = False
have_gnu_stack = False
for (typ, flags) in get_ELF_program_headers(executable):
if typ == 'GNU_STACK':
have_gnu_stack = True
if 'W' in flags and 'E' in flags: # section is both writable and executable
have_wx = True
return have_gnu_stack and not have_wx
def check_ELF_RELRO(executable):
|
def check_ELF_Canary(executable):
'''
Check for use of stack canary
'''
p = subprocess.Popen([READELF_CMD, '--dyn-syms', '-W', executable], stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE, universal_newlines=True)
(stdout, stderr) = p.communicate()
if p.returncode:
raise IOError('Error opening file')
ok = False
for line in stdout.splitlines():
if '__stack_chk_fail' in line:
ok = True
return ok
def get_PE_dll_characteristics(executable):
'''
Get PE DllCharacteristics bits.
Returns a tuple (arch,bits) where arch is 'i386:x86-64' or 'i386'
and bits is the DllCharacteristics value.
'''
p = subprocess.Popen([OBJDUMP_CMD, '-x', executable], stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE, universal_newlines=True)
(stdout, stderr) = p.communicate()
if p.returncode:
raise IOError('Error opening file')
arch = ''
bits = 0
for line in stdout.splitlines():
tokens = line.split()
if len(tokens)>=2 and tokens[0] == 'architecture:':
arch = tokens[1].rstrip(',')
if len(tokens)>=2 and tokens[0] == 'DllCharacteristics':
bits = int(tokens[1],16)
return (arch,bits)
IMAGE_DLL_CHARACTERISTICS_HIGH_ENTROPY_VA = 0x0020
IMAGE_DLL_CHARACTERISTICS_DYNAMIC_BASE = 0x0040
IMAGE_DLL_CHARACTERISTICS_NX_COMPAT = 0x0100
def check_PE_DYNAMIC_BASE(executable):
'''PIE: DllCharacteristics bit 0x40 signifies dynamicbase (ASLR)'''
(arch,bits) = get_PE_dll_characteristics(executable)
reqbits = IMAGE_DLL_CHARACTERISTICS_DYNAMIC_BASE
return (bits & reqbits) == reqbits
# On 64 bit, must support high-entropy 64-bit address space layout randomization in addition to DYNAMIC_BASE
# to have secure ASLR.
def check_PE_HIGH_ENTROPY_VA(executable):
'''PIE: DllCharacteristics bit 0x20 signifies high-entropy ASLR'''
(arch,bits) = get_PE_dll_characteristics(executable)
if arch == 'i386:x86-64':
reqbits = IMAGE_DLL_CHARACTERISTICS_HIGH_ENTROPY_VA
else: # Unnecessary on 32-bit
assert(arch == 'i386')
reqbits = 0
return (bits & reqbits) == reqbits
def check_PE_NX(executable):
'''NX: DllCharacteristics bit 0x100 signifies nxcompat (DEP)'''
(arch,bits) = get_PE_dll_characteristics(executable)
return (bits & IMAGE_DLL_CHARACTERISTICS_NX_COMPAT) == IMAGE_DLL_CHARACTERISTICS_NX_COMPAT
CHECKS = {
'ELF': [
('PIE', check_ELF_PIE),
('NX', check_ELF_NX),
('RELRO', check_ELF_RELRO),
('Canary', check_ELF_Canary)
],
'PE': [
('DYNAMIC_BASE', check_PE_DYNAMIC_BASE),
('HIGH_ENTROPY_VA', check_PE_HIGH_ENTROPY_VA),
('NX', check_PE_NX)
]
}
def identify_executable(executable):
with open(filename, 'rb') as f:
magic = f.read(4)
if magic.startswith(b'MZ'):
return 'PE'
elif magic.startswith(b'\x7fELF'):
return 'ELF'
return None
if __name__ == '__main__':
retval = 0
for filename in sys.argv[1:]:
try:
etype = identify_executable(filename)
if etype is None:
print('%s: unknown format' % filename)
retval = 1
continue
failed = []
warning = []
for (name, func) in CHECKS[etype]:
if not func(filename):
if name in NONFATAL:
warning.append(name)
else:
failed.append(name)
if failed:
print('%s: failed %s' % (filename, ' '.join(failed)))
retval = 1
if warning:
print('%s: warning %s' % (filename, ' '.join(warning)))
except IOError:
print('%s: cannot open' % filename)
retval = 1
sys.exit(retval)
| '''
Check for read-only relocations.
GNU_RELRO program header must exist
Dynamic section must have BIND_NOW flag
'''
have_gnu_relro = False
for (typ, flags) in get_ELF_program_headers(executable):
# Note: not checking flags == 'R': here as linkers set the permission differently
# This does not affect security: the permission flags of the GNU_RELRO program header are ignored, the PT_LOAD header determines the effective permissions.
# However, the dynamic linker need to write to this area so these are RW.
# Glibc itself takes care of mprotecting this area R after relocations are finished.
# See also https://marc.info/?l=binutils&m=1498883354122353
if typ == 'GNU_RELRO':
have_gnu_relro = True
have_bindnow = False
p = subprocess.Popen([READELF_CMD, '-d', '-W', executable], stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE, universal_newlines=True)
(stdout, stderr) = p.communicate()
if p.returncode:
raise IOError('Error opening file')
for line in stdout.splitlines():
tokens = line.split()
if len(tokens)>1 and tokens[1] == '(BIND_NOW)' or (len(tokens)>2 and tokens[1] == '(FLAGS)' and 'BIND_NOW' in tokens[2:]):
have_bindnow = True
return have_gnu_relro and have_bindnow | identifier_body |
security-check.py | #!/usr/bin/env python3
# Copyright (c) 2015-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
'''
Perform basic ELF security checks on a series of executables.
Exit status will be 0 if successful, and the program will be silent.
Otherwise the exit status will be 1 and it will log which executables failed which checks.
Needs `readelf` (for ELF) and `objdump` (for PE).
'''
import subprocess
import sys
import os
READELF_CMD = os.getenv('READELF', '/usr/bin/readelf')
OBJDUMP_CMD = os.getenv('OBJDUMP', '/usr/bin/objdump')
NONFATAL = {} # checks which are non-fatal for now but only generate a warning
def check_ELF_PIE(executable):
'''
Check for position independent executable (PIE), allowing for address space randomization.
'''
p = subprocess.Popen([READELF_CMD, '-h', '-W', executable], stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE, universal_newlines=True)
(stdout, stderr) = p.communicate()
if p.returncode:
raise IOError('Error opening file')
ok = False
for line in stdout.splitlines():
line = line.split()
if len(line)>=2 and line[0] == 'Type:' and line[1] == 'DYN':
ok = True
return ok
def | (executable):
'''Return type and flags for ELF program headers'''
p = subprocess.Popen([READELF_CMD, '-l', '-W', executable], stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE, universal_newlines=True)
(stdout, stderr) = p.communicate()
if p.returncode:
raise IOError('Error opening file')
in_headers = False
count = 0
headers = []
for line in stdout.splitlines():
if line.startswith('Program Headers:'):
in_headers = True
if line == '':
in_headers = False
if in_headers:
if count == 1: # header line
ofs_typ = line.find('Type')
ofs_offset = line.find('Offset')
ofs_flags = line.find('Flg')
ofs_align = line.find('Align')
if ofs_typ == -1 or ofs_offset == -1 or ofs_flags == -1 or ofs_align == -1:
raise ValueError('Cannot parse elfread -lW output')
elif count > 1:
typ = line[ofs_typ:ofs_offset].rstrip()
flags = line[ofs_flags:ofs_align].rstrip()
headers.append((typ, flags))
count += 1
return headers
def check_ELF_NX(executable):
'''
Check that no sections are writable and executable (including the stack)
'''
have_wx = False
have_gnu_stack = False
for (typ, flags) in get_ELF_program_headers(executable):
if typ == 'GNU_STACK':
have_gnu_stack = True
if 'W' in flags and 'E' in flags: # section is both writable and executable
have_wx = True
return have_gnu_stack and not have_wx
def check_ELF_RELRO(executable):
'''
Check for read-only relocations.
GNU_RELRO program header must exist
Dynamic section must have BIND_NOW flag
'''
have_gnu_relro = False
for (typ, flags) in get_ELF_program_headers(executable):
# Note: not checking flags == 'R': here as linkers set the permission differently
# This does not affect security: the permission flags of the GNU_RELRO program header are ignored, the PT_LOAD header determines the effective permissions.
# However, the dynamic linker need to write to this area so these are RW.
# Glibc itself takes care of mprotecting this area R after relocations are finished.
# See also https://marc.info/?l=binutils&m=1498883354122353
if typ == 'GNU_RELRO':
have_gnu_relro = True
have_bindnow = False
p = subprocess.Popen([READELF_CMD, '-d', '-W', executable], stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE, universal_newlines=True)
(stdout, stderr) = p.communicate()
if p.returncode:
raise IOError('Error opening file')
for line in stdout.splitlines():
tokens = line.split()
if len(tokens)>1 and tokens[1] == '(BIND_NOW)' or (len(tokens)>2 and tokens[1] == '(FLAGS)' and 'BIND_NOW' in tokens[2:]):
have_bindnow = True
return have_gnu_relro and have_bindnow
def check_ELF_Canary(executable):
'''
Check for use of stack canary
'''
p = subprocess.Popen([READELF_CMD, '--dyn-syms', '-W', executable], stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE, universal_newlines=True)
(stdout, stderr) = p.communicate()
if p.returncode:
raise IOError('Error opening file')
ok = False
for line in stdout.splitlines():
if '__stack_chk_fail' in line:
ok = True
return ok
def get_PE_dll_characteristics(executable):
'''
Get PE DllCharacteristics bits.
Returns a tuple (arch,bits) where arch is 'i386:x86-64' or 'i386'
and bits is the DllCharacteristics value.
'''
p = subprocess.Popen([OBJDUMP_CMD, '-x', executable], stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE, universal_newlines=True)
(stdout, stderr) = p.communicate()
if p.returncode:
raise IOError('Error opening file')
arch = ''
bits = 0
for line in stdout.splitlines():
tokens = line.split()
if len(tokens)>=2 and tokens[0] == 'architecture:':
arch = tokens[1].rstrip(',')
if len(tokens)>=2 and tokens[0] == 'DllCharacteristics':
bits = int(tokens[1],16)
return (arch,bits)
IMAGE_DLL_CHARACTERISTICS_HIGH_ENTROPY_VA = 0x0020
IMAGE_DLL_CHARACTERISTICS_DYNAMIC_BASE = 0x0040
IMAGE_DLL_CHARACTERISTICS_NX_COMPAT = 0x0100
def check_PE_DYNAMIC_BASE(executable):
'''PIE: DllCharacteristics bit 0x40 signifies dynamicbase (ASLR)'''
(arch,bits) = get_PE_dll_characteristics(executable)
reqbits = IMAGE_DLL_CHARACTERISTICS_DYNAMIC_BASE
return (bits & reqbits) == reqbits
# On 64 bit, must support high-entropy 64-bit address space layout randomization in addition to DYNAMIC_BASE
# to have secure ASLR.
def check_PE_HIGH_ENTROPY_VA(executable):
'''PIE: DllCharacteristics bit 0x20 signifies high-entropy ASLR'''
(arch,bits) = get_PE_dll_characteristics(executable)
if arch == 'i386:x86-64':
reqbits = IMAGE_DLL_CHARACTERISTICS_HIGH_ENTROPY_VA
else: # Unnecessary on 32-bit
assert(arch == 'i386')
reqbits = 0
return (bits & reqbits) == reqbits
def check_PE_NX(executable):
'''NX: DllCharacteristics bit 0x100 signifies nxcompat (DEP)'''
(arch,bits) = get_PE_dll_characteristics(executable)
return (bits & IMAGE_DLL_CHARACTERISTICS_NX_COMPAT) == IMAGE_DLL_CHARACTERISTICS_NX_COMPAT
CHECKS = {
'ELF': [
('PIE', check_ELF_PIE),
('NX', check_ELF_NX),
('RELRO', check_ELF_RELRO),
('Canary', check_ELF_Canary)
],
'PE': [
('DYNAMIC_BASE', check_PE_DYNAMIC_BASE),
('HIGH_ENTROPY_VA', check_PE_HIGH_ENTROPY_VA),
('NX', check_PE_NX)
]
}
def identify_executable(executable):
with open(filename, 'rb') as f:
magic = f.read(4)
if magic.startswith(b'MZ'):
return 'PE'
elif magic.startswith(b'\x7fELF'):
return 'ELF'
return None
if __name__ == '__main__':
retval = 0
for filename in sys.argv[1:]:
try:
etype = identify_executable(filename)
if etype is None:
print('%s: unknown format' % filename)
retval = 1
continue
failed = []
warning = []
for (name, func) in CHECKS[etype]:
if not func(filename):
if name in NONFATAL:
warning.append(name)
else:
failed.append(name)
if failed:
print('%s: failed %s' % (filename, ' '.join(failed)))
retval = 1
if warning:
print('%s: warning %s' % (filename, ' '.join(warning)))
except IOError:
print('%s: cannot open' % filename)
retval = 1
sys.exit(retval)
| get_ELF_program_headers | identifier_name |
security-check.py | #!/usr/bin/env python3
# Copyright (c) 2015-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
'''
Perform basic ELF security checks on a series of executables.
Exit status will be 0 if successful, and the program will be silent.
Otherwise the exit status will be 1 and it will log which executables failed which checks.
Needs `readelf` (for ELF) and `objdump` (for PE).
'''
import subprocess
import sys
import os
READELF_CMD = os.getenv('READELF', '/usr/bin/readelf')
OBJDUMP_CMD = os.getenv('OBJDUMP', '/usr/bin/objdump')
NONFATAL = {} # checks which are non-fatal for now but only generate a warning
def check_ELF_PIE(executable):
'''
Check for position independent executable (PIE), allowing for address space randomization.
'''
p = subprocess.Popen([READELF_CMD, '-h', '-W', executable], stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE, universal_newlines=True)
(stdout, stderr) = p.communicate()
if p.returncode:
raise IOError('Error opening file')
ok = False
for line in stdout.splitlines():
line = line.split()
if len(line)>=2 and line[0] == 'Type:' and line[1] == 'DYN':
ok = True
return ok
def get_ELF_program_headers(executable):
'''Return type and flags for ELF program headers'''
p = subprocess.Popen([READELF_CMD, '-l', '-W', executable], stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE, universal_newlines=True)
(stdout, stderr) = p.communicate()
if p.returncode:
raise IOError('Error opening file')
in_headers = False
count = 0
headers = []
for line in stdout.splitlines():
if line.startswith('Program Headers:'):
in_headers = True
if line == '':
in_headers = False
if in_headers:
if count == 1: # header line
ofs_typ = line.find('Type')
ofs_offset = line.find('Offset')
ofs_flags = line.find('Flg')
ofs_align = line.find('Align')
if ofs_typ == -1 or ofs_offset == -1 or ofs_flags == -1 or ofs_align == -1:
raise ValueError('Cannot parse elfread -lW output')
elif count > 1:
typ = line[ofs_typ:ofs_offset].rstrip()
flags = line[ofs_flags:ofs_align].rstrip()
headers.append((typ, flags))
count += 1
return headers
def check_ELF_NX(executable):
'''
Check that no sections are writable and executable (including the stack)
'''
have_wx = False
have_gnu_stack = False
for (typ, flags) in get_ELF_program_headers(executable):
if typ == 'GNU_STACK':
have_gnu_stack = True
if 'W' in flags and 'E' in flags: # section is both writable and executable
have_wx = True
return have_gnu_stack and not have_wx
def check_ELF_RELRO(executable):
'''
Check for read-only relocations.
GNU_RELRO program header must exist
Dynamic section must have BIND_NOW flag
'''
have_gnu_relro = False
for (typ, flags) in get_ELF_program_headers(executable):
# Note: not checking flags == 'R': here as linkers set the permission differently
# This does not affect security: the permission flags of the GNU_RELRO program header are ignored, the PT_LOAD header determines the effective permissions.
# However, the dynamic linker need to write to this area so these are RW.
# Glibc itself takes care of mprotecting this area R after relocations are finished.
# See also https://marc.info/?l=binutils&m=1498883354122353
if typ == 'GNU_RELRO':
have_gnu_relro = True
have_bindnow = False
p = subprocess.Popen([READELF_CMD, '-d', '-W', executable], stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE, universal_newlines=True)
(stdout, stderr) = p.communicate()
if p.returncode:
raise IOError('Error opening file')
for line in stdout.splitlines():
tokens = line.split()
if len(tokens)>1 and tokens[1] == '(BIND_NOW)' or (len(tokens)>2 and tokens[1] == '(FLAGS)' and 'BIND_NOW' in tokens[2:]):
have_bindnow = True
return have_gnu_relro and have_bindnow
def check_ELF_Canary(executable):
'''
Check for use of stack canary
'''
p = subprocess.Popen([READELF_CMD, '--dyn-syms', '-W', executable], stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE, universal_newlines=True)
(stdout, stderr) = p.communicate()
if p.returncode:
raise IOError('Error opening file')
ok = False
for line in stdout.splitlines():
if '__stack_chk_fail' in line:
ok = True
return ok
def get_PE_dll_characteristics(executable):
'''
Get PE DllCharacteristics bits.
Returns a tuple (arch,bits) where arch is 'i386:x86-64' or 'i386'
and bits is the DllCharacteristics value.
'''
p = subprocess.Popen([OBJDUMP_CMD, '-x', executable], stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE, universal_newlines=True)
(stdout, stderr) = p.communicate()
if p.returncode:
raise IOError('Error opening file')
arch = ''
bits = 0
for line in stdout.splitlines():
tokens = line.split()
if len(tokens)>=2 and tokens[0] == 'architecture:':
arch = tokens[1].rstrip(',')
if len(tokens)>=2 and tokens[0] == 'DllCharacteristics':
bits = int(tokens[1],16)
return (arch,bits)
IMAGE_DLL_CHARACTERISTICS_HIGH_ENTROPY_VA = 0x0020
IMAGE_DLL_CHARACTERISTICS_DYNAMIC_BASE = 0x0040
IMAGE_DLL_CHARACTERISTICS_NX_COMPAT = 0x0100
def check_PE_DYNAMIC_BASE(executable):
'''PIE: DllCharacteristics bit 0x40 signifies dynamicbase (ASLR)'''
(arch,bits) = get_PE_dll_characteristics(executable)
reqbits = IMAGE_DLL_CHARACTERISTICS_DYNAMIC_BASE
return (bits & reqbits) == reqbits
# On 64 bit, must support high-entropy 64-bit address space layout randomization in addition to DYNAMIC_BASE
# to have secure ASLR.
def check_PE_HIGH_ENTROPY_VA(executable):
'''PIE: DllCharacteristics bit 0x20 signifies high-entropy ASLR'''
(arch,bits) = get_PE_dll_characteristics(executable)
if arch == 'i386:x86-64':
reqbits = IMAGE_DLL_CHARACTERISTICS_HIGH_ENTROPY_VA
else: # Unnecessary on 32-bit
assert(arch == 'i386')
reqbits = 0
return (bits & reqbits) == reqbits
def check_PE_NX(executable):
'''NX: DllCharacteristics bit 0x100 signifies nxcompat (DEP)'''
(arch,bits) = get_PE_dll_characteristics(executable)
return (bits & IMAGE_DLL_CHARACTERISTICS_NX_COMPAT) == IMAGE_DLL_CHARACTERISTICS_NX_COMPAT
CHECKS = {
'ELF': [
('PIE', check_ELF_PIE),
('NX', check_ELF_NX),
('RELRO', check_ELF_RELRO),
('Canary', check_ELF_Canary)
],
'PE': [
('DYNAMIC_BASE', check_PE_DYNAMIC_BASE),
('HIGH_ENTROPY_VA', check_PE_HIGH_ENTROPY_VA), | ('NX', check_PE_NX)
]
}
def identify_executable(executable):
with open(filename, 'rb') as f:
magic = f.read(4)
if magic.startswith(b'MZ'):
return 'PE'
elif magic.startswith(b'\x7fELF'):
return 'ELF'
return None
if __name__ == '__main__':
retval = 0
for filename in sys.argv[1:]:
try:
etype = identify_executable(filename)
if etype is None:
print('%s: unknown format' % filename)
retval = 1
continue
failed = []
warning = []
for (name, func) in CHECKS[etype]:
if not func(filename):
if name in NONFATAL:
warning.append(name)
else:
failed.append(name)
if failed:
print('%s: failed %s' % (filename, ' '.join(failed)))
retval = 1
if warning:
print('%s: warning %s' % (filename, ' '.join(warning)))
except IOError:
print('%s: cannot open' % filename)
retval = 1
sys.exit(retval) | random_line_split | |
voir.py | # -*-coding:Utf-8 -*
# Copyright (c) 2010-2017 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Fichier contenant le paramètre 'voir' de la commande 'chemin'."""
from primaires.format.fonctions import oui_ou_non
from primaires.interpreteur.masque.parametre import Parametre
from primaires.pnj.chemin import FLAGS
class PrmVoir(Parametre):
"""Commande 'chemin voir'.
"""
def __init__(self):
"""Constructeur du paramètre"""
Parametre.__init__(self, "voir", "view")
self.schema = "<cle>"
self.aide_courte = "affiche le détail d'un chemin"
self.aide_longue = \
"Cette commande permet d'obtenir plus d'informations sur " \
"un chemin (ses flags actifs, ses salles et sorties...)."
def ajouter(self):
"""Méthode appelée lors de l'ajout de la commande à l'interpréteur"""
cle = self.noeud.get_masque("cle")
cle.proprietes["regex"] = r"'[a-z0-9_:]{3,}'"
def interpreter(self, personnage, dic_masques):
"""Interprétation du paramètre"""
cle = dic_masques["cle"].cle
if cle not in importeur.pnj.chemins: | msg = "Détail sur le chemin {} :".format(chemin.cle)
msg += "\n Flags :"
for nom_flag in FLAGS.keys():
msg += "\n {}".format(nom_flag.capitalize())
msg += " : " + oui_ou_non(chemin.a_flag(nom_flag))
msg += "\n Salles du chemin :"
if len(chemin.salles) == 0:
msg += "\n Aucune"
else:
for salle, direction in chemin.salles.items():
msg += "\n " + salle.ident.ljust(20) + " "
msg += direction.ljust(10)
if salle in chemin.salles_retour and \
chemin.salles_retour[salle]:
msg += " (retour " + chemin.salles_retour[salle] + ")"
personnage << msg | personnage << "|err|Ce chemin n'existe pas.|ff|"
return
chemin = importeur.pnj.chemins[cle] | random_line_split |
voir.py | # -*-coding:Utf-8 -*
# Copyright (c) 2010-2017 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Fichier contenant le paramètre 'voir' de la commande 'chemin'."""
from primaires.format.fonctions import oui_ou_non
from primaires.interpreteur.masque.parametre import Parametre
from primaires.pnj.chemin import FLAGS
class PrmVoir(Parametre):
"""Commande 'chemin voir'.
"""
def __init__(self):
"""Constructeur du paramètre"""
Parametre.__init__(self, "voir", "view")
self.schema = "<cle>"
self.aide_courte = "affiche le détail d'un chemin"
self.aide_longue = \
"Cette commande permet d'obtenir plus d'informations sur " \
"un chemin (ses flags actifs, ses salles et sorties...)."
def ajouter(self):
"""Méthode appelée lors de l'ajout de la commande à l'interpréteur"""
cle = self.noeud.get_masque("cle")
cle.proprietes["regex"] = r"'[a-z0-9_:]{3,}'"
def interpreter(self, personnage, dic_masques):
"""Interprétation du paramètre"""
cle = dic_masques["cle"].cle
if cle not in importeur.pnj.chemins:
personnage << "|err|Ce chemin n'existe pas.|ff|"
return
chemin = importeur.pnj.chemins[cle]
msg = "Détail sur le chemin {} :".format(chemin.cle)
msg += "\n Flags :"
for nom_flag in FLAGS.keys():
msg += "\n {}".format(nom_flag.capitalize())
msg += " : " + oui_ou_non(chemin.a_flag(nom_flag))
msg += "\n Salles du chemin :"
if len(chemin.salles) == 0:
msg += "\n Aucune"
else:
for salle, direction in chemin.salles.items():
msg += "\n " + salle.ident.ljust(20) + " "
msg += direction.ljust(10)
if salle in chemin.salles_retour and \
chemin.salles_retour[salle]:
msg += " ( | personnage << msg
| retour " + chemin.salles_retour[salle] + ")"
| conditional_block |
voir.py | # -*-coding:Utf-8 -*
# Copyright (c) 2010-2017 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Fichier contenant le paramètre 'voir' de la commande 'chemin'."""
from primaires.format.fonctions import oui_ou_non
from primaires.interpreteur.masque.parametre import Parametre
from primaires.pnj.chemin import FLAGS
class PrmVoir(Parametre):
"""Commande 'chemin voir'.
"""
def __init__(self):
"""Constructeur du paramètre"""
Parametre.__init__(self, "voir", "view")
self.schema = "<cle>"
self.aide_courte = "affiche le détail d'un chemin"
self.aide_longue = \
"Cette commande permet d'obtenir plus d'informations sur " \
"un chemin (ses flags actifs, ses salles et sorties...)."
def ajouter(self):
""" | ef interpreter(self, personnage, dic_masques):
"""Interprétation du paramètre"""
cle = dic_masques["cle"].cle
if cle not in importeur.pnj.chemins:
personnage << "|err|Ce chemin n'existe pas.|ff|"
return
chemin = importeur.pnj.chemins[cle]
msg = "Détail sur le chemin {} :".format(chemin.cle)
msg += "\n Flags :"
for nom_flag in FLAGS.keys():
msg += "\n {}".format(nom_flag.capitalize())
msg += " : " + oui_ou_non(chemin.a_flag(nom_flag))
msg += "\n Salles du chemin :"
if len(chemin.salles) == 0:
msg += "\n Aucune"
else:
for salle, direction in chemin.salles.items():
msg += "\n " + salle.ident.ljust(20) + " "
msg += direction.ljust(10)
if salle in chemin.salles_retour and \
chemin.salles_retour[salle]:
msg += " (retour " + chemin.salles_retour[salle] + ")"
personnage << msg
| Méthode appelée lors de l'ajout de la commande à l'interpréteur"""
cle = self.noeud.get_masque("cle")
cle.proprietes["regex"] = r"'[a-z0-9_:]{3,}'"
d | identifier_body |
voir.py | # -*-coding:Utf-8 -*
# Copyright (c) 2010-2017 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Fichier contenant le paramètre 'voir' de la commande 'chemin'."""
from primaires.format.fonctions import oui_ou_non
from primaires.interpreteur.masque.parametre import Parametre
from primaires.pnj.chemin import FLAGS
class PrmVoir(Parametre):
"""Commande 'chemin voir'.
"""
def __init__(self):
"""Constructeur du paramètre"""
Parametre.__init__(self, "voir", "view")
self.schema = "<cle>"
self.aide_courte = "affiche le détail d'un chemin"
self.aide_longue = \
"Cette commande permet d'obtenir plus d'informations sur " \
"un chemin (ses flags actifs, ses salles et sorties...)."
def ajo | lf):
"""Méthode appelée lors de l'ajout de la commande à l'interpréteur"""
cle = self.noeud.get_masque("cle")
cle.proprietes["regex"] = r"'[a-z0-9_:]{3,}'"
def interpreter(self, personnage, dic_masques):
"""Interprétation du paramètre"""
cle = dic_masques["cle"].cle
if cle not in importeur.pnj.chemins:
personnage << "|err|Ce chemin n'existe pas.|ff|"
return
chemin = importeur.pnj.chemins[cle]
msg = "Détail sur le chemin {} :".format(chemin.cle)
msg += "\n Flags :"
for nom_flag in FLAGS.keys():
msg += "\n {}".format(nom_flag.capitalize())
msg += " : " + oui_ou_non(chemin.a_flag(nom_flag))
msg += "\n Salles du chemin :"
if len(chemin.salles) == 0:
msg += "\n Aucune"
else:
for salle, direction in chemin.salles.items():
msg += "\n " + salle.ident.ljust(20) + " "
msg += direction.ljust(10)
if salle in chemin.salles_retour and \
chemin.salles_retour[salle]:
msg += " (retour " + chemin.salles_retour[salle] + ")"
personnage << msg
| uter(se | identifier_name |
rack-add-modal.js | import Ember from "ember";
import ModalBaseView from "./modal-base";
import Form from "./mixins/form-modal-mixin";
import Full from "./mixins/full-modal-mixin";
import Save from "./mixins/object-action-mixin";
import Row from "mb-test-1/models/row";
import Rack from "mb-test-1/models/rack";
var RackAddModal = ModalBaseView.extend(Full, Form, Save, {
templateName: "modals/rack-add-modal",
elementId: "add-rack",
title: "Add a rack",
cancelButtonText: "Cancel",
submitButtonText: "Add",
successAlertText: "Your rack was created successfully",
rows: function() {
return Row.findAll();
}.property(),
rowsForSelect: Ember.computed.map('rows', function(o) {
return {label: o.get('name'), value: o.get('id')};
}),
preselect: function() {
var row_id = this.get('rowsForSelect.firstObject').value;
this.set('selectedRow', row_id);
}.observes('rowsForSelect.@each'),
model: function() {
return Rack.create();
}.property(),
onModelSaved: function(model) {
var controller = this.get('controller');
controller.transitionToRoute("rack", model.__json.id);
},
actions: {
save: function() {
this.save(this.get("model"));
}
}
});
RackAddModal.reopenClass({
open: function() {
return this.create({
selectedRow: null
});
},
}); |
export default RackAddModal; | random_line_split | |
http-request.service.ts | import { Http, Headers, Response, RequestOptions } from '@angular/http';
import { Injectable, Inject } from '@angular/core';
import { DefaultRequest } from '../models/default-request.model'
import { Observable } from 'rxjs';
import 'rxjs/add/operator/map'
import 'rxjs/add/operator/catch';
import { CookieService } from 'angular2-cookie/core';
@Injectable()
export class HttpRequest {
private cookieName: string = "test";
constructor(private http: Http,
@Inject("Config") private config: any,
private cookieService: CookieService) { }
post(data: DefaultRequest): Observable<any> |
private getOptions(): RequestOptions {
let headers;
if (this.config.MOCK) {
headers = new Headers({ 'Content-Type': 'application/json'});
}else{
headers = new Headers({ 'Content-Type': 'application/json', 'Access-Control-Allow-Origin': "*" });
}
let options = new RequestOptions({ headers: headers });
return options;
}
private extractData(res: Response) {
let body = res.json();
return body || {};
}
private error(error: any) {
return Observable.throw(error.json().error || 'Server error');
}
/**
* Create the default requestt object, and set the module name, endpoint and body (optional);
*/
createRequest(moduleName: string, endpoint: string, data?: any): DefaultRequest {
let cookie = this.cookieService.getObject(this.cookieName);
console.log(cookie);
return new DefaultRequest(
"string",
"string",
moduleName,
endpoint,
data
);
}
} | {
let fullURL = this.config.API_URL
if (this.config.MOCK) {
fullURL = fullURL + data.sei;
}
console.info("Post: " + fullURL);
let dataString = JSON.stringify(data); // Stringify
console.info("Data:", dataString);
console.info("Options", this.getOptions());
return this.http.post(fullURL, dataString, this.getOptions())
.map(this.extractData)
.catch(this.error);
} | identifier_body |
http-request.service.ts | import { Http, Headers, Response, RequestOptions } from '@angular/http';
import { Injectable, Inject } from '@angular/core';
import { DefaultRequest } from '../models/default-request.model'
import { Observable } from 'rxjs';
import 'rxjs/add/operator/map'
import 'rxjs/add/operator/catch';
import { CookieService } from 'angular2-cookie/core';
@Injectable()
export class HttpRequest {
private cookieName: string = "test";
constructor(private http: Http,
@Inject("Config") private config: any,
private cookieService: CookieService) { }
post(data: DefaultRequest): Observable<any> {
let fullURL = this.config.API_URL
if (this.config.MOCK) {
fullURL = fullURL + data.sei;
}
console.info("Post: " + fullURL);
let dataString = JSON.stringify(data); // Stringify
console.info("Data:", dataString);
console.info("Options", this.getOptions());
return this.http.post(fullURL, dataString, this.getOptions())
.map(this.extractData)
.catch(this.error);
}
private getOptions(): RequestOptions {
let headers;
if (this.config.MOCK) {
headers = new Headers({ 'Content-Type': 'application/json'});
}else{
headers = new Headers({ 'Content-Type': 'application/json', 'Access-Control-Allow-Origin': "*" });
}
let options = new RequestOptions({ headers: headers });
return options;
}
private extractData(res: Response) {
let body = res.json();
return body || {};
}
private error(error: any) {
return Observable.throw(error.json().error || 'Server error');
}
/**
* Create the default requestt object, and set the module name, endpoint and body (optional);
*/
createRequest(moduleName: string, endpoint: string, data?: any): DefaultRequest { | "string",
"string",
moduleName,
endpoint,
data
);
}
} | let cookie = this.cookieService.getObject(this.cookieName);
console.log(cookie);
return new DefaultRequest( | random_line_split |
http-request.service.ts | import { Http, Headers, Response, RequestOptions } from '@angular/http';
import { Injectable, Inject } from '@angular/core';
import { DefaultRequest } from '../models/default-request.model'
import { Observable } from 'rxjs';
import 'rxjs/add/operator/map'
import 'rxjs/add/operator/catch';
import { CookieService } from 'angular2-cookie/core';
@Injectable()
export class | {
private cookieName: string = "test";
constructor(private http: Http,
@Inject("Config") private config: any,
private cookieService: CookieService) { }
post(data: DefaultRequest): Observable<any> {
let fullURL = this.config.API_URL
if (this.config.MOCK) {
fullURL = fullURL + data.sei;
}
console.info("Post: " + fullURL);
let dataString = JSON.stringify(data); // Stringify
console.info("Data:", dataString);
console.info("Options", this.getOptions());
return this.http.post(fullURL, dataString, this.getOptions())
.map(this.extractData)
.catch(this.error);
}
private getOptions(): RequestOptions {
let headers;
if (this.config.MOCK) {
headers = new Headers({ 'Content-Type': 'application/json'});
}else{
headers = new Headers({ 'Content-Type': 'application/json', 'Access-Control-Allow-Origin': "*" });
}
let options = new RequestOptions({ headers: headers });
return options;
}
private extractData(res: Response) {
let body = res.json();
return body || {};
}
private error(error: any) {
return Observable.throw(error.json().error || 'Server error');
}
/**
* Create the default requestt object, and set the module name, endpoint and body (optional);
*/
createRequest(moduleName: string, endpoint: string, data?: any): DefaultRequest {
let cookie = this.cookieService.getObject(this.cookieName);
console.log(cookie);
return new DefaultRequest(
"string",
"string",
moduleName,
endpoint,
data
);
}
} | HttpRequest | identifier_name |
http-request.service.ts | import { Http, Headers, Response, RequestOptions } from '@angular/http';
import { Injectable, Inject } from '@angular/core';
import { DefaultRequest } from '../models/default-request.model'
import { Observable } from 'rxjs';
import 'rxjs/add/operator/map'
import 'rxjs/add/operator/catch';
import { CookieService } from 'angular2-cookie/core';
@Injectable()
export class HttpRequest {
private cookieName: string = "test";
constructor(private http: Http,
@Inject("Config") private config: any,
private cookieService: CookieService) { }
post(data: DefaultRequest): Observable<any> {
let fullURL = this.config.API_URL
if (this.config.MOCK) |
console.info("Post: " + fullURL);
let dataString = JSON.stringify(data); // Stringify
console.info("Data:", dataString);
console.info("Options", this.getOptions());
return this.http.post(fullURL, dataString, this.getOptions())
.map(this.extractData)
.catch(this.error);
}
private getOptions(): RequestOptions {
let headers;
if (this.config.MOCK) {
headers = new Headers({ 'Content-Type': 'application/json'});
}else{
headers = new Headers({ 'Content-Type': 'application/json', 'Access-Control-Allow-Origin': "*" });
}
let options = new RequestOptions({ headers: headers });
return options;
}
private extractData(res: Response) {
let body = res.json();
return body || {};
}
private error(error: any) {
return Observable.throw(error.json().error || 'Server error');
}
/**
* Create the default requestt object, and set the module name, endpoint and body (optional);
*/
createRequest(moduleName: string, endpoint: string, data?: any): DefaultRequest {
let cookie = this.cookieService.getObject(this.cookieName);
console.log(cookie);
return new DefaultRequest(
"string",
"string",
moduleName,
endpoint,
data
);
}
} | {
fullURL = fullURL + data.sei;
} | conditional_block |
tester.rs | use std::comm;
use std::fmt::Show;
use std::io::ChanWriter;
use std::iter;
use std::rand;
use std::task::TaskBuilder;
use super::{Arbitrary, Gen, Shrinker, StdGen};
use tester::trap::safe;
use tester::Status::{Discard, Fail, Pass};
/// The main QuickCheck type for setting configuration and running QuickCheck.
pub struct QuickCheck<G> {
tests: uint,
max_tests: uint,
gen: G,
}
impl QuickCheck<StdGen<rand::TaskRng>> {
/// Creates a new QuickCheck value.
///
/// This can be used to run QuickCheck on things that implement
/// `Testable`. You may also adjust the configuration, such as
/// the number of tests to run.
///
/// By default, the maximum number of passed tests is set to `100`,
/// the max number of overall tests is set to `10000` and the generator
/// is set to a `StdGen` with a default size of `100`.
pub fn new() -> QuickCheck<StdGen<rand::TaskRng>> {
QuickCheck {
tests: 100,
max_tests: 10000,
gen: StdGen::new(rand::task_rng(), 100),
}
}
}
impl<G: Gen> QuickCheck<G> {
/// Set the number of tests to run.
///
/// This actually refers to the maximum number of *passed* tests that
/// can occur. Namely, if a test causes a failure, future testing on that
/// property stops. Additionally, if tests are discarded, there may be
/// fewer than `tests` passed.
pub fn tests(mut self, tests: uint) -> QuickCheck<G> {
self.tests = tests;
self
}
/// Set the maximum number of tests to run.
///
/// The number of invocations of a property will never exceed this number.
/// This is necessary to cap the number of tests because QuickCheck
/// properties can discard tests.
pub fn max_tests(mut self, max_tests: uint) -> QuickCheck<G> {
self.max_tests = max_tests;
self
}
/// Set the random number generator to be used by QuickCheck.
pub fn gen(mut self, gen: G) -> QuickCheck<G> {
self.gen = gen;
self
}
/// Tests a property and returns the result.
///
/// The result returned is either the number of tests passed or a witness
/// of failure.
///
/// (If you're using Rust's unit testing infrastructure, then you'll
/// want to use the `quickcheck` method, which will `panic!` on failure.)
pub fn quicktest<A>(&mut self, f: A) -> Result<uint, TestResult>
where A: Testable {
let mut ntests: uint = 0;
for _ in iter::range(0, self.max_tests) {
if ntests >= self.tests {
break
}
let r = f.result(&mut self.gen);
match r.status {
Pass => ntests += 1,
Discard => continue,
Fail => return Err(r),
}
}
Ok(ntests)
}
/// Tests a property and calls `panic!` on failure.
///
/// The `panic!` message will include a (hopefully) minimal witness of
/// failure.
///
/// It is appropriate to use this method with Rust's unit testing
/// infrastructure.
///
/// Note that if the environment variable `RUST_LOG` is set to enable
/// `info` level log messages for the `quickcheck` crate, then this will
/// include output on how many QuickCheck tests were passed.
///
/// # Example
///
/// ```rust
/// use quickcheck::QuickCheck;
///
/// fn prop_reverse_reverse() {
/// fn revrev(xs: Vec<uint>) -> bool {
/// let rev: Vec<uint> = xs.clone().into_iter().rev().collect();
/// let revrev = rev.into_iter().rev().collect();
/// xs == revrev
/// }
/// QuickCheck::new().quickcheck(revrev);
/// }
/// ```
pub fn quickcheck<A>(&mut self, f: A) where A: Testable {
match self.quicktest(f) {
Ok(ntests) => info!("(Passed {} QuickCheck tests.)", ntests),
Err(result) => panic!(result.failed_msg()),
}
}
}
/// Convenience function for running QuickCheck.
///
/// This is an alias for `QuickCheck::new().quickcheck(f)`.
pub fn quickcheck<A: Testable>(f: A) { QuickCheck::new().quickcheck(f) }
/// Describes the status of a single instance of a test.
///
/// All testable things must be capable of producing a `TestResult`.
#[deriving(Clone, Show)]
pub struct TestResult {
status: Status,
arguments: Vec<String>,
err: String,
}
/// Whether a test has passed, failed or been discarded.
#[deriving(Clone, Show)]
enum | { Pass, Fail, Discard }
impl TestResult {
/// Produces a test result that indicates the current test has passed.
pub fn passed() -> TestResult { TestResult::from_bool(true) }
/// Produces a test result that indicates the current test has failed.
pub fn failed() -> TestResult { TestResult::from_bool(false) }
/// Produces a test result that indicates failure from a runtime
/// error.
pub fn error(msg: &str) -> TestResult {
let mut r = TestResult::from_bool(false);
r.err = msg.to_string();
r
}
/// Produces a test result that instructs `quickcheck` to ignore it.
/// This is useful for restricting the domain of your properties.
/// When a test is discarded, `quickcheck` will replace it with a
/// fresh one (up to a certain limit).
pub fn discard() -> TestResult {
TestResult {
status: Discard,
arguments: vec![],
err: "".to_string(),
}
}
/// Converts a `bool` to a `TestResult`. A `true` value indicates that
/// the test has passed and a `false` value indicates that the test
/// has failed.
pub fn from_bool(b: bool) -> TestResult {
TestResult {
status: if b { Pass } else { Fail },
arguments: vec![],
err: "".to_string(),
}
}
/// Tests if a "procedure" fails when executed. The test passes only if
/// `f` generates a task failure during its execution.
pub fn must_fail<T: Send>(f: proc(): Send -> T) -> TestResult {
let (tx, _) = comm::channel();
TestResult::from_bool(
TaskBuilder::new()
.stdout(box ChanWriter::new(tx.clone()))
.stderr(box ChanWriter::new(tx))
.try(f)
.is_err())
}
/// Returns `true` if and only if this test result describes a failing
/// test.
pub fn is_failure(&self) -> bool {
match self.status {
Fail => true,
Pass|Discard => false,
}
}
/// Returns `true` if and only if this test result describes a failing
/// test as a result of a run time error.
pub fn is_error(&self) -> bool {
self.is_failure() && self.err.len() > 0
}
fn failed_msg(&self) -> String {
if self.err.len() == 0 {
format!(
"[quickcheck] TEST FAILED. Arguments: ({})",
self.arguments.connect(", "))
} else {
format!(
"[quickcheck] TEST FAILED (runtime error). \
Arguments: ({})\nError: {}",
self.arguments.connect(", "), self.err)
}
}
}
/// `Testable` describes types (e.g., a function) whose values can be
/// tested.
///
/// Anything that can be tested must be capable of producing a `TestResult`
/// given a random number generator. This is trivial for types like `bool`,
/// which are just converted to either a passing or failing test result.
///
/// For functions, an implementation must generate random arguments
/// and potentially shrink those arguments if they produce a failure.
///
/// It's unlikely that you'll have to implement this trait yourself.
/// This comes with a caveat: currently, only functions with 4 parameters
/// or fewer (both `fn` and `||` types) satisfy `Testable`. If you have
/// functions to test with more than 4 parameters, please
/// [file a bug](https://github.com/BurntSushi/quickcheck/issues) and
/// I'll hopefully add it. (As of now, it would be very difficult to
/// add your own implementation outside of `quickcheck`, since the
/// functions that do shrinking are not public.)
pub trait Testable : Send {
fn result<G: Gen>(&self, &mut G) -> TestResult;
}
impl Testable for bool {
fn result<G: Gen>(&self, _: &mut G) -> TestResult {
TestResult::from_bool(*self)
}
}
impl Testable for TestResult {
fn result<G: Gen>(&self, _: &mut G) -> TestResult { self.clone() }
}
impl<A> Testable for Result<A, String> where A: Testable {
fn result<G: Gen>(&self, g: &mut G) -> TestResult {
match *self {
Ok(ref r) => r.result(g),
Err(ref err) => TestResult::error(err.as_slice()),
}
}
}
impl<T> Testable for fn() -> T where T: Testable {
fn result<G: Gen>(&self, g: &mut G) -> TestResult {
shrink::<G, T, (), (), (), (), fn() -> T>(g, self)
}
}
impl<A, T> Testable for fn(A) -> T where A: AShow, T: Testable {
fn result<G: Gen>(&self, g: &mut G) -> TestResult {
shrink::<G, T, A, (), (), (), fn(A) -> T>(g, self)
}
}
impl<A, B, T> Testable for fn(A, B) -> T
where A: AShow, B: AShow, T: Testable {
fn result<G: Gen>(&self, g: &mut G) -> TestResult {
shrink::<G, T, A, B, (), (), fn(A, B) -> T>(g, self)
}
}
impl<A, B, C, T> Testable for fn(A, B, C) -> T
where A: AShow, B: AShow, C: AShow, T: Testable {
fn result<G: Gen>(&self, g: &mut G) -> TestResult {
shrink::<G, T, A, B, C, (), fn(A, B, C) -> T>(g, self)
}
}
impl<A, B, C, D, T,> Testable for fn(A, B, C, D) -> T
where A: AShow, B: AShow, C: AShow, D: AShow, T: Testable {
fn result<G: Gen>(&self, g: &mut G) -> TestResult {
shrink::<G, T, A, B, C, D, fn(A, B, C, D) -> T>(g, self)
}
}
trait Fun<A, B, C, D, T> {
fn call<G>(&self, g: &mut G,
a: Option<&A>, b: Option<&B>,
c: Option<&C>, d: Option<&D>)
-> TestResult
where G: Gen;
}
macro_rules! impl_fun_call(
($f:expr, $g:expr, $($name:ident,)+) => ({
let ($($name,)*) = ($($name.unwrap(),)*);
let f = $f;
let mut r = {
let ($($name,)*) = ($(box $name.clone(),)*);
safe(proc() { f($(*$name,)*) }).result($g)
};
if r.is_failure() {
r.arguments = vec![$($name.to_string(),)*];
}
r
});
)
impl<A, B, C, D, T> Fun<A, B, C, D, T> for fn() -> T
where A: AShow, B: AShow, C: AShow, D: AShow, T: Testable {
fn call<G: Gen>(&self, g: &mut G,
_: Option<&A>, _: Option<&B>,
_: Option<&C>, _: Option<&D>)
-> TestResult {
let f = *self;
safe(proc() { f() }).result(g)
}
}
impl<A, B, C, D, T> Fun<A, B, C, D, T> for fn(A) -> T
where A: AShow, B: AShow, C: AShow, D: AShow, T: Testable {
fn call<G: Gen>(&self, g: &mut G,
a: Option<&A>, _: Option<&B>,
_: Option<&C>, _: Option<&D>)
-> TestResult {
impl_fun_call!(*self, g, a,)
}
}
impl<A, B, C, D, T> Fun<A, B, C, D, T> for fn(A, B) -> T
where A: AShow, B: AShow, C: AShow, D: AShow, T: Testable {
fn call<G: Gen>(&self, g: &mut G,
a: Option<&A>, b: Option<&B>,
_: Option<&C>, _: Option<&D>)
-> TestResult {
impl_fun_call!(*self, g, a, b,)
}
}
impl<A, B, C, D, T> Fun<A, B, C, D, T> for fn(A, B, C) -> T
where A: AShow, B: AShow, C: AShow, D: AShow, T: Testable {
fn call<G: Gen>(&self, g: &mut G,
a: Option<&A>, b: Option<&B>,
c: Option<&C>, _: Option<&D>)
-> TestResult {
impl_fun_call!(*self, g, a, b, c,)
}
}
impl<A, B, C, D, T> Fun<A, B, C, D, T> for fn(A, B, C, D) -> T
where A: AShow, B: AShow, C: AShow, D: AShow, T: Testable {
fn call<G: Gen>(&self, g: &mut G,
a: Option<&A>, b: Option<&B>,
c: Option<&C>, d: Option<&D>)
-> TestResult {
impl_fun_call!(*self, g, a, b, c, d,)
}
}
fn shrink<G, T, A, B, C, D, F>(g: &mut G, fun: &F) -> TestResult
where G: Gen, T: Testable, A: AShow, B: AShow, C: AShow, D: AShow,
F: Fun<A, B, C, D, T> {
let (a, b, c, d): (A, B, C, D) = arby(g);
let r = fun.call(g, Some(&a), Some(&b), Some(&c), Some(&d));
match r.status {
Pass|Discard => r,
Fail => shrink_failure(g, (a, b, c, d).shrink(), fun).unwrap_or(r),
}
}
fn shrink_failure<G, T, A, B, C, D, F>
(g: &mut G,
mut shrinker: Box<Shrinker<(A, B, C, D)>+'static>,
fun: &F)
-> Option<TestResult>
where G: Gen, T: Testable, A: AShow, B: AShow, C: AShow, D: AShow,
F: Fun<A, B, C, D, T> {
for (a, b, c, d) in shrinker {
let r = fun.call(g, Some(&a), Some(&b), Some(&c), Some(&d));
match r.status {
// The shrunk value does not witness a failure, so
// throw it away.
Pass|Discard => continue,
// The shrunk value *does* witness a failure, so keep trying
// to shrink it.
Fail => {
let shrunk = shrink_failure(g, (a, b, c, d).shrink(), fun);
// If we couldn't witness a failure on any shrunk value,
// then return the failure we already have.
return Some(shrunk.unwrap_or(r))
},
}
}
None
}
#[cfg(quickfail)]
mod trap {
pub fn safe<T: Send>(fun: proc() -> T) -> Result<T, String> {
Ok(fun())
}
}
#[cfg(not(quickfail))]
mod trap {
use std::comm::channel;
use std::io::{ChanReader, ChanWriter};
use std::task::TaskBuilder;
// This is my bright idea for capturing runtime errors caused by a
// test. Actually, it looks like rustc uses a similar approach.
// The problem is, this is used for *each* test case passed to a
// property, whereas rustc does it once for each test.
//
// I'm not entirely sure there's much of an alternative either.
// We could launch a single task and pass arguments over a channel,
// but the task would need to be restarted if it failed due to a
// runtime error. Since these are rare, it'd probably be more efficient
// then this approach, but it would also be more complex.
//
// Moreover, this feature seems to prevent an implementation of
// Testable for a stack closure type. *sigh*
pub fn safe<T: Send>(fun: proc():Send -> T) -> Result<T, String> {
let (send, recv) = channel();
let stdout = ChanWriter::new(send.clone());
let stderr = ChanWriter::new(send);
let mut reader = ChanReader::new(recv);
let t = TaskBuilder::new()
.named("safefn")
.stdout(box stdout)
.stderr(box stderr);
match t.try(fun) {
Ok(v) => Ok(v),
Err(_) => {
let s = reader.read_to_string().unwrap();
Err(s.as_slice().trim().into_string())
}
}
}
}
/// Convenient aliases.
trait AShow : Arbitrary + Show {}
impl<A: Arbitrary + Show> AShow for A {}
fn arby<A: Arbitrary, G: Gen>(g: &mut G) -> A { Arbitrary::arbitrary(g) }
| Status | identifier_name |
tester.rs | use std::comm;
use std::fmt::Show;
use std::io::ChanWriter;
use std::iter;
use std::rand;
use std::task::TaskBuilder;
use super::{Arbitrary, Gen, Shrinker, StdGen};
use tester::trap::safe;
use tester::Status::{Discard, Fail, Pass};
/// The main QuickCheck type for setting configuration and running QuickCheck.
pub struct QuickCheck<G> {
tests: uint,
max_tests: uint,
gen: G,
}
impl QuickCheck<StdGen<rand::TaskRng>> {
/// Creates a new QuickCheck value.
///
/// This can be used to run QuickCheck on things that implement
/// `Testable`. You may also adjust the configuration, such as
/// the number of tests to run.
///
/// By default, the maximum number of passed tests is set to `100`,
/// the max number of overall tests is set to `10000` and the generator
/// is set to a `StdGen` with a default size of `100`.
pub fn new() -> QuickCheck<StdGen<rand::TaskRng>> {
QuickCheck {
tests: 100,
max_tests: 10000,
gen: StdGen::new(rand::task_rng(), 100),
}
}
}
impl<G: Gen> QuickCheck<G> {
/// Set the number of tests to run.
///
/// This actually refers to the maximum number of *passed* tests that
/// can occur. Namely, if a test causes a failure, future testing on that
/// property stops. Additionally, if tests are discarded, there may be
/// fewer than `tests` passed.
pub fn tests(mut self, tests: uint) -> QuickCheck<G> {
self.tests = tests;
self
}
/// Set the maximum number of tests to run.
///
/// The number of invocations of a property will never exceed this number.
/// This is necessary to cap the number of tests because QuickCheck
/// properties can discard tests.
pub fn max_tests(mut self, max_tests: uint) -> QuickCheck<G> {
self.max_tests = max_tests;
self
}
/// Set the random number generator to be used by QuickCheck.
pub fn gen(mut self, gen: G) -> QuickCheck<G> {
self.gen = gen;
self
}
/// Tests a property and returns the result.
///
/// The result returned is either the number of tests passed or a witness
/// of failure.
///
/// (If you're using Rust's unit testing infrastructure, then you'll
/// want to use the `quickcheck` method, which will `panic!` on failure.)
pub fn quicktest<A>(&mut self, f: A) -> Result<uint, TestResult>
where A: Testable {
let mut ntests: uint = 0;
for _ in iter::range(0, self.max_tests) {
if ntests >= self.tests {
break
}
let r = f.result(&mut self.gen);
match r.status {
Pass => ntests += 1,
Discard => continue,
Fail => return Err(r),
}
}
Ok(ntests)
}
/// Tests a property and calls `panic!` on failure.
///
/// The `panic!` message will include a (hopefully) minimal witness of
/// failure.
///
/// It is appropriate to use this method with Rust's unit testing
/// infrastructure.
///
/// Note that if the environment variable `RUST_LOG` is set to enable
/// `info` level log messages for the `quickcheck` crate, then this will
/// include output on how many QuickCheck tests were passed.
///
/// # Example
///
/// ```rust
/// use quickcheck::QuickCheck;
///
/// fn prop_reverse_reverse() {
/// fn revrev(xs: Vec<uint>) -> bool {
/// let rev: Vec<uint> = xs.clone().into_iter().rev().collect();
/// let revrev = rev.into_iter().rev().collect();
/// xs == revrev
/// }
/// QuickCheck::new().quickcheck(revrev);
/// }
/// ```
pub fn quickcheck<A>(&mut self, f: A) where A: Testable {
match self.quicktest(f) {
Ok(ntests) => info!("(Passed {} QuickCheck tests.)", ntests),
Err(result) => panic!(result.failed_msg()),
}
}
}
/// Convenience function for running QuickCheck.
///
/// This is an alias for `QuickCheck::new().quickcheck(f)`.
pub fn quickcheck<A: Testable>(f: A) { QuickCheck::new().quickcheck(f) }
/// Describes the status of a single instance of a test.
///
/// All testable things must be capable of producing a `TestResult`.
#[deriving(Clone, Show)]
pub struct TestResult {
status: Status,
arguments: Vec<String>,
err: String,
}
/// Whether a test has passed, failed or been discarded.
#[deriving(Clone, Show)]
enum Status { Pass, Fail, Discard }
impl TestResult {
/// Produces a test result that indicates the current test has passed.
pub fn passed() -> TestResult { TestResult::from_bool(true) }
/// Produces a test result that indicates the current test has failed.
pub fn failed() -> TestResult { TestResult::from_bool(false) }
/// Produces a test result that indicates failure from a runtime
/// error.
pub fn error(msg: &str) -> TestResult {
let mut r = TestResult::from_bool(false);
r.err = msg.to_string();
r
}
/// Produces a test result that instructs `quickcheck` to ignore it.
/// This is useful for restricting the domain of your properties.
/// When a test is discarded, `quickcheck` will replace it with a
/// fresh one (up to a certain limit).
pub fn discard() -> TestResult {
TestResult {
status: Discard,
arguments: vec![],
err: "".to_string(),
}
}
/// Converts a `bool` to a `TestResult`. A `true` value indicates that
/// the test has passed and a `false` value indicates that the test
/// has failed.
pub fn from_bool(b: bool) -> TestResult {
TestResult {
status: if b { Pass } else { Fail },
arguments: vec![],
err: "".to_string(),
}
}
/// Tests if a "procedure" fails when executed. The test passes only if
/// `f` generates a task failure during its execution.
pub fn must_fail<T: Send>(f: proc(): Send -> T) -> TestResult {
let (tx, _) = comm::channel();
TestResult::from_bool(
TaskBuilder::new()
.stdout(box ChanWriter::new(tx.clone()))
.stderr(box ChanWriter::new(tx))
.try(f)
.is_err())
}
/// Returns `true` if and only if this test result describes a failing
/// test.
pub fn is_failure(&self) -> bool {
match self.status {
Fail => true,
Pass|Discard => false,
}
}
/// Returns `true` if and only if this test result describes a failing
/// test as a result of a run time error.
pub fn is_error(&self) -> bool {
self.is_failure() && self.err.len() > 0
}
fn failed_msg(&self) -> String {
if self.err.len() == 0 {
format!(
"[quickcheck] TEST FAILED. Arguments: ({})",
self.arguments.connect(", "))
} else {
format!(
"[quickcheck] TEST FAILED (runtime error). \
Arguments: ({})\nError: {}",
self.arguments.connect(", "), self.err)
}
}
}
/// `Testable` describes types (e.g., a function) whose values can be
/// tested.
///
/// Anything that can be tested must be capable of producing a `TestResult`
/// given a random number generator. This is trivial for types like `bool`,
/// which are just converted to either a passing or failing test result.
///
/// For functions, an implementation must generate random arguments
/// and potentially shrink those arguments if they produce a failure.
///
/// It's unlikely that you'll have to implement this trait yourself.
/// This comes with a caveat: currently, only functions with 4 parameters
/// or fewer (both `fn` and `||` types) satisfy `Testable`. If you have
/// functions to test with more than 4 parameters, please
/// [file a bug](https://github.com/BurntSushi/quickcheck/issues) and
/// I'll hopefully add it. (As of now, it would be very difficult to
/// add your own implementation outside of `quickcheck`, since the
/// functions that do shrinking are not public.)
pub trait Testable : Send {
fn result<G: Gen>(&self, &mut G) -> TestResult;
}
impl Testable for bool {
fn result<G: Gen>(&self, _: &mut G) -> TestResult {
TestResult::from_bool(*self)
}
}
impl Testable for TestResult {
fn result<G: Gen>(&self, _: &mut G) -> TestResult { self.clone() }
}
impl<A> Testable for Result<A, String> where A: Testable {
fn result<G: Gen>(&self, g: &mut G) -> TestResult {
match *self {
Ok(ref r) => r.result(g),
Err(ref err) => TestResult::error(err.as_slice()),
}
}
}
impl<T> Testable for fn() -> T where T: Testable {
fn result<G: Gen>(&self, g: &mut G) -> TestResult {
shrink::<G, T, (), (), (), (), fn() -> T>(g, self)
}
}
impl<A, T> Testable for fn(A) -> T where A: AShow, T: Testable {
fn result<G: Gen>(&self, g: &mut G) -> TestResult {
shrink::<G, T, A, (), (), (), fn(A) -> T>(g, self)
}
}
impl<A, B, T> Testable for fn(A, B) -> T
where A: AShow, B: AShow, T: Testable {
fn result<G: Gen>(&self, g: &mut G) -> TestResult {
shrink::<G, T, A, B, (), (), fn(A, B) -> T>(g, self)
}
}
impl<A, B, C, T> Testable for fn(A, B, C) -> T
where A: AShow, B: AShow, C: AShow, T: Testable {
fn result<G: Gen>(&self, g: &mut G) -> TestResult {
shrink::<G, T, A, B, C, (), fn(A, B, C) -> T>(g, self)
}
}
impl<A, B, C, D, T,> Testable for fn(A, B, C, D) -> T
where A: AShow, B: AShow, C: AShow, D: AShow, T: Testable {
fn result<G: Gen>(&self, g: &mut G) -> TestResult {
shrink::<G, T, A, B, C, D, fn(A, B, C, D) -> T>(g, self)
}
}
trait Fun<A, B, C, D, T> {
fn call<G>(&self, g: &mut G,
a: Option<&A>, b: Option<&B>,
c: Option<&C>, d: Option<&D>)
-> TestResult
where G: Gen;
}
macro_rules! impl_fun_call(
($f:expr, $g:expr, $($name:ident,)+) => ({
let ($($name,)*) = ($($name.unwrap(),)*);
let f = $f;
let mut r = {
let ($($name,)*) = ($(box $name.clone(),)*);
safe(proc() { f($(*$name,)*) }).result($g)
};
if r.is_failure() {
r.arguments = vec![$($name.to_string(),)*];
}
r
});
)
impl<A, B, C, D, T> Fun<A, B, C, D, T> for fn() -> T
where A: AShow, B: AShow, C: AShow, D: AShow, T: Testable {
fn call<G: Gen>(&self, g: &mut G,
_: Option<&A>, _: Option<&B>,
_: Option<&C>, _: Option<&D>)
-> TestResult |
}
impl<A, B, C, D, T> Fun<A, B, C, D, T> for fn(A) -> T
where A: AShow, B: AShow, C: AShow, D: AShow, T: Testable {
fn call<G: Gen>(&self, g: &mut G,
a: Option<&A>, _: Option<&B>,
_: Option<&C>, _: Option<&D>)
-> TestResult {
impl_fun_call!(*self, g, a,)
}
}
impl<A, B, C, D, T> Fun<A, B, C, D, T> for fn(A, B) -> T
where A: AShow, B: AShow, C: AShow, D: AShow, T: Testable {
fn call<G: Gen>(&self, g: &mut G,
a: Option<&A>, b: Option<&B>,
_: Option<&C>, _: Option<&D>)
-> TestResult {
impl_fun_call!(*self, g, a, b,)
}
}
impl<A, B, C, D, T> Fun<A, B, C, D, T> for fn(A, B, C) -> T
where A: AShow, B: AShow, C: AShow, D: AShow, T: Testable {
fn call<G: Gen>(&self, g: &mut G,
a: Option<&A>, b: Option<&B>,
c: Option<&C>, _: Option<&D>)
-> TestResult {
impl_fun_call!(*self, g, a, b, c,)
}
}
impl<A, B, C, D, T> Fun<A, B, C, D, T> for fn(A, B, C, D) -> T
where A: AShow, B: AShow, C: AShow, D: AShow, T: Testable {
fn call<G: Gen>(&self, g: &mut G,
a: Option<&A>, b: Option<&B>,
c: Option<&C>, d: Option<&D>)
-> TestResult {
impl_fun_call!(*self, g, a, b, c, d,)
}
}
fn shrink<G, T, A, B, C, D, F>(g: &mut G, fun: &F) -> TestResult
where G: Gen, T: Testable, A: AShow, B: AShow, C: AShow, D: AShow,
F: Fun<A, B, C, D, T> {
let (a, b, c, d): (A, B, C, D) = arby(g);
let r = fun.call(g, Some(&a), Some(&b), Some(&c), Some(&d));
match r.status {
Pass|Discard => r,
Fail => shrink_failure(g, (a, b, c, d).shrink(), fun).unwrap_or(r),
}
}
fn shrink_failure<G, T, A, B, C, D, F>
(g: &mut G,
mut shrinker: Box<Shrinker<(A, B, C, D)>+'static>,
fun: &F)
-> Option<TestResult>
where G: Gen, T: Testable, A: AShow, B: AShow, C: AShow, D: AShow,
F: Fun<A, B, C, D, T> {
for (a, b, c, d) in shrinker {
let r = fun.call(g, Some(&a), Some(&b), Some(&c), Some(&d));
match r.status {
// The shrunk value does not witness a failure, so
// throw it away.
Pass|Discard => continue,
// The shrunk value *does* witness a failure, so keep trying
// to shrink it.
Fail => {
let shrunk = shrink_failure(g, (a, b, c, d).shrink(), fun);
// If we couldn't witness a failure on any shrunk value,
// then return the failure we already have.
return Some(shrunk.unwrap_or(r))
},
}
}
None
}
#[cfg(quickfail)]
mod trap {
pub fn safe<T: Send>(fun: proc() -> T) -> Result<T, String> {
Ok(fun())
}
}
#[cfg(not(quickfail))]
mod trap {
use std::comm::channel;
use std::io::{ChanReader, ChanWriter};
use std::task::TaskBuilder;
// This is my bright idea for capturing runtime errors caused by a
// test. Actually, it looks like rustc uses a similar approach.
// The problem is, this is used for *each* test case passed to a
// property, whereas rustc does it once for each test.
//
// I'm not entirely sure there's much of an alternative either.
// We could launch a single task and pass arguments over a channel,
// but the task would need to be restarted if it failed due to a
// runtime error. Since these are rare, it'd probably be more efficient
// then this approach, but it would also be more complex.
//
// Moreover, this feature seems to prevent an implementation of
// Testable for a stack closure type. *sigh*
pub fn safe<T: Send>(fun: proc():Send -> T) -> Result<T, String> {
let (send, recv) = channel();
let stdout = ChanWriter::new(send.clone());
let stderr = ChanWriter::new(send);
let mut reader = ChanReader::new(recv);
let t = TaskBuilder::new()
.named("safefn")
.stdout(box stdout)
.stderr(box stderr);
match t.try(fun) {
Ok(v) => Ok(v),
Err(_) => {
let s = reader.read_to_string().unwrap();
Err(s.as_slice().trim().into_string())
}
}
}
}
/// Convenient aliases.
trait AShow : Arbitrary + Show {}
impl<A: Arbitrary + Show> AShow for A {}
fn arby<A: Arbitrary, G: Gen>(g: &mut G) -> A { Arbitrary::arbitrary(g) }
| {
let f = *self;
safe(proc() { f() }).result(g)
} | identifier_body |
tester.rs | use std::comm;
use std::fmt::Show;
use std::io::ChanWriter;
use std::iter;
use std::rand;
use std::task::TaskBuilder;
use super::{Arbitrary, Gen, Shrinker, StdGen};
use tester::trap::safe;
use tester::Status::{Discard, Fail, Pass};
/// The main QuickCheck type for setting configuration and running QuickCheck.
pub struct QuickCheck<G> {
tests: uint,
max_tests: uint,
gen: G,
}
impl QuickCheck<StdGen<rand::TaskRng>> {
/// Creates a new QuickCheck value.
///
/// This can be used to run QuickCheck on things that implement
/// `Testable`. You may also adjust the configuration, such as
/// the number of tests to run.
///
/// By default, the maximum number of passed tests is set to `100`,
/// the max number of overall tests is set to `10000` and the generator
/// is set to a `StdGen` with a default size of `100`.
pub fn new() -> QuickCheck<StdGen<rand::TaskRng>> {
QuickCheck {
tests: 100,
max_tests: 10000,
gen: StdGen::new(rand::task_rng(), 100),
}
}
}
impl<G: Gen> QuickCheck<G> {
/// Set the number of tests to run.
///
/// This actually refers to the maximum number of *passed* tests that
/// can occur. Namely, if a test causes a failure, future testing on that
/// property stops. Additionally, if tests are discarded, there may be
/// fewer than `tests` passed.
pub fn tests(mut self, tests: uint) -> QuickCheck<G> {
self.tests = tests;
self
}
/// Set the maximum number of tests to run.
///
/// The number of invocations of a property will never exceed this number.
/// This is necessary to cap the number of tests because QuickCheck
/// properties can discard tests.
pub fn max_tests(mut self, max_tests: uint) -> QuickCheck<G> {
self.max_tests = max_tests;
self
}
/// Set the random number generator to be used by QuickCheck.
pub fn gen(mut self, gen: G) -> QuickCheck<G> {
self.gen = gen;
self
}
/// Tests a property and returns the result.
///
/// The result returned is either the number of tests passed or a witness
/// of failure.
///
/// (If you're using Rust's unit testing infrastructure, then you'll
/// want to use the `quickcheck` method, which will `panic!` on failure.)
pub fn quicktest<A>(&mut self, f: A) -> Result<uint, TestResult>
where A: Testable {
let mut ntests: uint = 0;
for _ in iter::range(0, self.max_tests) {
if ntests >= self.tests {
break
}
let r = f.result(&mut self.gen);
match r.status {
Pass => ntests += 1,
Discard => continue,
Fail => return Err(r),
}
}
Ok(ntests)
}
/// Tests a property and calls `panic!` on failure.
///
/// The `panic!` message will include a (hopefully) minimal witness of
/// failure.
///
/// It is appropriate to use this method with Rust's unit testing
/// infrastructure.
///
/// Note that if the environment variable `RUST_LOG` is set to enable
/// `info` level log messages for the `quickcheck` crate, then this will
/// include output on how many QuickCheck tests were passed.
///
/// # Example
///
/// ```rust
/// use quickcheck::QuickCheck;
///
/// fn prop_reverse_reverse() {
/// fn revrev(xs: Vec<uint>) -> bool {
/// let rev: Vec<uint> = xs.clone().into_iter().rev().collect();
/// let revrev = rev.into_iter().rev().collect();
/// xs == revrev
/// }
/// QuickCheck::new().quickcheck(revrev);
/// }
/// ```
pub fn quickcheck<A>(&mut self, f: A) where A: Testable {
match self.quicktest(f) {
Ok(ntests) => info!("(Passed {} QuickCheck tests.)", ntests),
Err(result) => panic!(result.failed_msg()),
}
}
}
/// Convenience function for running QuickCheck.
///
/// This is an alias for `QuickCheck::new().quickcheck(f)`.
pub fn quickcheck<A: Testable>(f: A) { QuickCheck::new().quickcheck(f) }
/// Describes the status of a single instance of a test.
///
/// All testable things must be capable of producing a `TestResult`.
#[deriving(Clone, Show)]
pub struct TestResult {
status: Status,
arguments: Vec<String>,
err: String,
}
/// Whether a test has passed, failed or been discarded.
#[deriving(Clone, Show)]
enum Status { Pass, Fail, Discard }
impl TestResult {
/// Produces a test result that indicates the current test has passed.
pub fn passed() -> TestResult { TestResult::from_bool(true) }
/// Produces a test result that indicates the current test has failed.
pub fn failed() -> TestResult { TestResult::from_bool(false) }
/// Produces a test result that indicates failure from a runtime
/// error.
pub fn error(msg: &str) -> TestResult {
let mut r = TestResult::from_bool(false);
r.err = msg.to_string();
r
}
/// Produces a test result that instructs `quickcheck` to ignore it.
/// This is useful for restricting the domain of your properties.
/// When a test is discarded, `quickcheck` will replace it with a
/// fresh one (up to a certain limit).
pub fn discard() -> TestResult {
TestResult {
status: Discard,
arguments: vec![],
err: "".to_string(),
}
}
/// Converts a `bool` to a `TestResult`. A `true` value indicates that
/// the test has passed and a `false` value indicates that the test
/// has failed.
pub fn from_bool(b: bool) -> TestResult {
TestResult {
status: if b { Pass } else { Fail },
arguments: vec![],
err: "".to_string(),
}
}
/// Tests if a "procedure" fails when executed. The test passes only if
/// `f` generates a task failure during its execution.
pub fn must_fail<T: Send>(f: proc(): Send -> T) -> TestResult {
let (tx, _) = comm::channel();
TestResult::from_bool(
TaskBuilder::new()
.stdout(box ChanWriter::new(tx.clone()))
.stderr(box ChanWriter::new(tx))
.try(f)
.is_err())
}
/// Returns `true` if and only if this test result describes a failing
/// test.
pub fn is_failure(&self) -> bool {
match self.status {
Fail => true,
Pass|Discard => false,
}
}
/// Returns `true` if and only if this test result describes a failing
/// test as a result of a run time error.
pub fn is_error(&self) -> bool {
self.is_failure() && self.err.len() > 0
}
fn failed_msg(&self) -> String {
if self.err.len() == 0 {
format!(
"[quickcheck] TEST FAILED. Arguments: ({})",
self.arguments.connect(", "))
} else {
format!(
"[quickcheck] TEST FAILED (runtime error). \
Arguments: ({})\nError: {}",
self.arguments.connect(", "), self.err)
}
}
}
/// `Testable` describes types (e.g., a function) whose values can be
/// tested.
/// | ///
/// For functions, an implementation must generate random arguments
/// and potentially shrink those arguments if they produce a failure.
///
/// It's unlikely that you'll have to implement this trait yourself.
/// This comes with a caveat: currently, only functions with 4 parameters
/// or fewer (both `fn` and `||` types) satisfy `Testable`. If you have
/// functions to test with more than 4 parameters, please
/// [file a bug](https://github.com/BurntSushi/quickcheck/issues) and
/// I'll hopefully add it. (As of now, it would be very difficult to
/// add your own implementation outside of `quickcheck`, since the
/// functions that do shrinking are not public.)
pub trait Testable : Send {
fn result<G: Gen>(&self, &mut G) -> TestResult;
}
impl Testable for bool {
fn result<G: Gen>(&self, _: &mut G) -> TestResult {
TestResult::from_bool(*self)
}
}
impl Testable for TestResult {
fn result<G: Gen>(&self, _: &mut G) -> TestResult { self.clone() }
}
impl<A> Testable for Result<A, String> where A: Testable {
fn result<G: Gen>(&self, g: &mut G) -> TestResult {
match *self {
Ok(ref r) => r.result(g),
Err(ref err) => TestResult::error(err.as_slice()),
}
}
}
impl<T> Testable for fn() -> T where T: Testable {
fn result<G: Gen>(&self, g: &mut G) -> TestResult {
shrink::<G, T, (), (), (), (), fn() -> T>(g, self)
}
}
impl<A, T> Testable for fn(A) -> T where A: AShow, T: Testable {
fn result<G: Gen>(&self, g: &mut G) -> TestResult {
shrink::<G, T, A, (), (), (), fn(A) -> T>(g, self)
}
}
impl<A, B, T> Testable for fn(A, B) -> T
where A: AShow, B: AShow, T: Testable {
fn result<G: Gen>(&self, g: &mut G) -> TestResult {
shrink::<G, T, A, B, (), (), fn(A, B) -> T>(g, self)
}
}
impl<A, B, C, T> Testable for fn(A, B, C) -> T
where A: AShow, B: AShow, C: AShow, T: Testable {
fn result<G: Gen>(&self, g: &mut G) -> TestResult {
shrink::<G, T, A, B, C, (), fn(A, B, C) -> T>(g, self)
}
}
impl<A, B, C, D, T,> Testable for fn(A, B, C, D) -> T
where A: AShow, B: AShow, C: AShow, D: AShow, T: Testable {
fn result<G: Gen>(&self, g: &mut G) -> TestResult {
shrink::<G, T, A, B, C, D, fn(A, B, C, D) -> T>(g, self)
}
}
trait Fun<A, B, C, D, T> {
fn call<G>(&self, g: &mut G,
a: Option<&A>, b: Option<&B>,
c: Option<&C>, d: Option<&D>)
-> TestResult
where G: Gen;
}
macro_rules! impl_fun_call(
($f:expr, $g:expr, $($name:ident,)+) => ({
let ($($name,)*) = ($($name.unwrap(),)*);
let f = $f;
let mut r = {
let ($($name,)*) = ($(box $name.clone(),)*);
safe(proc() { f($(*$name,)*) }).result($g)
};
if r.is_failure() {
r.arguments = vec![$($name.to_string(),)*];
}
r
});
)
impl<A, B, C, D, T> Fun<A, B, C, D, T> for fn() -> T
where A: AShow, B: AShow, C: AShow, D: AShow, T: Testable {
fn call<G: Gen>(&self, g: &mut G,
_: Option<&A>, _: Option<&B>,
_: Option<&C>, _: Option<&D>)
-> TestResult {
let f = *self;
safe(proc() { f() }).result(g)
}
}
impl<A, B, C, D, T> Fun<A, B, C, D, T> for fn(A) -> T
where A: AShow, B: AShow, C: AShow, D: AShow, T: Testable {
fn call<G: Gen>(&self, g: &mut G,
a: Option<&A>, _: Option<&B>,
_: Option<&C>, _: Option<&D>)
-> TestResult {
impl_fun_call!(*self, g, a,)
}
}
impl<A, B, C, D, T> Fun<A, B, C, D, T> for fn(A, B) -> T
where A: AShow, B: AShow, C: AShow, D: AShow, T: Testable {
fn call<G: Gen>(&self, g: &mut G,
a: Option<&A>, b: Option<&B>,
_: Option<&C>, _: Option<&D>)
-> TestResult {
impl_fun_call!(*self, g, a, b,)
}
}
impl<A, B, C, D, T> Fun<A, B, C, D, T> for fn(A, B, C) -> T
where A: AShow, B: AShow, C: AShow, D: AShow, T: Testable {
fn call<G: Gen>(&self, g: &mut G,
a: Option<&A>, b: Option<&B>,
c: Option<&C>, _: Option<&D>)
-> TestResult {
impl_fun_call!(*self, g, a, b, c,)
}
}
impl<A, B, C, D, T> Fun<A, B, C, D, T> for fn(A, B, C, D) -> T
where A: AShow, B: AShow, C: AShow, D: AShow, T: Testable {
fn call<G: Gen>(&self, g: &mut G,
a: Option<&A>, b: Option<&B>,
c: Option<&C>, d: Option<&D>)
-> TestResult {
impl_fun_call!(*self, g, a, b, c, d,)
}
}
fn shrink<G, T, A, B, C, D, F>(g: &mut G, fun: &F) -> TestResult
where G: Gen, T: Testable, A: AShow, B: AShow, C: AShow, D: AShow,
F: Fun<A, B, C, D, T> {
let (a, b, c, d): (A, B, C, D) = arby(g);
let r = fun.call(g, Some(&a), Some(&b), Some(&c), Some(&d));
match r.status {
Pass|Discard => r,
Fail => shrink_failure(g, (a, b, c, d).shrink(), fun).unwrap_or(r),
}
}
fn shrink_failure<G, T, A, B, C, D, F>
(g: &mut G,
mut shrinker: Box<Shrinker<(A, B, C, D)>+'static>,
fun: &F)
-> Option<TestResult>
where G: Gen, T: Testable, A: AShow, B: AShow, C: AShow, D: AShow,
F: Fun<A, B, C, D, T> {
for (a, b, c, d) in shrinker {
let r = fun.call(g, Some(&a), Some(&b), Some(&c), Some(&d));
match r.status {
// The shrunk value does not witness a failure, so
// throw it away.
Pass|Discard => continue,
// The shrunk value *does* witness a failure, so keep trying
// to shrink it.
Fail => {
let shrunk = shrink_failure(g, (a, b, c, d).shrink(), fun);
// If we couldn't witness a failure on any shrunk value,
// then return the failure we already have.
return Some(shrunk.unwrap_or(r))
},
}
}
None
}
#[cfg(quickfail)]
mod trap {
pub fn safe<T: Send>(fun: proc() -> T) -> Result<T, String> {
Ok(fun())
}
}
#[cfg(not(quickfail))]
mod trap {
use std::comm::channel;
use std::io::{ChanReader, ChanWriter};
use std::task::TaskBuilder;
// This is my bright idea for capturing runtime errors caused by a
// test. Actually, it looks like rustc uses a similar approach.
// The problem is, this is used for *each* test case passed to a
// property, whereas rustc does it once for each test.
//
// I'm not entirely sure there's much of an alternative either.
// We could launch a single task and pass arguments over a channel,
// but the task would need to be restarted if it failed due to a
// runtime error. Since these are rare, it'd probably be more efficient
// then this approach, but it would also be more complex.
//
// Moreover, this feature seems to prevent an implementation of
// Testable for a stack closure type. *sigh*
pub fn safe<T: Send>(fun: proc():Send -> T) -> Result<T, String> {
let (send, recv) = channel();
let stdout = ChanWriter::new(send.clone());
let stderr = ChanWriter::new(send);
let mut reader = ChanReader::new(recv);
let t = TaskBuilder::new()
.named("safefn")
.stdout(box stdout)
.stderr(box stderr);
match t.try(fun) {
Ok(v) => Ok(v),
Err(_) => {
let s = reader.read_to_string().unwrap();
Err(s.as_slice().trim().into_string())
}
}
}
}
/// Convenient aliases.
trait AShow : Arbitrary + Show {}
impl<A: Arbitrary + Show> AShow for A {}
fn arby<A: Arbitrary, G: Gen>(g: &mut G) -> A { Arbitrary::arbitrary(g) } | /// Anything that can be tested must be capable of producing a `TestResult`
/// given a random number generator. This is trivial for types like `bool`,
/// which are just converted to either a passing or failing test result. | random_line_split |
CtrlDialogView.js | define(['backbone', 'marionette', 'mustache', 'jquery', 'text!templates/ctrldialog.html'],
function(Backbone, Marionette, Mustache, $, template) {
return Marionette.ItemView.extend({
initialize: function(options) {
if (!options.icon_name) {
options.icon_name = 'bird';
}
this.model = new Backbone.Model( options );
this.render();
},
template: function(serialized_model) {
return Mustache.render(template, serialized_model);
},
ui: {
'ok': '.btn-ok',
'cancel': '.btn-cancel',
'dialog': '.dialog',
'close': '.dialog-close'
},
events: {
'tap @ui.ok': 'onOk', | 'tap @ui.close': 'onCancel'
},
onOk: function(ev) {
this.trigger('ok');
this.destroy();
},
onCancel: function(ev) {
this.trigger('cancel');
this.destroy();
},
onRender: function() {
$('body').append(this.$el);
this.ui.dialog.css({
'marginTop': 0 - this.ui.dialog.height()/2
});
this.ui.dialog.addClass('bounceInDown animated');
},
onDestory: function() {
this.$el.remove();
this.model.destroy();
},
className: 'dialogContainer'
});
}); | 'tap @ui.cancel': 'onCancel', | random_line_split |
CtrlDialogView.js | define(['backbone', 'marionette', 'mustache', 'jquery', 'text!templates/ctrldialog.html'],
function(Backbone, Marionette, Mustache, $, template) {
return Marionette.ItemView.extend({
initialize: function(options) {
if (!options.icon_name) {
| this.model = new Backbone.Model( options );
this.render();
},
template: function(serialized_model) {
return Mustache.render(template, serialized_model);
},
ui: {
'ok': '.btn-ok',
'cancel': '.btn-cancel',
'dialog': '.dialog',
'close': '.dialog-close'
},
events: {
'tap @ui.ok': 'onOk',
'tap @ui.cancel': 'onCancel',
'tap @ui.close': 'onCancel'
},
onOk: function(ev) {
this.trigger('ok');
this.destroy();
},
onCancel: function(ev) {
this.trigger('cancel');
this.destroy();
},
onRender: function() {
$('body').append(this.$el);
this.ui.dialog.css({
'marginTop': 0 - this.ui.dialog.height()/2
});
this.ui.dialog.addClass('bounceInDown animated');
},
onDestory: function() {
this.$el.remove();
this.model.destroy();
},
className: 'dialogContainer'
});
}); | options.icon_name = 'bird';
}
| conditional_block |
sociallogin_interface.js | var options={}; options.login=true;
LoginRadius_SocialLogin.util.ready(function ()
{ $ui = LoginRadius_SocialLogin.lr_login_settings;
$ui.interfacesize = Drupal.settings.lrsociallogin.interfacesize;
$ui.lrinterfacebackground=Drupal.settings.lrsociallogin.lrinterfacebackground;
$ui.noofcolumns= Drupal.settings.lrsociallogin.noofcolumns;
$ui.apikey = Drupal.settings.lrsociallogin.apikey;
$ui.is_access_token=true;
$ui.callback=Drupal.settings.lrsociallogin.location;
$ui.lrinterfacecontainer ="interfacecontainerdiv"; LoginRadius_SocialLogin.init(options); });
LoginRadiusSDK.setLoginCallback(function () {
var token = LoginRadiusSDK.getToken();
var form = document.createElement('form');
form.action = Drupal.settings.lrsociallogin.location; |
var hiddenToken = document.createElement('input');
hiddenToken.type = 'hidden';
hiddenToken.value = token;
hiddenToken.name = 'token';
form.appendChild(hiddenToken);
document.body.appendChild(form);
form.submit();
}); | form.method = 'POST'; | random_line_split |
abiquo.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
External inventory script for Abiquo
====================================
Shamelessly copied from an existing inventory script.
This script generates an inventory that Ansible can understand by making API requests to Abiquo API
Requires some python libraries, ensure to have them installed when using this script.
This script has been tested in Abiquo 3.0 but it may work also for Abiquo 2.6.
Before using this script you may want to modify abiquo.ini config file.
This script generates an Ansible hosts file with these host groups:
ABQ_xxx: Defines a hosts itself by Abiquo VM name label
all: Contains all hosts defined in Abiquo user's enterprise
virtualdatecenter: Creates a host group for each virtualdatacenter containing all hosts defined on it
virtualappliance: Creates a host group for each virtualappliance containing all hosts defined on it
imagetemplate: Creates a host group for each image template containing all hosts using it
'''
# (c) 2014, Daniel Beneyto <daniel.beneyto@abiquo.com>
#
# This file is part of Ansible,
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
import os
import sys
import time
import ConfigParser
try:
import json
except ImportError:
import simplejson as json
from ansible.module_utils.urls import open_url
def api_get(link, config):
try:
if link is None:
url = config.get('api','uri') + config.get('api','login_path')
headers = {"Accept": config.get('api','login_type')}
else:
url = link['href'] + '?limit=0'
headers = {"Accept": link['type']}
result = open_url(url, headers=headers, url_username=config.get('auth','apiuser').replace('\n', ''),
url_password=config.get('auth','apipass').replace('\n', ''))
return json.loads(result.read())
except:
return None
def save_cache(data, config):
|
def get_cache(cache_item, config):
''' returns cached item '''
dpath = config.get('cache','cache_dir')
inv = {}
try:
cache = open('/'.join([dpath,'inventory']), 'r')
inv = cache.read()
cache.close()
except IOError as e:
pass # not really sure what to do here
return inv
def cache_available(config):
''' checks if we have a 'fresh' cache available for item requested '''
if config.has_option('cache','cache_dir'):
dpath = config.get('cache','cache_dir')
try:
existing = os.stat( '/'.join([dpath,'inventory']))
except:
# cache doesn't exist or isn't accessible
return False
if config.has_option('cache', 'cache_max_age'):
maxage = config.get('cache', 'cache_max_age')
if ((int(time.time()) - int(existing.st_mtime)) <= int(maxage)):
return True
return False
def generate_inv_from_api(enterprise_entity,config):
try:
inventory['all'] = {}
inventory['all']['children'] = []
inventory['all']['hosts'] = []
inventory['_meta'] = {}
inventory['_meta']['hostvars'] = {}
enterprise = api_get(enterprise_entity,config)
vms_entity = next(link for link in (enterprise['links']) if (link['rel']=='virtualmachines'))
vms = api_get(vms_entity,config)
for vmcollection in vms['collection']:
vm_vapp = next(link for link in (vmcollection['links']) if (link['rel']=='virtualappliance'))['title'].replace('[','').replace(']','').replace(' ','_')
vm_vdc = next(link for link in (vmcollection['links']) if (link['rel']=='virtualdatacenter'))['title'].replace('[','').replace(']','').replace(' ','_')
vm_template = next(link for link in (vmcollection['links']) if (link['rel']=='virtualmachinetemplate'))['title'].replace('[','').replace(']','').replace(' ','_')
# From abiquo.ini: Only adding to inventory VMs with public IP
if (config.getboolean('defaults', 'public_ip_only')) == True:
for link in vmcollection['links']:
if (link['type']=='application/vnd.abiquo.publicip+json' and link['rel']=='ip'):
vm_nic = link['title']
break
else:
vm_nic = None
# Otherwise, assigning defined network interface IP address
else:
for link in vmcollection['links']:
if (link['rel']==config.get('defaults', 'default_net_interface')):
vm_nic = link['title']
break
else:
vm_nic = None
vm_state = True
# From abiquo.ini: Only adding to inventory VMs deployed
if ((config.getboolean('defaults', 'deployed_only') == True) and (vmcollection['state'] == 'NOT_ALLOCATED')):
vm_state = False
if vm_nic is not None and vm_state:
if vm_vapp not in inventory:
inventory[vm_vapp] = {}
inventory[vm_vapp]['children'] = []
inventory[vm_vapp]['hosts'] = []
if vm_vdc not in inventory:
inventory[vm_vdc] = {}
inventory[vm_vdc]['hosts'] = []
inventory[vm_vdc]['children'] = []
if vm_template not in inventory:
inventory[vm_template] = {}
inventory[vm_template]['children'] = []
inventory[vm_template]['hosts'] = []
if config.getboolean('defaults', 'get_metadata') == True:
meta_entity = next(link for link in (vmcollection['links']) if (link['rel']=='metadata'))
try:
metadata = api_get(meta_entity,config)
if (config.getfloat("api","version") >= 3.0):
vm_metadata = metadata['metadata']
else:
vm_metadata = metadata['metadata']['metadata']
inventory['_meta']['hostvars'][vm_nic] = vm_metadata
except Exception as e:
pass
inventory[vm_vapp]['children'].append(vmcollection['name'])
inventory[vm_vdc]['children'].append(vmcollection['name'])
inventory[vm_template]['children'].append(vmcollection['name'])
inventory['all']['children'].append(vmcollection['name'])
inventory[vmcollection['name']] = []
inventory[vmcollection['name']].append(vm_nic)
return inventory
except Exception as e:
# Return empty hosts output
return { 'all': {'hosts': []}, '_meta': { 'hostvars': {} } }
def get_inventory(enterprise, config):
''' Reads the inventory from cache or Abiquo api '''
if cache_available(config):
inv = get_cache('inventory', config)
else:
default_group = os.path.basename(sys.argv[0]).rstrip('.py')
# MAKE ABIQUO API CALLS #
inv = generate_inv_from_api(enterprise,config)
save_cache(inv, config)
return json.dumps(inv)
if __name__ == '__main__':
inventory = {}
enterprise = {}
# Read config
config = ConfigParser.SafeConfigParser()
for configfilename in [os.path.abspath(sys.argv[0]).rstrip('.py') + '.ini', 'abiquo.ini']:
if os.path.exists(configfilename):
config.read(configfilename)
break
try:
login = api_get(None,config)
enterprise = next(link for link in (login['links']) if (link['rel']=='enterprise'))
except Exception as e:
enterprise = None
if cache_available(config):
inventory = get_cache('inventory', config)
else:
inventory = get_inventory(enterprise, config)
# return to ansible
sys.stdout.write(str(inventory))
sys.stdout.flush()
| ''' saves item to cache '''
dpath = config.get('cache','cache_dir')
try:
cache = open('/'.join([dpath,'inventory']), 'w')
cache.write(json.dumps(data))
cache.close()
except IOError as e:
pass # not really sure what to do here | identifier_body |
abiquo.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
External inventory script for Abiquo
====================================
Shamelessly copied from an existing inventory script.
This script generates an inventory that Ansible can understand by making API requests to Abiquo API
Requires some python libraries, ensure to have them installed when using this script.
This script has been tested in Abiquo 3.0 but it may work also for Abiquo 2.6.
Before using this script you may want to modify abiquo.ini config file.
This script generates an Ansible hosts file with these host groups:
ABQ_xxx: Defines a hosts itself by Abiquo VM name label
all: Contains all hosts defined in Abiquo user's enterprise
virtualdatecenter: Creates a host group for each virtualdatacenter containing all hosts defined on it
virtualappliance: Creates a host group for each virtualappliance containing all hosts defined on it
imagetemplate: Creates a host group for each image template containing all hosts using it
'''
# (c) 2014, Daniel Beneyto <daniel.beneyto@abiquo.com>
#
# This file is part of Ansible,
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
import os
import sys
import time
import ConfigParser
try:
import json
except ImportError:
import simplejson as json
from ansible.module_utils.urls import open_url
def api_get(link, config):
    """Perform a GET against the Abiquo API and decode the JSON response.

    link   -- an Abiquo HATEOAS link dict with 'href' and 'type' keys, or
              None to hit the login endpoint built from the [api] section.
    config -- ConfigParser instance with [api] and [auth] sections.

    Returns the decoded JSON document, or None on any failure (network
    error, bad credentials, missing config option, malformed JSON).
    """
    try:
        if link is None:
            # No link given: build the login URL from the config file.
            url = config.get('api', 'uri') + config.get('api', 'login_path')
            headers = {"Accept": config.get('api', 'login_type')}
        else:
            # Follow the provided link; limit=0 disables server-side paging.
            url = link['href'] + '?limit=0'
            headers = {"Accept": link['type']}
        # Credentials may carry a trailing newline when read from a file.
        result = open_url(url, headers=headers,
                          url_username=config.get('auth', 'apiuser').replace('\n', ''),
                          url_password=config.get('auth', 'apipass').replace('\n', ''))
        return json.loads(result.read())
    except Exception:
        # Narrowed from a bare 'except:': callers treat None as "no data",
        # but SystemExit/KeyboardInterrupt are no longer swallowed.
        return None
def save_cache(data, config):
    """Serialize `data` as JSON into <cache_dir>/inventory.

    Write failures (missing directory, permissions) are deliberately
    ignored: the cache is only an optimization and must never abort
    inventory generation.
    """
    dpath = config.get('cache', 'cache_dir')
    try:
        # 'with' guarantees the handle is closed even if the write fails
        # (the original leaked the handle on a failed write()).
        with open(os.path.join(dpath, 'inventory'), 'w') as cache:
            cache.write(json.dumps(data))
    except IOError:
        # Best-effort cache; silently fall through as before.
        pass
def get_cache(cache_item, config):
    """Return the raw cached inventory text, or {} when the cache file
    is missing or unreadable.

    Note: `cache_item` is unused -- there is a single cache file at
    <cache_dir>/inventory; the parameter is kept for call compatibility.
    """
    contents = {}
    try:
        with open('/'.join([config.get('cache', 'cache_dir'), 'inventory']), 'r') as cache_file:
            contents = cache_file.read()
    except IOError:
        pass  # fall through with the empty default, as before
    return contents
def cache_available(config):
    """Return True when <cache_dir>/inventory exists and is younger than
    cache_max_age seconds; False in every other case (no [cache] options,
    missing file, stale file)."""
    if not config.has_option('cache', 'cache_dir'):
        return False
    cache_path = '/'.join([config.get('cache', 'cache_dir'), 'inventory'])
    try:
        stat_info = os.stat(cache_path)
    except:
        # cache doesn't exist or isn't accessible
        return False
    if not config.has_option('cache', 'cache_max_age'):
        return False
    age = int(time.time()) - int(stat_info.st_mtime)
    return age <= int(config.get('cache', 'cache_max_age'))
def | (enterprise_entity,config):
try:
inventory['all'] = {}
inventory['all']['children'] = []
inventory['all']['hosts'] = []
inventory['_meta'] = {}
inventory['_meta']['hostvars'] = {}
enterprise = api_get(enterprise_entity,config)
vms_entity = next(link for link in (enterprise['links']) if (link['rel']=='virtualmachines'))
vms = api_get(vms_entity,config)
for vmcollection in vms['collection']:
vm_vapp = next(link for link in (vmcollection['links']) if (link['rel']=='virtualappliance'))['title'].replace('[','').replace(']','').replace(' ','_')
vm_vdc = next(link for link in (vmcollection['links']) if (link['rel']=='virtualdatacenter'))['title'].replace('[','').replace(']','').replace(' ','_')
vm_template = next(link for link in (vmcollection['links']) if (link['rel']=='virtualmachinetemplate'))['title'].replace('[','').replace(']','').replace(' ','_')
# From abiquo.ini: Only adding to inventory VMs with public IP
if (config.getboolean('defaults', 'public_ip_only')) == True:
for link in vmcollection['links']:
if (link['type']=='application/vnd.abiquo.publicip+json' and link['rel']=='ip'):
vm_nic = link['title']
break
else:
vm_nic = None
# Otherwise, assigning defined network interface IP address
else:
for link in vmcollection['links']:
if (link['rel']==config.get('defaults', 'default_net_interface')):
vm_nic = link['title']
break
else:
vm_nic = None
vm_state = True
# From abiquo.ini: Only adding to inventory VMs deployed
if ((config.getboolean('defaults', 'deployed_only') == True) and (vmcollection['state'] == 'NOT_ALLOCATED')):
vm_state = False
if vm_nic is not None and vm_state:
if vm_vapp not in inventory:
inventory[vm_vapp] = {}
inventory[vm_vapp]['children'] = []
inventory[vm_vapp]['hosts'] = []
if vm_vdc not in inventory:
inventory[vm_vdc] = {}
inventory[vm_vdc]['hosts'] = []
inventory[vm_vdc]['children'] = []
if vm_template not in inventory:
inventory[vm_template] = {}
inventory[vm_template]['children'] = []
inventory[vm_template]['hosts'] = []
if config.getboolean('defaults', 'get_metadata') == True:
meta_entity = next(link for link in (vmcollection['links']) if (link['rel']=='metadata'))
try:
metadata = api_get(meta_entity,config)
if (config.getfloat("api","version") >= 3.0):
vm_metadata = metadata['metadata']
else:
vm_metadata = metadata['metadata']['metadata']
inventory['_meta']['hostvars'][vm_nic] = vm_metadata
except Exception as e:
pass
inventory[vm_vapp]['children'].append(vmcollection['name'])
inventory[vm_vdc]['children'].append(vmcollection['name'])
inventory[vm_template]['children'].append(vmcollection['name'])
inventory['all']['children'].append(vmcollection['name'])
inventory[vmcollection['name']] = []
inventory[vmcollection['name']].append(vm_nic)
return inventory
except Exception as e:
# Return empty hosts output
return { 'all': {'hosts': []}, '_meta': { 'hostvars': {} } }
def get_inventory(enterprise, config):
    """Return the inventory as a JSON string: from the cache when it is
    fresh, otherwise by querying the Abiquo API (refreshing the cache).
    """
    if cache_available(config):
        # get_cache() returns the raw JSON text that save_cache() wrote.
        # Return it as-is: the original json.dumps() here double-encoded
        # the string, and re-saving it corrupted the cache on every read.
        return get_cache('inventory', config)
    # MAKE ABIQUO API CALLS #
    inv = generate_inv_from_api(enterprise, config)
    save_cache(inv, config)
    return json.dumps(inv)
if __name__ == '__main__':
    # Accumulators; 'inventory' is also mutated as a module-level global
    # by generate_inv_from_api() when the API path is taken.
    inventory = {}
    enterprise = {}
    # Read config
    # First match wins: "<script name>.ini" next to the script, then a
    # local abiquo.ini. NOTE(review): rstrip('.py') strips *characters*,
    # not a suffix, so a script name ending in 'p'/'y' (e.g. 'copy.py')
    # would look up a truncated .ini name -- confirm before relying on it.
    config = ConfigParser.SafeConfigParser()
    for configfilename in [os.path.abspath(sys.argv[0]).rstrip('.py') + '.ini', 'abiquo.ini']:
        if os.path.exists(configfilename):
            config.read(configfilename)
            break
    # Log in and locate the caller's 'enterprise' link; on any failure
    # fall back to None so the script still emits an (empty) inventory.
    try:
        login = api_get(None,config)
        enterprise = next(link for link in (login['links']) if (link['rel']=='enterprise'))
    except Exception as e:
        enterprise = None
    # Serve from the cache when it is fresh, otherwise hit the API.
    if cache_available(config):
        inventory = get_cache('inventory', config)
    else:
        inventory = get_inventory(enterprise, config)
    # return to ansible
    sys.stdout.write(str(inventory))
    sys.stdout.flush()
| generate_inv_from_api | identifier_name |
abiquo.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
External inventory script for Abiquo
====================================
Shamelessly copied from an existing inventory script.
This script generates an inventory that Ansible can understand by making API requests to Abiquo API
Requires some python libraries, ensure to have them installed when using this script.
This script has been tested in Abiquo 3.0 but it may work also for Abiquo 2.6.
Before using this script you may want to modify abiquo.ini config file.
This script generates an Ansible hosts file with these host groups:
ABQ_xxx: Defines a hosts itself by Abiquo VM name label
all: Contains all hosts defined in Abiquo user's enterprise
virtualdatecenter: Creates a host group for each virtualdatacenter containing all hosts defined on it
virtualappliance: Creates a host group for each virtualappliance containing all hosts defined on it
imagetemplate: Creates a host group for each image template containing all hosts using it
'''
# (c) 2014, Daniel Beneyto <daniel.beneyto@abiquo.com>
#
# This file is part of Ansible,
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
import os
import sys
import time
import ConfigParser
try:
import json
except ImportError:
import simplejson as json
from ansible.module_utils.urls import open_url
def api_get(link, config):
try:
if link is None:
url = config.get('api','uri') + config.get('api','login_path')
headers = {"Accept": config.get('api','login_type')}
else:
url = link['href'] + '?limit=0'
headers = {"Accept": link['type']}
result = open_url(url, headers=headers, url_username=config.get('auth','apiuser').replace('\n', ''),
url_password=config.get('auth','apipass').replace('\n', ''))
return json.loads(result.read())
except:
return None
def save_cache(data, config):
''' saves item to cache '''
dpath = config.get('cache','cache_dir')
try:
cache = open('/'.join([dpath,'inventory']), 'w')
cache.write(json.dumps(data))
cache.close()
except IOError as e:
pass # not really sure what to do here
def get_cache(cache_item, config):
''' returns cached item '''
dpath = config.get('cache','cache_dir')
inv = {}
try:
cache = open('/'.join([dpath,'inventory']), 'r')
inv = cache.read()
cache.close()
except IOError as e:
pass # not really sure what to do here
return inv
def cache_available(config):
''' checks if we have a 'fresh' cache available for item requested '''
if config.has_option('cache','cache_dir'):
dpath = config.get('cache','cache_dir')
try:
existing = os.stat( '/'.join([dpath,'inventory']))
except:
# cache doesn't exist or isn't accessible
return False
if config.has_option('cache', 'cache_max_age'): | maxage = config.get('cache', 'cache_max_age')
if ((int(time.time()) - int(existing.st_mtime)) <= int(maxage)):
return True
return False
def _group_name(title):
    """Sanitize an Abiquo link title into an Ansible-safe group name."""
    return title.replace('[', '').replace(']', '').replace(' ', '_')

def generate_inv_from_api(enterprise_entity, config):
    """Build the Ansible inventory dict from the Abiquo API.

    Creates one group per virtual appliance, virtual datacenter and VM
    template, plus one group per VM (named after the VM) whose single
    host is the VM's selected NIC address. Returns an empty skeleton
    inventory on any failure so Ansible still receives valid JSON.
    """
    # Build into a local dict instead of the module-level 'inventory'
    # global: identical output when run as a script, but no NameError
    # when this function is imported as a library.
    inventory = {}
    try:
        inventory['all'] = {'children': [], 'hosts': []}
        inventory['_meta'] = {'hostvars': {}}
        enterprise = api_get(enterprise_entity, config)
        vms_entity = next(link for link in enterprise['links'] if link['rel'] == 'virtualmachines')
        vms = api_get(vms_entity, config)
        for vm in vms['collection']:
            vm_vapp = _group_name(next(link for link in vm['links'] if link['rel'] == 'virtualappliance')['title'])
            vm_vdc = _group_name(next(link for link in vm['links'] if link['rel'] == 'virtualdatacenter')['title'])
            vm_template = _group_name(next(link for link in vm['links'] if link['rel'] == 'virtualmachinetemplate')['title'])
            # Pick the host address: the public IP when public_ip_only is
            # set, otherwise the configured default network interface.
            vm_nic = None
            if config.getboolean('defaults', 'public_ip_only'):
                for link in vm['links']:
                    if link['type'] == 'application/vnd.abiquo.publicip+json' and link['rel'] == 'ip':
                        vm_nic = link['title']
                        break
            else:
                for link in vm['links']:
                    if link['rel'] == config.get('defaults', 'default_net_interface'):
                        vm_nic = link['title']
                        break
            # From abiquo.ini: optionally skip VMs that are not deployed.
            vm_state = True
            if config.getboolean('defaults', 'deployed_only') and vm['state'] == 'NOT_ALLOCATED':
                vm_state = False
            if vm_nic is None or not vm_state:
                continue
            for group in (vm_vapp, vm_vdc, vm_template):
                if group not in inventory:
                    inventory[group] = {'children': [], 'hosts': []}
            if config.getboolean('defaults', 'get_metadata'):
                meta_entity = next(link for link in vm['links'] if link['rel'] == 'metadata')
                try:
                    metadata = api_get(meta_entity, config)
                    if config.getfloat("api", "version") >= 3.0:
                        vm_metadata = metadata['metadata']
                    else:
                        # Pre-3.0 wraps the payload one level deeper.
                        vm_metadata = metadata['metadata']['metadata']
                    inventory['_meta']['hostvars'][vm_nic] = vm_metadata
                except Exception as e:
                    # Metadata is optional; ignore VMs whose metadata
                    # cannot be fetched or parsed.
                    pass
            for group in (vm_vapp, vm_vdc, vm_template, 'all'):
                inventory[group]['children'].append(vm['name'])
            inventory[vm['name']] = [vm_nic]
        return inventory
    except Exception as e:
        # Any API/parsing failure degrades to an empty hosts output.
        return { 'all': {'hosts': []}, '_meta': { 'hostvars': {} } }
def get_inventory(enterprise, config):
    """Return the inventory as a JSON string: from the cache when it is
    fresh, otherwise by querying the Abiquo API (refreshing the cache).
    """
    if cache_available(config):
        # get_cache() returns the raw JSON text that save_cache() wrote.
        # Return it as-is: the original json.dumps() here double-encoded
        # the string, and re-saving it corrupted the cache on every read.
        return get_cache('inventory', config)
    # MAKE ABIQUO API CALLS #
    inv = generate_inv_from_api(enterprise, config)
    save_cache(inv, config)
    return json.dumps(inv)
if __name__ == '__main__':
inventory = {}
enterprise = {}
# Read config
config = ConfigParser.SafeConfigParser()
for configfilename in [os.path.abspath(sys.argv[0]).rstrip('.py') + '.ini', 'abiquo.ini']:
if os.path.exists(configfilename):
config.read(configfilename)
break
try:
login = api_get(None,config)
enterprise = next(link for link in (login['links']) if (link['rel']=='enterprise'))
except Exception as e:
enterprise = None
if cache_available(config):
inventory = get_cache('inventory', config)
else:
inventory = get_inventory(enterprise, config)
# return to ansible
sys.stdout.write(str(inventory))
sys.stdout.flush() | random_line_split | |
abiquo.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
External inventory script for Abiquo
====================================
Shamelessly copied from an existing inventory script.
This script generates an inventory that Ansible can understand by making API requests to Abiquo API
Requires some python libraries, ensure to have them installed when using this script.
This script has been tested in Abiquo 3.0 but it may work also for Abiquo 2.6.
Before using this script you may want to modify abiquo.ini config file.
This script generates an Ansible hosts file with these host groups:
ABQ_xxx: Defines a hosts itself by Abiquo VM name label
all: Contains all hosts defined in Abiquo user's enterprise
virtualdatecenter: Creates a host group for each virtualdatacenter containing all hosts defined on it
virtualappliance: Creates a host group for each virtualappliance containing all hosts defined on it
imagetemplate: Creates a host group for each image template containing all hosts using it
'''
# (c) 2014, Daniel Beneyto <daniel.beneyto@abiquo.com>
#
# This file is part of Ansible,
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
import os
import sys
import time
import ConfigParser
try:
import json
except ImportError:
import simplejson as json
from ansible.module_utils.urls import open_url
def api_get(link, config):
try:
if link is None:
url = config.get('api','uri') + config.get('api','login_path')
headers = {"Accept": config.get('api','login_type')}
else:
url = link['href'] + '?limit=0'
headers = {"Accept": link['type']}
result = open_url(url, headers=headers, url_username=config.get('auth','apiuser').replace('\n', ''),
url_password=config.get('auth','apipass').replace('\n', ''))
return json.loads(result.read())
except:
return None
def save_cache(data, config):
''' saves item to cache '''
dpath = config.get('cache','cache_dir')
try:
cache = open('/'.join([dpath,'inventory']), 'w')
cache.write(json.dumps(data))
cache.close()
except IOError as e:
pass # not really sure what to do here
def get_cache(cache_item, config):
''' returns cached item '''
dpath = config.get('cache','cache_dir')
inv = {}
try:
cache = open('/'.join([dpath,'inventory']), 'r')
inv = cache.read()
cache.close()
except IOError as e:
pass # not really sure what to do here
return inv
def cache_available(config):
''' checks if we have a 'fresh' cache available for item requested '''
if config.has_option('cache','cache_dir'):
dpath = config.get('cache','cache_dir')
try:
existing = os.stat( '/'.join([dpath,'inventory']))
except:
# cache doesn't exist or isn't accessible
return False
if config.has_option('cache', 'cache_max_age'):
maxage = config.get('cache', 'cache_max_age')
if ((int(time.time()) - int(existing.st_mtime)) <= int(maxage)):
return True
return False
def generate_inv_from_api(enterprise_entity,config):
try:
inventory['all'] = {}
inventory['all']['children'] = []
inventory['all']['hosts'] = []
inventory['_meta'] = {}
inventory['_meta']['hostvars'] = {}
enterprise = api_get(enterprise_entity,config)
vms_entity = next(link for link in (enterprise['links']) if (link['rel']=='virtualmachines'))
vms = api_get(vms_entity,config)
for vmcollection in vms['collection']:
vm_vapp = next(link for link in (vmcollection['links']) if (link['rel']=='virtualappliance'))['title'].replace('[','').replace(']','').replace(' ','_')
vm_vdc = next(link for link in (vmcollection['links']) if (link['rel']=='virtualdatacenter'))['title'].replace('[','').replace(']','').replace(' ','_')
vm_template = next(link for link in (vmcollection['links']) if (link['rel']=='virtualmachinetemplate'))['title'].replace('[','').replace(']','').replace(' ','_')
# From abiquo.ini: Only adding to inventory VMs with public IP
if (config.getboolean('defaults', 'public_ip_only')) == True:
for link in vmcollection['links']:
if (link['type']=='application/vnd.abiquo.publicip+json' and link['rel']=='ip'):
vm_nic = link['title']
break
else:
vm_nic = None
# Otherwise, assigning defined network interface IP address
else:
for link in vmcollection['links']:
if (link['rel']==config.get('defaults', 'default_net_interface')):
vm_nic = link['title']
break
else:
vm_nic = None
vm_state = True
# From abiquo.ini: Only adding to inventory VMs deployed
if ((config.getboolean('defaults', 'deployed_only') == True) and (vmcollection['state'] == 'NOT_ALLOCATED')):
vm_state = False
if vm_nic is not None and vm_state:
if vm_vapp not in inventory:
inventory[vm_vapp] = {}
inventory[vm_vapp]['children'] = []
inventory[vm_vapp]['hosts'] = []
if vm_vdc not in inventory:
|
if vm_template not in inventory:
inventory[vm_template] = {}
inventory[vm_template]['children'] = []
inventory[vm_template]['hosts'] = []
if config.getboolean('defaults', 'get_metadata') == True:
meta_entity = next(link for link in (vmcollection['links']) if (link['rel']=='metadata'))
try:
metadata = api_get(meta_entity,config)
if (config.getfloat("api","version") >= 3.0):
vm_metadata = metadata['metadata']
else:
vm_metadata = metadata['metadata']['metadata']
inventory['_meta']['hostvars'][vm_nic] = vm_metadata
except Exception as e:
pass
inventory[vm_vapp]['children'].append(vmcollection['name'])
inventory[vm_vdc]['children'].append(vmcollection['name'])
inventory[vm_template]['children'].append(vmcollection['name'])
inventory['all']['children'].append(vmcollection['name'])
inventory[vmcollection['name']] = []
inventory[vmcollection['name']].append(vm_nic)
return inventory
except Exception as e:
# Return empty hosts output
return { 'all': {'hosts': []}, '_meta': { 'hostvars': {} } }
def get_inventory(enterprise, config):
    """Return the inventory as a JSON string: from the cache when it is
    fresh, otherwise by querying the Abiquo API (refreshing the cache).
    """
    if cache_available(config):
        # get_cache() returns the raw JSON text that save_cache() wrote.
        # Return it as-is: the original json.dumps() here double-encoded
        # the string, and re-saving it corrupted the cache on every read.
        return get_cache('inventory', config)
    # MAKE ABIQUO API CALLS #
    inv = generate_inv_from_api(enterprise, config)
    save_cache(inv, config)
    return json.dumps(inv)
if __name__ == '__main__':
    # Accumulators; 'inventory' is also mutated as a module-level global
    # by generate_inv_from_api() when the API path is taken.
    inventory = {}
    enterprise = {}
    # Read config
    # First match wins: "<script name>.ini" next to the script, then a
    # local abiquo.ini. NOTE(review): rstrip('.py') strips *characters*,
    # not a suffix, so a script name ending in 'p'/'y' (e.g. 'copy.py')
    # would look up a truncated .ini name -- confirm before relying on it.
    config = ConfigParser.SafeConfigParser()
    for configfilename in [os.path.abspath(sys.argv[0]).rstrip('.py') + '.ini', 'abiquo.ini']:
        if os.path.exists(configfilename):
            config.read(configfilename)
            break
    # Log in and locate the caller's 'enterprise' link; on any failure
    # fall back to None so the script still emits an (empty) inventory.
    try:
        login = api_get(None,config)
        enterprise = next(link for link in (login['links']) if (link['rel']=='enterprise'))
    except Exception as e:
        enterprise = None
    # Serve from the cache when it is fresh, otherwise hit the API.
    if cache_available(config):
        inventory = get_cache('inventory', config)
    else:
        inventory = get_inventory(enterprise, config)
    # return to ansible
    sys.stdout.write(str(inventory))
    sys.stdout.flush()
| inventory[vm_vdc] = {}
inventory[vm_vdc]['hosts'] = []
inventory[vm_vdc]['children'] = [] | conditional_block |
map.rs | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
use std::collections::HashMap;
use std::convert::{TryFrom, TryInto};
use std::iter::FromIterator;
use std::marker::PhantomData;
use crate::object::debug_print;
use crate::array::Array;
use crate::errors::Error;
use crate::object::{IsObjectRef, Object, ObjectPtr, ObjectRef};
use crate::ArgValue;
use crate::{
external,
function::{Function, Result},
RetValue,
};
/// Rust handle to a TVM map object (keys and values are TVM object refs).
///
/// Only the underlying `ObjectRef` is stored; `PhantomData` pins the
/// key/value types `K`/`V` at compile time without occupying space.
#[repr(C)]
#[derive(Clone)]
pub struct Map<K, V>
where
    K: IsObjectRef,
    V: IsObjectRef,
{
    object: ObjectRef,
    _data: PhantomData<(K, V)>,
}
// TODO(@jroesch): convert to use generics instead of casting inside
// the implementation.
// FFI bindings to the registered TVM `node.*` packed functions that
// implement the map operations on the C++ side.
external! {
    #[name("node.ArrayGetItem")]
    fn array_get_item(array: ObjectRef, index: isize) -> ObjectRef;
    #[name("node.MapSize")]
    fn map_size(map: ObjectRef) -> i64;
    #[name("node.MapGetItem")]
    fn map_get_item(map_object: ObjectRef, key: ObjectRef) -> ObjectRef;
    #[name("node.MapCount")]
    fn map_count(map: ObjectRef, key: ObjectRef) -> ObjectRef;
    #[name("node.MapItems")]
    fn map_items(map: ObjectRef) -> Array<ObjectRef>;
}
/// Builds a TVM map from an iterator of `(key, value)` pairs.
impl<K, V> FromIterator<(K, V)> for Map<K, V>
where
    K: IsObjectRef,
    V: IsObjectRef,
{
    fn from_iter<T: IntoIterator<Item = (K, V)>>(iter: T) -> Self {
        let iter = iter.into_iter();
        let (lower_bound, upper_bound) = iter.size_hint();
        // Flat argument buffer [k0, v0, k1, v1, ...], as expected by the
        // `node.Map` constructor consumed in `from_data`.
        let mut buffer: Vec<ArgValue> = Vec::with_capacity(upper_bound.unwrap_or(lower_bound) * 2);
        for (k, v) in iter {
            buffer.push(k.into());
            buffer.push(v.into())
        }
        Self::from_data(buffer).expect("failed to convert from data")
    }
}
impl<K, V> Map<K, V>
where
K: IsObjectRef,
V: IsObjectRef,
{
pub fn from_data(data: Vec<ArgValue>) -> Result<Map<K, V>> |
pub fn get(&self, key: &K) -> Result<V>
where
V: TryFrom<RetValue, Error = Error>,
{
let key = key.clone();
let oref: ObjectRef = map_get_item(self.object.clone(), key.upcast())?;
oref.downcast()
}
}
/// Owning iterator over a [`Map`]'s entries.
///
/// The underlying TVM call materializes all keys and values up front
/// into a flat `[k0, v0, k1, v1, ...]` array, so iteration is not lazy.
pub struct IntoIter<K, V> {
    // NB: due to FFI this isn't as lazy as one might like
    key_and_values: Array<ObjectRef>,
    next_key: i64,
    _data: PhantomData<(K, V)>,
}
impl<K, V> Iterator for IntoIter<K, V>
where
    K: IsObjectRef,
    V: IsObjectRef,
{
    type Item = (K, V);
    #[inline]
    fn next(&mut self) -> Option<(K, V)> {
        if self.next_key < self.key_and_values.len() {
            let key = self
                .key_and_values
                .get(self.next_key as isize)
                .expect("this should always succeed");
            let value = self
                .key_and_values
                .get((self.next_key as isize) + 1)
                .expect("this should always succeed");
            // Advance past the (key, value) pair just consumed.
            self.next_key += 2;
            Some((key.downcast().unwrap(), value.downcast().unwrap()))
        } else {
            None
        }
    }
    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        // Fix: report the *remaining* number of pairs rather than the
        // total (the original ignored `next_key`, overstating the lower
        // bound after partial consumption), and supply the exact upper
        // bound since the length is known.
        let remaining = ((self.key_and_values.len() - self.next_key) / 2) as usize;
        (remaining, Some(remaining))
    }
}
/// Fetches all entries through the TVM `node.MapItems` call and returns
/// an iterator over them.
impl<K, V> IntoIterator for Map<K, V>
where
    K: IsObjectRef,
    V: IsObjectRef,
{
    type Item = (K, V);
    type IntoIter = IntoIter<K, V>;
    fn into_iter(self) -> IntoIter<K, V> {
        let items = map_items(self.object).expect("unable to get map items");
        IntoIter {
            key_and_values: items,
            next_key: 0,
            _data: PhantomData,
        }
    }
}
use std::fmt;
/// Formats the map by delegating to TVM's own debug printer.
impl<K, V> fmt::Debug for Map<K, V>
where
    K: IsObjectRef,
    V: IsObjectRef,
{
    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
        let ctr = debug_print(self.object.clone()).unwrap();
        fmt.write_fmt(format_args!("{:?}", ctr))
    }
}
/// Converts a TVM map into a std `HashMap` by draining every entry
/// through the FFI iterator.
impl<K, V, S> From<Map<K, V>> for HashMap<K, V, S>
where
    K: Eq + std::hash::Hash,
    K: IsObjectRef,
    V: IsObjectRef,
    S: std::hash::BuildHasher + std::default::Default,
{
    fn from(map: Map<K, V>) -> HashMap<K, V, S> {
        HashMap::from_iter(map.into_iter())
    }
}
/// A map is passed to TVM packed functions as its underlying object ref.
impl<'a, K, V> From<Map<K, V>> for ArgValue<'a>
where
    K: IsObjectRef,
    V: IsObjectRef,
{
    fn from(map: Map<K, V>) -> ArgValue<'a> {
        map.object.into()
    }
}
/// A map is returned from TVM packed functions as its object ref.
impl<K, V> From<Map<K, V>> for RetValue
where
    K: IsObjectRef,
    V: IsObjectRef,
{
    fn from(map: Map<K, V>) -> RetValue {
        map.object.into()
    }
}
/// Reconstructs a `Map` from an incoming argument value.
/// NOTE(review): only converts to `ObjectRef` -- the key/value types are
/// not verified here (see the TODO below).
impl<'a, K, V> TryFrom<ArgValue<'a>> for Map<K, V>
where
    K: IsObjectRef,
    V: IsObjectRef,
{
    type Error = Error;
    fn try_from(array: ArgValue<'a>) -> Result<Map<K, V>> {
        let object_ref: ObjectRef = array.try_into()?;
        // TODO: type check
        Ok(Map {
            object: object_ref,
            _data: PhantomData,
        })
    }
}
/// Reconstructs a `Map` from a packed-function return value.
/// NOTE(review): key/value types are not verified here either.
impl<K, V> TryFrom<RetValue> for Map<K, V>
where
    K: IsObjectRef,
    V: IsObjectRef,
{
    type Error = Error;
    fn try_from(array: RetValue) -> Result<Map<K, V>> {
        let object_ref = array.try_into()?;
        // TODO: type check
        Ok(Map {
            object: object_ref,
            _data: PhantomData,
        })
    }
}
#[cfg(test)]
mod test {
    use std::collections::HashMap;
    use super::*;
    use crate::string::String as TString;
    /// Round-trip check: HashMap -> TVM Map -> HashMap must preserve
    /// all key/value pairs.
    #[test]
    fn test_from_into_hash_map() {
        let mut std_map: HashMap<TString, TString> = HashMap::new();
        std_map.insert("key1".into(), "value1".into());
        std_map.insert("key2".into(), "value2".into());
        let tvm_map = Map::from_iter(std_map.clone().into_iter());
        let back_map = tvm_map.into();
        assert_eq!(std_map, back_map);
    }
}
| {
let func = Function::get("node.Map").expect(
"node.Map function is not registered, this is most likely a build or linking error",
);
let map_data: ObjectPtr<Object> = func.invoke(data)?.try_into()?;
debug_assert!(
map_data.count() >= 1,
"map_data count is {}",
map_data.count()
);
Ok(Map {
object: map_data.into(),
_data: PhantomData,
})
} | identifier_body |
map.rs | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
use std::collections::HashMap;
use std::convert::{TryFrom, TryInto};
use std::iter::FromIterator;
use std::marker::PhantomData;
use crate::object::debug_print;
use crate::array::Array;
use crate::errors::Error;
use crate::object::{IsObjectRef, Object, ObjectPtr, ObjectRef};
use crate::ArgValue;
use crate::{
external,
function::{Function, Result},
RetValue,
};
#[repr(C)]
#[derive(Clone)]
pub struct Map<K, V>
where
K: IsObjectRef,
V: IsObjectRef,
{
object: ObjectRef,
_data: PhantomData<(K, V)>,
}
// TODO(@jroesch): convert to use generics instead of casting inside
// the implementation.
external! {
#[name("node.ArrayGetItem")]
fn array_get_item(array: ObjectRef, index: isize) -> ObjectRef;
#[name("node.MapSize")]
fn map_size(map: ObjectRef) -> i64;
#[name("node.MapGetItem")]
fn map_get_item(map_object: ObjectRef, key: ObjectRef) -> ObjectRef;
#[name("node.MapCount")]
fn map_count(map: ObjectRef, key: ObjectRef) -> ObjectRef;
#[name("node.MapItems")]
fn map_items(map: ObjectRef) -> Array<ObjectRef>;
}
impl<K, V> FromIterator<(K, V)> for Map<K, V>
where
K: IsObjectRef,
V: IsObjectRef,
{
fn from_iter<T: IntoIterator<Item = (K, V)>>(iter: T) -> Self {
let iter = iter.into_iter();
let (lower_bound, upper_bound) = iter.size_hint();
let mut buffer: Vec<ArgValue> = Vec::with_capacity(upper_bound.unwrap_or(lower_bound) * 2);
for (k, v) in iter {
buffer.push(k.into());
buffer.push(v.into())
}
Self::from_data(buffer).expect("failed to convert from data")
}
}
/// Inherent operations on a TVM map.
impl<K, V> Map<K, V>
where
    K: IsObjectRef,
    V: IsObjectRef,
{
    /// Constructs a map by invoking the registered `node.Map` packed
    /// function with a flat `[k0, v0, k1, v1, ...]` argument list.
    pub fn from_data(data: Vec<ArgValue>) -> Result<Map<K, V>> {
        let func = Function::get("node.Map").expect(
            "node.Map function is not registered, this is most likely a build or linking error",
        );
        let map_data: ObjectPtr<Object> = func.invoke(data)?.try_into()?;
        // NOTE(review): count() appears to be the object's strong
        // reference count -- a freshly returned object should hold at
        // least one reference; confirm against ObjectPtr's docs.
        debug_assert!(
            map_data.count() >= 1,
            "map_data count is {}",
            map_data.count()
        );
        Ok(Map {
            object: map_data.into(),
            _data: PhantomData,
        })
    }
    /// Looks up `key` through the `node.MapGetItem` FFI call and
    /// attempts to downcast the result to `V`.
    pub fn get(&self, key: &K) -> Result<V>
    where
        V: TryFrom<RetValue, Error = Error>,
    {
        let key = key.clone();
        let oref: ObjectRef = map_get_item(self.object.clone(), key.upcast())?;
        oref.downcast()
    }
}
pub struct IntoIter<K, V> {
// NB: due to FFI this isn't as lazy as one might like
key_and_values: Array<ObjectRef>,
next_key: i64,
_data: PhantomData<(K, V)>,
}
impl<K, V> Iterator for IntoIter<K, V>
where
K: IsObjectRef,
V: IsObjectRef,
{
type Item = (K, V);
#[inline]
fn next(&mut self) -> Option<(K, V)> {
if self.next_key < self.key_and_values.len() {
let key = self
.key_and_values
.get(self.next_key as isize)
.expect("this should always succeed");
let value = self
.key_and_values
.get((self.next_key as isize) + 1)
.expect("this should always succeed");
self.next_key += 2;
Some((key.downcast().unwrap(), value.downcast().unwrap()))
} else {
None
}
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
((self.key_and_values.len() / 2) as usize, None)
}
}
impl<K, V> IntoIterator for Map<K, V>
where
K: IsObjectRef,
V: IsObjectRef,
{
type Item = (K, V);
type IntoIter = IntoIter<K, V>;
fn into_iter(self) -> IntoIter<K, V> {
let items = map_items(self.object).expect("unable to get map items");
IntoIter {
key_and_values: items,
next_key: 0,
_data: PhantomData,
}
}
}
use std::fmt;
impl<K, V> fmt::Debug for Map<K, V>
where
K: IsObjectRef,
V: IsObjectRef,
{
fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
let ctr = debug_print(self.object.clone()).unwrap();
fmt.write_fmt(format_args!("{:?}", ctr))
}
}
impl<K, V, S> From<Map<K, V>> for HashMap<K, V, S>
where
K: Eq + std::hash::Hash,
K: IsObjectRef,
V: IsObjectRef,
S: std::hash::BuildHasher + std::default::Default,
{
fn from(map: Map<K, V>) -> HashMap<K, V, S> {
HashMap::from_iter(map.into_iter())
}
}
impl<'a, K, V> From<Map<K, V>> for ArgValue<'a>
where
K: IsObjectRef,
V: IsObjectRef,
{
fn from(map: Map<K, V>) -> ArgValue<'a> {
map.object.into()
}
}
impl<K, V> From<Map<K, V>> for RetValue
where
K: IsObjectRef,
V: IsObjectRef,
{
fn from(map: Map<K, V>) -> RetValue {
map.object.into()
}
}
impl<'a, K, V> TryFrom<ArgValue<'a>> for Map<K, V>
where
K: IsObjectRef,
V: IsObjectRef,
{
type Error = Error;
fn try_from(array: ArgValue<'a>) -> Result<Map<K, V>> {
let object_ref: ObjectRef = array.try_into()?;
// TODO: type check
Ok(Map {
object: object_ref,
_data: PhantomData,
})
}
}
impl<K, V> TryFrom<RetValue> for Map<K, V>
where | V: IsObjectRef,
{
type Error = Error;
fn try_from(array: RetValue) -> Result<Map<K, V>> {
let object_ref = array.try_into()?;
// TODO: type check
Ok(Map {
object: object_ref,
_data: PhantomData,
})
}
}
#[cfg(test)]
mod test {
use std::collections::HashMap;
use super::*;
use crate::string::String as TString;
#[test]
fn test_from_into_hash_map() {
let mut std_map: HashMap<TString, TString> = HashMap::new();
std_map.insert("key1".into(), "value1".into());
std_map.insert("key2".into(), "value2".into());
let tvm_map = Map::from_iter(std_map.clone().into_iter());
let back_map = tvm_map.into();
assert_eq!(std_map, back_map);
}
} | K: IsObjectRef, | random_line_split |
map.rs | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
use std::collections::HashMap;
use std::convert::{TryFrom, TryInto};
use std::iter::FromIterator;
use std::marker::PhantomData;
use crate::object::debug_print;
use crate::array::Array;
use crate::errors::Error;
use crate::object::{IsObjectRef, Object, ObjectPtr, ObjectRef};
use crate::ArgValue;
use crate::{
external,
function::{Function, Result},
RetValue,
};
#[repr(C)]
#[derive(Clone)]
pub struct Map<K, V>
where
K: IsObjectRef,
V: IsObjectRef,
{
object: ObjectRef,
_data: PhantomData<(K, V)>,
}
// TODO(@jroesch): convert to use generics instead of casting inside
// the implementation.
external! {
#[name("node.ArrayGetItem")]
fn array_get_item(array: ObjectRef, index: isize) -> ObjectRef;
#[name("node.MapSize")]
fn map_size(map: ObjectRef) -> i64;
#[name("node.MapGetItem")]
fn map_get_item(map_object: ObjectRef, key: ObjectRef) -> ObjectRef;
#[name("node.MapCount")]
fn map_count(map: ObjectRef, key: ObjectRef) -> ObjectRef;
#[name("node.MapItems")]
fn map_items(map: ObjectRef) -> Array<ObjectRef>;
}
impl<K, V> FromIterator<(K, V)> for Map<K, V>
where
K: IsObjectRef,
V: IsObjectRef,
{
fn from_iter<T: IntoIterator<Item = (K, V)>>(iter: T) -> Self {
let iter = iter.into_iter();
let (lower_bound, upper_bound) = iter.size_hint();
let mut buffer: Vec<ArgValue> = Vec::with_capacity(upper_bound.unwrap_or(lower_bound) * 2);
for (k, v) in iter {
buffer.push(k.into());
buffer.push(v.into())
}
Self::from_data(buffer).expect("failed to convert from data")
}
}
impl<K, V> Map<K, V>
where
K: IsObjectRef,
V: IsObjectRef,
{
pub fn from_data(data: Vec<ArgValue>) -> Result<Map<K, V>> {
let func = Function::get("node.Map").expect(
"node.Map function is not registered, this is most likely a build or linking error",
);
let map_data: ObjectPtr<Object> = func.invoke(data)?.try_into()?;
debug_assert!(
map_data.count() >= 1,
"map_data count is {}",
map_data.count()
);
Ok(Map {
object: map_data.into(),
_data: PhantomData,
})
}
pub fn get(&self, key: &K) -> Result<V>
where
V: TryFrom<RetValue, Error = Error>,
{
let key = key.clone();
let oref: ObjectRef = map_get_item(self.object.clone(), key.upcast())?;
oref.downcast()
}
}
pub struct IntoIter<K, V> {
// NB: due to FFI this isn't as lazy as one might like
key_and_values: Array<ObjectRef>,
next_key: i64,
_data: PhantomData<(K, V)>,
}
impl<K, V> Iterator for IntoIter<K, V>
where
K: IsObjectRef,
V: IsObjectRef,
{
type Item = (K, V);
#[inline]
fn next(&mut self) -> Option<(K, V)> {
if self.next_key < self.key_and_values.len() | else {
None
}
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
((self.key_and_values.len() / 2) as usize, None)
}
}
impl<K, V> IntoIterator for Map<K, V>
where
K: IsObjectRef,
V: IsObjectRef,
{
type Item = (K, V);
type IntoIter = IntoIter<K, V>;
fn into_iter(self) -> IntoIter<K, V> {
let items = map_items(self.object).expect("unable to get map items");
IntoIter {
key_and_values: items,
next_key: 0,
_data: PhantomData,
}
}
}
use std::fmt;
impl<K, V> fmt::Debug for Map<K, V>
where
K: IsObjectRef,
V: IsObjectRef,
{
fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
let ctr = debug_print(self.object.clone()).unwrap();
fmt.write_fmt(format_args!("{:?}", ctr))
}
}
impl<K, V, S> From<Map<K, V>> for HashMap<K, V, S>
where
K: Eq + std::hash::Hash,
K: IsObjectRef,
V: IsObjectRef,
S: std::hash::BuildHasher + std::default::Default,
{
fn from(map: Map<K, V>) -> HashMap<K, V, S> {
HashMap::from_iter(map.into_iter())
}
}
impl<'a, K, V> From<Map<K, V>> for ArgValue<'a>
where
K: IsObjectRef,
V: IsObjectRef,
{
fn from(map: Map<K, V>) -> ArgValue<'a> {
map.object.into()
}
}
impl<K, V> From<Map<K, V>> for RetValue
where
K: IsObjectRef,
V: IsObjectRef,
{
fn from(map: Map<K, V>) -> RetValue {
map.object.into()
}
}
impl<'a, K, V> TryFrom<ArgValue<'a>> for Map<K, V>
where
K: IsObjectRef,
V: IsObjectRef,
{
type Error = Error;
fn try_from(array: ArgValue<'a>) -> Result<Map<K, V>> {
let object_ref: ObjectRef = array.try_into()?;
// TODO: type check
Ok(Map {
object: object_ref,
_data: PhantomData,
})
}
}
impl<K, V> TryFrom<RetValue> for Map<K, V>
where
K: IsObjectRef,
V: IsObjectRef,
{
type Error = Error;
fn try_from(array: RetValue) -> Result<Map<K, V>> {
let object_ref = array.try_into()?;
// TODO: type check
Ok(Map {
object: object_ref,
_data: PhantomData,
})
}
}
#[cfg(test)]
mod test {
use std::collections::HashMap;
use super::*;
use crate::string::String as TString;
#[test]
fn test_from_into_hash_map() {
let mut std_map: HashMap<TString, TString> = HashMap::new();
std_map.insert("key1".into(), "value1".into());
std_map.insert("key2".into(), "value2".into());
let tvm_map = Map::from_iter(std_map.clone().into_iter());
let back_map = tvm_map.into();
assert_eq!(std_map, back_map);
}
}
| {
let key = self
.key_and_values
.get(self.next_key as isize)
.expect("this should always succeed");
let value = self
.key_and_values
.get((self.next_key as isize) + 1)
.expect("this should always succeed");
self.next_key += 2;
Some((key.downcast().unwrap(), value.downcast().unwrap()))
} | conditional_block |
map.rs | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
use std::collections::HashMap;
use std::convert::{TryFrom, TryInto};
use std::iter::FromIterator;
use std::marker::PhantomData;
use crate::object::debug_print;
use crate::array::Array;
use crate::errors::Error;
use crate::object::{IsObjectRef, Object, ObjectPtr, ObjectRef};
use crate::ArgValue;
use crate::{
external,
function::{Function, Result},
RetValue,
};
#[repr(C)]
#[derive(Clone)]
pub struct Map<K, V>
where
K: IsObjectRef,
V: IsObjectRef,
{
object: ObjectRef,
_data: PhantomData<(K, V)>,
}
// TODO(@jroesch): convert to use generics instead of casting inside
// the implementation.
external! {
#[name("node.ArrayGetItem")]
fn array_get_item(array: ObjectRef, index: isize) -> ObjectRef;
#[name("node.MapSize")]
fn map_size(map: ObjectRef) -> i64;
#[name("node.MapGetItem")]
fn map_get_item(map_object: ObjectRef, key: ObjectRef) -> ObjectRef;
#[name("node.MapCount")]
fn map_count(map: ObjectRef, key: ObjectRef) -> ObjectRef;
#[name("node.MapItems")]
fn map_items(map: ObjectRef) -> Array<ObjectRef>;
}
impl<K, V> FromIterator<(K, V)> for Map<K, V>
where
K: IsObjectRef,
V: IsObjectRef,
{
fn from_iter<T: IntoIterator<Item = (K, V)>>(iter: T) -> Self {
let iter = iter.into_iter();
let (lower_bound, upper_bound) = iter.size_hint();
let mut buffer: Vec<ArgValue> = Vec::with_capacity(upper_bound.unwrap_or(lower_bound) * 2);
for (k, v) in iter {
buffer.push(k.into());
buffer.push(v.into())
}
Self::from_data(buffer).expect("failed to convert from data")
}
}
impl<K, V> Map<K, V>
where
K: IsObjectRef,
V: IsObjectRef,
{
pub fn from_data(data: Vec<ArgValue>) -> Result<Map<K, V>> {
let func = Function::get("node.Map").expect(
"node.Map function is not registered, this is most likely a build or linking error",
);
let map_data: ObjectPtr<Object> = func.invoke(data)?.try_into()?;
debug_assert!(
map_data.count() >= 1,
"map_data count is {}",
map_data.count()
);
Ok(Map {
object: map_data.into(),
_data: PhantomData,
})
}
pub fn | (&self, key: &K) -> Result<V>
where
V: TryFrom<RetValue, Error = Error>,
{
let key = key.clone();
let oref: ObjectRef = map_get_item(self.object.clone(), key.upcast())?;
oref.downcast()
}
}
pub struct IntoIter<K, V> {
// NB: due to FFI this isn't as lazy as one might like
key_and_values: Array<ObjectRef>,
next_key: i64,
_data: PhantomData<(K, V)>,
}
impl<K, V> Iterator for IntoIter<K, V>
where
K: IsObjectRef,
V: IsObjectRef,
{
type Item = (K, V);
#[inline]
fn next(&mut self) -> Option<(K, V)> {
if self.next_key < self.key_and_values.len() {
let key = self
.key_and_values
.get(self.next_key as isize)
.expect("this should always succeed");
let value = self
.key_and_values
.get((self.next_key as isize) + 1)
.expect("this should always succeed");
self.next_key += 2;
Some((key.downcast().unwrap(), value.downcast().unwrap()))
} else {
None
}
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
((self.key_and_values.len() / 2) as usize, None)
}
}
impl<K, V> IntoIterator for Map<K, V>
where
K: IsObjectRef,
V: IsObjectRef,
{
type Item = (K, V);
type IntoIter = IntoIter<K, V>;
fn into_iter(self) -> IntoIter<K, V> {
let items = map_items(self.object).expect("unable to get map items");
IntoIter {
key_and_values: items,
next_key: 0,
_data: PhantomData,
}
}
}
use std::fmt;
impl<K, V> fmt::Debug for Map<K, V>
where
K: IsObjectRef,
V: IsObjectRef,
{
fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
let ctr = debug_print(self.object.clone()).unwrap();
fmt.write_fmt(format_args!("{:?}", ctr))
}
}
impl<K, V, S> From<Map<K, V>> for HashMap<K, V, S>
where
K: Eq + std::hash::Hash,
K: IsObjectRef,
V: IsObjectRef,
S: std::hash::BuildHasher + std::default::Default,
{
fn from(map: Map<K, V>) -> HashMap<K, V, S> {
HashMap::from_iter(map.into_iter())
}
}
impl<'a, K, V> From<Map<K, V>> for ArgValue<'a>
where
K: IsObjectRef,
V: IsObjectRef,
{
fn from(map: Map<K, V>) -> ArgValue<'a> {
map.object.into()
}
}
impl<K, V> From<Map<K, V>> for RetValue
where
K: IsObjectRef,
V: IsObjectRef,
{
fn from(map: Map<K, V>) -> RetValue {
map.object.into()
}
}
impl<'a, K, V> TryFrom<ArgValue<'a>> for Map<K, V>
where
K: IsObjectRef,
V: IsObjectRef,
{
type Error = Error;
fn try_from(array: ArgValue<'a>) -> Result<Map<K, V>> {
let object_ref: ObjectRef = array.try_into()?;
// TODO: type check
Ok(Map {
object: object_ref,
_data: PhantomData,
})
}
}
impl<K, V> TryFrom<RetValue> for Map<K, V>
where
K: IsObjectRef,
V: IsObjectRef,
{
type Error = Error;
fn try_from(array: RetValue) -> Result<Map<K, V>> {
let object_ref = array.try_into()?;
// TODO: type check
Ok(Map {
object: object_ref,
_data: PhantomData,
})
}
}
#[cfg(test)]
mod test {
use std::collections::HashMap;
use super::*;
use crate::string::String as TString;
#[test]
fn test_from_into_hash_map() {
let mut std_map: HashMap<TString, TString> = HashMap::new();
std_map.insert("key1".into(), "value1".into());
std_map.insert("key2".into(), "value2".into());
let tvm_map = Map::from_iter(std_map.clone().into_iter());
let back_map = tvm_map.into();
assert_eq!(std_map, back_map);
}
}
| get | identifier_name |
duration.pipe.ts | import * as moment from 'moment';
import { Inject, Optional, Pipe, PipeTransform } from '@angular/core';
import { NGX_MOMENT_OPTIONS, NgxMomentOptions } from './moment-options';
@Pipe({ name: 'amDuration' })
export class DurationPipe implements PipeTransform {
allowedUnits: Array<string> = ['ss', 's', 'm', 'h', 'd', 'M'];
constructor(@Optional() @Inject(NGX_MOMENT_OPTIONS) momentOptions?: NgxMomentOptions) {
this._applyOptions(momentOptions);
}
| (value: moment.DurationInputArg1, ...args: string[]): string {
if (typeof args === 'undefined' || args.length !== 1) {
throw new Error('DurationPipe: missing required time unit argument');
}
return moment.duration(value, args[0] as moment.unitOfTime.DurationConstructor).humanize();
}
private _applyOptions(momentOptions: NgxMomentOptions): void {
if (!momentOptions) {
return;
}
if (!!momentOptions.relativeTimeThresholdOptions) {
const units: Array<string> = Object.keys(momentOptions.relativeTimeThresholdOptions);
const filteredUnits: Array<string> = units.filter(
(unit) => this.allowedUnits.indexOf(unit) !== -1,
);
filteredUnits.forEach((unit) => {
moment.relativeTimeThreshold(unit, momentOptions.relativeTimeThresholdOptions[unit]);
});
}
}
}
| transform | identifier_name |
duration.pipe.ts | import * as moment from 'moment';
import { Inject, Optional, Pipe, PipeTransform } from '@angular/core';
import { NGX_MOMENT_OPTIONS, NgxMomentOptions } from './moment-options';
@Pipe({ name: 'amDuration' })
export class DurationPipe implements PipeTransform {
allowedUnits: Array<string> = ['ss', 's', 'm', 'h', 'd', 'M'];
constructor(@Optional() @Inject(NGX_MOMENT_OPTIONS) momentOptions?: NgxMomentOptions) {
this._applyOptions(momentOptions);
}
transform(value: moment.DurationInputArg1, ...args: string[]): string {
if (typeof args === 'undefined' || args.length !== 1) {
throw new Error('DurationPipe: missing required time unit argument');
}
return moment.duration(value, args[0] as moment.unitOfTime.DurationConstructor).humanize();
}
private _applyOptions(momentOptions: NgxMomentOptions): void |
}
| {
if (!momentOptions) {
return;
}
if (!!momentOptions.relativeTimeThresholdOptions) {
const units: Array<string> = Object.keys(momentOptions.relativeTimeThresholdOptions);
const filteredUnits: Array<string> = units.filter(
(unit) => this.allowedUnits.indexOf(unit) !== -1,
);
filteredUnits.forEach((unit) => {
moment.relativeTimeThreshold(unit, momentOptions.relativeTimeThresholdOptions[unit]);
});
}
} | identifier_body |
duration.pipe.ts | import * as moment from 'moment';
import { Inject, Optional, Pipe, PipeTransform } from '@angular/core';
import { NGX_MOMENT_OPTIONS, NgxMomentOptions } from './moment-options';
@Pipe({ name: 'amDuration' })
export class DurationPipe implements PipeTransform {
allowedUnits: Array<string> = ['ss', 's', 'm', 'h', 'd', 'M'];
constructor(@Optional() @Inject(NGX_MOMENT_OPTIONS) momentOptions?: NgxMomentOptions) {
this._applyOptions(momentOptions);
}
transform(value: moment.DurationInputArg1, ...args: string[]): string {
if (typeof args === 'undefined' || args.length !== 1) {
throw new Error('DurationPipe: missing required time unit argument');
}
return moment.duration(value, args[0] as moment.unitOfTime.DurationConstructor).humanize();
} | if (!momentOptions) {
return;
}
if (!!momentOptions.relativeTimeThresholdOptions) {
const units: Array<string> = Object.keys(momentOptions.relativeTimeThresholdOptions);
const filteredUnits: Array<string> = units.filter(
(unit) => this.allowedUnits.indexOf(unit) !== -1,
);
filteredUnits.forEach((unit) => {
moment.relativeTimeThreshold(unit, momentOptions.relativeTimeThresholdOptions[unit]);
});
}
}
} |
private _applyOptions(momentOptions: NgxMomentOptions): void { | random_line_split |
duration.pipe.ts | import * as moment from 'moment';
import { Inject, Optional, Pipe, PipeTransform } from '@angular/core';
import { NGX_MOMENT_OPTIONS, NgxMomentOptions } from './moment-options';
@Pipe({ name: 'amDuration' })
export class DurationPipe implements PipeTransform {
allowedUnits: Array<string> = ['ss', 's', 'm', 'h', 'd', 'M'];
constructor(@Optional() @Inject(NGX_MOMENT_OPTIONS) momentOptions?: NgxMomentOptions) {
this._applyOptions(momentOptions);
}
transform(value: moment.DurationInputArg1, ...args: string[]): string {
if (typeof args === 'undefined' || args.length !== 1) |
return moment.duration(value, args[0] as moment.unitOfTime.DurationConstructor).humanize();
}
private _applyOptions(momentOptions: NgxMomentOptions): void {
if (!momentOptions) {
return;
}
if (!!momentOptions.relativeTimeThresholdOptions) {
const units: Array<string> = Object.keys(momentOptions.relativeTimeThresholdOptions);
const filteredUnits: Array<string> = units.filter(
(unit) => this.allowedUnits.indexOf(unit) !== -1,
);
filteredUnits.forEach((unit) => {
moment.relativeTimeThreshold(unit, momentOptions.relativeTimeThresholdOptions[unit]);
});
}
}
}
| {
throw new Error('DurationPipe: missing required time unit argument');
} | conditional_block |
settings.py | """
Django settings for librarymanagementsystem project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.6/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '=^%a6&@*aq8$sa$_f_r&b_gczd@sr77hv$xys7k!8f85g6-$e1'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = ['*']
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'social.apps.django_app.default',
'django_extensions',
'djangosecure',
'mainapp',
)
AUTHENTICATION_BACKENDS = (
'django.contrib.auth.backends.ModelBackend',
'social.backends.google.GoogleOpenId',
)
SECURE_FRAME_DENY = True
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'social.apps.django_app.middleware.SocialAuthExceptionMiddleware',
"djangosecure.middleware.SecurityMiddleware"
)
TEMPLATE_CONTEXT_PROCESSORS = (
'django.contrib.auth.context_processors.auth',
'social.apps.django_app.context_processors.backends',
'social.apps.django_app.context_processors.login_redirect',
'django.contrib.messages.context_processors.messages',
)
CRISPY_TEMPLATE_PACK = 'bootstrap3'
MESSAGE_STORAGE = 'django.contrib.messages.storage.session.SessionStorage'
LOGIN_REDIRECT_URL = '/admin/'
SOCIAL_AUTH_LOGIN_REDIRECT_URL = '/admin/'
LOGIN_ERROR_URL = '/login-error/'
ROOT_URLCONF = 'librarymanagementsystem.urls'
WSGI_APPLICATION = 'librarymanagementsystem.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.6/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
} | LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.6/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
STATIC_DIRS = (
os.path.join(BASE_DIR, 'static'),
)
TEMPLATE_DIRS = (
os.path.join(BASE_DIR, 'templates'),
)
TEMPLATE_LOADERS = (
'django.template.loaders.app_directories.Loader',
)
SOCIAL_AUTH_GOOGLE_WHITELISTED_DOMAINS = ['gmail.com']
try:
from local_settings import *
except ImportError:
pass | }
# Internationalization
# https://docs.djangoproject.com/en/1.6/topics/i18n/
| random_line_split |
test_cmdline.py | # pyOCD debugger
# Copyright (c) 2015,2018-2019 Arm Limited
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
import six
from pyocd.utility.cmdline import (
split_command_line,
convert_vector_catch,
VECTOR_CATCH_CHAR_MAP,
convert_session_options,
)
from pyocd.core.target import Target
class TestSplitCommandLine(object):
def test_split(self):
assert split_command_line('foo') == ['foo']
assert split_command_line(['foo']) == ['foo']
assert split_command_line('foo bar') == ['foo', 'bar']
assert split_command_line(['foo bar']) == ['foo', 'bar']
def test_split_strings(self):
assert split_command_line('"foo"') == ['foo']
assert split_command_line('"foo bar"') == ['foo bar']
assert split_command_line(['"foo"']) == ['foo']
assert split_command_line('a "b c" d') == ['a', "b c", 'd']
assert split_command_line("'foo bar'") == ['foo bar']
def test_split_whitespace(self):
assert split_command_line('a b') == ['a', 'b']
assert split_command_line('a\tb') == ['a', 'b']
assert split_command_line('a\rb') == ['a', 'b']
assert split_command_line('a\nb') == ['a', 'b']
assert split_command_line('a \tb') == ['a', 'b']
class TestConvertVectorCatch(object):
def test_none_str(self):
assert convert_vector_catch('none') == 0
def test_all_str(self):
assert convert_vector_catch('all') == Target.VectorCatch.ALL
def test_none_b(self):
assert convert_vector_catch(b'none') == 0
def test_all_b(self):
assert convert_vector_catch(b'all') == Target.VectorCatch.ALL
@pytest.mark.parametrize(("vc", "msk"),
list(VECTOR_CATCH_CHAR_MAP.items()))
def test_vc_str(self, vc, msk):
assert convert_vector_catch(vc) == msk
@pytest.mark.parametrize(("vc", "msk"),
[(six.b(x), y) for x,y in VECTOR_CATCH_CHAR_MAP.items()])
def test_vc_b(self, vc, msk):
assert convert_vector_catch(vc) == msk
class TestConvertSessionOptions(object):
def test_empty(self):
assert convert_session_options([]) == {}
def test_unknown_option(self):
assert convert_session_options(['dumkopf']) == {}
def test_bool(self):
assert convert_session_options(['auto_unlock']) == {'auto_unlock': True}
assert convert_session_options(['no-auto_unlock']) == {'auto_unlock': False}
assert convert_session_options(['auto_unlock=1']) == {'auto_unlock': True}
assert convert_session_options(['auto_unlock=true']) == {'auto_unlock': True}
assert convert_session_options(['auto_unlock=yes']) == {'auto_unlock': True}
assert convert_session_options(['auto_unlock=on']) == {'auto_unlock': True}
assert convert_session_options(['auto_unlock=0']) == {'auto_unlock': False}
assert convert_session_options(['auto_unlock=false']) == {'auto_unlock': False}
assert convert_session_options(['auto_unlock=anything-goes-here']) == {'auto_unlock': False} |
def test_noncasesense(self):
# Test separate paths for with and without a value.
assert convert_session_options(['AUTO_Unlock']) == {'auto_unlock': True}
assert convert_session_options(['AUTO_Unlock=0']) == {'auto_unlock': False}
def test_int(self):
# Non-bool with no value is ignored (and logged).
assert convert_session_options(['frequency']) == {}
# Invalid int value is ignored and logged
assert convert_session_options(['frequency=abc']) == {}
# Ignore with no- prefix
assert convert_session_options(['no-frequency']) == {}
# Valid int
assert convert_session_options(['frequency=1000']) == {'frequency': 1000}
# Valid hex int
assert convert_session_options(['frequency=0x40']) == {'frequency': 64}
def test_str(self):
# Ignore with no value
assert convert_session_options(['test_binary']) == {}
# Ignore with no- prefix
assert convert_session_options(['no-test_binary']) == {}
# Valid
assert convert_session_options(['test_binary=abc']) == {'test_binary': 'abc'} | random_line_split | |
test_cmdline.py | # pyOCD debugger
# Copyright (c) 2015,2018-2019 Arm Limited
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
import six
from pyocd.utility.cmdline import (
split_command_line,
convert_vector_catch,
VECTOR_CATCH_CHAR_MAP,
convert_session_options,
)
from pyocd.core.target import Target
class TestSplitCommandLine(object):
def test_split(self):
assert split_command_line('foo') == ['foo']
assert split_command_line(['foo']) == ['foo']
assert split_command_line('foo bar') == ['foo', 'bar']
assert split_command_line(['foo bar']) == ['foo', 'bar']
def test_split_strings(self):
assert split_command_line('"foo"') == ['foo']
assert split_command_line('"foo bar"') == ['foo bar']
assert split_command_line(['"foo"']) == ['foo']
assert split_command_line('a "b c" d') == ['a', "b c", 'd']
assert split_command_line("'foo bar'") == ['foo bar']
def test_split_whitespace(self):
assert split_command_line('a b') == ['a', 'b']
assert split_command_line('a\tb') == ['a', 'b']
assert split_command_line('a\rb') == ['a', 'b']
assert split_command_line('a\nb') == ['a', 'b']
assert split_command_line('a \tb') == ['a', 'b']
class TestConvertVectorCatch(object):
def test_none_str(self):
assert convert_vector_catch('none') == 0
def test_all_str(self):
assert convert_vector_catch('all') == Target.VectorCatch.ALL
def test_none_b(self):
|
def test_all_b(self):
assert convert_vector_catch(b'all') == Target.VectorCatch.ALL
@pytest.mark.parametrize(("vc", "msk"),
list(VECTOR_CATCH_CHAR_MAP.items()))
def test_vc_str(self, vc, msk):
assert convert_vector_catch(vc) == msk
@pytest.mark.parametrize(("vc", "msk"),
[(six.b(x), y) for x,y in VECTOR_CATCH_CHAR_MAP.items()])
def test_vc_b(self, vc, msk):
assert convert_vector_catch(vc) == msk
class TestConvertSessionOptions(object):
def test_empty(self):
assert convert_session_options([]) == {}
def test_unknown_option(self):
assert convert_session_options(['dumkopf']) == {}
def test_bool(self):
assert convert_session_options(['auto_unlock']) == {'auto_unlock': True}
assert convert_session_options(['no-auto_unlock']) == {'auto_unlock': False}
assert convert_session_options(['auto_unlock=1']) == {'auto_unlock': True}
assert convert_session_options(['auto_unlock=true']) == {'auto_unlock': True}
assert convert_session_options(['auto_unlock=yes']) == {'auto_unlock': True}
assert convert_session_options(['auto_unlock=on']) == {'auto_unlock': True}
assert convert_session_options(['auto_unlock=0']) == {'auto_unlock': False}
assert convert_session_options(['auto_unlock=false']) == {'auto_unlock': False}
assert convert_session_options(['auto_unlock=anything-goes-here']) == {'auto_unlock': False}
def test_noncasesense(self):
# Test separate paths for with and without a value.
assert convert_session_options(['AUTO_Unlock']) == {'auto_unlock': True}
assert convert_session_options(['AUTO_Unlock=0']) == {'auto_unlock': False}
def test_int(self):
# Non-bool with no value is ignored (and logged).
assert convert_session_options(['frequency']) == {}
# Invalid int value is ignored and logged
assert convert_session_options(['frequency=abc']) == {}
# Ignore with no- prefix
assert convert_session_options(['no-frequency']) == {}
# Valid int
assert convert_session_options(['frequency=1000']) == {'frequency': 1000}
# Valid hex int
assert convert_session_options(['frequency=0x40']) == {'frequency': 64}
def test_str(self):
# Ignore with no value
assert convert_session_options(['test_binary']) == {}
# Ignore with no- prefix
assert convert_session_options(['no-test_binary']) == {}
# Valid
assert convert_session_options(['test_binary=abc']) == {'test_binary': 'abc'}
| assert convert_vector_catch(b'none') == 0 | identifier_body |
test_cmdline.py | # pyOCD debugger
# Copyright (c) 2015,2018-2019 Arm Limited
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
import six
from pyocd.utility.cmdline import (
split_command_line,
convert_vector_catch,
VECTOR_CATCH_CHAR_MAP,
convert_session_options,
)
from pyocd.core.target import Target
class TestSplitCommandLine(object):
def test_split(self):
assert split_command_line('foo') == ['foo']
assert split_command_line(['foo']) == ['foo']
assert split_command_line('foo bar') == ['foo', 'bar']
assert split_command_line(['foo bar']) == ['foo', 'bar']
def test_split_strings(self):
assert split_command_line('"foo"') == ['foo']
assert split_command_line('"foo bar"') == ['foo bar']
assert split_command_line(['"foo"']) == ['foo']
assert split_command_line('a "b c" d') == ['a', "b c", 'd']
assert split_command_line("'foo bar'") == ['foo bar']
def test_split_whitespace(self):
assert split_command_line('a b') == ['a', 'b']
assert split_command_line('a\tb') == ['a', 'b']
assert split_command_line('a\rb') == ['a', 'b']
assert split_command_line('a\nb') == ['a', 'b']
assert split_command_line('a \tb') == ['a', 'b']
class TestConvertVectorCatch(object):
def test_none_str(self):
assert convert_vector_catch('none') == 0
def test_all_str(self):
assert convert_vector_catch('all') == Target.VectorCatch.ALL
def test_none_b(self):
assert convert_vector_catch(b'none') == 0
def test_all_b(self):
assert convert_vector_catch(b'all') == Target.VectorCatch.ALL
@pytest.mark.parametrize(("vc", "msk"),
list(VECTOR_CATCH_CHAR_MAP.items()))
def test_vc_str(self, vc, msk):
assert convert_vector_catch(vc) == msk
@pytest.mark.parametrize(("vc", "msk"),
[(six.b(x), y) for x,y in VECTOR_CATCH_CHAR_MAP.items()])
def test_vc_b(self, vc, msk):
assert convert_vector_catch(vc) == msk
class TestConvertSessionOptions(object):
def test_empty(self):
assert convert_session_options([]) == {}
def test_unknown_option(self):
assert convert_session_options(['dumkopf']) == {}
def test_bool(self):
assert convert_session_options(['auto_unlock']) == {'auto_unlock': True}
assert convert_session_options(['no-auto_unlock']) == {'auto_unlock': False}
assert convert_session_options(['auto_unlock=1']) == {'auto_unlock': True}
assert convert_session_options(['auto_unlock=true']) == {'auto_unlock': True}
assert convert_session_options(['auto_unlock=yes']) == {'auto_unlock': True}
assert convert_session_options(['auto_unlock=on']) == {'auto_unlock': True}
assert convert_session_options(['auto_unlock=0']) == {'auto_unlock': False}
assert convert_session_options(['auto_unlock=false']) == {'auto_unlock': False}
assert convert_session_options(['auto_unlock=anything-goes-here']) == {'auto_unlock': False}
def test_noncasesense(self):
# Test separate paths for with and without a value.
assert convert_session_options(['AUTO_Unlock']) == {'auto_unlock': True}
assert convert_session_options(['AUTO_Unlock=0']) == {'auto_unlock': False}
def | (self):
# Non-bool with no value is ignored (and logged).
assert convert_session_options(['frequency']) == {}
# Invalid int value is ignored and logged
assert convert_session_options(['frequency=abc']) == {}
# Ignore with no- prefix
assert convert_session_options(['no-frequency']) == {}
# Valid int
assert convert_session_options(['frequency=1000']) == {'frequency': 1000}
# Valid hex int
assert convert_session_options(['frequency=0x40']) == {'frequency': 64}
def test_str(self):
# Ignore with no value
assert convert_session_options(['test_binary']) == {}
# Ignore with no- prefix
assert convert_session_options(['no-test_binary']) == {}
# Valid
assert convert_session_options(['test_binary=abc']) == {'test_binary': 'abc'}
| test_int | identifier_name |
network-task.js | 'use strict';
/**
* Expose 'NetworkTask'
*/
module.exports = NetworkTask;
/**
* Module dependencies
*/
var networkObject = require('./network-object');
var readLine = require('readline');
var childProcess = require('child_process');
/**
* Constants
*/
var NETWORK_TOPIC = 'monitor/network';
/**
* Constructor
* Initialize a new NetworkTask
*/
function NetworkTask(info){
this.noInstance = null;
this.generalInfo = info;
}
/**
* Class Methods
*/
NetworkTask.prototype.runAndParse = function(callback){
if(this.generalInfo){
//run the command, parse the command, return a result
console.log('running network command');
//make sure this is a new instance everytime
this.noInstance = new networkObject(this.generalInfo.thingId);
//lets run ifconfig to get mac address and ip
if(this.generalInfo.os === 'darwin'){
var res = getNetworkInterfacesMac();
if(res){
this.noInstance.nInterface=res.iname;
this.noInstance.ipAddress=res.ip;
this.noInstance.macAddress=res.mac;
}
}
else if(this.generalInfo.os === 'linux'){
var res = getNetworkInterfacesLinux();
if(res){
this.noInstance.nInterface=res.iname;
this.noInstance.ipAddress=res.ip;
this.noInstance.macAddress=res.mac;
}
}
else{
console.log('not implemented');
}
//create the child process to execute $ iftop -t -s 2 -P -N
//must run as ROOT on ubuntu side
//add the interface - from the active network
var commandLine = childProcess.spawn('iftop', ['-t','-s','3','-P','-N','-b','-B','-i',this.noInstance.nInterface]);
var noPass = this.noInstance;
var lineReader = readLine.createInterface(commandLine.stdout, commandLine.stdin);
lineReader.on('line', function(line){
noPass.read(line);
});
commandLine.on('close', function(code, signal){
//console.log('read ' + noPass.counter + ' lines');
callback(NETWORK_TOPIC, noPass);
});
}
else{
//skipping execution
console.log('skipping network task due to missing general information');
}
}
/**
* Helper Methods
*/
// get all available network interfaces for mac
// return an object with {iname, ip, mac, status}
function getNetworkInterfacesMac(){
var result={};
var availableInterfaces=[];
var returnObject = childProcess.spawnSync('ifconfig', ['-a']);
if(returnObject.stdout){
var displayStr = returnObject.stdout.toString().trim().toLowerCase();
if(displayStr){
var ifSplit = displayStr.split('\n');
if(ifSplit){
//declare a point array
var currInterface={};
for(var i=0; i<ifSplit.length; i++){
var temp = ifSplit[i].reduceWhiteSpace().trim();
//search for the first line of each
if(temp.indexOf('flags=')>=0){
if(currInterface.iname){
//lets save this interface
availableInterfaces.push(currInterface);
}
//this is the first line
var interfaceSplit = temp.split(':');
if(interfaceSplit.length == 2){
//lets get the name
var iName = interfaceSplit[0];
//create a new interface and point current to this one
var tempInterface = {};
tempInterface.iname=iName;
currInterface = tempInterface;
}
}
else{
//this is a regular line
//search for ether - which contains the mac address
//search for inet which should contain the ip address
//search for status, which indicates status
//space is important here to diffrentiate between inet6
if(temp.indexOf('inet ') >=0){
var ipSplit = temp.split(' ');
if(ipSplit.length >= 4){
currInterface.ip=ipSplit[1].trim();
}
}
if(temp.indexOf('ether')>=0){
var macSplit = temp.split(' ');
if(macSplit.length >= 2){
currInterface.mac=macSplit[1].trim();
}
}
//we'll use a different algo on mac osx since
//it actually returns the current
if(temp.indexOf('status')>=0){
var statusSplit = temp.split(':');
if(statusSplit.length >= 2){
currInterface.status=statusSplit[1].trim();
}
}
}
}
//lets save the last interface
if(currInterface.iname){
availableInterfaces.push(currInterface);
}
}
}
}
if(availableInterfaces.length > 0){
for(var j=0; j<availableInterfaces.length; j++){
var tRes = availableInterfaces[j];
if(tRes){
//we still have a possibility of seeing 2 interfaces available
if(tRes.status==='active' && tRes.ip && tRes.mac){
result=tRes;
return result;
}
}
}
}
return result;
}
function | (){
var result={};
var availableInterfaces=[];
var returnObject = childProcess.spawnSync('ifconfig', ['-a']);
if(returnObject.stdout){
var displayStr = returnObject.stdout.toString().trim().toLowerCase();
if(displayStr){
var ifSplit = displayStr.split('\n');
if(ifSplit){
//declare a point array
var currInterface={};
for(var i=0; i<ifSplit.length; i++){
var temp = ifSplit[i].reduceWhiteSpace().trim();
//search for the first line of each
if(temp.indexOf('link encap')>=0){
if(currInterface.iname){
//lets save this interface
availableInterfaces.push(currInterface);
}
//this is the first line
var interfaceSplit = temp.split('link encap:');
if(interfaceSplit.length == 2){
//lets get the name
var iName = interfaceSplit[0].trim();
var macAddr='';
//lets get the macaddr
var macSplit = interfaceSplit[1].trim().split(' ');
if(macSplit.length==3){
macAddr = macSplit[2];
}
//create a new interface and point current to this one
var tempInterface = {};
tempInterface.iname=iName;
if(macAddr){
tempInterface.mac=macAddr;
}
currInterface = tempInterface;
}
}
else{
//this is a regular line
//search for ether - which contains the mac address
//search for inet which should contain the ip address
//search for status, which indicates status
//space is important here to diffrentiate between inet6
if(temp.indexOf('inet addr:') >=0){
var ipBlockSplit = temp.split(' ');
if(ipBlockSplit.length >= 2){
//take the second entry
var ipSplit=ipBlockSplit[1].split(':');
if(ipSplit.length >= 2){
currInterface.ip=ipSplit[1].trim();
//if both ip and mac exist
if(currInterface.mac){
currInterface.status='active';
}
}
}
}
}
}
//lets save the last interface
if(currInterface.iname){
availableInterfaces.push(currInterface);
}
}
}
}
//currently only returns the first active link - if there are multiple
//interfaces active, we will probably need to handle multiple
if(availableInterfaces.length > 0){
for(var j=0; j<availableInterfaces.length; j++){
var tRes = availableInterfaces[j];
if(tRes){
if(tRes.status==='active'){
result=tRes;
}
}
}
}
return result;
} | getNetworkInterfacesLinux | identifier_name |
network-task.js | 'use strict';
/**
* Expose 'NetworkTask'
*/
module.exports = NetworkTask;
/**
* Module dependencies
*/
var networkObject = require('./network-object');
var readLine = require('readline');
var childProcess = require('child_process');
/**
* Constants
*/
var NETWORK_TOPIC = 'monitor/network';
/**
* Constructor
* Initialize a new NetworkTask
*/
function NetworkTask(info){
this.noInstance = null;
this.generalInfo = info;
}
/**
* Class Methods
*/
NetworkTask.prototype.runAndParse = function(callback){
if(this.generalInfo){
//run the command, parse the command, return a result
console.log('running network command');
//make sure this is a new instance everytime
this.noInstance = new networkObject(this.generalInfo.thingId);
//lets run ifconfig to get mac address and ip
if(this.generalInfo.os === 'darwin'){
var res = getNetworkInterfacesMac();
if(res){
this.noInstance.nInterface=res.iname;
this.noInstance.ipAddress=res.ip;
this.noInstance.macAddress=res.mac;
}
}
else if(this.generalInfo.os === 'linux'){
var res = getNetworkInterfacesLinux();
if(res){
this.noInstance.nInterface=res.iname;
this.noInstance.ipAddress=res.ip;
this.noInstance.macAddress=res.mac;
}
}
else{
console.log('not implemented');
}
//create the child process to execute $ iftop -t -s 2 -P -N
//must run as ROOT on ubuntu side
//add the interface - from the active network
var commandLine = childProcess.spawn('iftop', ['-t','-s','3','-P','-N','-b','-B','-i',this.noInstance.nInterface]);
var noPass = this.noInstance;
var lineReader = readLine.createInterface(commandLine.stdout, commandLine.stdin);
lineReader.on('line', function(line){
noPass.read(line);
});
commandLine.on('close', function(code, signal){
//console.log('read ' + noPass.counter + ' lines');
callback(NETWORK_TOPIC, noPass);
});
}
else{
//skipping execution
console.log('skipping network task due to missing general information');
}
}
/**
* Helper Methods
*/
// get all available network interfaces for mac
// return an object with {iname, ip, mac, status}
function getNetworkInterfacesMac(){
var result={};
var availableInterfaces=[];
var returnObject = childProcess.spawnSync('ifconfig', ['-a']);
if(returnObject.stdout){
var displayStr = returnObject.stdout.toString().trim().toLowerCase();
if(displayStr){
var ifSplit = displayStr.split('\n');
if(ifSplit){
//declare a point array
var currInterface={};
for(var i=0; i<ifSplit.length; i++){
var temp = ifSplit[i].reduceWhiteSpace().trim();
//search for the first line of each
if(temp.indexOf('flags=')>=0){
if(currInterface.iname){
//lets save this interface
availableInterfaces.push(currInterface);
}
//this is the first line
var interfaceSplit = temp.split(':');
if(interfaceSplit.length == 2){
//lets get the name
var iName = interfaceSplit[0];
//create a new interface and point current to this one
var tempInterface = {};
tempInterface.iname=iName;
currInterface = tempInterface;
}
}
else{
//this is a regular line
//search for ether - which contains the mac address
//search for inet which should contain the ip address
//search for status, which indicates status
//space is important here to diffrentiate between inet6
if(temp.indexOf('inet ') >=0){
var ipSplit = temp.split(' ');
if(ipSplit.length >= 4){
currInterface.ip=ipSplit[1].trim();
}
}
if(temp.indexOf('ether')>=0){
var macSplit = temp.split(' ');
if(macSplit.length >= 2){
currInterface.mac=macSplit[1].trim();
}
}
//we'll use a different algo on mac osx since
//it actually returns the current
if(temp.indexOf('status')>=0){
var statusSplit = temp.split(':');
if(statusSplit.length >= 2){
currInterface.status=statusSplit[1].trim();
}
}
}
}
//lets save the last interface
if(currInterface.iname){
availableInterfaces.push(currInterface);
}
}
}
}
if(availableInterfaces.length > 0){
for(var j=0; j<availableInterfaces.length; j++){
var tRes = availableInterfaces[j];
if(tRes){
//we still have a possibility of seeing 2 interfaces available
if(tRes.status==='active' && tRes.ip && tRes.mac){
result=tRes;
return result;
}
}
}
}
return result;
}
function getNetworkInterfacesLinux(){
var result={};
var availableInterfaces=[];
var returnObject = childProcess.spawnSync('ifconfig', ['-a']);
if(returnObject.stdout){
var displayStr = returnObject.stdout.toString().trim().toLowerCase();
if(displayStr){
var ifSplit = displayStr.split('\n');
if(ifSplit){
//declare a point array
var currInterface={};
for(var i=0; i<ifSplit.length; i++){
var temp = ifSplit[i].reduceWhiteSpace().trim();
//search for the first line of each
if(temp.indexOf('link encap')>=0){
if(currInterface.iname){
//lets save this interface
availableInterfaces.push(currInterface);
}
//this is the first line
var interfaceSplit = temp.split('link encap:');
if(interfaceSplit.length == 2){
//lets get the name
var iName = interfaceSplit[0].trim();
var macAddr='';
//lets get the macaddr
var macSplit = interfaceSplit[1].trim().split(' ');
if(macSplit.length==3){
macAddr = macSplit[2];
}
//create a new interface and point current to this one
var tempInterface = {};
tempInterface.iname=iName;
if(macAddr){
tempInterface.mac=macAddr;
}
currInterface = tempInterface;
}
}
else{
//this is a regular line
//search for ether - which contains the mac address
//search for inet which should contain the ip address
//search for status, which indicates status
//space is important here to diffrentiate between inet6
if(temp.indexOf('inet addr:') >=0) |
}
}
//lets save the last interface
if(currInterface.iname){
availableInterfaces.push(currInterface);
}
}
}
}
//currently only returns the first active link - if there are multiple
//interfaces active, we will probably need to handle multiple
if(availableInterfaces.length > 0){
for(var j=0; j<availableInterfaces.length; j++){
var tRes = availableInterfaces[j];
if(tRes){
if(tRes.status==='active'){
result=tRes;
}
}
}
}
return result;
} | {
var ipBlockSplit = temp.split(' ');
if(ipBlockSplit.length >= 2){
//take the second entry
var ipSplit=ipBlockSplit[1].split(':');
if(ipSplit.length >= 2){
currInterface.ip=ipSplit[1].trim();
//if both ip and mac exist
if(currInterface.mac){
currInterface.status='active';
}
}
}
} | conditional_block |
network-task.js | 'use strict';
/**
* Expose 'NetworkTask'
*/
module.exports = NetworkTask;
/**
* Module dependencies
*/
var networkObject = require('./network-object');
var readLine = require('readline');
var childProcess = require('child_process');
/**
* Constants
*/
var NETWORK_TOPIC = 'monitor/network';
/**
* Constructor
* Initialize a new NetworkTask
*/
function NetworkTask(info) |
/**
* Class Methods
*/
NetworkTask.prototype.runAndParse = function(callback){
if(this.generalInfo){
//run the command, parse the command, return a result
console.log('running network command');
//make sure this is a new instance everytime
this.noInstance = new networkObject(this.generalInfo.thingId);
//lets run ifconfig to get mac address and ip
if(this.generalInfo.os === 'darwin'){
var res = getNetworkInterfacesMac();
if(res){
this.noInstance.nInterface=res.iname;
this.noInstance.ipAddress=res.ip;
this.noInstance.macAddress=res.mac;
}
}
else if(this.generalInfo.os === 'linux'){
var res = getNetworkInterfacesLinux();
if(res){
this.noInstance.nInterface=res.iname;
this.noInstance.ipAddress=res.ip;
this.noInstance.macAddress=res.mac;
}
}
else{
console.log('not implemented');
}
//create the child process to execute $ iftop -t -s 2 -P -N
//must run as ROOT on ubuntu side
//add the interface - from the active network
var commandLine = childProcess.spawn('iftop', ['-t','-s','3','-P','-N','-b','-B','-i',this.noInstance.nInterface]);
var noPass = this.noInstance;
var lineReader = readLine.createInterface(commandLine.stdout, commandLine.stdin);
lineReader.on('line', function(line){
noPass.read(line);
});
commandLine.on('close', function(code, signal){
//console.log('read ' + noPass.counter + ' lines');
callback(NETWORK_TOPIC, noPass);
});
}
else{
//skipping execution
console.log('skipping network task due to missing general information');
}
}
/**
* Helper Methods
*/
// get all available network interfaces for mac
// return an object with {iname, ip, mac, status}
function getNetworkInterfacesMac(){
var result={};
var availableInterfaces=[];
var returnObject = childProcess.spawnSync('ifconfig', ['-a']);
if(returnObject.stdout){
var displayStr = returnObject.stdout.toString().trim().toLowerCase();
if(displayStr){
var ifSplit = displayStr.split('\n');
if(ifSplit){
//declare a point array
var currInterface={};
for(var i=0; i<ifSplit.length; i++){
var temp = ifSplit[i].reduceWhiteSpace().trim();
//search for the first line of each
if(temp.indexOf('flags=')>=0){
if(currInterface.iname){
//lets save this interface
availableInterfaces.push(currInterface);
}
//this is the first line
var interfaceSplit = temp.split(':');
if(interfaceSplit.length == 2){
//lets get the name
var iName = interfaceSplit[0];
//create a new interface and point current to this one
var tempInterface = {};
tempInterface.iname=iName;
currInterface = tempInterface;
}
}
else{
//this is a regular line
//search for ether - which contains the mac address
//search for inet which should contain the ip address
//search for status, which indicates status
//space is important here to diffrentiate between inet6
if(temp.indexOf('inet ') >=0){
var ipSplit = temp.split(' ');
if(ipSplit.length >= 4){
currInterface.ip=ipSplit[1].trim();
}
}
if(temp.indexOf('ether')>=0){
var macSplit = temp.split(' ');
if(macSplit.length >= 2){
currInterface.mac=macSplit[1].trim();
}
}
//we'll use a different algo on mac osx since
//it actually returns the current
if(temp.indexOf('status')>=0){
var statusSplit = temp.split(':');
if(statusSplit.length >= 2){
currInterface.status=statusSplit[1].trim();
}
}
}
}
//lets save the last interface
if(currInterface.iname){
availableInterfaces.push(currInterface);
}
}
}
}
if(availableInterfaces.length > 0){
for(var j=0; j<availableInterfaces.length; j++){
var tRes = availableInterfaces[j];
if(tRes){
//we still have a possibility of seeing 2 interfaces available
if(tRes.status==='active' && tRes.ip && tRes.mac){
result=tRes;
return result;
}
}
}
}
return result;
}
function getNetworkInterfacesLinux(){
var result={};
var availableInterfaces=[];
var returnObject = childProcess.spawnSync('ifconfig', ['-a']);
if(returnObject.stdout){
var displayStr = returnObject.stdout.toString().trim().toLowerCase();
if(displayStr){
var ifSplit = displayStr.split('\n');
if(ifSplit){
//declare a point array
var currInterface={};
for(var i=0; i<ifSplit.length; i++){
var temp = ifSplit[i].reduceWhiteSpace().trim();
//search for the first line of each
if(temp.indexOf('link encap')>=0){
if(currInterface.iname){
//lets save this interface
availableInterfaces.push(currInterface);
}
//this is the first line
var interfaceSplit = temp.split('link encap:');
if(interfaceSplit.length == 2){
//lets get the name
var iName = interfaceSplit[0].trim();
var macAddr='';
//lets get the macaddr
var macSplit = interfaceSplit[1].trim().split(' ');
if(macSplit.length==3){
macAddr = macSplit[2];
}
//create a new interface and point current to this one
var tempInterface = {};
tempInterface.iname=iName;
if(macAddr){
tempInterface.mac=macAddr;
}
currInterface = tempInterface;
}
}
else{
//this is a regular line
//search for ether - which contains the mac address
//search for inet which should contain the ip address
//search for status, which indicates status
//space is important here to diffrentiate between inet6
if(temp.indexOf('inet addr:') >=0){
var ipBlockSplit = temp.split(' ');
if(ipBlockSplit.length >= 2){
//take the second entry
var ipSplit=ipBlockSplit[1].split(':');
if(ipSplit.length >= 2){
currInterface.ip=ipSplit[1].trim();
//if both ip and mac exist
if(currInterface.mac){
currInterface.status='active';
}
}
}
}
}
}
//lets save the last interface
if(currInterface.iname){
availableInterfaces.push(currInterface);
}
}
}
}
//currently only returns the first active link - if there are multiple
//interfaces active, we will probably need to handle multiple
if(availableInterfaces.length > 0){
for(var j=0; j<availableInterfaces.length; j++){
var tRes = availableInterfaces[j];
if(tRes){
if(tRes.status==='active'){
result=tRes;
}
}
}
}
return result;
} | {
this.noInstance = null;
this.generalInfo = info;
} | identifier_body |
network-task.js | 'use strict';
/**
* Expose 'NetworkTask'
*/
module.exports = NetworkTask;
/**
* Module dependencies
*/
var networkObject = require('./network-object');
var readLine = require('readline');
var childProcess = require('child_process');
/**
* Constants
*/
var NETWORK_TOPIC = 'monitor/network';
/**
* Constructor
* Initialize a new NetworkTask
*/
function NetworkTask(info){
this.noInstance = null;
this.generalInfo = info;
}
/**
* Class Methods
*/
NetworkTask.prototype.runAndParse = function(callback){
if(this.generalInfo){
//run the command, parse the command, return a result
console.log('running network command');
//make sure this is a new instance everytime
this.noInstance = new networkObject(this.generalInfo.thingId);
//lets run ifconfig to get mac address and ip
if(this.generalInfo.os === 'darwin'){
var res = getNetworkInterfacesMac();
if(res){
this.noInstance.nInterface=res.iname;
this.noInstance.ipAddress=res.ip;
this.noInstance.macAddress=res.mac;
}
}
else if(this.generalInfo.os === 'linux'){
var res = getNetworkInterfacesLinux();
if(res){
this.noInstance.nInterface=res.iname;
this.noInstance.ipAddress=res.ip;
this.noInstance.macAddress=res.mac;
}
}
else{
console.log('not implemented');
}
//create the child process to execute $ iftop -t -s 2 -P -N
//must run as ROOT on ubuntu side
//add the interface - from the active network
var commandLine = childProcess.spawn('iftop', ['-t','-s','3','-P','-N','-b','-B','-i',this.noInstance.nInterface]);
var noPass = this.noInstance;
var lineReader = readLine.createInterface(commandLine.stdout, commandLine.stdin);
lineReader.on('line', function(line){
noPass.read(line);
});
commandLine.on('close', function(code, signal){
//console.log('read ' + noPass.counter + ' lines');
callback(NETWORK_TOPIC, noPass);
});
}
else{
//skipping execution
console.log('skipping network task due to missing general information');
}
}
/**
* Helper Methods
*/
// get all available network interfaces for mac
// return an object with {iname, ip, mac, status}
function getNetworkInterfacesMac(){
var result={};
var availableInterfaces=[];
var returnObject = childProcess.spawnSync('ifconfig', ['-a']);
if(returnObject.stdout){
var displayStr = returnObject.stdout.toString().trim().toLowerCase();
if(displayStr){
var ifSplit = displayStr.split('\n');
if(ifSplit){
//declare a point array
var currInterface={};
for(var i=0; i<ifSplit.length; i++){
var temp = ifSplit[i].reduceWhiteSpace().trim();
//search for the first line of each
if(temp.indexOf('flags=')>=0){
if(currInterface.iname){
//lets save this interface
availableInterfaces.push(currInterface);
}
//this is the first line
var interfaceSplit = temp.split(':');
if(interfaceSplit.length == 2){
//lets get the name
var iName = interfaceSplit[0];
//create a new interface and point current to this one
var tempInterface = {};
tempInterface.iname=iName;
currInterface = tempInterface;
}
}
else{
//this is a regular line
//search for ether - which contains the mac address
//search for inet which should contain the ip address
//search for status, which indicates status
//space is important here to diffrentiate between inet6
if(temp.indexOf('inet ') >=0){
var ipSplit = temp.split(' ');
if(ipSplit.length >= 4){
currInterface.ip=ipSplit[1].trim();
}
}
if(temp.indexOf('ether')>=0){
var macSplit = temp.split(' ');
if(macSplit.length >= 2){
currInterface.mac=macSplit[1].trim();
}
}
//we'll use a different algo on mac osx since
//it actually returns the current
if(temp.indexOf('status')>=0){
var statusSplit = temp.split(':');
if(statusSplit.length >= 2){
currInterface.status=statusSplit[1].trim();
}
}
}
}
//lets save the last interface
if(currInterface.iname){
availableInterfaces.push(currInterface);
}
}
}
}
if(availableInterfaces.length > 0){
for(var j=0; j<availableInterfaces.length; j++){
var tRes = availableInterfaces[j];
if(tRes){
//we still have a possibility of seeing 2 interfaces available
if(tRes.status==='active' && tRes.ip && tRes.mac){
result=tRes;
return result;
}
}
}
}
return result;
}
function getNetworkInterfacesLinux(){
var result={};
var availableInterfaces=[];
var returnObject = childProcess.spawnSync('ifconfig', ['-a']);
if(returnObject.stdout){
var displayStr = returnObject.stdout.toString().trim().toLowerCase();
if(displayStr){
var ifSplit = displayStr.split('\n');
if(ifSplit){
//declare a point array
var currInterface={};
for(var i=0; i<ifSplit.length; i++){
var temp = ifSplit[i].reduceWhiteSpace().trim();
//search for the first line of each
if(temp.indexOf('link encap')>=0){
if(currInterface.iname){
//lets save this interface
availableInterfaces.push(currInterface);
}
//this is the first line
var interfaceSplit = temp.split('link encap:');
if(interfaceSplit.length == 2){
//lets get the name
var iName = interfaceSplit[0].trim(); |
if(macSplit.length==3){
macAddr = macSplit[2];
}
//create a new interface and point current to this one
var tempInterface = {};
tempInterface.iname=iName;
if(macAddr){
tempInterface.mac=macAddr;
}
currInterface = tempInterface;
}
}
else{
//this is a regular line
//search for ether - which contains the mac address
//search for inet which should contain the ip address
//search for status, which indicates status
//space is important here to diffrentiate between inet6
if(temp.indexOf('inet addr:') >=0){
var ipBlockSplit = temp.split(' ');
if(ipBlockSplit.length >= 2){
//take the second entry
var ipSplit=ipBlockSplit[1].split(':');
if(ipSplit.length >= 2){
currInterface.ip=ipSplit[1].trim();
//if both ip and mac exist
if(currInterface.mac){
currInterface.status='active';
}
}
}
}
}
}
//lets save the last interface
if(currInterface.iname){
availableInterfaces.push(currInterface);
}
}
}
}
//currently only returns the first active link - if there are multiple
//interfaces active, we will probably need to handle multiple
if(availableInterfaces.length > 0){
for(var j=0; j<availableInterfaces.length; j++){
var tRes = availableInterfaces[j];
if(tRes){
if(tRes.status==='active'){
result=tRes;
}
}
}
}
return result;
} |
var macAddr='';
//lets get the macaddr
var macSplit = interfaceSplit[1].trim().split(' '); | random_line_split |
base.py | from couchpotato.core.event import addEvent
from couchpotato.core.helpers.variable import tryFloat
from couchpotato.core.logger import CPLog
from couchpotato.core.plugins.base import Plugin
from couchpotato.environment import Env
from urlparse import urlparse
import re
import time
log = CPLog(__name__)
class Provider(Plugin):
type = None # movie, nzb, torrent, subtitle, trailer
http_time_between_calls = 10 # Default timeout for url requests
last_available_check = {}
is_available = {}
def isAvailable(self, test_url):
if Env.get('dev'): return True
now = time.time()
host = urlparse(test_url).hostname
if self.last_available_check.get(host) < now - 900:
self.last_available_check[host] = now
try:
self.urlopen(test_url, 30)
self.is_available[host] = True
except:
log.error('"%s" unavailable, trying again in an 15 minutes.', host)
self.is_available[host] = False
return self.is_available.get(host, False)
class YarrProvider(Provider):
cat_ids = []
sizeGb = ['gb', 'gib']
sizeMb = ['mb', 'mib']
sizeKb = ['kb', 'kib']
def __init__(self):
addEvent('provider.belongs_to', self.belongsTo)
addEvent('%s.search' % self.type, self.search)
addEvent('yarr.search', self.search)
addEvent('nzb.feed', self.feed)
def download(self, url = '', nzb_id = ''):
return self.urlopen(url)
def feed(self):
return []
def search(self, movie, quality):
return []
def belongsTo(self, url, provider = None, host = None):
|
def parseSize(self, size):
sizeRaw = size.lower()
size = tryFloat(re.sub(r'[^0-9.]', '', size).strip())
for s in self.sizeGb:
if s in sizeRaw:
return size * 1024
for s in self.sizeMb:
if s in sizeRaw:
return size
for s in self.sizeKb:
if s in sizeRaw:
return size / 1024
return 0
def getCatId(self, identifier):
for cats in self.cat_ids:
ids, qualities = cats
if identifier in qualities:
return ids
return [self.cat_backup_id]
def found(self, new):
log.info('Found: score(%(score)s) on %(provider)s: %(name)s', new)
| try:
if provider and provider == self.getName():
return self
hostname = urlparse(url).hostname
if host and hostname in host:
return self
else:
for url_type in self.urls:
download_url = self.urls[url_type]
if hostname in download_url:
return self
except:
log.debug('Url % s doesn\'t belong to %s', (url, self.getName()))
return | identifier_body |
base.py | from couchpotato.core.event import addEvent
from couchpotato.core.helpers.variable import tryFloat
from couchpotato.core.logger import CPLog
from couchpotato.core.plugins.base import Plugin
from couchpotato.environment import Env
from urlparse import urlparse
import re
import time
log = CPLog(__name__)
class Provider(Plugin):
type = None # movie, nzb, torrent, subtitle, trailer
http_time_between_calls = 10 # Default timeout for url requests
last_available_check = {}
is_available = {}
def isAvailable(self, test_url):
if Env.get('dev'): return True
now = time.time()
host = urlparse(test_url).hostname
if self.last_available_check.get(host) < now - 900:
self.last_available_check[host] = now
try:
self.urlopen(test_url, 30)
self.is_available[host] = True
except:
log.error('"%s" unavailable, trying again in an 15 minutes.', host)
self.is_available[host] = False
return self.is_available.get(host, False)
class YarrProvider(Provider):
cat_ids = []
sizeGb = ['gb', 'gib']
sizeMb = ['mb', 'mib']
sizeKb = ['kb', 'kib']
def __init__(self):
addEvent('provider.belongs_to', self.belongsTo)
addEvent('%s.search' % self.type, self.search)
addEvent('yarr.search', self.search)
addEvent('nzb.feed', self.feed)
def download(self, url = '', nzb_id = ''):
return self.urlopen(url)
def feed(self):
return []
def search(self, movie, quality):
return []
def | (self, url, provider = None, host = None):
try:
if provider and provider == self.getName():
return self
hostname = urlparse(url).hostname
if host and hostname in host:
return self
else:
for url_type in self.urls:
download_url = self.urls[url_type]
if hostname in download_url:
return self
except:
log.debug('Url % s doesn\'t belong to %s', (url, self.getName()))
return
def parseSize(self, size):
sizeRaw = size.lower()
size = tryFloat(re.sub(r'[^0-9.]', '', size).strip())
for s in self.sizeGb:
if s in sizeRaw:
return size * 1024
for s in self.sizeMb:
if s in sizeRaw:
return size
for s in self.sizeKb:
if s in sizeRaw:
return size / 1024
return 0
def getCatId(self, identifier):
for cats in self.cat_ids:
ids, qualities = cats
if identifier in qualities:
return ids
return [self.cat_backup_id]
def found(self, new):
log.info('Found: score(%(score)s) on %(provider)s: %(name)s', new)
| belongsTo | identifier_name |
base.py | from couchpotato.core.event import addEvent
from couchpotato.core.helpers.variable import tryFloat
from couchpotato.core.logger import CPLog
from couchpotato.core.plugins.base import Plugin
from couchpotato.environment import Env
from urlparse import urlparse
import re
import time
log = CPLog(__name__)
class Provider(Plugin):
type = None # movie, nzb, torrent, subtitle, trailer
http_time_between_calls = 10 # Default timeout for url requests
last_available_check = {}
is_available = {}
def isAvailable(self, test_url):
if Env.get('dev'): return True
now = time.time()
host = urlparse(test_url).hostname
if self.last_available_check.get(host) < now - 900:
self.last_available_check[host] = now
try:
self.urlopen(test_url, 30)
self.is_available[host] = True
except:
log.error('"%s" unavailable, trying again in an 15 minutes.', host)
self.is_available[host] = False
return self.is_available.get(host, False)
class YarrProvider(Provider):
cat_ids = []
sizeGb = ['gb', 'gib']
sizeMb = ['mb', 'mib']
sizeKb = ['kb', 'kib']
def __init__(self):
addEvent('provider.belongs_to', self.belongsTo)
addEvent('%s.search' % self.type, self.search)
addEvent('yarr.search', self.search)
addEvent('nzb.feed', self.feed)
def download(self, url = '', nzb_id = ''):
return self.urlopen(url)
def feed(self):
return []
| if provider and provider == self.getName():
return self
hostname = urlparse(url).hostname
if host and hostname in host:
return self
else:
for url_type in self.urls:
download_url = self.urls[url_type]
if hostname in download_url:
return self
except:
log.debug('Url % s doesn\'t belong to %s', (url, self.getName()))
return
def parseSize(self, size):
sizeRaw = size.lower()
size = tryFloat(re.sub(r'[^0-9.]', '', size).strip())
for s in self.sizeGb:
if s in sizeRaw:
return size * 1024
for s in self.sizeMb:
if s in sizeRaw:
return size
for s in self.sizeKb:
if s in sizeRaw:
return size / 1024
return 0
def getCatId(self, identifier):
for cats in self.cat_ids:
ids, qualities = cats
if identifier in qualities:
return ids
return [self.cat_backup_id]
def found(self, new):
log.info('Found: score(%(score)s) on %(provider)s: %(name)s', new) | def search(self, movie, quality):
return []
def belongsTo(self, url, provider = None, host = None):
try: | random_line_split |
base.py | from couchpotato.core.event import addEvent
from couchpotato.core.helpers.variable import tryFloat
from couchpotato.core.logger import CPLog
from couchpotato.core.plugins.base import Plugin
from couchpotato.environment import Env
from urlparse import urlparse
import re
import time
log = CPLog(__name__)
class Provider(Plugin):
type = None # movie, nzb, torrent, subtitle, trailer
http_time_between_calls = 10 # Default timeout for url requests
last_available_check = {}
is_available = {}
def isAvailable(self, test_url):
if Env.get('dev'): return True
now = time.time()
host = urlparse(test_url).hostname
if self.last_available_check.get(host) < now - 900:
self.last_available_check[host] = now
try:
self.urlopen(test_url, 30)
self.is_available[host] = True
except:
log.error('"%s" unavailable, trying again in an 15 minutes.', host)
self.is_available[host] = False
return self.is_available.get(host, False)
class YarrProvider(Provider):
cat_ids = []
sizeGb = ['gb', 'gib']
sizeMb = ['mb', 'mib']
sizeKb = ['kb', 'kib']
def __init__(self):
addEvent('provider.belongs_to', self.belongsTo)
addEvent('%s.search' % self.type, self.search)
addEvent('yarr.search', self.search)
addEvent('nzb.feed', self.feed)
def download(self, url = '', nzb_id = ''):
return self.urlopen(url)
def feed(self):
return []
def search(self, movie, quality):
return []
def belongsTo(self, url, provider = None, host = None):
try:
if provider and provider == self.getName():
|
hostname = urlparse(url).hostname
if host and hostname in host:
return self
else:
for url_type in self.urls:
download_url = self.urls[url_type]
if hostname in download_url:
return self
except:
log.debug('Url % s doesn\'t belong to %s', (url, self.getName()))
return
def parseSize(self, size):
sizeRaw = size.lower()
size = tryFloat(re.sub(r'[^0-9.]', '', size).strip())
for s in self.sizeGb:
if s in sizeRaw:
return size * 1024
for s in self.sizeMb:
if s in sizeRaw:
return size
for s in self.sizeKb:
if s in sizeRaw:
return size / 1024
return 0
def getCatId(self, identifier):
for cats in self.cat_ids:
ids, qualities = cats
if identifier in qualities:
return ids
return [self.cat_backup_id]
def found(self, new):
log.info('Found: score(%(score)s) on %(provider)s: %(name)s', new)
| return self | conditional_block |
dialog_presenter.py | from __future__ import absolute_import, print_function
from .presenter_base import AlgorithmProgressPresenterBase
class AlgorithmProgressDialogPresenter(AlgorithmProgressPresenterBase):
"""
Presents progress reports on algorithms.
"""
def __init__(self, view, model):
super(AlgorithmProgressDialogPresenter, self).__init__()
view.close_button.clicked.connect(self.close)
self.view = view
self.model = model
self.model.add_presenter(self)
self.progress_bars = {}
def update_gui(self):
"""
Update the gui elements.
"""
self.progress_bars.clear()
algorithm_data = self.model.get_running_algorithm_data()
self.view.update(algorithm_data)
def add_progress_bar(self, algorithm_id, progress_bar):
"""
Store the mapping between the algorithm and its progress bar.
:param algorithm_id: An id of an algorithm.
:param progress_bar: QProgressBar widget.
"""
self.progress_bars[algorithm_id] = progress_bar
def update_progress_bar(self, algorithm_id, progress, message):
"""
Update the progress bar in the view.
:param algorithm_id: The progressed algorithm's id.
:param progress: Progress value to update the progress bar with.
:param message: A message that may come from the algorithm.
"""
progress_bar = self.progress_bars.get(algorithm_id, None)
if progress_bar is not None:
|
def update(self):
"""
Update the gui asynchronously.
"""
self.need_update_gui.emit()
def close(self):
"""
Close the dialog.
"""
self.model.remove_presenter(self)
self.progress_bars.clear()
self.view.close()
def cancel_algorithm(self, algorithm_id):
"""
Cancel an algorithm.
:param algorithm_id: An id of an algorithm
"""
# algorithm_id is actually an instance of an algorithm
algorithm_id.cancel()
| self.need_update_progress_bar.emit(progress_bar, progress, message) | conditional_block |
dialog_presenter.py | from __future__ import absolute_import, print_function
from .presenter_base import AlgorithmProgressPresenterBase
class | (AlgorithmProgressPresenterBase):
"""
Presents progress reports on algorithms.
"""
def __init__(self, view, model):
super(AlgorithmProgressDialogPresenter, self).__init__()
view.close_button.clicked.connect(self.close)
self.view = view
self.model = model
self.model.add_presenter(self)
self.progress_bars = {}
def update_gui(self):
"""
Update the gui elements.
"""
self.progress_bars.clear()
algorithm_data = self.model.get_running_algorithm_data()
self.view.update(algorithm_data)
def add_progress_bar(self, algorithm_id, progress_bar):
"""
Store the mapping between the algorithm and its progress bar.
:param algorithm_id: An id of an algorithm.
:param progress_bar: QProgressBar widget.
"""
self.progress_bars[algorithm_id] = progress_bar
def update_progress_bar(self, algorithm_id, progress, message):
"""
Update the progress bar in the view.
:param algorithm_id: The progressed algorithm's id.
:param progress: Progress value to update the progress bar with.
:param message: A message that may come from the algorithm.
"""
progress_bar = self.progress_bars.get(algorithm_id, None)
if progress_bar is not None:
self.need_update_progress_bar.emit(progress_bar, progress, message)
def update(self):
"""
Update the gui asynchronously.
"""
self.need_update_gui.emit()
def close(self):
"""
Close the dialog.
"""
self.model.remove_presenter(self)
self.progress_bars.clear()
self.view.close()
def cancel_algorithm(self, algorithm_id):
"""
Cancel an algorithm.
:param algorithm_id: An id of an algorithm
"""
# algorithm_id is actually an instance of an algorithm
algorithm_id.cancel()
| AlgorithmProgressDialogPresenter | identifier_name |
dialog_presenter.py | from __future__ import absolute_import, print_function
from .presenter_base import AlgorithmProgressPresenterBase
class AlgorithmProgressDialogPresenter(AlgorithmProgressPresenterBase):
"""
Presents progress reports on algorithms.
""" | super(AlgorithmProgressDialogPresenter, self).__init__()
view.close_button.clicked.connect(self.close)
self.view = view
self.model = model
self.model.add_presenter(self)
self.progress_bars = {}
def update_gui(self):
"""
Update the gui elements.
"""
self.progress_bars.clear()
algorithm_data = self.model.get_running_algorithm_data()
self.view.update(algorithm_data)
def add_progress_bar(self, algorithm_id, progress_bar):
"""
Store the mapping between the algorithm and its progress bar.
:param algorithm_id: An id of an algorithm.
:param progress_bar: QProgressBar widget.
"""
self.progress_bars[algorithm_id] = progress_bar
def update_progress_bar(self, algorithm_id, progress, message):
"""
Update the progress bar in the view.
:param algorithm_id: The progressed algorithm's id.
:param progress: Progress value to update the progress bar with.
:param message: A message that may come from the algorithm.
"""
progress_bar = self.progress_bars.get(algorithm_id, None)
if progress_bar is not None:
self.need_update_progress_bar.emit(progress_bar, progress, message)
def update(self):
"""
Update the gui asynchronously.
"""
self.need_update_gui.emit()
def close(self):
"""
Close the dialog.
"""
self.model.remove_presenter(self)
self.progress_bars.clear()
self.view.close()
def cancel_algorithm(self, algorithm_id):
"""
Cancel an algorithm.
:param algorithm_id: An id of an algorithm
"""
# algorithm_id is actually an instance of an algorithm
algorithm_id.cancel() | def __init__(self, view, model): | random_line_split |
dialog_presenter.py | from __future__ import absolute_import, print_function
from .presenter_base import AlgorithmProgressPresenterBase
class AlgorithmProgressDialogPresenter(AlgorithmProgressPresenterBase):
"""
Presents progress reports on algorithms.
"""
def __init__(self, view, model):
super(AlgorithmProgressDialogPresenter, self).__init__()
view.close_button.clicked.connect(self.close)
self.view = view
self.model = model
self.model.add_presenter(self)
self.progress_bars = {}
def update_gui(self):
|
def add_progress_bar(self, algorithm_id, progress_bar):
"""
Store the mapping between the algorithm and its progress bar.
:param algorithm_id: An id of an algorithm.
:param progress_bar: QProgressBar widget.
"""
self.progress_bars[algorithm_id] = progress_bar
def update_progress_bar(self, algorithm_id, progress, message):
"""
Update the progress bar in the view.
:param algorithm_id: The progressed algorithm's id.
:param progress: Progress value to update the progress bar with.
:param message: A message that may come from the algorithm.
"""
progress_bar = self.progress_bars.get(algorithm_id, None)
if progress_bar is not None:
self.need_update_progress_bar.emit(progress_bar, progress, message)
def update(self):
"""
Update the gui asynchronously.
"""
self.need_update_gui.emit()
def close(self):
"""
Close the dialog.
"""
self.model.remove_presenter(self)
self.progress_bars.clear()
self.view.close()
def cancel_algorithm(self, algorithm_id):
"""
Cancel an algorithm.
:param algorithm_id: An id of an algorithm
"""
# algorithm_id is actually an instance of an algorithm
algorithm_id.cancel()
| """
Update the gui elements.
"""
self.progress_bars.clear()
algorithm_data = self.model.get_running_algorithm_data()
self.view.update(algorithm_data) | identifier_body |
testes_notificacao.py | # coding=utf-8
# ---------------------------------------------------------------
# Desenvolvedor: Arannã Sousa Santos
# Mês: 12
# Ano: 2015
# Projeto: pagseguro_xml
# e-mail: asousas@live.com
# ---------------------------------------------------------------
import logging
from pagseguro_xml.notificacao import ApiPagSeguroNotificacao_v3, CONST_v3
logger = logging.basicConfig(level=logging.DEBUG)
PAGSEGURO_API_AMBIENTE = u'sandbox'
PAGSEGURO_API_EMAIL = u'seu@email.com'
PAGSEGURO_API_TOKEN_PRODUCAO = u''
PAGSEGURO_API_TOKEN_SANDBOX = u''
CHAVE_NOTIFICACAO = u'AA0000-AA00A0A0AA00-AA00AA000000-AA0000' # ela éh de producao
api = ApiPagSeguroNotificacao_v3(ambiente=CONST_v3.AMBIENTE.SANDBOX)
PAGSEGURO_API_TOKEN = PAGSEGURO_API_TOKEN_PRODUCAO |
if ok:
print u'-' * 50
print retorno.xml
print u'-' * 50
for a in retorno.alertas:
print a
else:
print u'Motivo do erro:', retorno |
ok, retorno = api.consulta_notificacao_transacao_v3(PAGSEGURO_API_EMAIL, PAGSEGURO_API_TOKEN, CHAVE_NOTIFICACAO) | random_line_split |
testes_notificacao.py | # coding=utf-8
# ---------------------------------------------------------------
# Desenvolvedor: Arannã Sousa Santos
# Mês: 12
# Ano: 2015
# Projeto: pagseguro_xml
# e-mail: asousas@live.com
# ---------------------------------------------------------------
import logging
from pagseguro_xml.notificacao import ApiPagSeguroNotificacao_v3, CONST_v3
logger = logging.basicConfig(level=logging.DEBUG)
PAGSEGURO_API_AMBIENTE = u'sandbox'
PAGSEGURO_API_EMAIL = u'seu@email.com'
PAGSEGURO_API_TOKEN_PRODUCAO = u''
PAGSEGURO_API_TOKEN_SANDBOX = u''
CHAVE_NOTIFICACAO = u'AA0000-AA00A0A0AA00-AA00AA000000-AA0000' # ela éh de producao
api = ApiPagSeguroNotificacao_v3(ambiente=CONST_v3.AMBIENTE.SANDBOX)
PAGSEGURO_API_TOKEN = PAGSEGURO_API_TOKEN_PRODUCAO
ok, retorno = api.consulta_notificacao_transacao_v3(PAGSEGURO_API_EMAIL, PAGSEGURO_API_TOKEN, CHAVE_NOTIFICACAO)
if ok:
print u'-' * 50
print retorno.xml
print u'-' * 50
for a in retorno.alertas:
print a
else:
pri | nt u'Motivo do erro:', retorno
| conditional_block | |
mid-path-type-params.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![allow(dead_code)]
// pretty-expanded FIXME #23616
struct S<T> {
contents: T,
}
impl<T> S<T> {
fn new<U>(x: T, _: U) -> S<T> {
S {
contents: x,
}
}
}
trait Trait<T> {
fn new<U>(x: T, y: U) -> Self;
}
struct S2 {
contents: isize,
}
impl Trait<isize> for S2 {
fn | <U>(x: isize, _: U) -> S2 {
S2 {
contents: x,
}
}
}
pub fn main() {
let _ = S::<isize>::new::<f64>(1, 1.0);
let _: S2 = Trait::<isize>::new::<f64>(1, 1.0);
}
| new | identifier_name |
mid-path-type-params.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![allow(dead_code)]
// pretty-expanded FIXME #23616
struct S<T> {
contents: T, |
impl<T> S<T> {
fn new<U>(x: T, _: U) -> S<T> {
S {
contents: x,
}
}
}
trait Trait<T> {
fn new<U>(x: T, y: U) -> Self;
}
struct S2 {
contents: isize,
}
impl Trait<isize> for S2 {
fn new<U>(x: isize, _: U) -> S2 {
S2 {
contents: x,
}
}
}
pub fn main() {
let _ = S::<isize>::new::<f64>(1, 1.0);
let _: S2 = Trait::<isize>::new::<f64>(1, 1.0);
} | } | random_line_split |
spawn.ts | import { addProcess, getProcessById } from "../../kernel/kernel/kernel";
import Process = require("../../kernel/kernel/process");
import { ProcessPriority } from "../constants";
interface CreepRequest {
pid: number;
creepID: string;
bodyParts: bodyMap;
priority: number;
}
class SpawnProcess extends Process {
public static start(roomName: string, parentPID: number) {
let p = new SpawnProcess(0, parentPID);
p = addProcess(p, ProcessPriority.TiclyLast);
p.memory.roomName = roomName;
return p;
}
public classPath() {
return "components.processes.room.spawn";
}
public getRoomName() {
return this.memory.roomName;
}
public spawn(id: string, bodyMap: bodyMap, pid: number, priority = 10): number {
const memory = this.memory;
const existing = _.find(memory.requestList as CreepRequest[], (r) => (r.creepID === id) && (r.pid === pid));
if (existing) {
return -1;
} else {
const creepRequest = { pid, creepID: id, bodyParts: bodyMap, priority };
memory.requestList.push(creepRequest);
return 0;
}
}
public run(): number {
const colonyProcess = getProcessById(this.parentPID);
if (!colonyProcess)
return this.stop(0);
const roomName = this.memory.roomName;
if (!roomName)
return this.stop(0);
const makeBody = function(bodyMap: bodyMap): string[] {
// TODO : Fix the part map, this need to include all part type somehow
const partMap: { [s: string]: string } = {
WORK,
MOVE,
CARRY,
ATTACK,
CLAIM,
};
const replicatePart = function(times: number, part: string) {
return _.map(_.times(times, (x) => x),
() => partMap[part]);
};
return _.chain(bodyMap).map(replicatePart).flatten().value() as string[];
};
const memory = this.memory;
memory.requestList = memory.requestList || [];
memory.requestList = _.sortBy(memory.requestList as CreepRequest[], (i) => i.priority);
const request: CreepRequest = memory.requestList.pop();
const spawn = this.findFreeSpawn(this.memory.roomName);
if (request) { | const creepName = spawn.createCreep(body);
process.receiveCreep(request.creepID, Game.creeps[creepName]);
}
}
}
return 0;
}
private findFreeSpawn(roomName: string) {
const spawns = _.filter(Game.spawns,
function(spawn) {
return spawn.room.name === roomName &&
spawn.spawning == null &&
spawn.canCreateCreep([MOVE]) === OK &&
spawn.isActive();
});
if (spawns.length > 0)
return spawns[0];
return null;
};
}
export = SpawnProcess; | if (spawn) {
const body = makeBody(request.bodyParts);
const canSpawn = spawn.canCreateCreep(body);
if (canSpawn === OK) {
const process: any = getProcessById(request.pid); | random_line_split |
spawn.ts | import { addProcess, getProcessById } from "../../kernel/kernel/kernel";
import Process = require("../../kernel/kernel/process");
import { ProcessPriority } from "../constants";
interface CreepRequest {
pid: number;
creepID: string;
bodyParts: bodyMap;
priority: number;
}
class SpawnProcess extends Process {
public static start(roomName: string, parentPID: number) {
let p = new SpawnProcess(0, parentPID);
p = addProcess(p, ProcessPriority.TiclyLast);
p.memory.roomName = roomName;
return p;
}
public classPath() {
return "components.processes.room.spawn";
}
public getRoomName() {
return this.memory.roomName;
}
public spawn(id: string, bodyMap: bodyMap, pid: number, priority = 10): number {
const memory = this.memory;
const existing = _.find(memory.requestList as CreepRequest[], (r) => (r.creepID === id) && (r.pid === pid));
if (existing) {
return -1;
} else {
const creepRequest = { pid, creepID: id, bodyParts: bodyMap, priority };
memory.requestList.push(creepRequest);
return 0;
}
}
public run(): number {
const colonyProcess = getProcessById(this.parentPID);
if (!colonyProcess)
return this.stop(0);
const roomName = this.memory.roomName;
if (!roomName)
return this.stop(0);
const makeBody = function(bodyMap: bodyMap): string[] {
// TODO : Fix the part map, this need to include all part type somehow
const partMap: { [s: string]: string } = {
WORK,
MOVE,
CARRY,
ATTACK,
CLAIM,
};
const replicatePart = function(times: number, part: string) {
return _.map(_.times(times, (x) => x),
() => partMap[part]);
};
return _.chain(bodyMap).map(replicatePart).flatten().value() as string[];
};
const memory = this.memory;
memory.requestList = memory.requestList || [];
memory.requestList = _.sortBy(memory.requestList as CreepRequest[], (i) => i.priority);
const request: CreepRequest = memory.requestList.pop();
const spawn = this.findFreeSpawn(this.memory.roomName);
if (request) {
if (spawn) {
const body = makeBody(request.bodyParts);
const canSpawn = spawn.canCreateCreep(body);
if (canSpawn === OK) {
const process: any = getProcessById(request.pid);
const creepName = spawn.createCreep(body);
process.receiveCreep(request.creepID, Game.creeps[creepName]);
}
}
}
return 0;
}
private findFreeSpawn(roomName: string) | ;
}
export = SpawnProcess;
| {
const spawns = _.filter(Game.spawns,
function(spawn) {
return spawn.room.name === roomName &&
spawn.spawning == null &&
spawn.canCreateCreep([MOVE]) === OK &&
spawn.isActive();
});
if (spawns.length > 0)
return spawns[0];
return null;
} | identifier_body |
spawn.ts | import { addProcess, getProcessById } from "../../kernel/kernel/kernel";
import Process = require("../../kernel/kernel/process");
import { ProcessPriority } from "../constants";
interface CreepRequest {
pid: number;
creepID: string;
bodyParts: bodyMap;
priority: number;
}
class SpawnProcess extends Process {
public static | (roomName: string, parentPID: number) {
let p = new SpawnProcess(0, parentPID);
p = addProcess(p, ProcessPriority.TiclyLast);
p.memory.roomName = roomName;
return p;
}
public classPath() {
return "components.processes.room.spawn";
}
public getRoomName() {
return this.memory.roomName;
}
public spawn(id: string, bodyMap: bodyMap, pid: number, priority = 10): number {
const memory = this.memory;
const existing = _.find(memory.requestList as CreepRequest[], (r) => (r.creepID === id) && (r.pid === pid));
if (existing) {
return -1;
} else {
const creepRequest = { pid, creepID: id, bodyParts: bodyMap, priority };
memory.requestList.push(creepRequest);
return 0;
}
}
public run(): number {
const colonyProcess = getProcessById(this.parentPID);
if (!colonyProcess)
return this.stop(0);
const roomName = this.memory.roomName;
if (!roomName)
return this.stop(0);
const makeBody = function(bodyMap: bodyMap): string[] {
// TODO : Fix the part map, this need to include all part type somehow
const partMap: { [s: string]: string } = {
WORK,
MOVE,
CARRY,
ATTACK,
CLAIM,
};
const replicatePart = function(times: number, part: string) {
return _.map(_.times(times, (x) => x),
() => partMap[part]);
};
return _.chain(bodyMap).map(replicatePart).flatten().value() as string[];
};
const memory = this.memory;
memory.requestList = memory.requestList || [];
memory.requestList = _.sortBy(memory.requestList as CreepRequest[], (i) => i.priority);
const request: CreepRequest = memory.requestList.pop();
const spawn = this.findFreeSpawn(this.memory.roomName);
if (request) {
if (spawn) {
const body = makeBody(request.bodyParts);
const canSpawn = spawn.canCreateCreep(body);
if (canSpawn === OK) {
const process: any = getProcessById(request.pid);
const creepName = spawn.createCreep(body);
process.receiveCreep(request.creepID, Game.creeps[creepName]);
}
}
}
return 0;
}
private findFreeSpawn(roomName: string) {
const spawns = _.filter(Game.spawns,
function(spawn) {
return spawn.room.name === roomName &&
spawn.spawning == null &&
spawn.canCreateCreep([MOVE]) === OK &&
spawn.isActive();
});
if (spawns.length > 0)
return spawns[0];
return null;
};
}
export = SpawnProcess;
| start | identifier_name |
c-stack-returning-int64.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
mod libc {
use std::libc::{c_char, c_long, c_longlong};
#[nolink]
extern {
pub fn atol(x: *c_char) -> c_long;
pub fn atoll(x: *c_char) -> c_longlong;
}
}
#[fixed_stack_segment]
fn atol(s: ~str) -> int {
s.with_c_str(|x| unsafe { libc::atol(x) as int }) | s.with_c_str(|x| unsafe { libc::atoll(x) as i64 })
}
pub fn main() {
assert_eq!(atol(~"1024") * 10, atol(~"10240"));
assert!((atoll(~"11111111111111111") * 10) == atoll(~"111111111111111110"));
} | }
#[fixed_stack_segment]
fn atoll(s: ~str) -> i64 { | random_line_split |
c-stack-returning-int64.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
mod libc {
use std::libc::{c_char, c_long, c_longlong};
#[nolink]
extern {
pub fn atol(x: *c_char) -> c_long;
pub fn atoll(x: *c_char) -> c_longlong;
}
}
#[fixed_stack_segment]
fn atol(s: ~str) -> int {
s.with_c_str(|x| unsafe { libc::atol(x) as int })
}
#[fixed_stack_segment]
fn | (s: ~str) -> i64 {
s.with_c_str(|x| unsafe { libc::atoll(x) as i64 })
}
pub fn main() {
assert_eq!(atol(~"1024") * 10, atol(~"10240"));
assert!((atoll(~"11111111111111111") * 10) == atoll(~"111111111111111110"));
}
| atoll | identifier_name |
c-stack-returning-int64.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
mod libc {
use std::libc::{c_char, c_long, c_longlong};
#[nolink]
extern {
pub fn atol(x: *c_char) -> c_long;
pub fn atoll(x: *c_char) -> c_longlong;
}
}
#[fixed_stack_segment]
fn atol(s: ~str) -> int |
#[fixed_stack_segment]
fn atoll(s: ~str) -> i64 {
s.with_c_str(|x| unsafe { libc::atoll(x) as i64 })
}
pub fn main() {
assert_eq!(atol(~"1024") * 10, atol(~"10240"));
assert!((atoll(~"11111111111111111") * 10) == atoll(~"111111111111111110"));
}
| {
s.with_c_str(|x| unsafe { libc::atol(x) as int })
} | identifier_body |
core.js | "use strict";
(function (socket_path, thread_id) {
var socket = io(socket_path)
, thread_id = 6
, commentLists = []
, commentBox;
socket.emit('comment/list', {thread_id: thread_id});
socket.on('user', function (user) {
var comments = [];
var submitCallback = function (replyTo, body) {
socket.emit('comment/post', {body: body, thread_id: thread_id, replyTo: replyTo});
};
var likeCallback = function (id) {
socket.emit('comment/like', {comment_id: id});
};
/**
* @param integer commentId
* @param integer commentReplyId The replyId of commentId
*/
var getRepliesCallback = function (commentId, commentReplyId) {
var reset = false;
// If the trail is already open then we want to close it
if (commentLists[commentId] !== undefined) {
reset = true;
}
// Delete all list after the list matching commentReplyId
for (var id in commentLists) {
// If the id matches the commentReplyId then all loops going
// forward will delete the lists
if (id === commentReplyId) {
commentLists.length = parseInt(id) + 1;
break;
}
}
if (reset === false) {
socket.emit('comment/list', {commentId: commentId});
} else {
commentBox.setState({commentLists: commentLists}); | };
var renderList = function (list) {
commentBox = React.renderComponent(
CommentBox({
key: 'CommentBox',
user: user,
getRepliesCallback: getRepliesCallback,
likeCallback: likeCallback,
submitCallback: submitCallback,
initialCommentLists: list
}),
document.getElementById('CommentListContainer')
);
};
socket.on('comment/list', function (data) {
commentLists[data.commentId] = data.comments;
renderList(commentLists);
});
socket.on('comment/replies', function (data) {
commentLists[data.commentId] = data.comments;
commentBox.setState({commentLists: commentLists});
});
socket.on('comment/post/confirmation', function (data) {
commentLists[data.replyTo].unshift(data.comment);
commentBox.setState({commentLists: commentLists});
});
});
window.socket;
})(socket_path, thread_id); | } | random_line_split |
core.js | "use strict";
(function (socket_path, thread_id) {
var socket = io(socket_path)
, thread_id = 6
, commentLists = []
, commentBox;
socket.emit('comment/list', {thread_id: thread_id});
socket.on('user', function (user) {
var comments = [];
var submitCallback = function (replyTo, body) {
socket.emit('comment/post', {body: body, thread_id: thread_id, replyTo: replyTo});
};
var likeCallback = function (id) {
socket.emit('comment/like', {comment_id: id});
};
/**
* @param integer commentId
* @param integer commentReplyId The replyId of commentId
*/
var getRepliesCallback = function (commentId, commentReplyId) {
var reset = false;
// If the trail is already open then we want to close it
if (commentLists[commentId] !== undefined) |
// Delete all list after the list matching commentReplyId
for (var id in commentLists) {
// If the id matches the commentReplyId then all loops going
// forward will delete the lists
if (id === commentReplyId) {
commentLists.length = parseInt(id) + 1;
break;
}
}
if (reset === false) {
socket.emit('comment/list', {commentId: commentId});
} else {
commentBox.setState({commentLists: commentLists});
}
};
var renderList = function (list) {
commentBox = React.renderComponent(
CommentBox({
key: 'CommentBox',
user: user,
getRepliesCallback: getRepliesCallback,
likeCallback: likeCallback,
submitCallback: submitCallback,
initialCommentLists: list
}),
document.getElementById('CommentListContainer')
);
};
socket.on('comment/list', function (data) {
commentLists[data.commentId] = data.comments;
renderList(commentLists);
});
socket.on('comment/replies', function (data) {
commentLists[data.commentId] = data.comments;
commentBox.setState({commentLists: commentLists});
});
socket.on('comment/post/confirmation', function (data) {
commentLists[data.replyTo].unshift(data.comment);
commentBox.setState({commentLists: commentLists});
});
});
window.socket;
})(socket_path, thread_id);
| {
reset = true;
} | conditional_block |
htmldataelement.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::dom::bindings::codegen::Bindings::HTMLDataElementBinding;
use crate::dom::bindings::codegen::Bindings::HTMLDataElementBinding::HTMLDataElementMethods;
use crate::dom::bindings::root::DomRoot;
use crate::dom::bindings::str::DOMString;
use crate::dom::document::Document;
use crate::dom::htmlelement::HTMLElement;
use crate::dom::node::Node;
use dom_struct::dom_struct;
use html5ever::{LocalName, Prefix};
#[dom_struct]
pub struct HTMLDataElement {
htmlelement: HTMLElement,
}
impl HTMLDataElement {
fn new_inherited(
local_name: LocalName,
prefix: Option<Prefix>,
document: &Document,
) -> HTMLDataElement {
HTMLDataElement {
htmlelement: HTMLElement::new_inherited(local_name, prefix, document),
} | pub fn new(
local_name: LocalName,
prefix: Option<Prefix>,
document: &Document,
) -> DomRoot<HTMLDataElement> {
Node::reflect_node(
Box::new(HTMLDataElement::new_inherited(local_name, prefix, document)),
document,
HTMLDataElementBinding::Wrap,
)
}
}
impl HTMLDataElementMethods for HTMLDataElement {
// https://html.spec.whatwg.org/multipage/#dom-data-value
make_getter!(Value, "value");
// https://html.spec.whatwg.org/multipage/#dom-data-value
make_setter!(SetValue, "value");
} | }
#[allow(unrooted_must_root)] | random_line_split |
htmldataelement.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::dom::bindings::codegen::Bindings::HTMLDataElementBinding;
use crate::dom::bindings::codegen::Bindings::HTMLDataElementBinding::HTMLDataElementMethods;
use crate::dom::bindings::root::DomRoot;
use crate::dom::bindings::str::DOMString;
use crate::dom::document::Document;
use crate::dom::htmlelement::HTMLElement;
use crate::dom::node::Node;
use dom_struct::dom_struct;
use html5ever::{LocalName, Prefix};
#[dom_struct]
pub struct | {
htmlelement: HTMLElement,
}
impl HTMLDataElement {
fn new_inherited(
local_name: LocalName,
prefix: Option<Prefix>,
document: &Document,
) -> HTMLDataElement {
HTMLDataElement {
htmlelement: HTMLElement::new_inherited(local_name, prefix, document),
}
}
#[allow(unrooted_must_root)]
pub fn new(
local_name: LocalName,
prefix: Option<Prefix>,
document: &Document,
) -> DomRoot<HTMLDataElement> {
Node::reflect_node(
Box::new(HTMLDataElement::new_inherited(local_name, prefix, document)),
document,
HTMLDataElementBinding::Wrap,
)
}
}
impl HTMLDataElementMethods for HTMLDataElement {
// https://html.spec.whatwg.org/multipage/#dom-data-value
make_getter!(Value, "value");
// https://html.spec.whatwg.org/multipage/#dom-data-value
make_setter!(SetValue, "value");
}
| HTMLDataElement | identifier_name |
mainThreadService.ts | /*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import {TPromise} from 'vs/base/common/winjs.base';
import Worker = require('vs/base/common/worker/workerClient');
import abstractThreadService = require('vs/platform/thread/common/abstractThreadService');
import Env = require('vs/base/common/flags');
import Platform = require('vs/base/common/platform');
import remote = require('vs/base/common/remote');
import {SyncDescriptor0} from 'vs/platform/instantiation/common/descriptors';
import {IThreadService, IThreadSynchronizableObject, ThreadAffinity} from 'vs/platform/thread/common/thread';
import {IWorkspaceContextService} from 'vs/platform/workspace/common/workspace';
import {DefaultWorkerFactory} from 'vs/base/worker/defaultWorkerFactory';
interface IAffinityMap {
[qualifiedMethodName: string]: number;
}
export interface IWorker {
getRemoteCom(): remote.IRemoteCom;
}
export interface IWorkerListenr {
(worker: IWorker): void;
}
export class MainThreadService extends abstractThreadService.AbstractThreadService implements IThreadService {
public serviceId = IThreadService;
static MAXIMUM_WORKER_CREATION_DELAY = 500; // 500ms
private _workerPool: Worker.WorkerClient[];
private _contextService: IWorkspaceContextService;
private _affinityScrambler: IAffinityMap;
private _workersCreatedPromise: TPromise<void>;
private _triggerWorkersCreatedPromise: (value: void) => void;
private _workerFactory: Worker.IWorkerFactory;
private _workerModuleId: string;
private _defaultWorkerCount: number;
constructor(contextService: IWorkspaceContextService, workerModuleId: string, defaultWorkerCount: number) {
super(true);
this._contextService = contextService;
this._workerModuleId = workerModuleId;
this._defaultWorkerCount = defaultWorkerCount;
this._workerFactory = new DefaultWorkerFactory();
if (!this.isInMainThread) {
throw new Error('Incorrect Service usage: this service must be used only in the main thread');
}
this._workerPool = [];
this._affinityScrambler = {};
this._workersCreatedPromise = new TPromise<void>((c, e, p) => {
this._triggerWorkersCreatedPromise = c;
}, () => {
// Not cancelable
});
// If nobody asks for workers to be created in 5s, the workers are created automatically
TPromise.timeout(MainThreadService.MAXIMUM_WORKER_CREATION_DELAY).then(() => this.ensureWorkers());
}
ensureWorkers(): void {
if (this._triggerWorkersCreatedPromise) {
// Workers not created yet
let createCount = Env.workersCount(this._defaultWorkerCount);
if (!Platform.hasWebWorkerSupport()) {
// Create at most 1 compatibility worker
createCount = Math.min(createCount, 1);
}
for (let i = 0; i < createCount; i++) {
this._createWorker();
}
let complete = this._triggerWorkersCreatedPromise;
this._triggerWorkersCreatedPromise = null;
complete(null);
}
}
private _afterWorkers(): TPromise<void> {
let shouldCancelPromise = false;
return new TPromise<void>((c, e, p) => {
// hide the initialize promise inside this
// promise so that it won't be canceled by accident
this._workersCreatedPromise.then(() => {
if (!shouldCancelPromise) {
c(null);
}
}, e, p);
}, () => {
// mark that this promise is canceled
shouldCancelPromise = true;
});
}
private _createWorker(): void {
this._workerPool.push(this._doCreateWorker());
}
private _shortName(major: string, minor: string): string {
return major.substring(major.length - 14) + '.' + minor.substr(0, 14);
}
private _doCreateWorker(): Worker.WorkerClient {
let worker = new Worker.WorkerClient(
this._workerFactory,
this._workerModuleId,
(msg) => {
if (msg.type === 'threadService') {
return this._shortName(msg.payload[0], msg.payload[1]);
}
return msg.type;
}
);
worker.getRemoteCom().setManyHandler(this);
worker.onModuleLoaded = worker.request('initialize', {
contextService: {
workspace: this._contextService.getWorkspace(),
configuration: this._contextService.getConfiguration(),
options: this._contextService.getOptions()
}
});
return worker;
}
private _getWorkerIndex(obj: IThreadSynchronizableObject, affinity: ThreadAffinity): number {
if (affinity === ThreadAffinity.None) {
let winners: number[] = [0],
winnersQueueSize = this._workerPool[0].getQueueSize();
for (let i = 1; i < this._workerPool.length; i++) {
let queueSize = this._workerPool[i].getQueueSize();
if (queueSize < winnersQueueSize) {
winnersQueueSize = queueSize;
winners = [i];
} else if (queueSize === winnersQueueSize) {
winners.push(i);
}
} | return winners[Math.floor(Math.random() * winners.length)];
}
let scramble = 0;
if (this._affinityScrambler.hasOwnProperty(obj.getId())) {
scramble = this._affinityScrambler[obj.getId()];
} else {
scramble = Math.floor(Math.random() * this._workerPool.length);
this._affinityScrambler[obj.getId()] = scramble;
}
return (scramble + affinity) % this._workerPool.length;
}
OneWorker(obj: IThreadSynchronizableObject, methodName: string, target: Function, params: any[], affinity: ThreadAffinity): TPromise<any> {
return this._afterWorkers().then(() => {
if (this._workerPool.length === 0) {
throw new Error('Cannot fulfill request...');
}
let workerIdx = this._getWorkerIndex(obj, affinity);
return this._remoteCall(this._workerPool[workerIdx], obj, methodName, params);
});
}
AllWorkers(obj: IThreadSynchronizableObject, methodName: string, target: Function, params: any[]): TPromise<any> {
return this._afterWorkers().then(() => {
return TPromise.join(this._workerPool.map((w) => {
return this._remoteCall(w, obj, methodName, params);
}));
});
}
private _remoteCall(worker: Worker.WorkerClient, obj: IThreadSynchronizableObject, methodName: string, params: any[]): TPromise<any> {
let id = obj.getId();
if (!id) {
throw new Error('Synchronizable Objects must have an identifier');
}
return worker.request('threadService', [id, methodName, params]);
}
protected _registerAndInstantiateMainProcessActor<T>(id: string, descriptor: SyncDescriptor0<T>): T {
return this._getOrCreateLocalInstance(id, descriptor);
}
protected _registerMainProcessActor<T>(id: string, actor: T): void {
this._registerLocalInstance(id, actor);
}
protected _registerAndInstantiateExtHostActor<T>(id: string, descriptor: SyncDescriptor0<T>): T {
throw new Error('Not supported in this runtime context: Cannot communicate to non-existant Extension Host!');
}
protected _registerExtHostActor<T>(id: string, actor: T): void {
throw new Error('Not supported in this runtime context!');
}
protected _registerAndInstantiateWorkerActor<T>(id: string, descriptor: SyncDescriptor0<T>, whichWorker: ThreadAffinity): T {
let helper = this._createWorkerProxyHelper(whichWorker);
return this._getOrCreateProxyInstance(helper, id, descriptor);
}
protected _registerWorkerActor<T>(id: string, actor: T): void {
throw new Error('Not supported in this runtime context!');
}
private _createWorkerProxyHelper(whichWorker: ThreadAffinity): remote.IProxyHelper {
return {
callOnRemote: (proxyId: string, path: string, args: any[]): TPromise<any> => {
return this._callOnWorker(whichWorker, proxyId, path, args);
}
};
}
private _callOnWorker(whichWorker: ThreadAffinity, proxyId: string, path: string, args: any[]): TPromise<any> {
if (whichWorker === ThreadAffinity.None) {
return TPromise.as(null);
}
return this._afterWorkers().then(() => {
if (whichWorker === ThreadAffinity.All) {
let promises = this._workerPool.map(w => w.getRemoteCom()).map(rCom => rCom.callOnRemote(proxyId, path, args));
return TPromise.join(promises);
}
let workerIdx = whichWorker % this._workerPool.length;
let worker = this._workerPool[workerIdx];
return worker.getRemoteCom().callOnRemote(proxyId, path, args);
});
}
} | random_line_split | |
mainThreadService.ts | /*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import {TPromise} from 'vs/base/common/winjs.base';
import Worker = require('vs/base/common/worker/workerClient');
import abstractThreadService = require('vs/platform/thread/common/abstractThreadService');
import Env = require('vs/base/common/flags');
import Platform = require('vs/base/common/platform');
import remote = require('vs/base/common/remote');
import {SyncDescriptor0} from 'vs/platform/instantiation/common/descriptors';
import {IThreadService, IThreadSynchronizableObject, ThreadAffinity} from 'vs/platform/thread/common/thread';
import {IWorkspaceContextService} from 'vs/platform/workspace/common/workspace';
import {DefaultWorkerFactory} from 'vs/base/worker/defaultWorkerFactory';
interface IAffinityMap {
[qualifiedMethodName: string]: number;
}
export interface IWorker {
getRemoteCom(): remote.IRemoteCom;
}
export interface IWorkerListenr {
(worker: IWorker): void;
}
export class MainThreadService extends abstractThreadService.AbstractThreadService implements IThreadService {
public serviceId = IThreadService;
static MAXIMUM_WORKER_CREATION_DELAY = 500; // 500ms
private _workerPool: Worker.WorkerClient[];
private _contextService: IWorkspaceContextService;
private _affinityScrambler: IAffinityMap;
private _workersCreatedPromise: TPromise<void>;
private _triggerWorkersCreatedPromise: (value: void) => void;
private _workerFactory: Worker.IWorkerFactory;
private _workerModuleId: string;
private _defaultWorkerCount: number;
constructor(contextService: IWorkspaceContextService, workerModuleId: string, defaultWorkerCount: number) {
super(true);
this._contextService = contextService;
this._workerModuleId = workerModuleId;
this._defaultWorkerCount = defaultWorkerCount;
this._workerFactory = new DefaultWorkerFactory();
if (!this.isInMainThread) {
throw new Error('Incorrect Service usage: this service must be used only in the main thread');
}
this._workerPool = [];
this._affinityScrambler = {};
this._workersCreatedPromise = new TPromise<void>((c, e, p) => {
this._triggerWorkersCreatedPromise = c;
}, () => {
// Not cancelable
});
// If nobody asks for workers to be created in 5s, the workers are created automatically
TPromise.timeout(MainThreadService.MAXIMUM_WORKER_CREATION_DELAY).then(() => this.ensureWorkers());
}
ensureWorkers(): void {
if (this._triggerWorkersCreatedPromise) {
// Workers not created yet
let createCount = Env.workersCount(this._defaultWorkerCount);
if (!Platform.hasWebWorkerSupport()) {
// Create at most 1 compatibility worker
createCount = Math.min(createCount, 1);
}
for (let i = 0; i < createCount; i++) {
this._createWorker();
}
let complete = this._triggerWorkersCreatedPromise;
this._triggerWorkersCreatedPromise = null;
complete(null);
}
}
private _afterWorkers(): TPromise<void> {
let shouldCancelPromise = false;
return new TPromise<void>((c, e, p) => {
// hide the initialize promise inside this
// promise so that it won't be canceled by accident
this._workersCreatedPromise.then(() => {
if (!shouldCancelPromise) {
c(null);
}
}, e, p);
}, () => {
// mark that this promise is canceled
shouldCancelPromise = true;
});
}
private _createWorker(): void {
this._workerPool.push(this._doCreateWorker());
}
private _shortName(major: string, minor: string): string {
return major.substring(major.length - 14) + '.' + minor.substr(0, 14);
}
private _doCreateWorker(): Worker.WorkerClient {
let worker = new Worker.WorkerClient(
this._workerFactory,
this._workerModuleId,
(msg) => {
if (msg.type === 'threadService') {
return this._shortName(msg.payload[0], msg.payload[1]);
}
return msg.type;
}
);
worker.getRemoteCom().setManyHandler(this);
worker.onModuleLoaded = worker.request('initialize', {
contextService: {
workspace: this._contextService.getWorkspace(),
configuration: this._contextService.getConfiguration(),
options: this._contextService.getOptions()
}
});
return worker;
}
private _getWorkerIndex(obj: IThreadSynchronizableObject, affinity: ThreadAffinity): number {
if (affinity === ThreadAffinity.None) {
let winners: number[] = [0],
winnersQueueSize = this._workerPool[0].getQueueSize();
for (let i = 1; i < this._workerPool.length; i++) |
return winners[Math.floor(Math.random() * winners.length)];
}
let scramble = 0;
if (this._affinityScrambler.hasOwnProperty(obj.getId())) {
scramble = this._affinityScrambler[obj.getId()];
} else {
scramble = Math.floor(Math.random() * this._workerPool.length);
this._affinityScrambler[obj.getId()] = scramble;
}
return (scramble + affinity) % this._workerPool.length;
}
OneWorker(obj: IThreadSynchronizableObject, methodName: string, target: Function, params: any[], affinity: ThreadAffinity): TPromise<any> {
return this._afterWorkers().then(() => {
if (this._workerPool.length === 0) {
throw new Error('Cannot fulfill request...');
}
let workerIdx = this._getWorkerIndex(obj, affinity);
return this._remoteCall(this._workerPool[workerIdx], obj, methodName, params);
});
}
AllWorkers(obj: IThreadSynchronizableObject, methodName: string, target: Function, params: any[]): TPromise<any> {
return this._afterWorkers().then(() => {
return TPromise.join(this._workerPool.map((w) => {
return this._remoteCall(w, obj, methodName, params);
}));
});
}
private _remoteCall(worker: Worker.WorkerClient, obj: IThreadSynchronizableObject, methodName: string, params: any[]): TPromise<any> {
let id = obj.getId();
if (!id) {
throw new Error('Synchronizable Objects must have an identifier');
}
return worker.request('threadService', [id, methodName, params]);
}
protected _registerAndInstantiateMainProcessActor<T>(id: string, descriptor: SyncDescriptor0<T>): T {
return this._getOrCreateLocalInstance(id, descriptor);
}
protected _registerMainProcessActor<T>(id: string, actor: T): void {
this._registerLocalInstance(id, actor);
}
protected _registerAndInstantiateExtHostActor<T>(id: string, descriptor: SyncDescriptor0<T>): T {
throw new Error('Not supported in this runtime context: Cannot communicate to non-existant Extension Host!');
}
protected _registerExtHostActor<T>(id: string, actor: T): void {
throw new Error('Not supported in this runtime context!');
}
protected _registerAndInstantiateWorkerActor<T>(id: string, descriptor: SyncDescriptor0<T>, whichWorker: ThreadAffinity): T {
let helper = this._createWorkerProxyHelper(whichWorker);
return this._getOrCreateProxyInstance(helper, id, descriptor);
}
protected _registerWorkerActor<T>(id: string, actor: T): void {
throw new Error('Not supported in this runtime context!');
}
private _createWorkerProxyHelper(whichWorker: ThreadAffinity): remote.IProxyHelper {
return {
callOnRemote: (proxyId: string, path: string, args: any[]): TPromise<any> => {
return this._callOnWorker(whichWorker, proxyId, path, args);
}
};
}
private _callOnWorker(whichWorker: ThreadAffinity, proxyId: string, path: string, args: any[]): TPromise<any> {
if (whichWorker === ThreadAffinity.None) {
return TPromise.as(null);
}
return this._afterWorkers().then(() => {
if (whichWorker === ThreadAffinity.All) {
let promises = this._workerPool.map(w => w.getRemoteCom()).map(rCom => rCom.callOnRemote(proxyId, path, args));
return TPromise.join(promises);
}
let workerIdx = whichWorker % this._workerPool.length;
let worker = this._workerPool[workerIdx];
return worker.getRemoteCom().callOnRemote(proxyId, path, args);
});
}
} | {
let queueSize = this._workerPool[i].getQueueSize();
if (queueSize < winnersQueueSize) {
winnersQueueSize = queueSize;
winners = [i];
} else if (queueSize === winnersQueueSize) {
winners.push(i);
}
} | conditional_block |
mainThreadService.ts | /*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import {TPromise} from 'vs/base/common/winjs.base';
import Worker = require('vs/base/common/worker/workerClient');
import abstractThreadService = require('vs/platform/thread/common/abstractThreadService');
import Env = require('vs/base/common/flags');
import Platform = require('vs/base/common/platform');
import remote = require('vs/base/common/remote');
import {SyncDescriptor0} from 'vs/platform/instantiation/common/descriptors';
import {IThreadService, IThreadSynchronizableObject, ThreadAffinity} from 'vs/platform/thread/common/thread';
import {IWorkspaceContextService} from 'vs/platform/workspace/common/workspace';
import {DefaultWorkerFactory} from 'vs/base/worker/defaultWorkerFactory';
interface IAffinityMap {
[qualifiedMethodName: string]: number;
}
export interface IWorker {
getRemoteCom(): remote.IRemoteCom;
}
export interface IWorkerListenr {
(worker: IWorker): void;
}
export class MainThreadService extends abstractThreadService.AbstractThreadService implements IThreadService {
public serviceId = IThreadService;
static MAXIMUM_WORKER_CREATION_DELAY = 500; // 500ms
private _workerPool: Worker.WorkerClient[];
private _contextService: IWorkspaceContextService;
private _affinityScrambler: IAffinityMap;
private _workersCreatedPromise: TPromise<void>;
private _triggerWorkersCreatedPromise: (value: void) => void;
private _workerFactory: Worker.IWorkerFactory;
private _workerModuleId: string;
private _defaultWorkerCount: number;
constructor(contextService: IWorkspaceContextService, workerModuleId: string, defaultWorkerCount: number) {
super(true);
this._contextService = contextService;
this._workerModuleId = workerModuleId;
this._defaultWorkerCount = defaultWorkerCount;
this._workerFactory = new DefaultWorkerFactory();
if (!this.isInMainThread) {
throw new Error('Incorrect Service usage: this service must be used only in the main thread');
}
this._workerPool = [];
this._affinityScrambler = {};
this._workersCreatedPromise = new TPromise<void>((c, e, p) => {
this._triggerWorkersCreatedPromise = c;
}, () => {
// Not cancelable
});
// If nobody asks for workers to be created in 5s, the workers are created automatically
TPromise.timeout(MainThreadService.MAXIMUM_WORKER_CREATION_DELAY).then(() => this.ensureWorkers());
}
ensureWorkers(): void {
if (this._triggerWorkersCreatedPromise) {
// Workers not created yet
let createCount = Env.workersCount(this._defaultWorkerCount);
if (!Platform.hasWebWorkerSupport()) {
// Create at most 1 compatibility worker
createCount = Math.min(createCount, 1);
}
for (let i = 0; i < createCount; i++) {
this._createWorker();
}
let complete = this._triggerWorkersCreatedPromise;
this._triggerWorkersCreatedPromise = null;
complete(null);
}
}
private _afterWorkers(): TPromise<void> {
let shouldCancelPromise = false;
return new TPromise<void>((c, e, p) => {
// hide the initialize promise inside this
// promise so that it won't be canceled by accident
this._workersCreatedPromise.then(() => {
if (!shouldCancelPromise) {
c(null);
}
}, e, p);
}, () => {
// mark that this promise is canceled
shouldCancelPromise = true;
});
}
private _createWorker(): void {
this._workerPool.push(this._doCreateWorker());
}
private _shortName(major: string, minor: string): string {
return major.substring(major.length - 14) + '.' + minor.substr(0, 14);
}
private _doCreateWorker(): Worker.WorkerClient {
let worker = new Worker.WorkerClient(
this._workerFactory,
this._workerModuleId,
(msg) => {
if (msg.type === 'threadService') {
return this._shortName(msg.payload[0], msg.payload[1]);
}
return msg.type;
}
);
worker.getRemoteCom().setManyHandler(this);
worker.onModuleLoaded = worker.request('initialize', {
contextService: {
workspace: this._contextService.getWorkspace(),
configuration: this._contextService.getConfiguration(),
options: this._contextService.getOptions()
}
});
return worker;
}
private _getWorkerIndex(obj: IThreadSynchronizableObject, affinity: ThreadAffinity): number |
OneWorker(obj: IThreadSynchronizableObject, methodName: string, target: Function, params: any[], affinity: ThreadAffinity): TPromise<any> {
return this._afterWorkers().then(() => {
if (this._workerPool.length === 0) {
throw new Error('Cannot fulfill request...');
}
let workerIdx = this._getWorkerIndex(obj, affinity);
return this._remoteCall(this._workerPool[workerIdx], obj, methodName, params);
});
}
AllWorkers(obj: IThreadSynchronizableObject, methodName: string, target: Function, params: any[]): TPromise<any> {
return this._afterWorkers().then(() => {
return TPromise.join(this._workerPool.map((w) => {
return this._remoteCall(w, obj, methodName, params);
}));
});
}
private _remoteCall(worker: Worker.WorkerClient, obj: IThreadSynchronizableObject, methodName: string, params: any[]): TPromise<any> {
let id = obj.getId();
if (!id) {
throw new Error('Synchronizable Objects must have an identifier');
}
return worker.request('threadService', [id, methodName, params]);
}
protected _registerAndInstantiateMainProcessActor<T>(id: string, descriptor: SyncDescriptor0<T>): T {
return this._getOrCreateLocalInstance(id, descriptor);
}
protected _registerMainProcessActor<T>(id: string, actor: T): void {
this._registerLocalInstance(id, actor);
}
protected _registerAndInstantiateExtHostActor<T>(id: string, descriptor: SyncDescriptor0<T>): T {
throw new Error('Not supported in this runtime context: Cannot communicate to non-existant Extension Host!');
}
protected _registerExtHostActor<T>(id: string, actor: T): void {
throw new Error('Not supported in this runtime context!');
}
protected _registerAndInstantiateWorkerActor<T>(id: string, descriptor: SyncDescriptor0<T>, whichWorker: ThreadAffinity): T {
let helper = this._createWorkerProxyHelper(whichWorker);
return this._getOrCreateProxyInstance(helper, id, descriptor);
}
protected _registerWorkerActor<T>(id: string, actor: T): void {
throw new Error('Not supported in this runtime context!');
}
private _createWorkerProxyHelper(whichWorker: ThreadAffinity): remote.IProxyHelper {
return {
callOnRemote: (proxyId: string, path: string, args: any[]): TPromise<any> => {
return this._callOnWorker(whichWorker, proxyId, path, args);
}
};
}
private _callOnWorker(whichWorker: ThreadAffinity, proxyId: string, path: string, args: any[]): TPromise<any> {
if (whichWorker === ThreadAffinity.None) {
return TPromise.as(null);
}
return this._afterWorkers().then(() => {
if (whichWorker === ThreadAffinity.All) {
let promises = this._workerPool.map(w => w.getRemoteCom()).map(rCom => rCom.callOnRemote(proxyId, path, args));
return TPromise.join(promises);
}
let workerIdx = whichWorker % this._workerPool.length;
let worker = this._workerPool[workerIdx];
return worker.getRemoteCom().callOnRemote(proxyId, path, args);
});
}
} | {
if (affinity === ThreadAffinity.None) {
let winners: number[] = [0],
winnersQueueSize = this._workerPool[0].getQueueSize();
for (let i = 1; i < this._workerPool.length; i++) {
let queueSize = this._workerPool[i].getQueueSize();
if (queueSize < winnersQueueSize) {
winnersQueueSize = queueSize;
winners = [i];
} else if (queueSize === winnersQueueSize) {
winners.push(i);
}
}
return winners[Math.floor(Math.random() * winners.length)];
}
let scramble = 0;
if (this._affinityScrambler.hasOwnProperty(obj.getId())) {
scramble = this._affinityScrambler[obj.getId()];
} else {
scramble = Math.floor(Math.random() * this._workerPool.length);
this._affinityScrambler[obj.getId()] = scramble;
}
return (scramble + affinity) % this._workerPool.length;
} | identifier_body |
mainThreadService.ts | /*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import {TPromise} from 'vs/base/common/winjs.base';
import Worker = require('vs/base/common/worker/workerClient');
import abstractThreadService = require('vs/platform/thread/common/abstractThreadService');
import Env = require('vs/base/common/flags');
import Platform = require('vs/base/common/platform');
import remote = require('vs/base/common/remote');
import {SyncDescriptor0} from 'vs/platform/instantiation/common/descriptors';
import {IThreadService, IThreadSynchronizableObject, ThreadAffinity} from 'vs/platform/thread/common/thread';
import {IWorkspaceContextService} from 'vs/platform/workspace/common/workspace';
import {DefaultWorkerFactory} from 'vs/base/worker/defaultWorkerFactory';
interface IAffinityMap {
[qualifiedMethodName: string]: number;
}
export interface IWorker {
getRemoteCom(): remote.IRemoteCom;
}
export interface IWorkerListenr {
(worker: IWorker): void;
}
export class MainThreadService extends abstractThreadService.AbstractThreadService implements IThreadService {
public serviceId = IThreadService;
static MAXIMUM_WORKER_CREATION_DELAY = 500; // 500ms
private _workerPool: Worker.WorkerClient[];
private _contextService: IWorkspaceContextService;
private _affinityScrambler: IAffinityMap;
private _workersCreatedPromise: TPromise<void>;
private _triggerWorkersCreatedPromise: (value: void) => void;
private _workerFactory: Worker.IWorkerFactory;
private _workerModuleId: string;
private _defaultWorkerCount: number;
constructor(contextService: IWorkspaceContextService, workerModuleId: string, defaultWorkerCount: number) {
super(true);
this._contextService = contextService;
this._workerModuleId = workerModuleId;
this._defaultWorkerCount = defaultWorkerCount;
this._workerFactory = new DefaultWorkerFactory();
if (!this.isInMainThread) {
throw new Error('Incorrect Service usage: this service must be used only in the main thread');
}
this._workerPool = [];
this._affinityScrambler = {};
this._workersCreatedPromise = new TPromise<void>((c, e, p) => {
this._triggerWorkersCreatedPromise = c;
}, () => {
// Not cancelable
});
// If nobody asks for workers to be created in 5s, the workers are created automatically
TPromise.timeout(MainThreadService.MAXIMUM_WORKER_CREATION_DELAY).then(() => this.ensureWorkers());
}
ensureWorkers(): void {
if (this._triggerWorkersCreatedPromise) {
// Workers not created yet
let createCount = Env.workersCount(this._defaultWorkerCount);
if (!Platform.hasWebWorkerSupport()) {
// Create at most 1 compatibility worker
createCount = Math.min(createCount, 1);
}
for (let i = 0; i < createCount; i++) {
this._createWorker();
}
let complete = this._triggerWorkersCreatedPromise;
this._triggerWorkersCreatedPromise = null;
complete(null);
}
}
private _afterWorkers(): TPromise<void> {
let shouldCancelPromise = false;
return new TPromise<void>((c, e, p) => {
// hide the initialize promise inside this
// promise so that it won't be canceled by accident
this._workersCreatedPromise.then(() => {
if (!shouldCancelPromise) {
c(null);
}
}, e, p);
}, () => {
// mark that this promise is canceled
shouldCancelPromise = true;
});
}
private _createWorker(): void {
this._workerPool.push(this._doCreateWorker());
}
private _shortName(major: string, minor: string): string {
return major.substring(major.length - 14) + '.' + minor.substr(0, 14);
}
private _doCreateWorker(): Worker.WorkerClient {
let worker = new Worker.WorkerClient(
this._workerFactory,
this._workerModuleId,
(msg) => {
if (msg.type === 'threadService') {
return this._shortName(msg.payload[0], msg.payload[1]);
}
return msg.type;
}
);
worker.getRemoteCom().setManyHandler(this);
worker.onModuleLoaded = worker.request('initialize', {
contextService: {
workspace: this._contextService.getWorkspace(),
configuration: this._contextService.getConfiguration(),
options: this._contextService.getOptions()
}
});
return worker;
}
private _getWorkerIndex(obj: IThreadSynchronizableObject, affinity: ThreadAffinity): number {
if (affinity === ThreadAffinity.None) {
let winners: number[] = [0],
winnersQueueSize = this._workerPool[0].getQueueSize();
for (let i = 1; i < this._workerPool.length; i++) {
let queueSize = this._workerPool[i].getQueueSize();
if (queueSize < winnersQueueSize) {
winnersQueueSize = queueSize;
winners = [i];
} else if (queueSize === winnersQueueSize) {
winners.push(i);
}
}
return winners[Math.floor(Math.random() * winners.length)];
}
let scramble = 0;
if (this._affinityScrambler.hasOwnProperty(obj.getId())) {
scramble = this._affinityScrambler[obj.getId()];
} else {
scramble = Math.floor(Math.random() * this._workerPool.length);
this._affinityScrambler[obj.getId()] = scramble;
}
return (scramble + affinity) % this._workerPool.length;
}
OneWorker(obj: IThreadSynchronizableObject, methodName: string, target: Function, params: any[], affinity: ThreadAffinity): TPromise<any> {
return this._afterWorkers().then(() => {
if (this._workerPool.length === 0) {
throw new Error('Cannot fulfill request...');
}
let workerIdx = this._getWorkerIndex(obj, affinity);
return this._remoteCall(this._workerPool[workerIdx], obj, methodName, params);
});
}
AllWorkers(obj: IThreadSynchronizableObject, methodName: string, target: Function, params: any[]): TPromise<any> {
return this._afterWorkers().then(() => {
return TPromise.join(this._workerPool.map((w) => {
return this._remoteCall(w, obj, methodName, params);
}));
});
}
private _remoteCall(worker: Worker.WorkerClient, obj: IThreadSynchronizableObject, methodName: string, params: any[]): TPromise<any> {
let id = obj.getId();
if (!id) {
throw new Error('Synchronizable Objects must have an identifier');
}
return worker.request('threadService', [id, methodName, params]);
}
protected _registerAndInstantiateMainProcessActor<T>(id: string, descriptor: SyncDescriptor0<T>): T {
return this._getOrCreateLocalInstance(id, descriptor);
}
protected _registerMainProcessActor<T>(id: string, actor: T): void {
this._registerLocalInstance(id, actor);
}
protected _registerAndInstantiateExtHostActor<T>(id: string, descriptor: SyncDescriptor0<T>): T {
throw new Error('Not supported in this runtime context: Cannot communicate to non-existant Extension Host!');
}
protected _registerExtHostActor<T>(id: string, actor: T): void {
throw new Error('Not supported in this runtime context!');
}
protected _registerAndInstantiateWorkerActor<T>(id: string, descriptor: SyncDescriptor0<T>, whichWorker: ThreadAffinity): T {
let helper = this._createWorkerProxyHelper(whichWorker);
return this._getOrCreateProxyInstance(helper, id, descriptor);
}
protected _registerWorkerActor<T>(id: string, actor: T): void {
throw new Error('Not supported in this runtime context!');
}
private | (whichWorker: ThreadAffinity): remote.IProxyHelper {
return {
callOnRemote: (proxyId: string, path: string, args: any[]): TPromise<any> => {
return this._callOnWorker(whichWorker, proxyId, path, args);
}
};
}
private _callOnWorker(whichWorker: ThreadAffinity, proxyId: string, path: string, args: any[]): TPromise<any> {
if (whichWorker === ThreadAffinity.None) {
return TPromise.as(null);
}
return this._afterWorkers().then(() => {
if (whichWorker === ThreadAffinity.All) {
let promises = this._workerPool.map(w => w.getRemoteCom()).map(rCom => rCom.callOnRemote(proxyId, path, args));
return TPromise.join(promises);
}
let workerIdx = whichWorker % this._workerPool.length;
let worker = this._workerPool[workerIdx];
return worker.getRemoteCom().callOnRemote(proxyId, path, args);
});
}
} | _createWorkerProxyHelper | identifier_name |
my.js | /* this is all example code which should be changed; see query.js for how it works */
authUrl = "http://importio-signedserver.herokuapp.com/";
reEx.push(/\/_source$/);
/*
//change doReady() to auto-query on document ready
var doReadyOrg = doReady;
doReady = function() {
doReadyOrg();
doQuery();//query on ready
}
*/
//change doReady() to add autocomplete-related events
// http://jqueryui.com/autocomplete/ http://api.jqueryui.com/autocomplete/
var acField;//autocomplete data field
var acSel;//autocomplete input selector
var acsSel = "#autocomplete-spin";//autocomplete spinner selector
var cache = {};//autocomplete cache
var termCur = "";//autocomplete current term
var doReadyOrg = doReady;
doReady = function() {
doReadyOrg();
$(acSel)
.focus()
.bind("keydown", function(event) {
// http://api.jqueryui.com/jQuery.ui.keyCode/
switch(event.keyCode) {
//don't fire autocomplete on certain keys
case $.ui.keyCode.LEFT:
case $.ui.keyCode.RIGHT:
event.stopImmediatePropagation();
return true;
break;
//submit form on enter
case $.ui.keyCode.ENTER:
doQuery();
$(this).autocomplete("close");
break;
}
})
.autocomplete({
minLength: 3,
source: function(request, response) {
var term = request.term.replace(/[^\w\s]/gi, '').trim().toUpperCase();//replace all but "words" [A-Za-z0-9_] & whitespaces
if (term in cache) {
doneCompleteCallbackStop();
response(cache[term]);
return;
}
termCur = term;
if (spinOpts) {
$(acsSel).spin(spinOpts);
}
cache[term] = [];
doComplete(term);
response(cache[term]);//send empty for now
}
});
};
function doComplete(term) |
var dataCompleteCallback = function(data, term) {
console.log("Data received", data);
for (var i = 0; i < data.length; i++) {
var d = data[i];
var c = d.data[acField];
if (typeof filterComplete === 'function') {
c = filterComplete(c);
}
c = c.trim();
if (!c) {
continue;
}
cache[term].push(c);
}
}
var doneCompleteCallback = function(data, term) {
console.log("Done, all data:", data);
console.log("cache:", cache);
// http://stackoverflow.com/questions/16747798/delete-duplicate-elements-from-an-array
cache[term] = cache[term].filter(
function(elem, index, self) {
return index == self.indexOf(elem);
});
if (termCur != term) {
return;
}
doneCompleteCallbackStop();
$(acSel).trigger("keydown");
}
var doneCompleteCallbackStop = function() {
termCur = "";
if (spinOpts) {
$(acsSel).spin(false);
}
}
/* Query for tile Store Locators
*/
fFields.push({id: "postcode", html: '<input id="postcode" type="text" value="EC2M 4TP" />'});
fFields.push({id: "autocomplete-spin", html: '<span id="autocomplete-spin"></span>'});
fFields.push({id: "submit", html: '<button id="submit" onclick="doQuery();">Query</button>'});
acField = "address";
var filterComplete = function(val) {
if (val.indexOf(", ") == -1) {
return "";
}
return val.split(", ").pop();
}
acSel = "#postcode";
qObj.connectorGuids = [
"8f628f9d-b564-4888-bc99-1fb54b2df7df",
"7290b98f-5bc0-4055-a5df-d7639382c9c3",
"14d71ff7-b58f-4b37-bb5b-e2475bdb6eb9",
"9c99f396-2b8c-41e0-9799-38b039fe19cc",
"a0087993-5673-4d62-a5ae-62c67c1bcc40"
];
var doQueryMy = function() {
qObj.input = {
"postcode": $("#postcode").val()
};
}
/* Here's some other example code for a completely different API
fFields.push({id: "title", html: '<input id="title" type="text" value="harry potter" />'});
fFields.push({id: "autocomplete-spin", html: '<span id="autocomplete-spin"></span>'});
fFields.push({id: "submit", html: '<button id="submit" onclick="doQuery();">Query</button>'});
acField = "title";
acSel = "#title";
filters["image"] = function(val, row) {
return '<a href="' + val + '" target="_blank">' + val + '</a>';
}
qObj.connectorGuids = [
"ABC"
];
var doQueryMy = function() {
qObj.input = {
"search": $("#title").val()
};
}
*/
/* Here's some other example code for a completely different API
colNames = ["ranking", "title", "artist", "album", "peak_pos", "last_pos", "weeks", "image", "spotify", "rdio", "video"];
filters["title"] = function(val, row) {
return "<b>" + val + "</b>";
}
filters["video"] = function(val, row) {
if (val.substring(0, 7) != "http://") {
return val;
}
return '<a href="' + val + '" target="_blank">' + val + '</a>';
}
doQuery = function() {
doQueryPre();
for (var page = 0; page < 10; page++) {
importio.query({
"connectorGuids": [
"XYZ"
],
"input": {
"webpage/url": "http://www.billboard.com/charts/hot-100?page=" + page
}
}, { "data": dataCallback, "done": doneCallback });
}
}
*/
| {
doQueryMy();
var qObjComplete = jQuery.extend({}, qObj);//copy to new obj
qObjComplete.maxPages = 1;
importio.query(qObjComplete,
{ "data": function(data) {
dataCompleteCallback(data, term);
},
"done": function(data) {
doneCompleteCallback(data, term);
}
}
);
} | identifier_body |
my.js | /* this is all example code which should be changed; see query.js for how it works */
authUrl = "http://importio-signedserver.herokuapp.com/";
reEx.push(/\/_source$/);
/*
//change doReady() to auto-query on document ready
var doReadyOrg = doReady;
doReady = function() {
doReadyOrg();
doQuery();//query on ready
}
*/
//change doReady() to add autocomplete-related events
// http://jqueryui.com/autocomplete/ http://api.jqueryui.com/autocomplete/
var acField;//autocomplete data field
var acSel;//autocomplete input selector
var acsSel = "#autocomplete-spin";//autocomplete spinner selector
var cache = {};//autocomplete cache
var termCur = "";//autocomplete current term
var doReadyOrg = doReady;
doReady = function() {
doReadyOrg();
$(acSel)
.focus()
.bind("keydown", function(event) {
// http://api.jqueryui.com/jQuery.ui.keyCode/
switch(event.keyCode) {
//don't fire autocomplete on certain keys
case $.ui.keyCode.LEFT:
case $.ui.keyCode.RIGHT:
event.stopImmediatePropagation();
return true;
break;
//submit form on enter
case $.ui.keyCode.ENTER:
doQuery();
$(this).autocomplete("close");
break;
}
})
.autocomplete({
minLength: 3,
source: function(request, response) {
var term = request.term.replace(/[^\w\s]/gi, '').trim().toUpperCase();//replace all but "words" [A-Za-z0-9_] & whitespaces
if (term in cache) {
doneCompleteCallbackStop();
response(cache[term]);
return;
}
termCur = term;
if (spinOpts) {
$(acsSel).spin(spinOpts);
}
cache[term] = [];
doComplete(term);
response(cache[term]);//send empty for now
}
});
};
function doComplete(term) {
doQueryMy();
var qObjComplete = jQuery.extend({}, qObj);//copy to new obj
qObjComplete.maxPages = 1;
importio.query(qObjComplete,
{ "data": function(data) {
dataCompleteCallback(data, term);
},
"done": function(data) {
doneCompleteCallback(data, term);
}
}
);
}
var dataCompleteCallback = function(data, term) {
console.log("Data received", data);
for (var i = 0; i < data.length; i++) {
var d = data[i];
var c = d.data[acField];
if (typeof filterComplete === 'function') {
c = filterComplete(c);
}
c = c.trim();
if (!c) {
continue;
}
cache[term].push(c);
}
}
var doneCompleteCallback = function(data, term) {
console.log("Done, all data:", data);
console.log("cache:", cache);
// http://stackoverflow.com/questions/16747798/delete-duplicate-elements-from-an-array
cache[term] = cache[term].filter(
function(elem, index, self) {
return index == self.indexOf(elem);
});
if (termCur != term) {
return;
}
doneCompleteCallbackStop();
$(acSel).trigger("keydown");
}
var doneCompleteCallbackStop = function() {
termCur = "";
if (spinOpts) {
$(acsSel).spin(false);
}
}
/* Query for tile Store Locators
*/
fFields.push({id: "postcode", html: '<input id="postcode" type="text" value="EC2M 4TP" />'});
fFields.push({id: "autocomplete-spin", html: '<span id="autocomplete-spin"></span>'});
fFields.push({id: "submit", html: '<button id="submit" onclick="doQuery();">Query</button>'});
acField = "address";
var filterComplete = function(val) {
if (val.indexOf(", ") == -1) {
return "";
}
return val.split(", ").pop();
}
acSel = "#postcode";
qObj.connectorGuids = [
"8f628f9d-b564-4888-bc99-1fb54b2df7df",
"7290b98f-5bc0-4055-a5df-d7639382c9c3",
"14d71ff7-b58f-4b37-bb5b-e2475bdb6eb9",
"9c99f396-2b8c-41e0-9799-38b039fe19cc",
"a0087993-5673-4d62-a5ae-62c67c1bcc40"
];
var doQueryMy = function() {
qObj.input = {
"postcode": $("#postcode").val()
};
}
/* Here's some other example code for a completely different API
fFields.push({id: "title", html: '<input id="title" type="text" value="harry potter" />'});
fFields.push({id: "autocomplete-spin", html: '<span id="autocomplete-spin"></span>'});
fFields.push({id: "submit", html: '<button id="submit" onclick="doQuery();">Query</button>'});
acField = "title";
acSel = "#title";
filters["image"] = function(val, row) { | ];
var doQueryMy = function() {
qObj.input = {
"search": $("#title").val()
};
}
*/
/* Here's some other example code for a completely different API
colNames = ["ranking", "title", "artist", "album", "peak_pos", "last_pos", "weeks", "image", "spotify", "rdio", "video"];
filters["title"] = function(val, row) {
return "<b>" + val + "</b>";
}
filters["video"] = function(val, row) {
if (val.substring(0, 7) != "http://") {
return val;
}
return '<a href="' + val + '" target="_blank">' + val + '</a>';
}
doQuery = function() {
doQueryPre();
for (var page = 0; page < 10; page++) {
importio.query({
"connectorGuids": [
"XYZ"
],
"input": {
"webpage/url": "http://www.billboard.com/charts/hot-100?page=" + page
}
}, { "data": dataCallback, "done": doneCallback });
}
}
*/ | return '<a href="' + val + '" target="_blank">' + val + '</a>';
}
qObj.connectorGuids = [
"ABC" | random_line_split |
my.js | /* this is all example code which should be changed; see query.js for how it works */
authUrl = "http://importio-signedserver.herokuapp.com/";
reEx.push(/\/_source$/);
/*
//change doReady() to auto-query on document ready
var doReadyOrg = doReady;
doReady = function() {
doReadyOrg();
doQuery();//query on ready
}
*/
//change doReady() to add autocomplete-related events
// http://jqueryui.com/autocomplete/ http://api.jqueryui.com/autocomplete/
var acField;//autocomplete data field
var acSel;//autocomplete input selector
var acsSel = "#autocomplete-spin";//autocomplete spinner selector
var cache = {};//autocomplete cache
var termCur = "";//autocomplete current term
var doReadyOrg = doReady;
doReady = function() {
doReadyOrg();
$(acSel)
.focus()
.bind("keydown", function(event) {
// http://api.jqueryui.com/jQuery.ui.keyCode/
switch(event.keyCode) {
//don't fire autocomplete on certain keys
case $.ui.keyCode.LEFT:
case $.ui.keyCode.RIGHT:
event.stopImmediatePropagation();
return true;
break;
//submit form on enter
case $.ui.keyCode.ENTER:
doQuery();
$(this).autocomplete("close");
break;
}
})
.autocomplete({
minLength: 3,
source: function(request, response) {
var term = request.term.replace(/[^\w\s]/gi, '').trim().toUpperCase();//replace all but "words" [A-Za-z0-9_] & whitespaces
if (term in cache) {
doneCompleteCallbackStop();
response(cache[term]);
return;
}
termCur = term;
if (spinOpts) {
$(acsSel).spin(spinOpts);
}
cache[term] = [];
doComplete(term);
response(cache[term]);//send empty for now
}
});
};
function | (term) {
doQueryMy();
var qObjComplete = jQuery.extend({}, qObj);//copy to new obj
qObjComplete.maxPages = 1;
importio.query(qObjComplete,
{ "data": function(data) {
dataCompleteCallback(data, term);
},
"done": function(data) {
doneCompleteCallback(data, term);
}
}
);
}
var dataCompleteCallback = function(data, term) {
console.log("Data received", data);
for (var i = 0; i < data.length; i++) {
var d = data[i];
var c = d.data[acField];
if (typeof filterComplete === 'function') {
c = filterComplete(c);
}
c = c.trim();
if (!c) {
continue;
}
cache[term].push(c);
}
}
var doneCompleteCallback = function(data, term) {
console.log("Done, all data:", data);
console.log("cache:", cache);
// http://stackoverflow.com/questions/16747798/delete-duplicate-elements-from-an-array
cache[term] = cache[term].filter(
function(elem, index, self) {
return index == self.indexOf(elem);
});
if (termCur != term) {
return;
}
doneCompleteCallbackStop();
$(acSel).trigger("keydown");
}
var doneCompleteCallbackStop = function() {
termCur = "";
if (spinOpts) {
$(acsSel).spin(false);
}
}
/* Query for tile Store Locators
*/
fFields.push({id: "postcode", html: '<input id="postcode" type="text" value="EC2M 4TP" />'});
fFields.push({id: "autocomplete-spin", html: '<span id="autocomplete-spin"></span>'});
fFields.push({id: "submit", html: '<button id="submit" onclick="doQuery();">Query</button>'});
acField = "address";
var filterComplete = function(val) {
if (val.indexOf(", ") == -1) {
return "";
}
return val.split(", ").pop();
}
acSel = "#postcode";
qObj.connectorGuids = [
"8f628f9d-b564-4888-bc99-1fb54b2df7df",
"7290b98f-5bc0-4055-a5df-d7639382c9c3",
"14d71ff7-b58f-4b37-bb5b-e2475bdb6eb9",
"9c99f396-2b8c-41e0-9799-38b039fe19cc",
"a0087993-5673-4d62-a5ae-62c67c1bcc40"
];
var doQueryMy = function() {
qObj.input = {
"postcode": $("#postcode").val()
};
}
/* Here's some other example code for a completely different API
fFields.push({id: "title", html: '<input id="title" type="text" value="harry potter" />'});
fFields.push({id: "autocomplete-spin", html: '<span id="autocomplete-spin"></span>'});
fFields.push({id: "submit", html: '<button id="submit" onclick="doQuery();">Query</button>'});
acField = "title";
acSel = "#title";
filters["image"] = function(val, row) {
return '<a href="' + val + '" target="_blank">' + val + '</a>';
}
qObj.connectorGuids = [
"ABC"
];
var doQueryMy = function() {
qObj.input = {
"search": $("#title").val()
};
}
*/
/* Here's some other example code for a completely different API
colNames = ["ranking", "title", "artist", "album", "peak_pos", "last_pos", "weeks", "image", "spotify", "rdio", "video"];
filters["title"] = function(val, row) {
return "<b>" + val + "</b>";
}
filters["video"] = function(val, row) {
if (val.substring(0, 7) != "http://") {
return val;
}
return '<a href="' + val + '" target="_blank">' + val + '</a>';
}
doQuery = function() {
doQueryPre();
for (var page = 0; page < 10; page++) {
importio.query({
"connectorGuids": [
"XYZ"
],
"input": {
"webpage/url": "http://www.billboard.com/charts/hot-100?page=" + page
}
}, { "data": dataCallback, "done": doneCallback });
}
}
*/
| doComplete | identifier_name |
workfile.rs | use std;
use std::io;
use std::io::prelude::*;
use std::fs::File;
use std::io::{Error, ErrorKind};
// Helps creating a working file and move the working file
// to the final file when done, or automatically delete the
// working file in case of error.
pub struct | {
file_path: String,
work_file_path: String,
file: Option<File>
}
impl WorkFile {
pub fn create(file_path: &str) -> io::Result<WorkFile> {
let work_file_path: String = format!("{}.work", file_path);
let file = match File::create(&work_file_path) {
Ok(file) => file,
Err(err) => { return Err(err); }
};
Ok(WorkFile {
file_path: file_path.to_string(),
work_file_path: work_file_path,
file: Some(file)
})
}
pub fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
let ret = match self.file {
Some(ref mut some_file) => some_file.write(buf),
None => Err( Error::new(ErrorKind::Other, "oops") )
};
ret
}
pub fn commit(&mut self) {
let file = self.file.take();
drop(file);
match std::fs::rename(&self.work_file_path, &self.file_path) {
Ok(_) => (),
Err(err) => panic!("commit failed: {}", err)
}
}
}
impl Drop for WorkFile {
fn drop(&mut self) {
if self.file.is_some() {
drop(self.file.take());
match std::fs::remove_file(&self.work_file_path) {
Ok(_) => (),
Err(err) => panic!("rollback failed: {}", err)
}
}
}
}
| WorkFile | identifier_name |
workfile.rs | use std;
use std::io;
use std::io::prelude::*;
use std::fs::File;
use std::io::{Error, ErrorKind};
// Helps creating a working file and move the working file
// to the final file when done, or automatically delete the
// working file in case of error.
pub struct WorkFile {
file_path: String,
work_file_path: String,
file: Option<File>
}
impl WorkFile {
pub fn create(file_path: &str) -> io::Result<WorkFile> {
let work_file_path: String = format!("{}.work", file_path);
let file = match File::create(&work_file_path) {
Ok(file) => file,
Err(err) => { return Err(err); }
};
Ok(WorkFile {
file_path: file_path.to_string(),
work_file_path: work_file_path,
file: Some(file)
})
}
pub fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
let ret = match self.file {
Some(ref mut some_file) => some_file.write(buf),
None => Err( Error::new(ErrorKind::Other, "oops") )
};
ret
}
pub fn commit(&mut self) {
let file = self.file.take();
drop(file);
match std::fs::rename(&self.work_file_path, &self.file_path) {
Ok(_) => (),
Err(err) => panic!("commit failed: {}", err)
}
}
}
impl Drop for WorkFile {
fn drop(&mut self) |
}
| {
if self.file.is_some() {
drop(self.file.take());
match std::fs::remove_file(&self.work_file_path) {
Ok(_) => (),
Err(err) => panic!("rollback failed: {}", err)
}
}
} | identifier_body |
workfile.rs | use std;
use std::io;
use std::io::prelude::*;
use std::fs::File; | // Helps creating a working file and move the working file
// to the final file when done, or automatically delete the
// working file in case of error.
pub struct WorkFile {
file_path: String,
work_file_path: String,
file: Option<File>
}
impl WorkFile {
pub fn create(file_path: &str) -> io::Result<WorkFile> {
let work_file_path: String = format!("{}.work", file_path);
let file = match File::create(&work_file_path) {
Ok(file) => file,
Err(err) => { return Err(err); }
};
Ok(WorkFile {
file_path: file_path.to_string(),
work_file_path: work_file_path,
file: Some(file)
})
}
pub fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
let ret = match self.file {
Some(ref mut some_file) => some_file.write(buf),
None => Err( Error::new(ErrorKind::Other, "oops") )
};
ret
}
pub fn commit(&mut self) {
let file = self.file.take();
drop(file);
match std::fs::rename(&self.work_file_path, &self.file_path) {
Ok(_) => (),
Err(err) => panic!("commit failed: {}", err)
}
}
}
impl Drop for WorkFile {
fn drop(&mut self) {
if self.file.is_some() {
drop(self.file.take());
match std::fs::remove_file(&self.work_file_path) {
Ok(_) => (),
Err(err) => panic!("rollback failed: {}", err)
}
}
}
} | use std::io::{Error, ErrorKind};
| random_line_split |
PatternResolver.ts | import template = require("es6-template-string");
import { dirname, parse, relative } from "upath";
import { Progress, TextDocument } from "vscode";
import { ConversionType } from "../../Conversion/ConversionType";
import { Resources } from "../../Properties/Resources";
import { IProgressState } from "../Tasks/IProgressState";
import { IPatternContext } from "./IPatternContext";
/**
* Provides the functionality to resolve path-patterns.
*/
export class PatternResolver
{
/**
* The pattern to resolve.
*/
private pattern: string;
/**
* A component for reporting progress.
*/
private reporter: Progress<IProgressState>;
/**
* The variables inside the pattern.
*/
private variables: Array<string | number | symbol>;
/**
* Initializes a new instance of the {@link PatternResolver `PatternResolver`} class.
*
* @param pattern
* The pattern to resolve.
*
* @param reporter
* A component for reporting progress.
*/
public constructor(pattern: string, reporter?: Progress<IProgressState>)
|
/**
* Gets the pattern to resolve.
*/
public get Pattern(): string
{
return this.pattern;
}
/**
* Gets the variables inside the pattern.
*/
public get Variables(): ReadonlyArray<string | number | symbol>
{
return [...this.variables];
}
/**
* Gets a component for reporting progress.
*/
protected get Reporter(): Progress<IProgressState>
{
return this.reporter;
}
/**
* Resolves the pattern.
*
* @param workspaceFolder
* The path to the current workspace.
*
* @param document
* The document to create the destination-path for.
*
* @param type
* The type of the file to create the filename for.
*
* @returns
* The resolved pattern.
*/
public Resolve(workspaceFolder: string, document: TextDocument, type: ConversionType): string
{
let extension: string;
let context: IPatternContext;
let parsedPath = parse(document.fileName);
switch (type)
{
case ConversionType.SelfContainedHTML:
case ConversionType.HTML:
extension = "html";
break;
case ConversionType.JPEG:
extension = "jpg";
break;
case ConversionType.PNG:
extension = "png";
break;
case ConversionType.PDF:
extension = "pdf";
break;
}
context = {
filename: parsedPath.base,
basename: parsedPath.name,
extension,
dirname: document.isUntitled ? "." : relative(workspaceFolder, dirname(document.fileName)),
workspaceFolder
};
this.Reporter?.report(
{
message: Resources.Resources.Get("Progress.ResolveFileName")
});
let result = template(this.Pattern, context);
return result;
}
}
| {
let variables: Array<string | number | symbol> = [];
this.pattern = pattern;
this.reporter = reporter;
let context: IPatternContext = {
basename: "",
extension: "",
filename: "",
dirname: "",
workspaceFolder: ""
};
template(pattern, context);
for (let key of Object.keys(context))
{
delete context[key as keyof IPatternContext];
try
{
template(pattern, context);
}
catch
{
variables.push(key);
}
context[key as keyof IPatternContext] = "";
}
this.variables = variables;
} | identifier_body |
PatternResolver.ts | import template = require("es6-template-string");
import { dirname, parse, relative } from "upath";
import { Progress, TextDocument } from "vscode";
import { ConversionType } from "../../Conversion/ConversionType";
import { Resources } from "../../Properties/Resources";
import { IProgressState } from "../Tasks/IProgressState";
import { IPatternContext } from "./IPatternContext";
/**
* Provides the functionality to resolve path-patterns.
*/
export class PatternResolver
{
/**
* The pattern to resolve.
*/
private pattern: string;
/**
* A component for reporting progress.
*/
private reporter: Progress<IProgressState>;
/**
* The variables inside the pattern.
*/
private variables: Array<string | number | symbol>;
/**
* Initializes a new instance of the {@link PatternResolver `PatternResolver`} class.
*
* @param pattern
* The pattern to resolve.
*
* @param reporter
* A component for reporting progress.
*/
public | (pattern: string, reporter?: Progress<IProgressState>)
{
let variables: Array<string | number | symbol> = [];
this.pattern = pattern;
this.reporter = reporter;
let context: IPatternContext = {
basename: "",
extension: "",
filename: "",
dirname: "",
workspaceFolder: ""
};
template(pattern, context);
for (let key of Object.keys(context))
{
delete context[key as keyof IPatternContext];
try
{
template(pattern, context);
}
catch
{
variables.push(key);
}
context[key as keyof IPatternContext] = "";
}
this.variables = variables;
}
/**
* Gets the pattern to resolve.
*/
public get Pattern(): string
{
return this.pattern;
}
/**
* Gets the variables inside the pattern.
*/
public get Variables(): ReadonlyArray<string | number | symbol>
{
return [...this.variables];
}
/**
* Gets a component for reporting progress.
*/
protected get Reporter(): Progress<IProgressState>
{
return this.reporter;
}
/**
* Resolves the pattern.
*
* @param workspaceFolder
* The path to the current workspace.
*
* @param document
* The document to create the destination-path for.
*
* @param type
* The type of the file to create the filename for.
*
* @returns
* The resolved pattern.
*/
public Resolve(workspaceFolder: string, document: TextDocument, type: ConversionType): string
{
let extension: string;
let context: IPatternContext;
let parsedPath = parse(document.fileName);
switch (type)
{
case ConversionType.SelfContainedHTML:
case ConversionType.HTML:
extension = "html";
break;
case ConversionType.JPEG:
extension = "jpg";
break;
case ConversionType.PNG:
extension = "png";
break;
case ConversionType.PDF:
extension = "pdf";
break;
}
context = {
filename: parsedPath.base,
basename: parsedPath.name,
extension,
dirname: document.isUntitled ? "." : relative(workspaceFolder, dirname(document.fileName)),
workspaceFolder
};
this.Reporter?.report(
{
message: Resources.Resources.Get("Progress.ResolveFileName")
});
let result = template(this.Pattern, context);
return result;
}
}
| constructor | identifier_name |
PatternResolver.ts | import template = require("es6-template-string");
import { dirname, parse, relative } from "upath";
import { Progress, TextDocument } from "vscode";
import { ConversionType } from "../../Conversion/ConversionType";
import { Resources } from "../../Properties/Resources";
import { IProgressState } from "../Tasks/IProgressState";
import { IPatternContext } from "./IPatternContext";
/**
* Provides the functionality to resolve path-patterns.
*/
export class PatternResolver
{
/**
* The pattern to resolve.
*/
private pattern: string;
/**
* A component for reporting progress.
*/
private reporter: Progress<IProgressState>;
/** |
/**
* Initializes a new instance of the {@link PatternResolver `PatternResolver`} class.
*
* @param pattern
* The pattern to resolve.
*
* @param reporter
* A component for reporting progress.
*/
public constructor(pattern: string, reporter?: Progress<IProgressState>)
{
let variables: Array<string | number | symbol> = [];
this.pattern = pattern;
this.reporter = reporter;
let context: IPatternContext = {
basename: "",
extension: "",
filename: "",
dirname: "",
workspaceFolder: ""
};
template(pattern, context);
for (let key of Object.keys(context))
{
delete context[key as keyof IPatternContext];
try
{
template(pattern, context);
}
catch
{
variables.push(key);
}
context[key as keyof IPatternContext] = "";
}
this.variables = variables;
}
/**
* Gets the pattern to resolve.
*/
public get Pattern(): string
{
return this.pattern;
}
/**
* Gets the variables inside the pattern.
*/
public get Variables(): ReadonlyArray<string | number | symbol>
{
return [...this.variables];
}
/**
* Gets a component for reporting progress.
*/
protected get Reporter(): Progress<IProgressState>
{
return this.reporter;
}
/**
* Resolves the pattern.
*
* @param workspaceFolder
* The path to the current workspace.
*
* @param document
* The document to create the destination-path for.
*
* @param type
* The type of the file to create the filename for.
*
* @returns
* The resolved pattern.
*/
public Resolve(workspaceFolder: string, document: TextDocument, type: ConversionType): string
{
let extension: string;
let context: IPatternContext;
let parsedPath = parse(document.fileName);
switch (type)
{
case ConversionType.SelfContainedHTML:
case ConversionType.HTML:
extension = "html";
break;
case ConversionType.JPEG:
extension = "jpg";
break;
case ConversionType.PNG:
extension = "png";
break;
case ConversionType.PDF:
extension = "pdf";
break;
}
context = {
filename: parsedPath.base,
basename: parsedPath.name,
extension,
dirname: document.isUntitled ? "." : relative(workspaceFolder, dirname(document.fileName)),
workspaceFolder
};
this.Reporter?.report(
{
message: Resources.Resources.Get("Progress.ResolveFileName")
});
let result = template(this.Pattern, context);
return result;
}
} | * The variables inside the pattern.
*/
private variables: Array<string | number | symbol>; | random_line_split |
createContext.js | /*global define*/
define([
'Core/clone',
'Core/defaultValue',
'Core/defined',
'Core/queryToObject',
'Renderer/Context',
'Specs/createCanvas',
'Specs/createFrameState'
], function(
clone,
defaultValue,
defined,
queryToObject,
Context,
createCanvas,
createFrameState) {
"use strict";
function | (options, canvasWidth, canvasHeight) {
// clone options so we can change properties
options = clone(defaultValue(options, {}));
options.webgl = clone(defaultValue(options.webgl, {}));
options.webgl.alpha = defaultValue(options.webgl.alpha, true);
options.webgl.antialias = defaultValue(options.webgl.antialias, false);
var canvas = createCanvas(canvasWidth, canvasHeight);
var context = new Context(canvas, options);
var parameters = queryToObject(window.location.search.substring(1));
if (defined(parameters.webglValidation)) {
context.validateShaderProgram = true;
context.validateFramebuffer = true;
context.logShaderCompilation = true;
context.throwOnWebGLError = true;
}
var us = context.uniformState;
us.update(context, createFrameState());
return context;
}
return createContext;
}); | createContext | identifier_name |
createContext.js | /*global define*/
define([
'Core/clone',
'Core/defaultValue',
'Core/defined',
'Core/queryToObject',
'Renderer/Context',
'Specs/createCanvas',
'Specs/createFrameState'
], function(
clone,
defaultValue,
defined,
queryToObject,
Context,
createCanvas,
createFrameState) {
"use strict";
function createContext(options, canvasWidth, canvasHeight) |
return createContext;
}); | {
// clone options so we can change properties
options = clone(defaultValue(options, {}));
options.webgl = clone(defaultValue(options.webgl, {}));
options.webgl.alpha = defaultValue(options.webgl.alpha, true);
options.webgl.antialias = defaultValue(options.webgl.antialias, false);
var canvas = createCanvas(canvasWidth, canvasHeight);
var context = new Context(canvas, options);
var parameters = queryToObject(window.location.search.substring(1));
if (defined(parameters.webglValidation)) {
context.validateShaderProgram = true;
context.validateFramebuffer = true;
context.logShaderCompilation = true;
context.throwOnWebGLError = true;
}
var us = context.uniformState;
us.update(context, createFrameState());
return context;
} | identifier_body |
createContext.js | /*global define*/
define([
'Core/clone',
'Core/defaultValue',
'Core/defined',
'Core/queryToObject',
'Renderer/Context',
'Specs/createCanvas',
'Specs/createFrameState'
], function(
clone,
defaultValue,
defined,
queryToObject,
Context,
createCanvas,
createFrameState) {
"use strict"; | options = clone(defaultValue(options, {}));
options.webgl = clone(defaultValue(options.webgl, {}));
options.webgl.alpha = defaultValue(options.webgl.alpha, true);
options.webgl.antialias = defaultValue(options.webgl.antialias, false);
var canvas = createCanvas(canvasWidth, canvasHeight);
var context = new Context(canvas, options);
var parameters = queryToObject(window.location.search.substring(1));
if (defined(parameters.webglValidation)) {
context.validateShaderProgram = true;
context.validateFramebuffer = true;
context.logShaderCompilation = true;
context.throwOnWebGLError = true;
}
var us = context.uniformState;
us.update(context, createFrameState());
return context;
}
return createContext;
}); |
function createContext(options, canvasWidth, canvasHeight) {
// clone options so we can change properties | random_line_split |
createContext.js | /*global define*/
define([
'Core/clone',
'Core/defaultValue',
'Core/defined',
'Core/queryToObject',
'Renderer/Context',
'Specs/createCanvas',
'Specs/createFrameState'
], function(
clone,
defaultValue,
defined,
queryToObject,
Context,
createCanvas,
createFrameState) {
"use strict";
function createContext(options, canvasWidth, canvasHeight) {
// clone options so we can change properties
options = clone(defaultValue(options, {}));
options.webgl = clone(defaultValue(options.webgl, {}));
options.webgl.alpha = defaultValue(options.webgl.alpha, true);
options.webgl.antialias = defaultValue(options.webgl.antialias, false);
var canvas = createCanvas(canvasWidth, canvasHeight);
var context = new Context(canvas, options);
var parameters = queryToObject(window.location.search.substring(1));
if (defined(parameters.webglValidation)) |
var us = context.uniformState;
us.update(context, createFrameState());
return context;
}
return createContext;
}); | {
context.validateShaderProgram = true;
context.validateFramebuffer = true;
context.logShaderCompilation = true;
context.throwOnWebGLError = true;
} | conditional_block |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.