code stringlengths 3 1.05M | repo_name stringlengths 4 116 | path stringlengths 4 991 | language stringclasses 9 values | license stringclasses 15 values | size int32 3 1.05M |
|---|---|---|---|---|---|
// Generated on 2014-11-07 using generator-angular 0.9.8
'use strict';
// # Globbing
// for performance reasons we're only matching one level down:
// 'test/spec/{,*/}*.js'
// use this if you want to recursively match all subfolders:
// 'test/spec/**/*.js'
// Grunt build configuration for the Angular app: development server with
// livereload, Karma unit tests, and a usemin-based production build.
module.exports = function (grunt) {

  // Load grunt tasks automatically
  require('load-grunt-tasks')(grunt);

  // Time how long tasks take. Can help when optimizing build times
  require('time-grunt')(grunt);

  // Configurable paths for the application
  // (falls back to 'app' when bower.json declares no appPath)
  var appConfig = {
    app: require('./bower.json').appPath || 'app',
    dist: 'dist'
  };

  // Define the configuration for all the tasks
  grunt.initConfig({

    // Project settings
    yeoman: appConfig,

    // Watches files for changes and runs tasks based on the changed files
    watch: {
      bower: {
        files: ['bower.json'],
        tasks: ['wiredep']
      },
      js: {
        files: ['<%= yeoman.app %>/scripts/{,*/}*.js'],
        tasks: ['newer:jshint:all'],
        options: {
          livereload: '<%= connect.options.livereload %>'
        }
      },
      jsTest: {
        files: ['test/spec/{,*/}*.js'],
        tasks: ['newer:jshint:test', 'karma']
      },
      compass: {
        files: ['<%= yeoman.app %>/styles/{,*/}*.{scss,sass}'],
        tasks: ['compass:server', 'autoprefixer']
      },
      gruntfile: {
        files: ['Gruntfile.js']
      },
      livereload: {
        options: {
          livereload: '<%= connect.options.livereload %>'
        },
        files: [
          '<%= yeoman.app %>/{,*/}*.html',
          '.tmp/styles/{,*/}*.css',
          '<%= yeoman.app %>/images/{,*/}*.{png,jpg,jpeg,gif,webp,svg}'
        ]
      }
    },

    // The actual grunt server settings
    connect: {
      options: {
        port: 9000,
        // Change this to '0.0.0.0' to access the server from outside.
        hostname: 'localhost',
        livereload: 35729
      },
      livereload: {
        options: {
          open: true,
          middleware: function (connect) {
            return [
              connect.static('.tmp'),
              connect().use(
                '/bower_components',
                connect.static('./bower_components')
              ),
              connect.static(appConfig.app)
            ];
          }
        }
      },
      test: {
        options: {
          port: 9001,
          middleware: function (connect) {
            return [
              connect.static('.tmp'),
              connect.static('test'),
              connect().use(
                '/bower_components',
                connect.static('./bower_components')
              ),
              connect.static(appConfig.app)
            ];
          }
        }
      },
      dist: {
        options: {
          open: true,
          base: '<%= yeoman.dist %>'
        }
      }
    },

    // Make sure code styles are up to par and there are no obvious mistakes
    jshint: {
      options: {
        jshintrc: '.jshintrc',
        reporter: require('jshint-stylish')
      },
      all: {
        src: [
          'Gruntfile.js',
          '<%= yeoman.app %>/scripts/{,*/}*.js'
        ]
      },
      test: {
        options: {
          jshintrc: 'test/.jshintrc'
        },
        src: ['test/spec/{,*/}*.js']
      }
    },

    // Empties folders to start fresh
    clean: {
      dist: {
        files: [{
          dot: true,
          src: [
            '.tmp',
            '<%= yeoman.dist %>/{,*/}*',
            '!<%= yeoman.dist %>/.git*'
          ]
        }]
      },
      server: '.tmp'
    },

    // Add vendor prefixed styles
    autoprefixer: {
      options: {
        browsers: ['last 1 version']
      },
      dist: {
        files: [{
          expand: true,
          cwd: '.tmp/styles/',
          src: '{,*/}*.css',
          dest: '.tmp/styles/'
        }]
      }
    },

    // Automatically inject Bower components into the app
    wiredep: {
      app: {
        src: ['<%= yeoman.app %>/index.html'],
        ignorePath: /\.\.\//
      },
      sass: {
        src: ['<%= yeoman.app %>/styles/{,*/}*.{scss,sass}'],
        ignorePath: /(\.\.\/){1,2}bower_components\//
      }
    },

    // Compiles Sass to CSS and generates necessary files if requested
    compass: {
      options: {
        sassDir: '<%= yeoman.app %>/styles',
        cssDir: '.tmp/styles',
        generatedImagesDir: '.tmp/images/generated',
        imagesDir: '<%= yeoman.app %>/images',
        javascriptsDir: '<%= yeoman.app %>/scripts',
        fontsDir: '<%= yeoman.app %>/styles/fonts',
        importPath: './bower_components',
        httpImagesPath: '/images',
        httpGeneratedImagesPath: '/images/generated',
        httpFontsPath: '/styles/fonts',
        relativeAssets: false,
        assetCacheBuster: false,
        raw: 'Sass::Script::Number.precision = 10\n'
      },
      dist: {
        options: {
          generatedImagesDir: '<%= yeoman.dist %>/images/generated'
        }
      },
      server: {
        options: {
          debugInfo: true
        }
      }
    },

    // Renames files for browser caching purposes
    filerev: {
      dist: {
        src: [
          '<%= yeoman.dist %>/scripts/{,*/}*.js',
          '<%= yeoman.dist %>/styles/{,*/}*.css',
          '<%= yeoman.dist %>/images/{,*/}*.{png,jpg,jpeg,gif,webp,svg}',
          '<%= yeoman.dist %>/styles/fonts/*'
        ]
      }
    },

    // Reads HTML for usemin blocks to enable smart builds that automatically
    // concat, minify and revision files. Creates configurations in memory so
    // additional tasks can operate on them
    useminPrepare: {
      html: '<%= yeoman.app %>/index.html',
      options: {
        dest: '<%= yeoman.dist %>',
        flow: {
          html: {
            steps: {
              js: ['concat', 'uglifyjs'],
              css: ['cssmin']
            },
            post: {}
          }
        }
      }
    },

    // Performs rewrites based on filerev and the useminPrepare configuration
    usemin: {
      html: ['<%= yeoman.dist %>/{,*/}*.html'],
      css: ['<%= yeoman.dist %>/styles/{,*/}*.css'],
      options: {
        assetsDirs: ['<%= yeoman.dist %>','<%= yeoman.dist %>/images']
      }
    },

    // The following *-min tasks will produce minified files in the dist folder
    // By default, your `index.html`'s <!-- Usemin block --> will take care of
    // minification. These next options are pre-configured if you do not wish
    // to use the Usemin blocks.
    // cssmin: {
    //   dist: {
    //     files: {
    //       '<%= yeoman.dist %>/styles/main.css': [
    //         '.tmp/styles/{,*/}*.css'
    //       ]
    //     }
    //   }
    // },
    // uglify: {
    //   dist: {
    //     files: {
    //       '<%= yeoman.dist %>/scripts/scripts.js': [
    //         '<%= yeoman.dist %>/scripts/scripts.js'
    //       ]
    //     }
    //   }
    // },
    // concat: {
    //   dist: {}
    // },

    imagemin: {
      dist: {
        files: [{
          expand: true,
          cwd: '<%= yeoman.app %>/images',
          src: '{,*/}*.{png,jpg,jpeg,gif}',
          dest: '<%= yeoman.dist %>/images'
        }]
      }
    },

    svgmin: {
      dist: {
        files: [{
          expand: true,
          cwd: '<%= yeoman.app %>/images',
          src: '{,*/}*.svg',
          dest: '<%= yeoman.dist %>/images'
        }]
      }
    },

    htmlmin: {
      dist: {
        options: {
          collapseWhitespace: true,
          conservativeCollapse: true,
          collapseBooleanAttributes: true,
          removeCommentsFromCDATA: true,
          removeOptionalTags: true
        },
        files: [{
          expand: true,
          cwd: '<%= yeoman.dist %>',
          src: ['*.html', 'views/{,*/}*.html'],
          dest: '<%= yeoman.dist %>'
        }]
      }
    },

    // ng-annotate tries to make the code safe for minification automatically
    // by using the Angular long form for dependency injection.
    ngAnnotate: {
      dist: {
        files: [{
          expand: true,
          cwd: '.tmp/concat/scripts',
          src: ['*.js', '!oldieshim.js'],
          dest: '.tmp/concat/scripts'
        }]
      }
    },

    // Replace Google CDN references
    cdnify: {
      dist: {
        html: ['<%= yeoman.dist %>/*.html']
      }
    },

    // Copies remaining files to places other tasks can use
    copy: {
      dist: {
        files: [{
          expand: true,
          dot: true,
          cwd: '<%= yeoman.app %>',
          dest: '<%= yeoman.dist %>',
          src: [
            '*.{ico,png,txt}',
            '.htaccess',
            '*.html',
            'views/{,*/}*.html',
            'images/{,*/}*.{webp}',
            'fonts/*'
          ]
        }, {
          expand: true,
          cwd: '.tmp/images',
          dest: '<%= yeoman.dist %>/images',
          src: ['generated/*']
        }, {
          expand: true,
          cwd: '.',
          src: 'bower_components/bootstrap-sass-official/assets/fonts/bootstrap/*',
          dest: '<%= yeoman.dist %>'
        }]
      },
      styles: {
        expand: true,
        cwd: '<%= yeoman.app %>/styles',
        dest: '.tmp/styles/',
        src: '{,*/}*.css'
      }
    },

    // Run some tasks in parallel to speed up the build process
    concurrent: {
      server: [
        'compass:server'
      ],
      test: [
        'compass'
      ],
      dist: [
        'compass:dist',
        'imagemin',
        'svgmin'
      ]
    },

    // Test settings
    karma: {
      unit: {
        configFile: 'test/karma.conf.js',
        singleRun: true
      }
    }
  });

  grunt.registerTask('serve', 'Compile then start a connect web server', function (target) {
    if (target === 'dist') {
      // Serve the built app with no watch/livereload.
      return grunt.task.run(['build', 'connect:dist:keepalive']);
    }

    grunt.task.run([
      'clean:server',
      'wiredep',
      'concurrent:server',
      'autoprefixer',
      'connect:livereload',
      'watch'
    ]);
  });

  grunt.registerTask('server', 'DEPRECATED TASK. Use the "serve" task instead', function (target) {
    grunt.log.warn('The `server` task has been deprecated. Use `grunt serve` to start a server.');
    // NOTE(review): when no target is given this runs 'serve:undefined';
    // serve only special-cases 'dist', so it still falls through to the
    // default branch — works, but by accident.
    grunt.task.run(['serve:' + target]);
  });

  grunt.registerTask('test', [
    'clean:server',
    'concurrent:test',
    'autoprefixer',
    'connect:test',
    'karma'
  ]);

  grunt.registerTask('build', [
    'clean:dist',
    'wiredep',
    'useminPrepare',
    'concurrent:dist',
    'autoprefixer',
    'concat',
    'ngAnnotate',
    'copy:dist',
    'cdnify',
    'cssmin',
    'uglify',
    'filerev',
    'usemin',
    'htmlmin'
  ]);

  grunt.registerTask('default', [
    'newer:jshint',
    'test',
    'build'
  ]);
};
| MightyPixel/MatchUp | matchup-client/Gruntfile.js | JavaScript | mit | 10,761 |
import { Injectable } from '@angular/core';
@Injectable()
export class UuidService {
  constructor() { }

  /* tslint:disable:no-bitwise */
  /**
   * Generates a random version-4 style UUID string in the canonical
   * 8-4-4-4-12 hex layout (e.g. "xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx").
   * Note: backed by Math.random(), which is not cryptographically secure.
   */
  get() {
    const parts: string[] = [];
    for (let index = 0; index < 32; index++) {
      // Dash separators before the 9th, 13th, 17th and 21st hex digits.
      if (index === 8 || index === 12 || index === 16 || index === 20) {
        parts.push('-');
      }
      const nibble = Math.random() * 16 | 0;
      if (index === 12) {
        // Version digit of a v4 UUID is always '4'.
        parts.push((4).toString(16));
      } else if (index === 16) {
        // Variant digit: top bits forced to binary 10 (hex 8..b).
        parts.push((nibble & 3 | 8).toString(16));
      } else {
        parts.push(nibble.toString(16));
      }
    }
    return parts.join('');
  }
  /* tslint:enable:no-bitwise */
}
| fabriciofmsilva/order-app-angular | src/app/core/utils/uuid.service.ts | TypeScript | mit | 504 |
/**
 * To-do list model backed by a persistence adapter.
 *
 * @param {Object} storage - adapter exposing store(id, json) and load();
 *     load() must return a thenable resolving to an array of JSON strings.
 * @param {Object} $ - jQuery; kept for backward compatibility (no longer
 *     needed now that loadItems chains on storage.load() directly).
 */
function List(storage, $) {
    var items = [];
    var doneItems = [];
    var nextId = 0;

    this.storage = storage;
    this.toDo = items;
    this.done = doneItems;

    /**
     * Creates a new item with the next free id, persists it and returns it.
     * @param {string} text - the item's text
     * @returns {ListItemModel} the newly created item
     */
    this.add = function (text) {
        var newItem = new ListItemModel(nextId, text);
        items.push(newItem);
        storage.store(newItem.id, JSON.stringify(newItem));
        nextId++;
        return newItem;
    };

    /**
     * Marks the item with the given id as done and persists it.
     * @param {number} id - id of the item to mark done
     * @returns {Object|null} the updated item, or null when no item with
     *     that id exists (previously a missing id threw a TypeError).
     */
    this.markDone = function (id) {
        var item = get(id, items);
        if (item === null) {
            return null;
        }
        // NOTE(review): the item is not removed from the to-do array, so it
        // appears in both this.toDo and this.done — confirm this is intended.
        doneItems.push(item);
        item.done = true;
        item.dateDone = new Date();
        storage.store(item.id, JSON.stringify(item));
        return item;
    };

    /**
     * Loads all stored items into the to-do and done lists.
     * @param {boolean} areDone - unused; kept for interface compatibility.
     * @returns {Promise} resolves once the lists are populated. Storage
     *     failures now propagate (the previous $.Deferred wrapper swallowed
     *     rejections and left the returned promise pending forever).
     */
    this.loadItems = function (areDone) {
        // Chain directly instead of the deferred/explicit-construction
        // anti-pattern of wrapping an existing promise.
        return storage.load().then(populateLists);
    };

    // Parses each stored JSON string, distributes items between the to-do
    // and done lists, and advances nextId past the highest id seen.
    function populateLists(data) {
        for (var i = 0; i < data.length; i++) {
            var item = JSON.parse(data[i], reviver);
            if (item.id > nextId) {
                nextId = item.id;
            }
            if (item.done) {
                doneItems.push(item);
            } else {
                items.push(item);
            }
        }
        // increase nextId by 1 so that it is ready for use
        nextId++;
    }

    // Linear scan for the item with the given id; null when absent.
    // Deliberately uses loose equality so string ids (e.g. read from DOM
    // data attributes) still match numeric ids.
    function get(id, list) {
        for (var i = 0; i < list.length; i++) {
            if (list[i].id == id) {
                return list[i];
            }
        }
        return null;
    }
}
require "spec_helper"
# Specs for Tantot::Agent::Block: registering a bare block watcher on a
# model and asserting how/when the block is invoked with collected changes.
describe Tantot::Agent::Block do
  context "normal usage" do
    # Mutable holders so the watch block (evaluated in model-class scope)
    # can report back into the example's scope.
    let(:value) { {changes: 0} }
    let(:changes) { {obj: nil} }

    before do
      # Capture the lets in locals; the stub_model block runs in a
      # different binding where `value`/`changes` are not visible.
      v = value
      c = changes
      stub_model(:city) do
        watch {|changes| v[:changes] += 1; c[:obj] = changes}
      end
    end

    it "should call the block" do
      city = nil
      Tantot.manager.run do
        city = City.create!
      end
      # One invocation, carrying the id change for the created record.
      expect(value[:changes]).to eq(1)
      expect(changes[:obj]).to eq(Tantot::Changes::ById.new({city.id => {"id" => [nil, 1]}}))
    end

    it "should call a single time if multiple changes occur" do
      Tantot.manager.run do
        3.times { City.create! }
      end
      # Changes are batched: a single callback with all three ids.
      expect(value[:changes]).to eq(1)
      expect(changes[:obj]).to eq(Tantot::Changes::ById.new({1=>{"id"=>[nil, 1]}, 2=>{"id"=>[nil, 2]}, 3=>{"id"=>[nil, 3]}}))
    end
  end

  context "validations" do
    it "should prevent registering twice with the same options" do
      expect do
        stub_model(:city) do
          watch {}
          watch {}
        end
      end.to raise_error(Tantot::MultipleWatchesProhibited, /Can't have multiple/)
    end

    it "should allow registering twice with different options" do
      expect do
        stub_model(:city) do
          watch {}
          # Different watch options (attribute filter) make this legal.
          watch(:name) {}
        end
      end.not_to raise_error
    end
  end
end
| petalmd/tantot | spec/collector/block_spec.rb | Ruby | mit | 1,376 |
/*
The MIT License (MIT)
Copyright (c) 2016 EMC Corporation
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/
package com.emc.ecs.metadata.dao.elasticsearch;
import java.io.IOException;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Map.Entry;
import org.elasticsearch.action.ActionFuture;
import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest;
import org.elasticsearch.action.admin.indices.delete.DeleteIndexResponse;
import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsRequest;
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingResponse;
import org.elasticsearch.action.bulk.BulkRequestBuilder;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.client.transport.TransportClient;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.Settings.Builder;
import org.elasticsearch.common.transport.InetSocketTransportAddress;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.transport.ReceiveTimeoutTransportException;
import org.elasticsearch.transport.client.PreBuiltTransportClient;
import org.elasticsearch.xpack.client.PreBuiltXPackTransportClient;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.emc.ecs.metadata.dao.EcsCollectionType;
import com.emc.ecs.metadata.dao.ObjectDAO;
import com.emc.ecs.metadata.utils.Constants;
import com.emc.object.s3.bean.AbstractVersion;
import com.emc.object.s3.bean.DeleteMarker;
import com.emc.object.s3.bean.ListObjectsResult;
import com.emc.object.s3.bean.ListVersionsResult;
import com.emc.object.s3.bean.QueryMetadata;
import com.emc.object.s3.bean.QueryObject;
import com.emc.object.s3.bean.QueryObjectsResult;
import com.emc.object.s3.bean.S3Object;
import com.emc.object.s3.bean.Version;
/**
 * Elasticsearch-backed {@code ObjectDAO}: bulk-indexes ECS S3 object and
 * object-version metadata into per-collection-run indexes via the
 * transport client.
 */
public class ElasticS3ObjectDAO implements ObjectDAO {

    // Transport client settings keys.
    private final static String CLIENT_SNIFFING_CONFIG = "client.transport.sniff";
    private final static String CLIENT_TRANSPORT_PING_TIMEOUT = "client.transport.ping_timeout";
    private final static String CLIENT_CLUSTER_NAME_CONFIG = "cluster.name";

    // Index name prefixes and mapping type names.
    public final static String S3_OBJECT_INDEX_NAME = "ecs-s3-object";
    public final static String S3_OBJECT_VERSION_INDEX_NAME = "ecs-object-version";
    public final static String S3_OBJECT_INDEX_TYPE = "object-info";
    public final static String S3_OBJECT_VERSION_INDEX_TYPE = "object-version-info";

    // Document field names used in the mappings below.
    public final static String COLLECTION_TIME = "collection_time";
    public final static String ANALYZED_TAG = "_analyzed";
    public final static String NOT_ANALYZED_INDEX = "not_analyzed";
    public final static String ANALYZED_INDEX = "analyzed";
    public final static String LAST_MODIFIED_TAG = "last_modified";
    public final static String SIZE_TAG = "size";
    public final static String KEY_TAG = "key";
    public final static String OWNER_ID_TAG = "owner_id";
    public final static String OWNER_NAME_TAG = "owner_name";
    public final static String NAMESPACE_TAG = "namespace";
    public final static String BUCKET_TAG = "bucket";
    public final static String ETAG_TAG = "e_tag";
    public final static String VERSION_ID_TAG = "version_id";
    public final static String IS_LATEST_TAG = "is_latest";
    public final static String CUSTOM_GID_TAG = "x-amz-meta-x-emc-posix-group-owner-name";
    public final static String CUSTOM_UID_TAG = "x-amz-meta-x-emc-posix-owner-name";
    public final static String CUSTOM_MODIFIED_TIME_TAG = "mtime";

    //=========================
    // Private members
    //=========================

    private TransportClient elasticClient;
    private static Logger LOGGER = LoggerFactory.getLogger(ElasticS3ObjectDAO.class);

    private static final String DATA_DATE_PATTERN = "yyyy-MM-dd";
    private static final String DATA_DATE_PATTERN_SEC = "yyyy-MM-dd HH:mm:ss";
    // NOTE(review): SimpleDateFormat is not thread-safe; these shared static
    // instances are fine only if the DAO is used single-threaded — confirm.
    private static final SimpleDateFormat DATA_DATE_FORMAT = new SimpleDateFormat(DATA_DATE_PATTERN);
    private static final SimpleDateFormat DATA_DATE_FORMAT_SEC = new SimpleDateFormat(DATA_DATE_PATTERN_SEC);

    // Concrete (dated) index names; populated by the init*Index methods and
    // read by the insert methods.
    private static String s3ObjectVersionIndexDayName;
    private static String s3ObjectIndexDayName;

    private ElasticDAOConfig config;

    //=========================
    // Public methods
    //=========================
    /**
     * Builds the Elasticsearch transport client from the supplied
     * configuration (hosts, port, optional cluster name and x-pack
     * security/TLS settings) with cluster sniffing enabled.
     *
     * @param config - Elasticsearch DAO configuration
     */
    public ElasticS3ObjectDAO( ElasticDAOConfig config ) {

        try {
            this.config = config;

            Builder builder = Settings.builder();

            // Check for new hosts within the cluster
            builder.put(CLIENT_SNIFFING_CONFIG, true);
            builder.put(CLIENT_TRANSPORT_PING_TIMEOUT, "15s");

            // Optional x-pack security: transport user, TLS key material.
            if (config.getXpackUser() != null) {
                builder.put(Constants.XPACK_SECURITY_USER, config.getXpackUser() + ":" + config.getXpackPassword());
                builder.put(Constants.XPACK_SSL_KEY, config.getXpackSslKey());
                builder.put(Constants.XPACK_SSL_CERTIFICATE, config.getXpackSslCertificate());
                builder.put(Constants.XPACK_SSL_CERTIFICATE_AUTH, config.getXpackSslCertificateAuthorities());
                builder.put(Constants.XPACK_SECURITY_TRANPORT_ENABLED, "true");
            }

            // specify cluster name
            if( config.getClusterName() != null ) {
                builder.put(CLIENT_CLUSTER_NAME_CONFIG, config.getClusterName());
            }

            Settings settings = builder.build();

            // create client: x-pack-aware transport client when security is
            // configured, plain prebuilt transport client otherwise
            if (config.getXpackUser() != null) {
                elasticClient = new PreBuiltXPackTransportClient(settings);
            } else {
                elasticClient = new PreBuiltTransportClient(settings);
            }

            // add hosts
            for( String elasticHost : config.getHosts()) {
                elasticClient.addTransportAddress(new InetSocketTransportAddress(InetAddress.getByName(elasticHost), config.getPort()));
            }

        } catch (UnknownHostException e) {
            throw new RuntimeException(e.getLocalizedMessage());
        } catch (ReceiveTimeoutTransportException re) {
            // NOTE(review): System.exit in a constructor kills the whole JVM;
            // consider rethrowing so the caller can decide — confirm intent.
            LOGGER.error("An error occured while connecting to ElasticSearch Cluster ", re);
            System.exit(1);
        }
    }
/**
* Init indexes
* @param collectionTime - collection time
*/
@Override
public void initIndexes(Date collectionTime) {
// init S3 Object Index
if( config.getCollectionType().equals(EcsCollectionType.object) ) {
initS3ObjectIndex( collectionTime );
}
// init S3 Object Version Index
if( config.getCollectionType().equals(EcsCollectionType.object_version) ) {
initS3ObjectVersionIndex( collectionTime );
}
}
/**
* {@inheritDoc}
*/
@Override
public void insert(ListObjectsResult listObjectsResult, String namespace, String bucket, Date collectionTime) {
if( listObjectsResult == null ||
listObjectsResult.getObjects() == null ||
listObjectsResult.getObjects().isEmpty() ) {
// nothing to insert
return;
}
BulkRequestBuilder requestBuilder = elasticClient.prepareBulk();
// Generate JSON for object buckets info
for( S3Object s3Object : listObjectsResult.getObjects() ) {
XContentBuilder s3ObjectBuilder = toJsonFormat(s3Object, namespace, bucket, collectionTime);
IndexRequestBuilder request = elasticClient.prepareIndex()
.setIndex(s3ObjectIndexDayName)
.setType(S3_OBJECT_INDEX_TYPE)
.setSource(s3ObjectBuilder);
requestBuilder.add(request);
}
BulkResponse bulkResponse = requestBuilder.execute().actionGet();
int items = bulkResponse.getItems().length;
LOGGER.info( "Took " + bulkResponse.getTookInMillis() + " ms to index [" + items + "] items in Elasticsearch " + "index: " +
s3ObjectIndexDayName + " index type: " + S3_OBJECT_INDEX_TYPE );
if( bulkResponse.hasFailures() ) {
LOGGER.error( "Failure(s) occured while items in Elasticsearch " + "index: " +
s3ObjectIndexDayName + " index type: " + S3_OBJECT_INDEX_TYPE );
}
}
/**
* {@inheritDoc}
*/
@Override
public void insert( QueryObjectsResult queryObjectsResult, String namespace,
String bucketName, Date collectionTime ) {
if( queryObjectsResult == null ||
queryObjectsResult.getObjects() == null ||
queryObjectsResult.getObjects().isEmpty() ) {
// nothing to insert
return;
}
BulkRequestBuilder requestBuilder = elasticClient.prepareBulk();
// Generate JSON for object buckets info
for( QueryObject queryObject : queryObjectsResult.getObjects() ) {
XContentBuilder s3ObjectBuilder = toJsonFormat(queryObject, namespace, bucketName, collectionTime);
IndexRequestBuilder request = elasticClient.prepareIndex()
.setIndex(s3ObjectIndexDayName)
.setType(S3_OBJECT_INDEX_TYPE)
.setSource(s3ObjectBuilder);
requestBuilder.add(request);
}
BulkResponse bulkResponse = requestBuilder.execute().actionGet();
int items = bulkResponse.getItems().length;
LOGGER.info( "Took " + bulkResponse.getTookInMillis() + " ms to index [" + items + "] items in Elasticsearch " + "index: " +
s3ObjectIndexDayName + " index type: " + S3_OBJECT_INDEX_TYPE );
if( bulkResponse.hasFailures() ) {
LOGGER.error( "Failure(s) occured while items in Elasticsearch " + "index: " +
s3ObjectIndexDayName + " index type: " + S3_OBJECT_INDEX_TYPE );
}
}
/**
* {@inheritDoc}
*/
@Override
public void insert(ListVersionsResult listVersionsResult, String namespace,
String bucketName, Date collectionTime) {
if( listVersionsResult == null ||
listVersionsResult.getVersions() == null ||
listVersionsResult.getVersions().isEmpty() ) {
// nothing to insert
return;
}
BulkRequestBuilder requestBuilder = elasticClient.prepareBulk();
// Generate JSON for object version info
for( AbstractVersion abstractVersion : listVersionsResult.getVersions() ) {
if(abstractVersion instanceof Version) {
XContentBuilder s3ObjectVersionBuilder = toJsonFormat((Version)abstractVersion, namespace, bucketName, collectionTime);
IndexRequestBuilder request = elasticClient.prepareIndex()
.setIndex(s3ObjectVersionIndexDayName)
.setType(S3_OBJECT_VERSION_INDEX_TYPE)
.setSource(s3ObjectVersionBuilder);
requestBuilder.add(request);
} else if(abstractVersion instanceof DeleteMarker) {
XContentBuilder s3ObjectVersionBuilder = toJsonFormat((DeleteMarker)abstractVersion, namespace, bucketName, collectionTime);
IndexRequestBuilder request = elasticClient.prepareIndex()
.setIndex(s3ObjectVersionIndexDayName)
.setType(S3_OBJECT_VERSION_INDEX_TYPE)
.setSource(s3ObjectVersionBuilder);
requestBuilder.add(request);
}
}
BulkResponse bulkResponse = requestBuilder.execute().actionGet();
int items = bulkResponse.getItems().length;
LOGGER.info( "Took " + bulkResponse.getTookInMillis() + " ms to index [" + items + "] items in Elasticsearch " + "index: " +
s3ObjectVersionIndexDayName + " index type: " + S3_OBJECT_VERSION_INDEX_TYPE );
if( bulkResponse.hasFailures() ) {
LOGGER.error( "Failure(s) occured while items in Elasticsearch " + "index: " +
s3ObjectVersionIndexDayName + " index type: " + S3_OBJECT_VERSION_INDEX_TYPE );
}
}
/**
* {@inheritDoc}
*/
@Override
public Long purgeOldData(ObjectDataType type, Date thresholdDate) {
switch(type) {
case object:
// Purge old S3 Objects
ElasticIndexCleaner.truncateOldIndexes( elasticClient, thresholdDate,
S3_OBJECT_INDEX_NAME, S3_OBJECT_INDEX_TYPE);
return 0L;
case object_versions:
// Purge old S3 Object Versions
ElasticIndexCleaner.truncateOldIndexes( elasticClient, thresholdDate,
S3_OBJECT_VERSION_INDEX_NAME,
S3_OBJECT_VERSION_INDEX_TYPE );
return 0L;
default:
return 0L;
}
}
    /**
     * Converts S3 object data to JSON format for Elasticsearch.
     *
     * Convenience overload: delegates to the private builder with a null
     * XContentBuilder, so a fresh builder is created internally.
     *
     * @param s3Object - S3 Object
     * @param namespace - namespace
     * @param bucket - bucket name
     * @param collectionTime - collection time
     * @return XContentBuilder holding the JSON document
     */
    public static XContentBuilder toJsonFormat( S3Object s3Object, String namespace, String bucket, Date collectionTime ) {
        return toJsonFormat(s3Object, namespace, bucket,collectionTime, null);
    }
    /**
     * Converts object version data to JSON format for Elasticsearch.
     *
     * Convenience overload: delegates to the five-argument builder
     * (declared elsewhere in this class) with a null XContentBuilder.
     *
     * @param version - version entry
     * @param namespace - namespace
     * @param bucketName - bucket name
     * @param collectionTime - collection time
     * @return XContentBuilder holding the JSON document
     */
    public XContentBuilder toJsonFormat(Version version,
            String namespace, String bucketName, Date collectionTime) {
        return toJsonFormat( version, namespace, bucketName, collectionTime, null);
    }
    /**
     * Converts delete-marker data to JSON format for Elasticsearch.
     *
     * Convenience overload: delegates to the five-argument builder
     * (declared elsewhere in this class) with a null XContentBuilder.
     *
     * @param deleteMarker - delete marker entry
     * @param namespace - namespace
     * @param bucketName - bucket name
     * @param collectionTime - collection time
     * @return XContentBuilder holding the JSON document
     */
    public XContentBuilder toJsonFormat(DeleteMarker deleteMarker,
            String namespace, String bucketName, Date collectionTime) {
        return toJsonFormat( deleteMarker, namespace, bucketName, collectionTime, null);
    }
    /**
     * Converts metadata-query object data to JSON format for Elasticsearch.
     *
     * Convenience overload: delegates to the private builder with a null
     * XContentBuilder, so a fresh builder is created internally.
     *
     * @param s3Object - queried S3 object
     * @param namespace - namespace
     * @param bucket - bucket name
     * @param collectionTime - collection time
     * @return XContentBuilder holding the JSON document
     */
    public static XContentBuilder toJsonFormat( QueryObject s3Object, String namespace, String bucket, Date collectionTime ) {
        return toJsonFormat(s3Object, namespace, bucket,collectionTime, null);
    }
//=======================
// Private methods
//=======================
/**
* Init Object index
*/
private void initS3ObjectIndex( Date collectionTime ) {
String collectionDayString = DATA_DATE_FORMAT_SEC.format(collectionTime);
s3ObjectIndexDayName = S3_OBJECT_INDEX_NAME + "-" + collectionDayString.replaceAll(" ", "-");
if (elasticClient
.admin()
.indices()
.exists(new IndicesExistsRequest(s3ObjectIndexDayName))
.actionGet()
.isExists()) {
// Index already exists need to truncate it and recreate it
DeleteIndexRequest deleteIndexRequest = new DeleteIndexRequest(s3ObjectIndexDayName);
ActionFuture<DeleteIndexResponse> futureResult = elasticClient.admin().indices().delete(deleteIndexRequest);
// Wait until deletion is done
while( !futureResult.isDone() ) {
try {
Thread.sleep(100);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}
elasticClient.admin().indices().create(new CreateIndexRequest(s3ObjectIndexDayName)).actionGet();
try {
PutMappingResponse putMappingResponse = elasticClient.admin().indices()
.preparePutMapping(s3ObjectIndexDayName)
.setType(S3_OBJECT_INDEX_TYPE)
.setSource(XContentFactory.jsonBuilder().prettyPrint()
.startObject()
.startObject(S3_OBJECT_INDEX_TYPE)
// ========================================
// Define how the basic fields are defined
// ========================================
.startObject("properties")
// LAST_MODIFIED_TAG
.startObject( LAST_MODIFIED_TAG ).field("type", "date")
.field("format", "strict_date_optional_time||epoch_millis").endObject()
// SIZE_TAG
.startObject( SIZE_TAG ).field("type", "string").field("type", "long").endObject()
// KEY_TAG
.startObject( KEY_TAG ).field("type", "string")
.field("index", NOT_ANALYZED_INDEX).endObject()
// KEY_TAG Analyzed
.startObject( KEY_TAG + ANALYZED_TAG).field("type", "string")
.field("index", ANALYZED_INDEX).endObject()
.startObject( ETAG_TAG ).field("type", "string")
.field("index", NOT_ANALYZED_INDEX).endObject()
// NAMESPACE_TAG
.startObject( NAMESPACE_TAG ).field("type", "string")
.field("index", NOT_ANALYZED_INDEX).endObject()
// BUCKET_TAG
.startObject( BUCKET_TAG ).field("type", "string")
.field("index", NOT_ANALYZED_INDEX).endObject()
// OWNER_ID_TAG
.startObject( OWNER_ID_TAG ).field("type", "string")
.field("index", NOT_ANALYZED_INDEX).endObject()
// OWNER_NAME_TAG
.startObject( OWNER_NAME_TAG ).field("type", "string")
.field("index", NOT_ANALYZED_INDEX).endObject()
// COLLECTION_TIME
.startObject( COLLECTION_TIME ).field("type", "date")
.field("format", "strict_date_optional_time||epoch_millis||date_time_no_millis").endObject()
// CUSTOM_GID_TAG
.startObject( CUSTOM_GID_TAG ).field("type", "string")
.field("index", NOT_ANALYZED_INDEX).endObject()
// CUSTOM_UID_TAG
.startObject( CUSTOM_UID_TAG ).field("type", "string")
.field("index", NOT_ANALYZED_INDEX).endObject()
// CUSTOM_MODIFIED_TIME_TAG
.startObject( CUSTOM_MODIFIED_TIME_TAG ).field("type", "string")
.field("index", NOT_ANALYZED_INDEX).endObject()
.endObject()
// =================================
// Dynamic fields won't be analyzed
// =================================
.startArray("dynamic_templates")
.startObject()
.startObject("notanalyzed")
.field("match", "*")
.field("match_mapping_type", "string")
.startObject( "mapping" ).field("type", "string")
.field("index", NOT_ANALYZED_INDEX).endObject()
.endObject()
.endObject()
.endArray()
.endObject()
.endObject() )
.execute().actionGet();
if (putMappingResponse.isAcknowledged()) {
LOGGER.info("Index Created: " + s3ObjectIndexDayName);
} else {
LOGGER.error("Index {} did not exist. " +
"While attempting to create the index from stored ElasticSearch " +
"Templates we were unable to get an acknowledgement.", s3ObjectIndexDayName);
LOGGER.error("Error Message: {}", putMappingResponse.toString());
throw new RuntimeException("Unable to create index " + s3ObjectIndexDayName);
}
} catch (IOException e) {
throw new RuntimeException( "Unable to create index " +
s3ObjectIndexDayName + " " + e.getMessage() );
}
}
    /**
     * Converts S3 object data into a JSON document for indexing.
     *
     * @param s3Object - S3 Object
     * @param namespace - namespace
     * @param bucket - bucket
     * @param collectionTime - collection time
     * @param builder - existing builder to append to, or null to create one
     * @return XContentBuilder holding the JSON document
     */
    private static XContentBuilder toJsonFormat( S3Object s3Object,
            String namespace,
            String bucket,
            Date collectionTime,
            XContentBuilder builder) {

        try {
            if(builder == null) {
                builder = XContentFactory.jsonBuilder();
            }

            // add relevant fields; owner id/name fall back to null when the
            // object carries no owner information
            builder = builder.startObject()
                    .field( LAST_MODIFIED_TAG, s3Object.getLastModified() )
                    .field( SIZE_TAG, s3Object.getSize() )
                    .field( KEY_TAG, s3Object.getKey() )
                    .field( KEY_TAG + ANALYZED_TAG, s3Object.getKey() )
                    .field( ETAG_TAG , s3Object.getETag())
                    .field( NAMESPACE_TAG, namespace )
                    .field( BUCKET_TAG, bucket )
                    .field( OWNER_ID_TAG, (s3Object.getOwner() != null && s3Object.getOwner().getId() != null)
                            ? s3Object.getOwner().getId() : null )
                    .field( OWNER_NAME_TAG, (s3Object.getOwner() != null && s3Object.getOwner().getDisplayName() != null)
                            ? s3Object.getOwner().getDisplayName() : null )
                    .field( COLLECTION_TIME, collectionTime )
                    .endObject();

        } catch (IOException e) {
            throw new RuntimeException(e.getLocalizedMessage());
        }
        return builder;
    }
/**
* Init Object version index
*/
private void initS3ObjectVersionIndex( Date collectionTime ) {
    String collectionDayString = DATA_DATE_FORMAT.format(collectionTime);
    s3ObjectVersionIndexDayName = S3_OBJECT_VERSION_INDEX_NAME + "-" + collectionDayString;

    // If an index already exists for this collection day, drop it so the
    // collection run starts from a clean slate.
    if (elasticClient
            .admin()
            .indices()
            .exists(new IndicesExistsRequest(s3ObjectVersionIndexDayName))
            .actionGet()
            .isExists()) {
        DeleteIndexRequest deleteIndexRequest = new DeleteIndexRequest(s3ObjectVersionIndexDayName);
        // actionGet() blocks until the deletion completes and surfaces any
        // failure. This replaces the former sleep/poll loop, which swallowed
        // InterruptedException and silently ignored deletion errors.
        elasticClient.admin().indices().delete(deleteIndexRequest).actionGet();
    }

    elasticClient.admin().indices().create(new CreateIndexRequest(s3ObjectVersionIndexDayName)).actionGet();

    try {
        PutMappingResponse putMappingResponse = elasticClient.admin().indices()
            .preparePutMapping(s3ObjectVersionIndexDayName)
            .setType(S3_OBJECT_VERSION_INDEX_TYPE)
            .setSource(XContentFactory.jsonBuilder().prettyPrint()
                .startObject()
                    .startObject(S3_OBJECT_VERSION_INDEX_TYPE)
                        // ========================================
                        // Define how the basic fields are defined
                        // ========================================
                        .startObject("properties")
                            // LAST_MODIFIED_TAG
                            .startObject( LAST_MODIFIED_TAG ).field("type", "date")
                                .field("format", "strict_date_optional_time||epoch_millis").endObject()
                            // SIZE_TAG: object sizes are numeric. The previous mapping
                            // emitted a duplicate "type" key ("string" then "long"),
                            // producing invalid mapping JSON.
                            .startObject( SIZE_TAG ).field("type", "long").endObject()
                            // KEY_TAG
                            .startObject( KEY_TAG ).field("type", "string")
                                .field("index", NOT_ANALYZED_INDEX).endObject()
                            // KEY_TAG Analyzed
                            .startObject( KEY_TAG + ANALYZED_TAG).field("type", "string")
                                .field("index", ANALYZED_INDEX).endObject()
                            // ETAG_TAG
                            .startObject( ETAG_TAG ).field("type", "string")
                                .field("index", NOT_ANALYZED_INDEX).endObject()
                            // NAMESPACE_TAG
                            .startObject( NAMESPACE_TAG ).field("type", "string")
                                .field("index", NOT_ANALYZED_INDEX).endObject()
                            // BUCKET_TAG
                            .startObject( BUCKET_TAG ).field("type", "string")
                                .field("index", NOT_ANALYZED_INDEX).endObject()
                            // VERSION_ID_TAG
                            .startObject( VERSION_ID_TAG ).field("type", "string")
                                .field("index", NOT_ANALYZED_INDEX).endObject()
                            // IS_LATEST_TAG
                            .startObject( IS_LATEST_TAG ).field("type", "boolean")
                                .field("index", NOT_ANALYZED_INDEX).endObject()
                            // OWNER_ID_TAG
                            .startObject( OWNER_ID_TAG ).field("type", "string")
                                .field("index", NOT_ANALYZED_INDEX).endObject()
                            // OWNER_NAME_TAG
                            .startObject( OWNER_NAME_TAG ).field("type", "string")
                                .field("index", NOT_ANALYZED_INDEX).endObject()
                            // COLLECTION_TIME
                            .startObject( COLLECTION_TIME ).field("type", "date")
                                .field("format", "strict_date_optional_time||epoch_millis").endObject()
                        .endObject()
                        // =================================
                        // Dynamic fields won't be analyzed
                        // =================================
                        .startArray("dynamic_templates")
                            .startObject()
                                .startObject("notanalyzed")
                                    .field("match", "*")
                                    .field("match_mapping_type", "string")
                                    .startObject( "mapping" ).field("type", "string")
                                        .field("index", NOT_ANALYZED_INDEX).endObject()
                                .endObject()
                            .endObject()
                        .endArray()
                    .endObject()
                .endObject() )
            .execute().actionGet();

        if (putMappingResponse.isAcknowledged()) {
            LOGGER.info("Index Created: " + s3ObjectVersionIndexDayName);
        } else {
            LOGGER.error("Index {} did not exist. " +
                         "While attempting to create the index from stored ElasticSearch " +
                         "Templates we were unable to get an acknowledgement.", s3ObjectVersionIndexDayName);
            LOGGER.error("Error Message: {}", putMappingResponse.toString());
            throw new RuntimeException("Unable to create index " + s3ObjectVersionIndexDayName);
        }
    } catch (IOException e) {
        // Preserve the original cause so the stack trace is not lost.
        throw new RuntimeException( "Unable to create index " +
                                    s3ObjectVersionIndexDayName +
                                    " " + e.getMessage(), e );
    }
}
/**
 * Converts S3 object version data to its JSON document representation.
 *
 * @param version - version listing entry to convert
 * @param namespace - namespace the object belongs to
 * @param bucket - bucket the object belongs to
 * @param collectionTime - time at which the data was collected
 * @param builder - builder to append to; a fresh one is created when {@code null}
 * @return XContentBuilder holding the generated JSON object
 */
private static XContentBuilder toJsonFormat( Version version,
                                             String namespace,
                                             String bucket,
                                             Date collectionTime,
                                             XContentBuilder builder) {
    try {
        if(builder == null) {
            builder = XContentFactory.jsonBuilder();
        }
        // add relevant fields; the Owner is optional on a version listing,
        // so its id/name are emitted as null when absent.
        builder = builder.startObject()
            .field( LAST_MODIFIED_TAG, version.getLastModified() )
            .field( SIZE_TAG, version.getSize() )
            .field( KEY_TAG, version.getKey() )
            .field( KEY_TAG + ANALYZED_TAG, version.getKey() )
            .field( ETAG_TAG , version.getETag())
            .field( NAMESPACE_TAG, namespace )
            .field( BUCKET_TAG, bucket )
            .field( VERSION_ID_TAG, version.getVersionId() )
            .field( IS_LATEST_TAG, version.isLatest())
            .field( OWNER_ID_TAG, version.getOwner() != null
                        ? version.getOwner().getId() : null )
            .field( OWNER_NAME_TAG, version.getOwner() != null
                        ? version.getOwner().getDisplayName() : null )
            .field( COLLECTION_TIME, collectionTime )
            .endObject();
    } catch (IOException e) {
        // Keep the original exception as cause instead of discarding the stack trace.
        throw new RuntimeException(e.getLocalizedMessage(), e);
    }
    return builder;
}
/**
 * Converts S3 delete-marker data to its JSON document representation.
 *
 * @param deleteMarker - delete marker listing entry to convert
 * @param namespace - namespace the object belongs to
 * @param bucket - bucket the object belongs to
 * @param collectionTime - time at which the data was collected
 * @param builder - builder to append to; a fresh one is created when {@code null}
 * @return XContentBuilder holding the generated JSON object
 */
private static XContentBuilder toJsonFormat( DeleteMarker deleteMarker,
                                             String namespace,
                                             String bucket,
                                             Date collectionTime,
                                             XContentBuilder builder) {
    try {
        if(builder == null) {
            builder = XContentFactory.jsonBuilder();
        }
        // add relevant fields; delete markers carry no size/etag, and the
        // Owner is optional, so its id/name are emitted as null when absent.
        builder = builder.startObject()
            .field( LAST_MODIFIED_TAG, deleteMarker.getLastModified() )
            .field( KEY_TAG, deleteMarker.getKey() )
            .field( KEY_TAG + ANALYZED_TAG, deleteMarker.getKey() )
            .field( NAMESPACE_TAG, namespace )
            .field( BUCKET_TAG, bucket )
            .field( VERSION_ID_TAG, deleteMarker.getVersionId() )
            .field( IS_LATEST_TAG, deleteMarker.isLatest())
            .field( OWNER_ID_TAG, deleteMarker.getOwner() != null
                        ? deleteMarker.getOwner().getId() : null )
            .field( OWNER_NAME_TAG, deleteMarker.getOwner() != null
                        ? deleteMarker.getOwner().getDisplayName() : null )
            .field( COLLECTION_TIME, collectionTime )
            .endObject();
    } catch (IOException e) {
        // Keep the original exception as cause instead of discarding the stack trace.
        throw new RuntimeException(e.getLocalizedMessage(), e);
    }
    return builder;
}
/**
 * Converts Query Object data into its JSON document representation.
 *
 * @param queryObject - query object to convert
 * @param namespace - namespace the object belongs to
 * @param bucket - bucket the object belongs to
 * @param collectionTime - time at which the data was collected
 * @param builder - builder to append to; a fresh one is created when {@code null}
 * @return XContentBuilder holding the generated JSON object
 */
private static XContentBuilder toJsonFormat( QueryObject queryObject,
                                             String namespace,
                                             String bucket,
                                             Date collectionTime,
                                             XContentBuilder builder) {
    try {
        if(builder == null) {
            builder = XContentFactory.jsonBuilder();
        }
        // add known basic fields
        builder = builder.startObject()
            .field( KEY_TAG, queryObject.getObjectName() )
            .field( KEY_TAG + ANALYZED_TAG, queryObject.getObjectName() )
            .field( ETAG_TAG , queryObject.getObjectId())
            .field( NAMESPACE_TAG, namespace )
            .field( BUCKET_TAG, bucket )
            .field( COLLECTION_TIME, collectionTime );
        // Add custom MS key/values as dynamic fields (mapped as not-analyzed
        // strings by the index's dynamic template).
        for( QueryMetadata metadata : queryObject.getQueryMds() ) {
            for( Entry<String, String> entry : metadata.getMdMap().entrySet() ) {
                builder.field(entry.getKey(), entry.getValue());
            }
        }
        builder.endObject();
    } catch (IOException e) {
        // Keep the original exception as cause instead of discarding the stack trace.
        throw new RuntimeException(e.getLocalizedMessage(), e);
    }
    return builder;
}
}
| carone1/ecs-dashboard | ecs-metadata-elasticsearch-dao/src/main/java/com/emc/ecs/metadata/dao/elasticsearch/ElasticS3ObjectDAO.java | Java | mit | 29,830 |
/*
* MapsActivity
*
* Version 1.0
*
* November 12, 2017
*
* Copyright (c) 2017 Team NOTcmput301, CMPUT301, University of Alberta - All Rights Reserved
* You may use, distribute, or modify this code under terms and conditions of the Code of Student Behavior at University of Alberta.
* You can find a copy of the license in the project wiki on github. Otherwise please contact miller4@ualberta.ca.
*/
package com.notcmput301.habitbook;
import android.Manifest;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.graphics.Color;
import android.location.Criteria;
import android.location.Location;
import android.location.LocationManager;
import android.os.Build;
import android.support.annotation.NonNull;
import android.support.v4.app.ActivityCompat;
import android.support.v4.app.FragmentActivity;
import android.os.Bundle;
import android.util.Log;
import android.widget.Toast;
import com.google.android.gms.maps.CameraUpdateFactory;
import com.google.android.gms.maps.GoogleMap;
import com.google.android.gms.maps.OnMapReadyCallback;
import com.google.android.gms.maps.SupportMapFragment;
import com.google.android.gms.maps.model.Circle;
import com.google.android.gms.maps.model.CircleOptions;
import com.google.android.gms.maps.model.LatLng;
import com.google.android.gms.maps.model.Marker;
import com.google.android.gms.maps.model.MarkerOptions;
import java.util.ArrayList;
import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
/**
* Activity to handle map related functions
*
* @author NOTcmput301
* @version 1.0
* @since 1.0
*/
public class MapsActivity extends FragmentActivity implements GoogleMap.OnMyLocationButtonClickListener,
OnMapReadyCallback{
private GoogleMap mMap;
private ArrayList<HabitEvent> habitEvents;
private Gson gson = new Gson();
/**
* Called when the activity is first created.
*
* @param savedInstanceState previous instance of activity
*/
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_maps);
// Obtain the SupportMapFragment and get notified when the map is ready to be used.
SupportMapFragment mapFragment = (SupportMapFragment) getSupportFragmentManager()
.findFragmentById(R.id.map);
mapFragment.getMapAsync(this);
Intent receiver = getIntent();
String e = receiver.getExtras().getString("events");
try{
habitEvents = gson.fromJson(e, new TypeToken<ArrayList<HabitEvent>>(){}.getType());
} catch (Exception e1){
Log.e("error", e1.toString() );
}
Integer i = habitEvents.size();
Log.e("error", i.toString() );
//finish();
}
/**
* Manipulates the map once available.
* This callback is triggered when the map is ready to be used.
* This is where we can add markers or lines, add listeners or move the camera. In this case,
* we just add a marker near Sydney, Australia.
* If Google Play services is not installed on the device, the user will be prompted to install
* it inside the SupportMapFragment. This method will only be triggered once the user has
* installed Google Play services and returned to the app.
*/
@Override
public void onMapReady(GoogleMap googleMap) {
mMap = googleMap;
if (ActivityCompat.checkSelfPermission(this, Manifest.permission.ACCESS_FINE_LOCATION) != PackageManager.PERMISSION_GRANTED && ActivityCompat.checkSelfPermission(this, Manifest.permission.ACCESS_COARSE_LOCATION) != PackageManager.PERMISSION_GRANTED) {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
requestPermissions(new String[]{Manifest.permission.ACCESS_COARSE_LOCATION,Manifest.permission.ACCESS_FINE_LOCATION,Manifest.permission.INTERNET}
,10);
}
return;
}
mMap.setMyLocationEnabled(true);
mMap.setOnMyLocationButtonClickListener(this);
// Add a marker in Sydney and move the camera
if (habitEvents.size()>0){
for (HabitEvent e : habitEvents){
Log.e("error", e.getComment());
if (e.getLatitude() != null){
Log.e("error", e.getLatitude().toString());
}
if (e.getLatitude() != null) {
LatLng event = new LatLng(e.getLatitude(), e.getLongitude());
mMap.addMarker(new MarkerOptions().position(event).title(e.getComment()));
mMap.moveCamera(CameraUpdateFactory.newLatLng(event));
}
}
}
}
/**
* Function for handling location button clicks
*
*/
@Override
public boolean onMyLocationButtonClick() {
LocationManager locationManager = (LocationManager)
getSystemService(Context.LOCATION_SERVICE);
Criteria criteria = new Criteria();
if (ActivityCompat.checkSelfPermission(this, Manifest.permission.ACCESS_FINE_LOCATION) != PackageManager.PERMISSION_GRANTED && ActivityCompat.checkSelfPermission(this, Manifest.permission.ACCESS_COARSE_LOCATION) != PackageManager.PERMISSION_GRANTED) {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
requestPermissions(new String[]{Manifest.permission.ACCESS_COARSE_LOCATION,Manifest.permission.ACCESS_FINE_LOCATION,Manifest.permission.INTERNET}
,10);
}
}
Location location = locationManager.getLastKnownLocation(locationManager
.getBestProvider(criteria, false));
double latitude = location.getLatitude();
double longitude = location.getLongitude();
Toast.makeText(this, "MyLocation button clicked", Toast.LENGTH_SHORT).show();
Circle circle = mMap.addCircle(new CircleOptions()
.center(new LatLng(latitude, longitude))
.radius(5000)
.strokeColor(Color.RED));
// Return false so that we don't consume the event and the default behavior still occurs
// (the camera animates to the user's current position).
return false;
}
}
| CMPUT301F17T28/NOTcmput301 | app/src/main/java/com/notcmput301/habitbook/MapsActivity.java | Java | mit | 6,346 |
import * as angular from 'angular';
import {IRequestServiceProvider} from '../../common/utils/request-utils.provider';
import RecoveryPasswordComponent from './recovery-password.component';
import Core from '../../core';
import Utils from '../../common/utils';
/**
 * Registers the recovery-password feature module: its component, the UI-router
 * state that hosts it, the translation partial it needs, and the password-reset
 * API endpoint.
 */
export default angular
  .module('app.recovery-password', [Core, Utils])
  .component('recoveryPassword', RecoveryPasswordComponent)
  .config([
    '$stateProvider', '$translatePartialLoaderProvider', 'RequestServiceProvider',
    (stateProvider: ng.ui.IStateProvider,
     translatePartialLoader: ng.translate.ITranslatePartialLoaderService,
     requestServiceProvider: IRequestServiceProvider) => {
      // State carries userId/code query params from the reset-password e-mail link.
      stateProvider.state('access.recovery_password', {
        url: '/recovery-password?userId?code',
        component: 'recoveryPassword'
      });
      translatePartialLoader.addPart('components/recovery-password');
      requestServiceProvider.register('recovery', {
        url: 'account/resetpassword'
      });
    }
  ])
  .name;
| LekoWebDeveloper/angular-webapi-login-template | client/src/components/recovery-password/index.ts | TypeScript | mit | 1,155 |
version https://git-lfs.github.com/spec/v1
oid sha256:5181d344dc3334a5a80ecae84df1bb3107af7d92135639b56a7f73ec2ea1931c
size 3057
| yogeshsaroya/new-cdnjs | ajax/libs/highlight.js/8.5/languages/cpp.min.js | JavaScript | mit | 129 |
<?php
namespace Widgets\Dropdown;
class Dropdown extends \Widgets\Core\Base{

	/** @var mixed Widget options (presumably consumed by the base class — verify). */
	public $options;

	/**
	 * Builds a dropdown widget.
	 *
	 * Points the widget at the "dropdown" template/package pair before the
	 * base class performs its setup.
	 *
	 * @param mixed $options configuration forwarded to the parent constructor
	 */
	public function __construct($options){
		// Both identifiers must be set before the base constructor runs.
		$this->package  = 'dropdown';
		$this->template = 'dropdown';
		parent::__construct($options);
	}
}
using System;
using System.Runtime.Serialization;
// ReSharper disable CheckNamespace
namespace SharpRemote
// ReSharper restore CheckNamespace
{
/// <summary>
/// This exception is thrown when a thrown exception should be marshalled (because it crosses the proxy / servant threshold)
/// but cannot, for example because it is missing the <see cref="SerializableAttribute"/> or a proper constructor.
///
/// It preserves a lot of information about the original exception to help document the original problem as well as
/// to find it, if necessary.
/// </summary>
[Serializable]
public class UnserializableException
    : SharpRemoteException
{
    private readonly string _originalMessage;
    private readonly string _originalSource;
    private readonly string _originalStacktrace;
    private readonly string _originalTargetSite;
    private readonly string _originalTypename;

    /// <summary>
    ///     Creates a new UnserializableException.
    /// </summary>
    /// <param name="message"></param>
    public UnserializableException(string message)
        : this(message, null)
    { }

    /// <summary>
    ///     Creates a new UnserializableException that tries to capture
    ///     as much information about the original (unserializable) exception
    ///     as possible to ease debugging.
    /// </summary>
    /// <param name="message"></param>
    /// <param name="innerException"></param>
    public UnserializableException(string message, Exception innerException)
        : base(message, innerException)
    { }

    /// <summary>
    ///     Creates a new UnserializableException that tries to capture
    ///     as much information about the original (unserializable) exception
    ///     as possible to ease debugging.
    ///     Note: the original exception is deliberately NOT stored as the inner
    ///     exception — it is the unserializable object we are working around.
    /// </summary>
    /// <param name="originalException"></param>
    public UnserializableException(Exception originalException)
        : base(originalException.Message)
    {
        _originalMessage = originalException.Message;
        _originalStacktrace = originalException.StackTrace;
        _originalTypename = originalException.GetType().AssemblyQualifiedName;
        _originalSource = originalException.Source;
        // TargetSite is null for exceptions that were never thrown (and on
        // some platforms); capturing diagnostics must not itself throw a
        // NullReferenceException.
        _originalTargetSite = originalException.TargetSite?.Name;
        HResult = originalException.HResult;
    }

    /// <summary>
    ///     Restores an UnserializableException from the given stream.
    /// </summary>
    /// <param name="info"></param>
    /// <param name="context"></param>
    public UnserializableException(SerializationInfo info, StreamingContext context)
        : base(info, context)
    {
        _originalMessage = info.GetString("OriginalMessage");
        _originalStacktrace = info.GetString("OriginalStacktrace");
        _originalTypename = info.GetString("OriginalExceptionType");
        _originalSource = info.GetString("OriginalSource");
        _originalTargetSite = info.GetString("OriginalTargetSite");
    }

    /// <inheritdoc />
    public override void GetObjectData(SerializationInfo info, StreamingContext context)
    {
        base.GetObjectData(info, context);

        // Keys must stay in sync with the deserialization constructor above.
        info.AddValue("OriginalMessage", _originalMessage);
        info.AddValue("OriginalStacktrace", _originalStacktrace);
        info.AddValue("OriginalExceptionType", _originalTypename);
        info.AddValue("OriginalSource", _originalSource);
        info.AddValue("OriginalTargetSite", _originalTargetSite);
    }

    /// <summary>
    ///     Initializes a new instance of this exception.
    /// </summary>
    public UnserializableException()
    {}

    /// <summary>
    ///     The <see cref="Exception.Message"/> of the
    ///     original exception that could not be serialized.
    /// </summary>
    public string OriginalMessage => _originalMessage;

    /// <summary>
    ///     The <see cref="Exception.StackTrace"/> of the
    ///     original exception that could not be serialized.
    /// </summary>
    public string OriginalStacktrace => _originalStacktrace;

    /// <summary>
    ///     The <see cref="Exception.Source"/> of the
    ///     original exception that could not be serialized.
    /// </summary>
    public string OriginalSource => _originalSource;

    /// <summary>
    ///     The fully qualified typename of the original
    ///     exception that could not be serialized.
    /// </summary>
    public string OriginalTypename => _originalTypename;

    /// <summary>
    ///     The name of the <see cref="Exception.TargetSite"/> of the
    ///     original exception that could not be serialized; null when the
    ///     original exception had no target site.
    /// </summary>
    public string OriginalTargetSite => _originalTargetSite;
}
} | Kittyfisto/SharpRemote | SharpRemote/Exceptions/UnserializableException.cs | C# | mit | 4,319 |
'use strict';

// NOTE(review): this looks like Babel-compiled output (CommonJS interop +
// createElement calls, presumably generated from an ES2015/JSX source file).
// Prefer editing the source module and regenerating — TODO confirm the build
// pipeline before hand-editing this file.

Object.defineProperty(exports, "__esModule", {
value: true
});

// Babel helper: defines prototype/static members via Object.defineProperty.
var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();

var _react = require('react');

var _react2 = _interopRequireDefault(_react);

var _reactRouter = require('react-router');

// Babel helper: wraps CommonJS modules so `.default` access works uniformly.
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }

// Babel helper: guards against calling the class constructor without `new`.
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }

// Babel helper: implements the constructor-return semantics of `super()`.
function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? call : self; }

// Babel helper: wires up the prototype chain for `class X extends Y`.
function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; }

// <Menu/> — side navigation with router links to the listings page
// ("Anúncios") and the new-listing form ("Novo anúncio").
var Menu = function (_Component) {
_inherits(Menu, _Component);

function Menu(props) {
_classCallCheck(this, Menu);

return _possibleConstructorReturn(this, (Menu.__proto__ || Object.getPrototypeOf(Menu)).call(this, props));
}

_createClass(Menu, [{
key: 'render',
value: function render() {
return _react2.default.createElement(
'div',
{ className: 'menu-component' },
_react2.default.createElement(
'nav',
null,
_react2.default.createElement(
_reactRouter.Link,
{ to: '/anuncios', activeClassName: 'selected' },
_react2.default.createElement('img', { src: '/imgs/building.svg' }),
_react2.default.createElement(
'span',
null,
'An\xFAncios'
)
),
_react2.default.createElement(
_reactRouter.Link,
{ to: '/novo-anuncio', activeClassName: 'selected' },
_react2.default.createElement('img', { src: '/imgs/plus-icon.svg' }),
_react2.default.createElement(
'span',
null,
'Novo an\xFAncio'
)
)
)
);
}
}]);

return Menu;
}(_react.Component);

Menu.propTypes = {};

exports.default = Menu;
bql`
image(size="foo") {
width
height
src
}
`; | bicyclejs/babel-plugin-transform-bql | test/test-cases/invalid-character.js | JavaScript | mit | 62 |
package de.gurkenlabs.litiengine.entities;
import de.gurkenlabs.litiengine.graphics.RenderEngine;
import java.awt.Graphics2D;
import java.util.EventObject;
/**
 * This {@code EventObject} contains data about the rendering process of an entity.
 *
 * @see RenderEngine#renderEntity(Graphics2D, IEntity)
 */
public class EntityRenderEvent extends EventObject {
  private static final long serialVersionUID = 6397005859146712222L;

  // Both members are transient: neither the AWT graphics context nor the
  // entity takes part in event serialization.
  private final transient IEntity entity;
  private final transient Graphics2D graphics;

  public EntityRenderEvent(final Graphics2D graphics, final IEntity entity) {
    // The rendered entity doubles as the event source.
    super(entity);
    this.entity = entity;
    this.graphics = graphics;
  }

  /**
   * Gets the graphics object on which the entity is rendered.
   *
   * @return The graphics object on which the entity is rendered.
   */
  public Graphics2D getGraphics() {
    return graphics;
  }

  /**
   * Get the entity involved with the rendering process.
   *
   * @return The entity involved with the rendering process.
   */
  public IEntity getEntity() {
    return entity;
  }
}
| gurkenlabs/litiengine | core/src/main/java/de/gurkenlabs/litiengine/entities/EntityRenderEvent.java | Java | mit | 1,089 |
import plain from '../structure/plain'
import immutable from '../structure/immutable'
import defaultShouldError from '../defaultShouldError'
describe('defaultShouldError', () => {
  it('should validate when initialRender is true', () => {
    expect(defaultShouldError({ initialRender: true })).toBe(true)
  })

  // Runs the structure-dependent cases against one data-structure
  // implementation (plain JS objects or Immutable.js).
  const runStructureSuite = structure => {
    const { fromJS } = structure

    // Builds the argument object for a non-initial render with the given
    // current/next values plus any extra fields (e.g. validator keys).
    const makeParams = (values, nextValues, extra = {}) => ({
      initialRender: false,
      structure,
      values: fromJS(values),
      nextProps: {
        values: fromJS(nextValues)
      },
      ...extra
    })

    it('should validate if values have changed', () => {
      const params = makeParams({ foo: 'fooInitial' }, { foo: 'fooChanged' })
      expect(defaultShouldError(params)).toBe(true)
    })

    it('should not validate if values have not changed', () => {
      const params = makeParams({ foo: 'fooInitial' }, { foo: 'fooInitial' })
      expect(defaultShouldError(params)).toBe(false)
    })

    it('should validate if field validator keys have changed', () => {
      const params = makeParams({ foo: 'fooValue' }, { foo: 'fooValue' }, {
        lastFieldValidatorKeys: [],
        fieldValidatorKeys: ['foo']
      })
      expect(defaultShouldError(params)).toBe(true)
    })

    it('should not validate if field validator keys have not changed', () => {
      const params = makeParams({ foo: 'fooInitial' }, { foo: 'fooInitial' }, {
        lastFieldValidatorKeys: ['foo'],
        fieldValidatorKeys: ['foo']
      })
      expect(defaultShouldError(params)).toBe(false)
    })
  }

  runStructureSuite(plain)
  runStructureSuite(immutable)
})
| erikras/redux-form | src/__tests__/defaultShouldError.spec.js | JavaScript | mit | 2,200 |
const path = require('path');
module.exports = {
HOST: 'localhost',
PORT: 3000,
URL: {
ROOT: 'https://bootflex.herokuapp.com',
API: 'https://bootflex.herokuapp.com/api'
},
PATH: {
ROOT: path.join(__dirname, '..')
}
};
| zrosenbauer/bootflex | doc-viewer/configs/production.js | JavaScript | mit | 243 |
import Chaffle from "chaffle";
/**
 * Attaches a Chaffle text-scramble effect to every element marked with a
 * `data-chaffle` attribute; the effect replays on each mouseover.
 */
const scrambleAuthor = () => {
  for (const el of document.querySelectorAll("[data-chaffle]")) {
    const effect = new Chaffle(el, {
      speed: 10,
      delay: 20,
    });
    el.addEventListener("mouseover", () => {
      effect.init();
    });
  }
};

export { scrambleAuthor };
| IvanWoo/subjpop.github.io | src/animations.js | JavaScript | mit | 386 |
// Copyright (c) 2015 Uber Technologies, Inc.
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
import React, {PropTypes, Component} from 'react';
import ViewportMercator from 'viewport-mercator-project';
import window from 'global/window';
export default class CanvasOverlay extends Component {
static propTypes = {
width: PropTypes.number.isRequired,
height: PropTypes.number.isRequired,
latitude: PropTypes.number.isRequired,
longitude: PropTypes.number.isRequired,
zoom: PropTypes.number.isRequired,
redraw: PropTypes.func.isRequired,
isDragging: PropTypes.bool.isRequired
};
componentDidMount() {
this._redraw();
}
componentDidUpdate() {
this._redraw();
}
_redraw() {
const pixelRatio = window.devicePixelRatio || 1;
const canvas = this.refs.overlay;
const ctx = canvas.getContext('2d');
ctx.save();
ctx.scale(pixelRatio, pixelRatio);
const mercator = ViewportMercator(this.props);
this.props.redraw({
width: this.props.width,
height: this.props.height,
ctx,
project: mercator.project,
unproject: mercator.unproject,
isDragging: this.props.isDragging
});
ctx.restore();
}
render() {
const pixelRatio = window.devicePixelRatio || 1;
return (
<canvas
ref="overlay"
width={ this.props.width * pixelRatio }
height={ this.props.height * pixelRatio }
style={ {
width: `${this.props.width}px`,
height: `${this.props.height}px`,
position: 'absolute',
pointerEvents: 'none',
left: 0,
top: 0
} }/>
);
}
} | RanaRunning/rana | web/src/components/MapGL/overlays/canvas.react.js | JavaScript | mit | 2,671 |
<?php
/* AdminBundle:Admin:gestionCategories.html.twig */
class __TwigTemplate_d67b492aebafcd5f8ab65d887049fd9f778babe4069922cd9ffcb861388774ee extends Twig_Template
{
public function __construct(Twig_Environment $env)
{
parent::__construct($env);
// line 1
$this->parent = $this->loadTemplate("layout/layoutAdmin.html.twig", "AdminBundle:Admin:gestionCategories.html.twig", 1);
$this->blocks = array(
'title' => array($this, 'block_title'),
'bread' => array($this, 'block_bread'),
'body' => array($this, 'block_body'),
'angular' => array($this, 'block_angular'),
);
}
protected function doGetParent(array $context)
{
return "layout/layoutAdmin.html.twig";
}
protected function doDisplay(array $context, array $blocks = array())
{
$__internal_cde43e323352527c7dce020759e59a34f735f773650cd3c8debd02ad4afe50e2 = $this->env->getExtension("Symfony\\Bundle\\WebProfilerBundle\\Twig\\WebProfilerExtension");
$__internal_cde43e323352527c7dce020759e59a34f735f773650cd3c8debd02ad4afe50e2->enter($__internal_cde43e323352527c7dce020759e59a34f735f773650cd3c8debd02ad4afe50e2_prof = new Twig_Profiler_Profile($this->getTemplateName(), "template", "AdminBundle:Admin:gestionCategories.html.twig"));
$__internal_a3e67b5942a51cae08f3a97d8fefbbf6838cbff08c3f39a90a6f06a2e8cdc9a3 = $this->env->getExtension("Symfony\\Bridge\\Twig\\Extension\\ProfilerExtension");
$__internal_a3e67b5942a51cae08f3a97d8fefbbf6838cbff08c3f39a90a6f06a2e8cdc9a3->enter($__internal_a3e67b5942a51cae08f3a97d8fefbbf6838cbff08c3f39a90a6f06a2e8cdc9a3_prof = new Twig_Profiler_Profile($this->getTemplateName(), "template", "AdminBundle:Admin:gestionCategories.html.twig"));
$this->parent->display($context, array_merge($this->blocks, $blocks));
$__internal_cde43e323352527c7dce020759e59a34f735f773650cd3c8debd02ad4afe50e2->leave($__internal_cde43e323352527c7dce020759e59a34f735f773650cd3c8debd02ad4afe50e2_prof);
$__internal_a3e67b5942a51cae08f3a97d8fefbbf6838cbff08c3f39a90a6f06a2e8cdc9a3->leave($__internal_a3e67b5942a51cae08f3a97d8fefbbf6838cbff08c3f39a90a6f06a2e8cdc9a3_prof);
}
// line 3
public function block_title($context, array $blocks = array())
{
$__internal_079df296d951057f1621359ed00bca076690b15930c830e2f97c9a6b202ac183 = $this->env->getExtension("Symfony\\Bundle\\WebProfilerBundle\\Twig\\WebProfilerExtension");
$__internal_079df296d951057f1621359ed00bca076690b15930c830e2f97c9a6b202ac183->enter($__internal_079df296d951057f1621359ed00bca076690b15930c830e2f97c9a6b202ac183_prof = new Twig_Profiler_Profile($this->getTemplateName(), "block", "title"));
$__internal_e124196bf64e610832b44c3175fc7e3d48077e78d94b0725a604f1e8cc3f7e42 = $this->env->getExtension("Symfony\\Bridge\\Twig\\Extension\\ProfilerExtension");
$__internal_e124196bf64e610832b44c3175fc7e3d48077e78d94b0725a604f1e8cc3f7e42->enter($__internal_e124196bf64e610832b44c3175fc7e3d48077e78d94b0725a604f1e8cc3f7e42_prof = new Twig_Profiler_Profile($this->getTemplateName(), "block", "title"));
echo "Dashboad";
$__internal_e124196bf64e610832b44c3175fc7e3d48077e78d94b0725a604f1e8cc3f7e42->leave($__internal_e124196bf64e610832b44c3175fc7e3d48077e78d94b0725a604f1e8cc3f7e42_prof);
$__internal_079df296d951057f1621359ed00bca076690b15930c830e2f97c9a6b202ac183->leave($__internal_079df296d951057f1621359ed00bca076690b15930c830e2f97c9a6b202ac183_prof);
}
// line 6
public function block_bread($context, array $blocks = array())
{
$__internal_382c7fc54e6323ab81fede2cbfb23ade7c4a21db62f2fa56d6e71324d8935d01 = $this->env->getExtension("Symfony\\Bundle\\WebProfilerBundle\\Twig\\WebProfilerExtension");
$__internal_382c7fc54e6323ab81fede2cbfb23ade7c4a21db62f2fa56d6e71324d8935d01->enter($__internal_382c7fc54e6323ab81fede2cbfb23ade7c4a21db62f2fa56d6e71324d8935d01_prof = new Twig_Profiler_Profile($this->getTemplateName(), "block", "bread"));
$__internal_2df00f4c839063dfac10d57588d51910a9f6b22ee636ee297dd6156979a6c4a7 = $this->env->getExtension("Symfony\\Bridge\\Twig\\Extension\\ProfilerExtension");
$__internal_2df00f4c839063dfac10d57588d51910a9f6b22ee636ee297dd6156979a6c4a7->enter($__internal_2df00f4c839063dfac10d57588d51910a9f6b22ee636ee297dd6156979a6c4a7_prof = new Twig_Profiler_Profile($this->getTemplateName(), "block", "bread"));
// line 7
echo "
<div class=\"row wrapper border-bottom white-bg page-heading\">
<div class=\"col-sm-4\">
<h2>Gestion catégories</h2>
<ol class=\"breadcrumb\">
<li>
<a href=\"";
// line 14
echo $this->env->getExtension('Symfony\Bridge\Twig\Extension\RoutingExtension')->getPath("admin_homepage");
echo "\">Admin</a>
</li>
<li class=\"active\">
<strong>Gestion Catégories</strong>
</li>
</ol>
</div>
</div>
";
$__internal_2df00f4c839063dfac10d57588d51910a9f6b22ee636ee297dd6156979a6c4a7->leave($__internal_2df00f4c839063dfac10d57588d51910a9f6b22ee636ee297dd6156979a6c4a7_prof);
$__internal_382c7fc54e6323ab81fede2cbfb23ade7c4a21db62f2fa56d6e71324d8935d01->leave($__internal_382c7fc54e6323ab81fede2cbfb23ade7c4a21db62f2fa56d6e71324d8935d01_prof);
}
// line 29
/**
 * Compiled Twig block "body" for AdminBundle:Admin:gestionCategories.html.twig.
 * Emits the category-management page markup: a live search box, one card per
 * category (data-bound by AngularJS), and the delete-confirmation modal.
 *
 * NOTE(review): this is generated Twig cache output — edit the source
 * template instead; this file is rewritten whenever the cache is warmed.
 */
public function block_body($context, array $blocks = array())
{
// Profiler bookkeeping emitted by the Twig profiler extensions.
$__internal_8650bd4d84fa446e9a145bb3e2b14f5378b57446d85338804d96bd00fde7920a = $this->env->getExtension("Symfony\\Bundle\\WebProfilerBundle\\Twig\\WebProfilerExtension");
$__internal_8650bd4d84fa446e9a145bb3e2b14f5378b57446d85338804d96bd00fde7920a->enter($__internal_8650bd4d84fa446e9a145bb3e2b14f5378b57446d85338804d96bd00fde7920a_prof = new Twig_Profiler_Profile($this->getTemplateName(), "block", "body"));
$__internal_edd6a65c7f003fd9676fa1538de0ee47701f4e3fbc5cc7a6ec81ce9897b7ff3f = $this->env->getExtension("Symfony\\Bridge\\Twig\\Extension\\ProfilerExtension");
$__internal_edd6a65c7f003fd9676fa1538de0ee47701f4e3fbc5cc7a6ec81ce9897b7ff3f->enter($__internal_edd6a65c7f003fd9676fa1538de0ee47701f4e3fbc5cc7a6ec81ce9897b7ff3f_prof = new Twig_Profiler_Profile($this->getTemplateName(), "block", "body"));
// line 30
// Static markup up to the category-card image (template lines 30-59).
echo "
<div class=\"wrapper wrapper-content\" ng-controller=\"gestionCatCtrl\">
<div class=\" text-center animated fadeInRightBig\">
<div class=\"row\" ng-init=\"txtSearch=''\">
<div class=\"form-group\">
<div class=\"col-sm-1\"></div>
<div class=\"col-sm-10\"><input placeholder=\"Chercher des catégories\" ng-model=\"txtSearch\" ng-change=\"getCategories(txtSearch)\" type=\"text\" class=\"form-control\"></div>
</div>
</div>
<div class=\"clearP\"></div>
<div class=\"row\">
<div class=\"col-lg-3\" ng-repeat=\"categorie in categories\" >
<div class=\"contact-box center-version\" style=\"height: 280px!important;\">
<a href=\"#\">
<img alt=\"image\" class=\"img-circle\" src=\"";
// line 60
// {{ asset('img/a2.jpg') }} resolved and HTML-escaped at runtime.
echo twig_escape_filter($this->env, $this->env->getExtension('Symfony\Bridge\Twig\Extension\AssetExtension')->getAssetUrl("img/a2.jpg"), "html", null, true);
// Remainder of the card plus the #modalDelete confirmation dialog.
echo "\">
<h3 class=\"m-b-xs\"><strong ng-bind=\"categorie.nom\"></strong></h3>
<address class=\"m-t-md\">
<span ng-bind=\"categorie.desc | limitTo:50\"></span>
<span ng-if=\"categorie.desc.length > 50\">...</span>
</address>
</a>
<div class=\"contact-box-footer\">
<div class=\"m-t-xs btn-group\">
<a class=\"btn btn-xs btn-white\"><i class=\"fa fa-pencil\"></i> Modifier </a>
<a class=\"btn btn-xs btn-white\" ng-click=\"openModalDel(categorie.id,categorie.nom)\" ><i class=\"fa fa-close\"></i> Supprimer</a>
</div>
</div>
</div>
</div>
</div>
</div>
<div class=\"modal inmodal fade\" id=\"modalDelete\" tabindex=\"-1\" role=\"dialog\" aria-hidden=\"true\">
<div class=\"modal-dialog modal-sm\">
<div class=\"modal-content\">
<div class=\"modal-header\">
<button type=\"button\" class=\"close\" data-dismiss=\"modal\"><span aria-hidden=\"true\">×</span><span class=\"sr-only\">Close</span></button>
<h4 class=\"modal-title\">Suppression</h4>
</div>
<div class=\"modal-body\">
<h3>
Vous etes sûr de <strong>supprimer</strong> la catégorie <span ng-bind=\"currentNameToDel\"></span>
</h3>
</div>
<div class=\"modal-footer\">
<button type=\"button\" class=\"btn btn-danger\" ng-click=\"deleteCategorie(currentIdToDel)\">Supprimer</button>
<button type=\"button\" class=\"btn btn-white\" data-dismiss=\"modal\">Annuler</button>
</div>
</div>
</div>
</div>
</div>
";
$__internal_edd6a65c7f003fd9676fa1538de0ee47701f4e3fbc5cc7a6ec81ce9897b7ff3f->leave($__internal_edd6a65c7f003fd9676fa1538de0ee47701f4e3fbc5cc7a6ec81ce9897b7ff3f_prof);
$__internal_8650bd4d84fa446e9a145bb3e2b14f5378b57446d85338804d96bd00fde7920a->leave($__internal_8650bd4d84fa446e9a145bb3e2b14f5378b57446d85338804d96bd00fde7920a_prof);
}
// line 131
/**
 * Compiled Twig block "angular": inlines the AngularJS controller
 * "gestionCatCtrl" that drives the category list (search, 3-second polling,
 * delete-confirmation modal). The whole block is a single static string.
 *
 * NOTE(review): generated Twig cache output — do not edit by hand.
 */
public function block_angular($context, array $blocks = array())
{
// Profiler bookkeeping emitted by the Twig profiler extensions.
$__internal_e098835ee57ea6fc8e88750a5bc6ea99258053fa5791e59d0ddaf331696e506b = $this->env->getExtension("Symfony\\Bundle\\WebProfilerBundle\\Twig\\WebProfilerExtension");
$__internal_e098835ee57ea6fc8e88750a5bc6ea99258053fa5791e59d0ddaf331696e506b->enter($__internal_e098835ee57ea6fc8e88750a5bc6ea99258053fa5791e59d0ddaf331696e506b_prof = new Twig_Profiler_Profile($this->getTemplateName(), "block", "angular"));
$__internal_0449b6bb9057f83abfb5af1ba1df0fd19fc961e1364f086aac02c6e0fba65027 = $this->env->getExtension("Symfony\\Bridge\\Twig\\Extension\\ProfilerExtension");
$__internal_0449b6bb9057f83abfb5af1ba1df0fd19fc961e1364f086aac02c6e0fba65027->enter($__internal_0449b6bb9057f83abfb5af1ba1df0fd19fc961e1364f086aac02c6e0fba65027_prof = new Twig_Profiler_Profile($this->getTemplateName(), "block", "angular"));
// line 132
// NOTE(review): the script below hard-codes a LAN backend URL
// (http://172.16.128.8/...) — fix that in the source template, not here.
echo " <script>
appAdmin.controller(\"gestionCatCtrl\",function(\$scope,\$http){
\$scope.categories=[];
\$scope.txtSearch=\"\";
\$scope.currentNameToDel = \"\";
\$scope.currentIdToDel = 0;
\$scope.getCategories = function(txtChange){
if(txtChange == undefined){
\$scope.txtRecherche = \"\";
}else{
\$scope.txtRecherche = txtChange;
}
\$scope.objToSend = {
txt : \$scope.txtRecherche
}
\$http.post(\"http://172.16.128.8/formation/web/app_dev.php/Admin/Categories/getCategoriesJson\",\$scope.objToSend)
.then(function (response) {
\$scope.categories = response.data;
});
}
\$scope.getCategories(\$scope.txtSearch);
setInterval(function(){
\$scope.getCategories(\$scope.txtSearch);
},3000);
\$scope.openModalDel = function(id,nom){
\$scope.currentNameToDel = nom;
\$scope.currentIdToDel = id;
\$(\"#modalDelete\").modal('show');
}
\$scope.deleteCategorie = function(id){
\$scope.urlDel = \"http://172.16.128.8/formation/web/app_dev.php/Admin/Categories/deleteCategorieJson/\" + id ;
\$http.post(\$scope.urlDel)
.then(function (reponse) {
if(reponse.data.success){
toastr.success(reponse.data.message,\"Suppression\");
}else{
toastr.error(reponse.data.message,\"Suppression\");
}
\$scope.getCategories(\$scope.txtSearch);
\$(\"#modalDelete\").modal('hide');
});
}
});
</script>
";
$__internal_0449b6bb9057f83abfb5af1ba1df0fd19fc961e1364f086aac02c6e0fba65027->leave($__internal_0449b6bb9057f83abfb5af1ba1df0fd19fc961e1364f086aac02c6e0fba65027_prof);
$__internal_e098835ee57ea6fc8e88750a5bc6ea99258053fa5791e59d0ddaf331696e506b->leave($__internal_e098835ee57ea6fc8e88750a5bc6ea99258053fa5791e59d0ddaf331696e506b_prof);
}
/**
 * Logical name of the Twig template this compiled class was generated from.
 *
 * @return string
 */
public function getTemplateName()
{
return "AdminBundle:Admin:gestionCategories.html.twig";
}
/**
 * Whether Twig may reuse this compiled template as a trait.
 * False here: the generated source (see getSourceContext()) extends a parent
 * template, so it is not traitable.
 *
 * @return bool
 */
public function isTraitable()
{
return false;
}
/**
 * Map of compiled PHP line numbers => originating .twig line numbers,
 * used by Twig to rewrite stack traces against the template source.
 *
 * @return array
 */
public function getDebugInfo()
{
return array ( 228 => 132, 219 => 131, 141 => 60, 109 => 30, 100 => 29, 79 => 14, 70 => 7, 61 => 6, 43 => 3, 11 => 1,);
}
/** @deprecated since 1.27 (to be removed in 2.0). Use getSourceContext() instead */
public function getSource()
{
// Kept only for backwards compatibility: emit a deprecation notice and
// delegate to getSourceContext().
@trigger_error('The '.__METHOD__.' method is deprecated since version 1.27 and will be removed in 2.0. Use getSourceContext() instead.', E_USER_DEPRECATED);
return $this->getSourceContext()->getCode();
}
/**
 * Returns the original (uncompiled) Twig source embedded verbatim in this
 * cache file, together with the template's logical name and absolute path.
 * The string below IS the template — do not insert anything inside it.
 *
 * @return Twig_Source
 */
public function getSourceContext()
{
return new Twig_Source("{% extends 'layout/layoutAdmin.html.twig' %}
{% block title %}Dashboad{% endblock %}
{% block bread %}
<div class=\"row wrapper border-bottom white-bg page-heading\">
<div class=\"col-sm-4\">
<h2>Gestion catégories</h2>
<ol class=\"breadcrumb\">
<li>
<a href=\"{{ path('admin_homepage') }}\">Admin</a>
</li>
<li class=\"active\">
<strong>Gestion Catégories</strong>
</li>
</ol>
</div>
</div>
{% endblock %}
{% block body %}
<div class=\"wrapper wrapper-content\" ng-controller=\"gestionCatCtrl\">
<div class=\" text-center animated fadeInRightBig\">
<div class=\"row\" ng-init=\"txtSearch=''\">
<div class=\"form-group\">
<div class=\"col-sm-1\"></div>
<div class=\"col-sm-10\"><input placeholder=\"Chercher des catégories\" ng-model=\"txtSearch\" ng-change=\"getCategories(txtSearch)\" type=\"text\" class=\"form-control\"></div>
</div>
</div>
<div class=\"clearP\"></div>
<div class=\"row\">
<div class=\"col-lg-3\" ng-repeat=\"categorie in categories\" >
<div class=\"contact-box center-version\" style=\"height: 280px!important;\">
<a href=\"#\">
<img alt=\"image\" class=\"img-circle\" src=\"{{ asset('img/a2.jpg') }}\">
<h3 class=\"m-b-xs\"><strong ng-bind=\"categorie.nom\"></strong></h3>
<address class=\"m-t-md\">
<span ng-bind=\"categorie.desc | limitTo:50\"></span>
<span ng-if=\"categorie.desc.length > 50\">...</span>
</address>
</a>
<div class=\"contact-box-footer\">
<div class=\"m-t-xs btn-group\">
<a class=\"btn btn-xs btn-white\"><i class=\"fa fa-pencil\"></i> Modifier </a>
<a class=\"btn btn-xs btn-white\" ng-click=\"openModalDel(categorie.id,categorie.nom)\" ><i class=\"fa fa-close\"></i> Supprimer</a>
</div>
</div>
</div>
</div>
</div>
</div>
<div class=\"modal inmodal fade\" id=\"modalDelete\" tabindex=\"-1\" role=\"dialog\" aria-hidden=\"true\">
<div class=\"modal-dialog modal-sm\">
<div class=\"modal-content\">
<div class=\"modal-header\">
<button type=\"button\" class=\"close\" data-dismiss=\"modal\"><span aria-hidden=\"true\">×</span><span class=\"sr-only\">Close</span></button>
<h4 class=\"modal-title\">Suppression</h4>
</div>
<div class=\"modal-body\">
<h3>
Vous etes sûr de <strong>supprimer</strong> la catégorie <span ng-bind=\"currentNameToDel\"></span>
</h3>
</div>
<div class=\"modal-footer\">
<button type=\"button\" class=\"btn btn-danger\" ng-click=\"deleteCategorie(currentIdToDel)\">Supprimer</button>
<button type=\"button\" class=\"btn btn-white\" data-dismiss=\"modal\">Annuler</button>
</div>
</div>
</div>
</div>
</div>
{% endblock %}
{% block angular %}
<script>
appAdmin.controller(\"gestionCatCtrl\",function(\$scope,\$http){
\$scope.categories=[];
\$scope.txtSearch=\"\";
\$scope.currentNameToDel = \"\";
\$scope.currentIdToDel = 0;
\$scope.getCategories = function(txtChange){
if(txtChange == undefined){
\$scope.txtRecherche = \"\";
}else{
\$scope.txtRecherche = txtChange;
}
\$scope.objToSend = {
txt : \$scope.txtRecherche
}
\$http.post(\"http://172.16.128.8/formation/web/app_dev.php/Admin/Categories/getCategoriesJson\",\$scope.objToSend)
.then(function (response) {
\$scope.categories = response.data;
});
}
\$scope.getCategories(\$scope.txtSearch);
setInterval(function(){
\$scope.getCategories(\$scope.txtSearch);
},3000);
\$scope.openModalDel = function(id,nom){
\$scope.currentNameToDel = nom;
\$scope.currentIdToDel = id;
\$(\"#modalDelete\").modal('show');
}
\$scope.deleteCategorie = function(id){
\$scope.urlDel = \"http://172.16.128.8/formation/web/app_dev.php/Admin/Categories/deleteCategorieJson/\" + id ;
\$http.post(\$scope.urlDel)
.then(function (reponse) {
if(reponse.data.success){
toastr.success(reponse.data.message,\"Suppression\");
}else{
toastr.error(reponse.data.message,\"Suppression\");
}
\$scope.getCategories(\$scope.txtSearch);
\$(\"#modalDelete\").modal('hide');
});
}
});
</script>
{% endblock %}
", "AdminBundle:Admin:gestionCategories.html.twig", "/var/www/html/formation/src/Admin/AdminBundle/Resources/views/Admin/gestionCategories.html.twig");
}
}
| AziziNidhal/formation | var/cache/dev/twig/9e/9e7f6ac2722fe89fb849c145c79b7b4076f3b4cafa96f3c76d520acf3fe06829.php | PHP | mit | 20,345 |
<?php
/**
*
* Proxy Pattern
*
* A proxy pattern creates an entry point which interacts behind the scenes with other objects.
* Can be useful for implementing access control, to implement lazy loading of resource intensive
* objects, or to simply act as a wrapper to reduce the options available to another more complex object
*/
/**
 * Minimal contract for objects able to fetch an HTTP resource.
 */
interface HttpInterface
{
/**
 * Retrieve the resource and return its body.
 *
 * @return mixed
 */
public function get();
}
/**
 * HTTP proxy hiding the cURL details behind a single get() call.
 *
 * Usage:
 *
 *   $proxy = new HttpProxy('http://rss.cnn.com/rss/cnn_world.rss');
 *   echo $proxy->get();
 */
class HttpProxy implements HttpInterface
{
    /** @var string Sanitised target URL. */
    protected $address;

    /** @var string Body of the last successful response. */
    protected $string;

    /**
     * Constructor
     *
     * @param string $address URL to fetch; sanitised with FILTER_SANITIZE_URL.
     */
    public function __construct($address)
    {
        $this->address = filter_var($address, FILTER_SANITIZE_URL);
    }

    /**
     * Fetch the configured address and return the response body.
     *
     * Fixes over the previous revision: the cURL handle is always closed
     * (it used to leak when a non-200 status threw mid-try), a `false`
     * return from curl_exec() is treated as a failure instead of being
     * stored as the body, and the empty success branch plus the
     * catch-and-rethrow indirection are gone.
     *
     * @return string response body
     * @throws Exception when the transfer fails or the HTTP status is not 200
     */
    public function get()
    {
        $handle = curl_init();
        curl_setopt($handle, CURLOPT_URL, $this->address);
        curl_setopt($handle, CURLOPT_RETURNTRANSFER, 1);
        curl_setopt($handle, CURLOPT_HEADER, 0);

        $data = curl_exec($handle);
        $info = curl_getinfo($handle);
        // Close before any throw so the handle never leaks.
        curl_close($handle);

        if ($data === false || $info['http_code'] != 200) {
            throw new Exception('Request for address: ' . $this->address . ' failed.');
        }

        $this->string = $data;
        return $this->__toString();
    }

    /**
     * Format output as a string
     *
     * @return string
     */
    public function __toString()
    {
        return $this->string;
    }
}
/**
 * Try the Proxy Pattern Example:
 *
 * 1. Download and place file on your local server.
 * 2. Open the file using your browser.
 *
 * NOTE: executing this file performs a live HTTP request to the URL below.
 */
$proxy = new HttpProxy('http://www.youtube.com/watch?v=oHg5SJYRHA0');
echo $proxy->get();
| voidabhi/reddy | proxy-pattern.php | PHP | mit | 2,019 |
#!/usr/bin/python -*- coding:utf-8 -*-
__Author__ = "Riyaz Ahmad Bhat"
__Email__ = "riyaz.ah.bhat@gmail.com"
import re
from collections import namedtuple
from sanity_checker import SanityChecker
class DefaultList(list):
	"""List subclass that yields a fallback value for out-of-range indices
	instead of raising ``IndexError`` — the list analogue of a default dict.
	"""

	def __init__(self, default=None):
		# Fallback handed back by ``__getitem__`` whenever the index misses.
		self.default = default
		list.__init__(self)

	def __getitem__(self, index):
		# Delegate to the normal list lookup; only an IndexError triggers
		# the fallback, so slices and negative indices behave as usual.
		try:
			return list.__getitem__(self, index)
		except IndexError:
			return self.default
class SSFReader (SanityChecker):
	"""Reader for one SSF (Shakti Standard Format) dependency-annotated
	sentence. Splits the sentence into chunk heads and chunk children,
	recording morphological features, dependency relations and chunk
	membership for each token.

	# NOTE(review): Python 2 code — relies on str.decode in getAnnotations.
	"""
	def __init__ (self, sentence):
		super(SSFReader, self).__init__()
		# Running token id (1-based, assigned in reading order).
		self.id_ = int()
		# One entry per chunk head, children attached via node.children.
		self.nodeList = list()
		# chunk name -> head word of that chunk.
		self.chunk_word = dict()
		self.sentence = sentence
		# chunk name -> name of the chunk it modifies (its parent).
		self.modifierModified = dict()
		self.node = namedtuple('node',
			('id', 'head', 'children', 'pos', 'poslcat', 'af', 'vpos', 'name','drel','parent',
			'chunkId', 'chunkType', 'mtype', 'troot', 'coref', 'stype','voicetype', 'posn'))
		self.features = namedtuple('features',
			('lemma','cat','gen','num','per','case','vib','tam'))
	def getAnnotations (self):
		"""Parse self.sentence line by line and populate nodeList.

		Lines whose first column is an integer open a chunk (its FS pairs
		become the pending head attributes); lines with a dotted index are
		tokens inside that chunk. Returns self for chaining."""
		children_ = list()
		for line in self.sentence.split("\n"):
			nodeInfo = line.decode("utf-8").split("\t")
			if nodeInfo[0].isdigit():
				assert len(nodeInfo) == 4 # no need to process trash! FIXME
				attributeValue_pairs = self.FSPairs(nodeInfo[3][4:-1])
				attributes = self.updateFSValues(attributeValue_pairs)
				h = attributes.get #NOTE h -> head node attributes
			elif nodeInfo[0].replace(".",'',1).isdigit():
				assert (len(nodeInfo) == 4) and (nodeInfo[1] and nodeInfo[2] != '') # FIXME
				self.id_ += 1
				# POS tag stripped of any non-ascii residue.
				pos_ = nodeInfo[2].encode("utf-8").decode("ascii",'ignore').encode("ascii")
				wordForm_ = nodeInfo[1]
				attributeValue_pairs = self.FSPairs(nodeInfo[3][4:-1])
				if attributeValue_pairs['name'] == h('head_'):# NOTE head word of the chunk
					self.nodeList.append(self.node(str(self.id_),wordForm_,children_,pos_,h('poslcat_'),
						self.features(h('lemma_') if h('lemma_') else wordForm_ ,h('cat_'),h('gen_'), h('num_'),
						h('per_'),h('case_'),h('vib_'),h('tam_')),h('vpos_'),h('head_'),h('drel_'),
						h('parent_'),h('chunkId_'),":".join(('head',h('chunkId_'))),h('mtype_'),h('troot_'),
						h('coref_'),h('stype_'),h('voicetype_'),h('posn_')))
					self.modifierModified[h('chunkId_')] = h('parent_')
					self.chunk_word[h('chunkId_')] = h('head_')
				else:
					attributes = self.updateFSValues(attributeValue_pairs)
					c = attributes.get #NOTE c -> child node attributes
					children_.append(self.node(str(self.id_),wordForm_,[],pos_,c('poslcat_'),self.features(c('lemma_') \
						if c('lemma_') else wordForm_ ,c('cat_'),c('gen_'),c('num_'),c('per_'),c('case_'),c('vib_'),
						c('tam_')),c('vpos_'),c('name_'),"_","_",None,":".join(('child',h('chunkId_'))),c('mtype_'),
						c('troot_'),c('coref_'),None, None, c('posn_')))
			else: children_ = list()
		return self
	def FSPairs (self, FS) :
		"""Split a feature-structure string into an attribute->value dict.
		Tokens without '=' are skipped; dmrel= is normalised to drel=."""
		feats = dict()
		for feat in FS.split():
			if "=" not in feat:continue
			feat = re.sub("af='+","af='",feat.replace("dmrel=",'drel='))
			assert len(feat.split("=")) == 2
			attribute,value = feat.split("=")
			feats[attribute] = value
		return feats
	def morphFeatures (self, AF):
		"LEMMA,CAT,GEN,NUM,PER,CASE,VIB,TAM"
		assert len(AF[:-1].split(",")) == 8 # no need to process trash! FIXME
		lemma_,cat_,gen_,num_,per_,case_,vib_,tam_ = AF.split(",")
		# Leading/trailing quotes on lemma and tam are part of the notation.
		if len(lemma_) > 1: lemma_ = lemma_.strip("'")
		return lemma_.strip("'"),cat_,gen_,num_,per_,case_,vib_,tam_.strip("'")
	def updateFSValues (self, attributeValue_pairs):
		"""Expand raw FS pairs into the full attribute dict consumed by
		getAnnotations: 'af' is unpacked into morph features, 'drel' into
		relation + parent, everything else is copied with quotes stripped."""
		attributes = dict(zip(['head_','poslcat_','af_','vpos_','name_','drel_','parent_','mtype_','troot_','chunkId_',\
			'coref_','stype_','voicetype_','posn_'], [None] * 14))
		attributes.update(dict(zip(['lemma_','cat_','gen_','num_','per_','case_','vib_','tam_'], [''] * 8)))
		for key,value in attributeValue_pairs.items():
			if key == "af":
				attributes['lemma_'],attributes['cat_'],attributes['gen_'],attributes['num_'],\
				attributes['per_'],attributes['case_'],attributes['vib_'],attributes['tam_'] = \
				self.morphFeatures (value)
			elif key == "drel":
				assert len(value.split(":")) == 2 # no need to process trash! FIXME
				attributes['drel_'], attributes['parent_'] = re.sub("'|\"",'',value).split(":")
				assert attributes['drel_'] and attributes['parent_'] != "" # no need to process trash! FIXME
			else:
				variable = str(key) + "_"
				if variable == "name_": attributes['chunkId_'] = re.sub("'|\"",'',value)
				attributes[variable] = re.sub("'|\"",'',value)
		return attributes
| darshan95/Shift-Reduce-Chunk-Expander | src/ssf_reader.py | Python | mit | 4,608 |
# frozen_string_literal: true
module Faker
  # Backwards-compatibility shim: every generator simply delegates to
  # Faker::Games::Witcher, and Gem::Deprecate marks each delegating method
  # as deprecated (scheduled for removal per the month/year pairs below).
  module Witcher
    class << self
      extend Gem::Deprecate

      def character
        Faker::Games::Witcher.character
      end

      def witcher
        Faker::Games::Witcher.witcher
      end

      def school
        Faker::Games::Witcher.school
      end

      def location
        Faker::Games::Witcher.location
      end

      def quote
        Faker::Games::Witcher.quote
      end

      def monster
        Faker::Games::Witcher.monster
      end

      # deprecate(old_name, replacement_hint, year, month) — emits a warning
      # on every call, pointing users at the Games namespace.
      deprecate :character, 'Faker::Games::Witcher.character', 2018, 10
      deprecate :witcher, 'Faker::Games::Witcher.witcher', 2018, 10
      deprecate :school, 'Faker::Games::Witcher.school', 2018, 10
      deprecate :location, 'Faker::Games::Witcher.location', 2018, 10
      deprecate :quote, 'Faker::Games::Witcher.quote', 2018, 10
      deprecate :monster, 'Faker::Games::Witcher.monster', 2018, 10
    end
  end
end
| Dakurei/faker | lib/faker/default/witcher.rb | Ruby | mit | 936 |
# Packaging script for the "guestbook" Pyramid application.
# NOTE: reads README.md / CHANGES.md at import time, so both files must sit
# next to this script.
import os
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, 'README.md')) as f:
    README = f.read()
with open(os.path.join(here, 'CHANGES.md')) as f:
    CHANGES = f.read()
# Runtime dependencies (Pyramid web stack + SQLAlchemy persistence).
requires = [
    'pyramid',
    'pyramid_jinja2',
    'pyramid_debugtoolbar',
    'pyramid_tm',
    'SQLAlchemy',
    'transaction',
    'zope.sqlalchemy',
    'waitress',
    ]
setup(name='guestbook',
      version='0.1',
      description='guestbook',
      long_description=README + '\n\n' + CHANGES,
      classifiers=[
        "Programming Language :: Python :: 3",
        "Framework :: Pyramid",
        "Topic :: Internet :: WWW/HTTP",
        "Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
        ],
      author='',
      author_email='',
      url='',
      keywords='web wsgi bfg pylons pyramid',
      packages=find_packages(),
      include_package_data=True,
      zip_safe=False,
      test_suite='guestbook',
      install_requires=requires,
      # Entry points: WSGI app factory plus the DB bootstrap console script.
      entry_points="""\
      [paste.app_factory]
      main = guestbook:main
      [console_scripts]
      initialize_guestbook_db = guestbook.scripts.initializedb:main
      """,
      )
| necaris/embedded-js-in-python-example | setup.py | Python | mit | 1,216 |
// Copyright (c) 2014 AlphaSierraPapa for the SharpDevelop Team
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of this
// software and associated documentation files (the "Software"), to deal in the Software
// without restriction, including without limitation the rights to use, copy, modify, merge,
// publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons
// to whom the Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all copies or
// substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
// INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
// PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE
// FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
// DEALINGS IN THE SOFTWARE.
using System;
using System.Diagnostics;
using System.Windows;
using System.Windows.Input;
namespace WPFDesign.Designer.Services
{
/// <summary>
/// Base class for mouse gestures that should start dragging only after a minimum drag distance.
/// </summary>
/// <remarks>
/// Subclasses are notified through <see cref="OnDragStarted"/> once the pointer has moved
/// beyond the system-defined minimum drag distance from the initial mouse-down point,
/// which distinguishes a plain click from the beginning of a drag.
/// </remarks>
public abstract class ClickOrDragMouseGesture : MouseGestureBase
{
	// Position of the initial mouse-down, relative to positionRelativeTo.
	protected Point startPoint;

	// True once the drag threshold has been exceeded and OnDragStarted fired.
	protected bool hasDragStarted;

	// Coordinate reference element; subclasses must set it before OnStarted runs
	// (enforced by the Debug.Assert below).
	protected IInputElement positionRelativeTo;

	protected sealed override void OnStarted(MouseButtonEventArgs e)
	{
		Debug.Assert(positionRelativeTo != null);
		hasDragStarted = false;
		startPoint = e.GetPosition(positionRelativeTo);
	}

	protected override void OnMouseMove(object sender, MouseEventArgs e)
	{
		if (hasDragStarted)
			return;

		// Compare against the user's system drag threshold. (The previous
		// revision also declared an unused private MinimumDragDistance
		// constant; that dead field has been removed.)
		Vector delta = e.GetPosition(positionRelativeTo) - startPoint;
		if (Math.Abs(delta.X) >= SystemParameters.MinimumHorizontalDragDistance
		    || Math.Abs(delta.Y) >= SystemParameters.MinimumVerticalDragDistance)
		{
			hasDragStarted = true;
			OnDragStarted(e);
		}
	}

	protected override void OnStopped()
	{
		hasDragStarted = false;
	}

	/// <summary>
	/// Called once, when the pointer first exceeds the minimum drag distance.
	/// </summary>
	protected virtual void OnDragStarted(MouseEventArgs e)
	{
	}
}
} | iodes/WPFDesign | WPFDesign.Designer/Services/ClickOrDragMouseGesture.cs | C# | mit | 2,615 |
using System.Collections.Immutable;
using System.Linq;
using Microsoft.CodeAnalysis;
using Microsoft.CodeAnalysis.CSharp;
using Microsoft.CodeAnalysis.CSharp.Syntax;
using MoreLinq;
namespace WarHub.ArmouryModel.Source.CodeGeneration
{
    /// <summary>
    /// Base record describing one child property of a code-generation descriptor:
    /// wraps the Roslyn <see cref="IPropertySymbol"/> together with its XML
    /// serialization info and lazily-computed syntax helpers (identifiers in
    /// both Pascal and camel case, and the declared type).
    /// </summary>
    internal abstract class CoreChildBase
    {
        protected CoreChildBase(
            IPropertySymbol symbol,
            bool isInherited,
            ImmutableArray<AttributeListSyntax> xmlAttributeLists,
            XmlResolvedInfo xml)
        {
            Symbol = symbol;
            IsInherited = isInherited;
            XmlAttributeLists = xmlAttributeLists;
            Xml = xml;
        }
        public IPropertySymbol Symbol { get; }
        /// <summary>
        /// Gets true if this property was declared in descriptor's type, not inherited.
        /// </summary>
        public bool IsDeclared => !IsInherited;
        /// <summary>
        /// Gets true if this property was inherited, not declared in descriptor's type.
        /// </summary>
        public bool IsInherited { get; }
        public ImmutableArray<AttributeListSyntax> XmlAttributeLists { get; }
        public XmlResolvedInfo Xml { get; }
        // Lazily resolved from the single declaring syntax reference; assumes
        // exactly one declaration site (Single() throws otherwise).
        private PropertyDeclarationSyntax DeclarationSyntax =>
            declarationSyntax ??=
            (PropertyDeclarationSyntax)Symbol.DeclaringSyntaxReferences.Single().GetSyntax();
        private PropertyDeclarationSyntax? declarationSyntax;
        /// <summary>
        /// PascalCase (original) identifier
        /// </summary>
        public SyntaxToken Identifier =>
            identifier ??= DeclarationSyntax.Identifier.WithoutTrivia();
        private SyntaxToken? identifier;
        /// <summary>
        /// PascalCase (original) <see cref="IdentifierNameSyntax"/>.
        /// </summary>
        public IdentifierNameSyntax IdentifierName =>
            identifierName ??= SyntaxFactory.IdentifierName(Identifier);
        private IdentifierNameSyntax? identifierName;
        // Declared type syntax, straight from the property declaration.
        public TypeSyntax Type => type ??= DeclarationSyntax.Type;
        private TypeSyntax? type;
        // camelCase variants of the identifier, cached like the ones above.
        public SyntaxToken CamelCaseIdentifier =>
            camelCaseIdentifier ??= SyntaxFactory.Identifier(Identifier.ValueText.ToLowerFirstLetter());
        private SyntaxToken? camelCaseIdentifier;
        public IdentifierNameSyntax CamelCaseIdentifierName =>
            camelCaseIdentifierName ??= SyntaxFactory.IdentifierName(CamelCaseIdentifier);
        private IdentifierNameSyntax? camelCaseIdentifierName;
    }
}
| WarHub/wham | src/WarHub.ArmouryModel.Source.CodeGeneration/CoreChildBase.cs | C# | mit | 2,521 |
using BNogent.StateMachine;
namespace BNogent.StateMachineSamples
{
    /// <summary>
    /// Sample: builds a three-state machine (A -> B -> C) driven by two
    /// different trigger methods, with a guard condition on the second
    /// transition, and emits the generated class plus its graph.
    /// </summary>
    class MultipleMethodsSample
    {
        public void Build()
        {
            StateMachineBuilder smb = new StateMachineBuilder() { GenerateGraph = true };
            State a = new State("A");
            State b = new State("B");
            State c = new State("C");
            // Two distinct trigger methods; M2 carries a parameter used by the guard.
            Method m1 = new Method("public void M1()", "m1");
            Method m2 = new Method("public void M2(int q)", "m2");
            smb.Add(new Transition(m1, a, b));
            // B -> C only fires when q == 5.
            smb.Add(new Transition(m2, b, c, new Condition("q==5")));
            // Start state A; generate into the given namespace/class template.
            smb.Build(a, "StateMachineSamples.Samples",
                "public class {0}", "MultipleMethods");
        }
    }
}
| bnogent/state-machine | StateMachineSamples/MultipleMethodsSample.cs | C# | mit | 751 |
class Solution {
public:
    /**
     * Search `target` in a rotated sorted array that may contain duplicates
     * (LeetCode 81). Returns true iff the value is present.
     *
     * Strategy: skip duplicate values shared by both ends (they make the
     * pivot ambiguous), binary-search for the rotation pivot (index of the
     * minimum), then run a plain binary search in whichever sorted half can
     * contain the target. O(log n) typical, O(n) worst case on duplicates.
     *
     * Fix over the previous revision: the empty-input guard now runs before
     * `nums.size() - 1` is computed, avoiding the size_t wrap-around being
     * narrowed into `r`.
     */
    bool search(vector<int>& nums, int target) {
        if (nums.empty()) return false;
        int l = 0, r = (int)nums.size() - 1;
        // Shrink from the left while both ends hold the same value.
        while (l != r && nums[l] == nums[r]) l++;
        // Locate the pivot (smallest element); bail out early on a hit.
        while (l < r) {
            int mid = (l + r) >> 1;
            if (nums[mid] == target) return true;
            else if (nums[mid] > nums[r]) l = mid + 1;
            else r = mid;
        }
        // Pick the sorted half whose value range can contain target.
        if (l != 0 && target <= nums[l - 1] && target >= nums[0]) {
            r = l - 1;
            l = 0;
        } else {
            r = (int)nums.size() - 1;
        }
        // Standard binary search inside the chosen half.
        while (l < r) {
            int mid = (l + r) >> 1;
            if (nums[mid] == target) return true;
            else if (nums[mid] > target) r = mid;
            else l = mid + 1;
        }
        return nums[l] == target;
    }
};
| w181496/OJ | LeetCode/81.Search_in_Rotated_Sorted_Array_II.cpp | C++ | mit | 875 |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using SassyStudio.Editor;
namespace SassyStudio.Compiler.Parsing
{
    /// <summary>
    /// Parse item for a Sass variable usage (e.g. <c>$foo</c> on the
    /// right-hand side of a declaration). Resolves back to the matching
    /// <see cref="VariableDefinition"/> via reverse search.
    /// </summary>
    public class VariableReference : ComplexItem, IResolvableToken
    {
        // The parsed variable name; null until Parse succeeds.
        public VariableName Name { get; protected set; }
        public override bool Parse(IItemFactory itemFactory, ITextProvider text, ITokenStream stream)
        {
            // Classified as a reference (as opposed to a definition site).
            var name = new VariableName(SassClassifierType.VariableReference);
            if (name.Parse(itemFactory, text, stream))
            {
                Name = name;
                Children.Add(name);
            }
            return Children.Count > 0;
        }
        /// <summary>
        /// Walks backwards through the parse tree for the definition whose
        /// name matches this reference; null when the variable is undefined.
        /// </summary>
        public ParseItem GetSourceToken()
        {
            return ReverseSearch.Find<VariableDefinition>(this, x => x.Name.Equals(Name));
        }
    }
}
| darrenkopp/SassyStudio | SassyStudio.Compiler/Parsing/VariableReference.cs | C# | mit | 838 |
package mcjty.rftools.blocks.teleporter;
import mcjty.rftools.network.PacketListFromServer;
import io.netty.buffer.ByteBuf;
import java.util.List;
/**
 * Server-to-client packet carrying the list of teleport destinations for a
 * matter receiver GUI. All (de)serialization is handled by the generic
 * {@link PacketListFromServer} base; this subclass only supplies how a
 * single list item is reconstructed from the byte stream.
 */
public class PacketReceiversReady extends PacketListFromServer<PacketReceiversReady,TeleportDestinationClientInfo> {
    /** No-arg constructor required by the packet framework for deserialization. */
    public PacketReceiversReady() {
    }
    public PacketReceiversReady(int x, int y, int z, String command, List<TeleportDestinationClientInfo> list) {
        super(x, y, z, command, list);
    }
    @Override
    protected TeleportDestinationClientInfo createItem(ByteBuf buf) {
        // Each list entry deserializes itself from the buffer.
        return new TeleportDestinationClientInfo(buf);
    }
}
| Adaptivity/RFTools | src/main/java/mcjty/rftools/blocks/teleporter/PacketReceiversReady.java | Java | mit | 617 |
"use strict";
const removeDiacritics = require('diacritics').remove;
const request = require('request');
//const pSegCases = require('../test/promiseSwitchCase.js');
var utils = {
/**
* Resolve all promises in Object via for ... in loop
* @param {object} obj - The object containing function properties => Switch cases that resolve
*/
switchCasePromiseResolver: function switchCasePromiseResolver (obj, event) {
//Promise Resolver For...In Loop - returns out to Var as Array
let i = -1;
var promisesArr = [];
//Loop through the segmented Switch Cases (test is an obj with each Switch Case as a property)
for (var ligneFn in obj) {
//console.log(ligneFn);
i++;
//resolve each switch case with the event.message.text and return resolve
promisesArr[i] = Promise.resolve( obj[ligneFn](event) );
}
/**
* Returns newly filled in Arr from loop
* @return {array} - returns array with promise status (resolve, false) in array
*/
return promisesArr;
},
//////////////////
// Text Cleaners
//////////////////
cleanseText: function cleanseText (text) {
return removeDiacritics(
text.toLowerCase()
.replace(/\s\s+|[.-]/g, function (match) { return (match === "-" || " " ? " " : "") }
).trim())
//([\uD800-\uDBFF][\uDC00-\uDFFF]) to remove emojis
},
//////////////////////////////////////////
//Format Time before SearchStop Function
/////////////////////////////////////////
timeFormatter (time) {
var timeArr = time.split('T');
var finalTime = timeArr[1].slice(0,2) + ':' + timeArr[1].slice(2,4);
return finalTime;
},
//Random Number between 2 values
randNum (min, max) {
min = Math.ceil(min);
max = Math.floor(max);
return Math.floor(Math.random() * (max - min)) + min;
},
//Whitelist them domains bruh
setWhiteList(domains) {
if (!Array.isArray(domains)) {
throw "Error ... domains param MUST be an array. You passed in: " + typeof domains;
} else {
request(
{
method: 'POST',
uri: 'https://graph.facebook.com/v2.6/me/messenger_profile?access_token=' + process.env.PAGE_ACCESS_TOKEN,
headers: {
'content-type': 'application/json'
},
body: {
whitelisted_domains: domains
},
json: true
}, function (error, response, body) {
if (!error) {
request(
{
method: 'GET',
uri: 'https://graph.facebook.com/v2.6/me/messenger_profile?fields=whitelisted_domains&access_token=' + process.env.PAGE_ACCESS_TOKEN
}, function (error, response, body) {
if (!error) {
console.log('Displaying whitelisted sites:');
console.log(body);
} else if (error) {
console.error (error);
}
})
} else if (error) {
console.error(error);
}
}
);
};
}
}
module.exports = utils; | W3stside/glitch | lib/utils.js | JavaScript | mit | 3,112 |
var my = require('my');
var maxHeight = 300, maxWidth = 300;
// Render the page for one topic: heading, a tab strip built from the topic's
// friends, and a thumbnail gallery of the first friend's entities.
// NOTE(review): assumes data.topic.friends is a non-empty array whose first
// entry has an `entities` list — confirm against the caller.
exports.view = function(data) {
console.log("view: m.js");
console.log(data);
var topic = data.topic;
return(
my.page({title: 'Hello World', scripts:["http://code.jquery.com/jquery-latest.js"]},
/*my.div({id: 'myDiv', style: {height: '800px', border: 'red 1px solid'}},
'Actor ' + data.name
),*/
my.h1(topic.name),
tabs(topic.friends),
gallery2(topic.friends[0].entities, '100%', '300px', '200px', '270px', '30px', '30px')
)
)}
/**
 * Build a container div holding one paragraph per friend.
 * @param {Array} friends - entries rendered via my.p
 * @returns {object} the container element (a my.div node)
 */
function tabs(friends)
{
    // Renamed so the local no longer shadows the function itself.
    var container = my.div({});
    for (var i = 0; i < friends.length; i++)
        container.children.push(my.p(friends[i]));
    // BUG FIX: the previous revision returned `gallery` — an unrelated
    // function reference — instead of the element it had just built.
    return container;
}
/**
 * Build a centered, fixed-size gallery div containing one <img> per URL.
 *
 * @param {string[]} imgUrls    image sources
 * @param {string} width        CSS width of the gallery container
 * @param {string} height       CSS height of the gallery container
 * @param {string} thumbWidth   max CSS width of each thumbnail
 * @param {string} thumbHeight  max CSS height of each thumbnail
 * @param {string} hGap         horizontal gap (margin-left per thumbnail)
 * @param {string} vGap         vertical gap (margin-top per thumbnail)
 * @returns {object} a my.div element whose children are my.img nodes
 */
function gallery(imgUrls, width, height, thumbWidth, thumbHeight, hGap, vGap) {
    var containerStyle = {
        margin: 'auto',
        width: width,
        height: height
    };
    var thumbStyle = {
        'margin-top': vGap,
        'margin-left': hGap,
        'max-width': thumbWidth,
        'max-height': thumbHeight,
        '-moz-box-shadow': '1px 1px 6px #999',
        '-webkit-box-shadow': '1px 1px 6px #999'
    };
    var root = my.div({style: containerStyle});
    imgUrls.forEach(function (src) {
        root.children.push(my.img({style: thumbStyle, src: src}));
    });
    return root;
}
// Inline-block gallery variant: each thumbnail sits centered inside its own
// fixed-size cell, and sources are Freebase image-service URLs built from
// each entity's id (module-level maxHeight/maxWidth cap the server-side
// resize). NOTE(review): despite the name, `imgUrls` is a list of entity
// objects with an `.id` field, not URL strings — confirm with callers.
function gallery2(imgUrls, width, height, thumbWidth, thumbHeight, hGap, vGap) {
  var galleryStyle = {
    display: 'inline-block',
    width: width,
    height: height
  };
  // Cell wrapper: fixed footprint + centered content.
  var thumbDivStyle = {
    display: 'inline-block',
    'margin-top': vGap,
    'margin-left': hGap,
    'width': thumbWidth,
    'height': thumbHeight,
    'text-align': 'center'
  };
  var thumbStyle = {
    'max-width': thumbWidth,
    'max-height': thumbHeight,
    '-moz-box-shadow': '1px 1px 6px #999',
    '-webkit-box-shadow': '1px 1px 6px #999'
  };
  var gallery = my.div({style: galleryStyle});
  for (var i = 0; i < imgUrls.length; i++)
  {
    // Freebase thumbnail service, fit mode, capped by the module constants.
    var imgUrl = "http://img.freebase.com/api/trans/image_thumb"+imgUrls[i].id+"?mode=fit&maxheight="+maxHeight+"&maxwidth="+maxWidth;
    //console.log(imgUrls[i].id);
    gallery.children.push(
      my.div({style: thumbDivStyle},
        my.img({style: thumbStyle, src: imgUrl}
        )
      ));
  }
  return gallery;
}
| greyvugrin/openwhyd | whydJS/app/views/public/m.js | JavaScript | mit | 2,229 |
using System;
using System.Runtime.InteropServices;
namespace NAudio.CoreAudioApi.Interfaces
{
[Guid("C8ADBD64-E71E-48a0-A4DE-185C395CD317"),
InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
internal interface IAudioCaptureClient
{
/*HRESULT GetBuffer(
BYTE** ppData,
UINT32* pNumFramesToRead,
DWORD* pdwFlags,
UINT64* pu64DevicePosition,
UINT64* pu64QPCPosition
);*/
int GetBuffer(
out IntPtr dataBuffer,
out int numFramesToRead,
out AudioClientBufferFlags bufferFlags,
out long devicePosition,
out long qpcPosition);
int ReleaseBuffer(int numFramesRead);
int GetNextPacketSize(out int numFramesInNextPacket);
}
} | Aragas/VKPlayer | VKPlayer/NAudio/CoreAudioApi/Interfaces/IAudioCaptureClient.cs | C# | mit | 810 |
"""
atomorder/parse_args.py
Parses command line arguments and overwrites setting defaults
"""
from . import settings
import argparse
import sys
description = ""
epilog = ""
parser = argparse.ArgumentParser(
description = description,
formatter_class = argparse.RawDescriptionHelpFormatter,
epilog = epilog)
parser = argparse.ArgumentParser(description='Fit probability density functions to data-files')
parser.add_argument('-r', '--reactants', help='Reactant structures in a coordinate file format.', action='store', type=str, nargs='+')
parser.add_argument('-p', '--products', help='Product structures in a coordinate file format.', action='store', type=str, nargs='+')
parser.add_argument('--print-level', help='Print-level - 0: quiet, 1: results and errors, 2: +warnings, 3: +progress, 4: excess, 5: EXTREME',
action='store', choices = range(0,6), default=1, type=int)
parser.add_argument('-f', '--format', help='File format', type=str, action='store', default='guess', choices=["guess","xyz","pdb"])
parser.add_argument('-m', '--method', help='Method to use.\n \
rotate: Ignore bond order, align a single reactant and product molecule and match all atoms\n \
no-bond: Atom matching by rotation and atomic similarity\n \
full: Atom matching by rotation and bond similarity\n \
info: Information about molecule sybyl atom types, bond types and conjugated sub systems',
choices = ['rotate', 'full', 'info', 'no-bond'], action='store', default='full')
parser.add_argument('-o', '--output', help='Given a filename, output the reordered product in xyz format instead of printing to stdout', action='store', type=str, default=sys.stdout)
parser.add_argument('--atomic-sybyl-weight', action='store', default=1, type=float)
parser.add_argument('--bond-weight', action='store', default=1, type=float)
# TODO output to folder
# TODO output atom mapping oneline, save reordered products
# TODO allow possibility to give pickle with reaction object
# TODO output sybyl
# TODO batch reactions
# TODO output aromatic/conjugated subgroups
args = parser.parse_args()
# override setting defaults
settings.update(args)
| larsbratholm/atomorder | atomorder/parse_args.py | Python | mit | 2,393 |
using System.Threading.Tasks;
namespace AppZen.Mvvm.Core.Interfaces
{
    /// <summary>
    /// Creates and closes views bound to view models in the MVVM navigation layer.
    /// </summary>
    public interface IViewFactory
    {
        /// <summary>Closes the view identified by <paramref name="id"/>.</summary>
        void CloseView(string id);
        /// <summary>
        /// Shows the view for view model <typeparamref name="T"/>, passing navigation
        /// arguments as the properties of an anonymous object.
        /// </summary>
        Task ShowViewModel<T>(object argumentsAsAnonymousType) where T : IViewModel;
        /// <summary>Shows the view for view model <typeparamref name="T"/> without arguments.</summary>
        Task ShowViewModel<T>() where T : IViewModel;
    }
}
<?php
/**
 * i4Web includes
 *
 * Each library file below is resolved through locate_template() and loaded
 * exactly once, in order (utils/init must load before the code that uses them).
 */
$i4web_includes = array(
    '/lib/utils.php',               // Utility functions
    '/lib/init.php',                // Initial theme setup and constants
    '/lib/wrapper.php',             // Theme wrapper class
    '/lib/sidebar.php',             // Sidebar class
    '/lib/config.php',              // Configuration
    '/lib/activation.php',          // Theme activation
    '/lib/titles.php',              // Page titles
    '/lib/cleanup.php',             // Cleanup
    '/lib/nav.php',                 // Custom nav modifications
    '/lib/gallery.php',             // Custom [gallery] modifications
    '/lib/comments.php',            // Custom comments modifications
    '/lib/relative-urls.php',       // Root relative URLs
    '/lib/widgets.php',             // Sidebars and widgets
    '/lib/scripts.php',             // Scripts and stylesheets
    '/lib/custom.php',              // Custom functions
    '/lib/achilles-customizer.php', // Customize the Template
);
foreach ($i4web_includes as $i4web_file) {
    require_once locate_template($i4web_file);
}
unset($i4web_includes, $i4web_file); // keep the global namespace clean, as before
| i4web/dcook | functions.php | PHP | mit | 1,338 |
/*eslint no-console: 1 */
console.warn('You are using the default filter for the fileMeta service. For more information about event filters see https://docs.feathersjs.com/api/events.html#event-filtering'); // eslint-disable-line no-console
module.exports = function(data, connection, hook) { // eslint-disable-line no-unused-vars
return data;
};
| mauricedoepke/docscloud-server | src/services/files/files.filters.js | JavaScript | mit | 350 |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Web;
using System.Web.Mvc;
namespace Famliy.Finance.Common
{
public static class HtmlHelper2
{
/// <summary>
/// 自定义一个@html.Image()
/// </summary>
/// <param name="helper"></param>
/// <param name="src">src属性</param>
/// <param name="alt">alt属性</param>
/// <returns></returns>
public static MvcHtmlString Image(this HtmlHelper helper, string src, string alt)
{
var builder = new TagBuilder("img");
builder.MergeAttribute("src", src);
builder.MergeAttribute("alt", alt);
builder.ToString(TagRenderMode.SelfClosing);
return MvcHtmlString.Create(builder.ToString());
}
}
}
| erikzhouxin/CSharpSolution | TechTester/FamliyFinanceWebSI/App_Api/Famliy.Finance.Common/HtmlHelper2.cs | C# | mit | 876 |
<?php
namespace Handler\Admin\Article\Ad;
if (!defined('IN_PX'))
exit;
use Handler\Admin\AbstractCommon;
use Admin\Helper;
use Tools\Auxi;
use Tools\MsgHelper;
/**
 * Read: returns a paged, sortable ad listing (flexigrid JSON) for the admin UI.
 */
class Read extends AbstractCommon {

    public function processRequest(Array & $context) {
        $this->_pushSetting();
        // Grid defaults for sorting and paging.
        if (!$_POST['sortName'])
            $_POST['sortName'] = 'a.ad_id';
        if (!$_POST['sortOrder'])
            $_POST['sortOrder'] = 'ASC';
        if (!$_POST['page'])
            $_POST['page'] = 1;
        if (!$_POST['rp'])
            $_POST['rp'] = 10;
        $_start = (($_POST['page'] - 1) * $_POST['rp']);
        $_where = '0 = 0';
        $_bindParam = array();
        if (isset($_POST['sltLanguage']) && $_POST['sltLanguage'] != '') {
            // Fix: was "$_where =", which overwrote the "0 = 0" base clause and
            // produced a WHERE condition starting with a dangling "AND".
            $_where .= ' AND a.`ilanguage` = :sltLanguage';
            $_bindParam[':sltLanguage'] = $_POST['sltLanguage'];
        }
        if (isset($_POST['strSearchKeyword']) && $_POST['strSearchKeyword'] != '') {
            $_where .= ' AND (a.`ad_title` LIKE :strSearchKeyword)';
            $_bindParam[':strSearchKeyword'] = '%' . trim($_POST['strSearchKeyword']) . '%';
        }
        if (isset($_POST['type_id']) && $_POST['type_id'] != '') {
            $_where .= ' AND a.`type_id` = :type_id';
            $_bindParam[':type_id'] = $_POST['type_id'];
        }
        $_table = '`#@__@ad` a';
        // Total row count for the pager, then the current page of rows.
        $_total = $this->db->table($_table)->where($_where)->bind($_bindParam)->count();
        //$this->db->debug();
        $_rs = $this->db->select('a.*')
                ->table($_table)
                ->where($_where)
                ->order($_POST['sortName'], $_POST['sortOrder'])
                ->limit($_start, $_POST['rp'])
                ->bind($_bindParam)
                ->findAll();
        $_rsp = array(
            'totalResults' => $_total,
            'rows' => array()
        );
        if ($_total) {
            foreach ($_rs as $m) {
                $_idValue = $m->ad_id;
                $_tempBool = $m->end_date > time() ? 0 : 1; // expiry flag: 0 = active, 1 = expired
                array_push($_rsp['rows'], array(
                    'id' => $_idValue,
                    'cell' => array(
                        $_idValue,
                        $this->setting['aryAd'][intval($m->type_id)],
                        $m->ad_title,
                        Helper::createSmallImg($context['__CDN__'], $context['__ASSETS__'], $m->ad_img, $m->ad_title),
                        $m->ad_url,
                        '<span' . Auxi::getDeepColor(intval($m->is_display)) . '>'
                        . $this->setting['aryBool'][intval($m->is_display)] . '</span>',
                        Auxi::getTime($m->start_date),
                        Auxi::getTime($m->end_date),
                        $m->ad_sort,
                        '<span' . Auxi::getDeepColor($_tempBool) . '>' . $this->setting['aryBool'][$_tempBool] . '</span>'
                    )
                ));
            }
        }
        echo(MsgHelper::json('SUCCESS', '数据返回成功', $_rsp));
    }
}
| MiaoZhua/mzworld-php | core/Handler/Admin/Article/Ad/Read.handler.php | PHP | mit | 3,167 |
/**
 * Home screen service: exposes a logging/broadcast helper and a
 * settings-loading call against the foodtruck API.
 */
export default function HomeService($rootScope, $window, $http) {
  return {
    send: send,
    loadSettings: loadSettings,
  };

  /**
   * Log a message/payload pair. The $rootScope broadcast is intentionally
   * disabled for now.
   */
  function send(msg, data) {
    console.log(msg, data);
    //$rootScope.$broadcast(msg, data);
  }

  /**
   * Fetch foodtruck settings from the API.
   * NOTE(review): a failed request *resolves* with the string
   * "Error loading settings" rather than rejecting — confirm callers expect that.
   */
  function loadSettings() {
    var request = {
      method: 'GET',
      url: 'http://dev.app.com/api/v1/foodtruck',
      headers: {
        'Content-Type': 'application/json',
        'X-Requested-With': 'XMLHttpRequest',
      }
    };
    return $http(request).then(
      function successCallback(response) {
        return response;
      },
      function errorCallback(response) {
        return "Error loading settings";
      }
    );
  }
}
| mariasaavedra/foodtruck-ui | src/app/core/home/home.service.js | JavaScript | mit | 755 |
package com.elderbyte.josc.api;
import com.elderbyte.josc.core.BlobObjectUtils;
import java.time.Instant;
import java.util.Map;
import java.util.Optional;
/**
* Represents a blob object
*/
public interface BlobObject {
    /**
     * Gets the bucket name where this object is stored
     */
    String getBucket();
    /**
     * Gets the object key.
     *
     * The object key is unique inside a bucket.
     * It may contain slashes '/', which are considered as virtual directory notations.
     */
    String getObjectKey();
    /**
     * @deprecated Please switch to getObjectKey()
     */
    default String getObjectName() {
        return getObjectKey();
    }
    /**
     * The blob object size in bytes
     */
    long getLength();
    /**
     * Gets the content type (mime-type) of this object.
     * Empty if the backing store did not record one.
     */
    Optional<String> getContentType();
    /**
     * Gets the objects server side calculated hash.
     * Might not be available.
     */
    Optional<String> getObjectHash();
    /**
     * @deprecated Please switch to getObjectHash()
     */
    default String hash() {
        return getObjectHash().orElse(null);
    }
    /**
     * Last modified / creation date of this object
     */
    Optional<Instant> getLastModified();
    /**
     * Other metadata data
     */
    Map<String,String> getMetaData();
    /**
     * Returns true if this object is actually a directory.
     */
    boolean isDirectory();
    /**
     * Returns the filename of this object.
     * Slashes are interpreted as virtual directory indicators.
     * Delegates to BlobObjectUtils over the object key.
     *
     * @return Returns the last part after the last '/', if no '/' is found returns the input string.
     */
    default String getVirtualFileName(){
        return BlobObjectUtils.extractVirtualFileName(getObjectKey());
    }
    /**
     * Extracts the extension from this object.
     * Only the file name part is considered for extension scanning.
     *
     * @return Returns the extension with the dot, such as '.png'
     */
    default String getVirtualExtension(){
        return BlobObjectUtils.extractVirtualExtensionWithDot(getObjectKey());
    }
}
| ElderByte-/josc | josc-api/src/main/java/com/elderbyte/josc/api/BlobObject.java | Java | mit | 2,141 |
package main
import (
	"errors"

	"github.com/robertkrimen/otto"
)
// evaluateScript runs the JavaScript source src in a fresh otto VM. The script
// is expected to register a callback by calling the global function `eval`
// exactly once; that callback is then invoked with payload and its result is
// returned as a string.
func evaluateScript(src string, payload []FinalInput) (string, error) {
	javaScript := otto.New()

	var evalFunc otto.Value
	javaScript.Set("eval", func(call otto.FunctionCall) otto.Value {
		evalFunc = call.Argument(0)
		return otto.UndefinedValue()
	})

	// Fix: the original ignored the Run error, so a broken script silently
	// produced an undefined callback and a confusing failure later.
	if _, err := javaScript.Run(src); err != nil {
		return "", err
	}
	// Fix: guard against scripts that never call eval(); calling an unset
	// otto.Value would error out with an opaque message.
	if !evalFunc.IsFunction() {
		return "", errors.New("script did not register a callback via eval()")
	}

	arg, err := javaScript.ToValue(payload)
	if err != nil {
		return "", err
	}
	ret, err := evalFunc.Call(otto.NullValue(), arg)
	if err != nil {
		return "", err
	}
	return ret.ToString()
}
| gophergala2016/scattr | evaluate.go | GO | mit | 525 |
import React, { Component } from 'react'
import { connect } from 'react-redux'
import { setSearchTerm } from './actionCreators'
import { Link } from 'react-router'
class Header extends Component {
constructor (props) {
super(props)
this.handleSearchTermChange = this.handleSearchTermChange.bind(this)
}
handleSearchTermChange (event) {
this.props.dispatch(setSearchTerm(event.target.value))
}
render () {
let utilSpace
if (this.props.showSearch) {
utilSpace = <input onChange={this.handleSearchTermChange} value={this.props.searchTerm} type='text' placeholder='Search' />
} else {
utilSpace = (
<h2>
<Link to='/search'>Back</Link>
</h2>
)
}
return (
<header>
<h1>
<Link to='/'>
jordaflix
</Link>
</h1>
{utilSpace}
</header>
)
}
}
// Expose only the search term from the store to this component.
const mapStateToProps = (state) => {
  return {
    searchTerm: state.searchTerm
  }
}

const { func, bool, string } = React.PropTypes

// Fix: handleSearchTermChange is a bound instance method, never a prop passed
// in by a parent — declaring it in propTypes was misleading, so it is removed.
Header.propTypes = {
  dispatch: func,
  showSearch: bool,
  searchTerm: string
}

export default connect(mapStateToProps)(Header)
| joordas/react-fem | js/Header.js | JavaScript | mit | 1,191 |
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Diagnostics;
using System.Globalization;
using System.Threading;
using Microsoft.Xna.Framework;
namespace Gearset.Components.Profiler {
public class Profiler : Gear {
/// <summary>The maximum number of discrete heirarchical levels.</summary>
public const int MaxLevels = 12;
/// <summary>Maximum sample number for each level. </summary>
const int MaxSamples = 2560;
/// <summary>Maximum nest calls for each level.</summary>
const int MaxNestCall = 32;
/// <summary>Maximum display frames.</summary>
const int MaxSampleFrames = 4;
/// <summary>
/// Duration (in frame count) for take snap shot of log.
/// </summary>
const int LogSnapDuration = 120;
// Logs for each frames.
readonly FrameLog[] _logs;
// Stopwatch for measure the time.
readonly Stopwatch _stopwatch = new Stopwatch();
// Marker information array.
readonly List<MarkerInfo> _markers = new List<MarkerInfo>();
// Dictionary that maps from marker name to marker id.
readonly Dictionary<string, int> _markerNameToIdMap = new Dictionary<string, int>();
readonly InternalLabeler _internalLabeler = new InternalLabeler();
readonly Dictionary<string, int> _nameMap = new Dictionary<string, int>();
readonly object _locker = new object();
//Temp Box drawer for performance grid
internal TempBoxDrawer TempBoxDrawer = new TempBoxDrawer();
internal string[] LevelNames = new string[MaxLevels];
// Previous frame log.
FrameLog _prevLog;
// Current log.
FrameLog _curLog;
// Current frame count.
int _frameCount;
// You want to call StartFrame at beginning of Game.Update method.
// But Game.Update gets calls multiple time when game runs slow in fixed time step mode.
// In this case, we should ignore StartFrame call.
// To do this, we just keep tracking of number of StartFrame calls until Draw gets called.
int _updateCount;
int _currentLevel = -1;
public Profiler() : base(GearsetSettings.Instance.ProfilerConfig) {
RefreshSummary = true;
Children.Add(_internalLabeler);
Children.Add(TempBoxDrawer);
_logs = new FrameLog[2];
for (var i = 0; i < _logs.Length; ++i)
_logs[i] = new FrameLog();
GenerateLevelNames();
CreateTimeRuler();
CreatePerformanceGraph();
CreateProfilerSummary();
}
public bool RefreshSummary { get; private set; }
public TimeRuler TimeRuler { get; private set; }
public bool FrameStarted { get; private set; }
public PerformanceGraph PerformanceGraph { get; private set; }
public ProfilerSummary ProfilerSummary { get; private set; }
public ProfilerConfig Config { get { return GearsetSettings.Instance.ProfilerConfig; } }
//Settings the game can use for Profiling scenarios (e.g. to test if CPU/GPU bound)
public bool Sleep { get; set; }
public bool SkipUpdate { get; set; }
/// <summary>
/// Gets/Sets target sample frames.
/// </summary>
public int TargetSampleFrames { get; set; }
internal IEnumerable<MarkerInfo> Markers { get { return _markers; } }
void GenerateLevelNames() {
for (var i = 0; i < MaxLevels; i++)
LevelNames[i] = "Level " + (i + 1);
}
internal string GetLevelNameFromLevelId(int levelId) {
return LevelNames[levelId];
}
void CreateTimeRuler() {
TargetSampleFrames = 1;
var minSize = new Vector2(100, 16);
var size = Vector2.Max(minSize, Config.TimeRulerConfig.Size);
TimeRuler = new TimeRuler(this, Config.TimeRulerConfig, size, TargetSampleFrames);
TimeRuler.Visible = Config.TimeRulerConfig.Visible;
TimeRuler.VisibleChanged += (sender, args) => { Config.TimeRulerConfig.Visible = TimeRuler.Visible; };
TimeRuler.LevelsEnabledChanged += (sender, args) => { Config.TimeRulerConfig.VisibleLevelsFlags = TimeRuler.VisibleLevelsFlags; };
TimeRuler.Dragged += (object sender, ref Vector2 args) => { Config.TimeRulerConfig.Position = TimeRuler.Position; };
TimeRuler.ScaleNob.Dragged += (object sender, ref Vector2 args) => { Config.TimeRulerConfig.Size = TimeRuler.Size; };
}
void CreatePerformanceGraph() {
var minSize = new Vector2(100, 16);
var size = Vector2.Max(minSize, Config.PerformanceGraphConfig.Size);
PerformanceGraph = new PerformanceGraph(this, Config.PerformanceGraphConfig, size);
PerformanceGraph.Visible = Config.PerformanceGraphConfig.Visible;
PerformanceGraph.VisibleChanged += (sender, args) => { Config.PerformanceGraphConfig.Visible = PerformanceGraph.Visible; };
PerformanceGraph.LevelsEnabledChanged += (sender, args) => { Config.PerformanceGraphConfig.VisibleLevelsFlags = PerformanceGraph.VisibleLevelsFlags; };
PerformanceGraph.Dragged += (object sender, ref Vector2 args) => { Config.PerformanceGraphConfig.Position = PerformanceGraph.Position; };
PerformanceGraph.ScaleNob.Dragged += (object sender, ref Vector2 args) => { Config.PerformanceGraphConfig.Size = PerformanceGraph.Size; };
}
void CreateProfilerSummary() {
var minSize = new Vector2(100, 16);
var size = Vector2.Max(minSize, Config.ProfilerSummaryConfig.Size);
ProfilerSummary = new ProfilerSummary(this, Config.ProfilerSummaryConfig, size);
ProfilerSummary.Visible = Config.ProfilerSummaryConfig.Visible;
ProfilerSummary.VisibleChanged += (sender, args) => { Config.ProfilerSummaryConfig.Visible = ProfilerSummary.Visible; };
ProfilerSummary.LevelsEnabledChanged += (sender, args) => { Config.ProfilerSummaryConfig.VisibleLevelsFlags = ProfilerSummary.VisibleLevelsFlags; };
ProfilerSummary.Dragged += (object sender, ref Vector2 args) => { Config.ProfilerSummaryConfig.Position = ProfilerSummary.Position; };
ProfilerSummary.ScaleNob.Dragged += (object sender, ref Vector2 args) => { Config.ProfilerSummaryConfig.Size = ProfilerSummary.Size; };
}
public void StartFrame() {
FrameStarted = true; //lazy fix to stop crashing
lock (_locker) {
RefreshSummary = false;
// We skip reset frame when this method gets called multiple times.
var count = Interlocked.Increment(ref _updateCount);
if (Visible && (1 < count && count < MaxSampleFrames))
return;
// Update current frame log.
_prevLog = _logs[_frameCount++ & 0x1];
_curLog = _logs[_frameCount & 0x1];
var endFrameTime = (float)_stopwatch.Elapsed.TotalMilliseconds;
// Update marker and create a log.
for (var levelIdx = 0; levelIdx < _prevLog.Levels.Length; ++levelIdx) {
var prevLevel = _prevLog.Levels[levelIdx];
var nextLevel = _curLog.Levels[levelIdx];
// Re-open marker that didn't get called EndMark in previous frame.
for (var nest = 0; nest < prevLevel.NestCount; ++nest) {
var markerIdx = prevLevel.MarkerNests[nest];
prevLevel.Markers[markerIdx].EndTime = endFrameTime;
nextLevel.MarkerNests[nest] = nest;
nextLevel.Markers[nest].MarkerId = prevLevel.Markers[markerIdx].MarkerId;
nextLevel.Markers[nest].BeginTime = 0;
nextLevel.Markers[nest].EndTime = -1;
nextLevel.Markers[nest].Color = prevLevel.Markers[markerIdx].Color;
}
// Update marker log.
for (var markerIdx = 0; markerIdx < prevLevel.MarkCount; ++markerIdx) {
var duration = prevLevel.Markers[markerIdx].EndTime - prevLevel.Markers[markerIdx].BeginTime;
var markerId = prevLevel.Markers[markerIdx].MarkerId;
var m = _markers[markerId];
m.Logs[levelIdx].Color = prevLevel.Markers[markerIdx].Color;
if (!m.Logs[levelIdx].Initialized) {
// First frame process.
m.Logs[levelIdx].Min = duration;
m.Logs[levelIdx].Max = duration;
m.Logs[levelIdx].Avg = duration;
m.Logs[levelIdx].Initialized = true;
}
else {
// Process after first frame.
m.Logs[levelIdx].Min = Math.Min(m.Logs[levelIdx].Min, duration);
m.Logs[levelIdx].Max = Math.Min(m.Logs[levelIdx].Max, duration);
m.Logs[levelIdx].Avg += duration;
m.Logs[levelIdx].Avg *= 0.5f;
if (m.Logs[levelIdx].Samples++ >= LogSnapDuration) {
RefreshSummary = true;
//m.Logs[levelIdx].SnapMin = m.Logs[levelIdx].Min;
//m.Logs[levelIdx].SnapMax = m.Logs[levelIdx].Max;
m.Logs[levelIdx].SnapAvg = m.Logs[levelIdx].Avg;
m.Logs[levelIdx].Samples = 0;
}
}
}
nextLevel.MarkCount = prevLevel.NestCount;
nextLevel.NestCount = prevLevel.NestCount;
}
// Start measuring.
_stopwatch.Reset();
_stopwatch.Start();
}
}
public void BeginMark(string markerName, Color color) {
lock (_locker) {
//Look up the name in map or create a new level if this is a new name
int levelIndex;
if (_nameMap.ContainsKey(markerName)) {
levelIndex = _nameMap[markerName];
}
else {
_currentLevel++;
levelIndex = _currentLevel;
_nameMap[markerName] = levelIndex;
}
BeginMark(levelIndex, markerName, color);
}
}
void BeginMark(int levelIndex, string markerName, Color color) {
if (levelIndex < 0 || levelIndex >= MaxLevels)
throw new ArgumentOutOfRangeException("levelIndex");
var level = _curLog.Levels[levelIndex];
if (level.MarkCount >= MaxSamples)
//throw new OverflowException("Exceeded sample count.\n" + "Either set larger number to TimeRuler.MaxSmpale or" + "lower sample count.");
level.MarkCount = 0; //lazy fix to prevent crashing when minimized since the game runs at light speed when minimized.
if (level.NestCount >= MaxNestCall)
throw new OverflowException("Exceeded nest count.\n" + "Either set larget number to TimeRuler.MaxNestCall or" + "lower nest calls.");
// Gets registered marker.
int markerId;
if (!_markerNameToIdMap.TryGetValue(markerName, out markerId)) {
// Register this if this marker is not registered.
markerId = _markers.Count;
_markerNameToIdMap.Add(markerName, markerId);
_markers.Add(new MarkerInfo(markerName));
}
// Start measuring.
level.MarkerNests[level.NestCount++] = level.MarkCount;
// Fill marker parameters.
level.Markers[level.MarkCount].MarkerId = markerId;
level.Markers[level.MarkCount].Color = color;
level.Markers[level.MarkCount].BeginTime = (float)_stopwatch.Elapsed.TotalMilliseconds;
level.Markers[level.MarkCount].EndTime = -1;
level.MarkCount++;
}
public void EndMark(string markerName) {
lock (_locker) {
int levelIndex;
if (_nameMap.ContainsKey(markerName)) {
levelIndex = _nameMap[markerName];
}
else {
//End called before Begin throw!
throw new InvalidOperationException("EndMark could not find name: " + markerName + ". Ensure you call BeginMark first.");
}
var nestLevels = EndMark(levelIndex, markerName);
if (nestLevels == 0) {
_nameMap.Remove(markerName);
_currentLevel--;
}
}
}
int EndMark(int levelIndex, string markerName) {
if (levelIndex < 0 || levelIndex >= MaxLevels)
throw new ArgumentOutOfRangeException("levelIndex");
var level = _curLog.Levels[levelIndex];
if (level.NestCount <= 0)
throw new InvalidOperationException("Call BeingMark method before call EndMark method.");
int markerId;
if (!_markerNameToIdMap.TryGetValue(markerName, out markerId))
throw new InvalidOperationException(String.Format("Maker '{0}' is not registered." + "Make sure you specifed same name as you used for BeginMark" + " method.", markerName));
var markerIdx = level.MarkerNests[--level.NestCount];
if (level.Markers[markerIdx].MarkerId != markerId)
throw new InvalidOperationException("Incorrect call order of BeginMark/EndMark method." + "You call it like BeginMark(A), BeginMark(B), EndMark(B), EndMark(A)" + " But you can't call it like " + "BeginMark(A), BeginMark(B), EndMark(A), EndMark(B).");
level.Markers[markerIdx].EndTime = (float)_stopwatch.Elapsed.TotalMilliseconds;
return level.NestCount;
}
/// <summary>
/// Get average time of given level index and marker name.
/// </summary>
/// <param name="levelIndex">Index of level</param>
/// <param name="markerName">name of marker</param>
/// <returns>average spending time in ms.</returns>
public float GetAverageTimeInMilliseconds(int levelIndex, string markerName) {
if (levelIndex < 0 || levelIndex >= MaxLevels)
throw new ArgumentOutOfRangeException("levelIndex");
float result = 0;
int markerId;
if (_markerNameToIdMap.TryGetValue(markerName, out markerId))
result = _markers[markerId].Logs[levelIndex].Avg;
return result;
}
public void ResetMarkerLog() {
lock (_locker) {
foreach (var markerInfo in _markers) {
for (var i = 0; i < markerInfo.Logs.Length; ++i) {
markerInfo.Logs[i].Initialized = false;
markerInfo.Logs[i].SnapAvg = 0;
markerInfo.Logs[i].Min = 0;
markerInfo.Logs[i].Max = 0;
markerInfo.Logs[i].Avg = 0;
markerInfo.Logs[i].Samples = 0;
}
}
}
}
public bool DoUpdate() {
//If the sleep has no effect, I must be GPU bound.
//If skipping Update speeds things up, I must be CPU bound.
//If skipping Update has no effect but sleeping does slow things down, the two must be evenly balanced.
if (Sleep)
Thread.Sleep(1);
return !SkipUpdate;
}
public override void Draw(GameTime gameTime) {
// Reset update count.
Interlocked.Exchange(ref _updateCount, 0);
TimeRuler.Draw(_prevLog);
PerformanceGraph.Draw(_internalLabeler, _prevLog);
ProfilerSummary.Draw(_internalLabeler, _prevLog);
}
public sealed class LevelItem : IComparable<LevelItem>, INotifyPropertyChanged {
Boolean _enabled = true;
public LevelItem(int levelId) {
LevelId = levelId;
}
public int LevelId { get; private set; }
public String Name { get; set; }
public Boolean Enabled {
get { return _enabled; }
set {
_enabled = value;
OnPropertyChanged("Enabled");
}
}
public int CompareTo(LevelItem other) {
return String.Compare(Name, other.Name, CultureInfo.InvariantCulture, CompareOptions.IgnoreCase);
}
public event PropertyChangedEventHandler PropertyChanged;
void OnPropertyChanged(string p) {
if (PropertyChanged != null)
PropertyChanged(this, new PropertyChangedEventArgs(p));
}
}
/// <summary>
/// Marker structure.
/// </summary>
internal struct Marker {
public int MarkerId;
public float BeginTime;
public float EndTime;
public Color Color;
}
/// <summary>
/// Collection of markers.
/// </summary>
internal sealed class MarkerCollection {
// Marker collection.
public readonly Marker[] Markers = new Marker[MaxSamples];
// Marker nest information.
public readonly int[] MarkerNests = new int[MaxNestCall];
public int MarkCount;
public int NestCount;
}
/// <summary>
/// Frame logging information.
/// </summary>
internal sealed class FrameLog {
public readonly MarkerCollection[] Levels;
public FrameLog() {
// Initialize markers.
Levels = new MarkerCollection[MaxLevels];
for (var i = 0; i < MaxLevels; ++i)
Levels[i] = new MarkerCollection();
}
}
/// <summary>
/// Marker information
/// </summary>
internal sealed class MarkerInfo {
// Name of marker.
public readonly string Name;
// Marker log.
public readonly MarkerLog[] Logs = new MarkerLog[MaxLevels];
public MarkerInfo(string name) {
Name = name;
}
}
/// <summary>
/// Marker log information.
/// </summary>
public struct MarkerLog {
public float Min;
public float Max;
public float Avg;
public int Samples;
public bool Initialized;
public float SnapAvg { get; set; }
public Color Color { get; set; }
public string Name { get; set; }
public string Level { get; set; }
}
/// <summary>
/// Marker log information.
/// </summary>
public struct TimingSummaryItem {
public float SnapAvg { get; set; }
public string Color { get; set; }
public string Name { get; set; }
public string Level { get; set; }
}
}
}
| bartwe/Gearset | Gearset/Components/Profiler/Profiler.cs | C# | mit | 19,770 |
package ch.heigvd.amt.mvcdemo.rest.resources;
import ch.heigvd.amt.mvcdemo.model.entities.Sector;
import ch.heigvd.amt.mvcdemo.rest.dto.SectorDTO;
import ch.heigvd.amt.mvcdemo.services.dao.BusinessDomainEntityNotFoundException;
import ch.heigvd.amt.mvcdemo.services.dao.SectorsDAOLocal;
import java.net.URI;
import java.util.ArrayList;
import java.util.List;
import javax.ejb.EJB;
import javax.ejb.Stateless;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.ResponseBuilder;
import javax.ws.rs.core.UriInfo;
/**
*
* @author Olivier Liechti (olivier.liechti@heig-vd.ch)
*/
@Stateless
@Path("/sectors")
public class SectorsResource {
    @Context
    UriInfo uriInfo;
    @EJB
    private SectorsDAOLocal sectorsDAO;
    /**
     * Lists all sectors as DTOs, each carrying an absolute href built from the
     * request URI.
     */
    @GET
    @Produces("application/json")
    public List<SectorDTO> getSectors() {
        List<SectorDTO> result = new ArrayList<>();
        List<Sector> sectors = sectorsDAO.findAll();
        for (Sector sector : sectors) {
            long sectorId = sector.getId();
            URI sectorHref = uriInfo
                    .getAbsolutePathBuilder()
                    .path(SectorsResource.class, "getSector")
                    .build(sectorId);
            SectorDTO dto = new SectorDTO();
            dto.setHref(sectorHref);
            dto.setName(sector.getName());
            result.add(dto);
        }
        return result;
    }
    /**
     * Creates a sector by name, idempotently: if a sector with the same name
     * already exists, responds 200 with its Location; otherwise 201 Created.
     */
    @POST
    @Consumes("application/json")
    public Response createSector(SectorDTO sectorDTO) {
        boolean created;
        long sectorId;
        try {
            sectorId = sectorsDAO.findByName(sectorDTO.getName()).getId();
            created = false;
        } catch (BusinessDomainEntityNotFoundException ex) {
            // Not found means this is a new name — create it.
            created = true;
            sectorId = sectorsDAO.create(new Sector(sectorDTO.getName()));
        }
        URI sectorUri = uriInfo
                .getBaseUriBuilder()
                .path(SectorsResource.class)
                .path(SectorsResource.class, "getSector")
                .build(sectorId);
        ResponseBuilder builder;
        if (created) {
            builder = Response.created(sectorUri);
        } else {
            builder = Response.ok().location(sectorUri);
        }
        return builder.build();
    }
    /**
     * Returns one sector by id; the mapper for
     * BusinessDomainEntityNotFoundException decides the error response.
     */
    @GET
    @Path("/{id}")
    @Produces("application/json")
    public Sector getSector(@PathParam(value = "id") long id) throws BusinessDomainEntityNotFoundException {
        return sectorsDAO.findById(id);
    }
    /**
     * Renames a sector. The change is persisted by JPA dirty tracking when the
     * transaction commits (no explicit save call).
     */
    @PUT
    @Path("/{id}")
    @Consumes("application/json")
    public Response updateSector(SectorDTO sectorDTO, @PathParam(value = "id") long id) throws BusinessDomainEntityNotFoundException {
        Sector sector = sectorsDAO.findById(id);
        sector.setName(sectorDTO.getName());
        return Response.ok().build();
    }
    /** Deletes a sector by id. */
    @DELETE
    @Path("/{id}")
    public Response deleteSector(@PathParam(value = "id") long id) throws BusinessDomainEntityNotFoundException {
        Sector sector = sectorsDAO.findById(id);
        sectorsDAO.delete(sector);
        return Response.ok().build();
    }
}
| SoftEng-HEIGVD/Teaching-HEIGVD-AMT-Example-MVC | MVCDemo/src/main/java/ch/heigvd/amt/mvcdemo/rest/resources/SectorsResource.java | Java | mit | 3,061 |
using System;
using System.Diagnostics;
using Pgno = System.UInt32;
using PTRMAP = Contoso.Core.MemPage.PTRMAP;
using Contoso.Threading;
namespace Contoso.Core
{
public partial class BtShared
{
internal RC allocateBtreePage(ref MemPage ppPage, ref Pgno pPgno, Pgno nearby, byte exact)
{
MemPage pTrunk = null;
MemPage pPrevTrunk = null;
Debug.Assert(MutexEx.Held(this.Mutex));
var pPage1 = this.Page1;
var mxPage = btreePagecount(); // Total size of the database file
var n = ConvertEx.Get4(pPage1.Data, 36); // Number of pages on the freelist
if (n >= mxPage)
return SysEx.SQLITE_CORRUPT_BKPT();
RC rc;
if (n > 0)
{
// There are pages on the freelist. Reuse one of those pages.
Pgno iTrunk;
byte searchList = 0; // If the free-list must be searched for 'nearby'
// If the 'exact' parameter was true and a query of the pointer-map shows that the page 'nearby' is somewhere on the free-list, then the entire-list will be searched for that page.
#if !SQLITE_OMIT_AUTOVACUUM
if (exact != 0 && nearby <= mxPage)
{
Debug.Assert(nearby > 0);
Debug.Assert(this.AutoVacuum);
PTRMAP eType = 0;
uint dummy0 = 0;
rc = ptrmapGet(nearby, ref eType, ref dummy0);
if (rc != RC.OK)
return rc;
if (eType == PTRMAP.FREEPAGE)
searchList = 1;
pPgno = nearby;
}
#endif
// Decrement the free-list count by 1. Set iTrunk to the index of the first free-list trunk page. iPrevTrunk is initially 1.
rc = Pager.Write(pPage1.DbPage);
if (rc != RC.OK)
return rc;
ConvertEx.Put4(pPage1.Data, 36, n - 1);
// The code within this loop is run only once if the 'searchList' variable is not true. Otherwise, it runs once for each trunk-page on the
// free-list until the page 'nearby' is located.
do
{
pPrevTrunk = pTrunk;
iTrunk = (pPrevTrunk != null ? ConvertEx.Get4(pPrevTrunk.Data, 0) : ConvertEx.Get4(pPage1.Data, 32));
rc = (iTrunk > mxPage ? SysEx.SQLITE_CORRUPT_BKPT() : btreeGetPage(iTrunk, ref pTrunk, 0));
if (rc != RC.OK)
{
pTrunk = null;
goto end_allocate_page;
}
var k = ConvertEx.Get4(pTrunk.Data, 4); // # of leaves on this trunk page
if (k == 0 && searchList == 0)
{
// The trunk has no leaves and the list is not being searched. So extract the trunk page itself and use it as the newly allocated page
Debug.Assert(pPrevTrunk == null);
rc = Pager.Write(pTrunk.DbPage);
if (rc != RC.OK)
goto end_allocate_page;
pPgno = iTrunk;
Buffer.BlockCopy(pTrunk.Data, 0, pPage1.Data, 32, 4);
ppPage = pTrunk;
pTrunk = null;
Btree.TRACE("ALLOCATE: %d trunk - %d free pages left\n", pPgno, n - 1);
}
else if (k > (uint)(this.UsableSize / 4 - 2))
{
// Value of k is out of range. Database corruption
rc = SysEx.SQLITE_CORRUPT_BKPT();
goto end_allocate_page;
#if !SQLITE_OMIT_AUTOVACUUM
}
else if (searchList != 0 && nearby == iTrunk)
{
// The list is being searched and this trunk page is the page to allocate, regardless of whether it has leaves.
Debug.Assert(pPgno == iTrunk);
ppPage = pTrunk;
searchList = 0;
rc = Pager.Write(pTrunk.DbPage);
if (rc != RC.OK)
goto end_allocate_page;
if (k == 0)
{
if (pPrevTrunk == null)
{
pPage1.Data[32 + 0] = pTrunk.Data[0 + 0];
pPage1.Data[32 + 1] = pTrunk.Data[0 + 1];
pPage1.Data[32 + 2] = pTrunk.Data[0 + 2];
pPage1.Data[32 + 3] = pTrunk.Data[0 + 3];
}
else
{
rc = Pager.Write(pPrevTrunk.DbPage);
if (rc != RC.OK)
goto end_allocate_page;
pPrevTrunk.Data[0 + 0] = pTrunk.Data[0 + 0];
pPrevTrunk.Data[0 + 1] = pTrunk.Data[0 + 1];
pPrevTrunk.Data[0 + 2] = pTrunk.Data[0 + 2];
pPrevTrunk.Data[0 + 3] = pTrunk.Data[0 + 3];
}
}
else
{
// The trunk page is required by the caller but it contains pointers to free-list leaves. The first leaf becomes a trunk page in this case.
var pNewTrunk = new MemPage();
var iNewTrunk = (Pgno)ConvertEx.Get4(pTrunk.Data, 8);
if (iNewTrunk > mxPage)
{
rc = SysEx.SQLITE_CORRUPT_BKPT();
goto end_allocate_page;
}
rc = btreeGetPage(iNewTrunk, ref pNewTrunk, 0);
if (rc != RC.OK)
goto end_allocate_page;
rc = Pager.Write(pNewTrunk.DbPage);
if (rc != RC.OK)
{
pNewTrunk.releasePage();
goto end_allocate_page;
}
pNewTrunk.Data[0 + 0] = pTrunk.Data[0 + 0];
pNewTrunk.Data[0 + 1] = pTrunk.Data[0 + 1];
pNewTrunk.Data[0 + 2] = pTrunk.Data[0 + 2];
pNewTrunk.Data[0 + 3] = pTrunk.Data[0 + 3];
ConvertEx.Put4(pNewTrunk.Data, 4, (uint)(k - 1));
Buffer.BlockCopy(pTrunk.Data, 12, pNewTrunk.Data, 8, (int)(k - 1) * 4);
pNewTrunk.releasePage();
if (pPrevTrunk == null)
{
Debug.Assert(Pager.IsPageWriteable(pPage1.DbPage));
ConvertEx.Put4(pPage1.Data, 32, iNewTrunk);
}
else
{
rc = Pager.Write(pPrevTrunk.DbPage);
if (rc != RC.OK)
goto end_allocate_page;
ConvertEx.Put4(pPrevTrunk.Data, 0, iNewTrunk);
}
}
pTrunk = null;
Btree.TRACE("ALLOCATE: %d trunk - %d free pages left\n", pPgno, n - 1);
#endif
}
else if (k > 0)
{
// Extract a leaf from the trunk
uint closest;
var aData = pTrunk.Data;
if (nearby > 0)
{
closest = 0;
var dist = Math.Abs((int)(ConvertEx.Get4(aData, 8) - nearby));
for (uint i = 1; i < k; i++)
{
int dist2 = Math.Abs((int)(ConvertEx.Get4(aData, 8 + i * 4) - nearby));
if (dist2 < dist)
{
closest = i;
dist = dist2;
}
}
}
else
closest = 0;
//
var iPage = (Pgno)ConvertEx.Get4(aData, 8 + closest * 4);
if (iPage > mxPage)
{
rc = SysEx.SQLITE_CORRUPT_BKPT();
goto end_allocate_page;
}
if (searchList == 0 || iPage == nearby)
{
pPgno = iPage;
Btree.TRACE("ALLOCATE: %d was leaf %d of %d on trunk %d" + ": %d more free pages\n", pPgno, closest + 1, k, pTrunk.ID, n - 1);
rc = Pager.Write(pTrunk.DbPage);
if (rc != RC.OK)
goto end_allocate_page;
if (closest < k - 1)
Buffer.BlockCopy(aData, (int)(4 + k * 4), aData, 8 + (int)closest * 4, 4);
ConvertEx.Put4(aData, 4, (k - 1));
var noContent = (!btreeGetHasContent(pPgno) ? 1 : 0);
rc = btreeGetPage(pPgno, ref ppPage, noContent);
if (rc == RC.OK)
{
rc = Pager.Write((ppPage).DbPage);
if (rc != RC.OK)
ppPage.releasePage();
}
searchList = 0;
}
}
pPrevTrunk.releasePage();
pPrevTrunk = null;
} while (searchList != 0);
}
else
{
// There are no pages on the freelist, so create a new page at the end of the file
rc = Pager.Write(this.Page1.DbPage);
if (rc != RC.OK)
return rc;
this.Pages++;
if (this.Pages == MemPage.PENDING_BYTE_PAGE(this))
this.Pages++;
#if !SQLITE_OMIT_AUTOVACUUM
if (this.AutoVacuum && MemPage.PTRMAP_ISPAGE(this, this.Pages))
{
// If pPgno refers to a pointer-map page, allocate two new pages at the end of the file instead of one. The first allocated page
// becomes a new pointer-map page, the second is used by the caller.
MemPage pPg = null;
Btree.TRACE("ALLOCATE: %d from end of file (pointer-map page)\n", pPgno);
Debug.Assert(this.Pages != MemPage.PENDING_BYTE_PAGE(this));
rc = btreeGetPage(this.Pages, ref pPg, 1);
if (rc == RC.OK)
{
rc = Pager.Write(pPg.DbPage);
pPg.releasePage();
}
if (rc != RC.OK)
return rc;
this.Pages++;
if (this.Pages == MemPage.PENDING_BYTE_PAGE(this))
this.Pages++;
}
#endif
ConvertEx.Put4(this.Page1.Data, 28, this.Pages);
pPgno = this.Pages;
Debug.Assert(pPgno != MemPage.PENDING_BYTE_PAGE(this));
rc = btreeGetPage(pPgno, ref ppPage, 1);
if (rc != RC.OK)
return rc;
rc = Pager.Write((ppPage).DbPage);
if (rc != RC.OK)
ppPage.releasePage();
Btree.TRACE("ALLOCATE: %d from end of file\n", pPgno);
}
Debug.Assert(pPgno != MemPage.PENDING_BYTE_PAGE(this));
end_allocate_page:
pTrunk.releasePage();
pPrevTrunk.releasePage();
if (rc == RC.OK)
{
if (Pager.GetPageRefCount((ppPage).DbPage) > 1)
{
ppPage.releasePage();
return SysEx.SQLITE_CORRUPT_BKPT();
}
(ppPage).HasInit = false;
}
else
ppPage = null;
Debug.Assert(rc != RC.OK || Pager.IsPageWriteable((ppPage).DbPage));
return rc;
}
/// <summary>
/// Adds page <paramref name="iPage"/> to the database file's free-list.
/// <paramref name="pMemPage"/>, when non-null, must be an in-memory
/// reference to that same page (its ID must equal iPage).
/// </summary>
internal RC freePage2(MemPage pMemPage, Pgno iPage)
{
    MemPage pTrunk = null; // Free-list trunk page
    var pPage1 = this.Page1; // Local reference to page 1
    Debug.Assert(MutexEx.Held(this.Mutex));
    Debug.Assert(iPage > 1);
    Debug.Assert(pMemPage == null || pMemPage.ID == iPage);
    MemPage pPage; // Page being freed. May be NULL.
    if (pMemPage != null)
    {
        pPage = pMemPage;
        // Take an extra reference so the unconditional release at
        // freepage_out leaves the caller's reference intact.
        Pager.AddPageRef(pPage.DbPage);
    }
    else
        pPage = btreePageLookup(iPage);
    // Increment the free page count on pPage1
    var rc = Pager.Write(pPage1.DbPage);
    if (rc != RC.OK)
        goto freepage_out;
    var nFree = (int)ConvertEx.Get4(pPage1.Data, 36); // Initial number of pages on free-list
    ConvertEx.Put4(pPage1.Data, 36, nFree + 1);
    if (this.SecureDelete)
    {
        // If the secure_delete option is enabled, then always fully overwrite deleted information with zeros.
        if ((pPage == null && ((rc = btreeGetPage(iPage, ref pPage, 0)) != RC.OK)) || ((rc = Pager.Write(pPage.DbPage)) != RC.OK))
            goto freepage_out;
        Array.Clear(pPage.Data, 0, (int)pPage.Shared.PageSize);
    }
    // If the database supports auto-vacuum, write an entry in the pointer-map to indicate that the page is free.
#if !SQLITE_OMIT_AUTOVACUUM
    if (this.AutoVacuum)
#else
    if (false)
#endif
    {
        ptrmapPut(iPage, PTRMAP.FREEPAGE, 0, ref rc);
        if (rc != RC.OK)
            goto freepage_out;
    }
    // Now manipulate the actual database free-list structure. There are two possibilities. If the free-list is currently empty, or if the first
    // trunk page in the free-list is full, then this page will become a new free-list trunk page. Otherwise, it will become a leaf of the
    // first trunk page in the current free-list. This block tests if it is possible to add the page as a new free-list leaf.
    Pgno iTrunk = 0; // Page number of free-list trunk page
    if (nFree != 0)
    {
        uint nLeaf; // Initial number of leaf cells on trunk page
        iTrunk = (Pgno)ConvertEx.Get4(pPage1.Data, 32); // Page number of free-list trunk page
        rc = btreeGetPage(iTrunk, ref pTrunk, 0);
        if (rc != RC.OK)
            goto freepage_out;
        nLeaf = ConvertEx.Get4(pTrunk.Data, 4);
        Debug.Assert(this.UsableSize > 32);
        // A trunk page can physically hold at most usableSize/4 - 2 leaves;
        // anything above that is corruption.
        if (nLeaf > (uint)this.UsableSize / 4 - 2)
        {
            rc = SysEx.SQLITE_CORRUPT_BKPT();
            goto freepage_out;
        }
        if (nLeaf < (uint)this.UsableSize / 4 - 8)
        {
            // In this case there is room on the trunk page to insert the page being freed as a new leaf.
            // Note: that the trunk page is not really full until it contains usableSize/4 - 2 entries, not usableSize/4 - 8 entries as we have
            // coded. But due to a coding error in versions of SQLite prior to 3.6.0, databases with freelist trunk pages holding more than
            // usableSize/4 - 8 entries will be reported as corrupt. In order to maintain backwards compatibility with older versions of SQLite,
            // we will continue to restrict the number of entries to usableSize/4 - 8 for now. At some point in the future (once everyone has upgraded
            // to 3.6.0 or later) we should consider fixing the conditional above to read "usableSize/4-2" instead of "usableSize/4-8".
            rc = Pager.Write(pTrunk.DbPage);
            if (rc == RC.OK)
            {
                ConvertEx.Put4(pTrunk.Data, 4, nLeaf + 1);
                ConvertEx.Put4(pTrunk.Data, (uint)(8 + nLeaf * 4), iPage);
                // The page's on-disk content is no longer needed unless
                // secure-delete already zeroed (and dirtied) it above.
                if (pPage != null && !this.SecureDelete)
                    Pager.DontWrite(pPage.DbPage);
                rc = btreeSetHasContent(iPage);
            }
            Btree.TRACE("FREE-PAGE: %d leaf on trunk page %d\n", iPage, pTrunk.ID);
            goto freepage_out;
        }
    }
    // If control flows to this point, then it was not possible to add the the page being freed as a leaf page of the first trunk in the free-list.
    // Possibly because the free-list is empty, or possibly because the first trunk in the free-list is full. Either way, the page being freed
    // will become the new first trunk page in the free-list.
    if (pPage == null && (rc = btreeGetPage(iPage, ref pPage, 0)) != RC.OK)
        goto freepage_out;
    rc = Pager.Write(pPage.DbPage);
    if (rc != RC.OK)
        goto freepage_out;
    // NOTE(review): Put4L presumably writes the 4-byte old trunk pointer at
    // offset 0 of the new trunk page -- confirm against ConvertEx.
    ConvertEx.Put4L(pPage.Data, iTrunk);
    ConvertEx.Put4(pPage.Data, 4, 0);
    ConvertEx.Put4(pPage1.Data, 32, iPage);
    Btree.TRACE("FREE-PAGE: %d new trunk page replacing %d\n", pPage.ID, iTrunk);
freepage_out:
    if (pPage != null)
        pPage.HasInit = false;
    // NOTE(review): releasePage appears to tolerate null receivers (pTrunk
    // and pPage may be null here) -- assumed to be a null-safe extension.
    pPage.releasePage();
    pTrunk.releasePage();
    return rc;
}
}
}
| Grimace1975/gpustructs | src/Structs.Data.Btree/Core+Btree/BtShared+Alloc.cs | C# | mit | 18,930 |
# frozen_string_literal: true
require 'cucumber/core/test/timer'
module Cucumber
  module Core
    module Test
      # Drives the execution of test cases and their steps, publishing
      # lifecycle events (starting/finished) on the given event bus.
      class Runner
        attr_reader :event_bus, :running_test_case, :running_test_step
        private :event_bus, :running_test_case, :running_test_step

        def initialize(event_bus)
          @event_bus = event_bus
        end

        # Runs one test case: emits test_case_starting, descends into the
        # case's steps (the block calls back into #test_step / #around_hook),
        # then emits test_case_finished with the aggregate result.
        def test_case(test_case, &descend)
          @running_test_case = RunningTestCase.new
          @running_test_step = nil
          event_bus.test_case_starting(test_case)
          descend.call(self)
          event_bus.test_case_finished(test_case, running_test_case.result)
          self
        end

        # Executes a single step through the case's status machine and
        # publishes the step result on the bus.
        def test_step(test_step)
          @running_test_step = test_step
          event_bus.test_step_starting test_step
          step_result = running_test_case.execute(test_step)
          event_bus.test_step_finished test_step, step_result
          @running_test_step = nil
          self
        end

        # Runs an around hook; a finished event is only emitted when a
        # step was actually being executed inside the hook.
        def around_hook(hook, &continue)
          result = running_test_case.execute(hook, &continue)
          event_bus.test_step_finished running_test_step, result if running_test_step
          @running_test_step = nil
          self
        end

        def done
          self
        end

        # Tracks the aggregate outcome of a test case while its steps run.
        # Once a step fails (or is undefined), subsequent steps are skipped.
        class RunningTestCase
          def initialize
            @timer = Timer.new.start
            @status = Status::Unknown.new(Result::Unknown.new)
          end

          def execute(test_step, &continue)
            status.execute(test_step, self, &continue)
          end

          # Result of the whole case so far, stamped with elapsed duration.
          def result
            status.result(@timer.duration)
          end

          # The methods below appear to be the Result visitor callbacks
          # invoked via Result#describe_to (see Status::Base#execute); each
          # transitions the internal status machine.
          def failed(step_result)
            @status = Status::Failing.new(step_result)
            self
          end

          def passed(step_result)
            @status = Status::Passing.new(step_result)
            self
          end

          def pending(message, step_result)
            @status = Status::Pending.new(step_result)
            self
          end

          def skipped(step_result)
            @status = Status::Skipping.new(step_result)
            self
          end

          # An undefined step fails the whole case.
          def undefined(step_result)
            failed(step_result)
            self
          end

          # Exceptions and durations are acknowledged but do not change
          # the current status.
          def exception(step_exception, step_result)
            self
          end

          def duration(step_duration, step_result)
            self
          end

          attr_reader :status
          private :status

          module Status
            class Base
              attr_reader :step_result
              private :step_result

              def initialize(step_result)
                @step_result = step_result
              end

              # Executes the step and feeds its result back into the
              # monitor (the RunningTestCase) via describe_to.
              def execute(test_step, monitor, &continue)
                result = test_step.execute(monitor.result, &continue)
                result = result.with_message(%(Undefined step: "#{test_step.name}")) if result.undefined?
                result = result.with_appended_backtrace(test_step.source.last) if IsStepVisitor.new(test_step).step?
                result.describe_to(monitor, result)
              end

              def result
                raise NoMethodError, "Override me"
              end
            end

            class Unknown < Base
              def result(duration)
                Result::Unknown.new
              end
            end

            class Passing < Base
              def result(duration)
                Result::Passed.new(duration)
              end
            end

            # Once failing, subsequent steps are skipped rather than run.
            class Failing < Base
              def execute(test_step, monitor, &continue)
                test_step.skip(monitor.result)
              end

              def result(duration)
                step_result.with_duration(duration)
              end
            end

            Pending = Class.new(Failing)

            class Skipping < Failing
              def result(duration)
                step_result.with_duration(duration)
              end
            end
          end
        end
      end
    end
  end
end
| kamenlitchev/cucumber-ruby-core | lib/cucumber/core/test/runner.rb | Ruby | mit | 4,031 |
require 'spec_helper'
require 'ostruct'
module BtcPrice
  # Specs for BtcPrice::Price (minitest/spec style).
  describe Price do
    before do
      # Stub the exchange adapter so no live request is made; an
      # OpenStruct is enough since only #current_price is read.
      Price.adapter = OpenStruct.new(current_price: 492)
    end

    it "returns the current BTC price" do
      # Price should simply delegate to its adapter.
      Price.current_price.must_equal 492
    end
  end
end
| mecampbellsoup/btc_price | spec/lib/price_spec.rb | Ruby | mit | 258 |
// Answers clicked so far (in click order); shared with the click handlers
// installed in window.onload below.
// NOTE: kept as `var` on purpose -- these must be window properties so
// inline HTML handlers (e.g. onclick="validate()") can reach them.
var answers = [];

/**
 * Grade the quiz: answer 1 is required; answers 2 and 3 are wrong picks.
 * Highlights the correct answer green and any wrong picks red, then
 * reveals the matching "right"/"wrong" panel and the response container.
 */
var validate = () => {
  // Answer 1 is the expected answer; always highlight it.
  $("#afev-answer-1").animate(
    { backgroundColor: "green" }
  );
  let good = true;
  if (!answers.includes(1)) {
    good = false;
  }
  if (answers.includes(2)) {
    $("#afev-answer-2").animate(
      { backgroundColor: "red" }
    );
    good = false;
  }
  if (answers.includes(3)) {
    $("#afev-answer-3").animate(
      { backgroundColor: "red" }
    );
    good = false;
  }
  if (good) {
    $("#afev-secret-right").show();
  } else {
    $("#afev-secret-wrong").show();
  }
  $("#afev-secret-response").show();
};
/**
 * Page setup: wire the five answer buttons, update the persisted score
 * string, and lay out the page.
 */
window.onload = () => {
  // Each button records its (1-based) answer number and flashes blue.
  for (let i = 0; i < 5; i++) {
    const index = i + 1; // per-iteration capture for the handler below
    $("#afev-answer-" + index).on("click", () => {
      answers.push(index);
      $("#afev-answer-" + index).animate(
        { backgroundColor: "#1e5abc" }
      );
    });
  }
  // The stored score is a 5-character digit string; this flips the 4th
  // character to '1' (presumably one flag per question -- TODO confirm
  // against the other question scripts).
  let score = localStorage.getItem("afev-score");
  if (score !== null) {
    score = score.substring(0, 3) + '1' + score.substring(4, 5);
    localStorage.setItem("afev-score", score);
  } else {
    localStorage.setItem("afev-score", "10000");
  }
  initLayout();
};
#!/usr/bin/python
import glob
import os
import shutil
import subprocess
import sys
import yaml
def create_role(role):
    """Create a skeleton Ansible role via ``ansible-galaxy init``.

    Raises an Exception when galaxy does not report success.
    """
    # Build argv as a list so a role name containing whitespace cannot be
    # split into multiple arguments (the old .split() call would do that).
    ret = subprocess.check_output(['ansible-galaxy', 'init', role])
    # check_output returns bytes on Python 3; decode before the str compare.
    if not ret.decode().strip().endswith('created successfully'):
        raise Exception('could not create role "{}"'.format(role))
def get_metadata(role):
    """Return the role's ``meta/main.yml`` parsed as a dict.

    Returns {} when the file does not exist or cannot be opened.
    """
    try:
        # `with` closes the handle (the old code leaked it), and safe_load
        # avoids executing arbitrary YAML tags from the metadata file.
        with open(os.path.join(role, 'meta/main.yml')) as main:
            return yaml.safe_load(main)
    except IOError:
        return {}
def ensure_meta(role):
    """Make sure ``<role>/meta`` exists; a no-op when already present."""
    meta_dir = os.path.join(role, 'meta')
    try:
        os.makedirs(meta_dir)
    except OSError:
        # Directory (or part of the path) already exists -- nothing to do.
        pass
def set_metadata(role, metadata):
    """Rewrite ``<role>/meta/main.yml`` with `metadata`, atomically.

    The YAML is written to main.yml.new first and renamed into place so a
    crash mid-write cannot leave a truncated metadata file.
    """
    ensure_meta(role)
    tmp_path = os.path.join(role, 'meta/main.yml.new')
    final_path = os.path.join(role, 'meta/main.yml')
    with open(tmp_path, 'w') as out:
        yaml.dump(metadata, out, default_flow_style=False, explicit_start=True)
    os.rename(tmp_path, final_path)
def add_dependency(src_role, target_role):
    """Record in target_role's metadata that it depends on the nested src_role."""
    metadata = get_metadata(target_role)
    # The dependency path points at the copy nested under target_role/roles.
    dependency = os.path.join(target_role, 'roles', src_role)
    metadata.setdefault('dependencies', []).append(dependency)
    set_metadata(target_role, metadata)
def sub_roles(role):
    """List the paths of roles nested directly under ``<role>/roles``."""
    pattern = os.path.join(role, 'roles', '*')
    try:
        return glob.glob(pattern)
    except OSError:
        return []
def fix_dependency(role, for_destination):
    """Fix the sub-role dependency.

    Once `role` is nested under `for_destination`, any dependency path
    that pointed inside `role` must be re-rooted under
    ``<for_destination>/roles``; unrelated dependencies are untouched.
    """
    metadata = get_metadata(role)
    deps = metadata.setdefault('dependencies', [])

    def rebase(dep):
        if dep.startswith(role):
            return os.path.join(for_destination, 'roles', dep)
        return dep

    metadata['dependencies'] = [rebase(dep) for dep in deps]
    set_metadata(role, metadata)
def fix_dependencies(src_role, for_destination):
    """Depth-first: rebase every nested role's dependencies, then src_role's own."""
    for nested in sub_roles(src_role):
        fix_dependencies(nested, for_destination)
    fix_dependency(src_role, for_destination)
def move(src_role, target_role, copy=False):
    """Nest src_role under ``<target_role>/roles`` and register the dependency.

    With copy=True the source tree is duplicated instead of moved.
    """
    transfer = shutil.copytree if copy else shutil.move
    try:
        os.makedirs(os.path.join(target_role, 'roles'))
    except OSError:
        pass  # roles/ directory already exists
    # Rewrite dependency paths *before* the tree changes location.
    fix_dependencies(src_role, for_destination=target_role)
    transfer(src_role, os.path.join(target_role, 'roles', src_role))
    add_dependency(src_role, target_role)
def concat(roles, into, copy=False):
    """Create the aggregate role `into` and nest every role from `roles` in it."""
    create_role(into)
    for role in roles:
        move(role, target_role=into, copy=copy)
def test():
    """Smoke test: build three roles, nest them, and check the dependency
    metadata each move leaves behind.

    Requires ``ansible-galaxy`` on PATH; cleans up the roles afterwards.
    """
    roles = ['foo', 'bar', 'spam']
    try:
        for role in roles:
            create_role(role)
        move('foo', 'bar')
        assert get_metadata('bar')['dependencies'] == ['bar/roles/foo']
        move('bar', 'spam')
        assert get_metadata('spam')['dependencies'] == ['spam/roles/bar']
        # The nested role's own dependency must have been re-rooted too.
        assert get_metadata('spam/roles/bar')['dependencies'] == ['spam/roles/bar/roles/foo']
    finally:
        for role in roles:
            shutil.rmtree(role, ignore_errors=True)
def main():
    """CLI entry point: ``concat_roles.py ROLE_A ROLE_B TARGET``."""
    # Placeholder for a configurable roles directory; currently disabled,
    # so the branch below never runs.
    roles_path = None
    if roles_path is not None:
        os.chdir(roles_path)
    concat([sys.argv[1], sys.argv[2]], into=sys.argv[3])


if __name__ == '__main__':
    main()
| waltermoreira/dockeransible | app_builder/app_builder_image/concat_roles.py | Python | mit | 3,259 |
import {Component, DynamicComponentLoader, ElementRef, Injector}
from 'angular2/core';
import DynamicComponent from './dynamic-component';
import Hello from './hello';
/**
 * Demo of DynamicComponentLoader#loadAsRoot: clicking the button
 * instantiates DynamicComponent as a new root on the #anchor element.
 */
@Component({
  selector: 'load-as-root-component',
  directives: [
    Hello
  ],
  template: `
  <div class="wrapper">
    <h3>LoadAsRoot Component</h3>
    <button class="btn btn-warning" (click)="loadComponent()">
      Load Component
    </button>
    <div id="anchor"></div>
  </div>`
})
export default class LoadAsRootComponent {
  // Injected: the imperative component loader, this component's element
  // reference, and the injector handed to the dynamically created root.
  constructor(
    private dcl: DynamicComponentLoader,
    private elementRef: ElementRef,
    private injector: Injector) { }

  loadComponent() {
    // loadAsRoot resolves with a ComponentRef once the new root exists;
    // log it for demo purposes.
    this.dcl.loadAsRoot(DynamicComponent, '#anchor', this.injector)
      .then(componentRef => console.log('loadAsRoot', componentRef));
  }
}
| vanessayuenn/batarangle | example-apps/kitchen-sink-example/source/components/dynamic-controls/load-as-root-component.ts | TypeScript | mit | 812 |
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package eu.diversify.ffbpg.evolution.platforms;
import eu.diversify.ffbpg.Application;
import eu.diversify.ffbpg.BPGraph;
import eu.diversify.ffbpg.Platform;
import eu.diversify.ffbpg.Service;
import eu.diversify.ffbpg.collections.SortedIntegerSet;
import eu.diversify.ffbpg.random.RandomUtils;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Hashtable;
/**
* This operator selects a random platform and makes it drop one service.
* The removed services is either an unused ser
*
* @author ffl
*/
public class DropOneRandomService extends PlatformEvolutionOperator {

    /**
     * Removes one service from the platform, if any service can be
     * removed without invalidating the graph.
     *
     * @return true when a service was dropped, false otherwise
     */
    @Override
    public boolean execute(BPGraph graph, Platform p) {
        // Candidate services whose removal keeps the platform valid.
        ArrayList<Integer> removable = PlatformSrvHelper.getValidServicesToRemove(graph, p);
        if (removable.isEmpty()) {
            return false; // nothing can be dropped safely
        }
        // Drop the first candidate and invalidate derived caches.
        p.getProvidedServices().remove(removable.get(0));
        p.clearAllCachedData();
        return true;
    }
}
| ffleurey/FFBPG | src/main/java/eu/diversify/ffbpg/evolution/platforms/DropOneRandomService.java | Java | mit | 1,234 |
import { SolidarityRunContext, SolidaritySettings } from '../../types'
/**
 * Persist the given solidarity settings to the `.solidarity` file.
 * Throws when the settings lack a `requirements` section.
 */
module.exports = (settings: SolidaritySettings, context: SolidarityRunContext): void => {
  const { filesystem } = context

  // A solidarity file is only valid with a requirements section.
  if (!settings.requirements) {
    throw 'You must have a requirements key to be a valid solidarity file'
  }

  // Pretty-print the JSON; the atomic flag avoids torn writes.
  filesystem.write('.solidarity', JSON.stringify(settings, null, 2), { atomic: true })
}
<?php
/*
* This file is part of the API Platform project.
*
* (c) Kévin Dunglas <dunglas@gmail.com>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
declare(strict_types=1);
namespace ApiPlatform\Doctrine\Odm\Filter;
use ApiPlatform\Doctrine\Common\PropertyHelperTrait;
use ApiPlatform\Doctrine\Odm\PropertyHelperTrait as MongoDbOdmPropertyHelperTrait;
use Doctrine\ODM\MongoDB\Aggregation\Builder;
use Doctrine\Persistence\ManagerRegistry;
use Psr\Log\LoggerInterface;
use Psr\Log\NullLogger;
use Symfony\Component\Serializer\NameConverter\NameConverterInterface;
/**
* {@inheritdoc}
*
* Abstract class for easing the implementation of a filter.
*
* @experimental
*
* @author Alan Poulain <contact@alanpoulain.eu>
*/
abstract class AbstractFilter implements FilterInterface
{
    use MongoDbOdmPropertyHelperTrait;
    use PropertyHelperTrait;

    /** @var ManagerRegistry Doctrine manager registry for document managers */
    protected $managerRegistry;
    /** @var LoggerInterface Logger (NullLogger when none was injected) */
    protected $logger;
    /** @var array|null Whitelist of filterable properties (null = all non-nested) */
    protected $properties;
    /** @var NameConverterInterface|null Optional serializer name converter */
    protected $nameConverter;

    public function __construct(ManagerRegistry $managerRegistry, LoggerInterface $logger = null, array $properties = null, NameConverterInterface $nameConverter = null)
    {
        $this->managerRegistry = $managerRegistry;
        $this->logger = $logger ?? new NullLogger();
        $this->properties = $properties;
        $this->nameConverter = $nameConverter;
    }

    /**
     * {@inheritdoc}
     */
    public function apply(Builder $aggregationBuilder, string $resourceClass, string $operationName = null, array &$context = [])
    {
        // Delegate each requested filter to the concrete filterProperty()
        // implementation, denormalizing serialized property names first.
        foreach ($context['filters'] as $property => $value) {
            $this->filterProperty($this->denormalizePropertyName($property), $value, $aggregationBuilder, $resourceClass, $operationName, $context);
        }
    }

    /**
     * Passes a property through the filter.
     *
     * @param mixed $value
     */
    abstract protected function filterProperty(string $property, $value, Builder $aggregationBuilder, string $resourceClass, string $operationName = null, array &$context = []);

    protected function getManagerRegistry(): ManagerRegistry
    {
        return $this->managerRegistry;
    }

    protected function getProperties(): ?array
    {
        return $this->properties;
    }

    protected function getLogger(): LoggerInterface
    {
        return $this->logger;
    }

    /**
     * Determines whether the given property is enabled.
     */
    protected function isPropertyEnabled(string $property, string $resourceClass): bool
    {
        if (null === $this->properties) {
            // to ensure sanity, nested properties must still be explicitly enabled
            return !$this->isPropertyNested($property, $resourceClass);
        }

        return \array_key_exists($property, $this->properties);
    }

    /**
     * Converts a serialized (possibly dotted/nested) property name back to
     * its internal form, segment by segment.
     */
    protected function denormalizePropertyName($property)
    {
        if (!$this->nameConverter instanceof NameConverterInterface) {
            return $property;
        }

        return implode('.', array_map([$this->nameConverter, 'denormalize'], explode('.', (string) $property)));
    }

    /**
     * Converts an internal property name to its serialized form,
     * segment by segment.
     */
    protected function normalizePropertyName($property)
    {
        if (!$this->nameConverter instanceof NameConverterInterface) {
            return $property;
        }

        return implode('.', array_map([$this->nameConverter, 'normalize'], explode('.', (string) $property)));
    }
}
| api-platform/core | src/Doctrine/Odm/Filter/AbstractFilter.php | PHP | mit | 3,450 |
using Microsoft.Xna.Framework;
using NutEngine.Physics.Shapes;
namespace NutEngine.Physics
{
public static partial class Collider
{
    /// <summary>
    /// Circle-vs-circle narrow-phase test. Returns true when the bodies
    /// overlap and fills <paramref name="intersection"/> with penetration
    /// depth and a unit normal pointing from a towards b; on a miss,
    /// intersection is null.
    /// </summary>
    public static bool Collide(IBody<Circle> a, IBody<Circle> b, out IntersectionArea intersection)
    {
        // Probably here should be the sector collisions check,
        // but it needs additional method without manifolds
        //
        // "There is only one god, and His name is Code.
        // And there is only one thing we say to Code: 'not today'."
        var normal = b.Position - a.Position;
        float radius = a.Shape.Radius + b.Shape.Radius;
        // Compare squared lengths first so the square root is only paid
        // on an actual overlap.
        if (normal.LengthSquared() >= radius * radius) {
            intersection = null;
            return false; // Circles doesn't collide
        }
        else {
            float distance = normal.Length();
            if (distance == 0) {
                // Degenerate case: concentric circles. Any axis works, so
                // pick +X and use the first circle's radius as the depth.
                intersection = new IntersectionArea() {
                    Depth = a.Shape.Radius,
                    Normal = Vector2.UnitX,
                };
            }
            else {
                var normalizable = normal / distance; // normalized direction a -> b
                intersection = new IntersectionArea() {
                    Depth = radius - distance,
                    Normal = normalizable,
                };
            }
            return true;
        }
    }

    /// <summary>
    /// Boolean-only overlap test (no manifold); avoids the square root by
    /// comparing the squared center distance with the squared radius sum.
    /// </summary>
    public static bool Collide(IBody<Circle> a, IBody<Circle> b)
    {
        var rSquare = (a.Shape.Radius + b.Shape.Radius) * (a.Shape.Radius + b.Shape.Radius);
        return rSquare > (a.Position.X - b.Position.X) * (a.Position.X - b.Position.X)
            + (a.Position.Y - b.Position.Y) * (a.Position.Y - b.Position.Y);
    }
}
}
| EasyPeasyLemonSqueezy/MadCat | MadCat/NutEngine/Physics/Collider/CirclevsCircle.cs | C# | mit | 1,838 |
import { h } from 'omi';
import createSvgIcon from './utils/createSvgIcon';
// Auto-generated Omi icon component (Material "Face" two-tone variant).
// The h() calls reproduce the raw SVG geometry; do not edit by hand.
export default createSvgIcon(h(h.f, null, h("path", {
  d: "M17.5 8c.46 0 .91-.05 1.34-.12C17.44 5.56 14.9 4 12 4c-.46 0-.91.05-1.34.12C12.06 6.44 14.6 8 17.5 8zM8.08 5.03C6.37 6 5.05 7.58 4.42 9.47c1.71-.97 3.03-2.55 3.66-4.44z",
  opacity: ".3"
}), h("path", {
  d: "M12 2C6.48 2 2 6.48 2 12s4.48 10 10 10 10-4.48 10-10S17.52 2 12 2zm0 2c2.9 0 5.44 1.56 6.84 3.88-.43.07-.88.12-1.34.12-2.9 0-5.44-1.56-6.84-3.88.43-.07.88-.12 1.34-.12zM8.08 5.03C7.45 6.92 6.13 8.5 4.42 9.47 5.05 7.58 6.37 6 8.08 5.03zM12 20c-4.41 0-8-3.59-8-8 0-.05.01-.1.01-.15 2.6-.98 4.68-2.99 5.74-5.55 1.83 2.26 4.62 3.7 7.75 3.7.75 0 1.47-.09 2.17-.24.21.71.33 1.46.33 2.24 0 4.41-3.59 8-8 8z"
}), h("circle", {
  cx: "9",
  cy: "13",
  r: "1.25"
}), h("circle", {
  cx: "15",
  cy: "13",
  r: "1.25"
})), 'FaceTwoTone');
/*
Copyright (c) 2003-2016, CKSource - Frederico Knabben. All rights reserved.
For licensing, see LICENSE.md or http://ckeditor.com/license
*/
// Norwegian ("no") localization for the Horizontal Rule plugin's toolbar label.
CKEDITOR.plugins.setLang( 'horizontalrule', 'no', {
	toolbar: 'Sett inn horisontal linje'
} );
| Rudhie/simlab | assets/ckeditor/plugins/horizontalrule/lang/no.js | JavaScript | mit | 241 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Make use of synaptic as backend."""
# Copyright (C) 2008-2010 Sebastian Heinlein <devel@glatzor.de>
# Copyright (C) 2005-2007 Canonical
#
# Licensed under the GNU General Public License Version 2
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
__author__ = "Sebastian Heinlein <devel@glatzor.de>, " \
"Michael Vogt <mvo@canonical.com"
import tempfile
from gettext import gettext as _
from gi.repository import GObject
from defer import Deferred
import sessioninstaller.errors
class SynapticBackend(object):
    """Install/remove packages by driving Synaptic in non-interactive mode."""

    def _run_synaptic(self, xid, opt, tempf, interaction):
        """Spawn synaptic (through gksu) with the given extra options.

        :param xid: X window id of the parent window (falsy for none).
        :param opt: extra command line options; extended in place.
        :param tempf: optional selections file handed to synaptic.
        :returns: a Deferred fired when the child process exits.
        """
        deferred = Deferred()
        if tempf:
            opt.extend(["--set-selections-file", "%s" % tempf.name])
        #FIXME: Take interaction into account
        opt.extend(["-o", "Synaptic::closeZvt=true"])
        if xid:
            opt.extend(["--parent-window-id", "%s" % (xid)])
        cmd = ["/usr/bin/gksu",
               "--desktop", "/usr/share/applications/update-manager.desktop",
               "--", "/usr/sbin/synaptic", "--hide-main-window",
               "--non-interactive"]
        cmd.extend(opt)
        flags = GObject.SPAWN_DO_NOT_REAP_CHILD
        (pid, stdin, stdout, stderr) = GObject.spawn_async(cmd, flags=flags)
        GObject.child_watch_add(pid, self._on_synaptic_exit, (tempf, deferred))
        return deferred

    def _on_synaptic_exit(self, pid, condition, data):
        """child_watch_add callback: clean up and settle the Deferred."""
        # Unpack in the body instead of the signature so the code stays
        # valid on Python 3 (tuple parameters were removed from the
        # language); behavior is identical.
        tempf, deferred = data
        if tempf:
            tempf.close()
        if condition == 0:
            deferred.callback()
        else:
            deferred.errback(sessioninstaller.errors.ModifyFailed())

    def _write_selections(self, package_names, action):
        """Build a synaptic selections file marking every package for
        `action` ("install" or "uninstall"); the caller owns the handle."""
        # Text mode so str lines can be written on both Python 2 and 3
        # (the default 'w+b' would reject str on Python 3).
        tempf = tempfile.NamedTemporaryFile(mode="w+")
        for pkg_name in package_names:
            tempf.write("%s\t%s\n" % (pkg_name, action))
        tempf.flush()
        return tempf

    def remove_packages(self, xid, package_names, interaction):
        """Remove the given packages via synaptic."""
        opt = []
        # custom progress strings
        #opt.append("--progress-str")
        #opt.append("%s" % _("Please wait, this can take some time."))
        #opt.append("--finish-str")
        #opt.append("%s" % _("Update is complete"))
        tempf = self._write_selections(package_names, "uninstall")
        return self._run_synaptic(xid, opt, tempf, interaction)

    def install_packages(self, xid, package_names, interaction):
        """Install the given packages via synaptic."""
        opt = []
        tempf = self._write_selections(package_names, "install")
        return self._run_synaptic(xid, opt, tempf, interaction)

    def install_package_files(self, xid, package_names, interaction):
        """Installing local package files is not supported by this backend."""
        # Bug fix: the original `raise NotImplemented` raised a TypeError
        # (NotImplemented is a sentinel value, not an exception class).
        raise NotImplementedError
# vim:ts=4:sw=4:et
| yasoob/PythonRSSReader | venv/lib/python2.7/dist-packages/sessioninstaller/backends/synaptic.py | Python | mit | 3,567 |
<?php
// Based on https://github.com/Wixel/GUMP
// DataValidationClass
if (!defined ("_DATAVALIDATION_CLASS_") ) {
define("_DATAVALIDATION_CLASS_", TRUE);
Class DataValidation {
var $field = null;          // Descriptor ("key: [type](validation)") of the first field that failed
var $typeError = 'field';   // Error scope: 'field' (bad data) or 'model' (bad model definition)
var $errorMsg = '';         // Human-readable message for the last error
var $error = false;         // True once any validation error has occurred
var $errorFields = [];      // Per-field error collection (not used in the visible code)
/**
 * Validate the content of $data based on $model.
 *
 * Walks every field declared in $model, applies value transformations,
 * checks types (recursing into nested 'model' fields) and validation
 * rules. On failure, $this->error/$this->errorMsg/$this->field are set.
 *
 * @param array $model        Field definitions: name => ['type'=>..., 'validation'=>...]
 * @param array $data         Data to validate; transformed in place
 * @param array $dictionaries Lookup tables for dictionary-based rules
 * @param bool  $all          True = full validation (optional/internal fields may be absent)
 * @param string $extrakey    Prefix used for nested field names in error messages
 * @return bool True when validation passed
 */
public function validateModel (array &$model, array &$data, array &$dictionaries=[], $all=true, $extrakey='') {
    $error = '';
    foreach ($model as $key=>$value) {
        // because $all==true Ignore those fields that does not exist in $data and are optional or internal
        if($all && !key_exists($key,$data) && isset($value['validation']) && (strpos($value['validation'], 'optional') !== false || strpos($value['validation'], 'internal') !== false)) continue;
        // because $all==false Ignore those fields that does not exist in $data and they are not mandatory
        if(!$all && !key_exists($key,$data)) continue;
        // Does type field exist?.. If not return false and break the loop
        if(!isset($value['type'])) {
            $this->setError('Missing type attribute in model for ' . $extrakey . $key);
            return false;
        }
        //region: excludeifexist:
        // $excludeif controls the exintence of the field depends on others fields
        $excludeif = [];
        if (isset($value['validation']) && strpos($value['validation'], 'excludeifexist:') !== false) {
            $excludeif = explode(',',$this->extractOptionValue('excludeifexist:',$value['validation']));
            foreach ($excludeif as $excludefield) if(strlen($excludefield = trim($excludefield))) {
                if(!isset($model[$excludefield])) {
                    // The referenced field must itself be declared in the model.
                    $this->setError('Wrong \'excludeifexist:\' tag in '.$extrakey . $key.'. Missing field attribute in model for \'' . $excludefield.'\'' );
                    $this->typeError = 'model';
                } else {
                    // If it exist and also the exludes then error
                    if(key_exists($key,$data)) {
                        if (key_exists($excludefield,$data)) {
                            $this->setError('This field is not allowed because other field exists: \'' . $excludefield . '\'');
                            break;
                        }
                    } else {
                        if (!key_exists($excludefield,$data) && stripos($value['validation'],'allownull')===false) {
                            $this->setError('This field is mandatory because is missing other field in \'excludeifexist:' . $excludefield . '\'');
                            break;
                        }
                    }
                }
            }
            // If the field does not exist but there are exclude fields and there is not error.. continue to next field
            if(!$this->error && strlen(trim($excludeif[0])) && !key_exists($key,$data)) continue;
        }
        //endregion
        // Transform values and check if we have an empty value
        if(!$this->error && isset($value['validation'])) {
            // Transform values based on defaultvalue, forcevalue, tolowercase, touppercase,trim
            if(!array_key_exists($key,$data)) $data[$key] = null;
            $data[$key] = $this->transformValue($data[$key],$value['validation']);
            if( null===$data[$key] || (is_string($data[$key]) && !strlen($data[$key])) || (is_array($data[$key]) && !count($data[$key]))) {
                // OPTIONAL: -- Allow empty values if we have optional in options
                if(stripos($value['validation'],'allownull')!==false) {
                    continue; // OK.. next
                }else {
                    if(!key_exists($key,$data))
                        $this->setError('Missing '.$extrakey.$key);
                    else
                        $this->setError('Empty value for '.$extrakey.$key);
                }
            }
        }
        // Let's valid types and recursive contents..
        if(!$this->error) {
            if(!$this->validType($extrakey.$key,$value['type'],$data[$key])) {
                $this->setError(((is_string($data[$key]) && !strlen($data[$key]))?'Empty':'Wrong').' data received for field {'.$extrakey.$key.'} with type {'.$value['type'].'} value=['.json_encode($data[$key]).']');
            }
            elseif($value['type']=='model') {
                // Recursive CALL
                $this->validateModel($value['fields'],$data[$key],$dictionaries,$all,$extrakey.$key.'-');
            }
            elseif(isset($value['validation']) && !$this->validContent($extrakey.$key,$value['validation'],$data[$key]))
                $this->setError('Wrong content in field {'.$extrakey.$key.'} with validation {'.$value['validation'].'}');
        }
        if($this->error) {
            // Remember which field (with its type/validation spec) failed first.
            if(!strlen($this->field ))
                $this->field = $extrakey.$key.': ['.$value['type'].']('.(isset($value['validation'])?:'').')';
            return false;
        }
    }
    return !$this->error;
}
/**
 * Record a validation failure: keep the message and raise the error flag.
 */
function setError($msg) {
    $this->errorMsg = $msg;
    $this->error = true;
}
/**
 * Transform data based on options: forcevalue, defaultvalue, trim,
 * tolowercase, touppercase, regex_delete, toarray, tostring.
 *
 * Options are parsed out of the validation string via extractOptionValue().
 * The literal string "null" in forcevalue/defaultvalue yields PHP null.
 *
 * @param mixed  $data    Value to transform (string or array)
 * @param string $options Validation/option spec for the field
 * @return mixed The transformed value
 */
public function transformValue($data, $options) {
    // forcevalue always wins; defaultvalue only applies to empty, non-bool data.
    if(strpos($options,'forcevalue:')!==false) {
        $data = $this->extractOptionValue('forcevalue:',$options);
        //if deault is "null"
        if($data=="null") $data=null;
    } elseif(strpos($options,'defaultvalue:')!==false && !strlen($data) && !is_bool($data) ) {
        $data = $this->extractOptionValue('defaultvalue:',$options);
        //if deault is "null"
        if($data=="null") $data=null;
    }
    // Case/whitespace normalization; applied element-wise on arrays.
    if( strpos($options,'tolowercase')!==false && strlen($data)) (is_array($data))?$data = array_map('strtolower',$data):$data = strtolower($data);
    if( strpos($options,'touppercase')!==false && strlen($data)) (is_array($data))?$data = array_map('strtoupper',$data):$data = strtoupper($data);
    if( strpos($options,'trim')!==false && strlen($data)) (is_array($data))?$data = array_map('trim',$data):$data = trim($data);
    // regex_delete strips every match of the given pattern from the value(s).
    if( strpos($options,'regex_delete:')!==false) {
        $regex = $this->extractOptionValue("regex_delete:",$options);
        if(strlen($regex)) {
            if(is_array($data)) foreach ($data as &$item) $item = preg_replace("/$regex/",'',$item);
            else $data = preg_replace("/$regex/",'',$data);
        }
    }
    //Convert a string into an array
    if( strpos($options,'toarray:')!==false && !is_array($data) && is_string($data)) {
        $sep = $this->extractOptionValue('toarray:',$options);
        if(strlen($data))
            $data = explode($sep,$data);
        else $data = [];
    }
    //Convert an array into string
    if( strpos($options,'tostring:')!==false && is_array($data) ) {
        $sep = $this->extractOptionValue('tostring:',$options);
        if(!$sep) $sep=',';
        if(count($data))
            $data = implode($sep,$data);
        else $data = "";
    }
    return $data;
}
/**
 * Validate no empty data based in the type.
 *
 * Also coerces $data in place for several types (numeric strings to
 * int/float, "true"/"false" to bool, arrays to JSON/CSV strings).
 *
 * @param string $key  Field name (only used by the name/email sub-validators)
 * @param string $type Declared type; a trailing "(...)" size spec is ignored
 * @param mixed  $data Value to check; may be coerced in place
 * @return bool True when $data matches $type
 */
public function validType($key, $type, &$data) {
    // Empty strings never validate (booleans/arrays are checked per type).
    if(!is_bool($data) && !is_array($data) && is_string($data) && !strlen($data)) return false;
    // database conversion types
    $type = preg_replace('/\(.*/','',$type);
    switch (strtolower($type)) {
        case "varbinary": case "varchar": case "char": case "string": return is_string($data);
        case "text": case "txt": return is_string($data);
        // Coerce exact numeric strings before the type check.
        case "integer": if(strval(intval($data))===strval($data)) $data=intval($data);return is_integer($data);
        case "decimal": case "float": if(floatval($data)!=0 || $data==="0" || $data === 0) $data=floatval($data);return is_float($data);
        case "bit": if(strval(intval($data))===strval($data)) $data=intval($data);return ($data==0 || $data==1);
        case "model": return is_array($data) && !empty($data);
        case "json": if(is_array($data)) $data = json_encode($data);return is_string($data) && is_array(json_decode($data,true));
        case "name": return $this->validateName($key,$data);
        case "ip": return filter_var($data,FILTER_VALIDATE_IP);
        case "url": return filter_var($data,FILTER_VALIDATE_URL);
        case "email": return is_string($data) && $this->validateEmail($key,"email",$data);
        case "emails": return is_array($data) && $this->validateEmail($key,"email",$data);
        case "phone": return is_string($data);
        case "zip": return is_string($data);
        case "keyname": return is_string($data);
        case "key": return is_string($data);
        case "date": return $this->validateDate($data);
        case "datetime": return $this->validateDateTime($data);
        case "datetimeiso": return $this->validateDateTimeISO($data);
        case "currency": return is_numeric($data);
        case "boolean": if(!is_bool($data) && ($data=='true' || $data=='false')) $data = ($data == 'true');return is_bool($data);
        case "array": return is_array($data);
        case "list": return is_array($data);
        case "array_to_string": if(is_array($data)) $data=implode(",",$data);return is_string($data);
        // Unknown types are rejected.
        default: return false;
    }
}
public function validContent($key,$options,&$data, array &$dictionaries=[]) {
if(!strlen(trim($options))) return true;
if(strpos($options,'optional')===false && is_string($data) && !strlen($data)) return false;
// Potential Validators
if(!$this->validateMaxLength($key,$options,$data)) return false;
if(!$this->validateMinLength($key,$options,$data)) return false;
if(!$this->validateFixLength($key,$options,$data)) return false;
if(!$this->validateEmail($key,$options,$data)) return false;
if(!$this->validateRegexMatch($key,$options,$data)) return false;
if(!$this->validateValues($key,$options,$data)) return false;
if(!$this->validateRange($key,$options,$data)) return false;
if(!$this->validateUnsigned($key,$options,$data)) return false;
return true;
}
    /**
     * Formats: Length bt. 8 to 10 depending of the year format (YY or YYYY).
     * The literal string 'now' is also accepted.
     * @param $data string value to validate
     * @return bool true when $data parses as a DateTime
     */
    public function validateDate($data)
    {
        if($data =='now' || (strlen($data)>=8 && strlen($data)<=10)) {
            try {
                $value_time = new DateTime($data);
                return true;
            } catch (Exception $e) {
                // It is not a valid Date: fall through and return false.
            }
        }
        return false;
    }
/**
* Formats: Length bt. 15 to 17 depending of the year formar (YY or YYYY)
* @param $data
* @return bool
*/
public function validateDateTime($data)
{
if($data =='now' || (strlen($data)>=15)) {
try {
$value_time = new DateTime($data);
return true;
} catch (Exception $e) {
// Is not a valida Date
$this->errorFields[] = [$e.$this->errorMsg];
}
} else {
$this->errorFields[] = 'DateTime field is not "now" o it does not have 15 characters';
}
return false;
}
    /**
     * Formats: Length 23 or 25 depending of the year format (YY or YYYY).
     * The literal string 'now' is also accepted.
     * @param $data string value to validate
     * @return bool true when $data parses as a DateTime
     */
    public function validateDateTimeISO($data)
    {
        if($data =='now' || (strlen($data)>=23)) {
            try {
                $value_time = new DateTime($data);
                return true;
            } catch (Exception $e) {
                $this->errorFields[] = [$e.$this->errorMsg];
                // It is not a valid Date: fall through and return false.
            }
        }
        return false;
    }
    /**
     * Validate that every value is at most 'maxlength:<n>' characters long.
     * @param $key string field name for error reporting
     * @param $options string|int option tags containing 'maxlength:<n>', or the limit itself
     * @param $data string|array value(s) to check
     * @return bool true when all values fit (or the option does not apply)
     */
    public function validateMaxLength($key,$options,$data) {
        if(strlen($options) && (is_integer($options) || strpos($options,'maxlength:')!==false)){
            if(!is_integer($options) ) $options = intval($this->extractOptionValue('maxlength:',$options));
            if(!is_array($data)) $data = [$data];
            foreach ($data as $item) {
                if(strlen($item) > $options) {
                    $this->errorFields[] = ['key'=>$key,'method'=>__FUNCTION__,'options'=>$options,'data'=>$data];
                    return false;
                }
            }
        }
        return true;
    }
    /**
     * Validate that every value is at least 'minlength:<n>' characters long.
     * @param $key string field name for error reporting
     * @param $options string|int option tags containing 'minlength:<n>', or the limit itself
     * @param $data string|array value(s) to check
     * @return bool true when all values are long enough (or the option does not apply)
     */
    public function validateMinLength($key,$options,$data) {
        if(strlen($options) && (is_integer($options) || strpos($options,'minlength:')!==false)){
            if(!is_integer($options) ) $options = intval($this->extractOptionValue('minlength:',$options));
            if(!is_array($data)) $data = [$data];
            foreach ($data as $item) {
                if(strlen($item) < $options) {
                    $this->errorFields[] = ['key'=>$key,'method'=>__FUNCTION__,'options'=>$options,'data'=>$data];
                    return false;
                }
            }
        }
        return true;
    }
    /**
     * Validate that every value is exactly 'fixlength:<n>' characters long.
     * @param $key string field name for error reporting
     * @param $options string|int option tags containing 'fixlength:<n>', or the length itself
     * @param $data string|array value(s) to check
     * @return bool true when all values match the length (or the option does not apply)
     */
    public function validateFixLength($key, $options,$data) {
        if(strlen($options) && (is_integer($options) || strpos($options,'fixlength:')!==false)){
            if(!is_integer($options) ) $options = intval($this->extractOptionValue('fixlength:',$options));
            if(!is_array($data)) $data = [$data];
            foreach ($data as $item) {
                if(strlen($item) != $options) {
                    $this->errorFields[] = ['key'=>$key,'method'=>__FUNCTION__,'options'=>$options,'data'=>$data];
                    return false;
                }
            }
        }
        return true;
    }
public function validateEmail($key,$options,$data) {
if(strlen($options) && strpos($options,'email')!==false){
if(!is_array($data)) $data = [$data];
foreach ($data as $item) {
if(!filter_var($item,FILTER_VALIDATE_EMAIL)) {
$this->errorFields[] = ['key'=>$key,'method'=>__FUNCTION__,'data'=>$data];
return false;
}
}
}
return true;
}
    /**
     * Validate that $data lies inside 'range:<min>,<max>'. Either bound may
     * be left empty to make the range open on that side.
     * @param $key string field name for error reporting
     * @param $options string option tags containing 'range:<min>,<max>'
     * @param $data mixed value to compare against the bounds
     * @return bool true when $data is inside the range (or the option does not apply)
     */
    public function validateRange($key,$options,$data) {
        if(strlen($options) && (strpos($options,'range:')!==false)){
            $options = explode(',',($this->extractOptionValue('range:',$options)));
            $ok=true;
            if(isset($options[0]) && strlen($options[0])) $ok = $data >= $options[0];
            if($ok && isset($options[1]) && strlen($options[1])) $ok = $data <= $options[1];
            if(!$ok) {
                $this->errorFields[] = ['key'=>$key,'method'=>__FUNCTION__,'options'=>$options,'data'=>$data];
                return false;
            }
        }
        return true;
    }
public function validateValues($key,$options,$data) {
if(strlen($options) && (strpos($options,'values:')!==false)){
$options = explode(',',($this->extractOptionValue('values:',$options)));
$ok= in_array($data,$options);
if(!$ok) {
$this->errorFields[] = ['key'=>$key,'method'=>__FUNCTION__,'options'=>$options,'data'=>$data];
return false;
}
}
return true;
}
public function validateUnsigned($key,$options,$data) {
if(strlen($options) && (strpos($options,'unsigned')!==false)){
if(intval($data) < 0) {
$this->errorFields[] = ['key'=>$key,'method'=>__FUNCTION__,'options'=>$options,'data'=>$data];
return false;
}
}
return true;
}
    /**
     * Validate that the content matches the 'regex_match:<regex>' expression.
     * String values are trimmed before matching; array values are checked element-wise.
     * @param $key string field name for error reporting
     * @param $options string option tags containing 'regex_match:<regex>'
     * @param $data string|array value(s) to match
     * @return bool|int true/1 when all values match (or the option does not apply)
     */
    public function validateRegexMatch($key, $options, $data) {
        if(strlen($options) && strpos($options,'regex_match')!==false){
            $regex = $this->extractOptionValue('regex_match:',$options);
            if(strlen($regex)) {
                if (is_string($data)) {
                    if (!preg_match('/'.$regex.'/', trim($data))) {
                        $this->errorFields[] = ['key'=>$key,'method'=>__FUNCTION__,'regex'=>$regex];
                        return false;
                    }
                } elseif(is_array($data)) foreach ($data as $item) {
                    if (!preg_match('/'.$regex.'/', trim($item))) {
                        $this->errorFields[] = ['key'=>$key,'method'=>__FUNCTION__,'regex'=>$regex];
                        return false;
                    }
                }
            }
        }
        return true;
    }
    /**
     * Validate a person name: at least 2 characters and only letters
     * (including common Latin accents), spaces, apostrophes and hyphens.
     * @param $key string field name for error reporting
     * @param $data string value to validate
     * @return bool
     */
    public function validateName($key,$data) {
        if(strlen(trim($data)) < 2 || !preg_match("/^([a-zÀÁÂÃÄÅÇÈÉÊËÌÍÎÏÒÓÔÕÖßÙÚÛÜÝàáâãäåçèéêëìíîïñðòóôõöùúûüýÿ '-])+$/i", trim($data))) {
            $this->errorFields[] = ['key'=>$key,'method'=>__FUNCTION__,'data'=>$data];
            return false;
        }
        return true;
    }
private function extractOptionValue($tag,$options) {
list($foo,$value) = explode($tag,$options,2);
return(preg_replace('/( |\|).*/','',trim($value)));
}
}
}
| CloudFramework-io/appengine-php-core | src/class/DataValidation.php | PHP | mit | 19,606 |
<?php /* Partial view: renders a Bootstrap success alert with the message passed in $msg. */ ?>
<div class="col-sm-9 col-sm-offset-3 col-md-10 col-md-offset-2 main">
    <div class="alert alert-success" role="alert">
        <?= $msg; ?>
    </div>
</div>
<?php
/*
* This file is part of the overtrue/laravel-pinyin.
*
* (c) overtrue <i@overtrue.me>
*
* This source file is subject to the MIT license that is bundled
* with this source code in the file LICENSE.
*/
use Illuminate\Support\Facades\Log;
use Overtrue\Pinyin\Pinyin;
// Each helper below is defined only when its name is still free; otherwise a
// warning is logged so name collisions with other packages are surfaced.
if (!function_exists('pinyin')) {
    /**
     * Get the Pinyin of given string.
     *
     * @param string $string
     * @param string $option
     *
     * @return string
     */
    function pinyin($string, $option = Pinyin::NONE)
    {
        return app(Pinyin::class)->convert($string, $option);
    }
} else {
    Log::warning('There exist multiple function "pinyin".');
}
if (!function_exists('pinyin_abbr')) {
    /**
     * Get the fist letters of given string.
     *
     * @param string $string
     * @param string $delimiter
     *
     * @return string
     */
    function pinyin_abbr($string, $delimiter = '')
    {
        return app(Pinyin::class)->abbr($string, $delimiter);
    }
} else {
    Log::warning('There exist multiple function "pinyin_abbr".');
}
if (!function_exists('pinyin_permlink')) {
    /**
     * Get a pinyin permalink from string.
     *
     * @param string $string
     * @param string $delimiter
     *
     * @return string
     *
     * @deprecated since version 3.0.1. Use the "pinyin_permalink" method instead.
     */
    function pinyin_permlink($string, $delimiter = '-')
    {
        return app(Pinyin::class)->permalink($string, $delimiter);
    }
} else {
    Log::warning('There exist multiple function "pinyin_permlink".');
}
if (!function_exists('pinyin_permalink')) {
    /**
     * Get a pinyin permalink from string.
     *
     * @param string $string
     * @param string $delimiter
     *
     * @return string
     */
    function pinyin_permalink($string, $delimiter = '-')
    {
        return app(Pinyin::class)->permalink($string, $delimiter);
    }
} else {
    Log::warning('There exist multiple function "pinyin_permalink".');
}
if (!function_exists('pinyin_sentence')) {
    /**
     * Get the fist pinyin and letters of given string.
     *
     * @param string $string
     * @param string $tone
     *
     * @return string
     */
    function pinyin_sentence($string, $tone = false)
    {
        return app(Pinyin::class)->sentence($string, $tone);
    }
} else {
    Log::warning('There exist multiple function "pinyin_sentence".');
}
| overtrue/laravel-pinyin | src/helpers.php | PHP | mit | 2,405 |
module Smalrubot
  class Board
    # namespace for Studuino
    # Constants mirroring the Studuino (Artec) board API: motor and sensor
    # port numbers, part IDs, and motor/LED state codes.
    module Studuino
      # DC motor ports
      PORT_M1 = 0
      PORT_M2 = 1
      # Sensor/servo ports A0..A7
      PORT_A0 = 0
      PORT_A1 = 1
      PORT_A2 = 2
      PORT_A3 = 3
      PORT_A4 = 4
      PORT_A5 = 5
      PORT_A6 = 6
      PORT_A7 = 7
      # Part IDs identifying what is attached to a port
      PIDOPEN = 0
      PIDLED = 1
      PIDBUZZER = 2
      PIDLIGHTSENSOR = 3
      PIDSOUNDSENSOR = 4
      PIDIRPHOTOREFLECTOR = 5
      PIDACCELEROMETER = 6
      PIDTOUCHSENSOR = 7
      PIDPUSHSWITCH = 8
      # DC motor rotation / stop modes
      NORMAL = 0
      REVERSE = 1
      BRAKE = 2
      COAST = 3
      # Generic on/off states
      ON = 1
      OFF = 0
    end
  end
end
'use strict';
module.exports = function(localeA, localeB, options) {
options = options || {};
if (typeof localeA !== 'object' || typeof localeB !== 'object') {
throw new Error('s18n: `localeA` and `localeB` must be objects.');
}
var localeAMissing = {};
var localeBMissing = {};
var unmodifiedStrings = {};
var modifiedStrings = [];
for (var hashA in localeA) {
if (typeof localeB[hashA] === 'undefined') {
localeBMissing[hashA] = localeA[hashA];
} else {
if (localeA[hashA] === localeB[hashA]) {
unmodifiedStrings[hashA] = localeA[hashA];
} else {
modifiedStrings.push({
hash: hashA,
strings: [
localeA[hashA],
localeB[hashA]
]
});
}
}
}
for (var hashB in localeB) {
if (typeof localeA[hashB] === 'undefined') {
localeAMissing[hashB] = localeB[hashB];
}
}
return [
arrayifyResults(localeAMissing),
arrayifyResults(localeBMissing),
arrayifyResults(unmodifiedStrings),
modifiedStrings
];
};
/**
 * Convert a hash -> string map into an array of {hash, string} entries.
 * @param {Object} resultsObj - map of hashes to strings
 * @returns {Array} one {hash, string} object per key, in key order
 */
function arrayifyResults(resultsObj) {
  return Object.keys(resultsObj).map(function(key) {
    return { hash: key, string: resultsObj[key] };
  });
}
package org.sagebionetworks.dashboard.model;
/**
 * One access-log entry: identifies who issued a request (session/user), what
 * was requested (method, URI, query string, object), and how the service
 * answered (status, latency), plus the serving environment (stack, host,
 * instance, VM, thread).
 */
public interface AccessRecord extends Record{
    /** @return session identifier of the request */
    String getSessionId();
    /** @return identifier of the requesting user */
    String getUserId();
    /** @return identifier of the accessed object */
    String getObjectId();
    /** @return HTTP method of the request */
    String getMethod();
    /** @return request URI */
    String getUri();
    /** @return raw query string of the request */
    String getQueryString();
    /** @return response status */
    String getStatus();
    /** @return request latency */
    Long getLatency();
    /** @return user agent that issued the request */
    String getUserAgent();
    /** @return stack that served the request */
    String getStack();
    /** @return host that served the request */
    String getHost();
    /** @return instance that served the request */
    String getInstance();
    /** @return VM identifier */
    String getVM();
    /** @return identifier of the serving thread */
    String getThreadId();
}
| Sage-Bionetworks/dashboard-core | src/main/java/org/sagebionetworks/dashboard/model/AccessRecord.java | Java | mit | 436 |
//basic physics entity
//This entity extends the joncom base entity and is responsible for
//collision categories
//property inheritance
//default settings/hooks
//It also defines the ObjectWorld and RenderWorld objects "soma" and "animus" (or whatever distinct names you can think of)
//The soma cannot call animus functions and vice versa. Both must be called from top level functions like update, init, draw, or top level physics callbacks like beginContact, preSolve, etc:.
//This is to maintain separation between ObjectWorld and RenderWorld
ig.module('game.entities.physEnt')
.requires('plugins.joncom.box2d.entity', 'game.const_defs', 'plugins.tween', 'plugins.tileUtil')
.defines(function() {
    EntityPhysEnt = ig.Entity.extend({
        //default settings, overwritten by _loadSettings
        gravityFactor: 1,
        categoryBits: ig.Filter.NOCOLLIDE,
        maskBits: ig.Filter.ALL,
        isTransient: false,
        currentDim: 'normal',
        currentFix: null,
        // Creates the Box2D body (via parent), configures rotation, records the
        // initial fixture, and hooks entity-specific animation setup.
        init: function( x, y, settings ) {
            //inject filter data into settings before creating box2d body
            settings.categoryBits = this.categoryBits;
            settings.maskBits = this.maskBits;
            this.parent( x, y, settings );
            //this._loadSettings(settings);
            //presume non-rotating body
            //will almost certainly be entity-specific later
            // ig.global.wm is the Weltmeister editor, which has no physics body.
            if (!ig.global.wm) {
                this.body.SetFixedRotation(this.isFixedRotation);
                this.currentFix = this.body.GetFixtureList();
            }
            this.setupAnimation();
        },
        //checks that allow zero value... is there a shorter way to handle this?
        //allows entities to get context-sensitive properties, though most settings will still be pre-defined
        // NOTE(review): typeof never yields the string 'null', so each `!== 'null'`
        // comparison is always true — these guards effectively only test for
        // undefined. Confirm intent before simplifying.
        _loadSettings: function(settings) {
            if (typeof(settings.categoryBits) !== 'null' && typeof(settings.categoryBits) !== 'undefined') {
                console.log("Category is happening");
                this.categoryBits = settings.categoryBits;
            }
            if (typeof(settings.maskBits) !== 'null' && typeof(settings.maskBits) !== 'undefined' ) {
                console.log("Mask is happening");
                this.maskBits = settings.maskBits;
            }
            if (typeof(settings.gravityFactor) !== 'null' && typeof(settings.gravityFactor) !== 'undefined') {
                console.log("Gravity is happening");
                this.gravityFactor = settings.gravityFactor;
            }
            if (typeof(settings.isFixedRotation) !== 'null' && typeof(settings.isFixedRotation) !== 'undefined') {
                console.log("Rotation is happening");
                this.isFixedRotation = settings.isFixedRotation;
            }
            if (settings.isTransient !== 'null' && settings.isTransient !== undefined) {
                console.log("Transient is happening");
                this.isTransient = settings.isTransient;
            }
        },
        // Physics contact callback: delegates to the parent entity.
        beginContact: function(other, contact) {
            this.parent(other,contact);
        },
        // Hook for subclasses; intentionally empty here.
        setupAnimation: function() { },
        //creates a sensor fixture for altering an entity's shape or size
        // The fixture starts massless (density 0) and non-colliding; its intended
        // filter bits are stashed in userData until setDim() activates it.
        makeDim: function(name, size, filterSettings) {
            var shapeDef = new Box2D.Collision.Shapes.b2PolygonShape();
            shapeDef.SetAsBox(size.x / 2 * Box2D.SCALE, size.y / 2 * Box2D.SCALE);
            var fixtureDef = new Box2D.Dynamics.b2FixtureDef();
            fixtureDef.shape = shapeDef;
            fixtureDef.density = 0; //massless sensor
            fixtureDef.friction = this.uniFriction;
            fixtureDef.restitution = this.bounciness;
            fixtureDef.userData = {name: name, categoryBits: null, maskBits: null, type: 'dim'};
            // Default the stored filter bits to the current primary fixture's bits.
            if (filterSettings) {
                fixtureDef.userData.categoryBits = filterSettings.categoryBits;
                fixtureDef.userData.maskBits = filterSettings.maskBits;
            }
            else {
                fixtureDef.userData.categoryBits = this.body.GetFixtureList().GetFilterData().categoryBits;
                fixtureDef.userData.maskBits = this.body.GetFixtureList().GetFilterData().maskBits;
            }
            fixtureDef.filter.categoryBits = ig.Filter.NOCOLLIDE;
            fixtureDef.filter.maskBits = ig.Filter.NOCOLLIDE;
            fixtureDef.isSensor = true;
            this.body.CreateFixture(fixtureDef);
        },
        //set a sensor fixture as the solid fixture that represents the entity. Automatically turns the current solid fixture into a sensor (standby).
        setDim: function(name) {
            var fix = this.body.GetFixtureList();
            var curr = null;
            var next = null;
            // Walk the fixture list looking for both the requested dim ("next")
            // and the currently active dim ("curr").
            do {
                if (fix.GetUserData().name == name) {
                    next = fix;
                }
                if (fix.GetUserData().name == this.currentDim) {
                    curr = fix;
                }
                if (next && curr) {
                    break;
                }
            } while (fix = fix.GetNext());
            if (next && curr) {
                // Swap roles: the new dim becomes solid/massive, the old one
                // becomes a massless non-colliding sensor.
                next.SetDensity(curr.GetDensity()); //should actually set to a density that sets the same mass
                curr.SetSensor(true);
                next.SetSensor(false);
                curr.SetDensity(0);
                this.currentDim = name;
                this.currentFix = next;
                var filt = curr.GetFilterData();
                filt.categoryBits = ig.Filter.NOCOLLIDE;
                filt.maskBits = ig.Filter.NOCOLLIDE;
                curr.SetFilterData(filt);
                // Restore the new dim's real filter bits from its userData stash.
                filt = next.GetFilterData();
                filt.categoryBits = next.GetUserData().categoryBits;
                filt.maskBits = next.GetUserData().maskBits;
                next.SetFilterData(filt);
            }
            else {
                //PANIC
                console.log("PANIC");
            }
        },
        // Creates a massless, frictionless sensor fixture offset from the body,
        // used for detection zones; the created fixture is stored on senseObj.
        makeSense: function(name, senseObj) {
            var shapeDef = new Box2D.Collision.Shapes.b2PolygonShape();
            shapeDef.SetAsOrientedBox(senseObj.size.x*Box2D.SCALE/2, senseObj.size.y*Box2D.SCALE/2, new Box2D.Common.Math.b2Vec2(senseObj.pos.x*Box2D.SCALE, senseObj.pos.y*Box2D.SCALE), 0);
            var fixtureDef = new Box2D.Dynamics.b2FixtureDef();
            fixtureDef.shape = shapeDef;
            fixtureDef.density = 0; //massless sensor
            fixtureDef.friction = 0;
            fixtureDef.restitution = 0;
            fixtureDef.userData = {name: name, categoryBits: senseObj.categoryBits, maskBits: senseObj.maskBits, type: 'sense'};
            fixtureDef.filter.categoryBits = senseObj.categoryBits;
            fixtureDef.filter.maskBits = senseObj.maskBits;
            fixtureDef.isSensor = true;
            senseObj.fixture = this.body.CreateFixture(fixtureDef);
            console.log(senseObj.fixture.GetFilterData());
        },
        // Returns the first solid (non-sensor) fixture on the body, or null.
        getFirstNonSensor: function() {
            for (var fix = this.body.GetFixtureList(); fix; fix = fix.GetNext()) {
                if (!fix.IsSensor()) {
                    return fix;
                }
            }
            return null;
        },
        //dump all fixture info to console
        _dumpFixtureData: function() {
            console.log("***FIXTURE DUMP***");
            for (var fix = this.body.GetFixtureList(); fix; fix = fix.GetNext()) {
                console.log(fix);
            }
            console.log("***END FIX DUMP***");
        },
        //draw all non-sensor, massive fixtures associated with this entity
        _debugDraw: function() {
            for (var fix = this.body.GetFixtureList(); fix; fix = fix.GetNext()) {
                if (!fix.IsSensor() && fix.GetDensity()) {
                    this._debugDrawFixture(fix, 0);
                }
            }
        },
        //draw all fixtures associated with this entity, regardless of status
        _debugDrawAll: function() {
            for (var fix = this.body.GetFixtureList(); fix; fix = fix.GetNext()) {
                this._debugDrawFixture(fix, 0);
            }
        },
        //draw the given fixture, using the second parameter to generate a random outline color
        //guess we ignore colorRand for now...
        //currently only works for 4 vertex box shapes
        //WILL NOT ERROR CHECK. The function will only work if the fixture's shape is an axially aligned box
        _debugDrawFixture: function(fix, colorRand) {
            // Lazily assign a random RGB outline color and cache it in userData.
            if (!fix.GetUserData().color) {
                var r = Math.floor(Math.random() * 255);
                var g = Math.floor(Math.random() * 255);
                var b = Math.floor(Math.random() * 255);
                fix.SetUserData({name: fix.GetUserData().name,
                    color: { r: r, g: g, b:b}
                });
            }
            var color = fix.GetUserData().color;
            ig.system.context.strokeStyle = 'rgba(' + color.r.toString() + ',' + color.g.toString() + ',' + color.b.toString() + "," + '1)';
            //figure out where we need to draw this box...
            var bodyPos = this.body.GetPosition(); //center and scaled
            var fixShape = fix.GetShape().GetVertices();
            var width, height = null;
            //lazy method to find width and height
            // Compare each pair of vertices: a shared x gives the box height,
            // a shared y gives the box width (converted back from Box2D units).
            for (var i = 0; i < fixShape.length; i++) {
                for (var j = 0; j < fixShape.length; j++) {
                    if (i == j) continue;
                    if (fixShape[i].x == fixShape[j].x) {
                        if (height == null) {
                            height = Math.abs(fixShape[i].y - fixShape[j].y)/ Box2D.SCALE;
                        }
                    }
                    if (fixShape[i].y == fixShape[j].y) {
                        if (width == null) {
                            width = Math.abs(fixShape[i].x - fixShape[j].x)/ Box2D.SCALE;
                        }
                    }
                }
            }
            // Convert the body's scaled center position to top-left pixel coords.
            var worldPos = {
                x: (bodyPos.x/Box2D.SCALE) - width/2,
                y: (bodyPos.y/Box2D.SCALE) - height/2,
            };
            //console.log("Drawing rect @ ", worldPos);
            //console.log("Body position @ ", this.pos);
            ig.system.context.strokeRect(
                ig.system.getDrawPos(worldPos.x - ig.game.screen.x),
                ig.system.getDrawPos(worldPos.y - ig.game.screen.y),
                ig.system.getDrawPos(width),
                ig.system.getDrawPos(height)
            );
        },
        // Standard draw, plus fixture outlines when the _debugD flag is set.
        draw: function() {
            this.parent();
            if (this._debugD) {
                this._debugDraw();
            }
        },
        //spawn an entity @ local body coordinates rather than world coordinates
        //x and y are already scaled (in pixels). Technically not local coords then
        localSpawnEntity: function(entityType, x, y, settings) {
            var worldX = this.body.GetPosition().x + x;
            var worldY = this.body.GetPosition().y + y;
            ig.game.spawnEntity(entityType, worldX, worldY, settings);
        },
        //passthrough
        //some serious issues with getting rid of bodies...
        // The body is queued on entityKillList for deferred destruction rather
        // than destroyed immediately.
        kill: function() {
            this.parent();
            if (this.body && this._killed) {
                ig.game.entityKillList.push(this.body);
            }
        },
        update: function() {
            this.parent();
        },
        //left = 1
        //right = 2
        //just checks if the current setup would result in cover
        //unit is responsible for making sure all other conditions are met
        // Returns a bitmask: bit 0 = cover to the left, bit 1 = cover to the right.
        checkCover: function() {
            var result = 0;
            if (this._checkCoverRight()) {
                result += 2;
            }
            if (this._checkCoverLeft()) {
                result +=1;
            }
            return result;
        },
        // Tile-map probe for cover on the right: a solid tile beside the entity,
        // empty tiles above it and above the entity, and solid ground beneath.
        _checkCoverRight: function() {
            var leading = {x: this.pos.x + this.size.x, y: this.pos.y + this.size.y};
            var checkCoord = tileUtil.pxToTile(leading.x, leading.y);
            checkCoord.tX += 1;
            var pixelCoord = tileUtil.tileToPx(checkCoord.tX, checkCoord.tY);
            if (ig.game.collisionMap.getTile(pixelCoord.pX, pixelCoord.pY) != 1 ) { //only regular solid blocks for now
                return false;
            }
            // Must be close enough (within 8px) to the covering tile.
            if (pixelCoord.pX - (this.pos.x + this.size.x) > 8) {
                return false;
            }
            checkCoord.tY -= 1;
            pixelCoord = tileUtil.tileToPx(checkCoord.tX, checkCoord.tY);
            if (ig.game.collisionMap.getTile(pixelCoord.pX, pixelCoord.pY) != 0) { //only totally blank spaces for now
                return false;
            }
            checkCoord.tX -= 1;
            pixelCoord = tileUtil.tileToPx(checkCoord.tX, checkCoord.tY);
            if (ig.game.collisionMap.getTile(pixelCoord.pX, pixelCoord.pY) != 0) {
                return false;
            }
            //underneath
            checkCoord.tY += 2;
            pixelCoord = tileUtil.tileToPx(checkCoord.tX, checkCoord.tY);
            if (ig.game.collisionMap.getTile(pixelCoord.pX, pixelCoord.pY) != 1) {
                return false;
            }
            return true;
        },
        //almost carbon copy!
        // Mirror of _checkCoverRight for the left side (distance threshold is 24px here).
        _checkCoverLeft: function() {
            var leading = {x: this.pos.x, y: this.pos.y + this.size.y};
            var checkCoord = tileUtil.pxToTile(leading.x, leading.y);
            checkCoord.tX -= 1;
            var pixelCoord = tileUtil.tileToPx(checkCoord.tX, checkCoord.tY);
            if (ig.game.collisionMap.getTile(pixelCoord.pX, pixelCoord.pY) != 1 ) { //only regular solid blocks for now
                return false;
            }
            if (this.pos.x - pixelCoord.pX > 24) {
                return false;
            }
            checkCoord.tY -= 1;
            pixelCoord = tileUtil.tileToPx(checkCoord.tX, checkCoord.tY);
            if (ig.game.collisionMap.getTile(pixelCoord.pX, pixelCoord.pY) != 0) { //only totally blank spaces for now
                return false;
            }
            checkCoord.tX += 1;
            pixelCoord = tileUtil.tileToPx(checkCoord.tX, checkCoord.tY);
            if (ig.game.collisionMap.getTile(pixelCoord.pX, pixelCoord.pY) != 0) {
                return false;
            }
            //underneath
            checkCoord.tY += 2;
            pixelCoord = tileUtil.tileToPx(checkCoord.tX, checkCoord.tY);
            if (ig.game.collisionMap.getTile(pixelCoord.pX, pixelCoord.pY) != 1) {
                return false;
            }
            return true;
        },
    });
});
module Krikri::Enrichments
##
# Splits a string given in `lat' or `long' in an edm:Place object and
# assigns `lat' and `long' with the split values.
#
# @example
#
# Where val is a DPLA::MAP::Place,
# and val.lat == '40.7127,74.0059',
# assign val.lat = '40.7127' and val.long = '74.0059'
#
# If long is filled in instead of lat, the values will be assigned in the
# reverse order, with lat taking '74.0059' and long taking '40.7127'.
#
class SplitCoordinates
include Audumbla::FieldEnrichment
##
# Enrich a `DPLA::MAP::Place' object by splitting the string given
# in its `lat' or `long'.
#
# place.lat and place.long are ActiveTriples::Terms, we only care
# about the first value. If multiple values are given, this enrichment
# will remove them.
#
# @param place [DPLA::MAP::Place]
#
# @return [DPLA::MAP::Place]
def enrich_value(place)
return place if !place.is_a? DPLA::MAP::Place
return place unless splittable?(place.lat) || splittable?(place.long)
if place.lat.any?
latlong = coord_values(place.lat.first)
assign_latlong!(place, latlong.first, latlong.last)
elsif place.long.any?
latlong = coord_values(place.long.first)
assign_latlong!(place, latlong.last, latlong.first)
end
place
end
def assign_latlong!(place, lat, long)
place.lat = lat if lat
place.long = long if long
end
##
# Given a String `s', return an array of two elements split on a comma
# and any whitespace around the comma.
#
# If the string does not split into two strings representing decimal
# values, then return [nil, nil] because the string does not make sense as
# coordinates.
#
# @param s [String] String of, hopefully, comma-separated decimals
#
# @return [Array]
def coord_values(s)
coords = s.split(/ *, */)
return [nil, nil] if coords.size != 2
coords.map! { |c| c.to_f.to_s == c ? c : nil } # must be decimal ...
return [nil, nil] unless coords[0] && coords[1] # ... i.e. not nil
[coords[0], coords[1]]
end
private
##
# @param value [ActiveTriples::Term<String>]
#
# @return [Boolean] true if value contains a string with a ','; false
# otherwise
def splittable?(value)
return false if value.empty?
value.first.include? ','
end
end
end
| dpla/KriKri | lib/krikri/enrichments/split_coordinates.rb | Ruby | mit | 2,449 |
package code.template;
/**
*
* @author
*/
/**
 * Fixed-capacity circular queue of ints backed by an Integer array.
 * Fixes over the original: front/back are initialized to the -1 "empty"
 * sentinel (isEmpty() previously never reported a fresh queue as empty),
 * equeue() no longer overwrites data when the queue is full, dequeue()
 * actually removes and returns the oldest element, size() reports the real
 * element count, and print() emits a tab instead of the literal "/t".
 */
public class QueueArray {
    int QUEUE_SIZE;       // maximum number of elements
    int front;            // index of the oldest element; -1 while empty
    int back;             // index of the newest element; -1 while empty
    Integer[] queueArray; // backing storage; unused slots are null

    /**
     * Create a circular queue with the given capacity.
     * @param size maximum number of elements the queue can hold
     */
    public QueueArray(int size) {
        queueArray = new Integer[size];
        QUEUE_SIZE = size;
        // -1 marks the empty state; the original defaulted to 0, which made
        // a fresh queue look non-empty.
        front = -1;
        back = -1;
    }

    /**
     * Append a value at the back of the queue; prints a message and does
     * nothing when the queue is already full.
     * @param putInBackArray value to enqueue
     */
    public void equeue(int putInBackArray) {
        if (isFull()) {
            System.out.println("Sorry the queue is full");
            return; // Fix: previously still overwrote queueArray[back].
        }
        if (isEmpty()) {
            front = 0;
            back = 0;
        } else {
            back = (back + 1) % QUEUE_SIZE;
        }
        queueArray[back] = putInBackArray;
    }

    /**
     * Remove and return the value at the front of the queue.
     * @return the oldest enqueued value
     * @throws java.util.NoSuchElementException when the queue is empty
     */
    public int dequeue() {
        if (isEmpty()) {
            System.out.println("The queue is empty");
            throw new java.util.NoSuchElementException("The queue is empty");
        }
        int value = queueArray[front];
        queueArray[front] = null; // release the slot
        if (front == back) {
            // Last element removed: reset to the empty state.
            front = -1;
            back = -1;
        } else {
            front = (front + 1) % QUEUE_SIZE;
        }
        return value;
    }

    /** @return true when the queue holds no elements */
    public boolean isEmpty() {
        return front == -1 && back == -1;
    }

    /** @return true when the queue is at capacity */
    public boolean isFull() {
        if (isEmpty()) {
            return false;
        }
        return (back + 1) % QUEUE_SIZE == front;
    }

    /** @return the number of elements currently stored */
    public int size() {
        if (isEmpty()) {
            return 0;
        }
        return (back - front + QUEUE_SIZE) % QUEUE_SIZE + 1;
    }

    /** Print each slot index and its current content (null for unused slots). */
    public void print() {
        for (int i = 0; i < queueArray.length; i++) {
            System.out.println(i + "\t" + queueArray[i]); // Fix: "/t" was a literal.
        }
    }
}
| BenSaud-CS/Collage | Practical4_Stack&Queue/src/code/template/QueueArray.java | Java | mit | 1,484 |
package org.fakekoji.xmlrpc.server.expensiveobjectscache;
import org.fakekoji.xmlrpc.server.xmlrpcrequestparams.XmlRpcRequestParams;
import java.io.BufferedWriter;
import java.io.IOException;
import java.lang.reflect.Array;
import java.lang.reflect.InvocationTargetException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import edu.umd.cs.findbugs.annotations.*;
/**
 * Per-endpoint cache of XML-RPC responses, keyed by the request parameters.
 */
public class SingleUrlResponseCache {
    // URL of the remote endpoint this cache serves.
    private final URL id;
    // Cached responses keyed by request parameters; wrapped for thread safety.
    private final Map<XmlRpcRequestParams, ResultWithTimeStamp> cache = Collections.synchronizedMap(new HashMap<>());
    public SingleUrlResponseCache(final URL u) {
        this.id = u;
    }
    /** @return the cached result for the given request, or null when absent */
    public ResultWithTimeStamp get(final XmlRpcRequestParams params) {
        return cache.get(params);
    }
    /** Store (or replace) the result for the given request, timestamped now. */
    public void put(final Object result, XmlRpcRequestParams params) {
        cache.put(params, new ResultWithTimeStamp(result));
    }
    /** Evict the cached result for the given request, if any. */
    public void remove(XmlRpcRequestParams key) {
        cache.remove(key);
    }
    /** @return the endpoint URL this cache belongs to */
    public URL getId() {
        return id;
    }
    // Render a millisecond duration as a parenthesised minute count for dump().
    // NOTE(review): l/1000/600 divides seconds by 600, i.e. ten-minute units,
    // though the label says "min" — likely /60 was intended; confirm.
    private static String asMinutes(long l) {
        return " (" + (l / 1000 / 600) + "min)";
    }
    // Types whose values dump() prints inline (wrapper -> primitive, plus
    // String -> String). Despite the name, String is not a wrapper type; the
    // map is used purely as a "directly printable" whitelist.
    private static final Map<Class<?>, Class<?>> WRAPPER_TYPE_MAP;
    static {
        // Initial capacity 20 for the 10 entries below.
        WRAPPER_TYPE_MAP = new HashMap<Class<?>, Class<?>>(20);
        WRAPPER_TYPE_MAP.put(Integer.class, int.class);
        WRAPPER_TYPE_MAP.put(Byte.class, byte.class);
        WRAPPER_TYPE_MAP.put(Character.class, char.class);
        WRAPPER_TYPE_MAP.put(Boolean.class, boolean.class);
        WRAPPER_TYPE_MAP.put(Double.class, double.class);
        WRAPPER_TYPE_MAP.put(Float.class, float.class);
        WRAPPER_TYPE_MAP.put(Long.class, long.class);
        WRAPPER_TYPE_MAP.put(Short.class, short.class);
        WRAPPER_TYPE_MAP.put(Void.class, void.class);
        WRAPPER_TYPE_MAP.put(String.class, String.class);
    }
    /**
     * Write a human-readable snapshot of every cache entry — creation time,
     * replacement flag, validity, TTL arithmetic and result — sorted by
     * method name, each line prefixed with the given string.
     * @param preffix string prepended to every output line
     * @param bw destination writer
     * @param validator cache used to compute per-method validity and TTLs
     * @throws IOException when writing fails
     */
    public synchronized void dump(String preffix, BufferedWriter bw, RemoteRequestsCache validator) throws IOException {
        List<Map.Entry<XmlRpcRequestParams, ResultWithTimeStamp>> entries = new ArrayList(cache.entrySet());
        entries.sort((o1, o2) -> o1.getKey().getMethodName().compareTo(o2.getKey().getMethodName()));
        for (Map.Entry<XmlRpcRequestParams, ResultWithTimeStamp> entry : entries) {
            bw.write(preffix + XmlRpcRequestParams.toNiceString(entry.getKey()) + ": ");
            bw.newLine();
            bw.write(preffix + "  dateCreated: " + entry.getValue().dateCreated);
            bw.newLine();
            bw.write(preffix + "  notBeingRepalced: " + entry.getValue().notBeingRepalced);
            bw.newLine();
            bw.write(preffix + "  validity: " + validator.isValid(entry.getValue(), entry.getKey().getMethodName(), id.getHost()));
            bw.newLine();
            long ttl = validator.getPerMethodValidnesMilis(entry.getKey().getMethodName(), id.getHost());
            bw.write(preffix + "  original ttl: " + ttl + "ms" + asMinutes(ttl));
            bw.newLine();
            // cttl = how long the entry has lived; remaining ttl = ttl - cttl.
            long cttl = new Date().getTime() - entry.getValue().dateCreated.getTime();
            bw.write(preffix + "  time alive " + cttl + "ms" + asMinutes(cttl));
            bw.newLine();
            bw.write(preffix + "  => ttl: " + (ttl - cttl) + "ms" + asMinutes(ttl - cttl));
            bw.newLine();
            // Simple values print inline; anything else is recursively dumped.
            if (WRAPPER_TYPE_MAP.containsKey(entry.getValue().result.getClass())) {
                bw.write(preffix + "  result: " + entry.getValue().result + " (" + entry.getValue().result.getClass().getName() + ")");
                bw.newLine();
            } else {
                bw.write(preffix + "  result: ");
                bw.newLine();
                entry.getValue().dump(preffix + "    ", bw);
            }
        }
        bw.write(preffix + "total: " + entries.size());
        bw.newLine();
    }
    @SuppressFBWarnings(value = {"EI_EXPOSE_REP", "EI_EXPOSE_REP2"}, justification = "pure wrapper class")
    /**
     * A cached response paired with its creation timestamp and a flag that
     * marks whether a background refresh is already in flight.
     */
    public static final class ResultWithTimeStamp {
        // Moment this entry was created; used for TTL arithmetic.
        private final Date dateCreated;
        // The cached response object.
        private final Object result;
        // True until a refresh of this entry has been started.
        private boolean notBeingRepalced = true;
        public ResultWithTimeStamp(final Object result) {
            this.dateCreated = new Date();
            this.result = result;
        }
        public Date getDateCreated() {
            return dateCreated;
        }
        public Object getResult() {
            return result;
        }
        public boolean isNotBeingReplaced() {
            return notBeingRepalced;
        }
        /** Mark this entry as having a replacement request in flight. */
        public void flagBeingReplaced() {
            this.notBeingRepalced = false;
        }
        /** Recursively dump this entry's result to the writer. */
        public void dump(String preffix, BufferedWriter bw) throws IOException {
            dump(preffix, result, bw);
        }
        // Indent step used by the recursive static dump below.
        private static final String FINAL_INCREMENT = "  ";
public static void dump(String preffix, Object o, BufferedWriter bw) throws IOException {
if (o == null) {
bw.write(preffix + "null");
bw.newLine();
return;
}
if (o instanceof Map) {
bw.write(preffix + " map " + o.getClass().getName() + " map (size: " + ((Map) o).size());
bw.newLine();
Set<Map.Entry> entries = ((Map) o).entrySet();
for (Map.Entry e : entries) {
if (e.getKey() == null) {
bw.write(preffix + FINAL_INCREMENT + "null=");
bw.newLine();
dump(preffix + FINAL_INCREMENT + FINAL_INCREMENT, e.getValue(), bw);
} else {
bw.write(preffix + FINAL_INCREMENT + e.getKey() + "=");
bw.newLine();
dump(preffix + FINAL_INCREMENT + FINAL_INCREMENT, e.getValue(), bw);
}
}
} else if (o.getClass().isArray()) {
bw.write(preffix + " ary " + o.getClass().getName() + " ary (size: " + Array.getLength(o));
bw.newLine();
if (o instanceof Object[]) {
for (Object e : (Object[]) o) {
dump(preffix + FINAL_INCREMENT, e, bw);
}
} else if (o instanceof int[]) {
bw.write(preffix + FINAL_INCREMENT);
for (int e : (int[]) o) {
bw.write("" + e + ",");
}
bw.newLine();
} else if (o instanceof byte[]) {
bw.write(preffix + FINAL_INCREMENT);
for (byte e : (byte[]) o) {
bw.write("" + e + ",");
}
bw.newLine();
} else if (o instanceof char[]) {
bw.write(preffix + FINAL_INCREMENT);
for (char e : (char[]) o) {
bw.write("" + e + ",");
}
bw.newLine();
} else if (o instanceof boolean[]) {
bw.write(preffix + FINAL_INCREMENT);
for (boolean e : (boolean[]) o) {
bw.write("" + e + ",");
}
bw.newLine();
} else if (o instanceof double[]) {
bw.write(preffix + FINAL_INCREMENT);
for (double e : (double[]) o) {
bw.write("" + e + ",");
}
bw.newLine();
} else if (o instanceof float[]) {
bw.write(preffix + FINAL_INCREMENT);
for (float e : (float[]) o) {
bw.write("" + e + ",");
}
bw.newLine();
} else if (o instanceof long[]) {
bw.write(preffix + FINAL_INCREMENT);
for (long e : (long[]) o) {
bw.write("" + e + ",");
}
bw.newLine();
} else if (o instanceof short[]) {
bw.write(preffix + FINAL_INCREMENT);
for (short e : (short[]) o) {
bw.write("" + e + ",");
}
bw.newLine();
}
} else if (o instanceof Collection) {
bw.write(preffix + " col " + o.getClass().getName() + " col (size: " + ((Collection) o).size());
bw.newLine();
for (Object e : (Collection) o) {
dump(preffix + FINAL_INCREMENT, e, bw);
}
} else if (o instanceof Iterable) {
bw.write(preffix + " ite " + o.getClass().getName() + " ite");
bw.newLine();
for (Object e : (Iterable) o) {
dump(preffix + FINAL_INCREMENT, e, bw);
}
} else {
bw.write(preffix + o + " (" + o.getClass().getName() + ")");
bw.newLine();
}
}
}
    // Package-private live view of the cache's entries (backed by the cache map).
    Set<Map.Entry<XmlRpcRequestParams, ResultWithTimeStamp>> getContent() {
        return cache.entrySet();
    }
}
| judovana/jenkins-scm-koji-plugin | koji-scm-lib/src/main/java/org/fakekoji/xmlrpc/server/expensiveobjectscache/SingleUrlResponseCache.java | Java | mit | 9,352 |
import Ember from 'ember';

/**
 * Mixin exposing convenience helpers around the injected `reduxStore`
 * service: dispatching actions, looking up named action creators on
 * `this.reduxActions`, and reading slices of the store state.
 */
export default Ember.Mixin.create({
  reduxStore: Ember.inject.service(),

  // Forward a plain action object straight to the store.
  dispatch(action) {
    const store = this.get('reduxStore');
    return store.dispatch(action);
  },

  // Build the named action with the given arguments, then dispatch it.
  dispatchAction(actionName, ...args) {
    const action = this.action(actionName)(...args);
    return this.dispatch(action);
  },

  // Read the whole store state, or a dotted sub-path of it when given.
  getState(path) {
    if (path) {
      return this.get(`reduxStore.state.${path}`);
    }
    return this.get('reduxStore.state');
  },

  // Look up an action creator by name, bound to this object; throws when missing.
  action(actionName) {
    const creator = this.reduxActions[actionName];
    if (!creator) {
      throw new Error(`No redux action found for ${actionName}`);
    }
    return creator.bind(this);
  },
});
| AltSchool/ember-cli-redux | addon/mixins/ember-redux.js | JavaScript | mit | 610 |
package router
import (
"github.com/Unknwon/macaron"
"github.com/macaron-contrib/binding"
"github.com/containerops/crew/handler"
)
// SetRouters registers all /v1 API routes on the given macaron instance:
// session tokens, user signup/signin/profile management, and nested
// organization/team/member management.
func SetRouters(m *macaron.Macaron) {
	m.Group("/v1", func() {
		//Session Router
		m.Group("/token", func() {
			m.Post("/", handler.W1PostToken)
		})
		//User Router
		m.Group("/user", func() {
			//Signin and Signup
			// Signup payload is validated/bound via binding.Bind before the handler runs.
			m.Post("/", binding.Bind(handler.UserSignup{}), handler.W1UserSignup)
			m.Post("/auth", handler.W1UserSignin)
			//List All Users
			m.Get("/list/:count/:page", handler.W1GetUserList)
			//Profile
			m.Put("/:user/profile", handler.W1PutUserProfile)
			m.Get("/:user/profile", handler.W1GetUserProfile)
			m.Post("/:user/gravatar", handler.W1PostUserGravatar)
			//Put Password
			m.Put("/:user/passwd", handler.W1PutUserPasswd)
			//List User Teams and Organizations
			m.Get("/:user/organizations", handler.W1GetUserOrganizations)
			m.Get("/:user/teams", handler.W1GetUserTeams)
		})
		//Organization Router
		m.Group("/org", func() {
			m.Post("/", handler.W1PostOrganization)
			m.Put("/:org", handler.W1PutOrganization)
			m.Get("/:org", handler.W1GetOrganization)
			m.Delete("/:org", handler.W1DeleteOrganization)
			//Team Router (nested under an organization)
			m.Group("/:org/team", func() {
				m.Post("/", handler.W1PostTeam)
				m.Get("/list", handler.W1GetTeams)
				m.Put("/:team", handler.W1PutTeam)
				m.Get("/:team", handler.W1GetTeam)
				m.Delete("/:team", handler.W1DeleteTeam)
				//User Management (team membership)
				m.Group("/:team/user", func() {
					m.Get("/list", handler.W1GetTeamUsers)
					m.Put("/:user", handler.W1PutTeamUser)
					m.Delete("/:user", handler.W1DeleteTeamUser)
				})
			})
		})
	})
}
| yonh/wharf | Godeps/_workspace/src/github.com/containerops/crew/router/router.go | GO | mit | 1,656 |
const gulp = require('gulp');
const nodemon = require('gulp-nodemon');
const mocha = require('gulp-spawn-mocha');

// Run the server under nodemon, restarting on template/script/style edits.
gulp.task('start', () => {
  const monitor = nodemon({
    script: 'server.js',
    ext: 'html js ejs css',
    ignore: ['node_modules'],
  });
  monitor.on('restart', () => {
    console.log('restarted');
  });
});

// Execute the mocha suite in a child process.
gulp.task('test', () => {
  const suite = gulp.src('test/test.js', { read: false });
  return suite.pipe(mocha({
    // reporter style
    R: 'spec',
  }));
});
| bynaki/nowonlib.node | gulpfile.js | JavaScript | mit | 446 |
<?php
namespace app\models;
use Yii;
use yii\base\Model;
/**
 * AdminLogin is the model behind the administrator login form.
 *
 * @property User|null $user This property is read-only.
 *
 */
class AdminLogin extends Model
{
    public $username;
    public $password;
    public $rememberMe = true;

    // Cached lookup result; `false` means "not looked up yet" (null means "no such user").
    private $_user = false;

    /**
     * @return array the validation rules.
     */
    public function rules()
    {
        return [
            // username and password are both required
            [['username', 'password'], 'required'],
            // rememberMe must be a boolean value
            ['rememberMe', 'boolean'],
            // password is validated by validatePassword()
            ['password', 'validatePassword'],
        ];
    }

    /**
     * Validates the password.
     * This method serves as the inline validation for password.
     *
     * @param string $attribute the attribute currently being validated
     * @param array $params the additional name-value pairs given in the rule
     */
    public function validatePassword($attribute, $params)
    {
        if ($this->hasErrors()) {
            return;
        }
        $user = $this->getUser();
        if (!$user || !$user->validatePassword($this->password)) {
            $this->addError($attribute, 'Incorrect username or password.');
        }
    }

    /**
     * Logs in a user using the provided username and password.
     *
     * @return bool whether the user is logged in successfully
     */
    public function login()
    {
        if (!$this->validate()) {
            return false;
        }
        // 30-day session when "remember me" is checked, otherwise a browser session.
        $duration = $this->rememberMe ? 3600 * 24 * 30 : 0;
        return Yii::$app->user->login($this->getUser(), $duration);
    }

    /**
     * Finds user by [[username]], memoizing the lookup.
     *
     * @return User|null
     */
    public function getUser()
    {
        if ($this->_user === false) {
            $this->_user = User::findByUsername($this->username);
        }
        return $this->_user;
    }
}
| Chotainghe/chotainghe | YiiBasic/models/AdminLogin.php | PHP | mit | 1,953 |
module Wice
  # a matrix for all declared columns
  class TableColumnMatrix < Hash #:nodoc:
    # a structure to hold generates Arels for all column filters
    attr_reader :generated_conditions
    # init a matrix of all columns
    def initialize #:nodoc:
      super
      @generated_conditions = []
      # secondary index: table name -> columns hash for that model
      @by_table_names = HashWithIndifferentAccess.new
    end
    # add an Arel for a column; blank conditions are silently ignored
    def add_condition(column, conditions)
      @generated_conditions << [column, conditions] unless conditions.blank?
    end
    # returns a list of all Arels
    def conditions
      @generated_conditions.collect { |_, cond| cond }
    end
    # returns Arels for one model
    # (preserves Hash#[] lookup before it is redefined below)
    alias_method :get, :[]
    # returns the main ActiveRecord model class
    attr_reader :default_model_class
    # sets the main ActiveRecord model class
    def default_model_class=(model) #:nodoc:
      init_columns_of_table(model) unless key?(model)
      @default_model_class = model
    end
    # returns Arels for one model, lazily initializing its column map
    def [](model) #:nodoc:
      init_columns_of_table(model) unless key?(model)
      get(model)
    end
    def get_column_by_model_class_and_column_name(model_class, column_name) #:nodoc:
      self[model_class][column_name]
    end
    def get_column_in_default_model_class_by_column_name(column_name) #:nodoc:
      if @default_model_class.nil?
        raise WiceGridException.new("Cannot search for this column(#{column_name}) in a default model(#{@default_model_class}) as the default model is not set")
      end
      self[@default_model_class][column_name]
    end
    # builds the column-name -> column map for a model and indexes it by table name;
    # each column object is given a back-reference to its model
    def init_columns_of_table(model) #:nodoc:
      self[model] = HashWithIndifferentAccess.new(model.columns.index_by(&:name))
      @by_table_names[model.table_name] = self[model]
      self[model].each_value { |c| c.model = model }
    end
    alias_method :<<, :init_columns_of_table
  end
end
| leikind/wice_grid | lib/wice/table_column_matrix.rb | Ruby | mit | 1,898 |
package distribution
import (
"testing"
"github.com/ready-steady/assert"
)
// TestParse checks that distribution expressions are accepted or rejected
// as expected: names are case-insensitive and whitespace-tolerant, parameter
// counts are enforced, and beta parameters must be strictly positive.
func TestParse(t *testing.T) {
	cases := []struct {
		line    string
		success bool
	}{
		{"Beta(1, 1)", true},
		{"beta(0.5, 1.5)", true},
		{" Beta \t (1, 1)", true},
		{"Gamma(1, 1)", false},
		{"Beta(1, 1, 1)", false},
		{"beta(-1, 1)", false},
		{"beta(0, 1)", false},
		{"beta(1, -1)", false},
		// Fixed: this case was accidentally listed twice.
		{"beta(1, 0)", false},
		{"uniform()", true},
		{"uniform( )", true},
	}

	for _, c := range cases {
		if _, err := Parse(c.line); c.success {
			assert.Success(err, t)
		} else {
			assert.Failure(err, t)
		}
	}
}
| turing-complete/laboratory | src/internal/distribution/main_test.go | GO | mit | 622 |
/*
* This file is part of SpongeAPI, licensed under the MIT License (MIT).
*
* Copyright (c) SpongePowered <https://www.spongepowered.org>
* Copyright (c) contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package org.spongepowered.api.item.inventory.properties;
import org.spongepowered.api.data.Property;
import org.spongepowered.api.item.ItemType;
import org.spongepowered.api.item.inventory.InventoryProperty;
import org.spongepowered.api.util.Coerce;
import java.util.Collection;
import java.util.List;
/**
* A property type intended for use with
* {@link org.spongepowered.api.item.inventory.slots.InputSlot}s in order to
* query for slots which can accept items of the specified type. It is intended
* that the semantics of the {@link #equals} will be such that the method will
* return true if the other property contains <em>any</em> item present in this
* property's collection.
*/
public class AcceptsItems extends AbstractInventoryProperty<String, Collection<ItemType>> {

    /**
     * Create a new AcceptsItems property with the supplied value.
     *
     * @param value Item types to accept
     */
    public AcceptsItems(Collection<ItemType> value) {
        super(value);
    }

    /**
     * Create a new AcceptsItems property with the supplied value and operator.
     *
     * @param value Item types to accept
     * @param operator Logical operator to apply when comparing with other
     *      properties
     */
    public AcceptsItems(Collection<ItemType> value, Operator operator) {
        super(value, operator);
    }

    /**
     * Create a new AcceptsItems property with the supplied value and operator.
     *
     * @param value Item types to accept
     * @param operator Logical operator to apply when comparing with other
     *      properties
     */
    public AcceptsItems(Object value, Operator operator) {
        super(Coerce.toListOf(value, ItemType.class), operator);
    }

    /* (non-Javadoc)
     * @see java.lang.Comparable#compareTo(java.lang.Object)
     */
    @Override
    public int compareTo(Property<?, ?> other) {
        // This breaks the contract of Comparable, but we don't have a meaningful
        // way of providing a natural ordering
        return this.equals(other) ? 0 : this.hashCode() - this.hashCodeOf(other);
    }

    /**
     * Returns true if <em>other</em> is also an {@link AcceptsItems} property
     * and <b>any</b> item appearing in the other property's collecion appears
     * in this property's collection. In formal terms, the method returns true
     * if the size of the intersection between the two item type collections is
     * greater than zero.
     */
    // NOTE(review): this intersection-based equals deliberately deviates from the
    // strict Object.equals contract (documented in the class javadoc); hashCode
    // is not overridden here — confirm that this is accounted for by callers.
    @Override
    public boolean equals(Object obj) {
        if (!(obj instanceof InventoryProperty)) {
            return false;
        }

        InventoryProperty<?, ?> other = (InventoryProperty<?, ?>) obj;
        if (!other.getKey().equals(this.getKey())) {
            return false;
        }

        // Non-ItemType elements in the other property's value are dropped by the coercion.
        List<ItemType> otherTypes = Coerce.toListOf(other.getValue(), ItemType.class);
        for (ItemType t : this.value) {
            if (otherTypes.contains(t)) {
                return true;
            }
        }

        return false;
    }

    /**
     * Create an AcceptsItems property which matches AcceptsItems properties
     * with containing one or more of the supplied values.
     *
     * @param value {@link ItemType}s to accept
     * @return new property
     */
    public static AcceptsItems of(Object... value) {
        return new AcceptsItems(value, Operator.EQUAL);
    }
}
| frogocomics/SpongeAPI | src/main/java/org/spongepowered/api/item/inventory/properties/AcceptsItems.java | Java | mit | 4,628 |
<?php use_javascript(sfConfig::get('app_base_url') . 'PubsPlugin/js/follow.js') ?>
<?php /* Viewing one's own profile: always show the follower list. */ ?>
<?php if ($sf_user->getGuardUser()->getId() == $datos->getId()) { ?>
    <?php include_partial('follow', array('datos' => $datos, 'follows' => $followers, 'action' => 'follower')) ?>
<?php } else { ?>
    <?php /* Otherwise only show followers when the viewer has an active follow; else show the protected notice. */ ?>
    <?php $follow = Doctrine::getTable('Follow')->getFollowing($sf_user->getGuardUser()->getId(), $datos->getId()) ?>
    <?php if ($follow) { ?>
        <?php if ($follow->getIsActive() == "1") { ?>
            <?php include_partial('follow', array('datos' => $datos, 'follows' => $followers, 'action' => 'follower')) ?>
        <?php } else { ?>
            <div class="protected-box">
                <h1 class="logged-out"> <?php echo __('The Fallows', null, 'follow') ?> <?php echo $datos->getUsername() ?> <?php echo __('are protected.', null, 'follow') ?> </h1>
                <p> <?php echo __('Only confirmed fans have access to the full profile', null, 'follow') ?> <?php echo $datos->getUsername() ?>. <?php echo __('You need to request access before you can continue this account.', null, 'follow') ?> </p>
            </div>
        <?php } ?>
    <?php } else { ?>
        <div class="protected-box">
            <h1 class="logged-out"> <?php echo __('The Fallows', null, 'follow') ?> <?php echo $datos->getUsername() ?> <?php echo __('are protected.', null, 'follow') ?> </h1>
            <p> <?php echo __('Only confirmed fans have access to the full profile', null, 'follow') ?> <?php echo $datos->getUsername() ?>. <?php echo __('You need to request access before you can continue this account.', null, 'follow') ?> </p>
        </div>
    <?php } ?>
<?php } ?>
using Avalonia.Media;
using Avalonia.Platform;
using Avalonia.Utilities;
using Avalonia.VisualTree;
namespace Avalonia.Rendering
{
public class RenderLayer
{
private readonly IDrawingContextImpl _drawingContext;
public RenderLayer(
IDrawingContextImpl drawingContext,
Size size,
double scaling,
IVisual layerRoot)
{
_drawingContext = drawingContext;
Bitmap = RefCountable.Create(drawingContext.CreateLayer(size));
Size = size;
Scaling = scaling;
LayerRoot = layerRoot;
}
public IRef<IRenderTargetBitmapImpl> Bitmap { get; private set; }
public double Scaling { get; private set; }
public Size Size { get; private set; }
public IVisual LayerRoot { get; }
public void ResizeBitmap(Size size, double scaling)
{
if (Size != size || Scaling != scaling)
{
var resized = RefCountable.Create(_drawingContext.CreateLayer(size));
using (var context = resized.Item.CreateDrawingContext(null))
{
context.Clear(Colors.Transparent);
context.DrawImage(Bitmap, 1, new Rect(Size), new Rect(Size));
Bitmap.Dispose();
Bitmap = resized;
Size = size;
}
}
}
}
}
| jkoritzinsky/Perspex | src/Avalonia.Visuals/Rendering/RenderLayer.cs | C# | mit | 1,456 |
/**
* @license Copyright (c) 2012, Viet Trinh All Rights Reserved.
* Available via MIT license.
*/
/**
* An authorizeation interceptor used to determine if the user can access the given resource.
*/
define([ 'framework/controller/interceptor/i_interceptor',
	'framework/core/utils/clazz',
	'framework/core/deferred/deferred' ],
function(IInterceptor,
	ClazzUtils,
	Deferred)
{
	/**
	 * Interceptor that rejects requests to secured controllers when no user
	 * is logged in (401) or when an admin-only controller is accessed by a
	 * non-admin user (403).
	 */
	var AuthorizationInterceptor = function()
	{
		IInterceptor.call(this);
		return this;
	};

	AuthorizationInterceptor.prototype = new IInterceptor();
	ClazzUtils.generateProperties(AuthorizationInterceptor);

	// @override
	AuthorizationInterceptor.prototype.before = function(requestContext)
	{
		var controller = requestContext.getController();
		var stateService = requestContext.getStateService();
		var currentUser = stateService.getCurrentUser();

		var isSecured = (controller.getSecured && controller.getSecured() === true);
		var isSecuredAdmin = (controller.getSecuredAdmin && controller.getSecuredAdmin() === true);
		var isLoggedIn = !!(currentUser && currentUser.getId());

		// Secured resources require a logged-in user.
		if ((isSecured || isSecuredAdmin) && !isLoggedIn)
		{
			requestContext.setStatusCode(401);
			requestContext.setErrorMessage('Unauthorized Access! Please log in to access the given resource.');
			return Deferred.rejectedPromise(requestContext);
		}

		// Admin-only resources additionally require admin privileges.
		if (isSecuredAdmin && currentUser.getIsAdmin() !== true)
		{
			requestContext.setStatusCode(403);
			requestContext.setErrorMessage('Unauthorized Access! You are not allowed to access the given resource.');
			return Deferred.rejectedPromise(requestContext);
		}

		// Access granted.
		return Deferred.resolvedPromise(requestContext);
	};

	// @override
	AuthorizationInterceptor.prototype.after = function(requestContext)
	{
		return Deferred.resolvedPromise(requestContext);
	};

	return AuthorizationInterceptor;
});
namespace MeasureMap
{
    /// <summary>
    /// Chain of responsibility for executing tasks. Handlers are linked via
    /// <see cref="SetNext"/> and each handler runs its part of the session
    /// before or after the next handler in the chain.
    /// </summary>
    public interface ISessionHandler
    {
        /// <summary>
        /// Set the next execution item
        /// </summary>
        /// <param name="next">The next handler for the session</param>
        void SetNext(ISessionHandler next);

        /// <summary>
        /// Executes the task
        /// </summary>
        /// <param name="task">The task to run</param>
        /// <param name="settings">The settings for the profiler</param>
        /// <returns>The resulting collection of the executions</returns>
        IProfilerResult Execute(ITask task, ProfilerSettings settings);
    }
}
const pkg = require('./package.json');
const gulp = require('gulp');
const gutil = require('gulp-util');
const coffee = require('gulp-coffee');
const concat = require('gulp-concat');
const uglify = require('gulp-uglify');
const livereload = require('gulp-livereload');
const rename = require('gulp-rename');
const coffeelint = require('gulp-coffeelint');
const jade = require('gulp-jade');
const mainBowerFiles = require('main-bower-files');
const filter = require('gulp-filter');
const less = require('gulp-less');
const autoprefixer = require('gulp-autoprefixer');
const minify = require('gulp-minify-css');
const inject = require('gulp-inject');
const ignore = require('gulp-ignore');

// Full build: scripts, templates, vendor assets and styles.
gulp.task('default', ['coffee', 'jade', 'bower', 'less']);

// Lint + compile CoffeeScript, concatenate and minify into public/js.
gulp.task('coffee', () =>
  gulp.src('src/coffee/*.coffee')
    .pipe(coffeelint())
    .pipe(coffeelint.reporter())
    .pipe(coffee({ bare: true }).on('error', gutil.log))
    .pipe(concat(pkg.name + '.all.js'))
    .pipe(rename({ suffix: '.min' }))
    .pipe(uglify())
    .pipe(gulp.dest('public/js'))
    .pipe(livereload({ auto: false }))
);

// Render Jade templates to public/.
gulp.task('jade', () =>
  gulp.src('src/jade/**/*.jade')
    .pipe(jade({ pretty: true }).on('error', gutil.log))
    .pipe(gulp.dest('public'))
    .pipe(livereload({ auto: false }))
);

// Split bower main files into vendor CSS and JS folders.
gulp.task('bower', () => {
  const jsFilter = filter('*.js');
  const cssFilter = filter('*.css');
  return gulp.src(mainBowerFiles())
    .pipe(cssFilter)
    .pipe(gulp.dest('public/css/vendor'))
    .pipe(cssFilter.restore())
    .pipe(jsFilter)
    .pipe(gulp.dest('public/js/vendor'));
});

// Compile, autoprefix and minify the LESS entry point.
gulp.task('less', () =>
  gulp.src('src/less/style.less')
    .pipe(less().on('error', gutil.log))
    .pipe(autoprefixer("last 2 versions", "> 5%", "ie 8"))
    .pipe(minify())
    .pipe(rename(pkg.name + '.min.css'))
    .pipe(gulp.dest('public/css/'))
    .pipe(livereload({ auto: false }))
);

// Inject built asset references into the base template.
gulp.task('inject', () => {
  gulp.src('src/jade/base.jade')
    .pipe(inject(
      gulp.src(['public/**/*.css', 'public/**/*.js'], { read: false })
        .pipe(ignore(['**/normalize.css', '**/modernizr.js', '**/jquery.min.js'])), { ignorePath: 'public' }
    ))
    .pipe(gulp.dest('src/jade'));
});

// Rebuild on change with livereload.
gulp.task('watch', () => {
  livereload.listen();
  gulp.watch('src/coffee/*.coffee', ['coffee']);
  gulp.watch('src/jade/*.jade', ['jade']);
  gulp.watch('src/less/*.less', ['less']);
});
import _plotly_utils.basevalidators
class BgcolorValidator(_plotly_utils.basevalidators.ColorValidator):
    """Validator for the ``bgcolor`` property of ``sankey.node.hoverlabel``.

    Auto-generated; delegates color validation to the base ColorValidator
    with array inputs allowed and ``calc`` edit semantics.
    """

    def __init__(
        self, plotly_name="bgcolor", parent_name="sankey.node.hoverlabel", **kwargs
    ):
        super(BgcolorValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            array_ok=kwargs.pop("array_ok", True),
            edit_type=kwargs.pop("edit_type", "calc"),
            role=kwargs.pop("role", "style"),
            **kwargs
        )
| plotly/python-api | packages/python/plotly/plotly/validators/sankey/node/hoverlabel/_bgcolor.py | Python | mit | 521 |
const Card = require('./src/main');

/**
 * Vue plugin entry point: registers the Card component under its own name so
 * the package can be installed with `Vue.use(Card)`.
 */
Card.install = (Vue) => {
  Vue.component(Card.name, Card);
};

module.exports = Card;
| slovebj/zxui | packages/card/index.js | JavaScript | mit | 129 |
/* Copyright (C) 2011-2014 Mattias Ekendahl. Used under MIT license, see full details at https://github.com/developedbyme/dbm/blob/master/LICENSE.txt */
/**
* Base object for objects that are using properites to flow updates through the application.
*/
dbm.registerClass("dbm.core.FlowBaseObject", "dbm.core.BaseObject", function(objectFunctions, staticFunctions, ClassReference) {
	//console.log("dbm.core.FlowBaseObject");
	//"use strict";

	//Self reference
	var FlowBaseObject = dbm.importClass("dbm.core.FlowBaseObject");

	//Error report
	var ErrorManager = dbm.importClass("dbm.core.globalobjects.errormanager.ErrorManager");
	var ReportTypes = dbm.importClass("dbm.constants.error.ReportTypes");
	var ReportLevelTypes = dbm.importClass("dbm.constants.error.ReportLevelTypes");

	//Dependencies
	var Property = dbm.importClass("dbm.core.objectparts.Property");
	var GhostProperty = dbm.importClass("dbm.core.objectparts.GhostProperty");
	var UpdateFunction = dbm.importClass("dbm.core.objectparts.UpdateFunction");
	var UpdateFunctionWithArguments = dbm.importClass("dbm.core.objectparts.UpdateFunctionWithArguments");
	var NamedArray = dbm.importClass("dbm.utils.data.NamedArray");

	//Utils
	var VariableAliases = dbm.importClass("dbm.utils.data.VariableAliases");

	//Constants

	/**
	 * Constructor
	 */
	objectFunctions._init = function() {
		//console.log("dbm.core.FlowBaseObject::_init");

		this.superCall();

		// Unique node id used to qualify property/function names; falls back to
		// the class name when the id manager singleton is not available.
		this.__nodeId = (dbm.singletons.dbmIdManager) ? dbm.singletons.dbmIdManager.getNewId(this.__className) : this.__className;

		this._properties = this.addDestroyableObject(NamedArray.create(true));
		this._updateFunctions = this.addDestroyableObject(NamedArray.create(true));

		return this;
	};

	/**
	 * Creates (and adds) an update function on this object.
	 *
	 * @param aName String The name of the update function.
	 * @param aUpdateFunction Function The function to run on update.
	 * @param aInputsArray Array Input properties for the function.
	 * @param aOutputsArray Array Output properties for the function.
	 *
	 * @return UpdateFunction The newly created update function.
	 */
	objectFunctions.createUpdateFunction = function(aName, aUpdateFunction, aInputsArray, aOutputsArray) {
		var newUpdateFunction = UpdateFunction.create(this, aUpdateFunction, aInputsArray, aOutputsArray);
		newUpdateFunction.name = this.__nodeId + "::" + aName + "(f)";

		this._updateFunctions.addObject(aName, newUpdateFunction);

		return newUpdateFunction;
	};

	/**
	 * Creates (and adds) an update function that receives arguments.
	 *
	 * @param aName String The name of the update function.
	 * @param aUpdateFunction Function The function to run on update.
	 * @param aInputsArray Array Input properties for the function.
	 * @param aOutputsArray Array Output properties for the function.
	 *
	 * @return UpdateFunctionWithArguments The newly created update function.
	 */
	objectFunctions.createUpdateFunctionWithArguments = function(aName, aUpdateFunction, aInputsArray, aOutputsArray) {
		var newUpdateFunction = UpdateFunctionWithArguments.create(this, aUpdateFunction, aInputsArray, aOutputsArray);
		newUpdateFunction.name = this.__nodeId + "::" + aName + "(f)";

		this._updateFunctions.addObject(aName, newUpdateFunction);

		return newUpdateFunction;
	};

	/**
	 * Creates (and adds) a ghost update function (one without a body).
	 *
	 * @param aName String The name of the update function.
	 * @param aInputsArray Array Input properties for the function.
	 * @param aOutputsArray Array Output properties for the function.
	 *
	 * @return UpdateFunction The newly created ghost update function.
	 */
	objectFunctions.createGhostUpdateFunction = function(aName, aInputsArray, aOutputsArray) {
		var newUpdateFunction = UpdateFunction.createGhostFunction(aInputsArray, aOutputsArray);
		newUpdateFunction.name = this.__nodeId + "::" + aName + "(gf)";

		this._updateFunctions.addObject(aName, newUpdateFunction);

		return newUpdateFunction;
	};

	/**
	 * Creates (and adds) a property on this object.
	 *
	 * @param aName String The name of the property.
	 * @param aValue * The value that the property should have.
	 *
	 * @return Property The newly created property.
	 */
	objectFunctions.createProperty = function(aName, aValue) {
		//console.log("dbm.core.FlowBaseObject::createProperty");
		//console.log(aName, aValue);

		var newProperty = Property.create(aValue);
		newProperty.name = this.__nodeId + "::" + aName;

		this._properties.addObject(aName, newProperty);

		return newProperty;
	};

	/**
	 * Adds a property to this object.
	 *
	 * @param aName String The name of the property.
	 * @param aProperty Property The property to add.
	 *
	 * @return Property The property that is passed in.
	 */
	objectFunctions.addProperty = function(aName, aProperty) {
		//console.log("dbm.core.FlowBaseObject::addProperty");
		//console.log(aName, aProperty);

		aProperty.name = this.__nodeId + "::" + aName;

		this._properties.addObject(aName, aProperty);

		//console.log("//dbm.core.FlowBaseObject::addProperty");
		return aProperty;
	};

	/**
	 * Creates (and adds) a ghost property (a property without value) on this object.
	 *
	 * @param aName String The name of the property.
	 *
	 * @return GhostProperty The newly created property.
	 */
	objectFunctions.createGhostProperty = function(aName) {
		//console.log("dbm.core.FlowBaseObject::createGhostProperty");

		var newProperty = GhostProperty.create();
		newProperty.name = this.__nodeId + "::" + aName + "(g)";

		this._properties.addObject(aName, newProperty);

		return newProperty;
	};

	/**
	 * Gets a property by name.
	 *
	 * @param aName String The name of the property.
	 *
	 * @return Property The property that matches the name. Null if property doesn't exist.
	 */
	objectFunctions.getProperty = function(aName) {
		//console.log("dbm.core.FlowBaseObject::getProperty");
		//console.log(this, aName);

		if(this._properties !== null && this._properties.select(aName)) {
			return this._properties.currentSelectedItem;
		}

		// Missing property is reported, not thrown; caller must handle null.
		ErrorManager.getInstance().report(ReportTypes.ERROR, ReportLevelTypes.NORMAL, this, "getProperty", "Property " + aName + " doesn't exist on " + this + ".");
		return null;
	};

	/**
	 * Checks if a property exists.
	 *
	 * @param aName String The name of the property.
	 *
	 * @return Boolean True if property exist.
	 */
	objectFunctions.hasProperty = function(aName) {
		//console.log("dbm.core.FlowBaseObject::hasProperty");
		//console.log(this, aName);

		return (this._properties !== null && this._properties.select(aName));
	};

	/**
	 * Gets an update function on this object.
	 *
	 * @param aName String The name of the update function to get.
	 *
	 * @return UpdateFunction The update function that matches the name. Null if update function doesn't exist.
	 */
	objectFunctions.getUpdateFunction = function(aName) {
		//console.log("dbm.core.FlowBaseObject::getUpdateFunction");
		//console.log(this, aName);

		if(this._updateFunctions.select(aName)) {
			return this._updateFunctions.currentSelectedItem;
		}

		ErrorManager.getInstance().report(ReportTypes.ERROR, ReportLevelTypes.NORMAL, this, "getUpdateFunction", "Update function " + aName + " doesn't exist on " + this + ".");
		return null;
	};

	/**
	 * Set the value to a property, or connects the input if it is a property.
	 *
	 * @param aName String The name of the property to set.
	 * @param aInput Property|* The value to set or the property to connect.
	 *
	 * @return self
	 */
	objectFunctions.setPropertyInput = function(aName, aInput) {

		var theProperty = this.getProperty(aName);
		if(theProperty === null) {
			ErrorManager.getInstance().report(ReportTypes.ERROR, ReportLevelTypes.NORMAL, this, "setPropertyInput", "Property " + aName + " doesn't exist.");
			return this;
		}

		// Connection vs plain assignment is decided by the flow manager singleton.
		dbm.singletons.dbmFlowManager.setPropertyInput(theProperty, aInput);

		return this;
	};

	/**
	 * Set the value to a property, or connects the input if it is a property. Current value is not overridden if input is null/undefined.
	 *
	 * @param aName String The name of the property to set.
	 * @param aInput Property|* The value to set or the property to connect.
	 *
	 * @return self
	 */
	objectFunctions.setPropertyInputWithoutNull = function(aName, aInput) {

		if(VariableAliases.isSet(aInput)) {
			this.setPropertyInput(aName, aInput);
		}

		return this;
	};

	/**
	 * Gets the parameters for this class. Part of the toString function.
	 *
	 * @param aReturnArray Array The array that gets filled with the parameters description.
	 */
	objectFunctions._toString_getAttributes = function(aReturnArray) {
		this.superCall(aReturnArray);

		if(this._properties !== null && this._properties !== undefined) {
			aReturnArray.push("properties: [" + this._properties.getNamesArray().join(", ") + "]");
		}
	};

	/**
	 * Sets all the references to null. Part of the destroy function.
	 */
	objectFunctions.setAllReferencesToNull = function() {

		this._properties = null;
		this._updateFunctions = null;

		this.superCall();
	};
});
from pyperator.decorators import inport, outport, component, run_once
from pyperator.nodes import Component
from pyperator.DAG import Multigraph
from pyperator.utils import InputPort, OutputPort
import pyperator.components | baffelli/pyperator | pyperator/__init__.py | Python | mit | 222 |
import itertools
import os.path
import sys
import time
from . import core
from . import file_io
from . import geometry
from . import stringconv
from . import version
#
# Functions
#
def save_output(profileli, opt):
""" Save a summary of results of evaluated profiles
"""
def m(x, pixelwidth):
return geometry.to_metric_units(x, pixelwidth)
def m2(x, pixelwidth):
# For area units...
return geometry.to_metric_units(x, pixelwidth**2)
def na(x):
if x in (None, -1):
return "N/A"
else:
return x
def write_session_summary():
with file_io.FileWriter("session.summary", opt) as f:
f.writerow(["%s version:" % version.title,
"%s (Last modified %s %s, %s)"
% ((version.version,) + version.date)])
f.writerow(["Number of evaluated profiles:", len(eval_proli)])
if err_fli:
f.writerow(["Number of non-evaluated profiles:", len(err_fli)])
f.writerow(["Metric unit:", eval_proli[0].metric_unit])
f.writerow(["Spatial resolution:", opt.spatial_resolution, eval_proli[0].metric_unit])
f.writerow(["Shell width:", opt.shell_width, eval_proli[0].metric_unit])
f.writerow(["Interpoint distances calculated:",
stringconv.yes_or_no(opt.determine_interpoint_dists)])
if opt.determine_interpoint_dists:
f.writerow(["Interpoint distance mode:", opt.interpoint_dist_mode])
f.writerow(["Shortest interpoint distances:",
stringconv.yes_or_no(opt.interpoint_shortest_dist)])
f.writerow(["Lateral interpoint distances:",
stringconv.yes_or_no(opt.interpoint_lateral_dist)])
f.writerow(["Monte Carlo simulations performed:",
stringconv.yes_or_no(opt.run_monte_carlo)])
if opt.run_monte_carlo:
f.writerow(["Number of Monte Carlo runs:", opt.monte_carlo_runs])
f.writerow(["Monte Carlo simulation window:", opt.monte_carlo_simulation_window])
f.writerow(["Strict localization in simulation window:",
stringconv.yes_or_no(opt.monte_carlo_strict_location)])
f.writerow(["Clusters determined:", stringconv.yes_or_no(opt.determine_clusters)])
if opt.determine_clusters:
f.writerow(["Within-cluster distance:",
opt.within_cluster_dist, eval_proli[0].metric_unit])
if clean_fli:
f.writerow(["Input files processed cleanly:"])
f.writerows([[fn] for fn in clean_fli])
if nop_fli:
f.writerow(["Input files processed but which generated no point distances:"])
f.writerows([[fn] for fn in nop_fli])
if warn_fli:
f.writerow(["Input files processed but which generated "
"warnings (see log for details):"])
f.writerows([[fn] for fn in warn_fli])
if err_fli:
f.writerow(["Input files not processed or not included in "
"summary (see log for details):"])
f.writerows([[fn] for fn in err_fli])
def write_profile_summary():
with file_io.FileWriter("profile.summary", opt) as f:
f.writerow(["Postsynaptic element length",
"Presynaptic element length",
"Number of PSDs:",
"Total postsynaptic membrane length incl perforations:",
"Total postsynaptic membrane length excl perforations:",
"Total PSD area:",
"Particles (total)",
"Particles in PSD",
"Particles within %s %s of PSD"
% (opt.spatial_resolution, eval_proli[0].metric_unit),
"Shell particles strictly synaptic and postsynaptic",
"Shell particles strictly synaptic and postsynaptic "
"or associated with postsynaptic membrane",
"Synaptic particles associated w/ postsynaptic "
"membrane",
"Synaptic particles associated w/ presynaptic membrane",
"Perisynaptic particles associated w/ postsynaptic "
"membrane",
"Perisynaptic particles associated w/ presynaptic "
"membrane",
"Within-perforation particles associated w/ "
"postsynaptic membrane",
"Within-perforation particles associated w/ "
"presynaptic membrane",
"Presynaptic profile",
"Postsynaptic profile",
"ID",
"Input file",
"Comment"])
f.writerows([[m(pro.posel.length(), pro.pixelwidth),
m(pro.prsel.length(), pro.pixelwidth),
len(pro.psdli),
m(pro.total_posm.length(), pro.pixelwidth),
sum([m(psd.posm.length(), pro.pixelwidth)
for psd in pro.psdli]),
sum([m2(psd.psdposm.area(), pro.pixelwidth)
for psd in pro.psdli]),
len(pro.pli),
len([p for p in pro.pli if p.is_within_psd]),
len([p for p in pro.pli if p.is_associated_with_psd]),
len([p for p in pro.pli
if p.strict_lateral_location == "synaptic" and
p.axodendritic_location == "postsynaptic" and
p.is_within_postsynaptic_membrane_shell]),
len([p for p in pro.pli
if p.strict_lateral_location == "synaptic" and
(p.axodendritic_location == "postsynaptic" and
p.is_within_postsynaptic_membrane_shell) or
p.is_postsynaptic_membrane_associated]),
len([p for p in pro.pli
if p.lateral_location == "synaptic" and
p.is_postsynaptic_membrane_associated]),
len([p for p in pro.pli
if p.lateral_location == "synaptic" and
p.is_presynaptic_membrane_associated]),
len([p for p in pro.pli
if p.lateral_location == "perisynaptic" and
p.is_postsynaptic_membrane_associated]),
len([p for p in pro.pli
if p.lateral_location == "perisynaptic" and
p.is_presynaptic_membrane_associated]),
len([p for p in pro.pli
if p.lateral_location == "within perforation"
and p.is_postsynaptic_membrane_associated]),
len([p for p in pro.pli
if p.lateral_location == "within perforation"
and p.is_presynaptic_membrane_associated]),
pro.presyn_profile,
pro.postsyn_profile,
pro.id,
pro.comment,
os.path.basename(pro.inputfn)] for pro in eval_proli])
    def write_point_summary(ptype):
        """Write one row per point to '<ptype>.summary'.

        ptype is "particle" (uses pro.pli) or "random" (uses pro.randomli);
        any other value is silently ignored.  Random points are written
        only when opt.use_random is set.
        """
        if ptype == "particle":
            pli = "pli"            # attribute name on the profile object
            pstr = "particle"      # label used in the header row
        elif ptype == "random":
            if not opt.use_random:
                return
            else:
                pli = "randomli"
                pstr = "point"
        else:
            return
        with file_io.FileWriter("%s.summary" % ptype, opt) as f:
            f.writerow(["%s number (as appearing in input file)" % pstr.capitalize(),
                        "Coordinates (in pixels)",
                        "Axodendritic location",
                        "Distance to postsynaptic element membrane",
                        "Distance to presynaptic element membrane",
                        "Lateral location",
                        "Strict lateral location",
                        "Lateral distance to nearest PSD center",
                        "Normalized lateral distance to nearest PSD center",
                        "Within PSD",
                        "Within %s %s of PSD" % (opt.spatial_resolution, eval_proli[0].metric_unit),
                        "Total postsynaptic membrane length incl perforations",
                        "Total postsynaptic membrane length excl perforations",
                        "Length of laterally closest PSD",
                        "Presynaptic profile",
                        "Postsynaptic profile",
                        "Profile ID",
                        "Input file",
                        "Comment"])
            # One row per point across all evaluated profiles; n is the
            # point's 0-based index within its profile's list, reported
            # 1-based to match the input file numbering.
            f.writerows([[n+1,
                          p,
                          p.axodendritic_location,
                          m(p.dist_to_posel, pro.pixelwidth),
                          m(p.dist_to_prsel, pro.pixelwidth),
                          p.lateral_location,
                          p.strict_lateral_location,
                          m(p.lateral_dist_psd, pro.pixelwidth),
                          p.norm_lateral_dist_psd,
                          stringconv.yes_or_no(p.is_within_psd),
                          stringconv.yes_or_no(p.is_associated_with_psd),
                          m(pro.total_posm.length(), pro.pixelwidth),
                          m(sum([psd.posm.length() for psd in pro.psdli]),
                            pro.pixelwidth),
                          m(p.nearest_psd.posm.length(), pro.pixelwidth),
                          pro.presyn_profile,
                          pro.postsyn_profile,
                          pro.id,
                          os.path.basename(pro.inputfn),
                          pro.comment] for pro in eval_proli for n, p in
                         enumerate(pro.__dict__[pli])])
    def write_cluster_summary():
        """Write one row per detected cluster to 'cluster.summary'.

        No-op unless cluster determination was enabled for this run.
        """
        if not opt.determine_clusters:
            return
        with file_io.FileWriter("cluster.summary", opt) as f:
            f.writerow(["Cluster number",
                        "Number of particles in cluster",
                        "Distance to postsynaptic membrane of centroid",
                        "Distance to nearest cluster along postsynaptic element membrane",
                        "Profile ID",
                        "Input file",
                        "Comment"])
            # n is the cluster's 0-based index within its profile, reported
            # 1-based.  na() presumably maps a missing nearest-cluster
            # distance to an "N/A" marker -- confirm against its definition.
            f.writerows([[n + 1,
                          len(c),
                          m(c.dist_to_posel, pro.pixelwidth),
                          m(na(c.dist_to_nearest_cluster), pro.pixelwidth),
                          pro.id,
                          os.path.basename(pro.inputfn),
                          pro.comment]for pro in eval_proli for n, c in
                         enumerate(pro.clusterli)])
    def write_interpoint_summaries():
        """Write all non-simulated interpoint distances to 'interpoint.summary'.

        Builds one column per interpoint relation (and, when both distance
        kinds are enabled, a second group of columns for lateral distances),
        then writes the transposed table.
        """
        if not opt.determine_interpoint_dists:
            return
        # Keep only enabled relations that do not involve simulated points.
        ip_rels = dict([(key, val)
                        for key, val in opt.interpoint_relations.items()
                        if val and 'simulated' not in key])
        # Relations involving random points are dropped when random points
        # were not generated for this run.
        if not opt.use_random:
            for key, val in opt.interpoint_relations.items():
                if 'random' in key and val:
                    del ip_rels[key]
        if (len(ip_rels) == 0 or not
                (opt.interpoint_shortest_dist or opt.interpoint_lateral_dist)):
            return
        table = []
        if opt.interpoint_dist_mode == 'all':
            s = "all distances"
        else:
            s = "nearest neighbour distances"
        table.append(["Mode: " + s])
        headerli = list(ip_rels.keys())
        prefixli = []
        for key, val in ip_rels.items():
            # Build the attribute prefix from the relation name, e.g. a key
            # of the form "particle - particle" yields "pp_" -- assumes keys
            # contain "- " followed by the second endpoint; TODO confirm
            # against how opt.interpoint_relations keys are spelled.
            prefix = key[0] + key[key.index("- ") + 2] + "_"
            prefixli.append(prefix)
        # With both distance kinds enabled, duplicate the columns: first the
        # shortest distances, then the lateral ("<prefix>lat") ones.
        if opt.interpoint_shortest_dist and opt.interpoint_lateral_dist:
            headerli.extend(headerli)
            prefixli.extend([t + 'lat' for t in prefixli])
        topheaderli = []
        if opt.interpoint_shortest_dist:
            topheaderli.append("Shortest distances")
            if opt.interpoint_lateral_dist:
                # Pad so the lateral group header starts above its columns.
                topheaderli.extend([""] * (len(ip_rels) - 1))
        if opt.interpoint_lateral_dist:
            topheaderli.append("Lateral distances along postsynaptic element "
                               "membrane")
        table.extend([topheaderli, headerli])
        # Gather each relation's distances across all profiles into columns.
        cols = [[] for _ in prefixli]
        for pro in eval_proli:
            for n, li in enumerate([pro.__dict__[prefix + "distli"]
                                    for prefix in prefixli]):
                cols[n].extend([m(e, pro.pixelwidth) for e in li])
        # transpose cols and append to table
        table.extend(list(itertools.zip_longest(*cols, fillvalue="")))
        with file_io.FileWriter("interpoint.summary", opt) as f:
            f.writerows(table)
def write_mc_dist_to_psd(dtype):
if not opt.run_monte_carlo:
return
table = []
if dtype == 'metric':
table.append(["Lateral distances in %s to center of the nearest PSD"
% eval_proli[0].metric_unit])
elif dtype == 'normalized':
table.append(["Normalized lateral distances to the center of the nearest PSD"])
table.append(["Run %d" % (n + 1) for n in range(0, opt.monte_carlo_runs)])
for pro in eval_proli:
if dtype == 'metric':
table.extend(zip(*[[m(p.lateral_dist_psd, pro.pixelwidth) for p in li["pli"]]
for li in pro.mcli]))
elif dtype == 'normalized':
table.extend(zip(*[[p.norm_lateral_dist_psd for p in li["pli"]]
for li in pro.mcli]))
with file_io.FileWriter("simulated.PSD.%s.lateral.distances" % dtype, opt) as f:
f.writerows(table)
    def write_mc_dist_to_posel():
        """Write simulated distances to the postsynaptic element membrane.

        One column per Monte Carlo run; rows are the per-point distances,
        converted to metric units.  No-op unless simulations were run.
        """
        if not opt.run_monte_carlo:
            return
        table = [["Run %d" % (n + 1) for n in range(0, opt.monte_carlo_runs)]]
        for pro in eval_proli:
            # zip_longest pads shorter runs (fill value None -> empty cell)
            # so every run's full point list is written.
            table.extend(itertools.zip_longest(*[[m(p.dist_to_posel, pro.pixelwidth)
                                                  for p in li['pli']] for li in pro.mcli]))
        with file_io.FileWriter(
                "simulated.postsynaptic.element.membrane.distances", opt) as f:
            f.writerows(table)
def write_mc_ip_dists(dist_type):
def m_li(*_li):
return [m(x, pro.pixelwidth) for x in _li]
if not (opt.run_monte_carlo and opt.determine_interpoint_dists):
return
for ip_type in [key for key, val in opt.interpoint_relations.items()
if 'simulated' in key and val]:
if ((dist_type == 'shortest' and not opt.interpoint_shortest_dist) or
(dist_type == 'lateral' and not opt.interpoint_lateral_dist)):
return
if dist_type == 'lateral':
short_dist_type = 'lat'
else:
short_dist_type = ''
table = [["Run %d" % (n + 1) for n in range(0, opt.monte_carlo_runs)]]
for pro in eval_proli:
table.extend(itertools.zip_longest(*[m(p, pro.pixelwidth)
for li in pro.mcli
for p in li[ip_type]["%sdist"
% short_dist_type]]))
with file_io.FileWriter("%s.interpoint.%s.distances"
% (ip_type.replace(" ", ""), dist_type), opt) as f:
f.writerows(table)
    def write_mc_cluster_summary():
        """Write one row per simulated cluster to 'simulated.clusters'.

        No-op unless both clustering and Monte Carlo simulation were
        enabled for this run.
        """
        if not (opt.determine_clusters and opt.run_monte_carlo):
            return
        table = [["N particles in cluster", "Run",
                  "Distance to postsynaptic element membrane from centroid",
                  "Distance to nearest cluster along postsynaptic element membrane",
                  "Profile ID",
                  "Input file",
                  "Comment"]]
        for pro in eval_proli:
            # Runs are reported 1-based in the "Run" column.
            for n in range(0, opt.monte_carlo_runs):
                for c in pro.mcli[n]["clusterli"]:
                    table.append([len(c), n + 1,
                                  m(c.dist_to_posel, pro.pixelwidth),
                                  m(na(c.dist_to_nearest_cluster),
                                    pro.pixelwidth),
                                  pro.id,
                                  os.path.basename(pro.inputfn),
                                  pro.comment])
        with file_io.FileWriter("simulated.clusters", opt) as f:
            f.writerows(table)
    # ------------------------------------------------------------------
    # Body of the enclosing function (presumably save_output, given the
    # call at the end of main_proc -- confirm): classify profiles, then
    # emit every summary type via the nested writers above.
    # ------------------------------------------------------------------
    sys.stdout.write("\nSaving summaries to %s:\n" % opt.output_dir)
    # file_io.FileWriter presumably updates these flags as files are
    # written -- TODO confirm.
    opt.save_result = {'any_saved': False, 'any_err': False}
    # Only profiles without fatal errors contribute to the summaries.
    eval_proli = [profile for profile in profileli if not profile.errflag]
    clean_fli = [profile.inputfn for profile in profileli
                 if not (profile.errflag or profile.warnflag)]
    warn_fli = [profile.inputfn for profile in profileli if profile.warnflag]
    err_fli = [profile.inputfn for profile in profileli if profile.errflag]
    # Evaluated profiles that contain no particles at all.
    nop_fli = [profile.inputfn for profile in eval_proli if not profile.pli]
    write_session_summary()
    write_profile_summary()
    write_point_summary('particle')
    write_point_summary('random')
    write_interpoint_summaries()
    write_cluster_summary()
    write_mc_dist_to_posel()
    write_mc_dist_to_psd('metric')
    write_mc_dist_to_psd('normalized')
    write_mc_ip_dists('shortest')
    write_mc_ip_dists('lateral')
    write_mc_cluster_summary()
    if opt.save_result['any_err']:
        sys.stdout.write("Note: One or more summaries could not be saved.\n")
    if opt.save_result['any_saved']:
        sys.stdout.write("Done.\n")
    else:
        sys.stdout.write("No summaries saved.\n")
def reset_options(opt):
    """ Deletes certain options that should always be set anew for each run
        (each time the "Start" button is pressed)
    """
    for name in ('metric_unit', 'use_random'):
        try:
            delattr(opt, name)
        except AttributeError:
            # Attribute was never set (or already cleared) -- nothing to do.
            pass
def show_options(opt):
    """Print the effective session options to stdout.

    Conditional sections (interpoint, Monte Carlo, clustering) are only
    printed when the corresponding feature is enabled.
    """
    sys.stdout.write("{} version: {} (Last modified {} {}, {})\n".format(
        version.title, version.version, *version.date))
    sys.stdout.write("Output file format: %s\n" % opt.output_file_format)
    sys.stdout.write("Suffix of output files: %s\n" % opt.output_filename_suffix)
    sys.stdout.write("Output directory: %s\n" % opt.output_dir)
    sys.stdout.write("Spatial resolution: %d\n" % opt.spatial_resolution)
    sys.stdout.write("Shell width: %d metric units\n" % opt.shell_width)
    sys.stdout.write("Interpoint distances calculated: %s\n"
                     % stringconv.yes_or_no(opt.determine_interpoint_dists))
    if opt.determine_interpoint_dists:
        sys.stdout.write("Interpoint distance mode: %s\n" % opt.interpoint_dist_mode.capitalize())
        sys.stdout.write("Shortest interpoint distances: %s\n"
                         % stringconv.yes_or_no(opt.interpoint_shortest_dist))
        sys.stdout.write("Lateral interpoint distances: %s\n"
                         % stringconv.yes_or_no(opt.interpoint_lateral_dist))
    sys.stdout.write("Monte Carlo simulations performed: %s\n"
                     % stringconv.yes_or_no(opt.run_monte_carlo))
    if opt.run_monte_carlo:
        sys.stdout.write("Number of Monte Carlo runs: %d\n"
                         % opt.monte_carlo_runs)
        sys.stdout.write("Monte Carlo simulation window: %s\n"
                         % opt.monte_carlo_simulation_window)
        sys.stdout.write("Strict localization in simulation window: %s\n"
                         % stringconv.yes_or_no(opt.monte_carlo_strict_location))
    sys.stdout.write("Clusters determined: %s\n" % stringconv.yes_or_no(opt.determine_clusters))
    if opt.determine_clusters:
        sys.stdout.write("Within-cluster distance: %d\n" % opt.within_cluster_dist)
def get_output_format(opt):
    """Resolve output format, extension, csv dialect and filename suffix.

    Mutates opt in place: falls back from 'excel' to 'csv' when openpyxl
    is unavailable, configures the csv writer options, and appends the
    date and/or custom suffix to the output filename as configured.
    """
    if opt.output_file_format == 'excel':
        try:
            import openpyxl  # availability check only
        except ImportError:
            sys.stdout.write("Unable to write Excel files: resorting to csv format.\n")
            opt.output_file_format = 'csv'
    if opt.output_file_format == 'csv':
        opt.output_filename_ext = '.csv'
        csv_format = {'dialect': 'excel', 'lineterminator': '\n'}
        if opt.csv_delimiter == 'tab':
            csv_format['delimiter'] = '\t'
        opt.csv_format = csv_format
    if opt.output_filename_date_suffix:
        from datetime import date
        opt.output_filename_suffix = "." + date.today().isoformat()
    # NOTE(review): when only the custom suffix is enabled, this assumes
    # opt.output_filename_suffix already exists on opt -- confirm callers
    # always initialize it.
    if opt.output_filename_other_suffix != '':
        opt.output_filename_suffix += "." + opt.output_filename_other_suffix
def main_proc(parent):
    """ Process profile data files

    Reads every file in parent.opt.input_file_list, reporting progress on
    parent.process_queue, then saves the summaries.

    Returns:
        0 -- no input files, or at least one file produced errors
        1 -- all files processed cleanly
        2 -- processed, but with warnings
        3 -- session aborted by the user
    """
    opt = parent.opt
    if not opt.input_file_list:
        sys.stdout.write("No input files.\n")
        return 0
    i, n = 0, 0
    profileli = []
    sys.stdout.write("--- Session started %s local time ---\n" % time.ctime())
    # De-duplicate the input list.  Iterate over a *copy*: removing items
    # from a list while iterating over it makes the iterator skip the
    # element that follows each removal, so duplicates could survive.
    for f in opt.input_file_list[:]:
        if opt.input_file_list.count(f) > 1:
            sys.stdout.write("Duplicate input filename %s:\n => removing first occurrence in "
                             "list\n" % f)
            opt.input_file_list.remove(f)
    get_output_format(opt)
    reset_options(opt)
    show_options(opt)
    while True:
        if i < len(opt.input_file_list):
            inputfn = opt.input_file_list[i]
            i += 1
        else:
            sys.stdout.write("\nNo more input files...\n")
            break
        parent.process_queue.put(("new_file", inputfn))
        profileli.append(core.ProfileData(inputfn, opt))
        profileli[-1].process()
        if opt.stop_requested:
            sys.stdout.write("\n--- Session aborted by user %s local time ---\n" % time.ctime())
            return 3
        if not profileli[-1].errflag:
            # Count successfully processed profiles.
            n += 1
            if profileli[-1].warnflag:
                sys.stdout.write("Warning(s) found while processing input file.\n")
            continue
        else:
            sys.stdout.write("Error(s) found while processing input file =>\n"
                             " => No distances could be determined.\n")
            continue
    # no more input files
    errfli = [pro.inputfn for pro in profileli if pro.errflag]
    warnfli = [pro.inputfn for pro in profileli if pro.warnflag]
    if errfli:
        sys.stdout.write("\n%s input %s generated one or more errors:\n"
                         % (stringconv.plurality("This", len(errfli)),
                            stringconv.plurality("file", len(errfli))))
        sys.stdout.write("%s\n" % "\n".join([fn for fn in errfli]))
    if warnfli:
        sys.stdout.write("\n%s input %s generated one or more warnings:\n"
                         % (stringconv.plurality("This", len(warnfli)),
                            stringconv.plurality("file", len(warnfli))))
        sys.stdout.write("%s\n" % "\n".join([fn for fn in warnfli]))
    if n > 0:
        parent.process_queue.put(("saving_summaries", ""))
        save_output(profileli, opt)
    else:
        sys.stdout.write("\nNo files processed.\n")
    sys.stdout.write("--- Session ended %s local time ---\n" % time.ctime())
    parent.process_queue.put(("done", ""))
    # NOTE(review): errors map to the same return code (0) as "no input
    # files"; kept for backward compatibility -- confirm callers rely on it.
    if errfli:
        return 0
    elif warnfli:
        return 2
    else:
        return 1
# End of main.py
| maxdl/Synapse.py | synapse/main.py | Python | mit | 24,280 |
# frozen_string_literal: true
require "rails_helper"
RSpec.describe Admin::ArticlesController, type: :controller do
render_views
before do
create :blog
end
describe "index" do
let(:publisher) { create(:user, :as_publisher) }
let!(:article) { create(:article) }
before do
sign_in publisher
end
context "simple query" do
before { get :index }
it { expect(response).to be_successful }
it { expect(response).to render_template("index", layout: "administration") }
end
it "return article that match with search query" do
get :index, params: { search: { searchstring: article.body[0..4] } }
expect(assigns(:articles)).to eq([article])
end
it "search query and limit on published_at" do
get :index, params: { search: {
searchstring: article.body[0..4],
published_at: article.published_at + 2.days,
} }
expect(assigns(:articles)).to be_empty
end
context "search for state" do
let!(:draft_article) { create(:article, state: "draft") }
let!(:pending_article) do
create(:article, state: "publication_pending", published_at: "2020-01-01")
end
before { get :index, params: { search: state } }
context "draft only" do
let(:state) { { state: "drafts" } }
it { expect(assigns(:articles)).to eq([draft_article]) }
end
context "publication_pending only" do
let(:state) { { state: "pending" } }
it { expect(assigns(:articles)).to eq([pending_article]) }
end
context "with a bad state" do
let(:state) { { state: "3vI1 1337 h4x0r" } }
it "returns all states" do
expect(assigns(:articles).sort).
to eq([article, pending_article, draft_article].sort)
end
end
end
end
describe "#autosave" do
let(:publisher) { create(:user, :as_publisher) }
before do
sign_in publisher
end
context "first time save" do
it "creates a new draft Article" do
expect do
post :autosave, xhr: true, params: { article: attributes_for(:article) }
end.to change(Article, :count).by(1)
end
it "creates tags for the draft article if relevant" do
expect do
post :autosave,
xhr: true, params: { article: attributes_for(:article, :with_tags) }
end.to change(Tag, :count).by(2)
end
end
context "second call to save" do
let!(:draft) { create(:article, state: "draft") }
it "does not create an extra draft" do
expect do
post :autosave,
xhr: true, params: { article: { id: draft.id,
body_and_extended: "new body" } }
end.not_to change(Article, :count)
end
end
context "with an other existing draft" do
let!(:draft) { create(:article, state: "draft", body: "existing body") }
it "creates a new draft Article" do
expect do
post :autosave, xhr: true, params: { article: attributes_for(:article) }
end.to change(Article, :count).by(1)
end
it "does not replace existing draft" do
post :autosave, xhr: true, params: { article: attributes_for(:article) }
expect(assigns(:article).id).not_to eq(draft.id)
expect(assigns(:article).body).not_to eq(draft.body)
end
end
end
describe "#new" do
let(:publisher) { create(:user, :as_publisher) }
before do
sign_in publisher
get :new
end
it { expect(response).to be_successful }
it { expect(response).to render_template("new") }
it { expect(assigns(:article)).not_to be_nil }
it { expect(assigns(:article).redirect).to be_nil }
end
describe "#create" do
shared_examples_for "create action" do
def base_article(options = {})
{ title: "posted via tests!",
body: "A good body",
allow_comments: "1",
allow_pings: "1" }.merge(options)
end
it "sends notifications on create" do
u = create(:user, notify_via_email: true, notify_on_new_articles: true)
u.save!
ActionMailer::Base.deliveries.clear
emails = ActionMailer::Base.deliveries
post :create, params: { "article" => base_article }
assert_equal(1, emails.size)
assert_equal(u.email, emails.first.to[0])
end
it "creates an article with tags" do
post :create, params: { "article" => base_article(keywords: "foo bar") }
new_article = Article.last
assert_equal 2, new_article.tags.size
end
it "creates an article with a password" do
post :create, params: { "article" => base_article(password: "foobar") }
new_article = Article.last
expect(new_article.password).to eq("foobar")
end
it "creates an article with a unique Tag instance named lang:FR" do
post :create, params: { "article" => base_article(keywords: "lang:FR") }
new_article = Article.last
expect(new_article.tags.map(&:name)).to include("lang-fr")
end
it "interprets time zone in :published_at correctly" do
article = base_article(published_at: "February 17, 2011 08:47 PM GMT+0100 (CET)")
post :create, params: { article: article }
new_article = Article.last
assert_equal Time.utc(2011, 2, 17, 19, 47), new_article.published_at
end
it 'respects "GMT+0000 (UTC)" in :published_at' do
article = base_article(published_at: "August 23, 2011 08:40 PM GMT+0000 (UTC)")
post :create, params: { article: article }
new_article = Article.last
assert_equal Time.utc(2011, 8, 23, 20, 40), new_article.published_at
end
it "creates a filtered article" do
Article.delete_all
body = "body via *markdown*"
extended = "*foo*"
post :create,
params: { article: { title: "another test", body: body, extended: extended } }
assert_response :redirect, action: "index"
new_article = Article.order(created_at: :desc).first
expect(new_article.body).to eq body
expect(new_article.extended).to eq extended
expect(new_article.text_filter.name).to eq "markdown smartypants"
expect(new_article.html(:body)).to eq "<p>body via <em>markdown</em></p>"
expect(new_article.html(:extended)).to eq "<p><em>foo</em></p>"
end
context "with a previously autosaved draft" do
before do
@draft = create(:article, body: "draft", state: "draft")
post :create,
params: { article: { id: @draft.id, body: "update", published: true } }
end
it "updates the draft" do
expect(Article.find(@draft.id).body).to eq "update"
end
it "makes the draft published" do
expect(Article.find(@draft.id)).to be_published
end
end
describe "with an unrelated draft in the database" do
before do
@draft = create(:article, state: "draft")
end
describe "saving new article as draft" do
it "leaves the original draft in existence" do
post :create, params: { article: base_article(draft: "save as draft") }
expect(assigns(:article).id).not_to eq(@draft.id)
expect(Article.find(@draft.id)).not_to be_nil
end
end
end
end
context "as a publisher" do
let(:publisher) { create(:user, :as_publisher) }
let(:article_params) { { title: "posted via tests!", body: "a good boy" } }
before do
sign_in publisher
@user = publisher
end
it "creates an article" do
expect do
post :create, params: { article: article_params }
end.to change(Article, :count).by(1)
end
context "classic" do
before { post :create, params: { article: article_params } }
it { expect(response).to redirect_to(action: :index) }
it { expect(flash[:success]).to eq(I18n.t("admin.articles.create.success")) }
it { expect(assigns(:article)).to be_published }
it { expect(assigns(:article).user).to eq(publisher) }
context "when doing a draft" do
let(:article_params) do
{ title: "posted via tests!", body: "a good boy", draft: "true" }
end
it { expect(assigns(:article)).not_to be_published }
end
end
context "writing for the future" do
let(:article_params) do
{ title: "posted via tests!", body: "a good boy",
published_at: 1.hour.from_now.to_s }
end
before do
post :create, params: { article: article_params }
end
it "does not create a short url" do
expect(Redirect.count).to eq 0
end
it "creates a trigger to publish the article" do
expect(Trigger.count).to eq 1
end
it "does not publish the article" do
expect(assigns(:article)).to be_publication_pending
end
it "sets the publication time in the future" do
expect(assigns(:article).published_at).to be > 10.minutes.from_now
end
end
end
context "as an admin" do
let(:admin) { create(:user, :as_admin) }
before do
sign_in admin
@user = admin
end
it_behaves_like "create action"
end
end
describe "#edit" do
context "as an admin" do
let(:admin) { create(:user, :as_admin) }
let(:article) { create(:article) }
before do
sign_in admin
end
it "edits article" do
get :edit, params: { "id" => article.id }
expect(response).to render_template "edit"
expect(assigns(:article)).not_to be_nil
expect(assigns(:article)).to be_valid
expect(response.body).to match(/body/)
expect(response.body).to match(/extended content/)
end
it "correctly converts multi-word tags" do
a = create(:article, keywords: '"foo bar", baz')
get :edit, params: { id: a.id }
expect(response.body).
to have_selector("input[id=article_keywords][value='baz, \"foo bar\"']")
end
end
context "as a publisher" do
let(:publisher) { create(:user, :as_publisher) }
before do
sign_in publisher
end
context "with an article from an other user" do
let(:article) { create(:article, user: create(:user, login: "another_user")) }
before { get :edit, params: { id: article.id } }
it { expect(response).to redirect_to(action: "index") }
end
context "with an article from current user" do
let(:article) { create(:article, user: publisher) }
before { get :edit, params: { id: article.id } }
it { expect(response).to render_template("edit") }
it { expect(assigns(:article)).not_to be_nil }
it { expect(assigns(:article)).to be_valid }
end
end
end
describe "#update" do
context "as an admin" do
let(:admin) { create(:user, :as_admin) }
let(:article) { create(:article) }
before do
sign_in admin
end
it "updates article" do
emails = ActionMailer::Base.deliveries
emails.clear
art_id = article.id
body = "another *markdown* test"
put :update, params: { id: art_id,
article: { body: body, text_filter_name: "markdown" } }
assert_response :redirect, action: "show", id: art_id
article.reload
expect(article.text_filter.name).to eq("markdown")
expect(body).to eq(article.body)
expect(emails.size).to eq(0)
end
it "allows updating body_and_extended" do
put :update, params: { "id" => article.id, "article" => {
"body_and_extended" => "foo<!--more-->bar<!--more-->baz",
} }
assert_response :redirect
article.reload
expect(article.body).to eq("foo")
expect(article.extended).to eq("bar<!--more-->baz")
end
it "allows updating password" do
put :update, params: { "id" => article.id, "article" => {
"password" => "foobar",
} }
assert_response :redirect
article.reload
expect(article.password).to eq("foobar")
end
context "when a published article has drafts" do
let(:original_published_at) { 2.days.ago.to_date }
let!(:original) { create(:article, published_at: original_published_at) }
let!(:draft) { create(:article, parent_id: original.id, state: "draft") }
let!(:second_draft) { create(:article, parent_id: original.id, state: "draft") }
describe "publishing the published article" do
before do
put(:update, params: {
id: original.id,
article: { id: draft.id, body: "update" },
})
end
it "updates the article" do
expect(original.reload.body).to eq "update"
end
it "deletes all drafts" do
assert_raises ActiveRecord::RecordNotFound do
Article.find(draft.id)
end
assert_raises ActiveRecord::RecordNotFound do
Article.find(second_draft.id)
end
end
it "keeps the original publication date" do
expect(original.reload.published_at).to eq original_published_at
end
end
describe "publishing a draft copy of the published article" do
before do
put(:update, params: {
id: draft.id,
article: { id: draft.id, body: "update" },
})
end
it "updates the original" do
expect(original.reload.body).to eq("update")
end
it "deletes all drafts" do
assert_raises ActiveRecord::RecordNotFound do
Article.find(draft.id)
end
assert_raises ActiveRecord::RecordNotFound do
Article.find(second_draft.id)
end
end
it "keeps the original publication date" do
expect(original.reload.published_at).to eq original_published_at
end
end
describe "publishing a draft copy with a new publication date" do
before do
put(:update, params: {
id: draft.id,
article: { id: draft.id, body: "update", published_at: "2016-07-07" },
})
end
it "updates the original publication date" do
expect(original.reload.published_at).to eq Date.new(2016, 7, 7)
end
end
end
describe "saving a published article as draft" do
before do
@orig = create(:article)
put(:update, params: {
id: @orig.id,
article: { title: @orig.title, draft: "draft", body: "update" },
})
end
it "leaves the original published" do
@orig.reload
expect(@orig).to be_published
end
it "leaves the original as is" do
@orig.reload
expect(@orig.body).not_to eq("update")
end
it "redirects to the index" do
expect(response).to redirect_to(action: "index")
end
it "creates a draft" do
draft = Article.child_of(@orig.id).first
expect(draft.parent_id).to eq(@orig.id)
expect(draft).not_to be_published
end
end
end
context "as a publisher" do
let(:publisher) { create(:user, :as_publisher) }
before do
sign_in publisher
end
context "with an article" do
let(:article) { create(:article, body: "another *markdown* test", user: publisher) }
let(:body) { "not the *same* text" }
before do
put :update,
params: { id: article.id,
article: { body: body, text_filter_name: "markdown" } }
end
it { expect(response).to redirect_to(action: "index") }
it { expect(article.reload.text_filter.name).to eq("markdown") }
it { expect(article.reload.body).to eq(body) }
end
end
end
describe "#auto_complete_for_article_keywords" do
let(:publisher) { create(:user, :as_publisher) }
before do
sign_in publisher
end
before do
create(:tag, name: "foo", contents: [create(:article)])
create(:tag, name: "bazz", contents: [create(:article)])
create(:tag, name: "bar", contents: [create(:article)])
end
it "returns foo for keywords fo" do
get :auto_complete_for_article_keywords, params: { article: { keywords: "fo" } }
expect(response).to be_successful
expect(response.body).to eq('["bar","bazz","foo"]')
end
end
describe "#destroy" do
let(:publisher) { create(:user, :as_publisher) }
before do
sign_in publisher
end
context "with an article from other user" do
let(:article) { create(:article, user: create(:user, login: "other_user")) }
before { delete :destroy, params: { id: article.id } }
it { expect(response).to redirect_to(action: "index") }
it { expect(Article.count).to eq(1) }
end
context "with an article from user" do
let(:article) { create(:article, user: publisher) }
before { delete :destroy, params: { id: article.id } }
it { expect(response).to redirect_to(action: "index") }
it { expect(Article.count).to eq(0) }
end
end
end
| publify/publify | publify_core/spec/controllers/admin/articles_controller_spec.rb | Ruby | mit | 17,734 |
<?php
namespace HackAveiro\Web2Badge\Util;
class StringCleaner
{
static function removeAccents($string)
{
$unwanted_array = array('Š'=>'S', 'š'=>'s', 'Ž'=>'Z', 'ž'=>'z', 'À'=>'A',
'Á'=>'A', 'Â'=>'A', 'Ã'=>'A', 'Ä'=>'A', 'Å'=>'A',
'Æ'=>'A', 'Ç'=>'C', 'È'=>'E', 'É'=>'E', 'Ê'=>'E',
'Ë'=>'E', 'Ì'=>'I', 'Í'=>'I', 'Î'=>'I', 'Ï'=>'I',
'Ñ'=>'N', 'Ò'=>'O', 'Ó'=>'O', 'Ô'=>'O', 'Õ'=>'O',
'Ö'=>'O', 'Ø'=>'O', 'Ù'=>'U', 'Ú'=>'U', 'Û'=>'U',
'Ü'=>'U', 'Ý'=>'Y', 'Þ'=>'B', 'ß'=>'Ss', 'à'=>'a',
'á'=>'a', 'â'=>'a', 'ã'=>'a', 'ä'=>'a', 'å'=>'a',
'æ'=>'a', 'ç'=>'c', 'è'=>'e', 'é'=>'e', 'ê'=>'e',
'ë'=>'e', 'ì'=>'i', 'í'=>'i', 'î'=>'i', 'ï'=>'i',
'ð'=>'o', 'ñ'=>'n', 'ò'=>'o', 'ó'=>'o', 'ô'=>'o',
'õ'=>'o', 'ö'=>'o', 'ø'=>'o', 'ù'=>'u', 'ú'=>'u',
'û'=>'u', 'ý'=>'y', 'ý'=>'y', 'þ'=>'b', 'ÿ'=>'y');
$cleanedString = strtr($string, $unwanted_array);
return $cleanedString;
}
    /**
     * Replace every newline character with a single space.
     *
     * @param string $string input
     * @return string single-line string
     */
    static function removeLines($string)
    {
        return str_replace("\n", ' ', $string);
    }
    /**
     * Normalise a string for badge output: strip accents, then newlines.
     *
     * @param string $string raw input
     * @return string cleaned string
     */
    static function clean($string)
    {
        $cleanedString = self::removeAccents($string);
        $cleanedString = self::removeLines($cleanedString);
        //$cleanedString = mb_convert_encoding($cleanedString, 'ASCII', 'UTF-8');
        return $cleanedString;
    }
} | HackAveiro/web2badge-server | src/HackAveiro/Web2Badge/Util/StringCleaner.php | PHP | mit | 1,742 |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Windows.UI.Xaml.Data;
namespace AppUIBasics.Common
{
public class NullableBooleanToBooleanConverter : IValueConverter
{
public object Convert(object value, Type targetType, object parameter, string language)
{
if(value is bool?)
{
return (bool)value;
}
return false;
}
public object ConvertBack(object value, Type targetType, object parameter, string language)
{
if (value is bool)
return (bool)value;
return false;
}
}
}
| mobilecp/Windows-universal-samples | xaml_xamluibasics/CS/AppUIBasics/Common/NullableBooleanToBooleanConverter.cs | C# | mit | 741 |
(function(exports) {
  // Swap the 360° sky for the equirectangular texture at `location`.
  function changeSky(location) {
    document.getElementById("image-360").setAttribute('src', location);
  }

  // Drop a dark, 2001-style monolith box into the A-Frame scene.
  function addMonolith() {
    var monolith = document.createElement('a-box');
    document.querySelector('a-scene').appendChild(monolith);
    var attributes = {
      id: 'monolith',
      color: '#222',
      width: '0.5',
      height: '4',
      depth: '2',
      position: '-5 2 0',
      scale: '0.4 0.4 0.4'
    };
    Object.keys(attributes).forEach(function(name) {
      monolith.setAttribute(name, attributes[name]);
    });
  }

  function removeMonolith() {
    var monolith = document.getElementById('monolith');
    monolith.parentNode.removeChild(monolith);
  }

  // Attach the `rain` component to the scene entity.
  function addRain() {
    var scene = document.getElementById('scene');
    console.log(scene);
    scene.setAttribute('rain', '');
  }

  function stopRain() {
    var scene = document.getElementById('scene');
    scene.removeAttribute('rain', '');
  }

  exports.changeSky = changeSky;
  exports.addMonolith = addMonolith;
  exports.removeMonolith = removeMonolith;
  exports.addRain = addRain;
  exports.stopRain = stopRain;
})(this);
(function(exports) {
  // Persist a browser push token under /browserTokens in Firebase.
  function captureToken(token) {
    var tokensRef = firebase.database().ref('browserTokens');
    tokensRef.push({ timestamp: Date.now(), token: token }, onComplete);
  }

  // Firebase write callback: `error` is null on success.
  function onComplete(error) {
    if (error) {
      console.log('Did not save to DB' + error);
    } else {
      console.log('Browser token saved to DB');
    }
  }

  exports.captureToken = captureToken;
})(this);
| nazwhale/spaceship | public/js/interface.js | JavaScript | mit | 1,629 |
// Inject an external <script src=file> element into the first element with
// tag name `node`, so the code runs in the host page's own context.
function injectScript(file, node) {
  var scriptEl = document.createElement('script');
  scriptEl.setAttribute('type', 'text/javascript');
  scriptEl.setAttribute('src', file);
  var target = document.getElementsByTagName(node)[0];
  target.appendChild(scriptEl);
}
// Inject an external stylesheet <link href=file> element into the first
// element with tag name `node`.
function injectStyle(file, node) {
  var linkEl = document.createElement('link');
  linkEl.setAttribute('type', 'text/css');
  linkEl.setAttribute('href', file);
  linkEl.setAttribute('rel', "stylesheet");
  var target = document.getElementsByTagName(node)[0];
  target.appendChild(linkEl);
}
// Inject the extension's page-level script, styles, and web font.
injectScript(chrome.extension.getURL("js/page-scripts.js"), "body");
injectStyle(chrome.extension.getURL("css/page-styles.css"), "body");
injectStyle("https://fonts.googleapis.com/css?family=Oxygen:400,700,300", "body");

// Unless the user toggled the option off, fetch Bing's picture of the day and
// use it as the login-page background.
chrome.storage.sync.get("backImgTog", function(storage) {
  var toggle = storage.backImgTog;
  if (toggle == undefined || !toggle) {
    $.get("https://www.bing.com/HPImageArchive.aspx?format=js&idx=0&n=1&mkt=en-US", function(response) {
      var url = "https://www.bing.com" + response.images[0].url;
      $(window).trigger('resize');
      $("#authenticate").css("background-image", "url(" + url + ")");
    });
  }
});

// Relabel the login button once the DOM is ready.
$(document).ready(function() {
  $("form[name=authenticate] fieldset input[type=submit]").val("Login");
  console.log("SchedulesPlus Ready!");
});
| SchedulesPlus/SchedulesPlus | js/inject.js | JavaScript | mit | 1,296 |
# Pundit-style policy for Comment records: every action except index is
# restricted to the owner of the comment's project.
class CommentPolicy < ApplicationPolicy
  attr_reader :user, :comment

  def initialize(user, comment)
    @user = user
    @comment = comment
  end

  # Anyone may list comments.
  def index?
    true
  end

  # Only the owner of the comment's project may view it.
  def show?
    user.id == comment.project.user_id
  end

  # Creating, editing, updating, and destroying all share show?'s ownership rule.
  def create?
    show?
  end

  def new?
    create?
  end

  def update?
    show?
  end

  def edit?
    show?
  end

  def destroy?
    show?
  end

  # Scope resolution: a user only ever sees their own comments.
  class Scope
    attr_reader :user, :scope

    def initialize(user, scope)
      @user = user
      @scope = scope
    end

    def resolve
      user.comments
    end
  end
end
| abratashov/TManager | app/policies/comment_policy.rb | Ruby | mit | 572 |
// On page load, resolve the user's coordinates from the browser and kick off
// the nearest-city lookup.
window.onload = function() {
  if (!navigator.geolocation) {
    // No HTML5/W3C Geolocation support: default to Washington, DC.
    NearestCity(38.8951, -77.0367);
    return;
  }
  navigator.geolocation.getCurrentPosition(UserLocation);
};
// Geolocation success callback: hand the position fix to the nearest-city lookup.
function UserLocation(position) {
  var coords = position.coords;
  NearestCity(coords.latitude, coords.longitude);
}
// Convert an angle from degrees to radians.
function Deg2Rad(deg) {
  var radians = deg * Math.PI / 180;
  return radians;
}

// Approximate distance in km between two lat/lon points using the
// equirectangular projection plus Pythagoras — cheap and accurate enough
// for a nearest-city search.
function PythagorasEquirectangular(lat1, lon1, lat2, lon2) {
  var phi1 = Deg2Rad(lat1);
  var phi2 = Deg2Rad(lat2);
  var lam1 = Deg2Rad(lon1);
  var lam2 = Deg2Rad(lon2);
  var R = 6371; // mean Earth radius in km
  var x = (lam2 - lam1) * Math.cos((phi1 + phi2) / 2);
  var y = (phi2 - phi1);
  return Math.sqrt(x * x + y * y) * R;
}
// Seed coordinates for the nearest-city search; the hard-coded 20/40 values
// look like placeholders — presumably overwritten with the geolocated
// position before use (verify against NearestCity in the rest of the file).
var lat = 20; // user's latitude
var lon = 40; // user's longitude
var cities = [
["3","Aachen","50.782659","6.094087","202","Nordrhein-Westfalen"],
["44","Großenkneten","52.933541","8.236997","43.5","Niedersachsen"],
["73","Aldersbach-Kriestorf","48.615935","13.050595","340","Bayern"],
["78","Alfhausen","52.485314","7.912553","65","Niedersachsen"],
["91","Alsfeld-Eifa","50.744591","9.344972","300","Hessen"],
["142","Altomünster-Maisbrunn","48.406038","11.311716","510","Bayern"],
["150","Alzey","49.7273","8.116356","215","Rheinland-Pfalz"],
["151","Amberg-Unterammersricht","49.469064","11.854641","383","Bayern"],
["164","Angermünde","53.031601","13.99066","54","Brandenburg"],
["183","Arkona","54.67916","13.434252","42","Mecklenburg-Vorpommern"],
["198","Artern","51.374461","11.291977","164","Thüringen"],
["217","Attenkam","47.877407","11.364245","672","Bayern"],
["232","Augsburg","48.425393","10.942011","461.4","Bayern"],
["243","Aurich","53.462052","7.466971","4","Niedersachsen"],
["257","Baden-Baden-Geroldsau","48.72696","8.245757","240","Baden-Württemberg"],
["282","Bamberg","49.874176","10.920581","240","Bayern"],
["298","Barth","54.340582","12.710828","3","Mecklenburg-Vorpommern"],
["320","Heinersreuth-Vollhof","49.96667","11.519692","350","Bayern"],
["330","Beerfelden","49.561729","8.967329","450","Hessen"],
["377","BadBergzabern","49.107025","7.996749","252","Rheinland-Pfalz"],
["400","Berlin-Buch","52.630941","13.50215","60","Berlin"],
["403","Berlin-Dahlem(FU)","52.453711","13.301731","51","Berlin"],
["427","Berlin-Schönefeld","52.380698","13.530609","46","Brandenburg"],
["430","Berlin-Tegel","52.564412","13.308848","36","Berlin"],
["433","Berlin-Tempelhof","52.467488","13.402115","48","Berlin"],
["435","Berlin-Zehlendorf","52.428902","13.232686","45","Berlin"],
["450","Bernkastel-Kues","49.9186","7.0664","120","Rheinland-Pfalz"],
["502","Bischbrunn","49.874669","9.48829","412","Bayern"],
["591","Boizenburg","53.391084","10.687761","45","Mecklenburg-Vorpommern"],
["596","Boltenhagen","54.002806","11.190772","15","Mecklenburg-Vorpommern"],
["614","Borgentreich-Bühne","51.570874","9.311999","240","Nordrhein-Westfalen"],
["648","Brande-Hörnerkirchen","53.855114","9.71521","9","Schleswig-Holstein"],
["662","Braunschweig","52.291443","10.446456","81.2","Niedersachsen"],
["691","Bremen","53.045015","8.797904","4.1","Bremen"],
["701","Bremerhaven","53.533162","8.576083","7","Bremen"],
["722","Brocken","51.79862","10.618265","1133.9","Sachsen-Anhalt"],
["755","Buchen; Kr.Neckar-Odenwald","49.518196","9.32127","340","Baden-Württemberg"],
["807","Schlüsselfeld(Kläranlage)","49.74499","10.643423","290","Bayern"],
["817","Burgwald-Bottendorf","51.030634","8.814579","293","Hessen"],
["850","Celle","52.595933","10.029573","39","Niedersachsen"],
["853","Chemnitz","50.791286","12.871977","418","Sachsen"],
["863","Clausthal-Zellerfeld","51.790354","10.347039","585","Niedersachsen"],
["880","Cottbus","51.775983","14.316811","69","Brandenburg"],
["891","Cuxhaven","53.871254","8.705821","5","Niedersachsen"],
["963","Diepholz","52.588112","8.342405","37.7","Niedersachsen"],
["979","Dillenburg","50.736371","8.267238","314","Hessen"],
["982","Dillingen/Donau","48.570123","10.498459","420","Bayern"],
["1001","Doberlug-Kirchhain","51.645104","13.574676","96.8","Brandenburg"],
["1048","Dresden-Klotzsche","51.127955","13.754338","227","Sachsen"],
["1078","Düsseldorf","51.295952","6.768648","36.6","Nordrhein-Westfalen"],
["1103","Ebersberg-Halbing","48.100186","11.987154","592","Bayern"],
["1107","Ebrach","49.851977","10.499043","346","Bayern"],
["1197","Ellwangen-Rindelbach","48.989498","10.131234","460","Baden-Württemberg"],
["1224","Emmendingen-Mundingen","48.137756","7.835089","201","Baden-Württemberg"],
["1266","Erfde","54.299212","9.316185","18","Schleswig-Holstein"],
["1270","Erfurt-Weimar","50.982859","10.960808","316","Thüringen"],
["1279","Möhrendorf-Kleinseebach","49.649744","11.007445","268","Bayern"],
["1292","Eschenbach/Oberpfalz","49.752196","11.822151","470","Bayern"],
["1297","Eschwege","51.204132","10.013767","156","Hessen"],
["1303","Essen-Bredeney","51.404085","6.967741","150","Nordrhein-Westfalen"],
["1327","Weilerswist-Lommersum","50.711886","6.790489","147","Nordrhein-Westfalen"],
["1332","Falkenberg; Kr.Rottal-Inn","48.48315","12.724108","472","Bayern"],
["1346","Feldberg/Schwarzwald","47.874893","8.003817","1489.6","Baden-Württemberg"],
["1357","Fichtelberg/Oberfranken-Hüttstadl","49.98069","11.837637","654","Bayern"],
["1358","Fichtelberg","50.428346","12.953506","1213","Sachsen"],
["1420","Frankfurt/Main","50.025911","8.521294","99.7","Hessen"],
["1443","Freiburg","48.023276","7.834441","236.3","Baden-Württemberg"],
["1451","Freiburg/Elbe","53.827696","9.249276","2","Niedersachsen"],
["1468","Freudenstadt","48.453728","8.409057","796.5","Baden-Württemberg"],
["1503","Friesoythe-Altenoythe","53.064299","7.902205","5.7","Niedersachsen"],
["1526","Fulda-Horas","50.566806","9.65325","242","Hessen"],
["1544","Gardelegen","52.512914","11.394131","47","Sachsen-Anhalt"],
["1550","Garmisch-Partenkirchen","47.48305","11.062293","719","Bayern"],
["1580","Geisenheim","49.985931","7.954853","110.2","Hessen"],
["1590","Geldern-Walbeck","51.494168","6.246343","37","Nordrhein-Westfalen"],
["1612","Gera-Leumnitz","50.881268","12.128858","311","Thüringen"],
["1619","Gernsheim","49.761079","8.487637","90","Hessen"],
["1639","Gießen/Wettenberg","50.601706","8.643902","202.7","Hessen"],
["1645","Gilserberg-Moischeid","50.965565","9.050014","340","Hessen"],
["1667","Glückstadt","53.79702","9.429456","1","Schleswig-Holstein"],
["1684","Görlitz","51.162151","14.950565","238","Sachsen"],
["1691","Göttingen","51.500331","9.950566","167","Niedersachsen"],
["1735","Grainet-Rehberg","48.789321","13.62911","628","Bayern"],
["1757","Greifswald","54.096736","13.405576","2","Mecklenburg-Vorpommern"],
["1803","GroßLüsewitz","54.071372","12.32378","34","Mecklenburg-Vorpommern"],
["1832","GroßerArber","49.113037","13.134204","1436","Bayern"],
["1920","Hagen-Fley","51.412887","7.489477","100","Nordrhein-Westfalen"],
["1975","Hamburg-Fuhlsbüttel","53.633187","9.988085","11","Hamburg"],
["1981","Hamburg-Neuwiedenthal","53.477658","9.895686","3","Hamburg"],
["1990","Hamburg-Wandsbek","53.585347","10.129348","18","Hamburg"],
["1993","Hameln","52.084784","9.389576","68","Niedersachsen"],
["2014","Hannover","52.464425","9.677917","55","Niedersachsen"],
["2074","Hechingen","48.375139","8.980076","522","Baden-Württemberg"],
["2080","Heidelberg","49.420618","8.667613","110","Baden-Württemberg"],
["2110","Heinsberg-Schleiden","51.041072","6.104239","57","Nordrhein-Westfalen"],
["2115","Helgoland","54.174957","7.891954","4","Schleswig-Holstein"],
["2120","Helmstedt","52.216409","11.02193","140","Niedersachsen"],
["2147","Herford","52.126408","8.68649","77","Nordrhein-Westfalen"],
["2167","Niederwörresbach","49.767176","7.335605","302","Rheinland-Pfalz"],
["2171","BadHersfeld","50.851917","9.737819","272.2","Hessen"],
["2173","Herten","51.588967","7.154827","60","Nordrhein-Westfalen"],
["2206","Hildesheim","52.140812","9.883806","117","Niedersachsen"],
["2211","Hilgenroth","50.737065","7.652755","295","Rheinland-Pfalz"],
["2250","Höllenstein(Kraftwerk)","49.127875","12.864512","403","Bayern"],
["2260","Hof(Stadt)","50.323032","11.907729","474","Bayern"],
["2261","Hof","50.312236","11.876052","565.1","Bayern"],
["2268","Hofheim","50.135244","10.515636","263","Bayern"],
["2290","Hohenpeißenberg","47.800864","11.010754","977","Bayern"],
["2303","Hohn","54.314596","9.538997","10","Schleswig-Holstein"],
["2306","Hohwacht","54.319391","10.673193","8","Schleswig-Holstein"],
["2319","Holzkirchen","47.882278","11.69615","685","Bayern"],
["2324","Holzminden-Silberborn","51.765853","9.544662","440","Niedersachsen"],
["2480","Kahl/Main","50.064313","8993","107","Bayern"],
["2483","KahlerAsten","51.180254","8.489068","839","Nordrhein-Westfalen"],
["2497","Kall-Sistig","50.50141","6.526408","505","Nordrhein-Westfalen"],
["2522","Karlsruhe","49.038161","8.36406","111.6","Baden-Württemberg"],
["2532","Kassel","51.296303","9.442424","231","Hessen"],
["2542","Kaufbeuren","47.865204","10.600653","716","Bayern"],
["2543","Kaufering","48.09158","10.860913","585","Bayern"],
["2559","Kempten","47.723259","10.334797","705.2","Bayern"],
["2597","BadKissingen","50.224063","10.079213","281.8","Bayern"],
["2600","Kitzingen","49.736304","10.178117","188","Bayern"],
["2601","KleinerFeldberg/Taunus","50.221815","8.446877","825.6","Hessen"],
["2629","Kleve","51.761242","6.095381","46","Nordrhein-Westfalen"],
["2638","Klippeneck","48.105371","8.754878","973.4","Baden-Württemberg"],
["2657","Koblenz-Horchheim","50.336911","7.599165","85","Rheinland-Pfalz"],
["2667","Köln-Bonn","50.864559","7.157488","92","Nordrhein-Westfalen"],
["2680","BadKönigshofen","50284","10.4456","288","Bayern"],
["2700","Kösching","48.830189","11.487243","416","Bayern"],
["2712","Konstanz","47.677419","9.190052","442.5","Baden-Württemberg"],
["2750","Kronach","50.252336","11.32093","312","Bayern"],
["2812","Lahr","48.364697","7.828016","155","Baden-Württemberg"],
["2814","Merklingen","48.512126","9.764464","685","Baden-Württemberg"],
["2925","Leinefelde","51.393291","10.312345","356","Thüringen"],
["2928","Leipzig-Holzhausen","51.315067","12.446226","138","Sachsen"],
["2932","Leipzig/Halle","51.43479","12.239622","131","Sachsen"],
["2947","Lennestadt-Theten","51.133253","8.034831","286","Nordrhein-Westfalen"],
["2950","Lensahn","54.218979","10.884417","14","Schleswig-Holstein"],
["2968","Köln-Stammheim","50.989428","6.977688","43","Nordrhein-Westfalen"],
["3015","Lindenberg","52.208491","14.117973","98","Brandenburg"],
["3018","Lindenfels-Winterkasten","49.711685","8.780395","445","Hessen"],
["3023","Lingen","52.518093","7.308057","22","Niedersachsen"],
["3028","BadLippspringe","51.785459","8.838777","157","Nordrhein-Westfalen"],
["3031","Lippstadt-Bökenförde","51.633617","8.39445","92","Nordrhein-Westfalen"],
["3032","ListaufSylt","55.010989","8.412531","24.7","Schleswig-Holstein"],
["3093","Lüchow","52.972375","11.137388","17","Niedersachsen"],
["3126","Magdeburg","52.102889","11.582678","76","Sachsen-Anhalt"],
["3137","Mainz-Lerchenberg(ZDF)","49.965563","8.213852","195","Rheinland-Pfalz"],
["3155","Manderscheid-Sonnenhof","50.101542","6.800909","413","Rheinland-Pfalz"],
["3167","BadMarienberg","50.662025","7.960193","546.6","Rheinland-Pfalz"],
["3196","Marnitz","53.322288","11.931949","81","Mecklenburg-Vorpommern"],
["3231","Meiningen","50.561159","10.377105","450","Thüringen"],
["3244","Memmingen","47.982038","10.138397","615","Bayern"],
["3257","Mergentheim; Bad-Neunkirchen","49.477317","9.762223","250","Baden-Württemberg"],
["3271","Metten","48.854761","12.918851","313","Bayern"],
["3307","Mittenwald-Buckelwiesen","47.477882","11.265305","981","Bayern"],
["3366","Mühldorf","48.279069","12.502379","405.6","Bayern"],
["3376","Müncheberg","52.517588","14.123226","63","Brandenburg"],
["3379","München-Stadt","48.163142","11.542922","515.2","Bayern"],
["3402","Münsingen-Apfelstetten","48.385066","9.483693","750","Baden-Württemberg"],
["3490","Neuenahr; Bad-Ahrweiler","50.534561","7.085337","111","Rheinland-Pfalz"],
["3509","Menz","53.101974","13.042072","77","Brandenburg"],
["3527","Neukirchen-Hauptschwenda","50.89228","9.40498","500","Hessen"],
["3537","Neumünster","54.087289","9.979065","26","Schleswig-Holstein"],
["3552","Neuruppin","52.903704","12.807205","38","Brandenburg"],
["3578","Regensburg-Burgweinting","48.983186","12.144318","341","Bayern"],
["3612","Nienburg","52.671083","9.22291","25","Niedersachsen"],
["3621","Reimlingen","48.825254","10.506667","435","Bayern"],
["3631","Norderney","53.712302","7.151921","11","Niedersachsen"],
["3640","Nordhorn-Blanke","52.412368","7.063994","24","Niedersachsen"],
["3667","Nürnberg-Netzstall","49.425781","11.253831","368","Bayern"],
["3668","Nürnberg","49.503031","11.054923","314","Bayern"],
["3730","Oberstdorf","47.398578","10.275988","806","Bayern"],
["3739","Oberviechtach","49.452098","12.436557","596","Bayern"],
["3761","Öhringen","49.207046","9.517492","275.9","Baden-Württemberg"],
["3811","Oschatz","51.295965","13.092837","150","Sachsen"],
["3875","Parsberg/Oberpfalz-Eglwang","49.15102","11.689638","549","Bayern"],
["3927","Pfullendorf","47.934445","9.28694","630","Baden-Württemberg"],
["3939","Pirmasens","49.191191","7.5879","385","Rheinland-Pfalz"],
["3987","Potsdam","52.381287","13.062229","81","Brandenburg"],
["4063","Rahden-Kleinendorf","52.446122","8.590574","40.5","Nordrhein-Westfalen"],
["4104","Regensburg","49.042357","12.102053","365.4","Bayern"],
["4169","Rheinau-Memprechtshofen","48.67025","7.993875","131","Baden-Württemberg"],
["4175","Rheinfelden","47.558997","7.772105","282","Baden-Württemberg"],
["4261","Rosenheim","47.87535","12.127954","444","Bayern"],
["4271","Rostock-Warnemünde","54.180279","12.080806","4","Mecklenburg-Vorpommern"],
["4278","RothbeiNürnberg","49.251149","11.093364","340","Bayern"],
["4287","RothenburgobderTauber","49.384842","10.173229","415","Bayern"],
["4318","Ruhpolding","47.731033","12.660966","692","Bayern"],
["4323","Ruppertsecken","49.646838","7.883741","461","Rheinland-Pfalz"],
["4336","Saarbrücken-Ensheim","49.212803","7.107712","320","Saarland"],
["4350","BadSäckingen","47.561928","7.939928","339","Baden-Württemberg"],
["4371","BadSalzuflen","52.104211","8.75208","134.6","Nordrhein-Westfalen"],
["4377","Sandberg","50.351741","10.003394","510","Bayern"],
["4393","SanktPeter-Ording","54.327918","8.602987","5","Schleswig-Holstein"],
["4411","Schaafheim-Schlierbach","49.919516","8.967138","155","Hessen"],
["4442","Schieder","51.911935","9.15332","155","Nordrhein-Westfalen"],
["4466","Schleswig","54.527539","9.548666","42.7","Schleswig-Holstein"],
["4501","Schmücke","50.654562","10.769332","937","Thüringen"],
["4508","Schneifelforsthaus","50.296848","6.419387","649","Rheinland-Pfalz"],
["4549","Schönwald/Schwarzwald","48.100168","8.196889","1021","Baden-Württemberg"],
["4560","Schotten","50.492508","9.122558","265","Hessen"],
["4592","Schwandorf","49.327832","12.087041","356","Bayern"],
["4597","Schwangau-Horn","47.576937","10.71814","792","Bayern"],
["4625","Schwerin","53.642521","11.387203","59","Mecklenburg-Vorpommern"],
["4642","Seehausen","52.891136","11.729697","21","Sachsen-Anhalt"],
["4651","Seesen","51.903973","10.188523","186","Niedersachsen"],
["4692","Siegen(Kläranlage)","50.853381","7.996614","229","Nordrhein-Westfalen"],
["4703","Sigmaringen-Laiz","48.071874","9.194248","580","Baden-Württemberg"],
["4706","Simbach/Inn","48.271874","13.027305","360","Bayern"],
["4745","Soltau","52.9604","9.79306","75.6","Niedersachsen"],
["4887","Stötten","48.665709","9.864648","733.8","Baden-Württemberg"],
["4896","Wagersrott","54.665383","9.805022","40","Schleswig-Holstein"],
["4926","Stuttgart(Neckartal)","48.789592","9.216739","224","Baden-Württemberg"],
["4928","Stuttgart(Schnarrenberg)","48.828188","9.200041","314.3","Baden-Württemberg"],
["4931","Stuttgart-Echterdingen","48.688307","9.223535","371","Baden-Württemberg"],
["5014","Worpswede-Hüttenbusch","53.275827","8.985687","7","Niedersachsen"],
["5017","Teuschnitz","50.400219","11.388904","633","Bayern"],
["5029","Tholey","49.473764","7.038578","385.9","Saarland"],
["5064","Tönisvorst","51.289722","6.443651","37","Nordrhein-Westfalen"],
["5100","Trier-Petrisberg","49.747889","6.658227","265","Rheinland-Pfalz"],
["5111","Trostberg","48.03111","12.53957","559","Bayern"],
["5142","Ueckermünde","53.744431","14.069699","1.2","Mecklenburg-Vorpommern"],
["5155","Ulm","48.383656","9.952422","566.8","Baden-Württemberg"],
["5165","Unterlüß","52.849932","10.289833","95","Niedersachsen"],
["5185","Uttenreuth","49.593309","11.070363","291","Bayern"],
["5229","Villingen-Schwenningen","48.045281","8.460835","720","Baden-Württemberg"],
["5279","Wahlsburg-Lippoldsberg","51.61941","9.57491","176","Hessen"],
["5280","Wittenborn","53.922412","10.226738","35","Schleswig-Holstein"],
["5361","Wartenberg-Angersbach","50.627083","9.441989","270","Hessen"],
["5371","Wasserkuppe","50.497345","9.942797","921","Hessen"],
["5397","Weiden","49.666262","12.184464","439.6","Bayern"],
["5426","Weinbiet","49.375835","8.121278","553","Rheinland-Pfalz"],
["5433","Weiskirchen/Saar","49.553365","6.811951","380","Saarland"],
["5440","Weißenburg-Emetzheim","49.011554","10.93081","439.3","Bayern"],
["5467","Wendelstein","47.703531","12.011857","1832","Bayern"],
["5540","Wiesbaden(Süd)","50.068132","8.260327","147","Hessen"],
["5610","Winterberg","51.196815","8.526821","681","Nordrhein-Westfalen"],
["5629","Wittenberg","51.889183","12.644523","105","Sachsen-Anhalt"],
["5654","Wörnitz-Bottenweiler","49.217848","10.229589","464","Bayern"],
["5664","Wolfach","48.295289","8.239094","291","Baden-Württemberg"],
["5676","Wolfsburg(Südwest)","52.396186","10.689225","82","Niedersachsen"],
["5692","Worms","49.605078","8.365906","88","Rheinland-Pfalz"],
["5705","Würzburg","49.770283","9.957723","268","Bayern"],
["5717","Wuppertal-Buchenhofen","51.224808","7.105335","130","Nordrhein-Westfalen"],
["5731","Wutöschingen-Ofteringen","47.678257","8.380129","398","Baden-Württemberg"],
["5745","Zehdenick","52.966353","13.326781","51","Brandenburg"],
["5779","Zinnwald-Georgenfeld","50.731376","13.751594","877","Sachsen"],
["5792","Zugspitze","47.420868","10.984724","2964","Bayern"],
["5906","Mannheim","49.509028","8.554076","96.1","Baden-Württemberg"],
["6159","Dörpen","52.954182","7.319582","8","Niedersachsen"],
["14311","Hersdorf-Weißenseifen","50.150531","6.55262","530","Rheinland-Pfalz"]
];
weather = [
["3",["1981-2010","3.8","3.1",".1","0","0","0","0","0","0","0",".4","2.6","10"]],
["44",["1981-2010","5.5","4.5",".5","0","0","0","0","0","0","0",".7","4.8","16"]],
["73",["1981-2010","11.1","5.4",".6","0","0","0","0","0","0","0","1.8","7.3","26.2"]],
["78",["1981-2010","4.8","3.8",".5","0","0","0","0","0","0","0",".6","4.1","13.8"]],
["91",["1981-2010","8.2","6.3",".8","0","0","0","0","0","0","0","1.4","6.2","22.9"]],
["142",["1981-2010","10.7","6.2","1","0","0","0","0","0","0","0","2.1","8.4","28.4"]],
["150",["1981-2010","6.3","3.9",".1","0","0","0","0","0","0","0","1.1","4.5","15.8"]],
["151",["1981-2010","10.3","5.9","1","0","0","0","0","0","0","0","1.6","8.1","26.9"]],
["164",["1981-2010","9","5.7","1.1","0","0","0","0","0","0","0","1.5","7.5","24.8"]],
["183",["1981-2010","6.1","5.6","1.4","0","0","0","0","0","0","0",".4","2.8","16.3"]],
["198",["1981-2010","8.1","6",".9","0","0","0","0","0","0","0","1.4","6.7","23.2"]],
["217",["1981-2010","10.4","7.5","2",".1","0","0","0","0","0","0","3.1","8.8","32"]],
["232",["1981-2010","10.7","6.9","1.4",".1","0","0","0","0","0","0","2.1","8.4","29.5"]],
["243",["1981-2010","4.4","3.1",".5","0","0","0","0","0","0","0",".5","3.4","12"]],
["257",["1981-2010","5.8","2.7",".2","0","0","0","0","0","0","0",".8","3.2","12.7"]],
["282",["1981-2010","8","4.3",".5","0","0","0","0","0","0","0","1.1","5.1","19"]],
["298",["1981-2010","7.7","5.8","1.3","0","0","0","0","0","0","0",".9","5.9","21.5"]],
["320",["1981-2010","9.4","5.3",".8","0","0","0","0","0","0","0","1.2","6.3","23.1"]],
["330",["1981-2010","9.3","6.6","1.1",".1","0","0","0","0","0","0","2","7.3","26.4"]],
["377",["1981-2010","6.2","3.7",".3","0","0","0","0","0","0","0","1.1","3.7","15"]],
["400",["1981-2010","8","4",".4","0","0","0","0","0","0","0","1.3","6.8","20.5"]],
["403",["1981-2010","8","4.6",".5","0","0","0","0","0","0","0","1.2","6.6","20.8"]],
["427",["1981-2010","8.3","5",".7","0","0","0","0","0","0","0","1.3","7","22.2"]],
["430",["1981-2010","7.4","4.7",".6","0","0","0","0","0","0","0","1.1","6.2","20"]],
["433",["1981-2010","7.8","4.8",".6","0","0","0","0","0","0","0","1.1","6.3","20.7"]],
["435",["1981-2010","6.9","4.5",".4","0","0","0","0","0","0","0","1","5.6","18.4"]],
["450",["1981-2010","4.4","2.1","0","0","0","0","0","0","0","0",".7","2.9","10.2"]],
["502",["1981-2010","10.2","7.4","1",".1","0","0","0","0","0","0","2","7.8","28.4"]],
["591",["1981-2010","8.9","5.4","1","0","0","0","0","0","0","0","1","7.1","23.4"]],
["596",["1981-2010","6.4","4.8","1.4","0","0","0","0","0","0","0",".5","4.4","17.5"]],
["614",["1981-2010","6.6","5.2",".6","0","0","0","0","0","0","0","1","4.6","18"]],
["648",["1981-2010","5.8","3.7",".5","0","0","0","0","0","0","0",".6","5.1","15.8"]],
["662",["1981-2010","7","5.3",".7","0","0","0","0","0","0","0","1.2","5.9","20.2"]],
["691",["1981-2010","5.6","3.7",".6","0","0","0","0","0","0","0",".5","4.5","14.9"]],
["701",["1981-2010","5.3","3.3",".5","0","0","0","0","0","0","0",".5","4.1","13.6"]],
["722",["1981-2010","18.3","17.7","13.9","5.8",".4","0","0","0","0","2.6","9.9","16.4","85"]],
["755",["1981-2010","8.6","5.7",".8","0","0","0","0","0","0","0","1.6","6.2","22.9"]],
["807",["1981-2010","8.2","4.5",".5","0","0","0","0","0","0","0","1.4","5.6","20.3"]],
["817",["1981-2010","7.6","4.6",".5","0","0","0","0","0","0","0","1.3","6.2","20.3"]],
["850",["1981-2010","6","4.7",".6","0","0","0","0","0","0","0",".8","5","17.2"]],
["853",["1981-2010","9.4","8.1","2.3",".2","0","0","0","0","0","0","2.3","7.9","30.2"]],
["863",["1981-2010","11.6","11","4",".4","0","0","0","0","0",".1","3.2","9.8","40.2"]],
["880",["1981-2010","7.4","5.1",".7","0","0","0","0","0","0","0","1.2","6.3","20.7"]],
["891",["1981-2010","5.4","3.5",".6","0","0","0","0","0","0","0",".5","3.7","13.7"]],
["963",["1981-2010","5.6","3.7",".5","0","0","0","0","0","0","0",".5","4.8","15.1"]],
["979",["1981-2010","6.6","3.5",".3","0","0","0","0","0","0","0",".9","5","16.4"]],
["982",["1981-2010","10.2","6.4",".7","0","0","0","0","0","0","0","2.2","7.2","26.8"]],
["1001",["1981-2010","8.3","5.4",".9","0","0","0","0","0","0","0","1.4","6.4","22.4"]],
["1048",["1981-2010","9","6.6","1.2","0","0","0","0","0","0","0","1.5","6.3","24.7"]],
["1078",["1981-2010","3.1","1.9",".1","0","0","0","0","0","0","0",".4","2","7.5"]],
["1103",["1981-2010","10.5","7.3","1.4","0","0","0","0","0","0","0","2.8","9.6","31.6"]],
["1107",["1981-2010","9.1","5.1",".6","0","0","0","0","0","0","0","1.7","6.6","23.2"]],
["1197",["1981-2010","9.1","6.3","1",".1","0","0","0","0","0","0","1.8","6.9","25.1"]],
["1224",["1981-2010","5.9","2.5",".2","0","0","0","0","0","0","0",".9","2.9","12.4"]],
["1266",["1981-2010","6.6","4",".6","0","0","0","0","0","0","0",".7","5.1","17.1"]],
["1270",["1981-2010","9.6","8","1.7",".1","0","0","0","0","0","0","2.1","8.1","29.6"]],
["1279",["1981-2010","7.7","3.9",".5","0","0","0","0","0","0","0","1","5","18.1"]],
["1292",["1981-2010","12","7.9","1",".1","0","0","0","0","0","0","2.6","9.7","33.3"]],
["1297",["1981-2010","7","4.9",".6","0","0","0","0","0","0","0","1","5.2","18.7"]],
["1303",["1981-2010","4.4","3.4",".2","0","0","0","0","0","0","0",".6","2.9","11.6"]],
["1327",["1981-2010","4.3","3.2",".1","0","0","0","0","0","0","0",".6","2.5","10.6"]],
["1332",["1981-2010","13","7.4","1.4","0","0","0","0","0","0","0","2.2","9.7","33.7"]],
["1346",["1981-2010","15","14.6","13","6.5","1","0","0","0","0","2.3","9.2","13","74.6"]],
["1357",["1981-2010","15.1","11.3","3.6",".4","0","0","0","0","0",".1","4.8","12.8","48"]],
["1358",["1981-2010","19.8","18.3","14.5","4.9",".2","0","0","0","0","3.1","11.1","18.3","90.1"]],
["1420",["1981-2010","5.9","2.8",".1","0","0","0","0","0","0","0",".8","3.8","13.3"]],
["1443",["1981-2010","5.3","2.4",".1","0","0","0","0","0","0","0",".9","3.1","11.9"]],
["1451",["1981-2010","6.2","3.9",".5","0","0","0","0","0","0","0",".7","5.3","16.5"]],
["1468",["1981-2010","10.5","8.9","2.9",".3","0","0","0","0","0","0","3.9","9.7","36.3"]],
["1503",["1981-2010","4.3","3.6",".5","0","0","0","0","0","0","0",".5","3.8","12.7"]],
["1526",["1981-2010","7.3","4.6",".5","0","0","0","0","0","0","0","1.3","5","18.7"]],
["1544",["1981-2010","7.7","4.4",".7","0","0","0","0","0","0","0",".9","6.7","20.4"]],
["1550",["1981-2010","9.4","5.4","1.4",".1","0","0","0","0","0","0","2.8","8.6","27.6"]],
["1580",["1981-2010","5.1","2.4",".1","0","0","0","0","0","0","0",".9","3.2","11.7"]],
["1590",["1981-2010","3.6","2.2",".1","0","0","0","0","0","0","0",".4","2.1","8.4"]],
["1612",["1981-2010","8.9","7.5","1.5",".1","0","0","0","0","0","0","1.8","7.2","27.1"]],
["1619",["1981-2010","5.2","2.2","0","0","0","0","0","0","0","0",".7","2.8","11"]],
["1639",["1981-2010","7.2","3.8",".2","0","0","0","0","0","0","0","1","5","17.2"]],
["1645",["1981-2010","9.5","7.3","1.2","0","0","0","0","0","0","0","1.9","7.4","27.3"]],
["1667",["1981-2010","5.9","3.8",".7","0","0","0","0","0","0","0",".6","4.7","15.6"]],
["1684",["1981-2010","9.9","7.5","1.5",".1","0","0","0","0","0","0","2.1","8.2","29.3"]],
["1691",["1981-2010","6.5","4.7",".6","0","0","0","0","0","0","0","1","4.9","17.6"]],
["1735",["1981-2010","12.4","8","2.2","0","0","0","0","0","0","0","3.3","10.3","36.3"]],
["1757",["1981-2010","7.4","5.3","1","0","0","0","0","0","0","0","1.1","5.7","20.4"]],
["1803",["1981-2010","7.6","5.6","1.1","0","0","0","0","0","0","0","1.3","6.2","21.8"]],
["1832",["1981-2010","19.5","18.1","14.5","6.4",".5","0","0","0",".1","3","11.6","17.1","90.8"]],
["1920",["1981-2010","3.5","2.7",".2","0","0","0","0","0","0","0",".5","2.1","8.9"]],
["1975",["1981-2010","6.1","3.7",".6","0","0","0","0","0","0","0",".7","5.2","16.4"]],
["1981",["1981-2010","5.9","3.7",".5","0","0","0","0","0","0","0",".6","4.9","15.5"]],
["1990",["1981-2010","5.6","3.4",".4","0","0","0","0","0","0","0",".6","4.9","15"]],
["1993",["1981-2010","5.2","4",".5","0","0","0","0","0","0","0",".7","3.5","13.8"]],
["2014",["1981-2010","6.3","4.7",".6","0","0","0","0","0","0","0",".9","5.3","17.8"]],
["2074",["1981-2010","7.4","5.8","1",".1","0","0","0","0","0","0","1.7","5.5","21.5"]],
["2080",["1981-2010","5.3","2.1",".1","0","0","0","0","0","0","0",".7","2.7","10.8"]],
["2110",["1981-2010","3.9","2.8","0","0","0","0","0","0","0","0",".5","2.2","9.4"]],
["2115",["1981-2010","3","2.8",".5","0","0","0","0","0","0","0",".1","1.1","7.5"]],
["2120",["1981-2010","6.8","5.7",".8","0","0","0","0","0","0","0","1.2","5.7","20.1"]],
["2147",["1981-2010","4.6","3.4",".4","0","0","0","0","0","0","0",".7","3","12.1"]],
["2167",["1981-2010","6.3","4.1",".3","0","0","0","0","0","0","0","1.1","4.6","16.4"]],
["2171",["1981-2010","7.8","5.2",".8","0","0","0","0","0","0","0","1.2","6.1","21.1"]],
["2173",["1981-2010","3.5","2.4",".1","0","0","0","0","0","0","0",".4","2","8.4"]],
["2206",["1981-2010","5.4","4.9",".8","0","0","0","0","0","0","0",".9","4.3","16.2"]],
["2211",["1981-2010","6.6","4.1",".3","0","0","0","0","0","0","0","1","4.4","16.4"]],
["2250",["1981-2010","12.8","8.1","1.2","0","0","0","0","0","0","0","2.7","10","34.8"]],
["2260",["1981-2010","11.2","8.4","2.1",".2","0","0","0","0","0","0","2.5","8.3","32.7"]],
["2261",["1981-2010","13.8","10","2.9",".3","0","0","0","0","0","0","3.8","12","42.8"]],
["2268",["1981-2010","8.3","5",".5","0","0","0","0","0","0","0","1.3","4.9","20.1"]],
["2290",["1981-2010","11.2","10.5","5",".7","0","0","0","0","0",".3","5.8","10.3","43.8"]],
["2303",["1981-2010","5.9","4.1",".7","0","0","0","0","0","0","0",".6","4.1","15.4"]],
["2306",["1981-2010","6.4","4.6",".8","0","0","0","0","0","0","0",".4","4.1","16.2"]],
["2319",["1981-2010","10","7.3","1.8",".1","0","0","0","0","0",".1","3.2","9","31.4"]],
["2324",["1981-2010","10","7.9","1.9",".1","0","0","0","0","0","0","2","7.8","29.6"]],
["2480",["1981-2010","5.5","2.7",".1","0","0","0","0","0","0","0",".8","3","12"]],
["2483",["1981-2010","15","13.4","7.4","1.2","0","0","0","0","0",".2","6.2","13.7","57"]],
["2497",["1981-2010","6.6","6.1","1.3",".1","0","0","0","0","0","0","1.6","5.6","21.3"]],
["2522",["1981-2010","5.4","2.5",".1","0","0","0","0","0","0","0",".7","2.4","11.1"]],
["2532",["1981-2010","7.9","5.7",".6","0","0","0","0","0","0","0","1.5","6","21.7"]],
["2542",["1981-2010","10","8.3","2.2",".1","0","0","0","0","0","0","3.1","8.6","32.4"]],
["2543",["1981-2010","10.2","8.3","1.3","0","0","0","0","0","0","0","2.9","9.1","31.9"]],
["2559",["1981-2010","8.5","7","1.9",".1","0","0","0","0","0","0","2.6","7.7","27.8"]],
["2597",["1981-2010","8.8","4.1",".5","0","0","0","0","0","0","0","1.2","5.8","20.4"]],
["2600",["1981-2010","7.6","3.9",".3","0","0","0","0","0","0","0","1.1","4.3","17.3"]],
["2601",["1981-2010","14.6","12.2","5.2",".6","0","0","0","0","0",".1","5.7","12.3","50.6"]],
["2629",["1981-2010","4.1","2.3",".1","0","0","0","0","0","0","0",".6","2.9","10.1"]],
["2638",["1981-2010","10.4","10.6","4.6",".5","0","0","0","0","0","0","5.6","10.4","42.2"]],
["2657",["1981-2010","3.8","1.9","0","0","0","0","0","0","0","0",".6","1.9","8.2"]],
["2667",["1981-2010","3.6","1.9",".1","0","0","0","0","0","0","0",".4","2","7.9"]],
["2680",["1981-2010","9.1","5",".6","0","0","0","0","0","0","0","1.3","6.1","22"]],
["2700",["1981-2010","12.4","7.3","1.1","0","0","0","0","0","0","0","2.1","9.1","32.1"]],
["2712",["1981-2010","7.8","4",".4","0","0","0","0","0","0","0","1.4","4.9","18.5"]],
["2750",["1981-2010","9.2","5.3","1","0","0","0","0","0","0","0","1.3","6.3","23.2"]],
["2812",["1981-2010","7","2.6","0","0","0","0","0","0","0","0",".6","3.8","14.1"]],
["2814",["1981-2010","11.5","9.3","2.6",".3","0","0","0","0","0",".1","4.3","10.1","38.1"]],
["2925",["1981-2010","9.6","8","1.9",".1","0","0","0","0","0","0","2.1","7.8","29.4"]],
["2928",["1981-2010","7.4","5.8","1","0","0","0","0","0","0","0","1.3","5.9","21.3"]],
["2932",["1981-2010","7.6","6","1","0","0","0","0","0","0","0","1.3","6.3","22.1"]],
["2947",["1981-2010","5.7","3.6",".4","0","0","0","0","0","0","0","1","4.2","14.9"]],
["2950",["1981-2010","6","4.3",".7","0","0","0","0","0","0","0",".7","4.1","15.7"]],
["2968",["1981-2010","2.5","1.6",".1","0","0","0","0","0","0","0",".2","1.2","5.6"]],
["3015",["1981-2010","9","5.8",".7","0","0","0","0","0","0","0","1.4","7.6","24.5"]],
["3018",["1981-2010","7.9","5.7",".6","0","0","0","0","0","0","0","1.8","6.2","22.2"]],
["3023",["1981-2010","4.3","3",".3","0","0","0","0","0","0","0",".5","3.7","11.8"]],
["3028",["1981-2010","5.5","4.6",".6","0","0","0","0","0","0","0",".8","3.7","15.2"]],
["3031",["1981-2010","4.2","3.4",".4","0","0","0","0","0","0","0",".6","2.8","11.4"]],
["3032",["1981-2010","5.4","4.1",".6","0","0","0","0","0","0","0",".3","2.9","13.3"]],
["3093",["1981-2010","6.8","4.7",".5","0","0","0","0","0","0","0","1.1","7","20"]],
["3126",["1981-2010","6.9","4.8",".5","0","0","0","0","0","0","0","1","6","19.2"]],
["3137",["1981-2010","5.7","2.3",".1","0","0","0","0","0","0","0",".7","3.5","12.2"]],
["3155",["1981-2010","7","4.9",".7","0","0","0","0","0","0","0","1.5","5","19.2"]],
["3167",["1981-2010","11.5","8.5","2.3",".1","0","0","0","0","0","0","2.7","9.2","34.2"]],
["3196",["1981-2010","8.7","5.6",".8","0","0","0","0","0","0","0","1.2","7.7","24.1"]],
["3231",["1981-2010","12.3","8.4","1.7",".1","0","0","0","0","0","0","2.8","10","35.3"]],
["3244",["1981-2010","10.3","7.6","1.4","0","0","0","0","0","0","0","2.6","8.6","30.5"]],
["3257",["1981-2010","7.3","4.5",".4","0","0","0","0","0","0","0","1.3","4.8","18.3"]],
["3271",["1981-2010","10.9","5.8",".7","0","0","0","0","0","0","0","1.7","7.6","26.6"]],
["3307",["1981-2010","7.4","6.4","2.1",".2","0","0","0","0","0","0","2.8","7.2","26.1"]],
["3366",["1981-2010","11.8","6.5","1","0","0","0","0","0","0","0","1.9","8.9","30.1"]],
["3376",["1981-2010","9.4","5.7","1.1","0","0","0","0","0","0","0","1.8","7.6","25.6"]],
["3379",["1981-2010","8.3","5.7","1","0","0","0","0","0","0","0","1.5","6.2","22.8"]],
["3402",["1981-2010","11","8.9","2.1",".3","0","0","0","0","0","0","3.9","10","36.1"]],
["3490",["1981-2010","3.7","2.4","0","0","0","0","0","0","0","0",".5","1.9","8.6"]],
["3509",["1981-2010","8.9","5.3","1","0","0","0","0","0","0","0","1.4","8.2","24.8"]],
["3527",["1981-2010","11.5","8.3","1.9",".1","0","0","0","0","0","0","2.7","9.1","33.5"]],
["3537",["1981-2010","6.3","3.8",".6","0","0","0","0","0","0","0",".5","4.7","15.9"]],
["3552",["1981-2010","8.1","4.9",".6","0","0","0","0","0","0","0","1.1","6.3","20.9"]],
["3578",["1981-2010","10.8","6",".6","0","0","0","0","0","0","0","1.7","7.2","26.3"]],
["3612",["1981-2010","5.5","3.8",".4","0","0","0","0","0","0","0",".6","4.3","14.7"]],
["3621",["1981-2010","10.3","6.6","1",".1","0","0","0","0","0","0","1.8","7.6","27.3"]],
["3631",["1981-2010","4.3","3.1",".4","0","0","0","0","0","0","0",".5","2.7","11"]],
["3640",["1981-2010","3.8","2.9",".3","0","0","0","0","0","0","0",".4","2.9","10.4"]],
["3667",["1981-2010","8.7","5.1",".7","0","0","0","0","0","0","0","1.4","6","21.9"]],
["3668",["1981-2010","8.2","5",".7","0","0","0","0","0","0","0","1.3","5.9","21.1"]],
["3730",["1981-2010","8.6","5.9","1.8",".1","0","0","0","0","0","0","2.9","7.4","26.8"]],
["3739",["1981-2010","14.2","9.8","2.1",".1","0","0","0","0","0","0","3.7","11.3","41.1"]],
["3761",["1981-2010","6.6","3.9",".3","0","0","0","0","0","0","0","1.3","4.4","16.5"]],
["3811",["1981-2010","7.2","5.7","1","0","0","0","0","0","0","0","1.3","5.9","21"]],
["3875",["1981-2010","13.4","7.9","1.7","0","0","0","0","0","0","0","3","11","36.9"]],
["3927",["1981-2010","12","8.4","1.6",".1","0","0","0","0","0","0","3.2","10.2","35.6"]],
["3939",["1981-2010","7.1","4.1",".4","0","0","0","0","0","0","0","1.3","4","17"]],
["3987",["1981-2010","8.2","4.8",".5","0","0","0","0","0","0","0","1.4","7.1","22"]],
["4063",["1981-2010","5.4","4.3",".6","0","0","0","0","0","0","0",".7","4.6","15.7"]],
["4104",["1981-2010","11.9","6.3",".7","0","0","0","0","0","0","0","1.6","8.4","28.9"]],
["4169",["1981-2010","6.3","2.8",".1","0","0","0","0","0","0","0",".8","3.6","13.6"]],
["4175",["1981-2010","4.7","2",".1","0","0","0","0","0","0","0",".5","2.1","9.5"]],
["4261",["1981-2010","8.5","5.4",".7","0","0","0","0","0","0","0","1.9","7.4","23.8"]],
["4271",["1981-2010","6.1","4.1",".8","0","0","0","0","0","0","0",".6","4.3","15.9"]],
["4278",["1981-2010","8.4","4.6",".5","0","0","0","0","0","0","0","1.4","5.9","20.7"]],
["4287",["1981-2010","9.5","6.3",".9",".1","0","0","0","0","0","0","2","6.8","25.6"]],
["4318",["1981-2010","10.1","8","2.7",".2","0","0","0","0","0","0","3.4","9","33.3"]],
["4323",["1981-2010","9.3","6.7","1.1",".1","0","0","0","0","0","0","2.1","7.8","27.2"]],
["4336",["1981-2010","6.8","4.2",".3","0","0","0","0","0","0","0","1.2","4.9","17.4"]],
["4350",["1981-2010","6.7","3.2",".3","0","0","0","0","0","0","0","1.2","3.9","15.3"]],
["4371",["1981-2010","5.5","3.8",".5","0","0","0","0","0","0","0",".8","4.2","14.8"]],
["4377",["1981-2010","11.7","8.2","1.8",".1","0","0","0","0","0","0","2.4","9.9","34"]],
["4393",["1981-2010","5.2","3.6",".6","0","0","0","0","0","0","0",".4","3.7","13.6"]],
["4411",["1981-2010","6.1","3.6",".2","0","0","0","0","0","0","0","1","3.6","14.4"]],
["4442",["1981-2010","5.6","4.3",".5","0","0","0","0","0","0","0",".7","3.8","14.9"]],
["4466",["1981-2010","7","4.5",".8","0","0","0","0","0","0","0",".5","5","17.8"]],
["4501",["1981-2010","17.8","15.3","9","2","0","0","0","0","0",".6","7.9","17","69.5"]],
["4508",["1981-2010","9.9","7.4","2.5",".1","0","0","0","0","0","0","3","8.9","31.8"]],
["4549",["1981-2010","10.2","10.3","5",".8","0","0","0","0","0",".2","4.3","9.6","40.5"]],
["4560",["1981-2010","7.7","5.1",".5","0","0","0","0","0","0","0","1.3","5.4","20"]],
["4592",["1981-2010","11.6","5.5",".6","0","0","0","0","0","0","0","1.8","8.5","28"]],
["4597",["1981-2010","8.3","7.3","1.7",".2","0","0","0","0","0","0","3","7","27.5"]],
["4625",["1981-2010","7.5","4.8",".6","0","0","0","0","0","0","0",".8","6.2","19.9"]],
["4642",["1981-2010","7.6","4.9",".7","0","0","0","0","0","0","0","1.1","6.5","20.8"]],
["4651",["1981-2010","6.9","5.9",".9","0","0","0","0","0","0","0","1.3","5.5","20.4"]],
["4692",["1981-2010","5.6","3.1",".2","0","0","0","0","0","0","0",".7","4.1","13.6"]],
["4703",["1981-2010","10.3","7","1.2",".1","0","0","0","0","0","0","2.4","8.6","29.7"]],
["4706",["1981-2010","12","5.9","1.1","0","0","0","0","0","0","0","1.7","8.2","28.8"]],
["4745",["1981-2010","6.6","4.3",".3","0","0","0","0","0","0","0","1","6.8","19.2"]],
["4887",["1981-2010","12.6","10.5","3.4",".3","0","0","0","0","0",".1","5.2","11.7","43.9"]],
["4896",["1981-2010","6.8","5.5","1.2","0","0","0","0","0","0","0",".4","4.2","18"]],
["4926",["1981-2010","4.6","2.8",".2","0","0","0","0","0","0","0",".7","2.3","10.5"]],
["4928",["1981-2010","6.2","4.1",".3","0","0","0","0","0","0","0","1.4","4.2","16.2"]],
["4931",["1981-2010","6.9","4.7",".8",".1","0","0","0","0","0","0","1.3","4.9","18.6"]],
["5014",["1981-2010","5.9","3.9",".6","0","0","0","0","0","0","0",".8","5.2","16.4"]],
["5017",["1981-2010","13.9","10.1","2.9",".2","0","0","0","0","0","0","4.1","11.9","43.1"]],
["5029",["1981-2010","7.4","5.5",".4","0","0","0","0","0","0","0","1.8","6.8","21.8"]],
["5064",["1981-2010","3.7","1.8","0","0","0","0","0","0","0","0",".4","2.1","8"]],
["5100",["1981-2010","5.8","3.4",".2","0","0","0","0","0","0","0","1.1","4","14.5"]],
["5111",["1981-2010","9.9","6.3","1.2",".1","0","0","0","0","0","0","2","7.9","27.5"]],
["5142",["1981-2010","8.5","5.3","1.3","0","0","0","0","0","0","0","1.4","6","22.6"]],
["5155",["1981-2010","11.9","6.9","1","0","0","0","0","0","0","0","2.6","9.2","31.6"]],
["5165",["1981-2010","6.8","4.9",".7","0","0","0","0","0","0","0","1.1","5.6","19.1"]],
["5185",["1981-2010","7.3","4",".4","0","0","0","0","0","0","0","1.1","4.9","17.7"]],
["5229",["1981-2010","9.3","6.6","1.3",".1","0","0","0","0","0","0","2.8","7.9","28.1"]],
["5279",["1981-2010","7.5","5.5",".7","0","0","0","0","0","0","0","1.3","5.6","20.6"]],
["5280",["1981-2010","6.6","4.1",".7","0","0","0","0","0","0","0",".8","5.3","17.5"]],
["5361",["1981-2010","6.5","4.6",".6","0","0","0","0","0","0","0","1.3","4.8","17.8"]],
["5371",["1981-2010","15.9","13.8","7.6","1.3","0","0","0","0","0",".3","6.8","14.3","60"]],
["5397",["1981-2010","12.3","7.7","1.4",".1","0","0","0","0","0","0","2.1","9.4","32.9"]],
["5426",["1981-2010","10.5","7.7","1.7",".2","0","0","0","0","0","0","2.7","9","31.8"]],
["5433",["1981-2010","6.5","4.1",".3","0","0","0","0","0","0","0","1.1","4.2","16.3"]],
["5440",["1981-2010","9.2","6.1",".8",".1","0","0","0","0","0","0","1.6","6.7","24.4"]],
["5467",["1981-2010","15.7","16.1","14.1","8.4","1.6",".2","0","0",".4","2.9","10.7","14.3","84.4"]],
["5540",["1981-2010","5.2","2.3",".1","0","0","0","0","0","0","0",".7","2.7","10.9"]],
["5610",["1981-2010","11.8","11.1","4.7",".5","0","0","0","0","0","0","4.1","10","42.3"]],
["5629",["1981-2010","7.9","5.1",".5","0","0","0","0","0","0","0","1.4","6.9","21.8"]],
["5654",["1981-2010","10.3","7.8","1.6","0","0","0","0","0","0","0","2.5","7.9","30.1"]],
["5664",["1981-2010","4.9","2.2",".1","0","0","0","0","0","0","0","1","2.7","10.9"]],
["5676",["1981-2010","6.6","4.5",".5","0","0","0","0","0","0","0","1.1","5.2","17.9"]],
["5692",["1981-2010","5.4","2.6","0","0","0","0","0","0","0","0",".9","3.2","12.1"]],
["5705",["1981-2010","8.5","4.5",".5","0","0","0","0","0","0","0","1.3","5.7","20.4"]],
["5717",["1981-2010","3.2","2.2",".1","0","0","0","0","0","0","0",".3","1.9","7.8"]],
["5731",["1981-2010","7.6","3.8",".3","0","0","0","0","0","0","0","1.6","4.8","18.1"]],
["5745",["1981-2010","8.9","5.3",".6","0","0","0","0","0","0","0","1.5","6.8","23.1"]],
["5779",["1981-2010","18.9","17.1","10.2","1.6","0","0","0","0","0","1","9.2","18.5","76.5"]],
["5792",["1981-2010","29.6","26.3","29.1","24.1","12.6","6.8","2.9","2.2","7.9","12.9","23.6","29","207"]],
["5906",["1981-2010","5.5","2.1",".1","0","0","0","0","0","0","0",".7","3","11.3"]],
["6159",["1981-2010","4.6","3.3",".5","0","0","0","0","0","0","0",".5","4.1","13"]],
["14311",["1981-2010","8.8","6.2","1",".1","0","0","0","0","0","0","2.1","7.2","25.3"]]
];
// Long-term yearly mean temperatures, 1881-2016, one row per year.
// Row layout: [year, v1 ... v17]; columns 1-16 are per-region series that
// NearestCity() selects via its federal-state -> column lookup (column 16 is
// the fallback used for states without their own entry). Column 17 is not
// read by any code visible in this file.
// NOTE(review): values are presumably degrees Celsius (German DWD data) —
// TODO confirm against the original data source.
// Assigned without `var`, so this becomes a global shared with NearestCity().
temperature = [
[1881,7.6,7.5,7.7,6.6,7.5,7.0,7.5,7.5,8.1,8.0,7.1,8.3,6.7,7.5,7.1,6.7,7.3],
[1882,9.0,9.0,8.1,7.3,8.2,8.5,8.9,8.9,9.0,8.6,8.8,8.8,8.1,8.8,8.4,7.8,8.3],
[1883,8.4,8.4,7.8,6.8,8.0,7.9,8.4,8.4,8.7,8.3,8.2,8.5,7.5,8.3,7.9,7.3,7.9],
[1884,9.1,9.1,8.4,7.5,8.6,8.7,9.1,9.1,9.4,8.9,8.9,9.2,8.2,8.9,8.5,7.9,8.6],
[1885,8.4,8.4,7.8,7.0,7.7,7.7,7.9,7.9,8.3,8.0,7.6,8.3,7.7,8.1,7.7,7.2,7.7],
[1886,8.5,8.5,8.1,7.3,8.1,7.9,8.2,8.2,8.7,8.4,7.8,8.7,7.8,8.4,7.9,7.4,8.0],
[1887,7.8,7.7,6.7,5.9,6.8,7.4,7.4,7.4,7.6,7.1,7.3,7.3,6.7,7.5,6.9,6.2,7.0],
[1888,7.4,7.4,7.0,6.2,6.8,6.7,7.1,7.1,7.4,7.1,6.7,7.3,6.7,7.3,6.8,6.2,6.9],
[1889,8.1,8.1,7.1,6.4,7.3,7.6,8.0,8.0,8.1,7.5,7.7,7.6,7.1,8.0,7.4,6.7,7.4],
[1890,8.1,8.1,7.0,6.5,7.3,7.6,7.7,7.7,7.8,7.4,7.6,7.6,7.2,7.8,7.3,6.7,7.3],
[1891,8.2,8.2,7.2,6.5,7.4,7.6,7.8,7.8,8.1,7.5,7.6,7.7,7.4,8.0,7.5,6.8,7.4],
[1892,8.0,7.9,7.8,6.9,7.5,7.2,7.6,7.6,8.2,7.8,7.1,8.1,7.4,7.9,7.5,6.9,7.5],
[1893,8.3,8.3,8.1,7.0,8.0,7.6,8.3,8.3,8.8,8.4,7.9,8.7,7.6,8.3,7.9,7.3,7.9],
[1894,8.7,8.7,8.0,7.2,8.1,8.2,8.6,8.6,8.9,8.3,8.4,8.5,7.9,8.6,8.1,7.5,8.1],
[1895,8.0,8.0,7.3,6.4,7.2,7.4,7.6,7.6,7.9,7.5,7.3,7.8,7.3,7.8,7.3,6.7,7.3],
[1896,8.3,8.3,7.4,6.4,7.6,7.9,8.2,8.2,8.3,7.8,8.1,8.0,7.4,8.1,7.5,6.8,7.6],
[1897,8.3,8.3,8.2,7.1,7.9,7.9,8.2,8.2,8.6,8.3,8.0,8.6,7.8,8.3,7.9,7.3,7.9],
[1898,9.1,9.0,8.6,7.6,8.5,8.4,8.9,8.9,9.1,8.7,8.5,8.9,8.6,9.0,8.5,7.9,8.5],
[1899,8.6,8.6,8.3,7.0,8.1,8.1,8.5,8.6,8.8,8.4,8.4,8.7,7.9,8.6,8.0,7.4,8.1],
[1900,8.8,8.8,8.5,7.5,8.4,8.2,8.7,8.8,8.9,8.7,8.3,9.0,8.3,8.8,8.3,7.7,8.4],
[1901,8.3,8.3,7.4,6.8,7.5,7.8,8.1,8.1,8.3,7.9,7.9,8.1,7.5,8.1,7.5,6.7,7.6],
[1902,7.3,7.2,7.5,6.8,7.3,6.6,7.3,7.3,7.7,7.7,7.0,7.9,6.9,7.3,6.9,6.4,7.2],
[1903,9.0,9.0,8.1,7.6,8.3,8.2,8.8,8.8,8.9,8.5,8.4,8.6,8.4,9.0,8.5,7.8,8.4],
[1904,8.8,8.8,8.4,7.8,8.3,8.0,8.6,8.6,8.8,8.6,8.2,8.7,8.3,8.8,8.4,7.8,8.4],
[1905,8.6,8.6,7.8,7.2,7.9,7.9,8.3,8.3,8.5,8.2,8.1,8.3,7.8,8.4,7.9,7.3,8.0],
[1906,9.1,9.1,8.0,7.4,8.2,8.4,8.8,8.8,8.8,8.4,8.5,8.5,8.2,9.0,8.4,7.7,8.3],
[1907,8.2,8.2,7.9,7.2,7.8,7.5,8.1,8.1,8.4,8.1,7.6,8.3,7.7,8.1,7.7,7.2,7.8],
[1908,8.0,8.0,7.3,6.6,7.3,7.6,8.0,8.0,8.1,7.6,7.8,7.8,7.4,7.9,7.4,6.8,7.5],
[1909,7.9,7.8,7.2,6.6,7.4,7.2,7.7,7.7,7.9,7.7,7.3,7.9,7.3,7.8,7.3,6.8,7.4],
[1910,9.0,9.0,8.0,7.5,8.4,8.6,9.0,9.0,9.1,8.5,8.8,8.7,8.4,8.9,8.4,7.8,8.4],
[1911,9.7,9.6,8.8,8.2,9.1,9.0,9.5,9.5,9.7,9.3,9.1,9.5,9.0,9.7,9.1,8.5,9.0],
[1912,8.2,8.2,7.7,7.0,7.9,7.7,8.3,8.3,8.8,8.3,8.0,8.5,7.7,8.2,7.8,7.3,7.9],
[1913,9.2,9.2,8.2,7.6,8.4,8.8,9.0,9.0,9.2,8.7,8.8,9.0,8.4,9.1,8.5,7.9,8.5],
[1914,9.3,9.3,7.9,7.3,8.4,9.2,9.2,9.2,9.2,8.6,9.2,8.8,8.4,9.1,8.5,7.8,8.5],
[1915,8.3,8.3,7.9,7.4,8.1,7.6,8.1,8.1,8.5,8.5,7.6,8.7,7.7,8.2,7.8,7.3,7.9],
[1916,9.0,9.0,8.3,8.0,8.4,8.3,8.6,8.6,8.9,8.8,8.2,8.9,8.4,8.8,8.4,7.8,8.4],
[1917,8.2,8.2,7.3,6.9,7.5,7.6,7.9,7.9,8.0,7.8,7.7,7.8,7.4,8.0,7.5,6.8,7.5],
[1918,9.2,9.1,8.2,7.8,8.5,8.5,8.9,8.9,9.1,8.9,8.4,9.0,8.5,9.0,8.5,8.0,8.5],
[1919,7.7,7.7,7.3,6.8,7.3,7.3,7.5,7.5,7.9,7.8,7.2,7.9,7.1,7.7,7.2,6.6,7.3],
[1920,9.0,9.0,8.5,8.1,8.5,8.5,9.0,9.0,9.2,8.8,8.6,8.9,8.5,9.0,8.6,8.0,8.6],
[1921,9.4,9.4,8.8,8.3,9.0,9.0,9.3,9.3,9.6,9.4,9.1,9.5,8.8,9.4,8.9,8.2,9.0],
[1922,7.4,7.4,7.3,6.7,7.1,7.1,7.4,7.4,7.8,7.7,7.1,7.8,6.9,7.4,7.0,6.4,7.2],
[1923,8.2,8.2,8.2,7.6,8.0,7.6,8.0,8.0,8.5,8.6,7.6,8.6,7.9,8.2,7.9,7.4,8.0],
[1924,8.0,8.0,7.4,6.7,7.4,7.5,8.0,8.0,8.2,7.8,7.6,7.9,7.5,8.0,7.6,7.0,7.5],
[1925,9.0,9.0,7.9,7.4,8.2,8.6,8.8,8.8,8.8,8.5,8.5,8.5,8.3,8.9,8.4,7.8,8.3],
[1926,9.3,9.2,8.6,8.0,8.7,8.7,9.1,9.1,9.3,9.0,8.7,9.1,8.6,9.2,8.7,8.2,8.7],
[1927,8.4,8.3,8.0,7.5,8.0,7.9,8.3,8.3,8.6,8.4,8.0,8.5,7.8,8.4,8.0,7.4,8.0],
[1928,8.6,8.6,8.6,7.9,8.3,8.0,8.5,8.5,8.9,8.8,8.1,9.0,8.0,8.6,8.2,7.6,8.3],
[1929,7.6,7.6,7.5,6.7,7.6,7.0,7.5,7.5,8.2,8.0,7.1,8.1,7.1,7.6,7.3,6.8,7.4],
[1930,9.2,9.2,8.7,8.1,8.8,8.6,9.0,9.0,9.4,9.1,8.7,9.3,8.6,9.2,8.7,8.2,8.8],
[1931,8.1,8.1,7.2,6.6,7.6,7.6,8.1,8.1,8.3,7.9,7.7,8.0,7.4,8.2,7.7,7.0,7.6],
[1932,9.0,8.9,7.9,7.4,8.2,8.6,8.9,8.9,9.0,8.5,8.7,8.7,8.1,8.9,8.4,7.7,8.3],
[1933,7.9,7.9,7.4,6.6,7.7,7.8,8.2,8.2,8.4,8.1,8.3,8.4,7.1,8.0,7.5,6.9,7.6],
[1934,10.4,10.4,9.0,8.7,9.5,9.7,10.0,10.0,10.1,9.7,9.7,9.8,9.6,10.3,9.8,9.1,9.5],
[1935,8.9,8.9,8.0,7.5,8.4,8.5,8.9,8.9,9.1,8.8,8.6,8.9,8.2,9.0,8.5,7.8,8.4],
[1936,8.9,8.9,8.2,7.7,8.4,8.3,8.8,8.8,9.0,8.7,8.4,8.8,8.3,8.9,8.4,7.8,8.4],
[1937,8.9,8.9,8.6,7.9,8.7,8.4,8.9,8.8,9.2,9.0,8.4,9.3,8.4,9.0,8.6,8.1,8.6],
[1938,9.3,9.3,8.1,7.6,8.5,9.0,9.2,9.2,9.2,8.7,9.1,9.0,8.5,9.2,8.7,7.9,8.6],
[1939,8.9,8.9,7.9,7.3,8.2,8.5,8.9,8.9,9.1,8.6,8.6,8.8,8.1,8.9,8.3,7.7,8.3],
[1940,6.7,6.7,6.8,6.0,6.7,6.2,6.9,6.9,7.5,7.3,6.5,7.6,6.2,6.7,6.4,5.9,6.6],
[1941,7.3,7.3,7.1,6.4,7.3,6.9,7.6,7.6,8.1,7.7,7.1,8.0,6.7,7.4,7.0,6.5,7.2],
[1942,7.6,7.6,7.4,6.7,7.3,6.9,7.5,7.5,8.0,7.8,7.0,8.1,7.0,7.6,7.2,6.7,7.3],
[1943,9.3,9.3,8.8,8.1,8.8,8.9,9.2,9.2,9.5,9.1,9.0,9.4,8.8,9.3,8.9,8.4,8.9],
[1944,9.0,9.0,7.8,7.3,8.3,8.6,8.9,8.9,8.9,8.4,8.6,8.6,8.1,9.0,8.4,7.7,8.3],
[1945,9.5,9.5,8.7,8.1,9.0,9.3,9.4,9.4,9.6,9.3,9.1,9.5,8.9,9.2,8.9,8.5,9.0],
[1946,8.9,8.8,8.2,7.8,8.3,8.8,8.6,8.6,8.9,8.5,8.2,8.7,8.2,8.8,8.3,7.8,8.4],
[1947,8.4,8.4,8.9,8.2,8.8,7.7,8.6,8.5,9.3,9.3,8.0,9.5,8.2,8.6,8.3,7.9,8.5],
[1948,9.6,9.6,8.7,8.3,8.9,9.0,9.5,9.5,9.7,9.2,9.1,9.5,9.1,9.7,9.2,8.5,9.0],
[1949,9.6,9.6,8.9,8.4,9.1,9.2,9.5,9.5,9.8,9.5,9.3,9.9,8.9,9.7,9.2,8.6,9.1],
[1950,9.0,9.0,8.5,8.1,8.5,8.5,8.9,8.9,9.2,8.8,8.6,9.1,8.5,9.0,8.5,8.0,8.6],
[1951,9.2,9.2,8.5,8.1,8.7,8.6,9.0,9.0,9.3,8.9,8.7,9.2,8.9,9.2,8.8,8.3,8.7],
[1952,8.2,8.2,8.1,7.4,8.0,7.6,8.1,8.1,8.5,8.4,7.7,8.7,7.8,8.2,7.9,7.4,7.9],
[1953,9.6,9.6,8.4,8.0,8.9,9.1,9.4,9.4,9.5,9.1,9.2,9.5,9.1,9.6,9.2,8.6,8.9],
[1954,8.0,8.0,7.5,6.9,7.8,7.5,8.1,8.1,8.5,8.1,7.9,8.3,7.5,8.1,7.7,7.2,7.7],
[1955,7.9,7.9,7.5,6.8,7.4,7.6,8.0,8.0,8.3,7.9,7.8,8.3,7.2,7.8,7.4,6.8,7.5],
[1956,7.2,7.2,6.7,6.0,6.9,6.9,7.3,7.3,7.6,7.2,7.2,7.4,6.4,7.1,6.7,6.2,6.8],
[1957,9.0,9.0,8.3,7.8,8.6,8.5,9.0,9.0,9.4,8.9,8.7,9.3,8.5,9.0,8.6,8.1,8.6],
[1958,8.5,8.5,8.1,7.5,8.3,8.0,8.6,8.6,8.9,8.6,8.1,8.9,8.2,8.6,8.2,7.7,8.2],
[1959,9.3,9.3,8.8,8.1,9.2,8.9,9.4,9.4,9.9,9.6,9.1,10.1,8.8,9.4,9.0,8.6,9.0],
[1960,8.7,8.7,8.3,7.7,8.5,8.2,8.8,8.8,9.2,8.8,8.3,9.1,8.2,8.8,8.4,7.8,8.4],
[1961,9.3,9.3,9.0,8.2,8.9,8.8,9.2,9.2,9.6,9.3,8.9,9.7,8.9,9.3,8.9,8.4,8.9],
[1962,7.7,7.7,7.1,6.4,7.0,7.2,7.4,7.4,7.6,7.5,7.4,7.9,7.1,7.5,7.1,6.5,7.1],
[1963,7.7,7.7,7.0,6.4,7.0,7.1,7.3,7.3,7.7,7.3,7.1,7.7,7.2,7.5,7.1,6.5,7.1],
[1964,8.4,8.4,8.2,7.4,8.2,8.0,8.4,8.4,8.9,8.6,8.0,9.0,8.0,8.4,8.0,7.6,8.1],
[1965,7.9,7.8,7.3,6.7,7.5,7.3,7.9,7.9,8.2,7.8,7.5,8.1,7.4,7.9,7.5,6.9,7.5],
[1966,8.8,8.8,8.5,7.9,8.6,8.2,8.7,8.7,9.2,8.9,8.2,9.2,8.5,8.8,8.5,8.0,8.5],
[1967,9.6,9.6,8.5,8.0,8.9,9.1,9.4,9.4,9.5,9.0,9.1,9.2,8.9,9.5,9.0,8.4,8.9],
[1968,8.7,8.7,7.9,7.3,8.1,8.3,8.6,8.6,8.8,8.3,8.4,8.6,8.0,8.6,8.1,7.5,8.1],
[1969,7.8,7.8,7.6,7.1,7.9,7.3,8.2,8.2,8.7,8.2,7.8,8.5,7.5,8.0,7.6,7.1,7.8],
[1970,7.9,7.9,7.8,7.1,7.8,7.3,8.1,8.1,8.6,8.2,7.6,8.5,7.4,8.0,7.6,7.1,7.7],
[1971,9.0,9.0,8.1,7.5,8.4,8.6,8.9,8.9,9.2,8.7,8.6,8.9,8.3,8.9,8.4,7.8,8.4],
[1972,8.3,8.3,7.5,7.0,7.7,7.9,8.2,8.2,8.5,8.0,8.0,8.3,7.8,8.2,7.8,7.2,7.8],
[1973,8.7,8.7,7.8,7.2,8.3,8.4,8.8,8.8,9.0,8.6,8.6,8.9,8.0,8.7,8.2,7.5,8.2],
[1974,9.3,9.3,8.6,8.0,8.8,8.8,9.3,9.3,9.4,9.1,8.9,9.4,8.7,9.4,8.8,8.2,8.8],
[1975,9.6,9.6,8.5,8.0,8.9,9.0,9.5,9.5,9.6,9.1,9.2,9.4,8.9,9.5,9.0,8.3,8.9],
[1976,8.6,8.6,8.4,7.7,8.7,8.0,8.8,8.8,9.4,9.2,8.4,9.6,8.1,8.8,8.4,7.9,8.5],
[1977,9.1,9.1,8.6,8.0,8.7,8.5,9.1,9.1,9.3,8.9,8.6,9.1,8.5,9.1,8.6,8.1,8.7],
[1978,8.4,8.4,7.5,6.9,7.7,7.8,8.3,8.3,8.5,8.0,8.0,8.1,7.6,8.3,7.8,7.2,7.8],
[1979,8.0,8.0,8.0,7.4,7.6,7.3,7.8,7.8,8.2,8.1,7.2,8.4,7.7,8.0,7.6,7.1,7.7],
[1980,7.7,7.7,7.5,6.9,7.7,7.3,8.2,8.2,8.6,8.1,7.7,8.3,7.2,7.9,7.5,7.0,7.6],
[1981,8.7,8.6,8.1,7.5,8.1,8.1,8.5,8.5,8.9,8.5,8.0,8.9,8.0,8.6,8.1,7.5,8.2],
[1982,9.5,9.4,8.6,8.1,8.8,8.7,9.3,9.3,9.7,9.1,8.7,9.5,9.0,9.6,9.1,8.5,8.9],
[1983,9.7,9.7,8.8,8.2,8.9,9.1,9.5,9.5,9.7,9.2,9.1,9.5,9.0,9.6,9.1,8.4,9.0],
[1984,8.4,8.4,7.7,7.1,7.9,8.0,8.5,8.5,8.9,8.3,8.2,8.7,7.7,8.4,7.9,7.3,8.0],
[1985,7.9,7.9,7.4,6.7,7.3,7.2,7.8,7.8,8.0,7.7,7.4,8.1,7.3,7.9,7.4,6.8,7.4],
[1986,8.3,8.2,7.9,7.3,7.9,7.7,8.2,8.2,8.6,8.3,7.8,8.6,7.8,8.2,7.8,7.3,7.9],
[1987,7.5,7.5,7.7,7.0,7.4,7.1,7.7,7.7,8.2,7.9,7.3,8.3,7.0,7.5,7.1,6.6,7.4],
[1988,9.5,9.5,8.9,8.3,9.0,9.0,9.5,9.5,9.7,9.4,9.1,9.6,8.8,9.6,9.1,8.5,9.1],
[1989,10.2,10.1,9.0,8.4,9.4,9.7,10.0,10.0,10.3,9.7,9.6,9.9,9.5,10.1,9.6,8.9,9.5],
[1990,10.1,10.1,9.1,8.4,9.4,9.8,10.1,10.1,10.2,9.7,9.7,10.0,9.4,10.1,9.5,8.8,9.5],
[1991,8.9,8.9,8.2,7.4,8.3,8.4,8.8,8.8,9.0,8.7,8.5,9.1,8.2,8.8,8.3,7.7,8.3],
[1992,9.9,9.9,9.1,8.6,9.2,9.3,9.9,9.9,10.0,9.5,9.5,9.8,9.3,9.9,9.4,8.7,9.4],
[1993,8.8,8.8,8.6,8.0,8.5,8.2,8.6,8.6,9.1,8.9,8.2,9.3,8.3,8.7,8.3,7.8,8.5],
[1994,9.9,9.9,9.9,9.4,9.7,9.2,9.8,9.8,10.3,10.1,9.3,10.5,9.4,10.0,9.6,9.1,9.7],
[1995,9.2,9.2,8.8,8.1,8.9,8.7,9.3,9.3,9.8,9.4,8.9,9.8,8.5,9.3,8.8,8.2,8.9],
[1996,7.4,7.3,7.4,6.7,7.2,6.9,7.5,7.5,7.9,7.8,7.2,8.3,6.6,7.3,6.9,6.4,7.2],
[1997,9.2,9.2,8.8,8.1,8.9,8.8,9.4,9.4,9.7,9.3,9.0,9.7,8.5,9.3,8.8,8.3,8.9],
[1998,9.5,9.5,8.9,8.4,9.0,8.9,9.5,9.5,9.7,9.3,9.0,9.5,8.9,9.6,9.1,8.5,9.1],
[1999,10.1,10.0,9.1,8.5,9.5,9.5,10.2,10.2,10.3,9.8,9.7,9.9,9.2,10.1,9.6,8.9,9.5],
[2000,10.4,10.4,9.7,9.1,9.8,9.7,10.3,10.3,10.5,10.1,9.8,10.3,9.8,10.4,9.9,9.4,9.9],
[2001,9.2,9.2,9.0,8.3,9.1,8.8,9.4,9.4,9.8,9.5,8.9,9.7,8.6,9.3,9.0,8.5,9.0],
[2002,9.8,9.7,9.5,9.0,9.6,9.4,9.9,9.9,10.3,10.0,9.6,10.3,9.2,9.7,9.3,8.8,9.6],
[2003,9.5,9.5,9.4,8.8,9.5,9.0,9.7,9.7,10.1,10.1,9.2,10.6,9.0,9.6,9.2,8.7,9.4],
[2004,9.3,9.3,8.7,8.2,8.9,9.0,9.5,9.5,9.6,9.2,9.1,9.5,8.6,9.3,8.9,8.3,8.9],
[2005,9.3,9.3,8.6,8.0,9.1,9.1,9.6,9.6,9.9,9.5,9.2,9.9,8.6,9.4,9.0,8.4,9.0],
[2006,10.0,10.0,9.2,8.5,9.5,9.7,10.2,10.2,10.3,9.9,9.9,10.3,9.2,10.0,9.5,8.9,9.5],
[2007,10.4,10.3,9.5,9.1,9.8,10.0,10.4,10.4,10.5,10.1,10.0,10.3,9.7,10.3,9.9,9.3,9.9],
[2008,10.1,10.1,9.1,8.7,9.3,9.7,10.0,10.0,9.9,9.6,9.7,9.7,9.4,10.0,9.5,8.9,9.5],
[2009,9.5,9.5,9.0,8.5,9.1,9.1,9.7,9.7,9.8,9.6,9.3,9.8,8.9,9.5,9.1,8.6,9.2],
[2010,8.1,8.1,7.9,7.3,7.9,7.7,8.1,8.1,8.4,8.4,7.7,8.7,7.5,8.0,7.6,7.2,7.8],
[2011,9.9,9.9,9.6,8.9,9.7,9.4,10.0,10.0,10.4,10.2,9.4,10.5,9.4,10.0,9.6,9.1,9.6],
[2012,9.4,9.3,9.1,8.5,9.1,8.8,9.5,9.5,9.7,9.5,8.8,9.7,8.9,9.4,9.1,8.6,9.1],
[2013,9.2,9.2,8.6,8.1,8.7,8.9,9.1,9.1,9.2,9.0,8.8,9.2,8.4,9.1,8.6,8.1,8.7],
[2014,10.7,10.7,10.1,9.6,10.3,10.2,10.8,10.8,11.0,10.7,10.5,10.9,10.1,10.7,10.3,9.8,10.3],
[2015,10.4,10.3,9.9,9.4,9.9,9.8,10.2,10.2,10.4,10.2,9.7,10.5,9.9,10.3,10.0,9.5,9.9],
[2016,10.0,10.0,9.3,8.9,9.4,9.6,9.9,9.9,10.1,9.8,9.6,9.9,9.4,10.1,9.6,9.0,9.5]
];
/**
 * Finds the weather station closest to the given coordinates and renders
 * the station's frost-day statistics plus the long-term temperature series
 * of its federal state into the #frame element.
 *
 * Relies on globals defined elsewhere in this file: `cities`, `weather`,
 * `temperature` and `PythagorasEquirectangular(lat1, lon1, lat2, lon2)`.
 *
 * @param {number} latitude  - visitor latitude in decimal degrees.
 * @param {number} longitude - visitor longitude in decimal degrees.
 */
function NearestCity(latitude, longitude) {
    // Kept as explicit globals on purpose: the static-map URL below reads
    // them, and other scripts may rely on window.lat / window.long.
    window.lat = latitude;
    window.long = longitude;

    // --- nearest station: linear scan over all known cities --------------
    var mindif = 99999;
    var closest;
    for (var i = 0; i < cities.length; ++i) {
        var dif = PythagorasEquirectangular(latitude, longitude, cities[i][2], cities[i][3]);
        if (dif < mindif) {
            closest = i;
            mindif = dif;
        }
    }
    var city = cities[closest];

    // --- monthly frost-day averages for that station ----------------------
    // weather rows look like [stationId, [period, jan, ..., dec, total]];
    // slice(1, 13) extracts exactly the 12 monthly values (tmp[1]..tmp[12]).
    var monthLabels = ['jan', 'feb', 'mar', 'apr', 'mai', 'jun', 'jul', 'aug', 'sep', 'okt', 'nov', 'dec'];
    var frostDays = [];
    for (var w = 0; w < weather.length; ++w) {
        if (weather[w][0] == city[0]) {
            frostDays = weather[w][1].slice(1, 13);
        }
    }

    // Column of the `temperature` table to show, keyed by federal state.
    // Column 16 is the fallback for states without their own entry.
    // NOTE(review): there is deliberately no state mapping to column 4 —
    // preserved from the original lookup chain; confirm against the data.
    var stateColumn = {
        'Berlin': 1,
        'Brandenburg': 2,
        'Baden-Württemberg': 3,
        'Hessen': 5,
        'Mecklenburg-Vorpommern': 6,
        'Niedersachsen': 7,
        'Hamburg': 8,
        'Nordrhein-Westfalen': 9,
        'Rheinland-Pfalz': 10,
        'Schleswig-Holstein': 11,
        'Saarland': 12,
        'Sachsen': 13,
        'Sachsen-Anhalt': 14,
        'Thüringen': 15
    };
    var number = (stateColumn[city[5]] !== undefined) ? stateColumn[city[5]] : 16;

    // --- render -----------------------------------------------------------
    var html = '<h3>Your next weather centre is ' + city[1] + " in " + city[5] + ". It's located " + city[4] + " meters above normal.</h3>";
    html = html + '<img style="max-width:80%;height:auto;" src="https://maps.googleapis.com/maps/api/staticmap?size=500x400&markers=color:blue|' + city[2] + ', ' + city[3] + '|' + window.lat + ', ' + window.long + '"><h4>Statistics of frozen days per month. (Average of 1881 to 2010)</h4>';
    html = html + '<table>';
    for (var m = 0; m < monthLabels.length; m++) {
        // One bar per month; the same markup the original repeated 12 times.
        html = html + '<tr><td style="font-weight:bold;width:30%;">' + monthLabels[m] + '</td><td><div style="width:' + frostDays[m] * 10 + '%;height:30px;margin:-4px;margin-right:4px;margin-top:-2px;float:left;background:#FF3529;"></div><span>' + frostDays[m] + '</span></td></tr>';
    }
    html = html + '</table>';
    html = html + '<h4>Average temperature in ' + city[5] + ' (1881 to 2016)</h4>';
    html = html + '<table>';
    // BUG FIX: the original divided the two YEAR columns (e.g. 2016/1881)
    // instead of the temperature values; compare the state's column instead.
    var firstRow = temperature[0];
    var lastRow = temperature[temperature.length - 1];
    html = html + '<h4>The current average temperature is ' + Math.round((lastRow[number] / firstRow[number]) * 10000) / 100 + '% of ' + firstRow[0] + '.</h4>';
    for (var t = temperature.length - 1; t >= 0; t = t - 1) {
        html = html + '<tr><td style="font-weight:bold;width:30%;width:30%">' + temperature[t][0] + '</td><td><div style="width:' + temperature[t][number] * 8 + '%;height:30px;margin:-4px;margin-right:4px;margin-top:-2px;float:left;background:#FF3529;"></div><span>' + temperature[t][number] + '</span></td></tr>';
    }
    html = html + '</table>';
    var frame = document.getElementById('frame');
    frame.innerHTML = html;
}
using System;
using System.Text;
using System.Web;
using System.Web.Http.Description;
namespace ProCultura.Web.Api.Areas.HelpPage
{
public static class ApiDescriptionExtensions
{
    /// <summary>
    /// Generates an URI-friendly ID for the <see cref="ApiDescription"/>. E.g. "Get-Values-id_name" instead of "GetValues/{id}?name={name}"
    /// </summary>
    /// <param name="description">The <see cref="ApiDescription"/>.</param>
    /// <returns>The ID as a string.</returns>
    public static string GetFriendlyId(this ApiDescription description)
    {
        var urlParts = description.RelativePath.Split('?');

        // Path segment: "GetValues/{id}" -> "GetValues-id".
        var flatPath = urlParts[0]
            .Replace("/", "-")
            .Replace("{", string.Empty)
            .Replace("}", string.Empty);

        var friendlyId = new StringBuilder();
        friendlyId.Append(description.HttpMethod.Method).Append('-').Append(flatPath);

        // Query segment: append "_key1_key2" when a query string is present.
        if (urlParts.Length > 1)
        {
            var queryKeys = HttpUtility.ParseQueryString(urlParts[1]).AllKeys;
            friendlyId.Append('_').Append(string.Join("_", queryKeys));
        }

        return friendlyId.ToString();
    }
}
} | jcespinoza/ProCulturaBackend | ProCultura.Web.Api/Areas/HelpPage/ApiDescriptionExtensions.cs | C# | mit | 1,490 |
import os
import re
import subprocess
from six.moves.urllib.parse import urlparse, quote_plus
from subprocess import CalledProcessError, PIPE, STDOUT
from conans.client.tools.env import no_op, environment_append
from conans.client.tools.files import chdir
from conans.errors import ConanException
from conans.util.files import decode_text, to_file_bytes
class Git(object):
    """Thin wrapper around the command-line ``git`` client.

    Every command is executed inside ``self.folder`` and its stdout is
    returned as a stripped string.  Optionally injects credentials into
    remote URLs and forces an English locale so output parsing is stable.
    """

    def __init__(self, folder=None, verify_ssl=True, username=None, password=None,
                 force_english=True, runner=None):
        # Working directory for every git invocation; created if missing.
        self.folder = folder or os.getcwd()
        if not os.path.exists(self.folder):
            os.makedirs(self.folder)
        self._verify_ssl = verify_ssl
        # Run git with LC_ALL=en_US.UTF-8 so messages are parseable in English.
        self._force_eng = force_english
        self._username = username
        self._password = password
        # Optional callable used instead of subprocess (e.g. for testing).
        self._runner = runner

    def run(self, command):
        """Run ``git <command>`` in ``self.folder`` and return stripped stdout.

        Uses ``self._runner`` when provided, otherwise ``subprocess``
        (shell=True, raises CalledProcessError on non-zero exit).
        """
        command = "git %s" % command
        with chdir(self.folder) if self.folder else no_op():
            with environment_append({"LC_ALL": "en_US.UTF-8"}) if self._force_eng else no_op():
                if not self._runner:
                    return subprocess.check_output(command, shell=True).decode().strip()
                else:
                    return self._runner(command)

    def get_repo_root(self):
        """Return the absolute path of the repository's top-level directory."""
        return self.run("rev-parse --show-toplevel")

    def get_url_with_credentials(self, url):
        """Return ``url`` with configured username/password injected.

        The URL is returned untouched when no credentials were configured or
        when it already carries a password.  Credentials are URL-quoted and
        inserted after the first "://".
        """
        if not self._username or not self._password:
            return url
        if urlparse(url).password:
            return url

        user_enc = quote_plus(self._username)
        pwd_enc = quote_plus(self._password)
        url = url.replace("://", "://" + user_enc + ":" + pwd_enc + "@", 1)
        return url

    def _configure_ssl_verify(self):
        # Map the boolean to git's literal "true"/"false" config value.
        return self.run("config http.sslVerify %s" % ("true" if self._verify_ssl else "false"))

    def clone(self, url, branch=None):
        """Clone ``url`` into ``self.folder`` and return the git output.

        If the destination folder already has contents, falls back to
        init + fetch + ``checkout -t`` of a remote-tracking branch, which
        requires ``branch`` to be a branch name (not a tag or commit).
        """
        url = self.get_url_with_credentials(url)
        if os.path.exists(url):
            url = url.replace("\\", "/")  # Windows local directory
        if os.path.exists(self.folder) and os.listdir(self.folder):
            if not branch:
                raise ConanException("The destination folder '%s' is not empty, "
                                     "specify a branch to checkout (not a tag or commit) "
                                     "or specify a 'subfolder' "
                                     "attribute in the 'scm'" % self.folder)
            output = self.run("init")
            output += self._configure_ssl_verify()
            output += self.run('remote add origin "%s"' % url)
            output += self.run("fetch ")
            output += self.run("checkout -t origin/%s" % branch)
        else:
            branch_cmd = "--branch %s" % branch if branch else ""
            output = self.run('clone "%s" . %s' % (url, branch_cmd))
            output += self._configure_ssl_verify()

        return output

    def checkout(self, element, submodule=None):
        """Check out ``element`` and optionally initialize submodules.

        :param element: tag, branch or commit to check out.
        :param submodule: None, "shallow" or "recursive"; anything else
            raises ConanException.
        :return: accumulated git output.
        """
        self._check_git_repo()
        output = self.run('checkout "%s"' % element)

        if submodule:
            if submodule == "shallow":
                output += self.run("submodule sync")
                output += self.run("submodule update --init")
            elif submodule == "recursive":
                output += self.run("submodule sync --recursive")
                output += self.run("submodule update --init --recursive")
            else:
                raise ConanException("Invalid 'submodule' attribute value in the 'scm'. "
                                     "Unknown value '%s'. Allowed values: ['shallow', 'recursive']" % submodule)
        # Element can be a tag, branch or commit
        return output

    def excluded_files(self):
        """Return paths under ``self.folder`` that git ignores.

        Feeds every file and directory path to ``git check-ignore --stdin``,
        which echoes back the ignored ones.  Returns an empty list if the
        subprocess fails (best effort).
        """
        try:
            file_paths = [os.path.normpath(os.path.join(os.path.relpath(folder, self.folder), el)).replace("\\", "/")
                          for folder, dirpaths, fs in os.walk(self.folder)
                          for el in fs + dirpaths]
            p = subprocess.Popen(['git', 'check-ignore', '--stdin'],
                                 stdout=PIPE, stdin=PIPE, stderr=STDOUT, cwd=self.folder)
            paths = to_file_bytes("\n".join(file_paths))
            grep_stdout = decode_text(p.communicate(input=paths)[0])
            tmp = grep_stdout.splitlines()
        except CalledProcessError:
            tmp = []
        return tmp

    def get_remote_url(self, remote_name=None):
        """Return the URL of ``remote_name`` (default "origin"), or None.

        Parses ``git remote -v``; the trailing "(fetch)"/"(push)" token is
        stripped from each line.  Any parsing/subprocess error yields None.
        """
        self._check_git_repo()
        remote_name = remote_name or "origin"
        try:
            remotes = self.run("remote -v")
            for remote in remotes.splitlines():
                try:
                    name, url = remote.split(None, 1)
                    url, _ = url.rsplit(None, 1)
                    if name == remote_name:
                        return url
                except Exception:
                    pass
        except subprocess.CalledProcessError:
            pass
        return None

    def get_commit(self):
        """Return the commit hash of HEAD; raise ConanException on failure."""
        self._check_git_repo()
        try:
            commit = self.run("rev-parse HEAD")
            commit = commit.strip()
            return commit
        except Exception as e:
            raise ConanException("Unable to get git commit from %s\n%s" % (self.folder, str(e)))

    # Public alias: both names are part of the API.
    get_revision = get_commit

    def _check_git_repo(self):
        # Cheap sanity check: any git command fails outside a repository.
        try:
            self.run("status")
        except Exception:
            raise ConanException("Not a valid git repository")

    def get_branch(self):
        """Return the current branch name parsed from ``git status -bs``."""
        self._check_git_repo()
        try:
            status = self.run("status -bs --porcelain")
            # ## feature/scm_branch...myorigin/feature/scm_branch
            branch = status.splitlines()[0].split("...")[0].strip("#").strip()
            return branch
        except Exception as e:
            raise ConanException("Unable to get git branch from %s\n%s" % (self.folder, str(e)))
| luckielordie/conan | conans/client/tools/scm.py | Python | mit | 5,881 |
/**
 * Indicator buttons (dots): appends one <a> per slide, marks the first one
 * active, centres the button bar once it is measurable, and — on desktop
 * only — wires up click navigation.
 */
Banner.prototype.btn = function() {
    var self = this;
    var opts = this.option;
    var $container = this.$banner;

    // One anchor per slide.
    var markup = '';
    for (var n = 0; n < self.len; n++) {
        markup += '<a></a>';
    }
    $container.append($('<div class="tb-btn"/>').append(markup));

    var $dots = self.$btn = $('.tb-btn a', $container);
    $dots.first().addClass('active');

    // Deferred so the buttons have a layout width before centring the bar.
    setTimeout(function() {
        $dots.parent().css({
            marginLeft: -($dots.outerWidth(true) * $dots.length / 2)
        });
    }, 0);

    if (!Util.IS_MOBILE) {
        $dots.on('click.terseBanner', function() {
            if (self.isAnimated) return;
            opts.before.call(self, self.currentIndex);
            self.currentIndex = $(this).index();
            self.play();
        });
    }
};
| happyfreelife/easyBanner | src/btn.js | JavaScript | mit | 682 |
$(document).ready(function () {
    console.log("ready!");

    // Subscribe form: require both fields, then POST the payload as JSON.
    $("#subs").click(function () {
        var name = $('#name').val();
        var email = $('#email').val();
        if (name !== '' && email !== '') {
            $('#subs_err').html('');
            var subs = { name: name, email: email };
            var url = "/index.php/index/subscribe_user";
            $.post(url, { subs: JSON.stringify(subs) }).done(function (data) {
                $('#subscribe_content').html(data);
            }); // end of post
        } else {
            $('#subs_err').html('Please provide name and email');
        }
    });

    // Read a single query-string parameter. Returns the decoded value,
    // `true` for a bare flag (?flag), or undefined when absent.
    var getUrlParameter = function (sParam) {
        var sPageURL = decodeURIComponent(window.location.search.substring(1));
        var sURLVariables = sPageURL.split('&');
        for (var i = 0; i < sURLVariables.length; i++) {
            var sParameterName = sURLVariables[i].split('=');
            if (sParameterName[0] === sParam) {
                return sParameterName[1] === undefined ? true : sParameterName[1];
            }
        }
    };

    // Query-string values are strings, so compare against the string '3'
    // (the original `code == 3` only worked through implicit coercion).
    var code = getUrlParameter('errorcode');
    if (code === '3') {
        $('#login_err').html('Invalid email address or password');
    }

    // Contact form: all four fields are required.
    $("#contact_submit").click(function () {
        var name = $('#name').val();
        var email = $('#email').val();
        var phone = $('#phone').val();
        var comment = $('#comment').val();
        if (name !== '' && email !== '' && phone !== '' && comment !== '') {
            $('#contact_err').html('');
            var contact = { name: name, email: email, phone: phone, comment: comment };
            var url = "/index.php/index/send_contact_request";
            $.post(url, { contact: JSON.stringify(contact) }).done(function (data) {
                $('#contact_container').html(data);
            }); // end of post
        } else {
            $('#contact_err').html('Please provide all required fields');
        }
    });
});
module Zuora
  module RESTOperations
    # Mixin that adds a collection-level read operation to a REST resource.
    module All
      # GET every record of the including resource.
      #
      # params - optional Hash of query parameters forwarded to the request.
      def all(params = {})
        Zuora.request(:get, base_resource_url, params)
      end
    end
  end
end
| tradegecko/zuora-rest-ruby | lib/zuora/rest_operations/all.rb | Ruby | mit | 160 |
### import ####################################################################
import pycmds.project.classes as pc
import pycmds.hardware.hardware as hw
import pathlib
import appdirs
import toml
import yaqc
### driver ####################################################################
class Driver(hw.Driver):
    """pycmds hardware driver for a spectrometer served by a yaq daemon.

    Communication goes through ``yaqc.Client`` on a locally running daemon;
    position and grating-turret selection are exposed to the pycmds
    hardware framework.
    """

    def __init__(self, *args, **kwargs):
        # Port of the yaq daemon for this instrument; popped here so the
        # generic hw.Driver never sees the extra keyword.
        self._yaqd_port = kwargs.pop("yaqd_port")
        super().__init__(*args, **kwargs)
        # User-selectable grating turret (1-based indices in the GUI).
        self.grating_index = pc.Combo(
            name="Grating",
            allowed_values=[1, 2],
            section=self.name,
            option="grating_index",
            display=True,
            set_method="set_turret",
        )
        self.exposed.append(self.grating_index)

    def get_position(self):
        """Query the daemon for the current position, publish and return it."""
        native_position = self.ctrl.get_position()
        self.position.write(native_position, self.native_units)
        return self.position.read()

    def initialize(self, *args, **kwargs):
        """Connect to the daemon and complete driver start-up."""
        # open control
        self.ctrl = yaqc.Client(self._yaqd_port)
        # import some information from control
        id_dict = self.ctrl.id()
        self.serial_number = id_dict["serial"]
        self.position.write(self.ctrl.get_position())
        # recorded
        self.recorded[self.name] = [self.position, self.native_units, 1.0, "m", False]
        self.wait_until_still()
        # finish
        self.initialized.write(True)
        self.initialized_signal.emit()

    def is_busy(self):
        """True while the daemon reports motion or settling in progress."""
        return self.ctrl.busy()

    def set_position(self, destination):
        """Move to *destination* (native units) and block until still."""
        self.ctrl.set_position(float(destination))
        self.wait_until_still()

    def set_turret(self, destination_index):
        """Switch to grating *destination_index* (1-based) and refresh limits."""
        # The GUI may hand over a single-element list; unwrap it.
        if type(destination_index) == list:
            destination_index = destination_index[0]
        # turret index on ActiveX call starts from zero
        destination_index_zero_based = int(destination_index) - 1
        self.ctrl.set_turret(destination_index_zero_based)
        self.grating_index.write(destination_index)
        self.wait_until_still()
        # Travel limits depend on the installed grating; re-read from daemon.
        self.limits.write(*self.ctrl.get_limits(), self.native_units)
### gui #######################################################################
class GUI(hw.GUI):
    # No spectrometer-specific widgets; the generic hardware GUI suffices.
    pass
### hardware ##################################################################
class Hardware(hw.Hardware):
    """Hardware wrapper that tags spectrometer drivers with their kind."""

    def __init__(self, *args, **kwargs):
        # Set before base init so the framework sees the kind immediately.
        self.kind = "spectrometer"
        # super() for consistency with Driver.__init__ above (was an explicit
        # hw.Hardware.__init__ call; identical for single inheritance).
        super().__init__(*args, **kwargs)
### import ####################################################################


# Load the user's pycmds configuration and build one hardware object per
# configured spectrometer, plus the aggregate GUI panels.
conf = pathlib.Path(appdirs.user_config_dir("pycmds", "pycmds")) / "config.toml"
conf = toml.load(conf)
hardwares, gui, advanced_gui = hw.import_hardwares(
    conf.get("hardware", {}).get("spectrometers", {}),
    name="Spectrometers",
    Driver=Driver,
    GUI=GUI,
    Hardware=Hardware,
)
| wright-group/PyCMDS | pycmds/hardware/spectrometers.py | Python | mit | 2,840 |
/* global createNS */
/* exported filtersFactory */
var filtersFactory = (function () {
  /**
   * Create an SVG <filter> element with the given id.
   * Unless skipCoordinates is exactly true, the filter region is set to
   * cover the full object bounding box.
   */
  function createFilter(filId, skipCoordinates) {
    var fil = createNS('filter');
    fil.setAttribute('id', filId);
    if (skipCoordinates !== true) {
      fil.setAttribute('filterUnits', 'objectBoundingBox');
      fil.setAttribute('x', '0%');
      fil.setAttribute('y', '0%');
      fil.setAttribute('width', '100%');
      fil.setAttribute('height', '100%');
    }
    return fil;
  }

  /**
   * Create an feColorMatrix that copies the alpha channel into RGB,
   * turning alpha into luminance.
   */
  function createAlphaToLuminanceFilter() {
    var feColorMatrix = createNS('feColorMatrix');
    feColorMatrix.setAttribute('type', 'matrix');
    feColorMatrix.setAttribute('color-interpolation-filters', 'sRGB');
    feColorMatrix.setAttribute('values', '0 0 0 1 0 0 0 0 1 0 0 0 0 1 0 0 0 0 1 1');
    return feColorMatrix;
  }

  return {
    createFilter: createFilter,
    createAlphaToLuminanceFilter: createAlphaToLuminanceFilter,
  };
}());
| bodymovin/bodymovin | player/js/utils/filters.js | JavaScript | mit | 954 |
u"""
Fixer for Python 3 function parameter syntax
This fixer is rather sensitive to incorrect py3k syntax.
"""
# Note: "relevant" parameters are parameters following the first STAR in the list.
from lib2to3 import fixer_base
from lib2to3.fixer_util import token, String, Newline, Comma, Name
from libfuturize.fixer_util import indentation, suitify, DoubleStar
# Templates used to synthesize the kwargs-unpacking statements that replace
# Python 3 keyword-only parameters in the rewritten function body.
_assign_template = u"%(name)s = %(kwargs)s['%(name)s']; del %(kwargs)s['%(name)s']"
_if_template = u"if '%(name)s' in %(kwargs)s: %(assign)s"
_else_template = u"else: %(name)s = %(default)s"
# Name of the synthetic **kwargs dict added when the function had none.
_kwargs_default_name = u"_3to2kwargs"
def gen_params(raw_params):
    u"""
    Generator that yields tuples of (name, default_value) for each keyword-only
    parameter in the list.
    If no default is given, default_value is None (not Leaf(token.NAME, 'None'))
    """
    # The list must start with the bare STAR and contain nodes after the comma.
    assert raw_params[0].type == token.STAR and len(raw_params) > 2
    curr_idx = 2 # the first place a keyword-only parameter name can be is index 2
    max_idx = len(raw_params)
    while curr_idx < max_idx:
        curr_item = raw_params[curr_idx]
        prev_item = curr_item.prev_sibling
        if curr_item.type != token.NAME:
            # Skip commas and other separators.
            curr_idx += 1
            continue
        if prev_item is not None and prev_item.type == token.DOUBLESTAR:
            # A NAME right after '**' is the kwargs dict itself -- stop here.
            break
        name = curr_item.value
        nxt = curr_item.next_sibling
        if nxt is not None and nxt.type == token.EQUAL:
            # "name = default": the default is the node after the '='.
            default_value = nxt.next_sibling
            curr_idx += 2
        else:
            default_value = None
        yield (name, default_value)
        curr_idx += 1
def remove_params(raw_params, kwargs_default=_kwargs_default_name):
    u"""
    Removes all keyword-only args from the params list and a bare star, if any.
    Does not add the kwargs dict if needed.
    Returns True if more action is needed, False if not
    (more action is needed if no kwargs dict exists)
    """
    assert raw_params[0].type == token.STAR
    if raw_params[1].type == token.COMMA:
        # Bare star ("*, a, b"): remove both the star and its comma.
        raw_params[0].remove()
        raw_params[1].remove()
        kw_params = raw_params[2:]
    else:
        # "*args, a, b": keep *args; keyword-only params begin after it.
        kw_params = raw_params[3:]
    for param in kw_params:
        if param.type != token.DOUBLESTAR:
            param.remove()
        else:
            # Hit "**kwargs": a kwargs dict already exists, nothing to add.
            return False
    else:
        # Loop finished without finding '**': caller must add a kwargs dict.
        return True
def needs_fixing(raw_params, kwargs_default=_kwargs_default_name):
    u"""
    Returns string with the name of the kwargs dict if the params after the first star need fixing
    Otherwise returns empty string
    """
    found_kwargs = False
    needs_fix = False

    for t in raw_params[2:]:
        if t.type == token.COMMA:
            # Commas are irrelevant at this stage.
            continue
        elif t.type == token.NAME and not found_kwargs:
            # Keyword-only argument: definitely need to fix.
            needs_fix = True
        elif t.type == token.NAME and found_kwargs:
            # Return 'foobar' of **foobar, if needed.
            return t.value if needs_fix else u''
        elif t.type == token.DOUBLESTAR:
            # Found either '*' from **foobar.
            found_kwargs = True
        else:
            # Never found **foobar. Return a synthetic name, if needed.
            return kwargs_default if needs_fix else u''
    # NOTE(review): if the loop completes without returning (e.g. the params
    # end exactly at '**'), this implicitly returns None, which callers
    # treat the same as the empty string -- confirm intentional.
class FixKwargs(fixer_base.BaseFix):
    """Rewrite Python 3 keyword-only parameters into Python 2-compatible
    **kwargs unpacking at the top of the function body."""

    run_order = 7 # Run after function annotations are removed

    PATTERN = u"funcdef< 'def' NAME parameters< '(' arglist=typedargslist< params=any* > ')' > ':' suite=any >"

    def transform(self, node, results):
        """Replace keyword-only params with body statements that pop them
        from a kwargs dict, adding the dict itself when absent."""
        params_rawlist = results[u"params"]
        # Trim the list so it begins at the first STAR (if any).
        for i, item in enumerate(params_rawlist):
            if item.type == token.STAR:
                params_rawlist = params_rawlist[i:]
                break
        else:
            # No star at all: nothing keyword-only, nothing to do.
            return
        # params is guaranteed to be a list starting with *.
        # if fixing is needed, there will be at least 3 items in this list:
        # [STAR, COMMA, NAME] is the minimum that we need to worry about.
        new_kwargs = needs_fixing(params_rawlist)
        # new_kwargs is the name of the kwargs dictionary.
        if not new_kwargs:
            return
        suitify(node)
        # At this point, params_rawlist is guaranteed to be a list
        # beginning with a star that includes at least one keyword-only param
        # e.g., [STAR, NAME, COMMA, NAME, COMMA, DOUBLESTAR, NAME] or
        # [STAR, COMMA, NAME], or [STAR, COMMA, NAME, COMMA, DOUBLESTAR, NAME]
        # Anatomy of a funcdef: ['def', 'name', parameters, ':', suite]
        # Anatomy of that suite: [NEWLINE, INDENT, first_stmt, all_other_stmts]
        # We need to insert our new stuff before the first_stmt and change the
        # first_stmt's prefix.
        suite = node.children[4]
        first_stmt = suite.children[2]
        ident = indentation(first_stmt)
        # Insert in reverse order so the statements end up before first_stmt
        # in declaration order.
        for name, default_value in gen_params(params_rawlist):
            if default_value is None:
                # Required keyword-only arg: unconditional pop from kwargs.
                suite.insert_child(2, Newline())
                suite.insert_child(2, String(_assign_template % {u'name': name, u'kwargs': new_kwargs}, prefix=ident))
            else:
                # Optional keyword-only arg: pop if present, else use default.
                suite.insert_child(2, Newline())
                suite.insert_child(2, String(_else_template % {u'name': name, u'default': default_value}, prefix=ident))
                suite.insert_child(2, Newline())
                suite.insert_child(2, String(
                    _if_template % {u'assign': _assign_template % {u'name': name, u'kwargs': new_kwargs}, u'name': name,
                                    u'kwargs': new_kwargs}, prefix=ident))
        first_stmt.prefix = ident
        suite.children[2].prefix = u""
        # Now, we need to fix up the list of params.
        must_add_kwargs = remove_params(params_rawlist)
        if must_add_kwargs:
            # The original signature had no **kwargs; append our synthetic one.
            arglist = results[u'arglist']
            if len(arglist.children) > 0 and arglist.children[-1].type != token.COMMA:
                arglist.append_child(Comma())
            arglist.append_child(DoubleStar(prefix=u" "))
            arglist.append_child(Name(new_kwargs))
| thonkify/thonkify | src/lib/libpasteurize/fixes/fix_kwargs.py | Python | mit | 6,066 |
'use strict';

/**
 * Post module: registers the /new-post route and the controller used to
 * author a new blog article backed by Firebase.
 */
angular.module('myApp.post', ['ngRoute'])

  .config(['$routeProvider', function ($routeProvider) {
    $routeProvider.when('/new-post', {
      templateUrl: 'posts/new-post.html',
      controller: 'PostCtrl'
    });
  }])

  .controller('PostCtrl', ['$scope', '$firebaseArray', 'CommonProp', '$location',
    function ($scope, $firebaseArray, CommonProp, $location) {
      // Only authenticated users may author posts.
      if (!CommonProp.getUser()) {
        $location.path('/signin');
      }

      // Persist the drafted article, then navigate back home on success.
      $scope.addPost = function () {
        var firebaseObj = new Firebase("https://amber-heat-2147.firebaseio.com/articles");
        var fb = $firebaseArray(firebaseObj);
        var title = $scope.article.title;
        var content = $scope.article.content;
        fb.$add({
          title: title,
          content: content,
          author: CommonProp.getUser()
        }).then(function (ref) {
          $location.path('/home');
        }, function (error) {
          console.log("Error:", error);
        });
      };

      $scope.logout = function () {
        CommonProp.logoutUser();
      };
    }
  ]);
/*
* This file is part of Jiffy, licensed under the MIT License (MIT).
*
* Copyright (c) OreCruncher
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package org.blockartistry.world;
import cpw.mods.fml.relauncher.Side;
import cpw.mods.fml.relauncher.SideOnly;
import net.minecraft.block.Block;
import net.minecraft.block.material.Material;
import net.minecraft.init.Blocks;
import net.minecraft.tileentity.TileEntity;
import net.minecraft.world.EnumSkyBlock;
import net.minecraft.world.IBlockAccess;
import net.minecraft.world.World;
import net.minecraft.world.biome.BiomeGenBase;
import net.minecraft.world.chunk.Chunk;
import net.minecraftforge.common.util.ForgeDirection;
/**
* Used by the client renderer as well as path finding routines. Changes:
*
* + Chunk array vs. matrix
*
* + Removed unnecessary checks
*/
public class ChunkCache implements IBlockAccess {

    // Chunk-space coordinates of the cache origin (inclusive).
    private final int chunkX;
    private final int chunkZ;
    // Cache extents, in chunks, along X and Z.
    private final int dimX;
    private final int dimZ;
    // Flat row-major chunk storage: index = arrayX + arrayZ * dimX.
    private final Chunk[] chunkArray;
    // True when every cached chunk is empty within the requested Y band.
    private final boolean isEmpty;
    private final World worldObj;

    /**
     * Caches all chunks covering the block region (x1,z1)-(x2,z2), grown by
     * {@code buffer} blocks on every side.
     */
    public ChunkCache(World world, int x1, int y1, int z1, int x2, int y2, int z2, int buffer) {
        this.worldObj = world;
        this.chunkX = x1 - buffer >> 4;
        this.chunkZ = z1 - buffer >> 4;
        int l1 = x2 + buffer >> 4;
        int i2 = z2 + buffer >> 4;
        this.dimX = l1 - this.chunkX + 1;
        this.dimZ = i2 - this.chunkZ + 1;
        this.chunkArray = new Chunk[this.dimX * this.dimZ];
        boolean emptyFlag = true;

        for (int j2 = this.chunkX; j2 <= l1; ++j2) {
            for (int k2 = this.chunkZ; k2 <= i2; ++k2) {
                final int idx = j2 - this.chunkX + (k2 - this.chunkZ) * this.dimX;
                final Chunk chunk = this.chunkArray[idx] = world.getChunkFromChunkCoords(j2, k2);
                assert chunk != null;
                // Once any chunk has content in [y1, y2], the cache is non-empty.
                if (emptyFlag && !chunk.getAreLevelsEmpty(y1, y2))
                    emptyFlag = false;
            }
        }

        this.isEmpty = emptyFlag;
    }

    /**
     * set by !chunk.getAreLevelsEmpty
     */
    @SideOnly(Side.CLIENT)
    public boolean extendedLevelsInChunkCache() {
        return this.isEmpty;
    }

    // Returns the block at (x, y, z); air for out-of-band Y values.
    public Block getBlock(final int x, final int y, final int z) {
        // Seen out of range Ys come in. Haven't seen out of range
        // X or Z. Relaxing range checks as not needed.
        if(y < 0 || y > 255)
            return Blocks.air;

        final int arrayX = (x >> 4) - this.chunkX;
        final int arrayZ = (z >> 4) - this.chunkZ;
        assert (arrayX >= 0 && arrayX < this.dimX && arrayZ >= 0 && arrayZ < this.dimZ);
        // if (l >= 0 && l < this.dimX && i1 >= 0 && i1 < this.dimZ)
        return this.chunkArray[arrayX + arrayZ * this.dimX].getBlock(x & 15, y, z & 15);
    }

    // Returns the tile entity at (x, y, z), or null for out-of-band Y.
    public TileEntity getTileEntity(final int x, final int y, final int z) {
        // Seen out of range Ys come in. Haven't seen out of range
        // X or Z. Relaxing range checks as not needed.
        if(y < 0 || y > 255)
            return null;

        final int arrayX = (x >> 4) - this.chunkX;
        final int arrayZ = (z >> 4) - this.chunkZ;
        assert (arrayX >= 0 && arrayX < this.dimX && arrayZ >= 0 && arrayZ < this.dimZ);
        // if (l >= 0 && l < this.dimX && i1 >= 0 && i1 < this.dimZ)
        return this.chunkArray[arrayX + arrayZ * this.dimX].func_150806_e(x & 15, y, z & 15);
    }

    // Returns the block metadata at (x, y, z); 0 for out-of-band Y.
    public int getBlockMetadata(final int x, final int y, final int z) {
        // Seen out of range Ys come in. Haven't seen out of range
        // X or Z. Relaxing range checks as not needed.
        if(y < 0 || y > 255)
            return 0;

        final int arrayX = (x >> 4) - this.chunkX;
        final int arrayZ = (z >> 4) - this.chunkZ;
        assert (arrayX >= 0 && arrayX < this.dimX && arrayZ >= 0 && arrayZ < this.dimZ);
        // if (l >= 0 && l < this.dimX && i1 >= 0 && i1 < this.dimZ)
        return this.chunkArray[arrayX + arrayZ * this.dimX].getBlockMetadata(x & 15, y, z & 15);
    }

    public boolean isAirBlock(final int x, final int y, final int z) {
        return getBlock(x, y, z).getMaterial() == Material.air;
    }

    // Strong redstone power provided by the block at (x, y, z) toward dir.
    public int isBlockProvidingPowerTo(final int x, final int y, final int z, final int dir) {
        return getBlock(x, y, z).isProvidingStrongPower(this, x, y, z, dir);
    }

    /**
     * Any Light rendered on a 1.8 Block goes through here
     */
    @SideOnly(Side.CLIENT)
    public int getLightBrightnessForSkyBlocks(final int x, final int y, final int z, int p_72802_4_) {
        int i1 = this.getSkyBlockTypeBrightness(EnumSkyBlock.Sky, x, y, z);
        int j1 = this.getSkyBlockTypeBrightness(EnumSkyBlock.Block, x, y, z);

        // Clamp block light up to the caller-supplied minimum.
        if (j1 < p_72802_4_) {
            j1 = p_72802_4_;
        }

        // Pack sky light in the high bits, block light in the low bits.
        return i1 << 20 | j1 << 4;
    }

    /**
     * Gets the biome for a given set of x/z coordinates
     */
    @SideOnly(Side.CLIENT)
    public BiomeGenBase getBiomeGenForCoords(final int x, final int z) {
        return this.worldObj.getBiomeGenForCoords(x, z);
    }

    /**
     * Brightness for SkyBlock.Sky is clear white and (through color computing
     * it is assumed) DEPENDENT ON DAYTIME. Brightness for SkyBlock.Block is
     * yellowish and independent.
     */
    @SideOnly(Side.CLIENT)
    public int getSkyBlockTypeBrightness(final EnumSkyBlock skyBlock, final int x, int y, final int z) {
        // NOTE(review): the asymmetric bound (x < 30000000 but z <= 30000000)
        // mirrors the replaced vanilla code -- confirm intentional.
        if (x >= -30000000 && z >= -30000000 && x < 30000000 && z <= 30000000) {
            if (skyBlock == EnumSkyBlock.Sky && this.worldObj.provider.hasNoSky)
                return 0;

            if (y < 0)
                y = 0;
            else if (y > 255)
                y = 255;

            final int arrayX = (x >> 4) - this.chunkX;
            final int arrayZ = (z >> 4) - this.chunkZ;
            assert (arrayX >= 0 && arrayX < this.dimX && arrayZ >= 0 && arrayZ < this.dimZ);
            // if (l >= 0 && l < this.dimX && i1 >= 0 && i1 < this.dimZ)
            final Chunk chunk = this.chunkArray[arrayX + arrayZ * this.dimX];

            if (chunk.getBlock(x & 15, y, z & 15).getUseNeighborBrightness()) {
                // Blocks like stairs/slabs use the brightest neighbor instead
                // of their own (occluded) light value.
                int l = this.getSpecialBlockBrightness(skyBlock, x, y + 1, z);
                int i1 = this.getSpecialBlockBrightness(skyBlock, x + 1, y, z);
                int j1 = this.getSpecialBlockBrightness(skyBlock, x - 1, y, z);
                int k1 = this.getSpecialBlockBrightness(skyBlock, x, y, z + 1);
                int l1 = this.getSpecialBlockBrightness(skyBlock, x, y, z - 1);

                if (i1 > l) {
                    l = i1;
                }

                if (j1 > l) {
                    l = j1;
                }

                if (k1 > l) {
                    l = k1;
                }

                if (l1 > l) {
                    l = l1;
                }

                return l;
            } else {
                return chunk.getSavedLightValue(skyBlock, x & 15, y, z & 15);
            }
        } else {
            return skyBlock.defaultLightValue;
        }
    }

    /**
     * is only used on stairs and tilled fields
     */
    @SideOnly(Side.CLIENT)
    public int getSpecialBlockBrightness(final EnumSkyBlock skyBlock, final int x, int y, final int z) {
        if (x >= -30000000 && z >= -30000000 && x < 30000000 && z <= 30000000) {
            if (y < 0)
                y = 0;
            else if (y > 255)
                y = 255;

            final int arrayX = (x >> 4) - this.chunkX;
            final int arrayZ = (z >> 4) - this.chunkZ;
            assert (arrayX >= 0 && arrayX < this.dimX && arrayZ >= 0 && arrayZ < this.dimZ);
            // if (l >= 0 && l < this.dimX && i1 >= 0 && i1 < this.dimZ)
            return this.chunkArray[arrayX + arrayZ * this.dimX].getSavedLightValue(skyBlock, x & 15, y, z & 15);
        } else {
            return skyBlock.defaultLightValue;
        }
    }

    /**
     * Returns current world height.
     */
    @SideOnly(Side.CLIENT)
    public int getHeight() {
        return 256;
    }

    @Override
    public boolean isSideSolid(final int x, final int y, final int z, final ForgeDirection side,
            final boolean _default) {
        // Outside world bounds the caller-provided default decides.
        if (x < -30000000 || z < -30000000 || x >= 30000000 || z >= 30000000) {
            return _default;
        }

        return getBlock(x, y, z).isSideSolid(this, x, y, z, side);
    }
}
| OreCruncher/Jiffy | src/main/java/org/blockartistry/world/ChunkCache.java | Java | mit | 8,333 |