| file_name (string, 4–140 chars) | prefix (string, ≤ 39k chars) | suffix (string, ≤ 36.1k chars) | middle (string, ≤ 29.4k chars) | fim_type (4 classes) |
|---|---|---|---|---|
elasticsearch_adapter.js | const MainDatabase = require('../mainDatabase.js');
const elasticsearch = require('elasticsearch');
const AgentKeepAlive = require('agentkeepalive');
const cloneObject = require('clone');
const async = require('async');
const {BuilderNode} = require('../../../utils/filterbuilder');
const NexxusError = require('../../NexxusError');
const Services = require('../../Services');
const constants = require('../../constants');
const NexxusPatch = require('../../Patch');
const utils = require('../../../utils/utils');
const tryConnectionMethod = Symbol('try connection private method');
const processSchemaModificationMethod = Symbol('process schema modification method');
class ElasticSearchDB extends MainDatabase {
| (config) {
if (typeof config !== 'object' || Object.keys(config).length === 0) {
throw new NexxusError(NexxusError.errors.ServerFailure, ['supplied empty or invalid configuration parameter']);
}
let esConfig = {
maxRetries: 10,
deadTimeout: 1e4,
pingTimeout: 3000,
keepAlive: true,
maxSockets: 300,
createNodeAgent (connection, config) {
return new AgentKeepAlive(connection.makeAgentConfig(config));
}
};
if (config.hosts) {
esConfig.hosts = config.hosts;
} else if (config.host) {
esConfig.host = config.host;
esConfig.sniffOnStart = true;
esConfig.sniffInterval = 30000;
esConfig.sniffOnConnectionFault = true;
}
const esApi = elasticsearch.Client.apis._default;
const disconnectFunctionHandler = e => {
if (e.message === 'No Living connections' && !this.reconnecting) {
this.reconnecting = true;
Services.logger.emergency(`Lost connection to elasticsearch: ${e.message}`);
setTimeout(() => {
this[tryConnectionMethod]();
}, 2000);
this.emit('disconnect');
}
throw e;
};
for (const func in esApi) {
if (esApi[func] && esApi[func].name === 'action') {
esApi[func] = new Proxy(esApi[func], {
apply: (target, ctx, args) => {
// this will replace the original callback
// when something bad happens normal operations are disrupted, thus we also emit a disconnected event
// so the application knows something went wrong
const lastArg = args.pop();
// also the ES library supports both callback and promises
if (lastArg instanceof Function) {
args.push((err, res) => {
if (err) {
if (err.message.startsWith('Request Timeout')) {
this.connected = false;
this.reconnecting = true;
return lastArg(err);
}
return disconnectFunctionHandler(err);
}
return lastArg(null, res);
});
return Reflect.apply(target, ctx, args);
}
args.push(lastArg);
return Reflect.apply(target, ctx, args).catch(disconnectFunctionHandler);
}
});
}
}
super(new elasticsearch.Client(esConfig));
this.config = config;
this.config.subscribe_limit = this.config.subscribe_limit || 64;
this.config.get_limit = this.config.get_limit || 384;
this.connected = false;
this.reconnecting = false;
this[tryConnectionMethod]();
}
[tryConnectionMethod] () {
let error = false;
async.doWhilst(callback => {
this.connection.ping({}, (err, res) => {
if (!err) {
Services.logger.info('Connected to ElasticSearch MainDatabase');
this.connected = true;
return setImmediate(callback);
}
if (err.message === 'No Living connections') {
Services.logger.error(`Failed connecting to Elasticsearch "${this.config.host || this.config.hosts.join(', ')}": ${err.message}. Retrying...`);
setTimeout(callback, 2000);
} else if (err.message.startsWith('Request Timeout')) {
Services.logger.error(`Failed connecting to Elasticsearch "${this.config.host || this.config.hosts.join(', ')}": ${err.message}. Retrying...`);
setTimeout(callback, 2000);
} else {
error = err;
Services.logger.emergency(`Connection to ElasticSearch failed: ${err.message}`);
setImmediate(callback);
}
return null;
});
}, () => this.connected === false && error === false, () => {
if (error) {
this.emit('error', error);
} else {
if (this.reconnecting === true) {
this.emit('reconnected');
} else {
this.emit('ready');
}
this.reconnecting = false;
}
});
}
async [processSchemaModificationMethod] (applicationId, modifications) {
if (modifications.added.schema) {
const addedModels = Object.keys(modifications.added.schema);
await addedModels.reduce(async (promise, modelName) => {
await promise;
try {
await this.connection.indices.create({
index: `${constants.CHANNEL_KEY_PREFIX}-${applicationId}-${modelName}`
});
Services.logger.debug(`Successfully created index: "${constants.CHANNEL_KEY_PREFIX}-${applicationId}-${modelName}"`);
} catch (err) {
Services.logger.warning(`Index already exists: "${constants.CHANNEL_KEY_PREFIX}-${applicationId}-${modelName}"`);
}
return Promise.resolve();
}, Promise.resolve());
}
if (modifications.deleted.schema) {
const removedModels = Object.keys(modifications.deleted.schema);
const indicesToRemove = removedModels.map(modelName => `${constants.CHANNEL_KEY_PREFIX}-${applicationId}-${modelName}`);
try {
await this.connection.indices.delete({
index: indicesToRemove
});
Services.logger.debug(`Successfully removed indices: "${indicesToRemove}"`);
} catch (err) {
Services.logger.warning(`Error when trying to remove indices: ${err}`);
}
}
}
/**
*
* @param {FilterBuilder} builder
* @return {Object} The result of <code>builder.build()</code> but with a few translations for ES
*/
getQueryObject (builder) {
const translationMappings = {
is: 'term',
not: 'not',
exists: 'exists',
range: 'range',
in_array: 'terms',
like: 'regexp'
};
function Translate (node) {
node.children.forEach(child => {
if (child instanceof BuilderNode) {
Translate(child);
} else {
let replaced = Object.keys(child)[0];
if (translationMappings[replaced]) {
// 'not' contains a filter name
if (replaced === 'not') {
let secondReplaced = Object.keys(child[replaced])[0];
if (translationMappings[secondReplaced] !== secondReplaced) {
child[replaced][translationMappings[secondReplaced]] = cloneObject(child[replaced][secondReplaced]);
delete child[replaced][secondReplaced];
}
} else if (replaced === 'like') {
child[translationMappings[replaced]] = cloneObject(child[replaced]);
let fieldObj = {};
Object.keys(child[translationMappings[replaced]]).forEach(field => {
fieldObj[field] = `.*${escapeRegExp(child[translationMappings[replaced]][field])}.*`;
});
child[translationMappings[replaced]] = fieldObj;
delete child[replaced];
} else if (translationMappings[replaced] !== replaced) {
child[translationMappings[replaced]] = cloneObject(child[replaced]);
delete child[replaced];
}
}
}
});
}
Translate(builder.root);
return builder.build();
}
async getObjects (items) {
if (!Array.isArray(items) || items.length === 0) {
throw new NexxusError(NexxusError.errors.InvalidFieldValue, 'ElasticSearchDB.getObjects: "ids" should be a non-empty array');
}
const docs = items.map(object => {
let index;
switch (object.type) {
case 'application':
case 'admin': {
index = `${constants.CHANNEL_KEY_PREFIX}-${object.type}`;
break;
}
default: {
index = `${constants.CHANNEL_KEY_PREFIX}-${object.application_id}-${object.type}`;
}
}
return {
_id: object.id,
_index: index
};
}, this);
const results = await this.connection.mget({
body: {
docs
}
});
let errors = [];
let objects = [];
let versions = new Map();
results.docs.forEach(result => {
if (result.found) {
objects.push(result._source);
versions.set(result._id, result._version);
} else {
errors.push(new NexxusError(NexxusError.errors.ObjectNotFound, [result._id]));
}
});
return {errors, results: objects, versions};
}
async searchObjects (options) {
let index;
const reqBody = {
query: {
filtered: {
filter: {}
}
}
};
switch (options.modelName) {
case 'application':
case 'admin': {
index = `${constants.CHANNEL_KEY_PREFIX}-${options.modelName}`;
break;
}
default: {
if (Array.isArray(options.modelName)) {
index = options.modelName.map(model => {
return `${constants.CHANNEL_KEY_PREFIX}-${options.applicationId}-${model}`;
}).join(',');
} else {
index = `${constants.CHANNEL_KEY_PREFIX}-${options.applicationId}-${options.modelName}`;
}
}
}
if (options.filters && !options.filters.isEmpty()) {
reqBody.query = this.getQueryObject(options.filters);
} else {
reqBody.query = {match_all: {}};
}
if (options.fields) {
if (!(options.scanFunction instanceof Function)) {
throw new NexxusError(NexxusError.errors.ServerFailure, ['searchObjects was provided with fields but no scanFunction']);
}
let hitsCollected = 0;
let response = await this.connection.search({
index,
body: reqBody,
scroll: '10s',
fields: options.fields,
size: 1024
});
do {
let objects = [];
hitsCollected += response.hits.hits.length;
response.hits.hits.forEach(hit => {
let obj = {};
for (const f in hit.fields) {
obj[f] = hit.fields[f][0];
}
objects.push(obj);
});
if (response.hits.hits.length) {
await options.scanFunction(objects);
}
response = await this.connection.scroll({
scrollId: response._scroll_id,
scroll: '10s'
});
} while (response.hits.total !== hitsCollected);
return null;
}
if (options.sort) {
reqBody.sort = [];
Object.keys(options.sort).forEach(field => {
let sortObjectField = {};
if (!options.sort[field].type) {
sortObjectField[field] = { order: options.sort[field].order, unmapped_type: 'long' };
} else if (options.sort[field].type === 'geo') {
sortObjectField._geo_distance = {};
sortObjectField._geo_distance[field] = { lat: options.sort[field].poi.lat || 0.0, lon: options.sort[field].poi.long || 0.0 };
sortObjectField._geo_distance.order = options.sort[field].order;
}
reqBody.sort.push(sortObjectField);
});
}
const results = await this.connection.search({
index,
body: reqBody,
from: options.offset,
size: options.limit
});
return {results: results.hits.hits.map(object => object._source)};
}
async countObjects (modelName, options) {
let index;
let reqBody = {
query: {
filtered: {
filter: {}
}
}
};
switch (modelName) {
case 'application':
case 'admin': {
index = `${constants.CHANNEL_KEY_PREFIX}-${modelName}`;
break;
}
default: {
index = `${constants.CHANNEL_KEY_PREFIX}-${options.applicationId}-${modelName}`;
}
}
if (options.filters && !options.filters.isEmpty()) {
reqBody.query.filtered.filter = this.getQueryObject(options.filters);
}
if (options.aggregation) {
reqBody.aggs = { aggregation: options.aggregation };
const result = await this.connection.search({
index,
body: reqBody,
search_type: 'count',
queryCache: true
});
let countResult = { count: result.hits.total };
countResult.aggregation = result.aggregations.aggregation.value;
return Object.assign({ count: result.hits.total }, { aggregation: result.aggregations.aggregation.value });
}
const result = await this.connection.count({
index,
body: reqBody
});
return { count: result.count };
}
async createObjects (objects) {
if (!Array.isArray(objects) || objects.length === 0) {
throw new NexxusError('InvalidFieldValue', ['ElasticSearchDB.createObjects: "objects" should be a non-empty array']);
}
let shouldRefresh = false;
let bulk = [];
let errors = [];
await objects.reduce(async (promise, obj) => {
await promise;
let index;
switch (obj.type) {
case 'admin':
case 'application': {
index = `${constants.CHANNEL_KEY_PREFIX}-${obj.type}`;
shouldRefresh = true;
if (obj.schema) {
await Object.keys(obj.schema).reduce(async (p, modelName) => {
await p;
return this.connection.indices.create({
index: `${constants.CHANNEL_KEY_PREFIX}-${obj.id}-${modelName}`
});
}, Promise.resolve());
}
break;
}
default: {
index = `${constants.CHANNEL_KEY_PREFIX}-${obj.applicationId}-${obj.type}`;
}
}
bulk.push({ index: { _id: obj.id, _index: index, _type: '_doc' } });
bulk.push(obj);
return Promise.resolve();
}, Promise.resolve());
if (bulk.length !== objects.length * 2) {
Services.logger.warning(`ElasticSearchDB.createObjects: some objects were missing their "type" and "id" (${(objects.length - bulk.length / 2)} failed)`);
}
if (!bulk.length) {
return null;
}
const res = await this.connection.bulk({
body: bulk,
refresh: shouldRefresh
});
if (res.errors) {
res.items.forEach(error => {
errors.push(new NexxusError('ServerFailure', `Error creating ${error.index._type}: ${error.index.error}`));
});
}
return {errors};
}
async updateObjects (patches) {
if (!Array.isArray(patches) || patches.length === 0) {
throw new NexxusError(NexxusError.errors.InvalidFieldValue, 'ElasticSearchDB.updateObjects: "patches" should be a non-empty array');
}
let errors = [];
let shouldRefresh = false;
let finalResults = new Map();
async function getAndUpdate (objectPatches) {
let conflictedObjectPatches = [];
let objectsToGet = new Map();
let modifiedApplicationSchemas = new Map();
objectPatches.forEach(patch => {
if (!(patch instanceof NexxusPatch)) {
throw new TypeError('ElasticSearchDB.updateObjects: array elements must all be instances of NexxusPatch');
}
if (objectsToGet.has(patch.id)) {
objectsToGet.get(patch.id).patches.push(patch);
} else {
objectsToGet.set(patch.id, { id: patch.id, type: patch.model, application_id: patch.applicationId, patches: [patch] });
}
// we need to remember which application had its schema modified
if (patch.field === 'schema') {
modifiedApplicationSchemas.set(patch.id, true);
}
});
let bulk = [];
if (objectPatches.length === 0) {
return null;
}
let { errors: notFoundErrors, results, versions } = await this.getObjects(Array.from(objectsToGet.values()));
errors = errors.concat(notFoundErrors);
if (!results || !results.length) {
return null;
}
results = results.map(dbObject => {
return utils.getProperModel(dbObject);
});
results.forEach(model => {
const {diff, detailedDiff} = NexxusPatch.applyPatches(objectsToGet.get(model.properties.id).patches, model);
let index;
if (modifiedApplicationSchemas.has(model.properties.id)) {
try {
// we don't want this to delay other writes
this[processSchemaModificationMethod](model.properties.id, detailedDiff);
} catch (err) {
Services.logger.warning(`Unable to process schema modifications: ${err}`);
}
}
finalResults.set(model.properties.id, model);
switch (model.properties.type) {
case 'application':
case 'admin': {
index = `${constants.CHANNEL_KEY_PREFIX}-${model.properties.type}`;
shouldRefresh = true;
break;
}
default: {
index = `${constants.CHANNEL_KEY_PREFIX}-${model.properties.application_id}-${model.properties.type}`;
}
}
bulk.push({ update: { _id: model.properties.id, _version: versions[model.properties.id], _index: index, _type: '_doc' } });
bulk.push({ doc: diff });
});
const res = await this.connection.bulk({
body: bulk,
refresh: shouldRefresh
});
if (res.errors) {
res.items.forEach(error => {
if (error.update.status === 409) {
objectsToGet.get(error.update._id).patches.forEach(patch => {
conflictedObjectPatches.push(patch);
});
} else {
errors.push(new Error(`Failed to update ${objectsToGet.get(error.update._id).type} with ID ${error.update._id}: ${error.update.error.reason}`));
}
});
}
if (conflictedObjectPatches.length) {
Services.logger.debug(`Retry-on-conflict object count: ${conflictedObjectPatches.length}`);
return getAndUpdate.call(this, conflictedObjectPatches);
}
return null;
}
await getAndUpdate.call(this, patches);
return {errors, results: Array.from(finalResults.values())};
}
async deleteObjects (objects) {
if (!(objects instanceof Map)) {
throw new NexxusError(NexxusError.errors.InvalidFieldValue, 'deleteObjects must be supplied a Map');
}
let errors = [];
let deleted = [];
let bulk = [];
let shouldRefresh = false;
objects.forEach((object, id) => {
if (typeof id !== 'string') {
errors.push(new NexxusError(NexxusError.errors.InvalidFieldValue,
`object with ID "${id}" supplied for deleteObjects is not a valid model type`));
return null;
}
let index;
switch (object.type) {
case 'application':
case 'admin': {
index = `${constants.CHANNEL_KEY_PREFIX}-${object.type}`;
shouldRefresh = true;
break;
}
default: {
index = `${constants.CHANNEL_KEY_PREFIX}-${object.application_id}-${id}`;
}
}
return bulk.push({ delete: { _index: index, _id: id, _type: '_doc' } });
});
if (bulk.length === 0) {
return {errors};
}
const results = await this.connection.bulk({
body: bulk,
refresh: shouldRefresh
});
results.items.forEach(result => {
if (result.delete.result === 'not_found') {
errors.push(new NexxusError(NexxusError.errors.ObjectNotFound, [result.delete._id]));
} else {
deleted.push(result.delete._id);
}
});
return {errors, results: deleted};
}
}
/**
 * Escapes every RegExp metacharacter in `str` so the result can be embedded
 * verbatim inside a regular-expression pattern.
 * @param {string} str - raw user text
 * @returns {string} the input with each special character backslash-escaped
 */
function escapeRegExp (str) {
	// '$&' in the replacement stands for the matched character itself.
	const metacharacters = /[-[\]/{}()*+?.\\^$|]/g;
	return str.replace(metacharacters, '\\$&');
}
module.exports = ElasticSearchDB;
| constructor | identifier_name |
elasticsearch_adapter.js | const MainDatabase = require('../mainDatabase.js');
const elasticsearch = require('elasticsearch');
const AgentKeepAlive = require('agentkeepalive');
const cloneObject = require('clone');
const async = require('async');
const {BuilderNode} = require('../../../utils/filterbuilder');
const NexxusError = require('../../NexxusError');
const Services = require('../../Services');
const constants = require('../../constants');
const NexxusPatch = require('../../Patch');
const utils = require('../../../utils/utils');
const tryConnectionMethod = Symbol('try connection private method');
const processSchemaModificationMethod = Symbol('process schema modification method');
class ElasticSearchDB extends MainDatabase {
constructor (config) {
if (typeof config !== 'object' || Object.keys(config).length === 0) {
throw new NexxusError(NexxusError.errors.ServerFailure, ['supplied empty or invalid configuration parameter']);
}
let esConfig = {
maxRetries: 10,
deadTimeout: 1e4,
pingTimeout: 3000,
keepAlive: true,
maxSockets: 300,
createNodeAgent (connection, config) {
return new AgentKeepAlive(connection.makeAgentConfig(config));
}
};
if (config.hosts) {
esConfig.hosts = config.hosts;
} else if (config.host) {
esConfig.host = config.host;
esConfig.sniffOnStart = true;
esConfig.sniffInterval = 30000;
esConfig.sniffOnConnectionFault = true;
}
const esApi = elasticsearch.Client.apis._default;
const disconnectFunctionHandler = e => {
if (e.message === 'No Living connections' && !this.reconnecting) {
this.reconnecting = true;
Services.logger.emergency(`Lost connection to elasticsearch: ${e.message}`);
setTimeout(() => {
this[tryConnectionMethod]();
}, 2000);
this.emit('disconnect');
}
throw e;
};
for (const func in esApi) {
if (esApi[func] && esApi[func].name === 'action') {
esApi[func] = new Proxy(esApi[func], {
apply: (target, ctx, args) => {
// this will replace the original callback
// when something bad happens normal operations are disrupted, thus we also emit a disconnected event
// so the application knows something went wrong
const lastArg = args.pop();
// also the ES library supports both callback and promises
if (lastArg instanceof Function) {
args.push((err, res) => {
if (err) {
if (err.message.startsWith('Request Timeout')) {
this.connected = false;
this.reconnecting = true;
return lastArg(err);
}
return disconnectFunctionHandler(err);
}
return lastArg(null, res);
});
return Reflect.apply(target, ctx, args);
}
args.push(lastArg);
return Reflect.apply(target, ctx, args).catch(disconnectFunctionHandler);
}
});
}
}
super(new elasticsearch.Client(esConfig));
this.config = config;
this.config.subscribe_limit = this.config.subscribe_limit || 64;
this.config.get_limit = this.config.get_limit || 384;
this.connected = false;
this.reconnecting = false;
this[tryConnectionMethod]();
}
[tryConnectionMethod] () {
let error = false;
async.doWhilst(callback => {
this.connection.ping({}, (err, res) => {
if (!err) {
Services.logger.info('Connected to ElasticSearch MainDatabase');
this.connected = true;
return setImmediate(callback);
}
if (err.message === 'No Living connections') {
Services.logger.error(`Failed connecting to Elasticsearch "${this.config.host || this.config.hosts.join(', ')}": ${err.message}. Retrying...`);
setTimeout(callback, 2000);
} else if (err.message.startsWith('Request Timeout')) {
Services.logger.error(`Failed connecting to Elasticsearch "${this.config.host || this.config.hosts.join(', ')}": ${err.message}. Retrying...`);
setTimeout(callback, 2000);
} else {
error = err;
Services.logger.emergency(`Connection to ElasticSearch failed: ${err.message}`);
setImmediate(callback);
}
return null;
});
}, () => this.connected === false && error === false, () => {
if (error) {
this.emit('error', error);
} else {
if (this.reconnecting === true) {
this.emit('reconnected');
} else {
this.emit('ready');
}
this.reconnecting = false;
}
});
}
async [processSchemaModificationMethod] (applicationId, modifications) {
if (modifications.added.schema) {
const addedModels = Object.keys(modifications.added.schema);
await addedModels.reduce(async (promise, modelName) => {
await promise;
try {
await this.connection.indices.create({
index: `${constants.CHANNEL_KEY_PREFIX}-${applicationId}-${modelName}`
});
Services.logger.debug(`Successfully created index: "${constants.CHANNEL_KEY_PREFIX}-${applicationId}-${modelName}"`);
} catch (err) {
Services.logger.warning(`Index already exists: "${constants.CHANNEL_KEY_PREFIX}-${applicationId}-${modelName}"`);
}
return Promise.resolve();
}, Promise.resolve());
}
if (modifications.deleted.schema) {
const removedModels = Object.keys(modifications.deleted.schema);
const indicesToRemove = removedModels.map(modelName => `${constants.CHANNEL_KEY_PREFIX}-${applicationId}-${modelName}`);
try {
await this.connection.indices.delete({
index: indicesToRemove
});
Services.logger.debug(`Successfully removed indices: "${indicesToRemove}"`);
} catch (err) {
Services.logger.warning(`Error when trying to remove indices: ${err}`);
}
}
}
/**
*
* @param {FilterBuilder} builder
* @return {Object} The result of <code>builder.build()</code> but with a few translations for ES
*/
getQueryObject (builder) {
const translationMappings = {
is: 'term',
not: 'not',
exists: 'exists',
range: 'range',
in_array: 'terms',
like: 'regexp'
};
function Translate (node) {
node.children.forEach(child => {
if (child instanceof BuilderNode) {
Translate(child);
} else {
let replaced = Object.keys(child)[0];
if (translationMappings[replaced]) {
// 'not' contains a filter name
if (replaced === 'not') {
let secondReplaced = Object.keys(child[replaced])[0];
if (translationMappings[secondReplaced] !== secondReplaced) {
child[replaced][translationMappings[secondReplaced]] = cloneObject(child[replaced][secondReplaced]);
delete child[replaced][secondReplaced];
}
} else if (replaced === 'like') {
child[translationMappings[replaced]] = cloneObject(child[replaced]);
let fieldObj = {};
Object.keys(child[translationMappings[replaced]]).forEach(field => {
fieldObj[field] = `.*${escapeRegExp(child[translationMappings[replaced]][field])}.*`;
});
child[translationMappings[replaced]] = fieldObj;
delete child[replaced];
} else if (translationMappings[replaced] !== replaced) {
child[translationMappings[replaced]] = cloneObject(child[replaced]);
delete child[replaced];
}
}
}
});
}
Translate(builder.root);
return builder.build();
}
async getObjects (items) {
if (!Array.isArray(items) || items.length === 0) {
throw new NexxusError(NexxusError.errors.InvalidFieldValue, 'ElasticSearchDB.getObjects: "ids" should be a non-empty array');
}
const docs = items.map(object => {
let index;
switch (object.type) {
case 'application':
case 'admin': {
index = `${constants.CHANNEL_KEY_PREFIX}-${object.type}`;
break;
}
default: {
index = `${constants.CHANNEL_KEY_PREFIX}-${object.application_id}-${object.type}`;
}
}
return {
_id: object.id,
_index: index
};
}, this);
const results = await this.connection.mget({
body: {
docs
}
});
let errors = [];
let objects = [];
let versions = new Map();
results.docs.forEach(result => {
if (result.found) {
objects.push(result._source);
versions.set(result._id, result._version);
} else {
errors.push(new NexxusError(NexxusError.errors.ObjectNotFound, [result._id]));
}
});
return {errors, results: objects, versions};
}
async searchObjects (options) {
let index;
const reqBody = {
query: {
filtered: {
filter: {}
}
}
};
switch (options.modelName) {
case 'application':
case 'admin': {
index = `${constants.CHANNEL_KEY_PREFIX}-${options.modelName}`;
break;
}
default: {
if (Array.isArray(options.modelName)) {
index = options.modelName.map(model => {
return `${constants.CHANNEL_KEY_PREFIX}-${options.applicationId}-${model}`;
}).join(',');
} else {
index = `${constants.CHANNEL_KEY_PREFIX}-${options.applicationId}-${options.modelName}`;
}
}
}
if (options.filters && !options.filters.isEmpty()) {
reqBody.query = this.getQueryObject(options.filters);
} else {
reqBody.query = {match_all: {}};
}
if (options.fields) {
if (!(options.scanFunction instanceof Function)) {
throw new NexxusError(NexxusError.errors.ServerFailure, ['searchObjects was provided with fields but no scanFunction']);
}
let hitsCollected = 0;
let response = await this.connection.search({
index,
body: reqBody,
scroll: '10s',
fields: options.fields,
size: 1024
});
do {
let objects = [];
hitsCollected += response.hits.hits.length;
response.hits.hits.forEach(hit => {
let obj = {};
for (const f in hit.fields) {
obj[f] = hit.fields[f][0];
}
objects.push(obj);
});
if (response.hits.hits.length) {
await options.scanFunction(objects);
}
response = await this.connection.scroll({
scrollId: response._scroll_id,
scroll: '10s'
});
} while (response.hits.total !== hitsCollected);
return null;
}
if (options.sort) {
reqBody.sort = [];
Object.keys(options.sort).forEach(field => {
let sortObjectField = {};
if (!options.sort[field].type) {
sortObjectField[field] = { order: options.sort[field].order, unmapped_type: 'long' };
} else if (options.sort[field].type === 'geo') {
sortObjectField._geo_distance = {};
sortObjectField._geo_distance[field] = { lat: options.sort[field].poi.lat || 0.0, lon: options.sort[field].poi.long || 0.0 };
sortObjectField._geo_distance.order = options.sort[field].order;
}
reqBody.sort.push(sortObjectField);
});
}
const results = await this.connection.search({
index,
body: reqBody,
from: options.offset,
size: options.limit
});
return {results: results.hits.hits.map(object => object._source)};
}
async countObjects (modelName, options) {
let index;
let reqBody = {
query: {
filtered: {
filter: {}
}
}
};
switch (modelName) {
case 'application':
case 'admin': {
index = `${constants.CHANNEL_KEY_PREFIX}-${modelName}`;
break;
}
default: {
index = `${constants.CHANNEL_KEY_PREFIX}-${options.applicationId}-${modelName}`;
}
}
if (options.filters && !options.filters.isEmpty()) {
reqBody.query.filtered.filter = this.getQueryObject(options.filters);
}
if (options.aggregation) {
reqBody.aggs = { aggregation: options.aggregation };
const result = await this.connection.search({
index,
body: reqBody,
search_type: 'count',
queryCache: true
});
let countResult = { count: result.hits.total };
countResult.aggregation = result.aggregations.aggregation.value;
return Object.assign({ count: result.hits.total }, { aggregation: result.aggregations.aggregation.value });
}
const result = await this.connection.count({
index,
body: reqBody
});
return { count: result.count };
}
async createObjects (objects) {
if (!Array.isArray(objects) || objects.length === 0) {
throw new NexxusError('InvalidFieldValue', ['ElasticSearchDB.createObjects: "objects" should be a non-empty array']);
}
let shouldRefresh = false;
let bulk = [];
let errors = [];
await objects.reduce(async (promise, obj) => {
await promise;
let index;
switch (obj.type) {
case 'admin':
case 'application': {
index = `${constants.CHANNEL_KEY_PREFIX}-${obj.type}`;
shouldRefresh = true;
if (obj.schema) {
await Object.keys(obj.schema).reduce(async (p, modelName) => {
await p;
return this.connection.indices.create({
index: `${constants.CHANNEL_KEY_PREFIX}-${obj.id}-${modelName}`
});
}, Promise.resolve());
}
break;
}
default: {
index = `${constants.CHANNEL_KEY_PREFIX}-${obj.applicationId}-${obj.type}`;
}
}
bulk.push({ index: { _id: obj.id, _index: index, _type: '_doc' } });
bulk.push(obj);
return Promise.resolve();
}, Promise.resolve());
if (bulk.length !== objects.length * 2) {
Services.logger.warning(`ElasticSearchDB.createObjects: some objects were missing their "type" and "id" (${(objects.length - bulk.length / 2)} failed)`);
}
if (!bulk.length) {
return null;
}
const res = await this.connection.bulk({
body: bulk,
refresh: shouldRefresh
});
if (res.errors) {
res.items.forEach(error => {
errors.push(new NexxusError('ServerFailure', `Error creating ${error.index._type}: ${error.index.error}`));
});
}
return {errors};
}
async updateObjects (patches) {
if (!Array.isArray(patches) || patches.length === 0) {
throw new NexxusError(NexxusError.errors.InvalidFieldValue, 'ElasticSearchDB.updateObjects: "patches" should be a non-empty array');
}
let errors = [];
let shouldRefresh = false;
let finalResults = new Map();
async function getAndUpdate (objectPatches) {
let conflictedObjectPatches = [];
let objectsToGet = new Map();
let modifiedApplicationSchemas = new Map();
objectPatches.forEach(patch => {
if (!(patch instanceof NexxusPatch)) {
throw new TypeError('ElasticSearchDB.updateObjects: array elements must all be instances of NexxusPatch');
}
if (objectsToGet.has(patch.id)) {
objectsToGet.get(patch.id).patches.push(patch);
} else {
objectsToGet.set(patch.id, { id: patch.id, type: patch.model, application_id: patch.applicationId, patches: [patch] });
}
// we need to remember which application had its schema modified
if (patch.field === 'schema') {
modifiedApplicationSchemas.set(patch.id, true);
}
});
let bulk = [];
if (objectPatches.length === 0) {
return null;
}
let { errors: notFoundErrors, results, versions } = await this.getObjects(Array.from(objectsToGet.values()));
errors = errors.concat(notFoundErrors);
if (!results || !results.length) {
return null;
} |
results = results.map(dbObject => {
return utils.getProperModel(dbObject);
});
results.forEach(model => {
const {diff, detailedDiff} = NexxusPatch.applyPatches(objectsToGet.get(model.properties.id).patches, model);
let index;
if (modifiedApplicationSchemas.has(model.properties.id)) {
try {
// we don't want this to delay other writes
this[processSchemaModificationMethod](model.properties.id, detailedDiff);
} catch (err) {
Services.logger.warning(`Unable to process schema modifications: ${err}`);
}
}
finalResults.set(model.properties.id, model);
switch (model.properties.type) {
case 'application':
case 'admin': {
index = `${constants.CHANNEL_KEY_PREFIX}-${model.properties.type}`;
shouldRefresh = true;
break;
}
default: {
index = `${constants.CHANNEL_KEY_PREFIX}-${model.properties.application_id}-${model.properties.type}`;
}
}
bulk.push({ update: { _id: model.properties.id, _version: versions[model.properties.id], _index: index, _type: '_doc' } });
bulk.push({ doc: diff });
});
const res = await this.connection.bulk({
body: bulk,
refresh: shouldRefresh
});
if (res.errors) {
res.items.forEach(error => {
if (error.update.status === 409) {
objectsToGet.get(error.update._id).patches.forEach(patch => {
conflictedObjectPatches.push(patch);
});
} else {
errors.push(new Error(`Failed to update ${objectsToGet.get(error.update._id).type} with ID ${error.update._id}: ${error.update.error.reason}`));
}
});
}
if (conflictedObjectPatches.length) {
Services.logger.debug(`Retry-on-conflict object count: ${conflictedObjectPatches.length}`);
return getAndUpdate.call(this, conflictedObjectPatches);
}
return null;
}
await getAndUpdate.call(this, patches);
return {errors, results: Array.from(finalResults.values())};
}
async deleteObjects (objects) {
if (!(objects instanceof Map)) {
throw new NexxusError(NexxusError.errors.InvalidFieldValue, 'deleteObjects must be supplied a Map');
}
let errors = [];
let deleted = [];
let bulk = [];
let shouldRefresh = false;
objects.forEach((object, id) => {
if (typeof id !== 'string') {
errors.push(new NexxusError(NexxusError.errors.InvalidFieldValue,
`object with ID "${id}" supplied for deleteObjects is not a valid model type`));
return null;
}
let index;
switch (object.type) {
case 'application':
case 'admin': {
index = `${constants.CHANNEL_KEY_PREFIX}-${object.type}`;
shouldRefresh = true;
break;
}
default: {
index = `${constants.CHANNEL_KEY_PREFIX}-${object.application_id}-${id}`;
}
}
return bulk.push({ delete: { _index: index, _id: id, _type: '_doc' } });
});
if (bulk.length === 0) {
return {errors};
}
const results = await this.connection.bulk({
body: bulk,
refresh: shouldRefresh
});
results.items.forEach(result => {
if (result.delete.result === 'not_found') {
errors.push(new NexxusError(NexxusError.errors.ObjectNotFound, [result.delete._id]));
} else {
deleted.push(result.delete._id);
}
});
return {errors, results: deleted};
}
}
function escapeRegExp (str) {
return str.replace(/[-[\]/{}()*+?.\\^$|]/g, '\\$&');
}
module.exports = ElasticSearchDB; | random_line_split | |
index.js | //@ts-check
/**
* Utility function
*
* @param {Array} arrA
* @param {Array} arrB
* @returns {Object}
*/
function zipObject(arrA, arrB) {
const zip = (arrX, arrY) => arrX.map((x, i) => [x, arrY[i]]);
return Object.fromEntries(zip(arrA, arrB));
}
/**
* A basic Promise-based and queue-based Semaphore
*/
const Semaphore = class SillySemaphore {
constructor(permits) {
this._permits = permits;
this._queue = [];
}
getPosition(acquirer) {
const idx = this._queue.findIndex(entry => entry.acquirer === acquirer);
return idx + 1; // to get rid of '-1'
}
async acquire(acquirer) {
return new Promise( (resolve, reject) => {
this._queue.push({
acquirer,
resolve,
reject
});
this._maybeNotify();
});
}
async release(_acquirer) {
this._permits++;
this._maybeNotify();
}
/**
* Reject all pending promises and nullify the 'queue'
* so that future calls fail...
*/
die() {
let entry;
while (entry = this._queue.pop()) {
entry.reject();
}
this._queue = null;
}
_maybeNotify() {
if (this._permits > 0) {
const entry = this._queue.shift();
if (entry) {
this._permits--;
entry.resolve();
}
}
}
}
class Sim {
static userVars = {};
static userAlgos = {};
static ctrl = null;
static lane1 = [];
static lane2 = [];
static crossing = 0;
static ui = {};
static creatorInterval = null;
static start() {
// if (Sim.ui.$sim === undefined) Sim.resetUI();
Sim.clearErrors();
Sim.loadUserInputs();
Sim.ui.$fieldset.disabled = true;
Sim.ctrl = new Controller();
Sim.ctrl.run();
Sim.initCreator();
Sim.redraw();
}
static stop() {
function freezeSimUI() {
// To replace all elements so no one can alter the current state
Sim.ui.$sim.outerHTML = Sim.ui.$sim.outerHTML;
// To signal that they no longer reference actual DOM elements and free them maybe(?)
Sim.ui = {};
}
function freeVariables() {
// Kill all user defined semaphores
Object.entries(Sim.userVars)
.map(entry => entry[1])
.filter(val => val instanceof Semaphore)
.forEach(sema => sema.die());
Sim.userVars = {};
}
function freeThreads() {
// TODO:
// - kill "threads" (maybe simply call .destory() of each existing Traverser)
// - cancel Controller 'ctrl' (AFAIK you can not cancel Promises...)
}
Sim.ui.$fieldset.disabled = false;
clearInterval(Sim.creatorInterval);
freezeSimUI();
//freeThreads();
//freeVariables();
}
static setup() {
const $ = str => document.querySelector(str);
Object.assign(Sim.ui, {
$form: $('form'),
$fieldset: $('form fieldset'),
$semaNames: $('#sema-names'),
$semaVals: $('#sema-vals'),
$intNames: $('#int-names'),
$intVals: $('#int-vals'),
$controller: $('#controller'),
$traverser1: $('#traverser1'),
$traverser2: $('#traverser2'),
$sim: $('#sim'),
$intersection: $('#intersection'),
})
Sim.ui.$form.onsubmit = (ev) => {
ev.preventDefault();
Sim.start();
}
$('#btn-load-attempt').onclick = (ev) => {
ev.preventDefault();
Sim.loadPreset(Sim.presets.myAttempt);
}
$('#btn-load-correct').onclick = (ev) => {
ev.preventDefault();
Sim.loadPreset(Sim.presets.correctAnswer);
}
}
static loadUserInputs() {
const {ui} = Sim;
// userAlgos...
Object.assign(Sim.userAlgos,
{
controller: ui.$controller.value,
traverser1: ui.$traverser1.value,
traverser2: ui.$traverser2.value,
}
)
// userVars...
const SEP = /,\s*/; // separator
const userSemas = zipObject(
ui.$semaNames.value.split(SEP),
ui.$semaVals.value.split(SEP).map(val => new Semaphore(Number(val)))
);
const userInts = zipObject(
ui.$intNames.value.split(SEP),
ui.$intVals.value.split(SEP).map(val => Number(val))
);
Object.assign(Sim.userVars, userSemas, userInts);
}
static initCreator() {
// maybe create a new instance of Traverser every second
Sim.creatorInterval = setInterval(Sim.maybeCreateTraverser, 1 * 1000);
}
/**
* Maybe create a new instance of Traverser and run it
* @returns {Traverser} The newly created traverser or null.
*/
static maybeCreateTraverser() {
// maybe not
if (Math.random() < 0.5) {
return null;
}
if (Math.random() < 0.5) {
if (Sim.lane1.length < Traverser1.MAX) {
const t1 = new Traverser1();
Sim.lane1.push(t1);
t1.run();
return t1;
}
} else {
if (Sim.lane2.length < Traverser2.MAX) {
const t2 = new Traverser2();
Sim.lane2.push(t2);
t2.run();
return t2;
}
}
// no room for a new 'Traverser' in the randomly chosen 'lane'
return null;
}
static redraw() {
// Just update 'data-*' and '--pos' values, and let CSS take care of the rest.
// Traffic light
const {ui, ctrl, userVars} = Sim;
ui.$sim.dataset.light = userVars.light;
ui.$sim.dataset.ctrlQueued = ctrl.orderVec.some(sema => userVars[sema].getPosition(ctrl) > 0);
// Lanes
const {lane1, lane2, redrawTraverser} = Sim;
lane1.sort(Traverser.compareTraversers);
lane2.sort(Traverser.compareTraversers);
lane1.forEach(redrawTraverser);
lane2.forEach(redrawTraverser);
// FIXME: Maybe it's better to use Proxy(userVars) and redraw after its attributes are accessed..
// Redraw before each repaint
requestAnimationFrame(Sim.redraw);
}
/**
* Update Traverser
*
* @param {Traverser} t
* @param {number} i - index
*/
static redrawTraverser(t, i) {
t.$elem.style.setProperty('--pos', i);
t.$elem.title =
`${t.name}\n\n` +
`orderVec: {${t.orderVec.join(', ')}}\n` +
`waitVec: {${t.getWaitVec().join(', ')}}`;
}
static showError(algoSource, message) {
Sim.ui[ '$' + algoSource ].setAttribute('title', message);
Sim.stop();
}
static clearErrors() {
Sim.ui.$form.querySelectorAll('[title]').forEach($x => $x.removeAttribute('title'));
}
/**
* Populate UI inputs with preset data
*
* @param {object} preset
*/
static loadPreset(preset) {
const keys = 'semaNames semaVals intNames intVals controller traverser1 traverser2'.split(' ');
for (const key of keys) {
Sim.ui['$' + key].value = preset[key];
}
}
}
class Algorithm {
/**
* @param {String} algoSource - "controller", "traverser1", or "traverser2"
*/
constructor(algoSource) {
this.algoSource = algoSource;
this.userAlgo = Sim.userAlgos[algoSource];
this.orderVec = Algorithm.parseOrderVector(this.userAlgo);
}
async run() {
const AsyncFunction = Object.getPrototypeOf(async function(){}).constructor;
try {
const asyncFunc = new AsyncFunction(`
with (Sim.userVars) {
${ Algorithm.awaitifyThis(this.userAlgo) }
}
`);
await asyncFunc.call(this);
} catch (userError) {
console.error('userError', userError); // for actual debugging
Sim.showError(this.algoSource, userError.message);
}
}
/**
* Replace function calls with `await`ed method invocations associated with `this` object.
*
* @param {string} code
* @returns {string} Updated code
*/
static awaitifyThis(code) {
// Prefix "p", "v", "sleep", and "traverse" calls with `await` and attach them to `this`
return code.replace(/\b(p|v|sleep|traverse)\(/g, 'await this.$1(');
}
async | (x) {
await this.sleep(0);
await x.acquire(this);
}
async v(x) {
await this.sleep(0);
await x.release(this);
}
async sleep(secs) {
return new Promise((resolve, _reject) => {
setTimeout(resolve, secs * 1000);
});
}
/**
* A vehicle's place in line is determined by a vector of priorities
* which are based on calls to p(...)
*
* @todo Should dedup before returning?
*
* @param {string} code
* @return {Array<string>}
*/
static parseOrderVector(code) {
const pCalls = code.match(/\bp\(\s*(\w+)\s*\)/g) || [];
const acquiredSemaphores = pCalls.map(x => x.slice(2, -1).trim());
return acquiredSemaphores;
}
}
class Controller extends Algorithm {
constructor() {
super('controller');
}
}
class Traverser extends Algorithm {
static counter = 0;
static freeColors = 'blue coral darkkhaki firebrick yellowgreen gray skyblue teal orange pink purple yellow'.split(' ');
constructor(algoSource) {
super(algoSource);
this.id = this.getUniqueId();
this.color = this.getUniqueColor();
this.type = Math.random() < 0.25 ? 'truck' : 'car'; // 25% chance of being a truck
this.name = `${this.color} ${this.type} #${this.id}`;
this.$elem = null;
// FIXME: These attributes are set by children
this.lane = null;
this.dir = this.algoSource === 'traverser1' ? 'south' : 'west';
this.initialPos = 'traverser1' ? '4' : '7'; // the very end of the line
}
/**
* Create a new visual element and display it on page
*/
initElem() {
this.$elem = document.createElement('span');
this.$elem.classList.add('vehicle', this.type, this.dir);
this.$elem.title = this.name; // (will be updated)
this.$elem.style.setProperty('--pos', this.initialPos); // (will be updated)
this.$elem.style.setProperty('--color', this.color);
Sim.ui.$intersection.append(this.$elem);
return this.$elem; // always return something
}
destroyElem() {
Traverser.freeColors.push(this.color);
this.$elem.remove();
}
destroy() {
this.destroyElem();
this.lane.splice(this.lane.findIndex(t => t === this), 1);
}
async traverse() {
// "Restarting the engine takes some time" za3ma
// Adds an element of "randomness"
await this.sleep(Math.random());
// Cross the intersection then "keep moving" and fade away...
await this.enterIntersection();
await this.leaveIntersection();
}
async enterIntersection() {
Sim.crossing++;
this.$elem.dataset.state = 'leaving';
this.assertNoCollision();
await this.sleep(0.5);
}
async leaveIntersection() {
await this.sleep(1);
this.assertNoCollision();
this.destroy();
Sim.crossing--;
}
assertNoCollision() {
const happened = Sim.crossing > 1; // more than one vehicle crossing the intersection
if (happened) {
Sim.ui.$sim.dataset.state = 'error';
throw new Error('Collision!');
}
}
/**
* @returns {Array<number>}
*/
getWaitVec() {
const vec = this.orderVec.map(semaName => Sim.userVars[semaName].getPosition(this));
return vec;
}
getUniqueId() {
// FIXME: Should throw error when about to overflow? Althrough this sim won't run for a long time for this to happen
return (++Traverser.counter).toString(36).toUpperCase();
}
getUniqueColor() {
return Traverser.freeColors.shift();
}
/**
*
* @param {Traverser} a
* @param {Traverser} b
* @returns {number}
*/
static compareTraversers(a, b) {
return Traverser.compareVecs( a.getWaitVec(), b.getWaitVec() );
}
/**
* Sort wait vecs in an ascending order
*
* @param {Array<number>} vecA
* @param {Array<number>} vecB
* @returns {number}
*/
static compareVecs(vecA, vecB) {
for (let i = 0; i < vecA.length; i++) {
if (vecA[i] - vecB[i] !== 0) {
return vecA[i] - vecB[i];
}
}
return 0;
}
}
class Traverser1 extends Traverser {
static MAX = 4;
constructor() {
super('traverser1');
this.initElem();
this.lane = Sim.lane1;
}
}
class Traverser2 extends Traverser {
static MAX = 7;
constructor() {
super('traverser2');
this.initElem();
this.lane = Sim.lane2;
}
}
Sim.setup();
| p | identifier_name |
index.js | //@ts-check
/**
* Utility function
*
* @param {Array} arrA
* @param {Array} arrB
* @returns {Object}
*/
function zipObject(arrA, arrB) {
const zip = (arrX, arrY) => arrX.map((x, i) => [x, arrY[i]]);
return Object.fromEntries(zip(arrA, arrB));
}
/**
* A basic Promise-based and queue-based Semaphore
*/
const Semaphore = class SillySemaphore {
constructor(permits) {
this._permits = permits;
this._queue = [];
}
getPosition(acquirer) {
const idx = this._queue.findIndex(entry => entry.acquirer === acquirer);
return idx + 1; // to get rid of '-1'
}
async acquire(acquirer) {
return new Promise( (resolve, reject) => {
this._queue.push({
acquirer,
resolve,
reject
});
this._maybeNotify();
});
}
async release(_acquirer) {
this._permits++;
this._maybeNotify();
}
/**
* Reject all pending promises and nullify the 'queue'
* so that future calls fail...
*/
die() {
let entry;
while (entry = this._queue.pop()) {
entry.reject();
}
this._queue = null;
}
_maybeNotify() {
if (this._permits > 0) {
const entry = this._queue.shift();
if (entry) {
this._permits--;
entry.resolve();
}
}
}
}
class Sim {
static userVars = {};
static userAlgos = {};
static ctrl = null;
static lane1 = [];
static lane2 = [];
static crossing = 0;
static ui = {};
static creatorInterval = null;
static start() {
// if (Sim.ui.$sim === undefined) Sim.resetUI();
Sim.clearErrors();
Sim.loadUserInputs();
Sim.ui.$fieldset.disabled = true;
Sim.ctrl = new Controller();
Sim.ctrl.run();
Sim.initCreator();
Sim.redraw();
}
static stop() {
function freezeSimUI() {
// To replace all elements so no one can alter the current state
Sim.ui.$sim.outerHTML = Sim.ui.$sim.outerHTML;
// To signal that they no longer reference actual DOM elements and free them maybe(?)
Sim.ui = {};
}
function freeVariables() {
// Kill all user defined semaphores
Object.entries(Sim.userVars)
.map(entry => entry[1])
.filter(val => val instanceof Semaphore)
.forEach(sema => sema.die());
Sim.userVars = {};
}
function freeThreads() {
// TODO:
// - kill "threads" (maybe simply call .destory() of each existing Traverser)
// - cancel Controller 'ctrl' (AFAIK you can not cancel Promises...)
}
Sim.ui.$fieldset.disabled = false;
clearInterval(Sim.creatorInterval);
freezeSimUI();
//freeThreads();
//freeVariables();
}
static setup() {
const $ = str => document.querySelector(str);
Object.assign(Sim.ui, {
$form: $('form'),
$fieldset: $('form fieldset'),
$semaNames: $('#sema-names'),
$semaVals: $('#sema-vals'),
$intNames: $('#int-names'),
$intVals: $('#int-vals'),
$controller: $('#controller'),
$traverser1: $('#traverser1'),
$traverser2: $('#traverser2'),
$sim: $('#sim'),
$intersection: $('#intersection'),
})
Sim.ui.$form.onsubmit = (ev) => {
ev.preventDefault();
Sim.start();
}
$('#btn-load-attempt').onclick = (ev) => {
ev.preventDefault();
Sim.loadPreset(Sim.presets.myAttempt);
}
$('#btn-load-correct').onclick = (ev) => {
ev.preventDefault();
Sim.loadPreset(Sim.presets.correctAnswer);
}
}
static loadUserInputs() {
const {ui} = Sim;
// userAlgos...
Object.assign(Sim.userAlgos,
{
controller: ui.$controller.value,
traverser1: ui.$traverser1.value,
traverser2: ui.$traverser2.value,
}
)
// userVars...
const SEP = /,\s*/; // separator
const userSemas = zipObject(
ui.$semaNames.value.split(SEP),
ui.$semaVals.value.split(SEP).map(val => new Semaphore(Number(val)))
);
const userInts = zipObject(
ui.$intNames.value.split(SEP),
ui.$intVals.value.split(SEP).map(val => Number(val))
);
Object.assign(Sim.userVars, userSemas, userInts);
}
static initCreator() {
// maybe create a new instance of Traverser every second
Sim.creatorInterval = setInterval(Sim.maybeCreateTraverser, 1 * 1000);
}
/**
* Maybe create a new instance of Traverser and run it
* @returns {Traverser} The newly created traverser or null.
*/
static maybeCreateTraverser() {
// maybe not
if (Math.random() < 0.5) {
return null;
}
if (Math.random() < 0.5) {
if (Sim.lane1.length < Traverser1.MAX) {
const t1 = new Traverser1();
Sim.lane1.push(t1);
t1.run();
return t1;
}
} else {
if (Sim.lane2.length < Traverser2.MAX) {
const t2 = new Traverser2();
Sim.lane2.push(t2);
t2.run();
return t2;
}
}
// no room for a new 'Traverser' in the randomly chosen 'lane'
return null;
}
static redraw() |
/**
* Update Traverser
*
* @param {Traverser} t
* @param {number} i - index
*/
static redrawTraverser(t, i) {
t.$elem.style.setProperty('--pos', i);
t.$elem.title =
`${t.name}\n\n` +
`orderVec: {${t.orderVec.join(', ')}}\n` +
`waitVec: {${t.getWaitVec().join(', ')}}`;
}
static showError(algoSource, message) {
Sim.ui[ '$' + algoSource ].setAttribute('title', message);
Sim.stop();
}
static clearErrors() {
Sim.ui.$form.querySelectorAll('[title]').forEach($x => $x.removeAttribute('title'));
}
/**
* Populate UI inputs with preset data
*
* @param {object} preset
*/
static loadPreset(preset) {
const keys = 'semaNames semaVals intNames intVals controller traverser1 traverser2'.split(' ');
for (const key of keys) {
Sim.ui['$' + key].value = preset[key];
}
}
}
class Algorithm {
/**
* @param {String} algoSource - "controller", "traverser1", or "traverser2"
*/
constructor(algoSource) {
this.algoSource = algoSource;
this.userAlgo = Sim.userAlgos[algoSource];
this.orderVec = Algorithm.parseOrderVector(this.userAlgo);
}
async run() {
const AsyncFunction = Object.getPrototypeOf(async function(){}).constructor;
try {
const asyncFunc = new AsyncFunction(`
with (Sim.userVars) {
${ Algorithm.awaitifyThis(this.userAlgo) }
}
`);
await asyncFunc.call(this);
} catch (userError) {
console.error('userError', userError); // for actual debugging
Sim.showError(this.algoSource, userError.message);
}
}
/**
* Replace function calls with `await`ed method invocations associated with `this` object.
*
* @param {string} code
* @returns {string} Updated code
*/
static awaitifyThis(code) {
// Prefix "p", "v", "sleep", and "traverse" calls with `await` and attach them to `this`
return code.replace(/\b(p|v|sleep|traverse)\(/g, 'await this.$1(');
}
async p(x) {
await this.sleep(0);
await x.acquire(this);
}
async v(x) {
await this.sleep(0);
await x.release(this);
}
async sleep(secs) {
return new Promise((resolve, _reject) => {
setTimeout(resolve, secs * 1000);
});
}
/**
* A vehicle's place in line is determined by a vector of priorities
* which are based on calls to p(...)
*
* @todo Should dedup before returning?
*
* @param {string} code
* @return {Array<string>}
*/
static parseOrderVector(code) {
const pCalls = code.match(/\bp\(\s*(\w+)\s*\)/g) || [];
const acquiredSemaphores = pCalls.map(x => x.slice(2, -1).trim());
return acquiredSemaphores;
}
}
class Controller extends Algorithm {
constructor() {
super('controller');
}
}
class Traverser extends Algorithm {
static counter = 0;
static freeColors = 'blue coral darkkhaki firebrick yellowgreen gray skyblue teal orange pink purple yellow'.split(' ');
constructor(algoSource) {
super(algoSource);
this.id = this.getUniqueId();
this.color = this.getUniqueColor();
this.type = Math.random() < 0.25 ? 'truck' : 'car'; // 25% chance of being a truck
this.name = `${this.color} ${this.type} #${this.id}`;
this.$elem = null;
// FIXME: These attributes are set by children
this.lane = null;
this.dir = this.algoSource === 'traverser1' ? 'south' : 'west';
this.initialPos = 'traverser1' ? '4' : '7'; // the very end of the line
}
/**
* Create a new visual element and display it on page
*/
initElem() {
this.$elem = document.createElement('span');
this.$elem.classList.add('vehicle', this.type, this.dir);
this.$elem.title = this.name; // (will be updated)
this.$elem.style.setProperty('--pos', this.initialPos); // (will be updated)
this.$elem.style.setProperty('--color', this.color);
Sim.ui.$intersection.append(this.$elem);
return this.$elem; // always return something
}
destroyElem() {
Traverser.freeColors.push(this.color);
this.$elem.remove();
}
destroy() {
this.destroyElem();
this.lane.splice(this.lane.findIndex(t => t === this), 1);
}
async traverse() {
// "Restarting the engine takes some time" za3ma
// Adds an element of "randomness"
await this.sleep(Math.random());
// Cross the intersection then "keep moving" and fade away...
await this.enterIntersection();
await this.leaveIntersection();
}
async enterIntersection() {
Sim.crossing++;
this.$elem.dataset.state = 'leaving';
this.assertNoCollision();
await this.sleep(0.5);
}
async leaveIntersection() {
await this.sleep(1);
this.assertNoCollision();
this.destroy();
Sim.crossing--;
}
assertNoCollision() {
const happened = Sim.crossing > 1; // more than one vehicle crossing the intersection
if (happened) {
Sim.ui.$sim.dataset.state = 'error';
throw new Error('Collision!');
}
}
/**
* @returns {Array<number>}
*/
getWaitVec() {
const vec = this.orderVec.map(semaName => Sim.userVars[semaName].getPosition(this));
return vec;
}
getUniqueId() {
// FIXME: Should throw error when about to overflow? Althrough this sim won't run for a long time for this to happen
return (++Traverser.counter).toString(36).toUpperCase();
}
getUniqueColor() {
return Traverser.freeColors.shift();
}
/**
*
* @param {Traverser} a
* @param {Traverser} b
* @returns {number}
*/
static compareTraversers(a, b) {
return Traverser.compareVecs( a.getWaitVec(), b.getWaitVec() );
}
/**
* Sort wait vecs in an ascending order
*
* @param {Array<number>} vecA
* @param {Array<number>} vecB
* @returns {number}
*/
static compareVecs(vecA, vecB) {
for (let i = 0; i < vecA.length; i++) {
if (vecA[i] - vecB[i] !== 0) {
return vecA[i] - vecB[i];
}
}
return 0;
}
}
class Traverser1 extends Traverser {
static MAX = 4;
constructor() {
super('traverser1');
this.initElem();
this.lane = Sim.lane1;
}
}
class Traverser2 extends Traverser {
static MAX = 7;
constructor() {
super('traverser2');
this.initElem();
this.lane = Sim.lane2;
}
}
Sim.setup();
| {
// Just update 'data-*' and '--pos' values, and let CSS take care of the rest.
// Traffic light
const {ui, ctrl, userVars} = Sim;
ui.$sim.dataset.light = userVars.light;
ui.$sim.dataset.ctrlQueued = ctrl.orderVec.some(sema => userVars[sema].getPosition(ctrl) > 0);
// Lanes
const {lane1, lane2, redrawTraverser} = Sim;
lane1.sort(Traverser.compareTraversers);
lane2.sort(Traverser.compareTraversers);
lane1.forEach(redrawTraverser);
lane2.forEach(redrawTraverser);
// FIXME: Maybe it's better to use Proxy(userVars) and redraw after its attributes are accessed..
// Redraw before each repaint
requestAnimationFrame(Sim.redraw);
} | identifier_body |
index.js | //@ts-check
/**
* Utility function
*
* @param {Array} arrA
* @param {Array} arrB
* @returns {Object}
*/
function zipObject(arrA, arrB) {
const zip = (arrX, arrY) => arrX.map((x, i) => [x, arrY[i]]);
return Object.fromEntries(zip(arrA, arrB));
}
/**
* A basic Promise-based and queue-based Semaphore
*/
const Semaphore = class SillySemaphore {
constructor(permits) {
this._permits = permits;
this._queue = [];
}
getPosition(acquirer) {
const idx = this._queue.findIndex(entry => entry.acquirer === acquirer);
return idx + 1; // to get rid of '-1'
}
async acquire(acquirer) {
return new Promise( (resolve, reject) => {
this._queue.push({
acquirer,
resolve,
reject
});
this._maybeNotify();
});
}
async release(_acquirer) {
this._permits++;
this._maybeNotify();
}
/**
* Reject all pending promises and nullify the 'queue'
* so that future calls fail...
*/
die() {
let entry;
while (entry = this._queue.pop()) {
entry.reject();
}
this._queue = null;
}
_maybeNotify() {
if (this._permits > 0) |
}
}
class Sim {
static userVars = {};
static userAlgos = {};
static ctrl = null;
static lane1 = [];
static lane2 = [];
static crossing = 0;
static ui = {};
static creatorInterval = null;
static start() {
// if (Sim.ui.$sim === undefined) Sim.resetUI();
Sim.clearErrors();
Sim.loadUserInputs();
Sim.ui.$fieldset.disabled = true;
Sim.ctrl = new Controller();
Sim.ctrl.run();
Sim.initCreator();
Sim.redraw();
}
static stop() {
function freezeSimUI() {
// To replace all elements so no one can alter the current state
Sim.ui.$sim.outerHTML = Sim.ui.$sim.outerHTML;
// To signal that they no longer reference actual DOM elements and free them maybe(?)
Sim.ui = {};
}
function freeVariables() {
// Kill all user defined semaphores
Object.entries(Sim.userVars)
.map(entry => entry[1])
.filter(val => val instanceof Semaphore)
.forEach(sema => sema.die());
Sim.userVars = {};
}
function freeThreads() {
// TODO:
// - kill "threads" (maybe simply call .destory() of each existing Traverser)
// - cancel Controller 'ctrl' (AFAIK you can not cancel Promises...)
}
Sim.ui.$fieldset.disabled = false;
clearInterval(Sim.creatorInterval);
freezeSimUI();
//freeThreads();
//freeVariables();
}
static setup() {
const $ = str => document.querySelector(str);
Object.assign(Sim.ui, {
$form: $('form'),
$fieldset: $('form fieldset'),
$semaNames: $('#sema-names'),
$semaVals: $('#sema-vals'),
$intNames: $('#int-names'),
$intVals: $('#int-vals'),
$controller: $('#controller'),
$traverser1: $('#traverser1'),
$traverser2: $('#traverser2'),
$sim: $('#sim'),
$intersection: $('#intersection'),
})
Sim.ui.$form.onsubmit = (ev) => {
ev.preventDefault();
Sim.start();
}
$('#btn-load-attempt').onclick = (ev) => {
ev.preventDefault();
Sim.loadPreset(Sim.presets.myAttempt);
}
$('#btn-load-correct').onclick = (ev) => {
ev.preventDefault();
Sim.loadPreset(Sim.presets.correctAnswer);
}
}
static loadUserInputs() {
const {ui} = Sim;
// userAlgos...
Object.assign(Sim.userAlgos,
{
controller: ui.$controller.value,
traverser1: ui.$traverser1.value,
traverser2: ui.$traverser2.value,
}
)
// userVars...
const SEP = /,\s*/; // separator
const userSemas = zipObject(
ui.$semaNames.value.split(SEP),
ui.$semaVals.value.split(SEP).map(val => new Semaphore(Number(val)))
);
const userInts = zipObject(
ui.$intNames.value.split(SEP),
ui.$intVals.value.split(SEP).map(val => Number(val))
);
Object.assign(Sim.userVars, userSemas, userInts);
}
static initCreator() {
// maybe create a new instance of Traverser every second
Sim.creatorInterval = setInterval(Sim.maybeCreateTraverser, 1 * 1000);
}
/**
* Maybe create a new instance of Traverser and run it
* @returns {Traverser} The newly created traverser or null.
*/
static maybeCreateTraverser() {
// maybe not
if (Math.random() < 0.5) {
return null;
}
if (Math.random() < 0.5) {
if (Sim.lane1.length < Traverser1.MAX) {
const t1 = new Traverser1();
Sim.lane1.push(t1);
t1.run();
return t1;
}
} else {
if (Sim.lane2.length < Traverser2.MAX) {
const t2 = new Traverser2();
Sim.lane2.push(t2);
t2.run();
return t2;
}
}
// no room for a new 'Traverser' in the randomly chosen 'lane'
return null;
}
static redraw() {
// Just update 'data-*' and '--pos' values, and let CSS take care of the rest.
// Traffic light
const {ui, ctrl, userVars} = Sim;
ui.$sim.dataset.light = userVars.light;
ui.$sim.dataset.ctrlQueued = ctrl.orderVec.some(sema => userVars[sema].getPosition(ctrl) > 0);
// Lanes
const {lane1, lane2, redrawTraverser} = Sim;
lane1.sort(Traverser.compareTraversers);
lane2.sort(Traverser.compareTraversers);
lane1.forEach(redrawTraverser);
lane2.forEach(redrawTraverser);
// FIXME: Maybe it's better to use Proxy(userVars) and redraw after its attributes are accessed..
// Redraw before each repaint
requestAnimationFrame(Sim.redraw);
}
/**
* Update Traverser
*
* @param {Traverser} t
* @param {number} i - index
*/
static redrawTraverser(t, i) {
t.$elem.style.setProperty('--pos', i);
t.$elem.title =
`${t.name}\n\n` +
`orderVec: {${t.orderVec.join(', ')}}\n` +
`waitVec: {${t.getWaitVec().join(', ')}}`;
}
static showError(algoSource, message) {
Sim.ui[ '$' + algoSource ].setAttribute('title', message);
Sim.stop();
}
static clearErrors() {
Sim.ui.$form.querySelectorAll('[title]').forEach($x => $x.removeAttribute('title'));
}
/**
* Populate UI inputs with preset data
*
* @param {object} preset
*/
static loadPreset(preset) {
const keys = 'semaNames semaVals intNames intVals controller traverser1 traverser2'.split(' ');
for (const key of keys) {
Sim.ui['$' + key].value = preset[key];
}
}
}
class Algorithm {
/**
* @param {String} algoSource - "controller", "traverser1", or "traverser2"
*/
constructor(algoSource) {
this.algoSource = algoSource;
this.userAlgo = Sim.userAlgos[algoSource];
this.orderVec = Algorithm.parseOrderVector(this.userAlgo);
}
async run() {
const AsyncFunction = Object.getPrototypeOf(async function(){}).constructor;
try {
const asyncFunc = new AsyncFunction(`
with (Sim.userVars) {
${ Algorithm.awaitifyThis(this.userAlgo) }
}
`);
await asyncFunc.call(this);
} catch (userError) {
console.error('userError', userError); // for actual debugging
Sim.showError(this.algoSource, userError.message);
}
}
/**
* Replace function calls with `await`ed method invocations associated with `this` object.
*
* @param {string} code
* @returns {string} Updated code
*/
static awaitifyThis(code) {
// Prefix "p", "v", "sleep", and "traverse" calls with `await` and attach them to `this`
return code.replace(/\b(p|v|sleep|traverse)\(/g, 'await this.$1(');
}
async p(x) {
await this.sleep(0);
await x.acquire(this);
}
async v(x) {
await this.sleep(0);
await x.release(this);
}
async sleep(secs) {
return new Promise((resolve, _reject) => {
setTimeout(resolve, secs * 1000);
});
}
/**
* A vehicle's place in line is determined by a vector of priorities
* which are based on calls to p(...)
*
* @todo Should dedup before returning?
*
* @param {string} code
* @return {Array<string>}
*/
static parseOrderVector(code) {
const pCalls = code.match(/\bp\(\s*(\w+)\s*\)/g) || [];
const acquiredSemaphores = pCalls.map(x => x.slice(2, -1).trim());
return acquiredSemaphores;
}
}
class Controller extends Algorithm {
constructor() {
super('controller');
}
}
class Traverser extends Algorithm {
static counter = 0;
static freeColors = 'blue coral darkkhaki firebrick yellowgreen gray skyblue teal orange pink purple yellow'.split(' ');
constructor(algoSource) {
super(algoSource);
this.id = this.getUniqueId();
this.color = this.getUniqueColor();
this.type = Math.random() < 0.25 ? 'truck' : 'car'; // 25% chance of being a truck
this.name = `${this.color} ${this.type} #${this.id}`;
this.$elem = null;
// FIXME: These attributes are set by children
this.lane = null;
this.dir = this.algoSource === 'traverser1' ? 'south' : 'west';
this.initialPos = 'traverser1' ? '4' : '7'; // the very end of the line
}
/**
* Create a new visual element and display it on page
*/
initElem() {
this.$elem = document.createElement('span');
this.$elem.classList.add('vehicle', this.type, this.dir);
this.$elem.title = this.name; // (will be updated)
this.$elem.style.setProperty('--pos', this.initialPos); // (will be updated)
this.$elem.style.setProperty('--color', this.color);
Sim.ui.$intersection.append(this.$elem);
return this.$elem; // always return something
}
destroyElem() {
Traverser.freeColors.push(this.color);
this.$elem.remove();
}
destroy() {
this.destroyElem();
this.lane.splice(this.lane.findIndex(t => t === this), 1);
}
async traverse() {
// "Restarting the engine takes some time" za3ma
// Adds an element of "randomness"
await this.sleep(Math.random());
// Cross the intersection then "keep moving" and fade away...
await this.enterIntersection();
await this.leaveIntersection();
}
async enterIntersection() {
Sim.crossing++;
this.$elem.dataset.state = 'leaving';
this.assertNoCollision();
await this.sleep(0.5);
}
async leaveIntersection() {
await this.sleep(1);
this.assertNoCollision();
this.destroy();
Sim.crossing--;
}
assertNoCollision() {
const happened = Sim.crossing > 1; // more than one vehicle crossing the intersection
if (happened) {
Sim.ui.$sim.dataset.state = 'error';
throw new Error('Collision!');
}
}
/**
* @returns {Array<number>}
*/
getWaitVec() {
const vec = this.orderVec.map(semaName => Sim.userVars[semaName].getPosition(this));
return vec;
}
getUniqueId() {
// FIXME: Should throw error when about to overflow? Althrough this sim won't run for a long time for this to happen
return (++Traverser.counter).toString(36).toUpperCase();
}
getUniqueColor() {
return Traverser.freeColors.shift();
}
/**
*
* @param {Traverser} a
* @param {Traverser} b
* @returns {number}
*/
static compareTraversers(a, b) {
return Traverser.compareVecs( a.getWaitVec(), b.getWaitVec() );
}
/**
* Sort wait vecs in an ascending order
*
* @param {Array<number>} vecA
* @param {Array<number>} vecB
* @returns {number}
*/
static compareVecs(vecA, vecB) {
for (let i = 0; i < vecA.length; i++) {
if (vecA[i] - vecB[i] !== 0) {
return vecA[i] - vecB[i];
}
}
return 0;
}
}
class Traverser1 extends Traverser {
static MAX = 4;
constructor() {
super('traverser1');
this.initElem();
this.lane = Sim.lane1;
}
}
class Traverser2 extends Traverser {
static MAX = 7;
constructor() {
super('traverser2');
this.initElem();
this.lane = Sim.lane2;
}
}
Sim.setup();
| {
const entry = this._queue.shift();
if (entry) {
this._permits--;
entry.resolve();
}
} | conditional_block |
index.js | //@ts-check
/**
* Utility function
*
* @param {Array} arrA
* @param {Array} arrB
* @returns {Object}
*/
function zipObject(arrA, arrB) {
const zip = (arrX, arrY) => arrX.map((x, i) => [x, arrY[i]]);
return Object.fromEntries(zip(arrA, arrB));
}
/** | constructor(permits) {
this._permits = permits;
this._queue = [];
}
/**
 * 1-based position of `acquirer` in this semaphore's wait queue.
 *
 * @param {Object} acquirer - the object that called acquire()
 * @returns {number} 1..n when queued, 0 when not waiting
 */
getPosition(acquirer) {
  const idx = this._queue.findIndex(entry => entry.acquirer === acquirer);
  return idx + 1; // to get rid of '-1'
}
/**
 * Request one permit.  The returned promise resolves when a permit is
 * granted (possibly immediately, via _maybeNotify) and rejects if the
 * semaphore is killed with die().
 *
 * @param {Object} acquirer - identity used later by getPosition()
 * @returns {Promise<void>}
 */
async acquire(acquirer) {
  return new Promise( (resolve, reject) => {
    this._queue.push({
      acquirer,
      resolve,
      reject
    });
    this._maybeNotify();
  });
}
/**
 * Return one permit and wake the head of the queue, if any.
 * The acquirer argument is unused (kept for interface symmetry with acquire).
 */
async release(_acquirer) {
  this._permits++;
  this._maybeNotify();
}
/**
 * Reject all pending promises and nullify the 'queue'
 * so that future calls fail...
 */
die() {
  let entry;
  // Drain the queue, rejecting each waiter; assignment-in-condition is intentional.
  while (entry = this._queue.pop()) {
    entry.reject();
  }
  // Null queue makes any later acquire()/release() throw, by design.
  this._queue = null;
}
// Grant one permit to the head of the wait queue when a permit is available.
// Safe to call any time; does nothing if no permits or no waiters.
_maybeNotify() {
  if (this._permits > 0) {
    const entry = this._queue.shift();
    if (entry) {
      this._permits--;
      entry.resolve();
    }
  }
}
}
/**
 * Global, static-only simulation facade: user-provided variables and
 * algorithm sources, lane/intersection state, cached DOM handles, and the
 * start/stop/redraw lifecycle.  Never instantiated.
 */
class Sim {
  // User-defined semaphores and ints, keyed by name (exposed to user code).
  static userVars = {};
  // Raw algorithm source strings: controller / traverser1 / traverser2.
  static userAlgos = {};
  // The running Controller instance (traffic-light logic).
  static ctrl = null;
  // Vehicles currently queued in each lane.
  static lane1 = [];
  static lane2 = [];
  // Number of vehicles inside the intersection right now (>1 means collision).
  static crossing = 0;
  // Cached DOM references ($-prefixed), populated by setup().
  static ui = {};
  // Handle of the interval that randomly spawns vehicles.
  static creatorInterval = null;
  /** Read the form, lock it, and start the controller + vehicle creator. */
  static start() {
    // if (Sim.ui.$sim === undefined) Sim.resetUI();
    Sim.clearErrors();
    Sim.loadUserInputs();
    Sim.ui.$fieldset.disabled = true;
    Sim.ctrl = new Controller();
    Sim.ctrl.run();
    Sim.initCreator();
    Sim.redraw();
  }
  /** Halt the simulation and freeze its DOM so state can no longer change. */
  static stop() {
    function freezeSimUI() {
      // To replace all elements so no one can alter the current state
      Sim.ui.$sim.outerHTML = Sim.ui.$sim.outerHTML;
      // To signal that they no longer reference actual DOM elements and free them maybe(?)
      Sim.ui = {};
    }
    function freeVariables() {
      // Kill all user defined semaphores
      Object.entries(Sim.userVars)
        .map(entry => entry[1])
        .filter(val => val instanceof Semaphore)
        .forEach(sema => sema.die());
      Sim.userVars = {};
    }
    function freeThreads() {
      // TODO:
      // - kill "threads" (maybe simply call .destory() of each existing Traverser)
      // - cancel Controller 'ctrl' (AFAIK you can not cancel Promises...)
    }
    Sim.ui.$fieldset.disabled = false;
    clearInterval(Sim.creatorInterval);
    freezeSimUI();
    //freeThreads();
    //freeVariables();
  }
  /** Cache DOM references and wire the form submit / preset buttons. */
  static setup() {
    const $ = str => document.querySelector(str);
    Object.assign(Sim.ui, {
      $form: $('form'),
      $fieldset: $('form fieldset'),
      $semaNames: $('#sema-names'),
      $semaVals: $('#sema-vals'),
      $intNames: $('#int-names'),
      $intVals: $('#int-vals'),
      $controller: $('#controller'),
      $traverser1: $('#traverser1'),
      $traverser2: $('#traverser2'),
      $sim: $('#sim'),
      $intersection: $('#intersection'),
    })
    Sim.ui.$form.onsubmit = (ev) => {
      ev.preventDefault();
      Sim.start();
    }
    // NOTE(review): Sim.presets is not defined in this chunk -- presumably
    // declared elsewhere in the file; confirm before relying on it.
    $('#btn-load-attempt').onclick = (ev) => {
      ev.preventDefault();
      Sim.loadPreset(Sim.presets.myAttempt);
    }
    $('#btn-load-correct').onclick = (ev) => {
      ev.preventDefault();
      Sim.loadPreset(Sim.presets.correctAnswer);
    }
  }
  /** Parse form fields into Sim.userAlgos (sources) and Sim.userVars (semaphores + ints). */
  static loadUserInputs() {
    const {ui} = Sim;
    // userAlgos...
    Object.assign(Sim.userAlgos,
      {
        controller: ui.$controller.value,
        traverser1: ui.$traverser1.value,
        traverser2: ui.$traverser2.value,
      }
    )
    // userVars...
    const SEP = /,\s*/; // separator
    const userSemas = zipObject(
      ui.$semaNames.value.split(SEP),
      ui.$semaVals.value.split(SEP).map(val => new Semaphore(Number(val)))
    );
    const userInts = zipObject(
      ui.$intNames.value.split(SEP),
      ui.$intVals.value.split(SEP).map(val => Number(val))
    );
    Object.assign(Sim.userVars, userSemas, userInts);
  }
  static initCreator() {
    // maybe create a new instance of Traverser every second
    Sim.creatorInterval = setInterval(Sim.maybeCreateTraverser, 1 * 1000);
  }
  /**
   * Maybe create a new instance of Traverser and run it
   * @returns {Traverser} The newly created traverser or null.
   */
  static maybeCreateTraverser() {
    // maybe not
    if (Math.random() < 0.5) {
      return null;
    }
    // 50/50 pick of lane; only spawn when the chosen lane has room.
    if (Math.random() < 0.5) {
      if (Sim.lane1.length < Traverser1.MAX) {
        const t1 = new Traverser1();
        Sim.lane1.push(t1);
        t1.run();
        return t1;
      }
    } else {
      if (Sim.lane2.length < Traverser2.MAX) {
        const t2 = new Traverser2();
        Sim.lane2.push(t2);
        t2.run();
        return t2;
      }
    }
    // no room for a new 'Traverser' in the randomly chosen 'lane'
    return null;
  }
  /** Sync DOM from sim state; reschedules itself on every animation frame. */
  static redraw() {
    // Just update 'data-*' and '--pos' values, and let CSS take care of the rest.
    // Traffic light
    const {ui, ctrl, userVars} = Sim;
    ui.$sim.dataset.light = userVars.light;
    ui.$sim.dataset.ctrlQueued = ctrl.orderVec.some(sema => userVars[sema].getPosition(ctrl) > 0);
    // Lanes
    const {lane1, lane2, redrawTraverser} = Sim;
    lane1.sort(Traverser.compareTraversers);
    lane2.sort(Traverser.compareTraversers);
    lane1.forEach(redrawTraverser);
    lane2.forEach(redrawTraverser);
    // FIXME: Maybe it's better to use Proxy(userVars) and redraw after its attributes are accessed..
    // Redraw before each repaint
    requestAnimationFrame(Sim.redraw);
  }
  /**
   * Update Traverser
   *
   * @param {Traverser} t
   * @param {number} i - index
   */
  static redrawTraverser(t, i) {
    t.$elem.style.setProperty('--pos', i);
    t.$elem.title =
      `${t.name}\n\n` +
      `orderVec: {${t.orderVec.join(', ')}}\n` +
      `waitVec: {${t.getWaitVec().join(', ')}}`;
  }
  /** Surface a user-code error as a tooltip on the offending textarea, then stop. */
  static showError(algoSource, message) {
    Sim.ui[ '$' + algoSource ].setAttribute('title', message);
    Sim.stop();
  }
  /** Remove error tooltips set by showError(). */
  static clearErrors() {
    Sim.ui.$form.querySelectorAll('[title]').forEach($x => $x.removeAttribute('title'));
  }
  /**
   * Populate UI inputs with preset data
   *
   * @param {object} preset
   */
  static loadPreset(preset) {
    const keys = 'semaNames semaVals intNames intVals controller traverser1 traverser2'.split(' ');
    for (const key of keys) {
      Sim.ui['$' + key].value = preset[key];
    }
  }
}
class Algorithm {
/**
* @param {String} algoSource - "controller", "traverser1", or "traverser2"
*/
constructor(algoSource) {
this.algoSource = algoSource;
this.userAlgo = Sim.userAlgos[algoSource];
this.orderVec = Algorithm.parseOrderVector(this.userAlgo);
}
async run() {
const AsyncFunction = Object.getPrototypeOf(async function(){}).constructor;
try {
const asyncFunc = new AsyncFunction(`
with (Sim.userVars) {
${ Algorithm.awaitifyThis(this.userAlgo) }
}
`);
await asyncFunc.call(this);
} catch (userError) {
console.error('userError', userError); // for actual debugging
Sim.showError(this.algoSource, userError.message);
}
}
/**
* Replace function calls with `await`ed method invocations associated with `this` object.
*
* @param {string} code
* @returns {string} Updated code
*/
static awaitifyThis(code) {
// Prefix "p", "v", "sleep", and "traverse" calls with `await` and attach them to `this`
return code.replace(/\b(p|v|sleep|traverse)\(/g, 'await this.$1(');
}
async p(x) {
await this.sleep(0);
await x.acquire(this);
}
async v(x) {
await this.sleep(0);
await x.release(this);
}
async sleep(secs) {
return new Promise((resolve, _reject) => {
setTimeout(resolve, secs * 1000);
});
}
/**
* A vehicle's place in line is determined by a vector of priorities
* which are based on calls to p(...)
*
* @todo Should dedup before returning?
*
* @param {string} code
* @return {Array<string>}
*/
static parseOrderVector(code) {
const pCalls = code.match(/\bp\(\s*(\w+)\s*\)/g) || [];
const acquiredSemaphores = pCalls.map(x => x.slice(2, -1).trim());
return acquiredSemaphores;
}
}
/**
 * The traffic-light controller "thread": runs the user's controller
 * algorithm source (Sim.userAlgos.controller).
 */
class Controller extends Algorithm {
  constructor() {
    super('controller');
  }
}
/**
 * A vehicle "thread": runs a user traverser algorithm, waits in its lane,
 * then crosses the intersection and disappears.
 */
class Traverser extends Algorithm {
  // Monotonic id counter shared by all vehicles.
  static counter = 0;
  // Pool of free display colors: shift() on spawn, push() back on destroy.
  static freeColors = 'blue coral darkkhaki firebrick yellowgreen gray skyblue teal orange pink purple yellow'.split(' ');
  /**
   * @param {String} algoSource - "traverser1" or "traverser2"
   */
  constructor(algoSource) {
    super(algoSource);
    this.id = this.getUniqueId();
    this.color = this.getUniqueColor();
    this.type = Math.random() < 0.25 ? 'truck' : 'car'; // 25% chance of being a truck
    this.name = `${this.color} ${this.type} #${this.id}`;
    this.$elem = null;
    // FIXME: These attributes are set by children
    this.lane = null;
    this.dir = this.algoSource === 'traverser1' ? 'south' : 'west';
    // BUGFIX: was `'traverser1' ? '4' : '7'` -- a truthy string literal, so
    // every vehicle started at '4'.  Compare against algoSource like `dir`.
    this.initialPos = this.algoSource === 'traverser1' ? '4' : '7'; // the very end of the line
  }
  /**
   * Create a new visual element and display it on page
   */
  initElem() {
    this.$elem = document.createElement('span');
    this.$elem.classList.add('vehicle', this.type, this.dir);
    this.$elem.title = this.name; // (will be updated)
    this.$elem.style.setProperty('--pos', this.initialPos); // (will be updated)
    this.$elem.style.setProperty('--color', this.color);
    Sim.ui.$intersection.append(this.$elem);
    return this.$elem; // always return something
  }
  /** Return this vehicle's color to the pool and drop its DOM node. */
  destroyElem() {
    Traverser.freeColors.push(this.color);
    this.$elem.remove();
  }
  /** Remove the vehicle from the page and from its lane. */
  destroy() {
    this.destroyElem();
    // Guard the index: splice(-1, 1) would silently delete the *last*
    // vehicle in the lane if this one were somehow already removed.
    const idx = this.lane.indexOf(this);
    if (idx !== -1) this.lane.splice(idx, 1);
  }
  /** The full crossing sequence, exposed to user code as traverse(). */
  async traverse() {
    // "Restarting the engine takes some time" za3ma
    // Adds an element of "randomness"
    await this.sleep(Math.random());
    // Cross the intersection then "keep moving" and fade away...
    await this.enterIntersection();
    await this.leaveIntersection();
  }
  async enterIntersection() {
    Sim.crossing++;
    this.$elem.dataset.state = 'leaving';
    this.assertNoCollision();
    await this.sleep(0.5);
  }
  async leaveIntersection() {
    await this.sleep(1);
    this.assertNoCollision();
    this.destroy();
    Sim.crossing--;
  }
  /** Throw (and flag the UI) if two vehicles occupy the intersection. */
  assertNoCollision() {
    const happened = Sim.crossing > 1; // more than one vehicle crossing the intersection
    if (happened) {
      Sim.ui.$sim.dataset.state = 'error';
      throw new Error('Collision!');
    }
  }
  /**
   * @returns {Array<number>}
   */
  getWaitVec() {
    const vec = this.orderVec.map(semaName => Sim.userVars[semaName].getPosition(this));
    return vec;
  }
  getUniqueId() {
    // FIXME: Should throw error when about to overflow? Although this sim won't run for a long time for this to happen
    return (++Traverser.counter).toString(36).toUpperCase();
  }
  getUniqueColor() {
    // Lane caps (Traverser1.MAX + Traverser2.MAX = 4 + 7 = 11) keep at most
    // 11 concurrent vehicles, and there are 12 colors, so the pool holds.
    return Traverser.freeColors.shift();
  }
  /**
   *
   * @param {Traverser} a
   * @param {Traverser} b
   * @returns {number}
   */
  static compareTraversers(a, b) {
    return Traverser.compareVecs( a.getWaitVec(), b.getWaitVec() );
  }
  /**
   * Sort wait vecs in an ascending order
   *
   * @param {Array<number>} vecA
   * @param {Array<number>} vecB
   * @returns {number}
   */
  static compareVecs(vecA, vecB) {
    // Lexicographic comparison: first differing component decides.
    for (let i = 0; i < vecA.length; i++) {
      if (vecA[i] - vecB[i] !== 0) {
        return vecA[i] - vecB[i];
      }
    }
    return 0;
  }
}
/** Vehicle type for lane 1 (heads south); at most 4 queued at once. */
class Traverser1 extends Traverser {
  static MAX = 4;
  constructor() {
    super('traverser1');
    this.initElem();
    this.lane = Sim.lane1;
  }
}
/** Vehicle type for lane 2 (heads west); at most 7 queued at once. */
class Traverser2 extends Traverser {
  static MAX = 7;
  constructor() {
    super('traverser2');
    this.initElem();
    this.lane = Sim.lane2;
  }
}
Sim.setup(); | * A basic Promise-based and queue-based Semaphore
*/
const Semaphore = class SillySemaphore {
| random_line_split |
NeuralNetworks.py | import sys
import numpy as np
import copy
from sklearn.preprocessing import normalize
def run_ann(zeros):
activations = [sigmoid_activation, sigmoid_activation, linear_activation]
deriv_activations = [sigmoid_activation_deriv, sigmoid_activation_deriv, linear_activation_deriv]
# widths_values = [5, 10, 25, 50, 100]
widths_values = [5, 10]
print("TUNING HYPER PARAMETERS:")
[gamma, d, _] = compute_hyperparameters(activations, deriv_activations, zeros)
print("GAMMA:", gamma, "D:", d)
errors = []
print("USING HYPER PARAMETERS FOR EACH WIDTH:")
for width in widths_values:
widths = [5, width, width, 1]
weights = run_sgd(gamma, d, widths, activations, deriv_activations, zeros)
train_predictions = calculate_predictions(train_data, train_y, widths, activations, weights)
train_error = calculate_error(train_y, train_predictions)
test_predictions = calculate_predictions(test_data,test_y, widths, activations, weights)
test_error = calculate_error(test_y, test_predictions)
errors.append([width, train_error, test_error])
print(width, " COMPLETE")
for error in errors:
print("Width:", error[0], ", Train Error:", error[1], ", Test Error:", error[2])
def compute_hyperparameters(activations, deriv_activations, zeros):
    """Grid-search (gamma, d) pairs and return the best [gamma, d, error] triple.

    The sentinel [0, 100.0, 100.0] loses to any real run, so the first
    evaluated pair always replaces it.
    """
    smallest = [0, 100.0, 100.0]
    for gamma in [1, 0.5]:
        for d in [1, 0.1]:
            smallest = get_smallest_error(smallest, gamma, d, activations, deriv_activations, zeros)
        print("----------------------")
    return smallest
def get_smallest_error(smallest, gamma, d, activations, deriv_activations, zeros):
# Computes the error for the given parameters and returns the error if it is the smallest, or the previous smallest.
|
def calculate_predictions(data, y, widths, activations, weights):
predictions = copy.deepcopy(y)
for i in range(len(data)):
predictions[i] = np.sign(run_forward_pass(weights, data[i], widths, activations)[-1])
return predictions
def calculate_error(y, predictions):
return 1 - np.count_nonzero(np.multiply(y, predictions) == 1) / len(y)
def run_sgd(initial_gamma, d, widths, activations, deriv_activations, zeros, n=872):
weights = create_weights(widths, zeros)
loss = []
for epoch in range(100):
learning_rate = update_learning_rate(initial_gamma, d, epoch)
[y, x] = shuffle_data(train_y, train_data)
l = 0
for i in range(n):
nodes = run_forward_pass(weights, x[i], widths, activations)
prediction = np.sign(nodes[-1])
weights_grad = run_backpropagation(weights, nodes, y[i], prediction, deriv_activations)
weights = update_weights(weights, learning_rate, weights_grad)
l += compute_loss(prediction, y[i])
loss.append(l)
# print("LOSS:", loss)
return weights
def create_weights(widths, zeros):
weights = []
for level in range(len(widths) - 2):
temp = []
for j in range(widths[level]):
if not zeros:
temp.append(np.random.normal(0, 0.1, widths[level + 1] - 1).tolist())
else:
temp.append([0] * (widths[level + 1] - 1))
weights.append(temp)
temp = []
for j in range(widths[level]):
if not zeros:
temp.append(np.random.normal(0, 0.1, 1).tolist())
else:
temp.append([1])
weights.append(temp)
return np.array(weights)
def shuffle_data(y, data):
"""Shuffles the given data by appending y to the data, then shuffling, then returns the separated data and y."""
combined = np.c_[data.reshape(len(data), -1), y.reshape(len(y), -1)]
np.random.shuffle(combined)
shuffled_data = combined[:, :data.size // len(data)].reshape(data.shape)
shuffled_y = combined[:, data.size // len(data):].reshape(y.shape)
return [shuffled_y, shuffled_data]
def update_learning_rate(initial_gamma, d, epoch):
return initial_gamma / (1.0 + epoch * (initial_gamma / d))
def update_weights(weights, learning_rate, weights_grad):
for i in range(len(weights_grad)):
for j in range(len(weights_grad[i])):
for k in range(len(weights_grad[i][j])):
if type(weights[i][j][k]) == np.matrix:
weights[i][j][k][0, 0] -= learning_rate * weights_grad[i][j][k][0, 0]
else:
weights[i][j][k] -= learning_rate * weights_grad[i][j][k]
return weights
def compute_loss(prediction, label):
return np.square(prediction[0] - label[0, 0]) / 2
# Forward Pass
def run_forward_pass(weights, example, widths, activations):
shape = []
for i in range(len(widths)):
shape.append(np.zeros(widths[i]))
nodes = np.array(shape)
nodes[0] = example
for i in range(1, len(nodes)):
nodes[i] = activations[i-1](widths[i], weights[i-1], nodes[i-1])
return nodes
def linear_activation(width, weights, prev_nodes):
curr_nodes = np.zeros(width)
for j in range(len(curr_nodes)):
for i in range(len(prev_nodes)):
curr_nodes[j] += prev_nodes[i] * weights[i][j]
return curr_nodes
def sigmoid_activation(width, weights, prev_nodes):
prev_nodes = copy.deepcopy(prev_nodes)
if prev_nodes.ndim > 1:
prev_nodes = np.asarray(prev_nodes.T)
prev_nodes = prev_nodes[:, 0]
curr_nodes = np.zeros(width)
curr_nodes[0] = 1
for j in range(len(curr_nodes) - 1):
z = 0
for i in range(len(prev_nodes)):
z += prev_nodes[i] * weights[i][j]
curr_nodes[j + 1] = compute_sigmoid(z)
return curr_nodes
def compute_sigmoid(z):
return 1/(1+np.exp(-z))
# Backpropagation
def run_backpropagation(weights, nodes, y, prediction, activations):
loss_deriv = prediction - y
prev_node_derivs = [loss_deriv]
weight_derivs = copy.deepcopy(weights)
is_last_level = True
for level in range(len(weights) - 1, -1, -1):
weight_derivs[level] = compute_weight_derivs(weight_derivs[level], prev_node_derivs, nodes[level+1], nodes[level], activations[level])
prev_node_derivs = compute_node_derivatives(weights[level], nodes[level], prev_node_derivs, is_last_level)
is_last_level = False
return weight_derivs
def compute_weight_derivs(weight_derivs, prev_node_derivs, prev_nodes, next_nodes, activation):
start = 0
if activation == sigmoid_activation_deriv: start = 1
for i in range(len(weight_derivs)):
for j in range(start, len(weight_derivs[i]) + start):
if next_nodes.ndim == 2:
next_nodes = copy.deepcopy(next_nodes)
next_nodes = np.asarray(next_nodes.T)
next_nodes = next_nodes[:, 0]
weight_derivs[i][j-start] = activation(prev_node_derivs[j], next_nodes[i], prev_nodes[j])
return weight_derivs
def linear_activation_deriv(prev_node_deriv, next_node, _):
return prev_node_deriv[0] * next_node
def sigmoid_activation_deriv(prev_node_deriv, next_node, prev_node):
return prev_node_deriv * next_node * prev_node * (1-prev_node)
def compute_node_derivatives(weights, curr_nodes, prev_node_derivs, is_last_level):
curr_node_derivs = np.zeros(curr_nodes.shape)
for i in range(len(curr_nodes)):
product = 0
for j in range(len(weights[i])):
k = j
if not is_last_level: k += 1
product += weights[i][j] * prev_node_derivs[k]
curr_node_derivs[i] = product
return curr_node_derivs
def import_data(path, num_examples):
"""Imports the data at the given path to a csv file with the given amount of examples."""
data = np.empty((num_examples, 5), dtype="float128")
y = np.empty((num_examples, 1), dtype="float128")
with open(path, 'r') as f:
i = 0
for line in f:
example = []
terms = line.strip().split(',')
for j in range(len(terms)):
if j == 4:
y[i] = 2 * float(terms[j]) - 1
else:
example.append(float(terms[j]))
data[i, 1:] = example
data[i, 0] = 1
i += 1
data = normalize(np.asmatrix(data), axis=0)
return [data, np.asmatrix(y)]
def run_example():
widths = np.array([3, 3, 3, 1])
train_x = np.array([1., 1., 1.])
train_y = np.array([1])
weights = np.array([
[[-1., 1.], [-2., 2.], [-3., 3.]],
[[-1., 1.], [-2., 2.], [-3., 3.]],
[[-1.], [2.], [-1.5]]
])
# weights = np.array([
# [np.array([-1, 1]), np.array([-2, 2]), np.array([-3, 3])],
# [np.array([-1, 1]), np.array([-2, 2]), np.array([-3, 3])],
# [np.array([-1]), np.array([2]), np.array([-1.5])]
# ])
activations = [sigmoid_activation, sigmoid_activation, linear_activation]
nodes = run_forward_pass(weights, train_x, widths, activations)
print("FORWARD PASS --------")
for i in range(len(nodes)):
print("LAYER:", i, ":", nodes[i])
print("BACKPROPAGATION --------")
deriv_activations = [sigmoid_activation_deriv, sigmoid_activation_deriv, linear_activation_deriv]
weights_grad = run_backpropagation(weights, nodes, train_y, np.sign(nodes[-1]), deriv_activations)
for level in weights_grad:
print(level)
print("__________")
weights = update_weights(weights, 0.01, weights_grad)
for level in weights:
print(level)
def run_example_sgd():
widths = np.array([3, 3, 3, 1])
activations = [sigmoid_activation, sigmoid_activation, linear_activation]
deriv_activations = [sigmoid_activation_deriv, sigmoid_activation_deriv, linear_activation_deriv]
run_sgd(1, 1, widths, activations, deriv_activations, 1, False)
if __name__ == '__main__':
if sys.argv[1] == "example": run_example()
elif sys.argv[1] == "example_sgd":
train_data = np.array([1., 1., 1.])
train_data = train_data[np.newaxis, :]
train_y = np.array([1])
train_y = train_y[:, np.newaxis]
run_example_sgd()
elif sys.argv[1] == "ann":
[train_data, train_y] = import_data("./bank-note/train.csv", 872)
[test_data, test_y] = import_data("./bank-note/test.csv", 500)
run_ann(False)
elif sys.argv[1] == "zeros":
[train_data, train_y] = import_data("./bank-note/train.csv", 872)
[test_data, test_y] = import_data("./bank-note/test.csv", 500)
run_ann(True) | widths = [5, 5, 5, 1]
weights = run_sgd(gamma, d, widths, activations, deriv_activations, zeros)
predictions = calculate_predictions(train_data, train_y, widths, activations, weights)
error = calculate_error(train_y, predictions)
print("GAMMA:", gamma, " D:", d, " ERROR:", error)
if error < smallest[2]: smallest = [gamma, d, error]
return smallest | identifier_body |
NeuralNetworks.py | import sys
import numpy as np
import copy
from sklearn.preprocessing import normalize
def run_ann(zeros):
    """Tune hyper-parameters, then train/evaluate a 3-hidden-layer ANN per width.

    Args:
        zeros: if truthy, initialize weights to zeros instead of N(0, 0.1)
               (passed through to create_weights via run_sgd).
    Relies on module-level globals train_data/train_y/test_data/test_y set
    in the __main__ block.
    """
    activations = [sigmoid_activation, sigmoid_activation, linear_activation]
    deriv_activations = [sigmoid_activation_deriv, sigmoid_activation_deriv, linear_activation_deriv]
    # widths_values = [5, 10, 25, 50, 100]
    widths_values = [5, 10]
    print("TUNING HYPER PARAMETERS:")
    [gamma, d, _] = compute_hyperparameters(activations, deriv_activations, zeros)
    print("GAMMA:", gamma, "D:", d)
    errors = []
    print("USING HYPER PARAMETERS FOR EACH WIDTH:")
    for width in widths_values:
        # Input layer is fixed at 5 features (bias + 4 csv columns), output at 1.
        widths = [5, width, width, 1]
        weights = run_sgd(gamma, d, widths, activations, deriv_activations, zeros)
        train_predictions = calculate_predictions(train_data, train_y, widths, activations, weights)
        train_error = calculate_error(train_y, train_predictions)
        test_predictions = calculate_predictions(test_data, test_y, widths, activations, weights)
        test_error = calculate_error(test_y, test_predictions)
        errors.append([width, train_error, test_error])
        print(width, " COMPLETE")
    for error in errors:
        print("Width:", error[0], ", Train Error:", error[1], ", Test Error:", error[2])
def compute_hyperparameters(activations, deriv_activations, zeros):
gammas = [1, 0.5]
ds = [1, 0.1]
smallest = [0, 100.0, 100.0]
for gamma in gammas:
|
return smallest
def get_smallest_error(smallest, gamma, d, activations, deriv_activations, zeros):
    """Train a fixed 5-5-5-1 network with (gamma, d) and keep the better result.

    `smallest` is a [gamma, d, error] triple; this run replaces it only when
    its training error (index 2) is strictly lower.
    Reads the module-level globals train_data/train_y.
    """
    # Computes the error for the given parameters and returns the error if it is the smallest, or the previous smallest.
    widths = [5, 5, 5, 1]
    weights = run_sgd(gamma, d, widths, activations, deriv_activations, zeros)
    predictions = calculate_predictions(train_data, train_y, widths, activations, weights)
    error = calculate_error(train_y, predictions)
    print("GAMMA:", gamma, " D:", d, " ERROR:", error)
    if error < smallest[2]: smallest = [gamma, d, error]
    return smallest
def calculate_predictions(data, y, widths, activations, weights):
    """Forward-pass every example and return the sign of the network output.

    `y` serves only as a template so the returned array matches its
    shape/dtype; its values are all overwritten.
    """
    predictions = copy.deepcopy(y)
    for i, example in enumerate(data):
        outputs = run_forward_pass(weights, example, widths, activations)
        predictions[i] = np.sign(outputs[-1])
    return predictions
def calculate_error(y, predictions):
    """Misclassification rate over paired labels and predictions.

    Labels/predictions are in {-1, +1}; an elementwise product of exactly 1
    marks a correct prediction.  Returns 0.0 for empty input instead of
    raising ZeroDivisionError.
    """
    total = len(y)
    if total == 0:
        return 0.0
    correct = np.count_nonzero(np.multiply(y, predictions) == 1)
    return 1 - correct / total
def run_sgd(initial_gamma, d, widths, activations, deriv_activations, zeros, n=872):
    """Train the network with stochastic gradient descent for 100 epochs.

    Args:
        initial_gamma: initial learning rate.
        d: decay parameter for the learning-rate schedule.
        widths: layer sizes, e.g. [5, w, w, 1].
        zeros: forwarded to create_weights (zero vs. Gaussian init).
        n: number of examples processed per epoch.
    Returns the trained weight structure.
    Reads the module-level globals train_y/train_data each epoch.
    """
    weights = create_weights(widths, zeros)
    loss = []
    for epoch in range(100):
        learning_rate = update_learning_rate(initial_gamma, d, epoch)
        # Reshuffle the full training set every epoch.
        [y, x] = shuffle_data(train_y, train_data)
        l = 0
        for i in range(n):
            nodes = run_forward_pass(weights, x[i], widths, activations)
            prediction = np.sign(nodes[-1])
            weights_grad = run_backpropagation(weights, nodes, y[i], prediction, deriv_activations)
            weights = update_weights(weights, learning_rate, weights_grad)
            l += compute_loss(prediction, y[i])
        loss.append(l)
    # print("LOSS:", loss)
    return weights
def create_weights(widths, zeros):
    """Build the per-layer weight lists for the network described by `widths`.

    For each hidden level, rows are indexed by source node and columns by
    destination node (minus the bias unit, hence `widths[level + 1] - 1`).
    The block built after the loop (reusing the final `level`) holds the
    weights into the single output unit.
    NOTE(review): with `zeros` set, hidden weights start at 0 but output
    weights start at 1 -- confirm that asymmetry is intended.
    """
    weights = []
    for level in range(len(widths) - 2):
        temp = []
        for j in range(widths[level]):
            if not zeros:
                temp.append(np.random.normal(0, 0.1, widths[level + 1] - 1).tolist())
            else:
                temp.append([0] * (widths[level + 1] - 1))
        weights.append(temp)
    temp = []
    for j in range(widths[level]):
        if not zeros:
            temp.append(np.random.normal(0, 0.1, 1).tolist())
        else:
            temp.append([1])
    weights.append(temp)
    return np.array(weights)
def shuffle_data(y, data):
    """Shuffles the given data by appending y to the data, then shuffling, then returns the separated data and y."""
    # Stack features and labels column-wise so rows stay aligned while shuffled.
    combined = np.c_[data.reshape(len(data), -1), y.reshape(len(y), -1)]
    np.random.shuffle(combined)
    # Split back apart: leading columns are features, trailing columns labels.
    shuffled_data = combined[:, :data.size // len(data)].reshape(data.shape)
    shuffled_y = combined[:, data.size // len(data):].reshape(y.shape)
    return [shuffled_y, shuffled_data]
def update_learning_rate(initial_gamma, d, epoch):
    """Decay schedule: gamma_t = gamma_0 / (1 + t * gamma_0 / d)."""
    denominator = 1.0 + epoch * (initial_gamma / d)
    return initial_gamma / denominator
def update_weights(weights, learning_rate, weights_grad):
    """Gradient-descent step: w -= learning_rate * grad, cell by cell, in place.

    Some cells end up as 1x1 np.matrix objects during backprop; those are
    updated through [0, 0] indexing, plain floats directly.
    Returns the (mutated) `weights` for convenience.
    """
    for i in range(len(weights_grad)):
        for j in range(len(weights_grad[i])):
            for k in range(len(weights_grad[i][j])):
                if type(weights[i][j][k]) == np.matrix:
                    weights[i][j][k][0, 0] -= learning_rate * weights_grad[i][j][k][0, 0]
                else:
                    weights[i][j][k] -= learning_rate * weights_grad[i][j][k]
    return weights
def compute_loss(prediction, label):
    """Squared-error loss for one example: (prediction - label)^2 / 2.

    `prediction` is indexable at [0]; `label` is a 1x1 matrix-like ([0, 0]).
    """
    diff = prediction[0] - label[0, 0]
    return np.square(diff) / 2
# Forward Pass
def run_forward_pass(weights, example, widths, activations):
    """Propagate one example through the network.

    Returns `nodes` where nodes[0] is the input and nodes[i] is the
    activation vector of layer i (nodes[-1] is the scalar-ish output).
    NOTE(review): np.array over ragged per-layer vectors relies on legacy
    object-array coercion; newer NumPy may need dtype=object here.
    """
    shape = []
    for i in range(len(widths)):
        shape.append(np.zeros(widths[i]))
    nodes = np.array(shape)
    nodes[0] = example
    for i in range(1, len(nodes)):
        nodes[i] = activations[i-1](widths[i], weights[i-1], nodes[i-1])
    return nodes
def linear_activation(width, weights, prev_nodes):
    """Linear layer: curr[j] = sum_i prev[i] * weights[i][j] (no bias node)."""
    curr_nodes = np.zeros(width)
    for i, prev_val in enumerate(prev_nodes):
        row = weights[i]
        for j in range(width):
            curr_nodes[j] += prev_val * row[j]
    return curr_nodes
def sigmoid_activation(width, weights, prev_nodes):
    """Sigmoid layer: node 0 is a constant bias (1); node j+1 is
    sigmoid(sum_i prev[i] * weights[i][j]).

    Deep-copies `prev_nodes` and flattens a 1xN matrix input down to a
    plain 1-D vector before use.
    """
    prev_nodes = copy.deepcopy(prev_nodes)
    if prev_nodes.ndim > 1:
        prev_nodes = np.asarray(prev_nodes.T)
        prev_nodes = prev_nodes[:, 0]
    curr_nodes = np.zeros(width)
    curr_nodes[0] = 1
    for j in range(len(curr_nodes) - 1):
        z = 0
        for i in range(len(prev_nodes)):
            z += prev_nodes[i] * weights[i][j]
        curr_nodes[j + 1] = compute_sigmoid(z)
    return curr_nodes
def compute_sigmoid(z):
    """Logistic function: maps any real z into (0, 1)."""
    exp_neg_z = np.exp(-z)
    return 1 / (1 + exp_neg_z)
# Backpropagation
def run_backpropagation(weights, nodes, y, prediction, activations):
    """Compute dLoss/dW for every layer of the network.

    Walks layers from output back to input, filling a deep copy of
    `weights` with gradients (same nested structure as `weights`).
    `activations` holds the per-layer derivative functions.
    """
    # Derivative of the squared-error loss w.r.t. the output.
    loss_deriv = prediction - y
    prev_node_derivs = [loss_deriv]
    weight_derivs = copy.deepcopy(weights)
    is_last_level = True
    for level in range(len(weights) - 1, -1, -1):
        weight_derivs[level] = compute_weight_derivs(weight_derivs[level], prev_node_derivs, nodes[level+1], nodes[level], activations[level])
        prev_node_derivs = compute_node_derivatives(weights[level], nodes[level], prev_node_derivs, is_last_level)
        is_last_level = False
    return weight_derivs
def compute_weight_derivs(weight_derivs, prev_node_derivs, prev_nodes, next_nodes, activation):
    """Fill `weight_derivs` for one layer from the upstream node derivatives.

    For sigmoid layers, `start` is 1 so the bias node (which has no incoming
    weights in this representation) is skipped.
    """
    start = 0
    if activation == sigmoid_activation_deriv: start = 1
    for i in range(len(weight_derivs)):
        for j in range(start, len(weight_derivs[i]) + start):
            # Flatten a 1xN matrix of source nodes to a plain vector first.
            if next_nodes.ndim == 2:
                next_nodes = copy.deepcopy(next_nodes)
                next_nodes = np.asarray(next_nodes.T)
                next_nodes = next_nodes[:, 0]
            weight_derivs[i][j-start] = activation(prev_node_derivs[j], next_nodes[i], prev_nodes[j])
    return weight_derivs
def linear_activation_deriv(prev_node_deriv, next_node, _):
    """dL/dw for the linear output unit: upstream derivative times source node.

    The third parameter (destination node value) is unused for linear units.
    """
    upstream = prev_node_deriv[0]
    return upstream * next_node
def sigmoid_activation_deriv(prev_node_deriv, next_node, prev_node):
    """Chain rule through a sigmoid unit: upstream * source * s * (1 - s),
    where s = prev_node is the sigmoid's own output."""
    local_grad = prev_node * (1 - prev_node)
    return prev_node_deriv * next_node * local_grad
def compute_node_derivatives(weights, curr_nodes, prev_node_derivs, is_last_level):
    """Backpropagate node derivatives one layer:
    d[i] = sum_j weights[i][j] * d_upstream[k].

    For non-final levels, the upstream index k is shifted by one so the
    bias node's derivative is skipped.
    """
    curr_node_derivs = np.zeros(curr_nodes.shape)
    for i in range(len(curr_nodes)):
        product = 0
        for j in range(len(weights[i])):
            k = j
            if not is_last_level: k += 1
            product += weights[i][j] * prev_node_derivs[k]
        curr_node_derivs[i] = product
    return curr_node_derivs
def import_data(path, num_examples):
    """Imports the data at the given path to a csv file with the given amount of examples."""
    # Column 0 is a constant bias feature; columns 1-4 hold the csv features.
    # dtype "float128" is platform-dependent (e.g. unavailable on Windows).
    data = np.empty((num_examples, 5), dtype="float128")
    y = np.empty((num_examples, 1), dtype="float128")
    with open(path, 'r') as f:
        i = 0
        for line in f:
            example = []
            terms = line.strip().split(',')
            for j in range(len(terms)):
                if j == 4:
                    # Map {0, 1} labels to {-1, +1}.
                    y[i] = 2 * float(terms[j]) - 1
                else:
                    example.append(float(terms[j]))
            data[i, 1:] = example
            data[i, 0] = 1
            i += 1
    # Column-wise L2 normalization; np.asmatrix is deprecated in newer NumPy.
    data = normalize(np.asmatrix(data), axis=0)
    return [data, np.asmatrix(y)]
def run_example():
    """Run the hand-worked 3-3-3-1 example: one forward pass, one backprop,
    and one weight update, printing all intermediate values."""
    widths = np.array([3, 3, 3, 1])
    train_x = np.array([1., 1., 1.])
    train_y = np.array([1])  # shadows the module-level train_y on purpose
    weights = np.array([
        [[-1., 1.], [-2., 2.], [-3., 3.]],
        [[-1., 1.], [-2., 2.], [-3., 3.]],
        [[-1.], [2.], [-1.5]]
    ])
    # weights = np.array([
    #     [np.array([-1, 1]), np.array([-2, 2]), np.array([-3, 3])],
    #     [np.array([-1, 1]), np.array([-2, 2]), np.array([-3, 3])],
    #     [np.array([-1]), np.array([2]), np.array([-1.5])]
    # ])
    activations = [sigmoid_activation, sigmoid_activation, linear_activation]
    nodes = run_forward_pass(weights, train_x, widths, activations)
    print("FORWARD PASS --------")
    for i in range(len(nodes)):
        print("LAYER:", i, ":", nodes[i])
    print("BACKPROPAGATION --------")
    deriv_activations = [sigmoid_activation_deriv, sigmoid_activation_deriv, linear_activation_deriv]
    weights_grad = run_backpropagation(weights, nodes, train_y, np.sign(nodes[-1]), deriv_activations)
    for level in weights_grad:
        print(level)
    print("__________")
    weights = update_weights(weights, 0.01, weights_grad)
    for level in weights:
        print(level)
def run_example_sgd():
    """Run one tiny SGD demo on the 3-3-3-1 example network.

    NOTE(review): run_sgd's signature is (initial_gamma, d, widths,
    activations, deriv_activations, zeros, n=872), so this call binds
    zeros=1 and n=False (i.e. zero inner iterations per epoch).  The last
    two arguments were probably meant to be (zeros=False, n=1) -- confirm.
    """
    widths = np.array([3, 3, 3, 1])
    activations = [sigmoid_activation, sigmoid_activation, linear_activation]
    deriv_activations = [sigmoid_activation_deriv, sigmoid_activation_deriv, linear_activation_deriv]
    run_sgd(1, 1, widths, activations, deriv_activations, 1, False)
if __name__ == '__main__':
if sys.argv[1] == "example": run_example()
elif sys.argv[1] == "example_sgd":
train_data = np.array([1., 1., 1.])
train_data = train_data[np.newaxis, :]
train_y = np.array([1])
train_y = train_y[:, np.newaxis]
run_example_sgd()
elif sys.argv[1] == "ann":
[train_data, train_y] = import_data("./bank-note/train.csv", 872)
[test_data, test_y] = import_data("./bank-note/test.csv", 500)
run_ann(False)
elif sys.argv[1] == "zeros":
[train_data, train_y] = import_data("./bank-note/train.csv", 872)
[test_data, test_y] = import_data("./bank-note/test.csv", 500)
run_ann(True) | for d in ds: smallest = get_smallest_error(smallest, gamma, d, activations, deriv_activations, zeros)
print("----------------------") | conditional_block |
NeuralNetworks.py | import sys
import numpy as np
import copy
from sklearn.preprocessing import normalize
def run_ann(zeros):
activations = [sigmoid_activation, sigmoid_activation, linear_activation]
deriv_activations = [sigmoid_activation_deriv, sigmoid_activation_deriv, linear_activation_deriv]
# widths_values = [5, 10, 25, 50, 100]
widths_values = [5, 10]
print("TUNING HYPER PARAMETERS:")
[gamma, d, _] = compute_hyperparameters(activations, deriv_activations, zeros)
print("GAMMA:", gamma, "D:", d)
errors = []
print("USING HYPER PARAMETERS FOR EACH WIDTH:")
for width in widths_values:
widths = [5, width, width, 1]
weights = run_sgd(gamma, d, widths, activations, deriv_activations, zeros)
train_predictions = calculate_predictions(train_data, train_y, widths, activations, weights)
train_error = calculate_error(train_y, train_predictions)
test_predictions = calculate_predictions(test_data,test_y, widths, activations, weights)
test_error = calculate_error(test_y, test_predictions)
errors.append([width, train_error, test_error])
print(width, " COMPLETE")
for error in errors:
print("Width:", error[0], ", Train Error:", error[1], ", Test Error:", error[2])
def compute_hyperparameters(activations, deriv_activations, zeros):
gammas = [1, 0.5]
ds = [1, 0.1]
smallest = [0, 100.0, 100.0]
for gamma in gammas:
for d in ds: smallest = get_smallest_error(smallest, gamma, d, activations, deriv_activations, zeros)
print("----------------------")
return smallest
def get_smallest_error(smallest, gamma, d, activations, deriv_activations, zeros):
# Computes the error for the given parameters and returns the error if it is the smallest, or the previous smallest.
widths = [5, 5, 5, 1]
weights = run_sgd(gamma, d, widths, activations, deriv_activations, zeros)
predictions = calculate_predictions(train_data, train_y, widths, activations, weights)
error = calculate_error(train_y, predictions)
print("GAMMA:", gamma, " D:", d, " ERROR:", error)
if error < smallest[2]: smallest = [gamma, d, error]
return smallest
def calculate_predictions(data, y, widths, activations, weights):
predictions = copy.deepcopy(y)
for i in range(len(data)):
predictions[i] = np.sign(run_forward_pass(weights, data[i], widths, activations)[-1])
return predictions
def calculate_error(y, predictions):
return 1 - np.count_nonzero(np.multiply(y, predictions) == 1) / len(y)
def run_sgd(initial_gamma, d, widths, activations, deriv_activations, zeros, n=872):
weights = create_weights(widths, zeros)
loss = []
for epoch in range(100):
learning_rate = update_learning_rate(initial_gamma, d, epoch)
[y, x] = shuffle_data(train_y, train_data)
l = 0
for i in range(n):
nodes = run_forward_pass(weights, x[i], widths, activations)
prediction = np.sign(nodes[-1])
weights_grad = run_backpropagation(weights, nodes, y[i], prediction, deriv_activations)
weights = update_weights(weights, learning_rate, weights_grad)
l += compute_loss(prediction, y[i])
loss.append(l)
# print("LOSS:", loss)
return weights
def create_weights(widths, zeros):
weights = []
for level in range(len(widths) - 2):
temp = []
for j in range(widths[level]):
if not zeros:
temp.append(np.random.normal(0, 0.1, widths[level + 1] - 1).tolist())
else:
temp.append([0] * (widths[level + 1] - 1))
weights.append(temp)
temp = []
for j in range(widths[level]):
if not zeros:
temp.append(np.random.normal(0, 0.1, 1).tolist())
else:
temp.append([1])
weights.append(temp)
return np.array(weights)
def shuffle_data(y, data):
"""Shuffles the given data by appending y to the data, then shuffling, then returns the separated data and y."""
combined = np.c_[data.reshape(len(data), -1), y.reshape(len(y), -1)]
np.random.shuffle(combined)
shuffled_data = combined[:, :data.size // len(data)].reshape(data.shape)
shuffled_y = combined[:, data.size // len(data):].reshape(y.shape)
return [shuffled_y, shuffled_data]
def update_learning_rate(initial_gamma, d, epoch):
return initial_gamma / (1.0 + epoch * (initial_gamma / d))
def update_weights(weights, learning_rate, weights_grad):
for i in range(len(weights_grad)):
for j in range(len(weights_grad[i])):
for k in range(len(weights_grad[i][j])):
if type(weights[i][j][k]) == np.matrix:
weights[i][j][k][0, 0] -= learning_rate * weights_grad[i][j][k][0, 0]
else:
weights[i][j][k] -= learning_rate * weights_grad[i][j][k]
return weights
def compute_loss(prediction, label):
return np.square(prediction[0] - label[0, 0]) / 2
# Forward Pass
def run_forward_pass(weights, example, widths, activations):
shape = []
for i in range(len(widths)):
shape.append(np.zeros(widths[i]))
nodes = np.array(shape)
nodes[0] = example
for i in range(1, len(nodes)):
nodes[i] = activations[i-1](widths[i], weights[i-1], nodes[i-1])
return nodes
def linear_activation(width, weights, prev_nodes):
curr_nodes = np.zeros(width)
for j in range(len(curr_nodes)):
for i in range(len(prev_nodes)):
curr_nodes[j] += prev_nodes[i] * weights[i][j]
return curr_nodes
def sigmoid_activation(width, weights, prev_nodes):
prev_nodes = copy.deepcopy(prev_nodes)
if prev_nodes.ndim > 1:
prev_nodes = np.asarray(prev_nodes.T)
prev_nodes = prev_nodes[:, 0]
curr_nodes = np.zeros(width)
curr_nodes[0] = 1
for j in range(len(curr_nodes) - 1):
z = 0
for i in range(len(prev_nodes)):
z += prev_nodes[i] * weights[i][j]
curr_nodes[j + 1] = compute_sigmoid(z)
return curr_nodes
def compute_sigmoid(z):
return 1/(1+np.exp(-z))
# Backpropagation
def run_backpropagation(weights, nodes, y, prediction, activations):
loss_deriv = prediction - y
prev_node_derivs = [loss_deriv]
weight_derivs = copy.deepcopy(weights)
is_last_level = True
for level in range(len(weights) - 1, -1, -1):
weight_derivs[level] = compute_weight_derivs(weight_derivs[level], prev_node_derivs, nodes[level+1], nodes[level], activations[level])
prev_node_derivs = compute_node_derivatives(weights[level], nodes[level], prev_node_derivs, is_last_level)
is_last_level = False
return weight_derivs
def compute_weight_derivs(weight_derivs, prev_node_derivs, prev_nodes, next_nodes, activation):
start = 0
if activation == sigmoid_activation_deriv: start = 1
for i in range(len(weight_derivs)):
for j in range(start, len(weight_derivs[i]) + start):
if next_nodes.ndim == 2:
next_nodes = copy.deepcopy(next_nodes)
next_nodes = np.asarray(next_nodes.T)
next_nodes = next_nodes[:, 0]
weight_derivs[i][j-start] = activation(prev_node_derivs[j], next_nodes[i], prev_nodes[j])
return weight_derivs
def linear_activation_deriv(prev_node_deriv, next_node, _):
return prev_node_deriv[0] * next_node
def sigmoid_activation_deriv(prev_node_deriv, next_node, prev_node):
return prev_node_deriv * next_node * prev_node * (1-prev_node)
def | (weights, curr_nodes, prev_node_derivs, is_last_level):
curr_node_derivs = np.zeros(curr_nodes.shape)
for i in range(len(curr_nodes)):
product = 0
for j in range(len(weights[i])):
k = j
if not is_last_level: k += 1
product += weights[i][j] * prev_node_derivs[k]
curr_node_derivs[i] = product
return curr_node_derivs
def import_data(path, num_examples):
"""Imports the data at the given path to a csv file with the given amount of examples."""
data = np.empty((num_examples, 5), dtype="float128")
y = np.empty((num_examples, 1), dtype="float128")
with open(path, 'r') as f:
i = 0
for line in f:
example = []
terms = line.strip().split(',')
for j in range(len(terms)):
if j == 4:
y[i] = 2 * float(terms[j]) - 1
else:
example.append(float(terms[j]))
data[i, 1:] = example
data[i, 0] = 1
i += 1
data = normalize(np.asmatrix(data), axis=0)
return [data, np.asmatrix(y)]
def run_example():
widths = np.array([3, 3, 3, 1])
train_x = np.array([1., 1., 1.])
train_y = np.array([1])
weights = np.array([
[[-1., 1.], [-2., 2.], [-3., 3.]],
[[-1., 1.], [-2., 2.], [-3., 3.]],
[[-1.], [2.], [-1.5]]
])
# weights = np.array([
# [np.array([-1, 1]), np.array([-2, 2]), np.array([-3, 3])],
# [np.array([-1, 1]), np.array([-2, 2]), np.array([-3, 3])],
# [np.array([-1]), np.array([2]), np.array([-1.5])]
# ])
activations = [sigmoid_activation, sigmoid_activation, linear_activation]
nodes = run_forward_pass(weights, train_x, widths, activations)
print("FORWARD PASS --------")
for i in range(len(nodes)):
print("LAYER:", i, ":", nodes[i])
print("BACKPROPAGATION --------")
deriv_activations = [sigmoid_activation_deriv, sigmoid_activation_deriv, linear_activation_deriv]
weights_grad = run_backpropagation(weights, nodes, train_y, np.sign(nodes[-1]), deriv_activations)
for level in weights_grad:
print(level)
print("__________")
weights = update_weights(weights, 0.01, weights_grad)
for level in weights:
print(level)
def run_example_sgd():
widths = np.array([3, 3, 3, 1])
activations = [sigmoid_activation, sigmoid_activation, linear_activation]
deriv_activations = [sigmoid_activation_deriv, sigmoid_activation_deriv, linear_activation_deriv]
run_sgd(1, 1, widths, activations, deriv_activations, 1, False)
if __name__ == '__main__':
if sys.argv[1] == "example": run_example()
elif sys.argv[1] == "example_sgd":
train_data = np.array([1., 1., 1.])
train_data = train_data[np.newaxis, :]
train_y = np.array([1])
train_y = train_y[:, np.newaxis]
run_example_sgd()
elif sys.argv[1] == "ann":
[train_data, train_y] = import_data("./bank-note/train.csv", 872)
[test_data, test_y] = import_data("./bank-note/test.csv", 500)
run_ann(False)
elif sys.argv[1] == "zeros":
[train_data, train_y] = import_data("./bank-note/train.csv", 872)
[test_data, test_y] = import_data("./bank-note/test.csv", 500)
run_ann(True) | compute_node_derivatives | identifier_name |
NeuralNetworks.py | import sys
import numpy as np
import copy
from sklearn.preprocessing import normalize
def run_ann(zeros):
activations = [sigmoid_activation, sigmoid_activation, linear_activation]
deriv_activations = [sigmoid_activation_deriv, sigmoid_activation_deriv, linear_activation_deriv]
# widths_values = [5, 10, 25, 50, 100]
widths_values = [5, 10]
print("TUNING HYPER PARAMETERS:")
[gamma, d, _] = compute_hyperparameters(activations, deriv_activations, zeros)
print("GAMMA:", gamma, "D:", d)
errors = []
print("USING HYPER PARAMETERS FOR EACH WIDTH:")
for width in widths_values:
widths = [5, width, width, 1]
weights = run_sgd(gamma, d, widths, activations, deriv_activations, zeros)
train_predictions = calculate_predictions(train_data, train_y, widths, activations, weights)
train_error = calculate_error(train_y, train_predictions)
test_predictions = calculate_predictions(test_data,test_y, widths, activations, weights)
test_error = calculate_error(test_y, test_predictions)
errors.append([width, train_error, test_error])
print(width, " COMPLETE")
for error in errors:
print("Width:", error[0], ", Train Error:", error[1], ", Test Error:", error[2])
def compute_hyperparameters(activations, deriv_activations, zeros):
gammas = [1, 0.5]
ds = [1, 0.1]
smallest = [0, 100.0, 100.0]
for gamma in gammas:
for d in ds: smallest = get_smallest_error(smallest, gamma, d, activations, deriv_activations, zeros)
print("----------------------")
return smallest
def get_smallest_error(smallest, gamma, d, activations, deriv_activations, zeros):
# Computes the error for the given parameters and returns the error if it is the smallest, or the previous smallest.
widths = [5, 5, 5, 1]
weights = run_sgd(gamma, d, widths, activations, deriv_activations, zeros)
predictions = calculate_predictions(train_data, train_y, widths, activations, weights)
error = calculate_error(train_y, predictions)
print("GAMMA:", gamma, " D:", d, " ERROR:", error)
if error < smallest[2]: smallest = [gamma, d, error]
return smallest
def calculate_predictions(data, y, widths, activations, weights):
predictions = copy.deepcopy(y)
for i in range(len(data)):
predictions[i] = np.sign(run_forward_pass(weights, data[i], widths, activations)[-1])
return predictions
def calculate_error(y, predictions):
return 1 - np.count_nonzero(np.multiply(y, predictions) == 1) / len(y)
| loss = []
for epoch in range(100):
learning_rate = update_learning_rate(initial_gamma, d, epoch)
[y, x] = shuffle_data(train_y, train_data)
l = 0
for i in range(n):
nodes = run_forward_pass(weights, x[i], widths, activations)
prediction = np.sign(nodes[-1])
weights_grad = run_backpropagation(weights, nodes, y[i], prediction, deriv_activations)
weights = update_weights(weights, learning_rate, weights_grad)
l += compute_loss(prediction, y[i])
loss.append(l)
# print("LOSS:", loss)
return weights
def create_weights(widths, zeros):
weights = []
for level in range(len(widths) - 2):
temp = []
for j in range(widths[level]):
if not zeros:
temp.append(np.random.normal(0, 0.1, widths[level + 1] - 1).tolist())
else:
temp.append([0] * (widths[level + 1] - 1))
weights.append(temp)
temp = []
for j in range(widths[level]):
if not zeros:
temp.append(np.random.normal(0, 0.1, 1).tolist())
else:
temp.append([1])
weights.append(temp)
return np.array(weights)
def shuffle_data(y, data):
"""Shuffles the given data by appending y to the data, then shuffling, then returns the separated data and y."""
combined = np.c_[data.reshape(len(data), -1), y.reshape(len(y), -1)]
np.random.shuffle(combined)
shuffled_data = combined[:, :data.size // len(data)].reshape(data.shape)
shuffled_y = combined[:, data.size // len(data):].reshape(y.shape)
return [shuffled_y, shuffled_data]
def update_learning_rate(initial_gamma, d, epoch):
return initial_gamma / (1.0 + epoch * (initial_gamma / d))
def update_weights(weights, learning_rate, weights_grad):
for i in range(len(weights_grad)):
for j in range(len(weights_grad[i])):
for k in range(len(weights_grad[i][j])):
if type(weights[i][j][k]) == np.matrix:
weights[i][j][k][0, 0] -= learning_rate * weights_grad[i][j][k][0, 0]
else:
weights[i][j][k] -= learning_rate * weights_grad[i][j][k]
return weights
def compute_loss(prediction, label):
return np.square(prediction[0] - label[0, 0]) / 2
# Forward Pass
def run_forward_pass(weights, example, widths, activations):
shape = []
for i in range(len(widths)):
shape.append(np.zeros(widths[i]))
nodes = np.array(shape)
nodes[0] = example
for i in range(1, len(nodes)):
nodes[i] = activations[i-1](widths[i], weights[i-1], nodes[i-1])
return nodes
def linear_activation(width, weights, prev_nodes):
curr_nodes = np.zeros(width)
for j in range(len(curr_nodes)):
for i in range(len(prev_nodes)):
curr_nodes[j] += prev_nodes[i] * weights[i][j]
return curr_nodes
def sigmoid_activation(width, weights, prev_nodes):
prev_nodes = copy.deepcopy(prev_nodes)
if prev_nodes.ndim > 1:
prev_nodes = np.asarray(prev_nodes.T)
prev_nodes = prev_nodes[:, 0]
curr_nodes = np.zeros(width)
curr_nodes[0] = 1
for j in range(len(curr_nodes) - 1):
z = 0
for i in range(len(prev_nodes)):
z += prev_nodes[i] * weights[i][j]
curr_nodes[j + 1] = compute_sigmoid(z)
return curr_nodes
def compute_sigmoid(z):
return 1/(1+np.exp(-z))
# Backpropagation
def run_backpropagation(weights, nodes, y, prediction, activations):
loss_deriv = prediction - y
prev_node_derivs = [loss_deriv]
weight_derivs = copy.deepcopy(weights)
is_last_level = True
for level in range(len(weights) - 1, -1, -1):
weight_derivs[level] = compute_weight_derivs(weight_derivs[level], prev_node_derivs, nodes[level+1], nodes[level], activations[level])
prev_node_derivs = compute_node_derivatives(weights[level], nodes[level], prev_node_derivs, is_last_level)
is_last_level = False
return weight_derivs
def compute_weight_derivs(weight_derivs, prev_node_derivs, prev_nodes, next_nodes, activation):
start = 0
if activation == sigmoid_activation_deriv: start = 1
for i in range(len(weight_derivs)):
for j in range(start, len(weight_derivs[i]) + start):
if next_nodes.ndim == 2:
next_nodes = copy.deepcopy(next_nodes)
next_nodes = np.asarray(next_nodes.T)
next_nodes = next_nodes[:, 0]
weight_derivs[i][j-start] = activation(prev_node_derivs[j], next_nodes[i], prev_nodes[j])
return weight_derivs
def linear_activation_deriv(prev_node_deriv, next_node, _):
return prev_node_deriv[0] * next_node
def sigmoid_activation_deriv(prev_node_deriv, next_node, prev_node):
return prev_node_deriv * next_node * prev_node * (1-prev_node)
def compute_node_derivatives(weights, curr_nodes, prev_node_derivs, is_last_level):
curr_node_derivs = np.zeros(curr_nodes.shape)
for i in range(len(curr_nodes)):
product = 0
for j in range(len(weights[i])):
k = j
if not is_last_level: k += 1
product += weights[i][j] * prev_node_derivs[k]
curr_node_derivs[i] = product
return curr_node_derivs
def import_data(path, num_examples):
"""Imports the data at the given path to a csv file with the given amount of examples."""
data = np.empty((num_examples, 5), dtype="float128")
y = np.empty((num_examples, 1), dtype="float128")
with open(path, 'r') as f:
i = 0
for line in f:
example = []
terms = line.strip().split(',')
for j in range(len(terms)):
if j == 4:
y[i] = 2 * float(terms[j]) - 1
else:
example.append(float(terms[j]))
data[i, 1:] = example
data[i, 0] = 1
i += 1
data = normalize(np.asmatrix(data), axis=0)
return [data, np.asmatrix(y)]
def run_example():
widths = np.array([3, 3, 3, 1])
train_x = np.array([1., 1., 1.])
train_y = np.array([1])
weights = np.array([
[[-1., 1.], [-2., 2.], [-3., 3.]],
[[-1., 1.], [-2., 2.], [-3., 3.]],
[[-1.], [2.], [-1.5]]
])
# weights = np.array([
# [np.array([-1, 1]), np.array([-2, 2]), np.array([-3, 3])],
# [np.array([-1, 1]), np.array([-2, 2]), np.array([-3, 3])],
# [np.array([-1]), np.array([2]), np.array([-1.5])]
# ])
activations = [sigmoid_activation, sigmoid_activation, linear_activation]
nodes = run_forward_pass(weights, train_x, widths, activations)
print("FORWARD PASS --------")
for i in range(len(nodes)):
print("LAYER:", i, ":", nodes[i])
print("BACKPROPAGATION --------")
deriv_activations = [sigmoid_activation_deriv, sigmoid_activation_deriv, linear_activation_deriv]
weights_grad = run_backpropagation(weights, nodes, train_y, np.sign(nodes[-1]), deriv_activations)
for level in weights_grad:
print(level)
print("__________")
weights = update_weights(weights, 0.01, weights_grad)
for level in weights:
print(level)
def run_example_sgd():
widths = np.array([3, 3, 3, 1])
activations = [sigmoid_activation, sigmoid_activation, linear_activation]
deriv_activations = [sigmoid_activation_deriv, sigmoid_activation_deriv, linear_activation_deriv]
run_sgd(1, 1, widths, activations, deriv_activations, 1, False)
if __name__ == '__main__':
if sys.argv[1] == "example": run_example()
elif sys.argv[1] == "example_sgd":
train_data = np.array([1., 1., 1.])
train_data = train_data[np.newaxis, :]
train_y = np.array([1])
train_y = train_y[:, np.newaxis]
run_example_sgd()
elif sys.argv[1] == "ann":
[train_data, train_y] = import_data("./bank-note/train.csv", 872)
[test_data, test_y] = import_data("./bank-note/test.csv", 500)
run_ann(False)
elif sys.argv[1] == "zeros":
[train_data, train_y] = import_data("./bank-note/train.csv", 872)
[test_data, test_y] = import_data("./bank-note/test.csv", 500)
run_ann(True) |
def run_sgd(initial_gamma, d, widths, activations, deriv_activations, zeros, n=872):
weights = create_weights(widths, zeros) | random_line_split |
updateTotalCompany.js | /*
用于全量更新[tCR0001_V2.0]表中的company信息
wrote by tzf, 2017/12/8
*/
const req = require('require-yml');
const Db = require('mssql');
const Mssql = req('./lib/mssql');
const Pool = req('./lib/pool');
const config = req('./config/source.yml');
const log4js = require('log4js');
const fs = require('fs');
const writeLineStream = require('lei-stream').writeLine;
const transactions = require('./transactions.js');
const UUID = require('uuid');
const NodeCache = require("node-cache");
const myCache = new NodeCache({ stdTTL: 100, checkperiod: 120 }); //缓存失效时间3h
log4js.configure({
appenders: {
'out': {
type: 'file', //文件输出
filename: 'logs/updateData.log',
maxLogSize: config.logInfo.maxLogSize
}
},
categories: { default: { appenders: ['out'], level: 'info' } }
});
const logger = log4js.getLogger();
//set汇率转换
async function setRateConvert() {
return new Promise(async (resolve, reject) => {
try {
let now = Date.now();
let sql = 'select RE9003_001,RE9003_002,RE9003_003,RE9003_004,RE9003_005,RE9003_006,RE9003_007,RE9003_008,RE9003_009,RE9003_010 from [dbo].[tRE9003] where flag<> 1';
let res = await Mssql.connect(config.mssql_rate).query(sql);
let setRateConvertCost = Date.now() - now;
let rows = res.recordset;
let fetched = rows.length; //每次查询SQL Server的实际记录数
if (fetched > 0) {
for (let i = 0; i < rows.length; i++) {
let MY = rows[i].RE9003_001; //美元
let OY = rows[i].RE9003_002; //欧元
let RY = rows[i].RE9003_003; //日元
let GY = rows[i].RE9003_004; //港元
let YB = rows[i].RE9003_005; //英镑
let JNDY = rows[i].RE9003_006; //加拿大元
let ODLYY = rows[i].RE9003_007; //澳大利亚元
let XXLY = rows[i].RE9003_008; //新西兰元
let XJPY = rows[i].RE9003_009; //新加坡元
let RSFL = rows[i].RE9003_010; //瑞士法郎
let obj = {
MY: `${MY}`,
OY: `${OY}`,
RY: `${RY}`,
GY: `${GY}`,
YB: `${YB}`,
JNDY: `${JNDY}`,
ODLYY: `${ODLYY}`,
XXLY: `${XXLY}`,
XJPY: `${XJPY}`,
RSFL: `${RSFL}`
};
myCache.set("currencyRate", obj, function (err, success) {
if (!err && success) {
return resolve(success);
console.log(success);
logger.info('myCache set currencyRate status: ' + success);
console.log('setRateConvertCost: ' + setRateConvertCost + 'ms');
logger.info('setRateConvertCost: ' + setRateConvertCost + 'ms');
}
});
}
}
} catch (err) {
console.error(err);
logger.error(err);
return reject(err);
}
});
}
//get汇率转换
function getRateConvert() {
let res = {};
try {
myCache.get("currencyRate", function (err, value) {
if (!err) {
| (value == undefined) {
console.log('can not get the currencyRate value');
logger.info('can not get the currencyRate value');
return ({});
} else {
res = value;
console.log('the currencyRate value: ' + '美元: ' + value.MY + ', 欧元: ' + value.OY + ', 日元: ' + value.RY + ', 香港元: ' + value.GY + ', 英镑: ' + value.YB + ', 加拿大元: ' + value.JNDY + ', 澳大利亚元: ' + value.ODLYY + ', 新西兰元: ' + value.XXLY + ', 新加坡元: ' + value.XJPY + ', 瑞士法郎: ' + value.RSFL);
logger.info('the currencyRate value: ' + '美元: ' + value.MY + ', 欧元: ' + value.OY + ', 日元: ' + value.RY + ', 香港元: ' + value.GY + ', 英镑: ' + value.YB + ', 加拿大元: ' + value.JNDY + ', 澳大利亚元: ' + value.ODLYY + ', 新西兰元: ' + value.XXLY + ', 新加坡元: ' + value.XJPY + ', 瑞士法郎: ' + value.RSFL);
}
}
});
return res;
} catch (err) {
console.log(err);
logger.error(err);
return reject(err);
}
}
//判断货币类型
function judgeCurrencyFlag(currencyFlag) {
let rateFlag = 'RMB';
if (currencyFlag == 840) rateFlag = 'MY';
else if (currencyFlag == 954) rateFlag = 'OY';
else if (currencyFlag == 392) rateFlag = 'RY';
else if (currencyFlag == 344) rateFlag = 'GY';
else if (currencyFlag == 826) rateFlag = 'YB';
else if (currencyFlag == 124) rateFlag = 'JNDY';
else if (currencyFlag == 36) rateFlag = 'ODLYY';
else if (currencyFlag == 554) rateFlag = 'XXLY';
else if (currencyFlag == 702) rateFlag = 'XJPY';
else if (currencyFlag == 756) rateFlag = 'RSFL';
else if (currencyFlag == 156 || currencyFlag == 0) rateFlag = 'RMB';
return rateFlag;
}
let updateTotalCompany = {
startQueryCompany: async function (flag) {
if (flag) {
try {
//set汇率转换
let covertFlag = await setRateConvert();
let rateValueMap = {};
if (covertFlag == true) {
rateValueMap = getRateConvert();
}
let id = config.updateInfo.companyId;
let i = 1;
let ctx = await transactions.getContext(id);
let fetched = 0;
if (!ctx.last)
ctx.last = 0; //全量更新置0
let resultCount = 0;
let startTime = Date.now();
let updateInfo = {'logInfo': '', 'updateStatus': 0};
let CSVFilePath = '../neo4jDB_update/totalData/companies.csv'; //windows
// writeLineStream第一个参数为ReadStream实例,也可以为文件名
let w = writeLineStream(fs.createWriteStream(CSVFilePath), {
// 换行符,默认\n
newline: '\n',
// 编码器,可以为函数或字符串(内置编码器:json,base64),默认null
encoding: function (data) {
return data;
},
// 缓存的行数,默认为0(表示不缓存),此选项主要用于优化写文件性能,当数量缓存的内容超过该数量时再一次性写入到流中,可以提高写速度
cacheLines: 0
});
// let line1 = 'ITCode2:ID,ITName:string';
let line1 = 'timestamp:string,isPerson:string,ITCode2:ID,RMBFund:float,regFund:float,regFundUnit:string,isExtra:string,surStatus:string,originTable:string,isBranches:string';
w.write(line1);
let originTable = 'tCR0001_V2.0'; //数据来源
do {
let rows = [];
let now = Date.now();
let sql = `
select top 10000 cast(tmstamp as bigint) as _ts, ITCode2,CR0001_005,CR0001_006,CR0001_040,CR0001_041 from [tCR0001_V2.0] WITH(READPAST)
where flag<> 1 and tmstamp > cast( cast(${ctx.last} as bigint) as binary(8)) order by tmstamp;
`;
let res = await Mssql.connect(config.mssql).query(sql);
let queryCost = Date.now() - now;
rows = res.recordset;
fetched = rows.length; //每次查询SQL Server的实际记录数
writeStart = Date.now();
if (fetched > 0) {
resultCount += fetched;
let lines = [];
let codes = [];
for (let i = 0; i < rows.length; i++) {
let rate = null; //汇率标识
let rateValue = 1;
let ITCode = rows[i].ITCode2;
let timestamp = rows[i]._ts;
if (ITCode) {
codes.push(ITCode);
}
if (!ITCode) { //如果ITCode为null,则传入UUID,并在node上的isExtra置1;
ITCode = rows[i]._ts + transactions.createRndNum(6); //产生6位随机数 + timestamp作为ITCode
isExtra = 1; //1代表没有机构代码
}
else {
isExtra = 0;
}
let fund = rows[i].CR0001_005; //注册资金,未换算的值
let currencyUnit = rows[i].CR0001_006; //货币类型
let currencyFlag = rows[i].CR0001_040; //货币种类标识
let surStatus = rows[i].CR0001_041; //续存状态
if (!surStatus) surStatus = 1; //默认为1
if (!currencyFlag) currencyFlag = 0;
if (!currencyUnit) currencyUnit = '万人民币元';
if (currencyFlag != null) {
rate = judgeCurrencyFlag(currencyFlag);
}
if (!fund) fund = 0;
if (rate == 'RMB') rateValue = 1;
else if (rate != null) {
rateValue = parseFloat(rateValueMap[`${rate}`]);
}
let RMBFund = fund * rateValue;
lines.push([timestamp, 0, ITCode, RMBFund, fund, currencyUnit, isExtra, surStatus, originTable]);
}
let branches = null;
let retryCount = 0;
do {
try {
branches = await transactions.judgeBranches(codes);
break;
} catch (err) {
retryCount++;
console.error(err);
logger.error(err);
}
} while (retryCount < 3)
if (retryCount == 3) {
console.error('全量更新表tCR0001_V2.0中company信息,查询分支机构失败');
logger.error('全量更新表tCR0001_V2.0中company信息,查询分支机构失败');
// break;
return updateInfo;
}
for (let i = 0; i < lines.length; ++i) {
let line = lines[i];
// let isBranches = 0; //初始化分支机构属性,0表示不是分支机构,1表示是分支机构
if (branches[i] == "") {
branches[i] = 0;
} else if (branches[i] != "") {
branches[i] = 1;
}
line.push(branches[i]);
w.write(line.join(","));
}
ctx.last = rows[fetched - 1]._ts;
ctx.updatetime = now;
ctx.latestUpdated = resultCount;
// 保存同步到的位置
transactions.saveContext(id, ctx)
.catch(err => console.error(err));
writeCost = Date.now() - writeStart;
if (fetched > 0)
logger.info(`Total table: 'tCR0001_V2.0' qry:${queryCost} ms; result:${fetched}` +', writeCost: ' + writeCost + 'ms' + ', 读写次数: ' + i + ', last timestamp: '+ ctx.last);
console.log('全量更新表tCR0001_V2.0中company信息,读写次数: ' + i + ', 查询SQLServer耗时:' + queryCost + 'ms' +', writeCost: ' + writeCost + 'ms' + ', last timestamp: '+ ctx.last);
i++;
//for test
// if(i == 2 )
// break;
}
} while (fetched >= 10000);
// 结束
w.end(function () {
// 回调函数可选
console.log('companies.csv write end');
logger.info('companies.csv write end');
});
let totalCost = Date.now() - startTime;
logInfo = '全量更新表tCR0001_V2.0中company信息,总耗时: ' + totalCost + ', 更新记录数: ' + resultCount;
updateStatus = 1;
updateInfo.status = updateStatus;
updateInfo.info = logInfo;
logger.info(`counts: ` + i++ + `, totalConst :${totalCost} ms; resultCount: ${resultCount}`);
console.log(logInfo);
return updateInfo;
} catch (err) {
console.error(err);
logger.error(err);
return err;
}
}
}
}
module.exports = updateTotalCompany; | if | identifier_name |
updateTotalCompany.js | /*
用于全量更新[tCR0001_V2.0]表中的company信息
wrote by tzf, 2017/12/8
*/
const req = require('require-yml');
const Db = require('mssql');
const Mssql = req('./lib/mssql');
const Pool = req('./lib/pool');
const config = req('./config/source.yml');
const log4js = require('log4js');
const fs = require('fs');
const writeLineStream = require('lei-stream').writeLine;
const transactions = require('./transactions.js');
const UUID = require('uuid');
const NodeCache = require("node-cache");
const myCache = new NodeCache({ stdTTL: 100, checkperiod: 120 }); //缓存失效时间3h
log4js.configure({
appenders: {
'out': {
type: 'file', //文件输出
filename: 'logs/updateData.log',
maxLogSize: config.logInfo.maxLogSize
}
},
categories: { default: { appenders: ['out'], level: 'info' } }
});
const logger = log4js.getLogger();
//set汇率转换
async function setRateConvert() {
return new Promise(async (resolve, reject) => {
try {
let now = Date.now();
let sql = 'select RE9003_001,RE9003_002,RE9003_003,RE9003_004,RE9003_005,RE9003_006,RE9003_007,RE9003_008,RE9003_009,RE9003_010 from [dbo].[tRE9003] where flag<> 1';
let res = await Mssql.connect(config.mssql_rate).query(sql);
let setRateConvertCost = Date.now() - now;
let rows = res.recordset;
let fetched = rows.length; //每次查询SQL Server的实际记录数
if (fetched > 0) {
for (let i = 0; i < rows.length; i++) {
let MY = rows[i].RE9003_001; //美元
let OY = rows[i].RE9003_002; //欧元
let RY = rows[i].RE9003_003; //日元
let GY = rows[i].RE9003_004; //港元
let YB = rows[i].RE9003_005; //英镑
let JNDY = rows[i].RE9003_006; //加拿大元
let ODLYY = rows[i].RE9003_007; //澳大利亚元
let XXLY = rows[i].RE9003_008; //新西兰元
let XJPY = rows[i].RE9003_009; //新加坡元
let RSFL = rows[i].RE9003_010; //瑞士法郎
let obj = {
MY: `${MY}`,
OY: `${OY}`,
RY: `${RY}`,
GY: `${GY}`,
YB: `${YB}`,
JNDY: `${JNDY}`,
ODLYY: `${ODLYY}`,
XXLY: `${XXLY}`,
XJPY: `${XJPY}`,
RSFL: `${RSFL}`
};
myCache.set("currencyRate", obj, function (err, success) {
if (!err && success) {
return resolve(success);
console.log(success);
logger.info('myCache set currencyRate status: ' + success);
console.log('setRateConvertCost: ' + setRateConvertCost + 'ms');
logger.info('setRateConvertCost: ' + setRateConvertCost + 'ms');
}
});
}
}
} catch (err) {
console.error(err);
logger.error(err);
return reject(err);
}
});
}
//get汇率转换
function getRateConvert() {
let res = {};
try {
myCache.get("currencyRate", function (err, value) {
if (!err) {
if (value == undefined) {
console.log('can not get the currencyRate value');
logger.info('can not get the currencyRate value');
return ({});
} else {
res = value;
console.log('the currencyRate value: ' + '美元: ' + value.MY + ', 欧元: ' + value.OY + ', 日元: ' + value.RY + ', 香港元: ' + value.GY + ', 英镑: ' + value.YB + ', 加拿大元: ' + value.JNDY + ', 澳大利亚元: ' + value.ODLYY + ', 新西兰元: ' + value.XXLY + ', 新加坡元: ' + value.XJPY + ', 瑞士法郎: ' + value.RSFL);
logger.info('the currencyRate value: ' + '美元: ' + value.MY + ', 欧元: ' + value.OY + ', 日元: ' + value.RY + ', 香港元: ' + value.GY + ', 英镑: ' + value.YB + ', 加拿大元: ' + value.JNDY + ', 澳大利亚元: ' + value.ODLYY + ', 新西兰元: ' + value.XXLY + ', 新加坡元: ' + value.XJPY + ', 瑞士法郎: ' + value.RSFL);
}
}
});
return res;
} catch (err) {
console.log(err);
logger.error(err);
return reject(err);
}
}
//判断货币类型
function judgeCurrencyFlag(currencyFlag) {
let rateFlag = 'RMB';
if (currencyFlag == 840) rateFlag = 'MY';
else if (currencyFlag == 954) rateFlag = 'OY';
else if (currencyFlag == 392) rateFlag = 'RY';
else if (currencyFlag == 344) rateFlag = 'GY';
else if (currencyFlag == 826) rateFlag = 'YB';
else if (currencyFlag == 124) rateFlag = 'JNDY';
else if (currencyFlag == 36) rateFlag = 'ODLYY';
else if (currencyFlag == 554) rateFlag = 'XXLY';
else if (currencyFlag == 702) rateFlag = 'XJPY';
else if (currencyFlag == 756) rateFlag = 'RSFL';
else if (currencyFlag == 156 || currencyFlag == 0) rateFlag = 'RMB';
return rateFlag;
}
let updateTotalCompany = {
startQueryCompany: async function (flag) {
if (flag) {
try {
//set汇率转换
let covertFlag = await setRateConvert();
let rateValueMap = {};
if (covertFlag == true) {
rateValueMap = getRateConvert();
}
let id = config.updateInfo.companyId;
let i = 1;
let ctx = await transactions.getContext(id);
let fetched = 0;
if (!ctx.last)
ctx.last = 0; //全量更新置0
let resultCount = 0;
let startTime = Date.now();
let updateInfo = {'logInfo': '', 'updateStatus': 0};
let CSVFilePath = '../neo4jDB_update/totalData/companies.csv'; //windows
// writeLineStream第一个参数为ReadStream实例,也可以为文件名
let w = writeLineStream(fs.createWriteStream(CSVFilePath), {
// 换行符,默认\n
newline: '\n',
// 编码器,可以为函数或字符串(内置编码器:json,base64),默认null
encoding: function (data) {
return data;
},
// 缓存的行数,默认为0(表示不缓存),此选项主要用于优化写文件性能,当数量缓存的内容超过该数量时再一次性写入到流中,可以提高写速度
cacheLines: 0
});
// let line1 = 'ITCode2:ID,ITName:string';
let line1 = 'timestamp:string,isPerson:string,ITCode2:ID,RMBFund:float,regFund:float,regFundUnit:string,isExtra:string,surStatus:string,originTable:string,isBranches:string';
w.write(line1);
let originTable = 'tCR0001_V2.0'; //数据来源
do {
let rows = [];
let now = Date.now();
let sql = `
select top 10000 cast(tmstamp as bigint) as _ts, ITCode2,CR0001_005,CR0001_006,CR0001_040,CR0001_041 from [tCR0001_V2.0] WITH(READPAST)
where flag<> 1 and tmstamp > cast( cast(${ctx.last} as bigint) as binary(8)) order by tmstamp;
`;
let res = await Mssql.connect(config.mssql).query(sql);
let queryCost = Date.now() - now;
rows = res.recordset;
fetched = rows.length; //每次查询SQL Server的实际记录数
writeStart = Date.now();
if (fetched > 0) {
resultCount += fetched;
let lines = [];
let codes = [];
for (let i = 0; i < rows.length; i++) {
let rate = null; //汇率标识
let rateValue = 1;
let ITCode = rows[i].ITCode2;
let timestamp = rows[i]._ts;
if (ITCode) {
codes.push(ITCode);
}
if (!ITCode) { //如果ITCode为null,则传入UUID,并在node上的isExtra置1;
ITCode = rows[i]._ts + transactions.createRndNum(6); //产生6位随机数 + timestamp作为ITCode
isExtra = 1; //1代表没有机构代码
}
else {
isEx | i].CR0001_005; //注册资金,未换算的值
let currencyUnit = rows[i].CR0001_006; //货币类型
let currencyFlag = rows[i].CR0001_040; //货币种类标识
let surStatus = rows[i].CR0001_041; //续存状态
if (!surStatus) surStatus = 1; //默认为1
if (!currencyFlag) currencyFlag = 0;
if (!currencyUnit) currencyUnit = '万人民币元';
if (currencyFlag != null) {
rate = judgeCurrencyFlag(currencyFlag);
}
if (!fund) fund = 0;
if (rate == 'RMB') rateValue = 1;
else if (rate != null) {
rateValue = parseFloat(rateValueMap[`${rate}`]);
}
let RMBFund = fund * rateValue;
lines.push([timestamp, 0, ITCode, RMBFund, fund, currencyUnit, isExtra, surStatus, originTable]);
}
let branches = null;
let retryCount = 0;
do {
try {
branches = await transactions.judgeBranches(codes);
break;
} catch (err) {
retryCount++;
console.error(err);
logger.error(err);
}
} while (retryCount < 3)
if (retryCount == 3) {
console.error('全量更新表tCR0001_V2.0中company信息,查询分支机构失败');
logger.error('全量更新表tCR0001_V2.0中company信息,查询分支机构失败');
// break;
return updateInfo;
}
for (let i = 0; i < lines.length; ++i) {
let line = lines[i];
// let isBranches = 0; //初始化分支机构属性,0表示不是分支机构,1表示是分支机构
if (branches[i] == "") {
branches[i] = 0;
} else if (branches[i] != "") {
branches[i] = 1;
}
line.push(branches[i]);
w.write(line.join(","));
}
ctx.last = rows[fetched - 1]._ts;
ctx.updatetime = now;
ctx.latestUpdated = resultCount;
// 保存同步到的位置
transactions.saveContext(id, ctx)
.catch(err => console.error(err));
writeCost = Date.now() - writeStart;
if (fetched > 0)
logger.info(`Total table: 'tCR0001_V2.0' qry:${queryCost} ms; result:${fetched}` +', writeCost: ' + writeCost + 'ms' + ', 读写次数: ' + i + ', last timestamp: '+ ctx.last);
console.log('全量更新表tCR0001_V2.0中company信息,读写次数: ' + i + ', 查询SQLServer耗时:' + queryCost + 'ms' +', writeCost: ' + writeCost + 'ms' + ', last timestamp: '+ ctx.last);
i++;
//for test
// if(i == 2 )
// break;
}
} while (fetched >= 10000);
// 结束
w.end(function () {
// 回调函数可选
console.log('companies.csv write end');
logger.info('companies.csv write end');
});
let totalCost = Date.now() - startTime;
logInfo = '全量更新表tCR0001_V2.0中company信息,总耗时: ' + totalCost + ', 更新记录数: ' + resultCount;
updateStatus = 1;
updateInfo.status = updateStatus;
updateInfo.info = logInfo;
logger.info(`counts: ` + i++ + `, totalConst :${totalCost} ms; resultCount: ${resultCount}`);
console.log(logInfo);
return updateInfo;
} catch (err) {
console.error(err);
logger.error(err);
return err;
}
}
}
}
module.exports = updateTotalCompany; | tra = 0;
}
let fund = rows[ | conditional_block |
updateTotalCompany.js | /*
用于全量更新[tCR0001_V2.0]表中的company信息
wrote by tzf, 2017/12/8
*/
const req = require('require-yml');
const Db = require('mssql');
const Mssql = req('./lib/mssql');
const Pool = req('./lib/pool');
const config = req('./config/source.yml');
const log4js = require('log4js');
const fs = require('fs');
const writeLineStream = require('lei-stream').writeLine;
const transactions = require('./transactions.js');
const UUID = require('uuid');
const NodeCache = require("node-cache");
const myCache = new NodeCache({ stdTTL: 100, checkperiod: 120 }); //缓存失效时间3h
log4js.configure({
appenders: {
'out': {
type: 'file', //文件输出
filename: 'logs/updateData.log',
maxLogSize: config.logInfo.maxLogSize
}
},
categories: { default: { appenders: ['out'], level: 'info' } }
});
const logger = log4js.getLogger();
//set汇率转换
async function setRateConvert() {
return new Promise(async (resolve, reject) => {
try {
let now = Date.now();
let sql = 'select RE9003_001,RE9003_002,RE9003_003,RE9003_004,RE9003_005,RE9003_006,RE9003_007,RE9003_008,RE9003_009,RE9003_010 from [dbo].[tRE9003] where flag<> 1';
let res = await Mssql.connect(config.mssql_rate).query(sql);
let setRateConvertCost = Date.now() - now;
let rows = res.recordset;
let fetched = rows.length; //每次查询SQL Server的实际记录数
if (fetched > 0) {
for (let i = 0; i < rows.length; i++) {
let MY = rows[i].RE9003_001; //美元
let OY = rows[i].RE9003_002; //欧元
let RY = rows[i].RE9003_003; //日元
let GY = rows[i].RE9003_004; //港元
let YB = rows[i].RE9003_005; //英镑
let JNDY = rows[i].RE9003_006; //加拿大元
let ODLYY = rows[i].RE9003_007; //澳大利亚元
let XXLY = rows[i].RE9003_008; //新西兰元
let XJPY = rows[i].RE9003_009; //新加坡元
let RSFL = rows[i].RE9003_010; //瑞士法郎
let obj = {
MY: `${MY}`,
OY: `${OY}`,
RY: `${RY}`,
GY: `${GY}`,
YB: `${YB}`,
JNDY: `${JNDY}`,
ODLYY: `${ODLYY}`,
XXLY: `${XXLY}`,
XJPY: `${XJPY}`,
RSFL: `${RSFL}`
};
myCache.set("currencyRate", obj, function (err, success) {
if (!err && success) {
return resolve(success);
console.log(success);
logger.info('myCache set currencyRate status: ' + success);
console.log('setRateConvertCost: ' + setRateConvertCost + 'ms');
logger.info('setRateConvertCost: ' + setRateConvertCost + 'ms');
}
});
}
}
} catch (err) {
console.error(err);
logger.error(err);
return reject(err);
}
});
}
//get汇率转换
function getRateConvert() {
let res = {};
try {
myCache.get("currencyRate", function (err, value) {
if (!err) {
if (v | ) rateFlag = 'GY';
else if (currencyFlag == 826) rateFlag = 'YB';
else if (currencyFlag == 124) rateFlag = 'JNDY';
else if (currencyFlag == 36) rateFlag = 'ODLYY';
else if (currencyFlag == 554) rateFlag = 'XXLY';
else if (currencyFlag == 702) rateFlag = 'XJPY';
else if (currencyFlag == 756) rateFlag = 'RSFL';
else if (currencyFlag == 156 || currencyFlag == 0) rateFlag = 'RMB';
return rateFlag;
}
let updateTotalCompany = {
startQueryCompany: async function (flag) {
if (flag) {
try {
//set汇率转换
let covertFlag = await setRateConvert();
let rateValueMap = {};
if (covertFlag == true) {
rateValueMap = getRateConvert();
}
let id = config.updateInfo.companyId;
let i = 1;
let ctx = await transactions.getContext(id);
let fetched = 0;
if (!ctx.last)
ctx.last = 0; //全量更新置0
let resultCount = 0;
let startTime = Date.now();
let updateInfo = {'logInfo': '', 'updateStatus': 0};
let CSVFilePath = '../neo4jDB_update/totalData/companies.csv'; //windows
// writeLineStream第一个参数为ReadStream实例,也可以为文件名
let w = writeLineStream(fs.createWriteStream(CSVFilePath), {
// 换行符,默认\n
newline: '\n',
// 编码器,可以为函数或字符串(内置编码器:json,base64),默认null
encoding: function (data) {
return data;
},
// 缓存的行数,默认为0(表示不缓存),此选项主要用于优化写文件性能,当数量缓存的内容超过该数量时再一次性写入到流中,可以提高写速度
cacheLines: 0
});
// let line1 = 'ITCode2:ID,ITName:string';
let line1 = 'timestamp:string,isPerson:string,ITCode2:ID,RMBFund:float,regFund:float,regFundUnit:string,isExtra:string,surStatus:string,originTable:string,isBranches:string';
w.write(line1);
let originTable = 'tCR0001_V2.0'; //数据来源
do {
let rows = [];
let now = Date.now();
let sql = `
select top 10000 cast(tmstamp as bigint) as _ts, ITCode2,CR0001_005,CR0001_006,CR0001_040,CR0001_041 from [tCR0001_V2.0] WITH(READPAST)
where flag<> 1 and tmstamp > cast( cast(${ctx.last} as bigint) as binary(8)) order by tmstamp;
`;
let res = await Mssql.connect(config.mssql).query(sql);
let queryCost = Date.now() - now;
rows = res.recordset;
fetched = rows.length; //每次查询SQL Server的实际记录数
writeStart = Date.now();
if (fetched > 0) {
resultCount += fetched;
let lines = [];
let codes = [];
for (let i = 0; i < rows.length; i++) {
let rate = null; //汇率标识
let rateValue = 1;
let ITCode = rows[i].ITCode2;
let timestamp = rows[i]._ts;
if (ITCode) {
codes.push(ITCode);
}
if (!ITCode) { //如果ITCode为null,则传入UUID,并在node上的isExtra置1;
ITCode = rows[i]._ts + transactions.createRndNum(6); //产生6位随机数 + timestamp作为ITCode
isExtra = 1; //1代表没有机构代码
}
else {
isExtra = 0;
}
let fund = rows[i].CR0001_005; //注册资金,未换算的值
let currencyUnit = rows[i].CR0001_006; //货币类型
let currencyFlag = rows[i].CR0001_040; //货币种类标识
let surStatus = rows[i].CR0001_041; //续存状态
if (!surStatus) surStatus = 1; //默认为1
if (!currencyFlag) currencyFlag = 0;
if (!currencyUnit) currencyUnit = '万人民币元';
if (currencyFlag != null) {
rate = judgeCurrencyFlag(currencyFlag);
}
if (!fund) fund = 0;
if (rate == 'RMB') rateValue = 1;
else if (rate != null) {
rateValue = parseFloat(rateValueMap[`${rate}`]);
}
let RMBFund = fund * rateValue;
lines.push([timestamp, 0, ITCode, RMBFund, fund, currencyUnit, isExtra, surStatus, originTable]);
}
let branches = null;
let retryCount = 0;
do {
try {
branches = await transactions.judgeBranches(codes);
break;
} catch (err) {
retryCount++;
console.error(err);
logger.error(err);
}
} while (retryCount < 3)
if (retryCount == 3) {
console.error('全量更新表tCR0001_V2.0中company信息,查询分支机构失败');
logger.error('全量更新表tCR0001_V2.0中company信息,查询分支机构失败');
// break;
return updateInfo;
}
for (let i = 0; i < lines.length; ++i) {
let line = lines[i];
// let isBranches = 0; //初始化分支机构属性,0表示不是分支机构,1表示是分支机构
if (branches[i] == "") {
branches[i] = 0;
} else if (branches[i] != "") {
branches[i] = 1;
}
line.push(branches[i]);
w.write(line.join(","));
}
ctx.last = rows[fetched - 1]._ts;
ctx.updatetime = now;
ctx.latestUpdated = resultCount;
// 保存同步到的位置
transactions.saveContext(id, ctx)
.catch(err => console.error(err));
writeCost = Date.now() - writeStart;
if (fetched > 0)
logger.info(`Total table: 'tCR0001_V2.0' qry:${queryCost} ms; result:${fetched}` +', writeCost: ' + writeCost + 'ms' + ', 读写次数: ' + i + ', last timestamp: '+ ctx.last);
console.log('全量更新表tCR0001_V2.0中company信息,读写次数: ' + i + ', 查询SQLServer耗时:' + queryCost + 'ms' +', writeCost: ' + writeCost + 'ms' + ', last timestamp: '+ ctx.last);
i++;
//for test
// if(i == 2 )
// break;
}
} while (fetched >= 10000);
// 结束
w.end(function () {
// 回调函数可选
console.log('companies.csv write end');
logger.info('companies.csv write end');
});
let totalCost = Date.now() - startTime;
logInfo = '全量更新表tCR0001_V2.0中company信息,总耗时: ' + totalCost + ', 更新记录数: ' + resultCount;
updateStatus = 1;
updateInfo.status = updateStatus;
updateInfo.info = logInfo;
logger.info(`counts: ` + i++ + `, totalConst :${totalCost} ms; resultCount: ${resultCount}`);
console.log(logInfo);
return updateInfo;
} catch (err) {
console.error(err);
logger.error(err);
return err;
}
}
}
}
module.exports = updateTotalCompany; | alue == undefined) {
console.log('can not get the currencyRate value');
logger.info('can not get the currencyRate value');
return ({});
} else {
res = value;
console.log('the currencyRate value: ' + '美元: ' + value.MY + ', 欧元: ' + value.OY + ', 日元: ' + value.RY + ', 香港元: ' + value.GY + ', 英镑: ' + value.YB + ', 加拿大元: ' + value.JNDY + ', 澳大利亚元: ' + value.ODLYY + ', 新西兰元: ' + value.XXLY + ', 新加坡元: ' + value.XJPY + ', 瑞士法郎: ' + value.RSFL);
logger.info('the currencyRate value: ' + '美元: ' + value.MY + ', 欧元: ' + value.OY + ', 日元: ' + value.RY + ', 香港元: ' + value.GY + ', 英镑: ' + value.YB + ', 加拿大元: ' + value.JNDY + ', 澳大利亚元: ' + value.ODLYY + ', 新西兰元: ' + value.XXLY + ', 新加坡元: ' + value.XJPY + ', 瑞士法郎: ' + value.RSFL);
}
}
});
return res;
} catch (err) {
console.log(err);
logger.error(err);
return reject(err);
}
}
//判断货币类型
function judgeCurrencyFlag(currencyFlag) {
let rateFlag = 'RMB';
if (currencyFlag == 840) rateFlag = 'MY';
else if (currencyFlag == 954) rateFlag = 'OY';
else if (currencyFlag == 392) rateFlag = 'RY';
else if (currencyFlag == 344 | identifier_body |
updateTotalCompany.js | /*
用于全量更新[tCR0001_V2.0]表中的company信息
wrote by tzf, 2017/12/8
*/
const req = require('require-yml');
const Db = require('mssql');
const Mssql = req('./lib/mssql');
const Pool = req('./lib/pool');
const config = req('./config/source.yml');
const log4js = require('log4js');
const fs = require('fs');
const writeLineStream = require('lei-stream').writeLine;
const transactions = require('./transactions.js');
const UUID = require('uuid');
const NodeCache = require("node-cache");
const myCache = new NodeCache({ stdTTL: 100, checkperiod: 120 }); //缓存失效时间3h
log4js.configure({
appenders: {
'out': {
type: 'file', //文件输出
filename: 'logs/updateData.log',
maxLogSize: config.logInfo.maxLogSize
}
},
categories: { default: { appenders: ['out'], level: 'info' } }
});
const logger = log4js.getLogger();
//set汇率转换
async function setRateConvert() {
return new Promise(async (resolve, reject) => {
try {
let now = Date.now();
let sql = 'select RE9003_001,RE9003_002,RE9003_003,RE9003_004,RE9003_005,RE9003_006,RE9003_007,RE9003_008,RE9003_009,RE9003_010 from [dbo].[tRE9003] where flag<> 1';
let res = await Mssql.connect(config.mssql_rate).query(sql);
let setRateConvertCost = Date.now() - now;
let rows = res.recordset;
let fetched = rows.length; //每次查询SQL Server的实际记录数
if (fetched > 0) {
for (let i = 0; i < rows.length; i++) {
let MY = rows[i].RE9003_001; //美元
let OY = rows[i].RE9003_002; //欧元
let RY = rows[i].RE9003_003; //日元
let GY = rows[i].RE9003_004; //港元
let YB = rows[i].RE9003_005; //英镑
let JNDY = rows[i].RE9003_006; //加拿大元
let ODLYY = rows[i].RE9003_007; //澳大利亚元
let XXLY = rows[i].RE9003_008; //新西兰元
let XJPY = rows[i].RE9003_009; //新加坡元
let RSFL = rows[i].RE9003_010; //瑞士法郎
let obj = {
MY: `${MY}`,
OY: `${OY}`,
RY: `${RY}`,
GY: `${GY}`,
YB: `${YB}`,
JNDY: `${JNDY}`,
ODLYY: `${ODLYY}`,
XXLY: `${XXLY}`,
XJPY: `${XJPY}`,
RSFL: `${RSFL}`
};
myCache.set("currencyRate", obj, function (err, success) {
if (!err && success) {
return resolve(success);
console.log(success);
logger.info('myCache set currencyRate status: ' + success);
console.log('setRateConvertCost: ' + setRateConvertCost + 'ms');
logger.info('setRateConvertCost: ' + setRateConvertCost + 'ms');
}
});
}
}
} catch (err) {
console.error(err);
logger.error(err);
return reject(err);
}
});
}
//get汇率转换
function getRateConvert() {
let res = {};
try {
myCache.get("currencyRate", function (err, value) {
if (!err) {
if (value == undefined) {
console.log('can not get the currencyRate value');
logger.info('can not get the currencyRate value');
return ({});
} else {
res = value;
console.log('the currencyRate value: ' + '美元: ' + value.MY + ', 欧元: ' + value.OY + ', 日元: ' + value.RY + ', 香港元: ' + value.GY + ', 英镑: ' + value.YB + ', 加拿大元: ' + value.JNDY + ', 澳大利亚元: ' + value.ODLYY + ', 新西兰元: ' + value.XXLY + ', 新加坡元: ' + value.XJPY + ', 瑞士法郎: ' + value.RSFL);
logger.info('the currencyRate value: ' + '美元: ' + value.MY + ', 欧元: ' + value.OY + ', 日元: ' + value.RY + ', 香港元: ' + value.GY + ', 英镑: ' + value.YB + ', 加拿大元: ' + value.JNDY + ', 澳大利亚元: ' + value.ODLYY + ', 新西兰元: ' + value.XXLY + ', 新加坡元: ' + value.XJPY + ', 瑞士法郎: ' + value.RSFL);
}
}
});
return res;
} catch (err) {
console.log(err);
logger.error(err);
return reject(err);
}
}
//判断货币类型
function judgeCurrencyFlag(currencyFlag) {
let rateFlag = 'RMB';
if (currencyFlag == 840) rateFlag = 'MY';
else if (currencyFlag == 954) rateFlag = 'OY';
else if (currencyFlag == 392) rateFlag = 'RY';
else if (currencyFlag == 344) rateFlag = 'GY';
else if (currencyFlag == 826) rateFlag = 'YB';
else if (currencyFlag == 124) rateFlag = 'JNDY';
else if (currencyFlag == 36) rateFlag = 'ODLYY';
else if (currencyFlag == 554) rateFlag = 'XXLY';
else if (currencyFlag == 702) rateFlag = 'XJPY';
else if (currencyFlag == 756) rateFlag = 'RSFL';
else if (currencyFlag == 156 || currencyFlag == 0) rateFlag = 'RMB';
return rateFlag;
}
let updateTotalCompany = {
startQueryCompany: async function (flag) {
if (flag) {
try {
//set汇率转换
let covertFlag = await setRateConvert();
let rateValueMap = {};
if (covertFlag == true) {
rateValueMap = getRateConvert();
}
let id = config.updateInfo.companyId;
let i = 1;
let ctx = await transactions.getContext(id);
let fetched = 0;
if (!ctx.last)
ctx.last = 0; //全量更新置0
let resultCount = 0;
let startTime = Date.now();
let updateInfo = {'logInfo': '', 'updateStatus': 0};
let CSVFilePath = '../neo4jDB_update/totalData/companies.csv'; //windows
// writeLineStream第一个参数为ReadStream实例,也可以为文件名
let w = writeLineStream(fs.createWriteStream(CSVFilePath), {
// 换行符,默认\n
newline: '\n',
// 编码器,可以为函数或字符串(内置编码器:json,base64),默认null
encoding: function (data) {
return data;
},
// 缓存的行数,默认为0(表示不缓存),此选项主要用于优化写文件性能,当数量缓存的内容超过该数量时再一次性写入到流中,可以提高写速度
cacheLines: 0
});
// let line1 = 'ITCode2:ID,ITName:string';
let line1 = 'timestamp:string,isPerson:string,ITCode2:ID,RMBFund:float,regFund:float,regFundUnit:string,isExtra:string,surStatus:string,originTable:string,isBranches:string';
w.write(line1);
let originTable = 'tCR0001_V2.0'; //数据来源
| do {
let rows = [];
let now = Date.now();
let sql = `
select top 10000 cast(tmstamp as bigint) as _ts, ITCode2,CR0001_005,CR0001_006,CR0001_040,CR0001_041 from [tCR0001_V2.0] WITH(READPAST)
where flag<> 1 and tmstamp > cast( cast(${ctx.last} as bigint) as binary(8)) order by tmstamp;
`;
let res = await Mssql.connect(config.mssql).query(sql);
let queryCost = Date.now() - now;
rows = res.recordset;
fetched = rows.length; //每次查询SQL Server的实际记录数
writeStart = Date.now();
if (fetched > 0) {
resultCount += fetched;
let lines = [];
let codes = [];
for (let i = 0; i < rows.length; i++) {
let rate = null; //汇率标识
let rateValue = 1;
let ITCode = rows[i].ITCode2;
let timestamp = rows[i]._ts;
if (ITCode) {
codes.push(ITCode);
}
if (!ITCode) { //如果ITCode为null,则传入UUID,并在node上的isExtra置1;
ITCode = rows[i]._ts + transactions.createRndNum(6); //产生6位随机数 + timestamp作为ITCode
isExtra = 1; //1代表没有机构代码
}
else {
isExtra = 0;
}
let fund = rows[i].CR0001_005; //注册资金,未换算的值
let currencyUnit = rows[i].CR0001_006; //货币类型
let currencyFlag = rows[i].CR0001_040; //货币种类标识
let surStatus = rows[i].CR0001_041; //续存状态
if (!surStatus) surStatus = 1; //默认为1
if (!currencyFlag) currencyFlag = 0;
if (!currencyUnit) currencyUnit = '万人民币元';
if (currencyFlag != null) {
rate = judgeCurrencyFlag(currencyFlag);
}
if (!fund) fund = 0;
if (rate == 'RMB') rateValue = 1;
else if (rate != null) {
rateValue = parseFloat(rateValueMap[`${rate}`]);
}
let RMBFund = fund * rateValue;
lines.push([timestamp, 0, ITCode, RMBFund, fund, currencyUnit, isExtra, surStatus, originTable]);
}
let branches = null;
let retryCount = 0;
do {
try {
branches = await transactions.judgeBranches(codes);
break;
} catch (err) {
retryCount++;
console.error(err);
logger.error(err);
}
} while (retryCount < 3)
if (retryCount == 3) {
console.error('全量更新表tCR0001_V2.0中company信息,查询分支机构失败');
logger.error('全量更新表tCR0001_V2.0中company信息,查询分支机构失败');
// break;
return updateInfo;
}
for (let i = 0; i < lines.length; ++i) {
let line = lines[i];
// let isBranches = 0; //初始化分支机构属性,0表示不是分支机构,1表示是分支机构
if (branches[i] == "") {
branches[i] = 0;
} else if (branches[i] != "") {
branches[i] = 1;
}
line.push(branches[i]);
w.write(line.join(","));
}
ctx.last = rows[fetched - 1]._ts;
ctx.updatetime = now;
ctx.latestUpdated = resultCount;
// 保存同步到的位置
transactions.saveContext(id, ctx)
.catch(err => console.error(err));
writeCost = Date.now() - writeStart;
if (fetched > 0)
logger.info(`Total table: 'tCR0001_V2.0' qry:${queryCost} ms; result:${fetched}` +', writeCost: ' + writeCost + 'ms' + ', 读写次数: ' + i + ', last timestamp: '+ ctx.last);
console.log('全量更新表tCR0001_V2.0中company信息,读写次数: ' + i + ', 查询SQLServer耗时:' + queryCost + 'ms' +', writeCost: ' + writeCost + 'ms' + ', last timestamp: '+ ctx.last);
i++;
//for test
// if(i == 2 )
// break;
}
} while (fetched >= 10000);
// 结束
w.end(function () {
// 回调函数可选
console.log('companies.csv write end');
logger.info('companies.csv write end');
});
let totalCost = Date.now() - startTime;
logInfo = '全量更新表tCR0001_V2.0中company信息,总耗时: ' + totalCost + ', 更新记录数: ' + resultCount;
updateStatus = 1;
updateInfo.status = updateStatus;
updateInfo.info = logInfo;
logger.info(`counts: ` + i++ + `, totalConst :${totalCost} ms; resultCount: ${resultCount}`);
console.log(logInfo);
return updateInfo;
} catch (err) {
console.error(err);
logger.error(err);
return err;
}
}
}
}
module.exports = updateTotalCompany; | random_line_split | |
words.go | package words
import "math/rand"
import "strings"
import "time"
func BigWords() string {
abuff := []string{}
total := 0
words := 0
for {
w := ranWord()
words += 1
total += len(w)
abuff = append(abuff, w)
if total >= 40 && words > 4 {
break
}
}
buff := ""
for _, x := range abuff {
first := x[0:1]
last := x[1:len(x)]
first = strings.ToUpper(first)
buff += first + last
}
return buff
}
func ranWord() string {
list := []string{
"people",
"history",
"way",
"art",
"world",
"information",
"map",
"two",
"family",
"government",
"health",
"system",
"computer",
"meat",
"year",
"thanks",
"music",
"person",
"reading",
"method",
"data",
"food",
"understanding",
"theory",
"law",
"bird",
"literature",
"problem",
"software",
"control",
"knowledge",
"power",
"ability",
"economics",
"love",
"internet",
"television",
"science",
"library",
"nature",
"fact",
"product",
"idea",
"temperature",
"investment",
"area",
"society",
"activity",
"story",
"industry",
"media",
"thing",
"oven",
"community",
"definition",
"safety",
"quality",
"development",
"language",
"management",
"player",
"variety",
"video",
"week",
"security",
"country",
"exam",
"movie",
"organization",
"equipment",
"physics",
"analysis",
"policy",
"series",
"thought",
"basis",
"boyfriend",
"direction",
"strategy",
"technology",
"army",
"camera",
"freedom",
"paper",
"environment",
"child",
"instance",
"month",
"truth",
"marketing",
"university",
"writing",
"article",
"department",
"difference",
"goal",
"news",
"audience",
"fishing",
"growth",
"income",
"marriage",
"user",
"combination",
"failure",
"meaning",
"medicine",
"philosophy",
"teacher",
"communication",
"night",
"chemistry",
"disease",
"disk",
"energy",
"nation",
"road",
"role",
"soup",
"advertising",
"location",
"success",
"addition",
"apartment",
"education",
"math",
"moment",
"painting",
"politics",
"attention",
"decision",
"event",
"property",
"shopping",
"student",
"wood",
"competition",
"distribution",
"entertainment",
"office",
"population",
"president",
"unit",
"category",
"cigarette",
"context",
"introduction",
"opportunity",
"performance",
"driver",
"flight",
"length",
"magazine",
"newspaper",
"relationship",
"teaching",
"cell",
"dealer",
"finding",
"lake",
"member",
"message",
"phone",
"scene",
"appearance",
"association",
"concept",
"customer",
"death",
"discussion",
"housing",
"inflation",
"insurance",
"mood",
"woman",
"advice",
"blood",
"effort",
"expression",
"importance",
"opinion",
"payment",
"reality",
"responsibility",
"situation",
"skill",
"statement",
"wealth",
"application",
"city",
"county",
"depth",
"estate",
"foundation",
"grandmother",
"heart",
"perspective",
"photo",
"recipe",
"studio",
"topic",
"collection",
"depression",
"imagination",
"passion",
"percentage",
"resource",
"setting",
"ad",
"agency",
"college",
"connection",
"criticism",
"debt",
"description",
"memory",
"patience",
"secretary",
"solution",
"administration",
"aspect",
"attitude",
"director",
"personality",
"psychology",
"recommendation",
"response",
"selection",
"storage",
"version",
"alcohol",
"argument",
"complaint",
"contract",
"emphasis",
"highway",
"loss",
"membership",
"possession",
"preparation",
"steak",
"union",
"agreement",
"cancer",
"currency",
"employment",
"engineering",
"entry",
"interaction",
"mixture",
"preference",
"region",
"republic",
"tradition",
"virus",
"actor",
"classroom",
"delivery",
"device",
"difficulty",
"drama",
"election",
"engine",
"football",
"guidance",
"hotel",
"owner",
"priority",
"protection",
"suggestion",
"tension",
"variation",
"anxiety",
"atmosphere",
"awareness",
"bath",
"bread",
"candidate",
"climate",
"comparison",
"confusion",
"construction",
"elevator",
"emotion",
"employee",
"employer",
"guest",
"height",
"leadership",
"mall",
"manager",
"operation",
"recording",
"sample",
"transportation",
"charity",
"cousin",
"disaster",
"editor",
"efficiency",
"excitement",
"extent",
"feedback",
"guitar",
"homework",
"leader",
"mom",
"outcome",
"permission",
"presentation",
"promotion",
"reflection",
"refrigerator",
"resolution",
"revenue",
"session",
"singer",
"tennis",
"basket",
"bonus",
"cabinet",
"childhood",
"church",
"clothes",
"coffee",
"dinner",
"drawing",
"hair",
"hearing",
"initiative",
"judgment",
"lab",
"measurement",
"mode",
"mud",
"orange",
"poetry",
"police",
"possibility",
"procedure",
"queen",
"ratio",
"relation",
"restaurant",
"satisfaction",
"sector",
"signature",
"significance",
"song",
"tooth",
"town",
"vehicle",
"volume",
"wife",
"accident",
"airport",
"appointment",
"arrival",
"assumption",
"baseball",
"chapter",
"committee",
"conversation",
"database",
"enthusiasm",
"error",
"explanation",
"farmer",
"gate",
"girl",
"hall",
"historian",
"hospital",
"injury",
"instruction",
"maintenance",
"manufacturer",
"meal",
"perception",
"pie",
"poem",
"presence",
"proposal",
"reception",
"replacement",
"revolution",
"river",
"son",
"speech",
"tea",
"village",
"warning",
"winner",
"worker",
"writer",
"assistance",
"breath",
"buyer",
"chest",
"chocolate",
"conclusion",
"contribution",
"cookie",
"courage",
"dad",
"desk",
"drawer",
"establishment",
"examination",
"garbage",
"grocery",
"honey",
"impression",
"improvement",
"independence",
"insect",
"inspection",
"inspector",
"king",
"ladder",
"menu",
"penalty",
"piano",
"potato",
"profession",
"professor",
"quantity",
"reaction",
"requirement",
"salad",
"sister",
"supermarket",
"tongue",
"weakness",
"wedding",
"affair",
"ambition",
"analyst",
"apple",
"assignment",
"assistant",
"bathroom",
"bedroom",
"beer",
"birthday",
"celebration",
"championship",
"cheek",
"client",
"consequence",
"departure",
"diamond",
"dirt",
"ear",
"fortune",
"friendship",
"funeral",
"gene",
"girlfriend",
"hat",
"indication",
"intention",
"lady",
"midnight",
"negotiation",
"obligation",
"passenger",
"pizza",
"platform",
"poet",
"pollution",
"recognition",
"reputation",
"shirt",
"sir",
"speaker",
"stranger",
"surgery",
"sympathy",
"tale",
"throat",
"trainer",
"uncle",
"youth",
"time",
"work",
"film",
"water",
"money",
"example",
"while",
"business",
"study",
"game",
"life",
"form",
"air",
"day",
"place",
"number",
"part",
"field",
"fish",
"back",
"process",
"heat",
"hand",
"experience",
"job",
"book",
"end",
"point",
"type",
"home",
"economy",
"value",
"body",
"market",
"guide",
"interest",
"state",
"radio",
"course",
"company",
"price",
"size",
"card",
"list",
"mind",
"trade",
"line",
"care",
"group",
"risk",
"word",
"fat",
"force",
"key",
"light",
"training",
"name",
"school",
"top",
"amount",
"level",
"order",
"practice",
"research",
"sense",
"service",
"piece",
"web",
"boss",
"sport",
"fun",
"house",
"page",
"term",
"test",
"answer",
"sound",
"focus",
"matter",
"kind",
"soil",
"board",
"oil",
"picture",
"access",
"garden",
"range",
"rate",
"reason",
"future",
"site",
"demand",
"exercise",
"image",
"case",
"cause",
"coast",
"action",
"age",
"bad",
"boat",
"record",
"result",
"section",
"building",
"mouse",
"cash",
"class",
"nothing",
"period",
"plan",
"store",
"tax",
"side",
"subject",
"space",
"rule",
"stock",
"weather",
"chance",
"figure",
"man",
"model",
"source",
"beginning",
"earth",
"program",
"chicken",
"design",
"feature",
"head",
"material",
"purpose",
"question",
"rock",
"salt",
"act",
"birth",
"car",
"dog",
"object",
"scale",
"sun",
"note",
"profit",
"rent",
"speed",
"style",
"war",
"bank",
"craft",
"half",
"inside",
"outside",
"standard",
"bus",
"exchange",
"eye",
"fire",
"position",
"pressure",
"stress",
"advantage",
"benefit",
"box",
"frame",
"issue",
"step",
"cycle",
"face",
"item",
"metal",
"paint",
"review",
"room",
"screen",
"structure",
"view",
"account",
"ball",
"discipline",
"medium",
"share",
"balance",
"bit",
"black",
"bottom",
"choice",
"gift",
"impact",
"machine",
"shape",
"tool",
"wind",
"address",
"average",
"career",
"culture",
"morning",
"pot",
"sign",
"table",
"task",
"condition",
"contact",
"credit",
"egg",
"hope",
"ice",
"network",
"north",
"square",
"attempt",
"date",
"effect",
"link",
"post",
"star",
"voice",
"capital",
"challenge",
"friend",
"self",
"shot",
"brush",
"couple",
"debate",
"exit",
"front",
"function",
"lack",
"living",
"plant",
"plastic",
"spot",
"summer",
"taste",
"theme",
"track",
"wing",
"brain",
"button",
"click",
"desire",
"foot",
"gas",
"influence",
"notice",
"rain",
"wall",
"base",
"damage",
"distance",
"feeling",
"pair",
"savings",
"staff",
"sugar",
"target",
"text",
"animal",
"author",
"budget",
"discount",
"file",
"ground",
"lesson",
"minute",
"officer",
"phase",
"reference",
"register",
"sky",
"stage",
"stick",
"title",
"trouble",
"bowl",
"bridge",
"campaign",
"character",
"club",
"edge",
"evidence",
"fan",
"letter",
"lock",
"maximum",
"novel",
"option",
"pack",
"park",
"plenty",
"quarter",
"skin",
"sort",
"weight",
"baby",
"background",
"carry",
"dish",
"factor",
"fruit",
"glass",
"joint",
"master",
"muscle",
"red",
"strength",
"traffic",
"trip",
"vegetable",
"appeal",
"chart",
"gear",
"ideal",
"kitchen",
"land",
"log",
"mother",
"net",
"party",
"principle",
"relative",
"sale",
"season",
"signal", | "belt",
"bench",
"commission",
"copy",
"drop",
"minimum",
"path",
"progress",
"project",
"sea",
"south",
"status",
"stuff",
"ticket",
"tour",
"angle",
"blue",
"breakfast",
"confidence",
"daughter",
"degree",
"doctor",
"dot",
"dream",
"duty",
"essay",
"father",
"fee",
"finance",
"hour",
"juice",
"limit",
"luck",
"milk",
"mouth",
"peace",
"pipe",
"seat",
"stable",
"storm",
"substance",
"team",
"trick",
"afternoon",
"bat",
"beach",
"blank",
"catch",
"chain",
"consideration",
"cream",
"crew",
"detail",
"gold",
"interview",
"kid",
"mark",
"match",
"mission",
"pain",
"pleasure",
"score",
"screw",
"sex",
"shop",
"shower",
"suit",
"tone",
"window",
"agent",
"band",
"block",
"bone",
"calendar",
"cap",
"coat",
"contest",
"corner",
"court",
"cup",
"district",
"door",
"east",
"finger",
"garage",
"guarantee",
"hole",
"hook",
"implement",
"layer",
"lecture",
"lie",
"manner",
"meeting",
"nose",
"parking",
"partner",
"profile",
"respect",
"rice",
"routine",
"schedule",
"swimming",
"telephone",
"tip",
"winter",
"airline",
"bag",
"battle",
"bed",
"bill",
"bother",
"cake",
"code",
"curve",
"designer",
"dimension",
"dress",
"ease",
"emergency",
"evening",
"extension",
"farm",
"fight",
"gap",
"grade",
"holiday",
"horror",
"horse",
"host",
"husband",
"loan",
"mistake",
"mountain",
"nail",
"noise",
"occasion",
"package",
"patient",
"pause",
"phrase",
"proof",
"race",
"relief",
"sand",
"sentence",
"shoulder",
"smoke",
"stomach",
"string",
"tourist",
"towel",
"vacation",
"west",
"wheel",
"wine",
"arm",
"aside",
"associate",
"bet",
"blow",
"border",
"branch",
"breast",
"brother",
"buddy",
"bunch",
"chip",
"coach",
"cross",
"document",
"draft",
"dust",
"expert",
"floor",
"god",
"golf",
"habit",
"iron",
"judge",
"knife",
"landscape",
"league",
"mail",
"mess",
"hardware"}
rand.Seed(time.Now().UnixNano())
x := rand.Intn(len(list))
return list[x]
} | "spirit",
"street",
"tree",
"wave", | random_line_split |
words.go | package words
import "math/rand"
import "strings"
import "time"
func | () string {
abuff := []string{}
total := 0
words := 0
for {
w := ranWord()
words += 1
total += len(w)
abuff = append(abuff, w)
if total >= 40 && words > 4 {
break
}
}
buff := ""
for _, x := range abuff {
first := x[0:1]
last := x[1:len(x)]
first = strings.ToUpper(first)
buff += first + last
}
return buff
}
func ranWord() string {
list := []string{
"people",
"history",
"way",
"art",
"world",
"information",
"map",
"two",
"family",
"government",
"health",
"system",
"computer",
"meat",
"year",
"thanks",
"music",
"person",
"reading",
"method",
"data",
"food",
"understanding",
"theory",
"law",
"bird",
"literature",
"problem",
"software",
"control",
"knowledge",
"power",
"ability",
"economics",
"love",
"internet",
"television",
"science",
"library",
"nature",
"fact",
"product",
"idea",
"temperature",
"investment",
"area",
"society",
"activity",
"story",
"industry",
"media",
"thing",
"oven",
"community",
"definition",
"safety",
"quality",
"development",
"language",
"management",
"player",
"variety",
"video",
"week",
"security",
"country",
"exam",
"movie",
"organization",
"equipment",
"physics",
"analysis",
"policy",
"series",
"thought",
"basis",
"boyfriend",
"direction",
"strategy",
"technology",
"army",
"camera",
"freedom",
"paper",
"environment",
"child",
"instance",
"month",
"truth",
"marketing",
"university",
"writing",
"article",
"department",
"difference",
"goal",
"news",
"audience",
"fishing",
"growth",
"income",
"marriage",
"user",
"combination",
"failure",
"meaning",
"medicine",
"philosophy",
"teacher",
"communication",
"night",
"chemistry",
"disease",
"disk",
"energy",
"nation",
"road",
"role",
"soup",
"advertising",
"location",
"success",
"addition",
"apartment",
"education",
"math",
"moment",
"painting",
"politics",
"attention",
"decision",
"event",
"property",
"shopping",
"student",
"wood",
"competition",
"distribution",
"entertainment",
"office",
"population",
"president",
"unit",
"category",
"cigarette",
"context",
"introduction",
"opportunity",
"performance",
"driver",
"flight",
"length",
"magazine",
"newspaper",
"relationship",
"teaching",
"cell",
"dealer",
"finding",
"lake",
"member",
"message",
"phone",
"scene",
"appearance",
"association",
"concept",
"customer",
"death",
"discussion",
"housing",
"inflation",
"insurance",
"mood",
"woman",
"advice",
"blood",
"effort",
"expression",
"importance",
"opinion",
"payment",
"reality",
"responsibility",
"situation",
"skill",
"statement",
"wealth",
"application",
"city",
"county",
"depth",
"estate",
"foundation",
"grandmother",
"heart",
"perspective",
"photo",
"recipe",
"studio",
"topic",
"collection",
"depression",
"imagination",
"passion",
"percentage",
"resource",
"setting",
"ad",
"agency",
"college",
"connection",
"criticism",
"debt",
"description",
"memory",
"patience",
"secretary",
"solution",
"administration",
"aspect",
"attitude",
"director",
"personality",
"psychology",
"recommendation",
"response",
"selection",
"storage",
"version",
"alcohol",
"argument",
"complaint",
"contract",
"emphasis",
"highway",
"loss",
"membership",
"possession",
"preparation",
"steak",
"union",
"agreement",
"cancer",
"currency",
"employment",
"engineering",
"entry",
"interaction",
"mixture",
"preference",
"region",
"republic",
"tradition",
"virus",
"actor",
"classroom",
"delivery",
"device",
"difficulty",
"drama",
"election",
"engine",
"football",
"guidance",
"hotel",
"owner",
"priority",
"protection",
"suggestion",
"tension",
"variation",
"anxiety",
"atmosphere",
"awareness",
"bath",
"bread",
"candidate",
"climate",
"comparison",
"confusion",
"construction",
"elevator",
"emotion",
"employee",
"employer",
"guest",
"height",
"leadership",
"mall",
"manager",
"operation",
"recording",
"sample",
"transportation",
"charity",
"cousin",
"disaster",
"editor",
"efficiency",
"excitement",
"extent",
"feedback",
"guitar",
"homework",
"leader",
"mom",
"outcome",
"permission",
"presentation",
"promotion",
"reflection",
"refrigerator",
"resolution",
"revenue",
"session",
"singer",
"tennis",
"basket",
"bonus",
"cabinet",
"childhood",
"church",
"clothes",
"coffee",
"dinner",
"drawing",
"hair",
"hearing",
"initiative",
"judgment",
"lab",
"measurement",
"mode",
"mud",
"orange",
"poetry",
"police",
"possibility",
"procedure",
"queen",
"ratio",
"relation",
"restaurant",
"satisfaction",
"sector",
"signature",
"significance",
"song",
"tooth",
"town",
"vehicle",
"volume",
"wife",
"accident",
"airport",
"appointment",
"arrival",
"assumption",
"baseball",
"chapter",
"committee",
"conversation",
"database",
"enthusiasm",
"error",
"explanation",
"farmer",
"gate",
"girl",
"hall",
"historian",
"hospital",
"injury",
"instruction",
"maintenance",
"manufacturer",
"meal",
"perception",
"pie",
"poem",
"presence",
"proposal",
"reception",
"replacement",
"revolution",
"river",
"son",
"speech",
"tea",
"village",
"warning",
"winner",
"worker",
"writer",
"assistance",
"breath",
"buyer",
"chest",
"chocolate",
"conclusion",
"contribution",
"cookie",
"courage",
"dad",
"desk",
"drawer",
"establishment",
"examination",
"garbage",
"grocery",
"honey",
"impression",
"improvement",
"independence",
"insect",
"inspection",
"inspector",
"king",
"ladder",
"menu",
"penalty",
"piano",
"potato",
"profession",
"professor",
"quantity",
"reaction",
"requirement",
"salad",
"sister",
"supermarket",
"tongue",
"weakness",
"wedding",
"affair",
"ambition",
"analyst",
"apple",
"assignment",
"assistant",
"bathroom",
"bedroom",
"beer",
"birthday",
"celebration",
"championship",
"cheek",
"client",
"consequence",
"departure",
"diamond",
"dirt",
"ear",
"fortune",
"friendship",
"funeral",
"gene",
"girlfriend",
"hat",
"indication",
"intention",
"lady",
"midnight",
"negotiation",
"obligation",
"passenger",
"pizza",
"platform",
"poet",
"pollution",
"recognition",
"reputation",
"shirt",
"sir",
"speaker",
"stranger",
"surgery",
"sympathy",
"tale",
"throat",
"trainer",
"uncle",
"youth",
"time",
"work",
"film",
"water",
"money",
"example",
"while",
"business",
"study",
"game",
"life",
"form",
"air",
"day",
"place",
"number",
"part",
"field",
"fish",
"back",
"process",
"heat",
"hand",
"experience",
"job",
"book",
"end",
"point",
"type",
"home",
"economy",
"value",
"body",
"market",
"guide",
"interest",
"state",
"radio",
"course",
"company",
"price",
"size",
"card",
"list",
"mind",
"trade",
"line",
"care",
"group",
"risk",
"word",
"fat",
"force",
"key",
"light",
"training",
"name",
"school",
"top",
"amount",
"level",
"order",
"practice",
"research",
"sense",
"service",
"piece",
"web",
"boss",
"sport",
"fun",
"house",
"page",
"term",
"test",
"answer",
"sound",
"focus",
"matter",
"kind",
"soil",
"board",
"oil",
"picture",
"access",
"garden",
"range",
"rate",
"reason",
"future",
"site",
"demand",
"exercise",
"image",
"case",
"cause",
"coast",
"action",
"age",
"bad",
"boat",
"record",
"result",
"section",
"building",
"mouse",
"cash",
"class",
"nothing",
"period",
"plan",
"store",
"tax",
"side",
"subject",
"space",
"rule",
"stock",
"weather",
"chance",
"figure",
"man",
"model",
"source",
"beginning",
"earth",
"program",
"chicken",
"design",
"feature",
"head",
"material",
"purpose",
"question",
"rock",
"salt",
"act",
"birth",
"car",
"dog",
"object",
"scale",
"sun",
"note",
"profit",
"rent",
"speed",
"style",
"war",
"bank",
"craft",
"half",
"inside",
"outside",
"standard",
"bus",
"exchange",
"eye",
"fire",
"position",
"pressure",
"stress",
"advantage",
"benefit",
"box",
"frame",
"issue",
"step",
"cycle",
"face",
"item",
"metal",
"paint",
"review",
"room",
"screen",
"structure",
"view",
"account",
"ball",
"discipline",
"medium",
"share",
"balance",
"bit",
"black",
"bottom",
"choice",
"gift",
"impact",
"machine",
"shape",
"tool",
"wind",
"address",
"average",
"career",
"culture",
"morning",
"pot",
"sign",
"table",
"task",
"condition",
"contact",
"credit",
"egg",
"hope",
"ice",
"network",
"north",
"square",
"attempt",
"date",
"effect",
"link",
"post",
"star",
"voice",
"capital",
"challenge",
"friend",
"self",
"shot",
"brush",
"couple",
"debate",
"exit",
"front",
"function",
"lack",
"living",
"plant",
"plastic",
"spot",
"summer",
"taste",
"theme",
"track",
"wing",
"brain",
"button",
"click",
"desire",
"foot",
"gas",
"influence",
"notice",
"rain",
"wall",
"base",
"damage",
"distance",
"feeling",
"pair",
"savings",
"staff",
"sugar",
"target",
"text",
"animal",
"author",
"budget",
"discount",
"file",
"ground",
"lesson",
"minute",
"officer",
"phase",
"reference",
"register",
"sky",
"stage",
"stick",
"title",
"trouble",
"bowl",
"bridge",
"campaign",
"character",
"club",
"edge",
"evidence",
"fan",
"letter",
"lock",
"maximum",
"novel",
"option",
"pack",
"park",
"plenty",
"quarter",
"skin",
"sort",
"weight",
"baby",
"background",
"carry",
"dish",
"factor",
"fruit",
"glass",
"joint",
"master",
"muscle",
"red",
"strength",
"traffic",
"trip",
"vegetable",
"appeal",
"chart",
"gear",
"ideal",
"kitchen",
"land",
"log",
"mother",
"net",
"party",
"principle",
"relative",
"sale",
"season",
"signal",
"spirit",
"street",
"tree",
"wave",
"belt",
"bench",
"commission",
"copy",
"drop",
"minimum",
"path",
"progress",
"project",
"sea",
"south",
"status",
"stuff",
"ticket",
"tour",
"angle",
"blue",
"breakfast",
"confidence",
"daughter",
"degree",
"doctor",
"dot",
"dream",
"duty",
"essay",
"father",
"fee",
"finance",
"hour",
"juice",
"limit",
"luck",
"milk",
"mouth",
"peace",
"pipe",
"seat",
"stable",
"storm",
"substance",
"team",
"trick",
"afternoon",
"bat",
"beach",
"blank",
"catch",
"chain",
"consideration",
"cream",
"crew",
"detail",
"gold",
"interview",
"kid",
"mark",
"match",
"mission",
"pain",
"pleasure",
"score",
"screw",
"sex",
"shop",
"shower",
"suit",
"tone",
"window",
"agent",
"band",
"block",
"bone",
"calendar",
"cap",
"coat",
"contest",
"corner",
"court",
"cup",
"district",
"door",
"east",
"finger",
"garage",
"guarantee",
"hole",
"hook",
"implement",
"layer",
"lecture",
"lie",
"manner",
"meeting",
"nose",
"parking",
"partner",
"profile",
"respect",
"rice",
"routine",
"schedule",
"swimming",
"telephone",
"tip",
"winter",
"airline",
"bag",
"battle",
"bed",
"bill",
"bother",
"cake",
"code",
"curve",
"designer",
"dimension",
"dress",
"ease",
"emergency",
"evening",
"extension",
"farm",
"fight",
"gap",
"grade",
"holiday",
"horror",
"horse",
"host",
"husband",
"loan",
"mistake",
"mountain",
"nail",
"noise",
"occasion",
"package",
"patient",
"pause",
"phrase",
"proof",
"race",
"relief",
"sand",
"sentence",
"shoulder",
"smoke",
"stomach",
"string",
"tourist",
"towel",
"vacation",
"west",
"wheel",
"wine",
"arm",
"aside",
"associate",
"bet",
"blow",
"border",
"branch",
"breast",
"brother",
"buddy",
"bunch",
"chip",
"coach",
"cross",
"document",
"draft",
"dust",
"expert",
"floor",
"god",
"golf",
"habit",
"iron",
"judge",
"knife",
"landscape",
"league",
"mail",
"mess",
"hardware"}
rand.Seed(time.Now().UnixNano())
x := rand.Intn(len(list))
return list[x]
}
| BigWords | identifier_name |
words.go | package words
import "math/rand"
import "strings"
import "time"
func BigWords() string {
abuff := []string{}
total := 0
words := 0
for {
w := ranWord()
words += 1
total += len(w)
abuff = append(abuff, w)
if total >= 40 && words > 4 {
break
}
}
buff := ""
for _, x := range abuff {
first := x[0:1]
last := x[1:len(x)]
first = strings.ToUpper(first)
buff += first + last
}
return buff
}
func ranWord() string | {
list := []string{
"people",
"history",
"way",
"art",
"world",
"information",
"map",
"two",
"family",
"government",
"health",
"system",
"computer",
"meat",
"year",
"thanks",
"music",
"person",
"reading",
"method",
"data",
"food",
"understanding",
"theory",
"law",
"bird",
"literature",
"problem",
"software",
"control",
"knowledge",
"power",
"ability",
"economics",
"love",
"internet",
"television",
"science",
"library",
"nature",
"fact",
"product",
"idea",
"temperature",
"investment",
"area",
"society",
"activity",
"story",
"industry",
"media",
"thing",
"oven",
"community",
"definition",
"safety",
"quality",
"development",
"language",
"management",
"player",
"variety",
"video",
"week",
"security",
"country",
"exam",
"movie",
"organization",
"equipment",
"physics",
"analysis",
"policy",
"series",
"thought",
"basis",
"boyfriend",
"direction",
"strategy",
"technology",
"army",
"camera",
"freedom",
"paper",
"environment",
"child",
"instance",
"month",
"truth",
"marketing",
"university",
"writing",
"article",
"department",
"difference",
"goal",
"news",
"audience",
"fishing",
"growth",
"income",
"marriage",
"user",
"combination",
"failure",
"meaning",
"medicine",
"philosophy",
"teacher",
"communication",
"night",
"chemistry",
"disease",
"disk",
"energy",
"nation",
"road",
"role",
"soup",
"advertising",
"location",
"success",
"addition",
"apartment",
"education",
"math",
"moment",
"painting",
"politics",
"attention",
"decision",
"event",
"property",
"shopping",
"student",
"wood",
"competition",
"distribution",
"entertainment",
"office",
"population",
"president",
"unit",
"category",
"cigarette",
"context",
"introduction",
"opportunity",
"performance",
"driver",
"flight",
"length",
"magazine",
"newspaper",
"relationship",
"teaching",
"cell",
"dealer",
"finding",
"lake",
"member",
"message",
"phone",
"scene",
"appearance",
"association",
"concept",
"customer",
"death",
"discussion",
"housing",
"inflation",
"insurance",
"mood",
"woman",
"advice",
"blood",
"effort",
"expression",
"importance",
"opinion",
"payment",
"reality",
"responsibility",
"situation",
"skill",
"statement",
"wealth",
"application",
"city",
"county",
"depth",
"estate",
"foundation",
"grandmother",
"heart",
"perspective",
"photo",
"recipe",
"studio",
"topic",
"collection",
"depression",
"imagination",
"passion",
"percentage",
"resource",
"setting",
"ad",
"agency",
"college",
"connection",
"criticism",
"debt",
"description",
"memory",
"patience",
"secretary",
"solution",
"administration",
"aspect",
"attitude",
"director",
"personality",
"psychology",
"recommendation",
"response",
"selection",
"storage",
"version",
"alcohol",
"argument",
"complaint",
"contract",
"emphasis",
"highway",
"loss",
"membership",
"possession",
"preparation",
"steak",
"union",
"agreement",
"cancer",
"currency",
"employment",
"engineering",
"entry",
"interaction",
"mixture",
"preference",
"region",
"republic",
"tradition",
"virus",
"actor",
"classroom",
"delivery",
"device",
"difficulty",
"drama",
"election",
"engine",
"football",
"guidance",
"hotel",
"owner",
"priority",
"protection",
"suggestion",
"tension",
"variation",
"anxiety",
"atmosphere",
"awareness",
"bath",
"bread",
"candidate",
"climate",
"comparison",
"confusion",
"construction",
"elevator",
"emotion",
"employee",
"employer",
"guest",
"height",
"leadership",
"mall",
"manager",
"operation",
"recording",
"sample",
"transportation",
"charity",
"cousin",
"disaster",
"editor",
"efficiency",
"excitement",
"extent",
"feedback",
"guitar",
"homework",
"leader",
"mom",
"outcome",
"permission",
"presentation",
"promotion",
"reflection",
"refrigerator",
"resolution",
"revenue",
"session",
"singer",
"tennis",
"basket",
"bonus",
"cabinet",
"childhood",
"church",
"clothes",
"coffee",
"dinner",
"drawing",
"hair",
"hearing",
"initiative",
"judgment",
"lab",
"measurement",
"mode",
"mud",
"orange",
"poetry",
"police",
"possibility",
"procedure",
"queen",
"ratio",
"relation",
"restaurant",
"satisfaction",
"sector",
"signature",
"significance",
"song",
"tooth",
"town",
"vehicle",
"volume",
"wife",
"accident",
"airport",
"appointment",
"arrival",
"assumption",
"baseball",
"chapter",
"committee",
"conversation",
"database",
"enthusiasm",
"error",
"explanation",
"farmer",
"gate",
"girl",
"hall",
"historian",
"hospital",
"injury",
"instruction",
"maintenance",
"manufacturer",
"meal",
"perception",
"pie",
"poem",
"presence",
"proposal",
"reception",
"replacement",
"revolution",
"river",
"son",
"speech",
"tea",
"village",
"warning",
"winner",
"worker",
"writer",
"assistance",
"breath",
"buyer",
"chest",
"chocolate",
"conclusion",
"contribution",
"cookie",
"courage",
"dad",
"desk",
"drawer",
"establishment",
"examination",
"garbage",
"grocery",
"honey",
"impression",
"improvement",
"independence",
"insect",
"inspection",
"inspector",
"king",
"ladder",
"menu",
"penalty",
"piano",
"potato",
"profession",
"professor",
"quantity",
"reaction",
"requirement",
"salad",
"sister",
"supermarket",
"tongue",
"weakness",
"wedding",
"affair",
"ambition",
"analyst",
"apple",
"assignment",
"assistant",
"bathroom",
"bedroom",
"beer",
"birthday",
"celebration",
"championship",
"cheek",
"client",
"consequence",
"departure",
"diamond",
"dirt",
"ear",
"fortune",
"friendship",
"funeral",
"gene",
"girlfriend",
"hat",
"indication",
"intention",
"lady",
"midnight",
"negotiation",
"obligation",
"passenger",
"pizza",
"platform",
"poet",
"pollution",
"recognition",
"reputation",
"shirt",
"sir",
"speaker",
"stranger",
"surgery",
"sympathy",
"tale",
"throat",
"trainer",
"uncle",
"youth",
"time",
"work",
"film",
"water",
"money",
"example",
"while",
"business",
"study",
"game",
"life",
"form",
"air",
"day",
"place",
"number",
"part",
"field",
"fish",
"back",
"process",
"heat",
"hand",
"experience",
"job",
"book",
"end",
"point",
"type",
"home",
"economy",
"value",
"body",
"market",
"guide",
"interest",
"state",
"radio",
"course",
"company",
"price",
"size",
"card",
"list",
"mind",
"trade",
"line",
"care",
"group",
"risk",
"word",
"fat",
"force",
"key",
"light",
"training",
"name",
"school",
"top",
"amount",
"level",
"order",
"practice",
"research",
"sense",
"service",
"piece",
"web",
"boss",
"sport",
"fun",
"house",
"page",
"term",
"test",
"answer",
"sound",
"focus",
"matter",
"kind",
"soil",
"board",
"oil",
"picture",
"access",
"garden",
"range",
"rate",
"reason",
"future",
"site",
"demand",
"exercise",
"image",
"case",
"cause",
"coast",
"action",
"age",
"bad",
"boat",
"record",
"result",
"section",
"building",
"mouse",
"cash",
"class",
"nothing",
"period",
"plan",
"store",
"tax",
"side",
"subject",
"space",
"rule",
"stock",
"weather",
"chance",
"figure",
"man",
"model",
"source",
"beginning",
"earth",
"program",
"chicken",
"design",
"feature",
"head",
"material",
"purpose",
"question",
"rock",
"salt",
"act",
"birth",
"car",
"dog",
"object",
"scale",
"sun",
"note",
"profit",
"rent",
"speed",
"style",
"war",
"bank",
"craft",
"half",
"inside",
"outside",
"standard",
"bus",
"exchange",
"eye",
"fire",
"position",
"pressure",
"stress",
"advantage",
"benefit",
"box",
"frame",
"issue",
"step",
"cycle",
"face",
"item",
"metal",
"paint",
"review",
"room",
"screen",
"structure",
"view",
"account",
"ball",
"discipline",
"medium",
"share",
"balance",
"bit",
"black",
"bottom",
"choice",
"gift",
"impact",
"machine",
"shape",
"tool",
"wind",
"address",
"average",
"career",
"culture",
"morning",
"pot",
"sign",
"table",
"task",
"condition",
"contact",
"credit",
"egg",
"hope",
"ice",
"network",
"north",
"square",
"attempt",
"date",
"effect",
"link",
"post",
"star",
"voice",
"capital",
"challenge",
"friend",
"self",
"shot",
"brush",
"couple",
"debate",
"exit",
"front",
"function",
"lack",
"living",
"plant",
"plastic",
"spot",
"summer",
"taste",
"theme",
"track",
"wing",
"brain",
"button",
"click",
"desire",
"foot",
"gas",
"influence",
"notice",
"rain",
"wall",
"base",
"damage",
"distance",
"feeling",
"pair",
"savings",
"staff",
"sugar",
"target",
"text",
"animal",
"author",
"budget",
"discount",
"file",
"ground",
"lesson",
"minute",
"officer",
"phase",
"reference",
"register",
"sky",
"stage",
"stick",
"title",
"trouble",
"bowl",
"bridge",
"campaign",
"character",
"club",
"edge",
"evidence",
"fan",
"letter",
"lock",
"maximum",
"novel",
"option",
"pack",
"park",
"plenty",
"quarter",
"skin",
"sort",
"weight",
"baby",
"background",
"carry",
"dish",
"factor",
"fruit",
"glass",
"joint",
"master",
"muscle",
"red",
"strength",
"traffic",
"trip",
"vegetable",
"appeal",
"chart",
"gear",
"ideal",
"kitchen",
"land",
"log",
"mother",
"net",
"party",
"principle",
"relative",
"sale",
"season",
"signal",
"spirit",
"street",
"tree",
"wave",
"belt",
"bench",
"commission",
"copy",
"drop",
"minimum",
"path",
"progress",
"project",
"sea",
"south",
"status",
"stuff",
"ticket",
"tour",
"angle",
"blue",
"breakfast",
"confidence",
"daughter",
"degree",
"doctor",
"dot",
"dream",
"duty",
"essay",
"father",
"fee",
"finance",
"hour",
"juice",
"limit",
"luck",
"milk",
"mouth",
"peace",
"pipe",
"seat",
"stable",
"storm",
"substance",
"team",
"trick",
"afternoon",
"bat",
"beach",
"blank",
"catch",
"chain",
"consideration",
"cream",
"crew",
"detail",
"gold",
"interview",
"kid",
"mark",
"match",
"mission",
"pain",
"pleasure",
"score",
"screw",
"sex",
"shop",
"shower",
"suit",
"tone",
"window",
"agent",
"band",
"block",
"bone",
"calendar",
"cap",
"coat",
"contest",
"corner",
"court",
"cup",
"district",
"door",
"east",
"finger",
"garage",
"guarantee",
"hole",
"hook",
"implement",
"layer",
"lecture",
"lie",
"manner",
"meeting",
"nose",
"parking",
"partner",
"profile",
"respect",
"rice",
"routine",
"schedule",
"swimming",
"telephone",
"tip",
"winter",
"airline",
"bag",
"battle",
"bed",
"bill",
"bother",
"cake",
"code",
"curve",
"designer",
"dimension",
"dress",
"ease",
"emergency",
"evening",
"extension",
"farm",
"fight",
"gap",
"grade",
"holiday",
"horror",
"horse",
"host",
"husband",
"loan",
"mistake",
"mountain",
"nail",
"noise",
"occasion",
"package",
"patient",
"pause",
"phrase",
"proof",
"race",
"relief",
"sand",
"sentence",
"shoulder",
"smoke",
"stomach",
"string",
"tourist",
"towel",
"vacation",
"west",
"wheel",
"wine",
"arm",
"aside",
"associate",
"bet",
"blow",
"border",
"branch",
"breast",
"brother",
"buddy",
"bunch",
"chip",
"coach",
"cross",
"document",
"draft",
"dust",
"expert",
"floor",
"god",
"golf",
"habit",
"iron",
"judge",
"knife",
"landscape",
"league",
"mail",
"mess",
"hardware"}
rand.Seed(time.Now().UnixNano())
x := rand.Intn(len(list))
return list[x]
} | identifier_body | |
words.go | package words
import "math/rand"
import "strings"
import "time"
func BigWords() string {
abuff := []string{}
total := 0
words := 0
for {
w := ranWord()
words += 1
total += len(w)
abuff = append(abuff, w)
if total >= 40 && words > 4 {
break
}
}
buff := ""
for _, x := range abuff |
return buff
}
func ranWord() string {
list := []string{
"people",
"history",
"way",
"art",
"world",
"information",
"map",
"two",
"family",
"government",
"health",
"system",
"computer",
"meat",
"year",
"thanks",
"music",
"person",
"reading",
"method",
"data",
"food",
"understanding",
"theory",
"law",
"bird",
"literature",
"problem",
"software",
"control",
"knowledge",
"power",
"ability",
"economics",
"love",
"internet",
"television",
"science",
"library",
"nature",
"fact",
"product",
"idea",
"temperature",
"investment",
"area",
"society",
"activity",
"story",
"industry",
"media",
"thing",
"oven",
"community",
"definition",
"safety",
"quality",
"development",
"language",
"management",
"player",
"variety",
"video",
"week",
"security",
"country",
"exam",
"movie",
"organization",
"equipment",
"physics",
"analysis",
"policy",
"series",
"thought",
"basis",
"boyfriend",
"direction",
"strategy",
"technology",
"army",
"camera",
"freedom",
"paper",
"environment",
"child",
"instance",
"month",
"truth",
"marketing",
"university",
"writing",
"article",
"department",
"difference",
"goal",
"news",
"audience",
"fishing",
"growth",
"income",
"marriage",
"user",
"combination",
"failure",
"meaning",
"medicine",
"philosophy",
"teacher",
"communication",
"night",
"chemistry",
"disease",
"disk",
"energy",
"nation",
"road",
"role",
"soup",
"advertising",
"location",
"success",
"addition",
"apartment",
"education",
"math",
"moment",
"painting",
"politics",
"attention",
"decision",
"event",
"property",
"shopping",
"student",
"wood",
"competition",
"distribution",
"entertainment",
"office",
"population",
"president",
"unit",
"category",
"cigarette",
"context",
"introduction",
"opportunity",
"performance",
"driver",
"flight",
"length",
"magazine",
"newspaper",
"relationship",
"teaching",
"cell",
"dealer",
"finding",
"lake",
"member",
"message",
"phone",
"scene",
"appearance",
"association",
"concept",
"customer",
"death",
"discussion",
"housing",
"inflation",
"insurance",
"mood",
"woman",
"advice",
"blood",
"effort",
"expression",
"importance",
"opinion",
"payment",
"reality",
"responsibility",
"situation",
"skill",
"statement",
"wealth",
"application",
"city",
"county",
"depth",
"estate",
"foundation",
"grandmother",
"heart",
"perspective",
"photo",
"recipe",
"studio",
"topic",
"collection",
"depression",
"imagination",
"passion",
"percentage",
"resource",
"setting",
"ad",
"agency",
"college",
"connection",
"criticism",
"debt",
"description",
"memory",
"patience",
"secretary",
"solution",
"administration",
"aspect",
"attitude",
"director",
"personality",
"psychology",
"recommendation",
"response",
"selection",
"storage",
"version",
"alcohol",
"argument",
"complaint",
"contract",
"emphasis",
"highway",
"loss",
"membership",
"possession",
"preparation",
"steak",
"union",
"agreement",
"cancer",
"currency",
"employment",
"engineering",
"entry",
"interaction",
"mixture",
"preference",
"region",
"republic",
"tradition",
"virus",
"actor",
"classroom",
"delivery",
"device",
"difficulty",
"drama",
"election",
"engine",
"football",
"guidance",
"hotel",
"owner",
"priority",
"protection",
"suggestion",
"tension",
"variation",
"anxiety",
"atmosphere",
"awareness",
"bath",
"bread",
"candidate",
"climate",
"comparison",
"confusion",
"construction",
"elevator",
"emotion",
"employee",
"employer",
"guest",
"height",
"leadership",
"mall",
"manager",
"operation",
"recording",
"sample",
"transportation",
"charity",
"cousin",
"disaster",
"editor",
"efficiency",
"excitement",
"extent",
"feedback",
"guitar",
"homework",
"leader",
"mom",
"outcome",
"permission",
"presentation",
"promotion",
"reflection",
"refrigerator",
"resolution",
"revenue",
"session",
"singer",
"tennis",
"basket",
"bonus",
"cabinet",
"childhood",
"church",
"clothes",
"coffee",
"dinner",
"drawing",
"hair",
"hearing",
"initiative",
"judgment",
"lab",
"measurement",
"mode",
"mud",
"orange",
"poetry",
"police",
"possibility",
"procedure",
"queen",
"ratio",
"relation",
"restaurant",
"satisfaction",
"sector",
"signature",
"significance",
"song",
"tooth",
"town",
"vehicle",
"volume",
"wife",
"accident",
"airport",
"appointment",
"arrival",
"assumption",
"baseball",
"chapter",
"committee",
"conversation",
"database",
"enthusiasm",
"error",
"explanation",
"farmer",
"gate",
"girl",
"hall",
"historian",
"hospital",
"injury",
"instruction",
"maintenance",
"manufacturer",
"meal",
"perception",
"pie",
"poem",
"presence",
"proposal",
"reception",
"replacement",
"revolution",
"river",
"son",
"speech",
"tea",
"village",
"warning",
"winner",
"worker",
"writer",
"assistance",
"breath",
"buyer",
"chest",
"chocolate",
"conclusion",
"contribution",
"cookie",
"courage",
"dad",
"desk",
"drawer",
"establishment",
"examination",
"garbage",
"grocery",
"honey",
"impression",
"improvement",
"independence",
"insect",
"inspection",
"inspector",
"king",
"ladder",
"menu",
"penalty",
"piano",
"potato",
"profession",
"professor",
"quantity",
"reaction",
"requirement",
"salad",
"sister",
"supermarket",
"tongue",
"weakness",
"wedding",
"affair",
"ambition",
"analyst",
"apple",
"assignment",
"assistant",
"bathroom",
"bedroom",
"beer",
"birthday",
"celebration",
"championship",
"cheek",
"client",
"consequence",
"departure",
"diamond",
"dirt",
"ear",
"fortune",
"friendship",
"funeral",
"gene",
"girlfriend",
"hat",
"indication",
"intention",
"lady",
"midnight",
"negotiation",
"obligation",
"passenger",
"pizza",
"platform",
"poet",
"pollution",
"recognition",
"reputation",
"shirt",
"sir",
"speaker",
"stranger",
"surgery",
"sympathy",
"tale",
"throat",
"trainer",
"uncle",
"youth",
"time",
"work",
"film",
"water",
"money",
"example",
"while",
"business",
"study",
"game",
"life",
"form",
"air",
"day",
"place",
"number",
"part",
"field",
"fish",
"back",
"process",
"heat",
"hand",
"experience",
"job",
"book",
"end",
"point",
"type",
"home",
"economy",
"value",
"body",
"market",
"guide",
"interest",
"state",
"radio",
"course",
"company",
"price",
"size",
"card",
"list",
"mind",
"trade",
"line",
"care",
"group",
"risk",
"word",
"fat",
"force",
"key",
"light",
"training",
"name",
"school",
"top",
"amount",
"level",
"order",
"practice",
"research",
"sense",
"service",
"piece",
"web",
"boss",
"sport",
"fun",
"house",
"page",
"term",
"test",
"answer",
"sound",
"focus",
"matter",
"kind",
"soil",
"board",
"oil",
"picture",
"access",
"garden",
"range",
"rate",
"reason",
"future",
"site",
"demand",
"exercise",
"image",
"case",
"cause",
"coast",
"action",
"age",
"bad",
"boat",
"record",
"result",
"section",
"building",
"mouse",
"cash",
"class",
"nothing",
"period",
"plan",
"store",
"tax",
"side",
"subject",
"space",
"rule",
"stock",
"weather",
"chance",
"figure",
"man",
"model",
"source",
"beginning",
"earth",
"program",
"chicken",
"design",
"feature",
"head",
"material",
"purpose",
"question",
"rock",
"salt",
"act",
"birth",
"car",
"dog",
"object",
"scale",
"sun",
"note",
"profit",
"rent",
"speed",
"style",
"war",
"bank",
"craft",
"half",
"inside",
"outside",
"standard",
"bus",
"exchange",
"eye",
"fire",
"position",
"pressure",
"stress",
"advantage",
"benefit",
"box",
"frame",
"issue",
"step",
"cycle",
"face",
"item",
"metal",
"paint",
"review",
"room",
"screen",
"structure",
"view",
"account",
"ball",
"discipline",
"medium",
"share",
"balance",
"bit",
"black",
"bottom",
"choice",
"gift",
"impact",
"machine",
"shape",
"tool",
"wind",
"address",
"average",
"career",
"culture",
"morning",
"pot",
"sign",
"table",
"task",
"condition",
"contact",
"credit",
"egg",
"hope",
"ice",
"network",
"north",
"square",
"attempt",
"date",
"effect",
"link",
"post",
"star",
"voice",
"capital",
"challenge",
"friend",
"self",
"shot",
"brush",
"couple",
"debate",
"exit",
"front",
"function",
"lack",
"living",
"plant",
"plastic",
"spot",
"summer",
"taste",
"theme",
"track",
"wing",
"brain",
"button",
"click",
"desire",
"foot",
"gas",
"influence",
"notice",
"rain",
"wall",
"base",
"damage",
"distance",
"feeling",
"pair",
"savings",
"staff",
"sugar",
"target",
"text",
"animal",
"author",
"budget",
"discount",
"file",
"ground",
"lesson",
"minute",
"officer",
"phase",
"reference",
"register",
"sky",
"stage",
"stick",
"title",
"trouble",
"bowl",
"bridge",
"campaign",
"character",
"club",
"edge",
"evidence",
"fan",
"letter",
"lock",
"maximum",
"novel",
"option",
"pack",
"park",
"plenty",
"quarter",
"skin",
"sort",
"weight",
"baby",
"background",
"carry",
"dish",
"factor",
"fruit",
"glass",
"joint",
"master",
"muscle",
"red",
"strength",
"traffic",
"trip",
"vegetable",
"appeal",
"chart",
"gear",
"ideal",
"kitchen",
"land",
"log",
"mother",
"net",
"party",
"principle",
"relative",
"sale",
"season",
"signal",
"spirit",
"street",
"tree",
"wave",
"belt",
"bench",
"commission",
"copy",
"drop",
"minimum",
"path",
"progress",
"project",
"sea",
"south",
"status",
"stuff",
"ticket",
"tour",
"angle",
"blue",
"breakfast",
"confidence",
"daughter",
"degree",
"doctor",
"dot",
"dream",
"duty",
"essay",
"father",
"fee",
"finance",
"hour",
"juice",
"limit",
"luck",
"milk",
"mouth",
"peace",
"pipe",
"seat",
"stable",
"storm",
"substance",
"team",
"trick",
"afternoon",
"bat",
"beach",
"blank",
"catch",
"chain",
"consideration",
"cream",
"crew",
"detail",
"gold",
"interview",
"kid",
"mark",
"match",
"mission",
"pain",
"pleasure",
"score",
"screw",
"sex",
"shop",
"shower",
"suit",
"tone",
"window",
"agent",
"band",
"block",
"bone",
"calendar",
"cap",
"coat",
"contest",
"corner",
"court",
"cup",
"district",
"door",
"east",
"finger",
"garage",
"guarantee",
"hole",
"hook",
"implement",
"layer",
"lecture",
"lie",
"manner",
"meeting",
"nose",
"parking",
"partner",
"profile",
"respect",
"rice",
"routine",
"schedule",
"swimming",
"telephone",
"tip",
"winter",
"airline",
"bag",
"battle",
"bed",
"bill",
"bother",
"cake",
"code",
"curve",
"designer",
"dimension",
"dress",
"ease",
"emergency",
"evening",
"extension",
"farm",
"fight",
"gap",
"grade",
"holiday",
"horror",
"horse",
"host",
"husband",
"loan",
"mistake",
"mountain",
"nail",
"noise",
"occasion",
"package",
"patient",
"pause",
"phrase",
"proof",
"race",
"relief",
"sand",
"sentence",
"shoulder",
"smoke",
"stomach",
"string",
"tourist",
"towel",
"vacation",
"west",
"wheel",
"wine",
"arm",
"aside",
"associate",
"bet",
"blow",
"border",
"branch",
"breast",
"brother",
"buddy",
"bunch",
"chip",
"coach",
"cross",
"document",
"draft",
"dust",
"expert",
"floor",
"god",
"golf",
"habit",
"iron",
"judge",
"knife",
"landscape",
"league",
"mail",
"mess",
"hardware"}
rand.Seed(time.Now().UnixNano())
x := rand.Intn(len(list))
return list[x]
}
| {
first := x[0:1]
last := x[1:len(x)]
first = strings.ToUpper(first)
buff += first + last
} | conditional_block |
codec.py | import functools
from typing import List
import numpy as np
import hiccup.settings as settings
import hiccup.model as model
import hiccup.utils as utils
import hiccup.transform as transform
import hiccup.huffman as huffman
import hiccup.hicimage as hic
"""
Encoding/Decoding functionality aka
Run Length encoding
Huffman Encoding - looking at papers, you can rely on the default Huffman encodings for say jpeg but then for
our eventual Wavelet encoding, the same Huffman encodings are definitely not applicable. To be consistent, and
avoid having to copy the entire RL Huffman table, I'll generate on the fly and persist. This is expensive for
smaller images, but for very large images this is a small penalty.
"""
class RunLength:
@classmethod
def from_dict(cls, d):
return cls(d["value"], d["zeros"])
def __init__(self, value=0, length=0):
self.value = value
self.length = length
def __eq__(self, other):
return type(self) == type(other) and self.value == other.value and self.length == other.length
def __str__(self):
return "(%d, %d)" % (self.length, self.value)
@property
def segment(self):
return [0] * self.length + [self.value]
@property
def is_trailing(self):
return self.value == 0 and self.length == 0
def differential_coding(blocks: np.ndarray):
"""
Produce differential coding for the DC coefficients
"""
dc_comps = [transform.dc_component(b) for b in blocks]
return utils.differences(dc_comps)
def run_length_coding(arr: np.ndarray, max_len=0xF) -> List[RunLength]:
"""
Come up with the run length encoding for a matrix
"""
def _break_up_rle(code, max_len):
l = code["zeros"]
div = l // max_len
full = {
"zeros": max_len - 1, # minus 1 because we get another for free from the value
"value": 0
}
return ([full] * div) + [{
"zeros": l - (div * max_len),
"value": code["value"]
}]
def reduction(agg, next):
if "value" in agg[-1]:
agg.append({"zeros": 0})
if next == 0:
agg[-1]["zeros"] += 1
return agg
if "value" not in agg[-1]:
agg[-1]["value"] = next
return agg
utils.debug_msg("Going to determine RLE for %d size array" % len(arr))
rl = functools.reduce(reduction, arr, [{"zeros": 0}])
utils.debug_msg("%d long RLE created" % len(rl))
# If the last element has no value then it was 0! That is a special tuple, (0,0)
if "value" not in rl[-1]:
rl[-1] = {"zeros": 0, "value": 0}
# the goal of RLE in the case of compression is to contain the first symbol (length, size) within a byte
# so if the length is too long, then we need to break it up
if max_len is not None:
utils.debug_msg("Breaking up RLE lengths that are larger than %d" % max_len)
rl = [_break_up_rle(code, max_len) for code in rl]
rl = utils.flatten(rl)
utils.debug_msg("Make RLE objects")
return [RunLength.from_dict(r) for r in rl]
def decode_run_length(rles: List[RunLength], length: int):
arr = []
for (i, d) in enumerate(rles):
arr.append(d.segment)
arr = utils.flatten(arr)
# arr = utils.flatten([d.segment for d in rles])
if rles[-1].is_trailing:
fill = length - len(arr)
arr += ([0] * fill)
return arr
def wavelet_encode(compressed: model.CompressedImage):
"""
In brief reading of literature, Huffman coding is still considered for wavelet image compression. There are other
more effective (and complicated schemes) that I think are out of scope of this project which is just to introduce
the concepts.
"""
def collapse_subbands(k, v):
out = [transform.zigzag(l) for l in v]
out = utils.flatten(out)
return out
utils.debug_msg("Starting Wavelet encoding")
lin_subbands = utils.dict_map(compressed.as_dict, collapse_subbands)
utils.debug_msg("Have completed linearizing the subbands")
rles = utils.dict_map(lin_subbands, lambda _, v: run_length_coding(v))
utils.debug_msg("Have completed the run length encodings")
values_huffs = utils.dict_map(rles,
lambda _, v: huffman.HuffmanTree.construct_from_data(v, key_func=lambda t: t.value))
length_huffs = utils.dict_map(rles,
lambda _, v: huffman.HuffmanTree.construct_from_data(v, key_func=lambda t: t.length))
utils.debug_msg("Huffman trees are constructed")
def encode_huff(d):
huffs = [t[1] for t in d.items()]
return [huffman_encode(h) for h in huffs]
def encode_data(d):
huffs = [t[1] for t in d.items()]
return [huffman_data_encode(h) for h in huffs]
smallest = compressed.luminance_component[0].shape
biggest = compressed.luminance_component[-1].shape
payloads = utils.flatten([
encode_huff(values_huffs),
encode_huff(length_huffs),
encode_data(values_huffs),
encode_data(length_huffs),
[
hic.TupP(smallest[0], smallest[1]),
hic.TupP(biggest[0], biggest[1])
]
])
return hic.HicImage.wavelet_image(payloads)
def wavelet_decode_pull_subbands(data, shapes):
offset = utils.size(shapes[0])
subbands = [transform.izigzag(np.array(data[:offset]), shapes[0])]
for i in range(len(shapes)):
subbands.append(transform.izigzag(np.array(data[offset:offset + utils.size(shapes[i])]), shapes[i]))
offset += utils.size(shapes[i])
subbands.append(transform.izigzag(np.array(data[offset:offset + utils.size(shapes[i])]), shapes[i]))
offset += utils.size(shapes[i])
subbands.append(transform.izigzag(np.array(data[offset:offset + utils.size(shapes[i])]), shapes[i]))
offset += utils.size(shapes[i])
return subbands
def wavelet_decoded_subbands_shapes(min_shape, max_shape):
"""
We just do Haar or Daubechie, assume power of 2
"""
levels = int(np.sqrt(max_shape[0] // min_shape[0]))
shapes = [(min_shape[0] * (np.power(2, i)), min_shape[1] * (np.power(2, i))) for i in range(0, levels + 1)]
return shapes
def wavelet_decoded_length(min_shape, max_shape):
shapes = wavelet_decoded_subbands_shapes(min_shape, max_shape)
length = functools.reduce(lambda agg, s: agg + (3 * (s[0] * s[1])), shapes, 0)
length += (min_shape[0] * min_shape[1])
return length
def wavelet_decode(hic: hic.HicImage) -> model.CompressedImage:
utils.debug_msg("Wavelet decode")
assert hic.hic_type == model.Compression.HIC
payloads = hic.payloads
utils.debug_msg("Decoding Huffman trees")
value_huffs = {
"lum": huffman_decode(payloads[0]),
"cr": huffman_decode(payloads[1]),
"cb": huffman_decode(payloads[2])
}
length_huffs = {
"lum": huffman_decode(payloads[3]),
"cr": huffman_decode(payloads[4]),
"cb": huffman_decode(payloads[5])
}
utils.debug_msg("Decode RLE values")
value_comps = {
"lum": huffman_data_decode(payloads[6], value_huffs["lum"]),
"cr": huffman_data_decode(payloads[7], value_huffs["cr"]),
"cb": huffman_data_decode(payloads[8], value_huffs["cb"]),
}
utils.debug_msg("Decode RLE lengths")
length_comps = {
"lum": huffman_data_decode(payloads[9], length_huffs["lum"]),
"cr": huffman_data_decode(payloads[10], length_huffs["cr"]),
"cb": huffman_data_decode(payloads[11], length_huffs["cb"]),
}
min_shape = payloads[12].numbers
max_shape = payloads[13].numbers
utils.debug_msg("Unloaded all of the data")
# ====
rles = utils.dict_map(value_comps,
lambda k, v: [RunLength(value=t[1], length=t[0]) for t in list(zip(length_comps[k], v))])
length = wavelet_decoded_length(min_shape, max_shape)
data = utils.dict_map(rles, lambda _, v: decode_run_length(v, length))
shapes = wavelet_decoded_subbands_shapes(min_shape, max_shape)
channels = utils.dict_map(data, lambda _, v: wavelet_decode_pull_subbands(v, shapes))
return model.CompressedImage.from_dict(channels)
def huffman_encode(huff: huffman.HuffmanTree) -> hic.Payload:
"""
Encode huffman in payload
"""
leaves = huff.encode_table()
return hic.PayloadStringP(hic.TupP, [hic.TupP(t[0], t[1]) for t in leaves])
def huffman_decode(data: hic.PayloadStringP) -> huffman.HuffmanTree:
"""
Decode huffman from payload
"""
number_string = data.payloads
leaves = [p.numbers for p in number_string]
return huffman.HuffmanTree.construct_from_coding(leaves)
def huffman_data_encode(huff: huffman.HuffmanTree) -> hic.Payload:
"""
Encode huffman data into payload
"""
data = huff.encode_data()
return hic.BitStringP(data)
def huffman_data_decode(data: hic.BitStringP, huffman: huffman.HuffmanTree) -> list:
"""
Decode huffman data from payload with huffman tree
"""
return huffman.decode_data(data.payload)
def jpeg_encode(compressed: model.CompressedImage) -> hic.HicImage:
"""
Generally follow JPEG encoding. Since for the wavelet work I am don't have some standard huffman tree to work with
I might as well be consistent between the two implementations and just encode the entire array with custom
Huffman trees. To attempt to be honest with the implementation though, I'll still treat the DC components
separately by doing the differences and again applying a custom Huffman. A main feature of DCT on each block is the
meaning of the DC component.
For RL it's also easier implementation-wise to split up the length from the value and not try to optimize and weave
them together. Yes, the encoding will suffer bloat, but we are trying to highlight the transforms anyway.
"""
utils.debug_msg("Starting JPEG encoding")
dc_comps = utils.dict_map(compressed.as_dict,
lambda _, v: differential_coding(transform.split_matrix(v, settings.JPEG_BLOCK_SIZE)))
utils.debug_msg("Determined differences DC components")
def ac_comp_fun(k, v):
utils.debug_msg("Determining AC components for: " + k)
splits = transform.split_matrix(v, settings.JPEG_BLOCK_SIZE)
acs = transform.ac_components(splits)
utils.debug_msg("Calculating RLE for: " + k)
out = run_length_coding(acs)
return out
# on each transformed channel, run RLE on the AC components of each block
ac_comps = utils.dict_map(compressed.as_dict, ac_comp_fun)
utils.debug_msg("Determined RLEs for AC components")
dc_huffs = utils.dict_map(dc_comps, lambda _, v: huffman.HuffmanTree.construct_from_data(v))
ac_value_huffs = utils.dict_map(ac_comps,
lambda _, v: huffman.HuffmanTree.construct_from_data(v, key_func=lambda s: s.value))
ac_length_huffs = utils.dict_map(ac_comps,
lambda _, v: huffman.HuffmanTree.construct_from_data(v,
key_func=lambda s: s.length))
def encode_huff(d):
huffs = [t[1] for t in d.items()]
return [huffman_encode(h) for h in huffs]
def encode_data(d):
huffs = [t[1] for t in d.items()]
return [huffman_data_encode(h) for h in huffs]
payloads = utils.flatten([
encode_huff(dc_huffs),
encode_huff(ac_value_huffs),
encode_huff(ac_length_huffs),
encode_data(dc_huffs),
encode_data(ac_value_huffs),
encode_data(ac_length_huffs),
[
hic.TupP(compressed.shape[0][0], compressed.shape[0][1]),
hic.TupP(compressed.shape[1][0], compressed.shape[1][1])
]
])
return hic.HicImage.jpeg_image(payloads)
def jpeg_decode(hic: hic.HicImage) -> model.CompressedImage:
"""
Reverse jpeg_encode()
payloads = utils.flatten([
encode_huff(dc_huffs),
encode_huff(ac_value_huffs),
encode_huff(ac_length_huffs),
encode_data(dc_huffs),
encode_data(ac_value_huffs),
encode_data(ac_length_huffs)
])
"""
utils.debug_msg("JPEG decode")
assert hic.hic_type == model.Compression.JPEG
payloads = hic.payloads
utils.debug_msg("Decoding Huffman trees")
dc_huffs = {
"lum": huffman_decode(payloads[0]),
"cr": huffman_decode(payloads[1]),
"cb": huffman_decode(payloads[2])
}
ac_value_huffs = {
"lum": huffman_decode(payloads[3]),
"cr": huffman_decode(payloads[4]),
"cb": huffman_decode(payloads[5])
}
ac_length_huffs = {
"lum": huffman_decode(payloads[6]),
"cr": huffman_decode(payloads[7]),
"cb": huffman_decode(payloads[8])
}
utils.debug_msg("Decode DC differences")
dc_comps = {
"lum": huffman_data_decode(payloads[9], dc_huffs["lum"]),
"cr": huffman_data_decode(payloads[10], dc_huffs["cr"]),
"cb": huffman_data_decode(payloads[11], dc_huffs["cb"]),
}
utils.debug_msg("Decode RLE values")
ac_values = {
"lum": huffman_data_decode(payloads[12], ac_value_huffs["lum"]),
"cr": huffman_data_decode(payloads[13], ac_value_huffs["cr"]),
"cb": huffman_data_decode(payloads[14], ac_value_huffs["cb"]),
}
utils.debug_msg("Decode RLE lengths")
ac_lengths = {
"lum": huffman_data_decode(payloads[15], ac_length_huffs["lum"]),
"cr": huffman_data_decode(payloads[16], ac_length_huffs["cr"]),
"cb": huffman_data_decode(payloads[17], ac_length_huffs["cb"]),
}
shapes = {
"lum": payloads[18].numbers,
"cr": payloads[19].numbers,
"cb": payloads[19].numbers
}
utils.debug_msg("Unloaded all of the data")
# ====
sub_length = utils.size(settings.JPEG_BLOCK_SHAPE()) - 1
utils.debug_msg("Calculating AC RLEs")
ac_rle = utils.dict_map(ac_values,
lambda k, v: [RunLength(t[1], t[0]) for t in list(zip(ac_lengths[k], v))])
def | (k, v):
utils.debug_msg("Determining deficient AC matricies for: " + k)
ac_length = utils.size(shapes[k]) - len(dc_comps[k])
out = decode_run_length(v, ac_length)
if k == "lum":
s = [str(i) for i in out]
print(" ".join(s))
return out
ac_mats = utils.dict_map(ac_rle, ac_mat_fun)
ac_mats = utils.dict_map(ac_mats, lambda _, v: utils.group_tuples(v, sub_length))
dc_comps = utils.dict_map(dc_comps, lambda _, v: utils.invert_differences(v))
def merge_comps(dc_key, dc_values):
utils.debug_msg("Merging: " + dc_key)
tuples = ac_mats[dc_key] # there are all of the AC zigzag arrays missing their DC component
assert len(tuples) == len(dc_values)
zipped = zip(dc_values, tuples) # combine them to be mapped later
lin_mats = [[t[0], *t[1]] for t in zipped] # create the linearized block
mats = [transform.izigzag(np.array(m), settings.JPEG_BLOCK_SHAPE()) for m in lin_mats]
return mats
compressed = utils.dict_map(dc_comps, merge_comps)
merged = utils.dict_map(compressed, lambda k, v: transform.merge_blocks(np.array(v), shapes[k]))
return model.CompressedImage.from_dict(merged)
| ac_mat_fun | identifier_name |
codec.py | import functools
from typing import List
import numpy as np
import hiccup.settings as settings
import hiccup.model as model
import hiccup.utils as utils
import hiccup.transform as transform
import hiccup.huffman as huffman
import hiccup.hicimage as hic
"""
Encoding/Decoding functionality aka
Run Length encoding
Huffman Encoding - looking at papers, you can rely on the default Huffman encodings for say jpeg but then for
our eventual Wavelet encoding, the same Huffman encodings are definitely not applicable. To be consistent, and
avoid having to copy the entire RL Huffman table, I'll generate on the fly and persist. This is expensive for
smaller images, but for very large images this is a small penalty.
"""
class RunLength:
@classmethod
def from_dict(cls, d):
return cls(d["value"], d["zeros"])
def __init__(self, value=0, length=0):
self.value = value
self.length = length
def __eq__(self, other):
return type(self) == type(other) and self.value == other.value and self.length == other.length
def __str__(self):
return "(%d, %d)" % (self.length, self.value)
@property
def segment(self):
return [0] * self.length + [self.value]
@property
def is_trailing(self):
return self.value == 0 and self.length == 0
def differential_coding(blocks: np.ndarray):
"""
Produce differential coding for the DC coefficients
"""
dc_comps = [transform.dc_component(b) for b in blocks]
return utils.differences(dc_comps)
def run_length_coding(arr: np.ndarray, max_len=0xF) -> List[RunLength]:
"""
Come up with the run length encoding for a matrix
"""
def _break_up_rle(code, max_len):
l = code["zeros"]
div = l // max_len
full = {
"zeros": max_len - 1, # minus 1 because we get another for free from the value
"value": 0
}
return ([full] * div) + [{
"zeros": l - (div * max_len),
"value": code["value"]
}]
def reduction(agg, next):
if "value" in agg[-1]:
agg.append({"zeros": 0})
if next == 0:
agg[-1]["zeros"] += 1
return agg
if "value" not in agg[-1]:
agg[-1]["value"] = next
return agg
utils.debug_msg("Going to determine RLE for %d size array" % len(arr))
rl = functools.reduce(reduction, arr, [{"zeros": 0}])
utils.debug_msg("%d long RLE created" % len(rl))
# If the last element has no value then it was 0! That is a special tuple, (0,0)
if "value" not in rl[-1]:
rl[-1] = {"zeros": 0, "value": 0}
# the goal of RLE in the case of compression is to contain the first symbol (length, size) within a byte
# so if the length is too long, then we need to break it up
if max_len is not None:
utils.debug_msg("Breaking up RLE lengths that are larger than %d" % max_len)
rl = [_break_up_rle(code, max_len) for code in rl]
rl = utils.flatten(rl)
utils.debug_msg("Make RLE objects")
return [RunLength.from_dict(r) for r in rl]
def decode_run_length(rles: List[RunLength], length: int):
arr = []
for (i, d) in enumerate(rles):
arr.append(d.segment)
arr = utils.flatten(arr)
# arr = utils.flatten([d.segment for d in rles])
if rles[-1].is_trailing:
|
return arr
def wavelet_encode(compressed: model.CompressedImage):
"""
In brief reading of literature, Huffman coding is still considered for wavelet image compression. There are other
more effective (and complicated schemes) that I think are out of scope of this project which is just to introduce
the concepts.
"""
def collapse_subbands(k, v):
out = [transform.zigzag(l) for l in v]
out = utils.flatten(out)
return out
utils.debug_msg("Starting Wavelet encoding")
lin_subbands = utils.dict_map(compressed.as_dict, collapse_subbands)
utils.debug_msg("Have completed linearizing the subbands")
rles = utils.dict_map(lin_subbands, lambda _, v: run_length_coding(v))
utils.debug_msg("Have completed the run length encodings")
values_huffs = utils.dict_map(rles,
lambda _, v: huffman.HuffmanTree.construct_from_data(v, key_func=lambda t: t.value))
length_huffs = utils.dict_map(rles,
lambda _, v: huffman.HuffmanTree.construct_from_data(v, key_func=lambda t: t.length))
utils.debug_msg("Huffman trees are constructed")
def encode_huff(d):
huffs = [t[1] for t in d.items()]
return [huffman_encode(h) for h in huffs]
def encode_data(d):
huffs = [t[1] for t in d.items()]
return [huffman_data_encode(h) for h in huffs]
smallest = compressed.luminance_component[0].shape
biggest = compressed.luminance_component[-1].shape
payloads = utils.flatten([
encode_huff(values_huffs),
encode_huff(length_huffs),
encode_data(values_huffs),
encode_data(length_huffs),
[
hic.TupP(smallest[0], smallest[1]),
hic.TupP(biggest[0], biggest[1])
]
])
return hic.HicImage.wavelet_image(payloads)
def wavelet_decode_pull_subbands(data, shapes):
offset = utils.size(shapes[0])
subbands = [transform.izigzag(np.array(data[:offset]), shapes[0])]
for i in range(len(shapes)):
subbands.append(transform.izigzag(np.array(data[offset:offset + utils.size(shapes[i])]), shapes[i]))
offset += utils.size(shapes[i])
subbands.append(transform.izigzag(np.array(data[offset:offset + utils.size(shapes[i])]), shapes[i]))
offset += utils.size(shapes[i])
subbands.append(transform.izigzag(np.array(data[offset:offset + utils.size(shapes[i])]), shapes[i]))
offset += utils.size(shapes[i])
return subbands
def wavelet_decoded_subbands_shapes(min_shape, max_shape):
"""
We just do Haar or Daubechie, assume power of 2
"""
levels = int(np.sqrt(max_shape[0] // min_shape[0]))
shapes = [(min_shape[0] * (np.power(2, i)), min_shape[1] * (np.power(2, i))) for i in range(0, levels + 1)]
return shapes
def wavelet_decoded_length(min_shape, max_shape):
shapes = wavelet_decoded_subbands_shapes(min_shape, max_shape)
length = functools.reduce(lambda agg, s: agg + (3 * (s[0] * s[1])), shapes, 0)
length += (min_shape[0] * min_shape[1])
return length
def wavelet_decode(hic: hic.HicImage) -> model.CompressedImage:
utils.debug_msg("Wavelet decode")
assert hic.hic_type == model.Compression.HIC
payloads = hic.payloads
utils.debug_msg("Decoding Huffman trees")
value_huffs = {
"lum": huffman_decode(payloads[0]),
"cr": huffman_decode(payloads[1]),
"cb": huffman_decode(payloads[2])
}
length_huffs = {
"lum": huffman_decode(payloads[3]),
"cr": huffman_decode(payloads[4]),
"cb": huffman_decode(payloads[5])
}
utils.debug_msg("Decode RLE values")
value_comps = {
"lum": huffman_data_decode(payloads[6], value_huffs["lum"]),
"cr": huffman_data_decode(payloads[7], value_huffs["cr"]),
"cb": huffman_data_decode(payloads[8], value_huffs["cb"]),
}
utils.debug_msg("Decode RLE lengths")
length_comps = {
"lum": huffman_data_decode(payloads[9], length_huffs["lum"]),
"cr": huffman_data_decode(payloads[10], length_huffs["cr"]),
"cb": huffman_data_decode(payloads[11], length_huffs["cb"]),
}
min_shape = payloads[12].numbers
max_shape = payloads[13].numbers
utils.debug_msg("Unloaded all of the data")
# ====
rles = utils.dict_map(value_comps,
lambda k, v: [RunLength(value=t[1], length=t[0]) for t in list(zip(length_comps[k], v))])
length = wavelet_decoded_length(min_shape, max_shape)
data = utils.dict_map(rles, lambda _, v: decode_run_length(v, length))
shapes = wavelet_decoded_subbands_shapes(min_shape, max_shape)
channels = utils.dict_map(data, lambda _, v: wavelet_decode_pull_subbands(v, shapes))
return model.CompressedImage.from_dict(channels)
def huffman_encode(huff: huffman.HuffmanTree) -> hic.Payload:
"""
Encode huffman in payload
"""
leaves = huff.encode_table()
return hic.PayloadStringP(hic.TupP, [hic.TupP(t[0], t[1]) for t in leaves])
def huffman_decode(data: hic.PayloadStringP) -> huffman.HuffmanTree:
"""
Decode huffman from payload
"""
number_string = data.payloads
leaves = [p.numbers for p in number_string]
return huffman.HuffmanTree.construct_from_coding(leaves)
def huffman_data_encode(huff: huffman.HuffmanTree) -> hic.Payload:
"""
Encode huffman data into payload
"""
data = huff.encode_data()
return hic.BitStringP(data)
def huffman_data_decode(data: hic.BitStringP, huffman: huffman.HuffmanTree) -> list:
"""
Decode huffman data from payload with huffman tree
"""
return huffman.decode_data(data.payload)
def jpeg_encode(compressed: model.CompressedImage) -> hic.HicImage:
"""
Generally follow JPEG encoding. Since for the wavelet work I am don't have some standard huffman tree to work with
I might as well be consistent between the two implementations and just encode the entire array with custom
Huffman trees. To attempt to be honest with the implementation though, I'll still treat the DC components
separately by doing the differences and again applying a custom Huffman. A main feature of DCT on each block is the
meaning of the DC component.
For RL it's also easier implementation-wise to split up the length from the value and not try to optimize and weave
them together. Yes, the encoding will suffer bloat, but we are trying to highlight the transforms anyway.
"""
utils.debug_msg("Starting JPEG encoding")
dc_comps = utils.dict_map(compressed.as_dict,
lambda _, v: differential_coding(transform.split_matrix(v, settings.JPEG_BLOCK_SIZE)))
utils.debug_msg("Determined differences DC components")
def ac_comp_fun(k, v):
utils.debug_msg("Determining AC components for: " + k)
splits = transform.split_matrix(v, settings.JPEG_BLOCK_SIZE)
acs = transform.ac_components(splits)
utils.debug_msg("Calculating RLE for: " + k)
out = run_length_coding(acs)
return out
# on each transformed channel, run RLE on the AC components of each block
ac_comps = utils.dict_map(compressed.as_dict, ac_comp_fun)
utils.debug_msg("Determined RLEs for AC components")
dc_huffs = utils.dict_map(dc_comps, lambda _, v: huffman.HuffmanTree.construct_from_data(v))
ac_value_huffs = utils.dict_map(ac_comps,
lambda _, v: huffman.HuffmanTree.construct_from_data(v, key_func=lambda s: s.value))
ac_length_huffs = utils.dict_map(ac_comps,
lambda _, v: huffman.HuffmanTree.construct_from_data(v,
key_func=lambda s: s.length))
def encode_huff(d):
huffs = [t[1] for t in d.items()]
return [huffman_encode(h) for h in huffs]
def encode_data(d):
huffs = [t[1] for t in d.items()]
return [huffman_data_encode(h) for h in huffs]
payloads = utils.flatten([
encode_huff(dc_huffs),
encode_huff(ac_value_huffs),
encode_huff(ac_length_huffs),
encode_data(dc_huffs),
encode_data(ac_value_huffs),
encode_data(ac_length_huffs),
[
hic.TupP(compressed.shape[0][0], compressed.shape[0][1]),
hic.TupP(compressed.shape[1][0], compressed.shape[1][1])
]
])
return hic.HicImage.jpeg_image(payloads)
def jpeg_decode(hic: hic.HicImage) -> model.CompressedImage:
"""
Reverse jpeg_encode()
payloads = utils.flatten([
encode_huff(dc_huffs),
encode_huff(ac_value_huffs),
encode_huff(ac_length_huffs),
encode_data(dc_huffs),
encode_data(ac_value_huffs),
encode_data(ac_length_huffs)
])
"""
utils.debug_msg("JPEG decode")
assert hic.hic_type == model.Compression.JPEG
payloads = hic.payloads
utils.debug_msg("Decoding Huffman trees")
dc_huffs = {
"lum": huffman_decode(payloads[0]),
"cr": huffman_decode(payloads[1]),
"cb": huffman_decode(payloads[2])
}
ac_value_huffs = {
"lum": huffman_decode(payloads[3]),
"cr": huffman_decode(payloads[4]),
"cb": huffman_decode(payloads[5])
}
ac_length_huffs = {
"lum": huffman_decode(payloads[6]),
"cr": huffman_decode(payloads[7]),
"cb": huffman_decode(payloads[8])
}
utils.debug_msg("Decode DC differences")
dc_comps = {
"lum": huffman_data_decode(payloads[9], dc_huffs["lum"]),
"cr": huffman_data_decode(payloads[10], dc_huffs["cr"]),
"cb": huffman_data_decode(payloads[11], dc_huffs["cb"]),
}
utils.debug_msg("Decode RLE values")
ac_values = {
"lum": huffman_data_decode(payloads[12], ac_value_huffs["lum"]),
"cr": huffman_data_decode(payloads[13], ac_value_huffs["cr"]),
"cb": huffman_data_decode(payloads[14], ac_value_huffs["cb"]),
}
utils.debug_msg("Decode RLE lengths")
ac_lengths = {
"lum": huffman_data_decode(payloads[15], ac_length_huffs["lum"]),
"cr": huffman_data_decode(payloads[16], ac_length_huffs["cr"]),
"cb": huffman_data_decode(payloads[17], ac_length_huffs["cb"]),
}
shapes = {
"lum": payloads[18].numbers,
"cr": payloads[19].numbers,
"cb": payloads[19].numbers
}
utils.debug_msg("Unloaded all of the data")
# ====
sub_length = utils.size(settings.JPEG_BLOCK_SHAPE()) - 1
utils.debug_msg("Calculating AC RLEs")
ac_rle = utils.dict_map(ac_values,
lambda k, v: [RunLength(t[1], t[0]) for t in list(zip(ac_lengths[k], v))])
def ac_mat_fun(k, v):
utils.debug_msg("Determining deficient AC matricies for: " + k)
ac_length = utils.size(shapes[k]) - len(dc_comps[k])
out = decode_run_length(v, ac_length)
if k == "lum":
s = [str(i) for i in out]
print(" ".join(s))
return out
ac_mats = utils.dict_map(ac_rle, ac_mat_fun)
ac_mats = utils.dict_map(ac_mats, lambda _, v: utils.group_tuples(v, sub_length))
dc_comps = utils.dict_map(dc_comps, lambda _, v: utils.invert_differences(v))
def merge_comps(dc_key, dc_values):
utils.debug_msg("Merging: " + dc_key)
tuples = ac_mats[dc_key] # there are all of the AC zigzag arrays missing their DC component
assert len(tuples) == len(dc_values)
zipped = zip(dc_values, tuples) # combine them to be mapped later
lin_mats = [[t[0], *t[1]] for t in zipped] # create the linearized block
mats = [transform.izigzag(np.array(m), settings.JPEG_BLOCK_SHAPE()) for m in lin_mats]
return mats
compressed = utils.dict_map(dc_comps, merge_comps)
merged = utils.dict_map(compressed, lambda k, v: transform.merge_blocks(np.array(v), shapes[k]))
return model.CompressedImage.from_dict(merged)
| fill = length - len(arr)
arr += ([0] * fill) | conditional_block |
codec.py | import functools
from typing import List
import numpy as np
import hiccup.settings as settings
import hiccup.model as model
import hiccup.utils as utils
import hiccup.transform as transform
import hiccup.huffman as huffman
import hiccup.hicimage as hic
"""
Encoding/Decoding functionality aka
Run Length encoding
Huffman Encoding - looking at papers, you can rely on the default Huffman encodings for say jpeg but then for
our eventual Wavelet encoding, the same Huffman encodings are definitely not applicable. To be consistent, and
avoid having to copy the entire RL Huffman table, I'll generate on the fly and persist. This is expensive for
smaller images, but for very large images this is a small penalty.
"""
class RunLength:
@classmethod
def from_dict(cls, d):
return cls(d["value"], d["zeros"])
def __init__(self, value=0, length=0):
self.value = value
self.length = length
def __eq__(self, other):
return type(self) == type(other) and self.value == other.value and self.length == other.length
def __str__(self):
return "(%d, %d)" % (self.length, self.value)
@property
def segment(self):
return [0] * self.length + [self.value]
@property
def is_trailing(self):
return self.value == 0 and self.length == 0
def differential_coding(blocks: np.ndarray):
"""
Produce differential coding for the DC coefficients
"""
dc_comps = [transform.dc_component(b) for b in blocks]
return utils.differences(dc_comps)
def run_length_coding(arr: np.ndarray, max_len=0xF) -> List[RunLength]: | l = code["zeros"]
div = l // max_len
full = {
"zeros": max_len - 1, # minus 1 because we get another for free from the value
"value": 0
}
return ([full] * div) + [{
"zeros": l - (div * max_len),
"value": code["value"]
}]
def reduction(agg, next):
if "value" in agg[-1]:
agg.append({"zeros": 0})
if next == 0:
agg[-1]["zeros"] += 1
return agg
if "value" not in agg[-1]:
agg[-1]["value"] = next
return agg
utils.debug_msg("Going to determine RLE for %d size array" % len(arr))
rl = functools.reduce(reduction, arr, [{"zeros": 0}])
utils.debug_msg("%d long RLE created" % len(rl))
# If the last element has no value then it was 0! That is a special tuple, (0,0)
if "value" not in rl[-1]:
rl[-1] = {"zeros": 0, "value": 0}
# the goal of RLE in the case of compression is to contain the first symbol (length, size) within a byte
# so if the length is too long, then we need to break it up
if max_len is not None:
utils.debug_msg("Breaking up RLE lengths that are larger than %d" % max_len)
rl = [_break_up_rle(code, max_len) for code in rl]
rl = utils.flatten(rl)
utils.debug_msg("Make RLE objects")
return [RunLength.from_dict(r) for r in rl]
def decode_run_length(rles: List[RunLength], length: int):
arr = []
for (i, d) in enumerate(rles):
arr.append(d.segment)
arr = utils.flatten(arr)
# arr = utils.flatten([d.segment for d in rles])
if rles[-1].is_trailing:
fill = length - len(arr)
arr += ([0] * fill)
return arr
def wavelet_encode(compressed: model.CompressedImage):
"""
In brief reading of literature, Huffman coding is still considered for wavelet image compression. There are other
more effective (and complicated schemes) that I think are out of scope of this project which is just to introduce
the concepts.
"""
def collapse_subbands(k, v):
out = [transform.zigzag(l) for l in v]
out = utils.flatten(out)
return out
utils.debug_msg("Starting Wavelet encoding")
lin_subbands = utils.dict_map(compressed.as_dict, collapse_subbands)
utils.debug_msg("Have completed linearizing the subbands")
rles = utils.dict_map(lin_subbands, lambda _, v: run_length_coding(v))
utils.debug_msg("Have completed the run length encodings")
values_huffs = utils.dict_map(rles,
lambda _, v: huffman.HuffmanTree.construct_from_data(v, key_func=lambda t: t.value))
length_huffs = utils.dict_map(rles,
lambda _, v: huffman.HuffmanTree.construct_from_data(v, key_func=lambda t: t.length))
utils.debug_msg("Huffman trees are constructed")
def encode_huff(d):
huffs = [t[1] for t in d.items()]
return [huffman_encode(h) for h in huffs]
def encode_data(d):
huffs = [t[1] for t in d.items()]
return [huffman_data_encode(h) for h in huffs]
smallest = compressed.luminance_component[0].shape
biggest = compressed.luminance_component[-1].shape
payloads = utils.flatten([
encode_huff(values_huffs),
encode_huff(length_huffs),
encode_data(values_huffs),
encode_data(length_huffs),
[
hic.TupP(smallest[0], smallest[1]),
hic.TupP(biggest[0], biggest[1])
]
])
return hic.HicImage.wavelet_image(payloads)
def wavelet_decode_pull_subbands(data, shapes):
offset = utils.size(shapes[0])
subbands = [transform.izigzag(np.array(data[:offset]), shapes[0])]
for i in range(len(shapes)):
subbands.append(transform.izigzag(np.array(data[offset:offset + utils.size(shapes[i])]), shapes[i]))
offset += utils.size(shapes[i])
subbands.append(transform.izigzag(np.array(data[offset:offset + utils.size(shapes[i])]), shapes[i]))
offset += utils.size(shapes[i])
subbands.append(transform.izigzag(np.array(data[offset:offset + utils.size(shapes[i])]), shapes[i]))
offset += utils.size(shapes[i])
return subbands
def wavelet_decoded_subbands_shapes(min_shape, max_shape):
"""
We just do Haar or Daubechie, assume power of 2
"""
levels = int(np.sqrt(max_shape[0] // min_shape[0]))
shapes = [(min_shape[0] * (np.power(2, i)), min_shape[1] * (np.power(2, i))) for i in range(0, levels + 1)]
return shapes
def wavelet_decoded_length(min_shape, max_shape):
shapes = wavelet_decoded_subbands_shapes(min_shape, max_shape)
length = functools.reduce(lambda agg, s: agg + (3 * (s[0] * s[1])), shapes, 0)
length += (min_shape[0] * min_shape[1])
return length
def wavelet_decode(hic: hic.HicImage) -> model.CompressedImage:
utils.debug_msg("Wavelet decode")
assert hic.hic_type == model.Compression.HIC
payloads = hic.payloads
utils.debug_msg("Decoding Huffman trees")
value_huffs = {
"lum": huffman_decode(payloads[0]),
"cr": huffman_decode(payloads[1]),
"cb": huffman_decode(payloads[2])
}
length_huffs = {
"lum": huffman_decode(payloads[3]),
"cr": huffman_decode(payloads[4]),
"cb": huffman_decode(payloads[5])
}
utils.debug_msg("Decode RLE values")
value_comps = {
"lum": huffman_data_decode(payloads[6], value_huffs["lum"]),
"cr": huffman_data_decode(payloads[7], value_huffs["cr"]),
"cb": huffman_data_decode(payloads[8], value_huffs["cb"]),
}
utils.debug_msg("Decode RLE lengths")
length_comps = {
"lum": huffman_data_decode(payloads[9], length_huffs["lum"]),
"cr": huffman_data_decode(payloads[10], length_huffs["cr"]),
"cb": huffman_data_decode(payloads[11], length_huffs["cb"]),
}
min_shape = payloads[12].numbers
max_shape = payloads[13].numbers
utils.debug_msg("Unloaded all of the data")
# ====
rles = utils.dict_map(value_comps,
lambda k, v: [RunLength(value=t[1], length=t[0]) for t in list(zip(length_comps[k], v))])
length = wavelet_decoded_length(min_shape, max_shape)
data = utils.dict_map(rles, lambda _, v: decode_run_length(v, length))
shapes = wavelet_decoded_subbands_shapes(min_shape, max_shape)
channels = utils.dict_map(data, lambda _, v: wavelet_decode_pull_subbands(v, shapes))
return model.CompressedImage.from_dict(channels)
def huffman_encode(huff: huffman.HuffmanTree) -> hic.Payload:
"""
Encode huffman in payload
"""
leaves = huff.encode_table()
return hic.PayloadStringP(hic.TupP, [hic.TupP(t[0], t[1]) for t in leaves])
def huffman_decode(data: hic.PayloadStringP) -> huffman.HuffmanTree:
"""
Decode huffman from payload
"""
number_string = data.payloads
leaves = [p.numbers for p in number_string]
return huffman.HuffmanTree.construct_from_coding(leaves)
def huffman_data_encode(huff: huffman.HuffmanTree) -> hic.Payload:
"""
Encode huffman data into payload
"""
data = huff.encode_data()
return hic.BitStringP(data)
def huffman_data_decode(data: hic.BitStringP, huffman: huffman.HuffmanTree) -> list:
"""
Decode huffman data from payload with huffman tree
"""
return huffman.decode_data(data.payload)
def jpeg_encode(compressed: model.CompressedImage) -> hic.HicImage:
"""
Generally follow JPEG encoding. Since for the wavelet work I am don't have some standard huffman tree to work with
I might as well be consistent between the two implementations and just encode the entire array with custom
Huffman trees. To attempt to be honest with the implementation though, I'll still treat the DC components
separately by doing the differences and again applying a custom Huffman. A main feature of DCT on each block is the
meaning of the DC component.
For RL it's also easier implementation-wise to split up the length from the value and not try to optimize and weave
them together. Yes, the encoding will suffer bloat, but we are trying to highlight the transforms anyway.
"""
utils.debug_msg("Starting JPEG encoding")
dc_comps = utils.dict_map(compressed.as_dict,
lambda _, v: differential_coding(transform.split_matrix(v, settings.JPEG_BLOCK_SIZE)))
utils.debug_msg("Determined differences DC components")
def ac_comp_fun(k, v):
utils.debug_msg("Determining AC components for: " + k)
splits = transform.split_matrix(v, settings.JPEG_BLOCK_SIZE)
acs = transform.ac_components(splits)
utils.debug_msg("Calculating RLE for: " + k)
out = run_length_coding(acs)
return out
# on each transformed channel, run RLE on the AC components of each block
ac_comps = utils.dict_map(compressed.as_dict, ac_comp_fun)
utils.debug_msg("Determined RLEs for AC components")
dc_huffs = utils.dict_map(dc_comps, lambda _, v: huffman.HuffmanTree.construct_from_data(v))
ac_value_huffs = utils.dict_map(ac_comps,
lambda _, v: huffman.HuffmanTree.construct_from_data(v, key_func=lambda s: s.value))
ac_length_huffs = utils.dict_map(ac_comps,
lambda _, v: huffman.HuffmanTree.construct_from_data(v,
key_func=lambda s: s.length))
def encode_huff(d):
huffs = [t[1] for t in d.items()]
return [huffman_encode(h) for h in huffs]
def encode_data(d):
huffs = [t[1] for t in d.items()]
return [huffman_data_encode(h) for h in huffs]
payloads = utils.flatten([
encode_huff(dc_huffs),
encode_huff(ac_value_huffs),
encode_huff(ac_length_huffs),
encode_data(dc_huffs),
encode_data(ac_value_huffs),
encode_data(ac_length_huffs),
[
hic.TupP(compressed.shape[0][0], compressed.shape[0][1]),
hic.TupP(compressed.shape[1][0], compressed.shape[1][1])
]
])
return hic.HicImage.jpeg_image(payloads)
def jpeg_decode(hic: hic.HicImage) -> model.CompressedImage:
"""
Reverse jpeg_encode()
payloads = utils.flatten([
encode_huff(dc_huffs),
encode_huff(ac_value_huffs),
encode_huff(ac_length_huffs),
encode_data(dc_huffs),
encode_data(ac_value_huffs),
encode_data(ac_length_huffs)
])
"""
utils.debug_msg("JPEG decode")
assert hic.hic_type == model.Compression.JPEG
payloads = hic.payloads
utils.debug_msg("Decoding Huffman trees")
dc_huffs = {
"lum": huffman_decode(payloads[0]),
"cr": huffman_decode(payloads[1]),
"cb": huffman_decode(payloads[2])
}
ac_value_huffs = {
"lum": huffman_decode(payloads[3]),
"cr": huffman_decode(payloads[4]),
"cb": huffman_decode(payloads[5])
}
ac_length_huffs = {
"lum": huffman_decode(payloads[6]),
"cr": huffman_decode(payloads[7]),
"cb": huffman_decode(payloads[8])
}
utils.debug_msg("Decode DC differences")
dc_comps = {
"lum": huffman_data_decode(payloads[9], dc_huffs["lum"]),
"cr": huffman_data_decode(payloads[10], dc_huffs["cr"]),
"cb": huffman_data_decode(payloads[11], dc_huffs["cb"]),
}
utils.debug_msg("Decode RLE values")
ac_values = {
"lum": huffman_data_decode(payloads[12], ac_value_huffs["lum"]),
"cr": huffman_data_decode(payloads[13], ac_value_huffs["cr"]),
"cb": huffman_data_decode(payloads[14], ac_value_huffs["cb"]),
}
utils.debug_msg("Decode RLE lengths")
ac_lengths = {
"lum": huffman_data_decode(payloads[15], ac_length_huffs["lum"]),
"cr": huffman_data_decode(payloads[16], ac_length_huffs["cr"]),
"cb": huffman_data_decode(payloads[17], ac_length_huffs["cb"]),
}
shapes = {
"lum": payloads[18].numbers,
"cr": payloads[19].numbers,
"cb": payloads[19].numbers
}
utils.debug_msg("Unloaded all of the data")
# ====
sub_length = utils.size(settings.JPEG_BLOCK_SHAPE()) - 1
utils.debug_msg("Calculating AC RLEs")
ac_rle = utils.dict_map(ac_values,
lambda k, v: [RunLength(t[1], t[0]) for t in list(zip(ac_lengths[k], v))])
def ac_mat_fun(k, v):
utils.debug_msg("Determining deficient AC matricies for: " + k)
ac_length = utils.size(shapes[k]) - len(dc_comps[k])
out = decode_run_length(v, ac_length)
if k == "lum":
s = [str(i) for i in out]
print(" ".join(s))
return out
ac_mats = utils.dict_map(ac_rle, ac_mat_fun)
ac_mats = utils.dict_map(ac_mats, lambda _, v: utils.group_tuples(v, sub_length))
dc_comps = utils.dict_map(dc_comps, lambda _, v: utils.invert_differences(v))
def merge_comps(dc_key, dc_values):
utils.debug_msg("Merging: " + dc_key)
tuples = ac_mats[dc_key] # there are all of the AC zigzag arrays missing their DC component
assert len(tuples) == len(dc_values)
zipped = zip(dc_values, tuples) # combine them to be mapped later
lin_mats = [[t[0], *t[1]] for t in zipped] # create the linearized block
mats = [transform.izigzag(np.array(m), settings.JPEG_BLOCK_SHAPE()) for m in lin_mats]
return mats
compressed = utils.dict_map(dc_comps, merge_comps)
merged = utils.dict_map(compressed, lambda k, v: transform.merge_blocks(np.array(v), shapes[k]))
return model.CompressedImage.from_dict(merged) | """
Come up with the run length encoding for a matrix
"""
def _break_up_rle(code, max_len): | random_line_split |
codec.py | import functools
from typing import List
import numpy as np
import hiccup.settings as settings
import hiccup.model as model
import hiccup.utils as utils
import hiccup.transform as transform
import hiccup.huffman as huffman
import hiccup.hicimage as hic
"""
Encoding/Decoding functionality aka
Run Length encoding
Huffman Encoding - looking at papers, you can rely on the default Huffman encodings for say jpeg but then for
our eventual Wavelet encoding, the same Huffman encodings are definitely not applicable. To be consistent, and
avoid having to copy the entire RL Huffman table, I'll generate on the fly and persist. This is expensive for
smaller images, but for very large images this is a small penalty.
"""
class RunLength:
    """A single run-length code: a run of `length` zeros followed by `value`.

    The special code (length=0, value=0) is the trailing marker meaning
    "every remaining value is zero".
    """

    def __init__(self, value=0, length=0):
        self.value = value
        self.length = length

    @classmethod
    def from_dict(cls, d):
        """Build a code from a `{"value": ..., "zeros": ...}` dict."""
        return cls(d["value"], d["zeros"])

    def __eq__(self, other):
        if type(other) is not type(self):
            return False
        return (self.value, self.length) == (other.value, other.length)

    def __str__(self):
        return "(%d, %d)" % (self.length, self.value)

    @property
    def segment(self):
        """The decoded values this code stands for: the zeros, then the value."""
        return [0 for _ in range(self.length)] + [self.value]

    @property
    def is_trailing(self):
        """True for the (0, 0) end-of-data marker."""
        return (self.length, self.value) == (0, 0)
def differential_coding(blocks: np.ndarray):
    """
    Produce the differential coding of the DC coefficients: the first DC value
    followed by successive differences between neighboring blocks' DC values.
    """
    return utils.differences([transform.dc_component(block) for block in blocks])
def run_length_coding(arr: np.ndarray, max_len=0xF) -> List[RunLength]:
    """
    Come up with the run length encoding for a matrix

    Each code is a (zeros, value) pair: a run of zeros followed by one value.
    A trailing all-zero run is emitted as the special (0, 0) terminator.
    `max_len` caps the run length so the (length, size) symbol fits in a byte,
    mirroring standard JPEG RLE; pass None to disable the cap.
    """
    def _break_up_rle(code, max_len):
        # Split a run longer than max_len into `div` full-length filler codes
        # (each covering exactly max_len zeros) plus one remainder code that
        # carries the actual value.
        l = code["zeros"]
        div = l // max_len
        full = {
            "zeros": max_len - 1,  # minus 1 because we get another for free from the value
            "value": 0
        }
        return ([full] * div) + [{
            "zeros": l - (div * max_len),
            "value": code["value"]
        }]

    def reduction(agg, next):
        # agg[-1] is the code currently being built. Once it has a "value" it
        # is complete, so start a fresh one.
        if "value" in agg[-1]:
            agg.append({"zeros": 0})
        if next == 0:
            # Extend the current run of zeros.
            agg[-1]["zeros"] += 1
            return agg
        if "value" not in agg[-1]:
            # Non-zero value terminates the current run.
            agg[-1]["value"] = next
        return agg

    utils.debug_msg("Going to determine RLE for %d size array" % len(arr))
    rl = functools.reduce(reduction, arr, [{"zeros": 0}])
    utils.debug_msg("%d long RLE created" % len(rl))
    # If the last element has no value then it was 0! That is a special tuple, (0,0)
    if "value" not in rl[-1]:
        rl[-1] = {"zeros": 0, "value": 0}
    # the goal of RLE in the case of compression is to contain the first symbol (length, size) within a byte
    # so if the length is too long, then we need to break it up
    if max_len is not None:
        utils.debug_msg("Breaking up RLE lengths that are larger than %d" % max_len)
        rl = [_break_up_rle(code, max_len) for code in rl]
        rl = utils.flatten(rl)
    utils.debug_msg("Make RLE objects")
    return [RunLength.from_dict(r) for r in rl]
def decode_run_length(rles: List[RunLength], length: int):
    """
    Expand run-length codes back into a flat list of values.

    :param rles: run-length codes produced by `run_length_coding`
    :param length: expected decoded length; used to pad zeros when the last
                   code is the (0, 0) trailing terminator
    :return: list of decoded values
    """
    if not rles:
        # Previously this raised IndexError on rles[-1]; an empty code list
        # most plausibly decodes to all zeros -- TODO confirm callers never
        # rely on the old crash.
        return [0] * length
    arr = utils.flatten([d.segment for d in rles])
    if rles[-1].is_trailing:
        # The (0, 0) terminator stands for "the rest of the values are zero".
        arr += [0] * (length - len(arr))
    return arr
def wavelet_encode(compressed: model.CompressedImage):
    """
    In brief reading of literature, Huffman coding is still considered for wavelet image compression. There are other
    more effective (and complicated schemes) that I think are out of scope of this project which is just to introduce
    the concepts.

    Payload layout (consumed in the same order by `wavelet_decode`):
    value Huffman tables x3 channels, length Huffman tables x3,
    value bitstreams x3, length bitstreams x3, then the smallest and biggest
    subband shapes of the luminance channel.
    """
    def collapse_subbands(k, v):
        # Linearize every subband of a channel via zigzag and concatenate.
        out = [transform.zigzag(l) for l in v]
        out = utils.flatten(out)
        return out

    utils.debug_msg("Starting Wavelet encoding")
    lin_subbands = utils.dict_map(compressed.as_dict, collapse_subbands)
    utils.debug_msg("Have completed linearizing the subbands")
    rles = utils.dict_map(lin_subbands, lambda _, v: run_length_coding(v))
    utils.debug_msg("Have completed the run length encodings")
    # Separate Huffman trees for the RLE values and the RLE run lengths.
    values_huffs = utils.dict_map(rles,
                                  lambda _, v: huffman.HuffmanTree.construct_from_data(v, key_func=lambda t: t.value))
    length_huffs = utils.dict_map(rles,
                                  lambda _, v: huffman.HuffmanTree.construct_from_data(v, key_func=lambda t: t.length))
    utils.debug_msg("Huffman trees are constructed")

    def encode_huff(d):
        # Serialize each channel's Huffman table as a payload.
        huffs = [t[1] for t in d.items()]
        return [huffman_encode(h) for h in huffs]

    def encode_data(d):
        # Serialize each channel's Huffman-coded bitstream as a payload.
        huffs = [t[1] for t in d.items()]
        return [huffman_data_encode(h) for h in huffs]

    # Smallest (coarsest) and biggest (finest) subband shapes let the decoder
    # reconstruct the full subband shape pyramid.
    smallest = compressed.luminance_component[0].shape
    biggest = compressed.luminance_component[-1].shape
    payloads = utils.flatten([
        encode_huff(values_huffs),
        encode_huff(length_huffs),
        encode_data(values_huffs),
        encode_data(length_huffs),
        [
            hic.TupP(smallest[0], smallest[1]),
            hic.TupP(biggest[0], biggest[1])
        ]
    ])
    return hic.HicImage.wavelet_image(payloads)
def wavelet_decode_pull_subbands(data, shapes):
|
def wavelet_decoded_subbands_shapes(min_shape, max_shape):
    """
    We just do Haar or Daubechie, assume power of 2

    Enumerate the subband shapes of a dyadic wavelet pyramid from `min_shape`
    up to `max_shape` (inclusive), doubling each dimension per level.

    Bug fix: the level count is log2 of the dimension ratio, not sqrt.
    `int(np.sqrt(ratio))` only coincides with `log2(ratio)` for ratios 4 and
    16, so e.g. min=(4,4), max=(32,32) previously dropped the (32,32) level
    and even min == max produced a spurious extra level.
    """
    levels = int(np.log2(max_shape[0] // min_shape[0]))
    shapes = [(min_shape[0] * (np.power(2, i)), min_shape[1] * (np.power(2, i))) for i in range(0, levels + 1)]
    return shapes
def wavelet_decoded_length(min_shape, max_shape):
    """Total coefficient count of the decoded pyramid: three detail subbands
    per level plus one approximation subband at the coarsest shape."""
    shapes = wavelet_decoded_subbands_shapes(min_shape, max_shape)
    detail_total = sum(3 * s[0] * s[1] for s in shapes)
    return detail_total + (min_shape[0] * min_shape[1])
def wavelet_decode(hic: hic.HicImage) -> model.CompressedImage:
    """
    Reverse `wavelet_encode`: rebuild per-channel subband pyramids from the
    payload list (value/length Huffman tables, value/length bitstreams, then
    the smallest and biggest subband shapes).
    """
    utils.debug_msg("Wavelet decode")
    assert hic.hic_type == model.Compression.HIC
    payloads = hic.payloads
    utils.debug_msg("Decoding Huffman trees")
    # Payloads 0-5: Huffman tables (values then lengths), one per channel.
    value_huffs = {
        "lum": huffman_decode(payloads[0]),
        "cr": huffman_decode(payloads[1]),
        "cb": huffman_decode(payloads[2])
    }
    length_huffs = {
        "lum": huffman_decode(payloads[3]),
        "cr": huffman_decode(payloads[4]),
        "cb": huffman_decode(payloads[5])
    }
    utils.debug_msg("Decode RLE values")
    # Payloads 6-11: Huffman-coded bitstreams for RLE values and run lengths.
    value_comps = {
        "lum": huffman_data_decode(payloads[6], value_huffs["lum"]),
        "cr": huffman_data_decode(payloads[7], value_huffs["cr"]),
        "cb": huffman_data_decode(payloads[8], value_huffs["cb"]),
    }
    utils.debug_msg("Decode RLE lengths")
    length_comps = {
        "lum": huffman_data_decode(payloads[9], length_huffs["lum"]),
        "cr": huffman_data_decode(payloads[10], length_huffs["cr"]),
        "cb": huffman_data_decode(payloads[11], length_huffs["cb"]),
    }
    # Payloads 12-13: coarsest and finest subband shapes.
    min_shape = payloads[12].numbers
    max_shape = payloads[13].numbers
    utils.debug_msg("Unloaded all of the data")
    # ====
    # Re-pair lengths with values into RunLength codes, expand to the full
    # coefficient stream, then carve it back into subbands per channel.
    rles = utils.dict_map(value_comps,
                          lambda k, v: [RunLength(value=t[1], length=t[0]) for t in list(zip(length_comps[k], v))])
    length = wavelet_decoded_length(min_shape, max_shape)
    data = utils.dict_map(rles, lambda _, v: decode_run_length(v, length))
    shapes = wavelet_decoded_subbands_shapes(min_shape, max_shape)
    channels = utils.dict_map(data, lambda _, v: wavelet_decode_pull_subbands(v, shapes))
    return model.CompressedImage.from_dict(channels)
def huffman_encode(huff: huffman.HuffmanTree) -> hic.Payload:
    """
    Encode huffman in payload

    Serializes the tree's coding table as a string of (leaf, code) tuples so
    `huffman_decode` can rebuild an equivalent tree.
    """
    leaves = huff.encode_table()
    return hic.PayloadStringP(hic.TupP, [hic.TupP(t[0], t[1]) for t in leaves])
def huffman_decode(data: hic.PayloadStringP) -> huffman.HuffmanTree:
    """
    Decode huffman from payload: rebuild the tree from its serialized
    (leaf, code) table written by `huffman_encode`.
    """
    leaves = [payload.numbers for payload in data.payloads]
    return huffman.HuffmanTree.construct_from_coding(leaves)
def huffman_data_encode(huff: huffman.HuffmanTree) -> hic.Payload:
    """
    Encode huffman data into payload

    Emits the Huffman-coded bitstream of the tree's underlying data.
    """
    data = huff.encode_data()
    return hic.BitStringP(data)
def huffman_data_decode(data: hic.BitStringP, huffman: huffman.HuffmanTree) -> list:
    """
    Decode huffman data from payload with huffman tree
    """
    # NOTE(review): the `huffman` parameter shadows the `hiccup.huffman` module
    # inside this body (the annotation is evaluated at def time, so it still
    # refers to the module). Renaming would break keyword callers, so it is
    # only flagged here.
    return huffman.decode_data(data.payload)
def jpeg_encode(compressed: model.CompressedImage) -> hic.HicImage:
    """
    Generally follow JPEG encoding. Since for the wavelet work I am don't have some standard huffman tree to work with
    I might as well be consistent between the two implementations and just encode the entire array with custom
    Huffman trees. To attempt to be honest with the implementation though, I'll still treat the DC components
    separately by doing the differences and again applying a custom Huffman. A main feature of DCT on each block is the
    meaning of the DC component.
    For RL it's also easier implementation-wise to split up the length from the value and not try to optimize and weave
    them together. Yes, the encoding will suffer bloat, but we are trying to highlight the transforms anyway.

    Payload layout (consumed in the same order by `jpeg_decode`):
    DC/AC-value/AC-length Huffman tables x3 channels each, then the three
    matching bitstream groups, then the two channel shapes.
    """
    utils.debug_msg("Starting JPEG encoding")
    # Differentially code the DC coefficient of every block, per channel.
    dc_comps = utils.dict_map(compressed.as_dict,
                              lambda _, v: differential_coding(transform.split_matrix(v, settings.JPEG_BLOCK_SIZE)))
    utils.debug_msg("Determined differences DC components")

    def ac_comp_fun(k, v):
        utils.debug_msg("Determining AC components for: " + k)
        splits = transform.split_matrix(v, settings.JPEG_BLOCK_SIZE)
        acs = transform.ac_components(splits)
        utils.debug_msg("Calculating RLE for: " + k)
        out = run_length_coding(acs)
        return out

    # on each transformed channel, run RLE on the AC components of each block
    ac_comps = utils.dict_map(compressed.as_dict, ac_comp_fun)
    utils.debug_msg("Determined RLEs for AC components")
    # Custom Huffman trees: one for DC diffs, and separate trees for the AC
    # RLE values and AC RLE run lengths.
    dc_huffs = utils.dict_map(dc_comps, lambda _, v: huffman.HuffmanTree.construct_from_data(v))
    ac_value_huffs = utils.dict_map(ac_comps,
                                    lambda _, v: huffman.HuffmanTree.construct_from_data(v, key_func=lambda s: s.value))
    ac_length_huffs = utils.dict_map(ac_comps,
                                     lambda _, v: huffman.HuffmanTree.construct_from_data(v,
                                                                                         key_func=lambda s: s.length))

    def encode_huff(d):
        # Serialize each channel's Huffman table.
        huffs = [t[1] for t in d.items()]
        return [huffman_encode(h) for h in huffs]

    def encode_data(d):
        # Serialize each channel's Huffman-coded bitstream.
        huffs = [t[1] for t in d.items()]
        return [huffman_data_encode(h) for h in huffs]

    payloads = utils.flatten([
        encode_huff(dc_huffs),
        encode_huff(ac_value_huffs),
        encode_huff(ac_length_huffs),
        encode_data(dc_huffs),
        encode_data(ac_value_huffs),
        encode_data(ac_length_huffs),
        [
            hic.TupP(compressed.shape[0][0], compressed.shape[0][1]),
            hic.TupP(compressed.shape[1][0], compressed.shape[1][1])
        ]
    ])
    return hic.HicImage.jpeg_image(payloads)
def jpeg_decode(hic: hic.HicImage) -> model.CompressedImage:
    """
    Reverse jpeg_encode()
    payloads = utils.flatten([
        encode_huff(dc_huffs),
        encode_huff(ac_value_huffs),
        encode_huff(ac_length_huffs),
        encode_data(dc_huffs),
        encode_data(ac_value_huffs),
        encode_data(ac_length_huffs)
    ])

    Fix: removed a leftover debug `print` that dumped the entire decoded
    luminance AC stream to stdout on every decode.
    """
    utils.debug_msg("JPEG decode")
    assert hic.hic_type == model.Compression.JPEG
    payloads = hic.payloads
    utils.debug_msg("Decoding Huffman trees")
    # Payloads 0-8: Huffman tables (DC, AC values, AC lengths) per channel.
    dc_huffs = {
        "lum": huffman_decode(payloads[0]),
        "cr": huffman_decode(payloads[1]),
        "cb": huffman_decode(payloads[2])
    }
    ac_value_huffs = {
        "lum": huffman_decode(payloads[3]),
        "cr": huffman_decode(payloads[4]),
        "cb": huffman_decode(payloads[5])
    }
    ac_length_huffs = {
        "lum": huffman_decode(payloads[6]),
        "cr": huffman_decode(payloads[7]),
        "cb": huffman_decode(payloads[8])
    }
    utils.debug_msg("Decode DC differences")
    # Payloads 9-17: the corresponding Huffman-coded bitstreams.
    dc_comps = {
        "lum": huffman_data_decode(payloads[9], dc_huffs["lum"]),
        "cr": huffman_data_decode(payloads[10], dc_huffs["cr"]),
        "cb": huffman_data_decode(payloads[11], dc_huffs["cb"]),
    }
    utils.debug_msg("Decode RLE values")
    ac_values = {
        "lum": huffman_data_decode(payloads[12], ac_value_huffs["lum"]),
        "cr": huffman_data_decode(payloads[13], ac_value_huffs["cr"]),
        "cb": huffman_data_decode(payloads[14], ac_value_huffs["cb"]),
    }
    utils.debug_msg("Decode RLE lengths")
    ac_lengths = {
        "lum": huffman_data_decode(payloads[15], ac_length_huffs["lum"]),
        "cr": huffman_data_decode(payloads[16], ac_length_huffs["cr"]),
        "cb": huffman_data_decode(payloads[17], ac_length_huffs["cb"]),
    }
    # The encoder stores only two shapes: luminance and (subsampled) chroma,
    # so both chroma channels share payloads[19] -- presumably intentional,
    # matching `jpeg_encode`; confirm against model.CompressedImage.shape.
    shapes = {
        "lum": payloads[18].numbers,
        "cr": payloads[19].numbers,
        "cb": payloads[19].numbers
    }
    utils.debug_msg("Unloaded all of the data")
    # ====
    # Number of AC coefficients per block (block size minus the DC slot).
    sub_length = utils.size(settings.JPEG_BLOCK_SHAPE()) - 1
    utils.debug_msg("Calculating AC RLEs")
    # Re-pair run lengths with values into RunLength(value, length) codes.
    ac_rle = utils.dict_map(ac_values,
                            lambda k, v: [RunLength(t[1], t[0]) for t in list(zip(ac_lengths[k], v))])

    def ac_mat_fun(k, v):
        utils.debug_msg("Determining deficient AC matricies for: " + k)
        # Total AC count = channel size minus one DC coefficient per block.
        ac_length = utils.size(shapes[k]) - len(dc_comps[k])
        return decode_run_length(v, ac_length)

    ac_mats = utils.dict_map(ac_rle, ac_mat_fun)
    # Group the flat AC stream into one tuple of coefficients per block.
    ac_mats = utils.dict_map(ac_mats, lambda _, v: utils.group_tuples(v, sub_length))
    # Undo the DC differential coding back to absolute DC values.
    dc_comps = utils.dict_map(dc_comps, lambda _, v: utils.invert_differences(v))

    def merge_comps(dc_key, dc_values):
        utils.debug_msg("Merging: " + dc_key)
        tuples = ac_mats[dc_key]  # there are all of the AC zigzag arrays missing their DC component
        assert len(tuples) == len(dc_values)
        zipped = zip(dc_values, tuples)  # combine them to be mapped later
        lin_mats = [[t[0], *t[1]] for t in zipped]  # create the linearized block
        mats = [transform.izigzag(np.array(m), settings.JPEG_BLOCK_SHAPE()) for m in lin_mats]
        return mats

    compressed = utils.dict_map(dc_comps, merge_comps)
    merged = utils.dict_map(compressed, lambda k, v: transform.merge_blocks(np.array(v), shapes[k]))
    return model.CompressedImage.from_dict(merged)
| offset = utils.size(shapes[0])
subbands = [transform.izigzag(np.array(data[:offset]), shapes[0])]
for i in range(len(shapes)):
subbands.append(transform.izigzag(np.array(data[offset:offset + utils.size(shapes[i])]), shapes[i]))
offset += utils.size(shapes[i])
subbands.append(transform.izigzag(np.array(data[offset:offset + utils.size(shapes[i])]), shapes[i]))
offset += utils.size(shapes[i])
subbands.append(transform.izigzag(np.array(data[offset:offset + utils.size(shapes[i])]), shapes[i]))
offset += utils.size(shapes[i])
return subbands | identifier_body |
mailbox.rs | use std::future::Future;
use anyhow::Result;
use wasmtime::{Caller, FuncType, Linker, Trap, ValType};
use crate::{
api::{error::IntoTrap, get_memory},
message::Message,
state::ProcessState,
};
use super::{link_async2_if_match, link_if_match};
/// Register the mailbox APIs to the linker.
///
/// Each host function is only linked when its namespace passes the
/// `namespace_filter`; `link_async2_if_match` is used for `prepare_receive`
/// because it awaits on the process mailbox.
pub(crate) fn register(
    linker: &mut Linker<ProcessState>,
    namespace_filter: &[String],
) -> Result<()> {
    // create() -> (): start a fresh scratch message.
    link_if_match(
        linker,
        "lunatic::message",
        "create",
        FuncType::new([], []),
        create,
        namespace_filter,
    )?;
    // set_buffer(data_ptr, data_len): copy guest memory into the message.
    link_if_match(
        linker,
        "lunatic::message",
        "set_buffer",
        FuncType::new([ValType::I32, ValType::I32], []),
        set_buffer,
        namespace_filter,
    )?;
    // add_process(process_id) -> index: move a process resource into the message.
    link_if_match(
        linker,
        "lunatic::message",
        "add_process",
        FuncType::new([ValType::I64], [ValType::I64]),
        add_process,
        namespace_filter,
    )?;
    // add_tcp_stream(stream_id) -> index: move a TCP stream into the message.
    link_if_match(
        linker,
        "lunatic::message",
        "add_tcp_stream",
        FuncType::new([ValType::I64], [ValType::I64]),
        add_tcp_stream,
        namespace_filter,
    )?;
    // send(process_id) -> status: deliver the scratch message.
    link_if_match(
        linker,
        "lunatic::message",
        "send",
        FuncType::new([ValType::I64], [ValType::I32]),
        send,
        namespace_filter,
    )?;
    // prepare_receive(data_size_ptr, res_size_ptr) -> kind: await next message
    // and report its sizes (async).
    link_async2_if_match(
        linker,
        "lunatic::message",
        "prepare_receive",
        FuncType::new([ValType::I32, ValType::I32], [ValType::I32]),
        prepare_receive,
        namespace_filter,
    )?;
    // receive(data_ptr, resource_ptr): write the prepared message to the guest.
    link_if_match(
        linker,
        "lunatic::message",
        "receive",
        FuncType::new([ValType::I32, ValType::I32], []),
        receive,
        namespace_filter,
    )?;
    Ok(())
}
//% lunatic::message
//%
//% A lunatic message consists of 2 parts:
//% * A buffer of raw data
//% * An array of resource IDs
//%
//% If resources are sent between processes, their ID changes. The resource ID can for example
//% be already taken in the receiving process. So we need a way to communicate the new ID on the
//% receiving end.
//%
//% When the `create()` function is called an empty message is allocated and both parts can be
//% modified before it's sent to another process. If a new resource is added to the message, the
//% index inside of the array is returned. This information can be now serialized inside the raw
//% data buffer in some way. E.g. You are serializing a structure like this:
//%
//% struct A {
//% a: String,
//% b: Process,
//% c: i32,
//% d: TcpStream
//% }
//%
//% Into something like this:
//%
//% ["Some string" | [resource 0] | i32 value | [resource 1] ]
//%
//% [resource 0] & [resource 1] are just encoded as 0 and 1 u64 values, representing their order
//% in the resource array.
//%
//% It's common to use some serialization library that will encode a mixture of raw data and
//% resource indexes into the data buffer.
//%
//% On the receiving side, first the `prepare_receive()` function must be called to receive info
//% on how big the buffer and resource arrays are, so that enough space can be allocated inside
//% the guest.
//%
//% The `receive()` function will do 2 things:
//% * Write the buffer of raw data to the specified location
//% * Give all resources to the new process (with new IDs) and write the IDs to the specified
//% location in the same order they were added.
//% Now the information from the buffer (with resource indexes) can be used to deserialize the
//% received message into the same structure.
//%
//% This can be a bit confusing, because resources are just IDs (u64 values) themself. But we
//% still need to serialize them into different u64 values. Resources are inherently bound to a
//% process and you can't access another resource just by guessing an ID from another process.
//% The process of sending them around needs to be explicit.
//%
//% This API was designed around the idea that most guest languages will use some serialization
//% library and turning resources into indexes is a way of serializing. The same is true for
//% deserializing them on the receiving side, when an index needs to be turned into an actual
//% resource ID.
//% lunatic::message::create()
//%
//% Creates a new message. This message is intended to be modified by other functions in this
//% namespace. Once `lunatic::message::send` is called it will be sent to another process.
fn create(mut caller: Caller<ProcessState>) {
    // Replace the scratch slot with a fresh, empty message; any previously
    // created-but-unsent message is dropped here.
    caller.data_mut().message = Some(Message::default());
}
//% lunatic::message::set_buffer(
//% data_ptr: i32,
//% data_len: i32,
//% )
//%
//% Sets the data for the next message.
//%
//% Traps:
//% * If **data_ptr + data_len** is outside the memory.
//% * If it's called before the next message is created.
fn set_buffer(mut caller: Caller<ProcessState>, data_ptr: u32, data_len: u32) -> Result<(), Trap> {
    // Copy `data_len` bytes out of guest memory at `data_ptr`; traps if the
    // range is out of bounds.
    let mut buffer = vec![0; data_len as usize];
    let memory = get_memory(&mut caller)?;
    memory
        .read(&caller, data_ptr as usize, buffer.as_mut_slice())
        .or_trap("lunatic::message::set_buffer")?;
    // Traps if `create()` was not called first (no scratch message).
    let message = caller
        .data_mut()
        .message
        .as_mut()
        .or_trap("lunatic::message::set_buffer")?;
    match message {
        Message::Data(data) => data.set_buffer(buffer),
        // Signal messages carry no buffer; finding one here is a host bug.
        Message::Signal => return Err(Trap::new("Unexpected `Message::Signal` in scratch buffer")),
    };
    Ok(())
}
//% lunatic::message::add_process(process_id: u64) -> u64
//%
//% Adds a process resource to the next message and returns the location in the array the process
//% was added to. This will remove the process handle from the current process' resources.
//%
//% Traps:
//% * If process ID doesn't exist
//% * If it's called before the next message is created.
fn add_process(mut caller: Caller<ProcessState>, process_id: u64) -> Result<u64, Trap> {
    // Take ownership of the process handle away from the caller's resources;
    // traps if the id is unknown.
    let handle = caller
        .data_mut()
        .resources
        .processes
        .remove(process_id)
        .or_trap("lunatic::message::add_process")?;
    // Traps if `create()` was not called first.
    let scratch = caller
        .data_mut()
        .message
        .as_mut()
        .or_trap("lunatic::message::add_process")?;
    // Only data messages can carry resources; return the index the handle
    // was stored at inside the message's resource array.
    if let Message::Data(data) = scratch {
        Ok(data.add_process(handle) as u64)
    } else {
        Err(Trap::new("Unexpected `Message::Signal` in scratch buffer"))
    }
}
//% lunatic::message::add_tcp_stream(stream_id: u64) -> u64
//%
//% Adds a TCP stream resource to the next message and returns the location in the array the TCP
//% stream was added to. This will remove the TCP stream from the current process' resources.
//%
//% Traps:
//% * If TCP stream ID doesn't exist
//% * If it's called before the next message is created.
fn add_tcp_stream(mut caller: Caller<ProcessState>, stream_id: u64) -> Result<u64, Trap> {
    // Move the TCP stream out of the caller's resources; traps on unknown id.
    let stream = caller
        .data_mut()
        .resources
        .tcp_streams
        .remove(stream_id)
        .or_trap("lunatic::message::add_tcp_stream")?;
    // Traps if `create()` was not called first.
    let scratch = caller
        .data_mut()
        .message
        .as_mut()
        .or_trap("lunatic::message::add_tcp_stream")?;
    // Return the stream's index inside the message's resource array.
    if let Message::Data(data) = scratch {
        Ok(data.add_tcp_stream(stream) as u64)
    } else {
        Err(Trap::new("Unexpected `Message::Signal` in scratch buffer"))
    }
}
//% lunatic::message::send(
//% process_id: i64,
//% ) -> i32
//%
//% Returns:
//% * 0 on success
//% * 1 on error - Process can't receive messages (finished).
//%
//% Sends the message to a process.
//%
//% Traps:
//% * If the process ID doesn't exist.
//% * If it's called before a creating the next message.
fn send(mut caller: Caller<ProcessState>, process_id: u64) -> Result<u32, Trap> |
//% lunatic::message::prepare_receive(i32_data_size_ptr: i32, i32_res_size_ptr: i32) -> i32
//%
//% Returns:
//% * 0 if it's a regular message.
//% * 1 if it's a signal turned into a message.
//%
//% For regular messages both parameters are used.
//% * **i32_data_size_ptr** - Location to write the message buffer size to as.
//% * **i32_res_size_ptr** - Location to write the number of resources to.
//%
//% This function should be called before `lunatic::message::receive` to let the guest know how
//% much memory space needs to be reserved for the next message. The data size is in **bytes**,
//% the resources size is the number of resources and each resource is a u64 value. Because of
//% this the guest needs to reserve `64 * resource size` bytes for the resource buffer.
//%
//% Traps:
//% * If **size_ptr** is outside the memory.
fn prepare_receive(
    mut caller: Caller<ProcessState>,
    data_size_ptr: u32,
    res_size_ptr: u32,
) -> Box<dyn Future<Output = Result<u32, Trap>> + Send + '_> {
    Box::new(async move {
        // Block until the next message arrives in this process' mailbox.
        let message = caller
            .data_mut()
            .mailbox
            .recv()
            .await
            .expect("a process always hold onto its sender and this can't be None");
        let result = match &message {
            Message::Data(message) => {
                // Report buffer size (bytes) and resource count to the guest
                // so it can allocate space before calling `receive`.
                let message_buffer_size = message.buffer_size() as u32;
                let message_resources_size = message.resources_size() as u32;
                let memory = get_memory(&mut caller)?;
                memory
                    .write(
                        &mut caller,
                        data_size_ptr as usize,
                        &message_buffer_size.to_le_bytes(),
                    )
                    .or_trap("lunatic::message::prepare_receive")?;
                memory
                    .write(
                        &mut caller,
                        res_size_ptr as usize,
                        &message_resources_size.to_le_bytes(),
                    )
                    .or_trap("lunatic::message::prepare_receive")?;
                0
            }
            // Signals carry no payload; 1 tells the guest to skip `receive`.
            Message::Signal => 1,
        };
        // Put the message into the scratch area
        caller.data_mut().message = Some(message);
        Ok(result)
    })
}
//% lunatic::message::receive(data_ptr: i32, resource_ptr: i32)
//%
//% * **data_ptr** - Pointer to write the data to.
//% * **resource_ptr** - Pointer to an array of i64 values, where each value represents the
//% resource id inside the new process. Resources are in the same order they
//% were added.
//%
//% Writes the message that was prepared with `lunatic::message::prepare_receive` to the guest. It
//% should only be called if `prepare_receive` returned 0, otherwise it will trap. Signal message
//% don't cary any additional information and everything we need was returned by `prepare_receive`.
//%
//% Traps:
//% * If `lunatic::message::prepare_receive` was not called before.
//% * If **data_ptr + size of the message** is outside the memory.
//% * If **resource_ptr + size of the resources** is outside the memory.
fn receive(mut caller: Caller<ProcessState>, data_ptr: u32, resource_ptr: u32) -> Result<(), Trap> {
    // Take the message staged by `prepare_receive`; traps if it wasn't called.
    let last_message = caller
        .data_mut()
        .message
        .take()
        .or_trap("lunatic::message::receive")?;
    match last_message {
        Message::Data(last_message) => {
            // Copy the raw data buffer into guest memory at `data_ptr`.
            let memory = get_memory(&mut caller)?;
            memory
                .write(&mut caller, data_ptr as usize, last_message.buffer())
                .or_trap("lunatic::message::receive")?;
            // Adopt each carried resource into this process (assigning new
            // ids) and serialize the ids as little-endian u64s, preserving
            // the order they were added on the sending side.
            let resources: Vec<u8> = last_message
                .resources()
                .into_iter()
                .map(|resource| match resource {
                    crate::message::Resource::Process(process_handle) => {
                        u64::to_le_bytes(caller.data_mut().resources.processes.add(process_handle))
                    }
                    crate::message::Resource::TcpStream(tcp_stream) => {
                        u64::to_le_bytes(caller.data_mut().resources.tcp_streams.add(tcp_stream))
                    }
                })
                .flatten()
                .collect();
            memory
                .write(&mut caller, resource_ptr as usize, &resources)
                .or_trap("lunatic::message::receive")?;
            Ok(())
        }
        // `prepare_receive` returned 1 for signals; calling receive anyway is
        // a guest error.
        Message::Signal => Err(Trap::new("`lunatic::message::receive` called on a signal")),
    }
}
| {
let message = caller
.data_mut()
.message
.take()
.or_trap("lunatic::message::send")?;
let process = caller
.data()
.resources
.processes
.get(process_id)
.or_trap("lunatic::message::send")?;
let result = match process.send_message(message) {
Ok(()) => 0,
Err(_error) => 1,
};
Ok(result)
} | identifier_body |
mailbox.rs | use std::future::Future;
use anyhow::Result;
use wasmtime::{Caller, FuncType, Linker, Trap, ValType};
use crate::{
api::{error::IntoTrap, get_memory},
message::Message,
state::ProcessState,
};
use super::{link_async2_if_match, link_if_match};
// Register the mailbox APIs to the linker
pub(crate) fn register(
linker: &mut Linker<ProcessState>,
namespace_filter: &[String],
) -> Result<()> {
link_if_match(
linker,
"lunatic::message",
"create",
FuncType::new([], []),
create,
namespace_filter,
)?;
link_if_match(
linker,
"lunatic::message",
"set_buffer",
FuncType::new([ValType::I32, ValType::I32], []),
set_buffer,
namespace_filter,
)?;
link_if_match(
linker,
"lunatic::message",
"add_process",
FuncType::new([ValType::I64], [ValType::I64]),
add_process,
namespace_filter,
)?;
link_if_match(
linker,
"lunatic::message",
"add_tcp_stream",
FuncType::new([ValType::I64], [ValType::I64]),
add_tcp_stream,
namespace_filter,
)?;
link_if_match(
linker,
"lunatic::message",
"send",
FuncType::new([ValType::I64], [ValType::I32]),
send,
namespace_filter,
)?;
link_async2_if_match(
linker,
"lunatic::message",
"prepare_receive",
FuncType::new([ValType::I32, ValType::I32], [ValType::I32]),
prepare_receive,
namespace_filter,
)?;
link_if_match(
linker,
"lunatic::message",
"receive",
FuncType::new([ValType::I32, ValType::I32], []),
receive,
namespace_filter,
)?;
Ok(())
}
//% lunatic::message
//%
//% A lunatic message consists of 2 parts:
//% * A buffer of raw data
//% * An array of resource IDs
//%
//% If resources are sent between processes, their ID changes. The resource ID can for example
//% be already taken in the receiving process. So we need a way to communicate the new ID on the
//% receiving end.
//%
//% When the `create()` function is called an empty message is allocated and both parts can be
//% modified before it's sent to another process. If a new resource is added to the message, the
//% index inside of the array is returned. This information can be now serialized inside the raw
//% data buffer in some way. E.g. You are serializing a structure like this:
//%
//% struct A {
//% a: String,
//% b: Process,
//% c: i32,
//% d: TcpStream
//% }
//%
//% Into something like this:
//%
//% ["Some string" | [resource 0] | i32 value | [resource 1] ]
//%
//% [resource 0] & [resource 1] are just encoded as 0 and 1 u64 values, representing their order
//% in the resource array.
//%
//% It's common to use some serialization library that will encode a mixture of raw data and
//% resource indexes into the data buffer.
//%
//% On the receiving side, first the `prepare_receive()` function must be called to receive info
//% on how big the buffer and resource arrays are, so that enough space can be allocated inside
//% the guest.
//%
//% The `receive()` function will do 2 things:
//% * Write the buffer of raw data to the specified location
//% * Give all resources to the new process (with new IDs) and write the IDs to the specified
//% location in the same order they were added.
//% Now the information from the buffer (with resource indexes) can be used to deserialize the | //% The process of sending them around needs to be explicit.
//%
//% This API was designed around the idea that most guest languages will use some serialization
//% library and turning resources into indexes is a way of serializing. The same is true for
//% deserializing them on the receiving side, when an index needs to be turned into an actual
//% resource ID.
//% lunatic::message::create()
//%
//% Creates a new message. This message is intended to be modified by other functions in this
//% namespace. Once `lunatic::message::send` is called it will be sent to another process.
fn create(mut caller: Caller<ProcessState>) {
caller.data_mut().message = Some(Message::default());
}
//% lunatic::message::set_buffer(
//% data_ptr: i32,
//% data_len: i32,
//% )
//%
//% Sets the data for the next message.
//%
//% Traps:
//% * If **data_ptr + data_len** is outside the memory.
//% * If it's called before the next message is created.
fn set_buffer(mut caller: Caller<ProcessState>, data_ptr: u32, data_len: u32) -> Result<(), Trap> {
let mut buffer = vec![0; data_len as usize];
let memory = get_memory(&mut caller)?;
memory
.read(&caller, data_ptr as usize, buffer.as_mut_slice())
.or_trap("lunatic::message::set_buffer")?;
let message = caller
.data_mut()
.message
.as_mut()
.or_trap("lunatic::message::set_buffer")?;
match message {
Message::Data(data) => data.set_buffer(buffer),
Message::Signal => return Err(Trap::new("Unexpected `Message::Signal` in scratch buffer")),
};
Ok(())
}
//% lunatic::message::add_process(process_id: u64) -> u64
//%
//% Adds a process resource to the next message and returns the location in the array the process
//% was added to. This will remove the process handle from the current process' resources.
//%
//% Traps:
//% * If process ID doesn't exist
//% * If it's called before the next message is created.
fn add_process(mut caller: Caller<ProcessState>, process_id: u64) -> Result<u64, Trap> {
let process = caller
.data_mut()
.resources
.processes
.remove(process_id)
.or_trap("lunatic::message::add_process")?;
let message = caller
.data_mut()
.message
.as_mut()
.or_trap("lunatic::message::add_process")?;
let pid = match message {
Message::Data(data) => data.add_process(process) as u64,
Message::Signal => return Err(Trap::new("Unexpected `Message::Signal` in scratch buffer")),
};
Ok(pid)
}
//% lunatic::message::add_tcp_stream(stream_id: u64) -> u64
//%
//% Adds a TCP stream resource to the next message and returns the location in the array the TCP
//% stream was added to. This will remove the TCP stream from the current process' resources.
//%
//% Traps:
//% * If TCP stream ID doesn't exist
//% * If it's called before the next message is created.
fn add_tcp_stream(mut caller: Caller<ProcessState>, stream_id: u64) -> Result<u64, Trap> {
let stream = caller
.data_mut()
.resources
.tcp_streams
.remove(stream_id)
.or_trap("lunatic::message::add_tcp_stream")?;
let message = caller
.data_mut()
.message
.as_mut()
.or_trap("lunatic::message::add_tcp_stream")?;
let stream_id = match message {
Message::Data(data) => data.add_tcp_stream(stream) as u64,
Message::Signal => return Err(Trap::new("Unexpected `Message::Signal` in scratch buffer")),
};
Ok(stream_id)
}
//% lunatic::message::send(
//% process_id: i64,
//% ) -> i32
//%
//% Returns:
//% * 0 on success
//% * 1 on error - Process can't receive messages (finished).
//%
//% Sends the message to a process.
//%
//% Traps:
//% * If the process ID doesn't exist.
//% * If it's called before a creating the next message.
fn send(mut caller: Caller<ProcessState>, process_id: u64) -> Result<u32, Trap> {
let message = caller
.data_mut()
.message
.take()
.or_trap("lunatic::message::send")?;
let process = caller
.data()
.resources
.processes
.get(process_id)
.or_trap("lunatic::message::send")?;
let result = match process.send_message(message) {
Ok(()) => 0,
Err(_error) => 1,
};
Ok(result)
}
//% lunatic::message::prepare_receive(i32_data_size_ptr: i32, i32_res_size_ptr: i32) -> i32
//%
//% Returns:
//% * 0 if it's a regular message.
//% * 1 if it's a signal turned into a message.
//%
//% For regular messages both parameters are used.
//% * **i32_data_size_ptr** - Location to write the message buffer size to as.
//% * **i32_res_size_ptr** - Location to write the number of resources to.
//%
//% This function should be called before `lunatic::message::receive` to let the guest know how
//% much memory space needs to be reserved for the next message. The data size is in **bytes**,
//% the resources size is the number of resources and each resource is a u64 value. Because of
//% this the guest needs to reserve `64 * resource size` bytes for the resource buffer.
//%
//% Traps:
//% * If **size_ptr** is outside the memory.
fn prepare_receive(
mut caller: Caller<ProcessState>,
data_size_ptr: u32,
res_size_ptr: u32,
) -> Box<dyn Future<Output = Result<u32, Trap>> + Send + '_> {
Box::new(async move {
let message = caller
.data_mut()
.mailbox
.recv()
.await
.expect("a process always hold onto its sender and this can't be None");
let result = match &message {
Message::Data(message) => {
let message_buffer_size = message.buffer_size() as u32;
let message_resources_size = message.resources_size() as u32;
let memory = get_memory(&mut caller)?;
memory
.write(
&mut caller,
data_size_ptr as usize,
&message_buffer_size.to_le_bytes(),
)
.or_trap("lunatic::message::prepare_receive")?;
memory
.write(
&mut caller,
res_size_ptr as usize,
&message_resources_size.to_le_bytes(),
)
.or_trap("lunatic::message::prepare_receive")?;
0
}
Message::Signal => 1,
};
// Put the message into the scratch area
caller.data_mut().message = Some(message);
Ok(result)
})
}
//% lunatic::message::receive(data_ptr: i32, resource_ptr: i32)
//%
//% * **data_ptr** - Pointer to write the data to.
//% * **resource_ptr** - Pointer to an array of i64 values, where each value represents the
//% resource id inside the new process. Resources are in the same order they
//% were added.
//%
//% Writes the message that was prepared with `lunatic::message::prepare_receive` to the guest. It
//% should only be called if `prepare_receive` returned 0, otherwise it will trap. Signal message
//% don't cary any additional information and everything we need was returned by `prepare_receive`.
//%
//% Traps:
//% * If `lunatic::message::prepare_receive` was not called before.
//% * If **data_ptr + size of the message** is outside the memory.
//% * If **resource_ptr + size of the resources** is outside the memory.
fn receive(mut caller: Caller<ProcessState>, data_ptr: u32, resource_ptr: u32) -> Result<(), Trap> {
let last_message = caller
.data_mut()
.message
.take()
.or_trap("lunatic::message::receive")?;
match last_message {
Message::Data(last_message) => {
let memory = get_memory(&mut caller)?;
memory
.write(&mut caller, data_ptr as usize, last_message.buffer())
.or_trap("lunatic::message::receive")?;
let resources: Vec<u8> = last_message
.resources()
.into_iter()
.map(|resource| match resource {
crate::message::Resource::Process(process_handle) => {
u64::to_le_bytes(caller.data_mut().resources.processes.add(process_handle))
}
crate::message::Resource::TcpStream(tcp_stream) => {
u64::to_le_bytes(caller.data_mut().resources.tcp_streams.add(tcp_stream))
}
})
.flatten()
.collect();
memory
.write(&mut caller, resource_ptr as usize, &resources)
.or_trap("lunatic::message::receive")?;
Ok(())
}
Message::Signal => Err(Trap::new("`lunatic::message::receive` called on a signal")),
}
} | //% received message into the same structure.
//%
//% This can be a bit confusing, because resources are just IDs (u64 values) themself. But we
//% still need to serialize them into different u64 values. Resources are inherently bound to a
//% process and you can't access another resource just by guessing an ID from another process. | random_line_split |
mailbox.rs | use std::future::Future;
use anyhow::Result;
use wasmtime::{Caller, FuncType, Linker, Trap, ValType};
use crate::{
api::{error::IntoTrap, get_memory},
message::Message,
state::ProcessState,
};
use super::{link_async2_if_match, link_if_match};
// Register the mailbox APIs to the linker
pub(crate) fn register(
linker: &mut Linker<ProcessState>,
namespace_filter: &[String],
) -> Result<()> {
link_if_match(
linker,
"lunatic::message",
"create",
FuncType::new([], []),
create,
namespace_filter,
)?;
link_if_match(
linker,
"lunatic::message",
"set_buffer",
FuncType::new([ValType::I32, ValType::I32], []),
set_buffer,
namespace_filter,
)?;
link_if_match(
linker,
"lunatic::message",
"add_process",
FuncType::new([ValType::I64], [ValType::I64]),
add_process,
namespace_filter,
)?;
link_if_match(
linker,
"lunatic::message",
"add_tcp_stream",
FuncType::new([ValType::I64], [ValType::I64]),
add_tcp_stream,
namespace_filter,
)?;
link_if_match(
linker,
"lunatic::message",
"send",
FuncType::new([ValType::I64], [ValType::I32]),
send,
namespace_filter,
)?;
link_async2_if_match(
linker,
"lunatic::message",
"prepare_receive",
FuncType::new([ValType::I32, ValType::I32], [ValType::I32]),
prepare_receive,
namespace_filter,
)?;
link_if_match(
linker,
"lunatic::message",
"receive",
FuncType::new([ValType::I32, ValType::I32], []),
receive,
namespace_filter,
)?;
Ok(())
}
//% lunatic::message
//%
//% A lunatic message consists of 2 parts:
//% * A buffer of raw data
//% * An array of resource IDs
//%
//% If resources are sent between processes, their ID changes. The resource ID can for example
//% be already taken in the receiving process. So we need a way to communicate the new ID on the
//% receiving end.
//%
//% When the `create()` function is called an empty message is allocated and both parts can be
//% modified before it's sent to another process. If a new resource is added to the message, the
//% index inside of the array is returned. This information can be now serialized inside the raw
//% data buffer in some way. E.g. You are serializing a structure like this:
//%
//% struct A {
//% a: String,
//% b: Process,
//% c: i32,
//% d: TcpStream
//% }
//%
//% Into something like this:
//%
//% ["Some string" | [resource 0] | i32 value | [resource 1] ]
//%
//% [resource 0] & [resource 1] are just encoded as 0 and 1 u64 values, representing their order
//% in the resource array.
//%
//% It's common to use some serialization library that will encode a mixture of raw data and
//% resource indexes into the data buffer.
//%
//% On the receiving side, first the `prepare_receive()` function must be called to receive info
//% on how big the buffer and resource arrays are, so that enough space can be allocated inside
//% the guest.
//%
//% The `receive()` function will do 2 things:
//% * Write the buffer of raw data to the specified location
//% * Give all resources to the new process (with new IDs) and write the IDs to the specified
//% location in the same order they were added.
//% Now the information from the buffer (with resource indexes) can be used to deserialize the
//% received message into the same structure.
//%
//% This can be a bit confusing, because resources are just IDs (u64 values) themself. But we
//% still need to serialize them into different u64 values. Resources are inherently bound to a
//% process and you can't access another resource just by guessing an ID from another process.
//% The process of sending them around needs to be explicit.
//%
//% This API was designed around the idea that most guest languages will use some serialization
//% library and turning resources into indexes is a way of serializing. The same is true for
//% deserializing them on the receiving side, when an index needs to be turned into an actual
//% resource ID.
//% lunatic::message::create()
//%
//% Creates a new message. This message is intended to be modified by other functions in this
//% namespace. Once `lunatic::message::send` is called it will be sent to another process.
fn create(mut caller: Caller<ProcessState>) {
caller.data_mut().message = Some(Message::default());
}
//% lunatic::message::set_buffer(
//% data_ptr: i32,
//% data_len: i32,
//% )
//%
//% Sets the data for the next message.
//%
//% Traps:
//% * If **data_ptr + data_len** is outside the memory.
//% * If it's called before the next message is created.
fn set_buffer(mut caller: Caller<ProcessState>, data_ptr: u32, data_len: u32) -> Result<(), Trap> {
let mut buffer = vec![0; data_len as usize];
let memory = get_memory(&mut caller)?;
memory
.read(&caller, data_ptr as usize, buffer.as_mut_slice())
.or_trap("lunatic::message::set_buffer")?;
let message = caller
.data_mut()
.message
.as_mut()
.or_trap("lunatic::message::set_buffer")?;
match message {
Message::Data(data) => data.set_buffer(buffer),
Message::Signal => return Err(Trap::new("Unexpected `Message::Signal` in scratch buffer")),
};
Ok(())
}
//% lunatic::message::add_process(process_id: u64) -> u64
//%
//% Adds a process resource to the next message and returns the location in the array the process
//% was added to. This will remove the process handle from the current process' resources.
//%
//% Traps:
//% * If process ID doesn't exist
//% * If it's called before the next message is created.
fn add_process(mut caller: Caller<ProcessState>, process_id: u64) -> Result<u64, Trap> {
let process = caller
.data_mut()
.resources
.processes
.remove(process_id)
.or_trap("lunatic::message::add_process")?;
let message = caller
.data_mut()
.message
.as_mut()
.or_trap("lunatic::message::add_process")?;
let pid = match message {
Message::Data(data) => data.add_process(process) as u64,
Message::Signal => return Err(Trap::new("Unexpected `Message::Signal` in scratch buffer")),
};
Ok(pid)
}
//% lunatic::message::add_tcp_stream(stream_id: u64) -> u64
//%
//% Adds a TCP stream resource to the next message and returns the location in the array the TCP
//% stream was added to. This will remove the TCP stream from the current process' resources.
//%
//% Traps:
//% * If TCP stream ID doesn't exist
//% * If it's called before the next message is created.
fn add_tcp_stream(mut caller: Caller<ProcessState>, stream_id: u64) -> Result<u64, Trap> {
let stream = caller
.data_mut()
.resources
.tcp_streams
.remove(stream_id)
.or_trap("lunatic::message::add_tcp_stream")?;
let message = caller
.data_mut()
.message
.as_mut()
.or_trap("lunatic::message::add_tcp_stream")?;
let stream_id = match message {
Message::Data(data) => data.add_tcp_stream(stream) as u64,
Message::Signal => return Err(Trap::new("Unexpected `Message::Signal` in scratch buffer")),
};
Ok(stream_id)
}
//% lunatic::message::send(
//% process_id: i64,
//% ) -> i32
//%
//% Returns:
//% * 0 on success
//% * 1 on error - Process can't receive messages (finished).
//%
//% Sends the message to a process.
//%
//% Traps:
//% * If the process ID doesn't exist.
//% * If it's called before a creating the next message.
fn send(mut caller: Caller<ProcessState>, process_id: u64) -> Result<u32, Trap> {
let message = caller
.data_mut()
.message
.take()
.or_trap("lunatic::message::send")?;
let process = caller
.data()
.resources
.processes
.get(process_id)
.or_trap("lunatic::message::send")?;
let result = match process.send_message(message) {
Ok(()) => 0,
Err(_error) => 1,
};
Ok(result)
}
//% lunatic::message::prepare_receive(i32_data_size_ptr: i32, i32_res_size_ptr: i32) -> i32
//%
//% Returns:
//% * 0 if it's a regular message.
//% * 1 if it's a signal turned into a message.
//%
//% For regular messages both parameters are used.
//% * **i32_data_size_ptr** - Location to write the message buffer size to as.
//% * **i32_res_size_ptr** - Location to write the number of resources to.
//%
//% This function should be called before `lunatic::message::receive` to let the guest know how
//% much memory space needs to be reserved for the next message. The data size is in **bytes**,
//% the resources size is the number of resources and each resource is a u64 value. Because of
//% this the guest needs to reserve `64 * resource size` bytes for the resource buffer.
//%
//% Traps:
//% * If **size_ptr** is outside the memory.
fn | (
mut caller: Caller<ProcessState>,
data_size_ptr: u32,
res_size_ptr: u32,
) -> Box<dyn Future<Output = Result<u32, Trap>> + Send + '_> {
Box::new(async move {
let message = caller
.data_mut()
.mailbox
.recv()
.await
.expect("a process always hold onto its sender and this can't be None");
let result = match &message {
Message::Data(message) => {
let message_buffer_size = message.buffer_size() as u32;
let message_resources_size = message.resources_size() as u32;
let memory = get_memory(&mut caller)?;
memory
.write(
&mut caller,
data_size_ptr as usize,
&message_buffer_size.to_le_bytes(),
)
.or_trap("lunatic::message::prepare_receive")?;
memory
.write(
&mut caller,
res_size_ptr as usize,
&message_resources_size.to_le_bytes(),
)
.or_trap("lunatic::message::prepare_receive")?;
0
}
Message::Signal => 1,
};
// Put the message into the scratch area
caller.data_mut().message = Some(message);
Ok(result)
})
}
//% lunatic::message::receive(data_ptr: i32, resource_ptr: i32)
//%
//% * **data_ptr** - Pointer to write the data to.
//% * **resource_ptr** - Pointer to an array of i64 values, where each value represents the
//% resource id inside the new process. Resources are in the same order they
//% were added.
//%
//% Writes the message that was prepared with `lunatic::message::prepare_receive` to the guest. It
//% should only be called if `prepare_receive` returned 0, otherwise it will trap. Signal message
//% don't cary any additional information and everything we need was returned by `prepare_receive`.
//%
//% Traps:
//% * If `lunatic::message::prepare_receive` was not called before.
//% * If **data_ptr + size of the message** is outside the memory.
//% * If **resource_ptr + size of the resources** is outside the memory.
fn receive(mut caller: Caller<ProcessState>, data_ptr: u32, resource_ptr: u32) -> Result<(), Trap> {
let last_message = caller
.data_mut()
.message
.take()
.or_trap("lunatic::message::receive")?;
match last_message {
Message::Data(last_message) => {
let memory = get_memory(&mut caller)?;
memory
.write(&mut caller, data_ptr as usize, last_message.buffer())
.or_trap("lunatic::message::receive")?;
let resources: Vec<u8> = last_message
.resources()
.into_iter()
.map(|resource| match resource {
crate::message::Resource::Process(process_handle) => {
u64::to_le_bytes(caller.data_mut().resources.processes.add(process_handle))
}
crate::message::Resource::TcpStream(tcp_stream) => {
u64::to_le_bytes(caller.data_mut().resources.tcp_streams.add(tcp_stream))
}
})
.flatten()
.collect();
memory
.write(&mut caller, resource_ptr as usize, &resources)
.or_trap("lunatic::message::receive")?;
Ok(())
}
Message::Signal => Err(Trap::new("`lunatic::message::receive` called on a signal")),
}
}
| prepare_receive | identifier_name |
string.rs | // Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Format string literals.
use regex::Regex;
use unicode_segmentation::UnicodeSegmentation;
use config::Config;
use shape::Shape;
use utils::wrap_str;
const MIN_STRING: usize = 10;
pub struct StringFormat<'a> {
pub opener: &'a str,
pub closer: &'a str,
pub line_start: &'a str,
pub line_end: &'a str,
pub shape: Shape,
pub trim_end: bool,
pub config: &'a Config,
}
impl<'a> StringFormat<'a> {
pub fn new(shape: Shape, config: &'a Config) -> StringFormat<'a> {
StringFormat {
opener: "\"",
closer: "\"",
line_start: " ",
line_end: "\\",
shape,
trim_end: false,
config,
}
}
/// Returns the maximum number of graphemes that is possible on a line while taking the
/// indentation into account.
///
/// If we cannot put at least a single character per line, the rewrite won't succeed.
fn max_chars_with_indent(&self) -> Option<usize> {
Some(
self.shape
.width
.checked_sub(self.opener.len() + self.line_end.len() + 1)?
+ 1,
)
}
/// Like max_chars_with_indent but the indentation is not substracted.
/// This allows to fit more graphemes from the string on a line when
/// SnippetState::Overflow.
fn max_chars_without_indent(&self) -> Option<usize> {
Some(self.config.max_width().checked_sub(self.line_end.len())?)
}
}
pub fn rewrite_string<'a>(orig: &str, fmt: &StringFormat<'a>) -> Option<String> {
let max_chars_with_indent = fmt.max_chars_with_indent()?;
let max_chars_without_indent = fmt.max_chars_without_indent()?;
let indent = fmt.shape.indent.to_string_with_newline(fmt.config);
// Strip line breaks.
// With this regex applied, all remaining whitespaces are significant
let strip_line_breaks_re = Regex::new(r"([^\\](\\\\)*)\\[\n\r][[:space:]]*").unwrap();
let stripped_str = strip_line_breaks_re.replace_all(orig, "$1");
let graphemes = UnicodeSegmentation::graphemes(&*stripped_str, false).collect::<Vec<&str>>();
// `cur_start` is the position in `orig` of the start of the current line.
let mut cur_start = 0;
let mut result = String::with_capacity(
stripped_str
.len()
.checked_next_power_of_two()
.unwrap_or(usize::max_value()),
);
result.push_str(fmt.opener);
// Snip a line at a time from `stripped_str` until it is used up. Push the snippet
// onto result.
let mut cur_max_chars = max_chars_with_indent;
loop {
// All the input starting at cur_start fits on the current line
if graphemes.len() - cur_start <= cur_max_chars {
result.push_str(&graphemes[cur_start..].join(""));
break;
}
// The input starting at cur_start needs to be broken
match break_string(cur_max_chars, fmt.trim_end, &graphemes[cur_start..]) {
SnippetState::LineEnd(line, len) => {
result.push_str(&line);
result.push_str(fmt.line_end);
result.push_str(&indent);
result.push_str(fmt.line_start);
cur_max_chars = max_chars_with_indent;
cur_start += len;
}
SnippetState::Overflow(line, len) => {
result.push_str(&line);
cur_max_chars = max_chars_without_indent;
cur_start += len;
}
SnippetState::EndOfInput(line) => {
result.push_str(&line);
break;
}
}
}
result.push_str(fmt.closer);
wrap_str(result, fmt.config.max_width(), fmt.shape)
}
/// Result of breaking a string so it fits in a line and the state it ended in.
/// The state informs about what to do with the snippet and how to continue the breaking process.
#[derive(Debug, PartialEq)]
enum SnippetState {
/// The input could not be broken and so rewriting the string is finished.
EndOfInput(String),
/// The input could be broken and the returned snippet should be ended with a
/// `[StringFormat::line_end]`. The next snippet needs to be indented.
LineEnd(String, usize),
/// The input could be broken but the returned snippet should not be ended with a
/// `[StringFormat::line_end]` because the whitespace is significant. Therefore, the next
/// snippet should not be indented.
Overflow(String, usize),
}
/// Break the input string at a boundary character around the offset `max_chars`. A boundary
/// character is either a punctuation or a whitespace.
fn break_string(max_chars: usize, trim_end: bool, input: &[&str]) -> SnippetState {
let break_at = |index /* grapheme at index is included */| {
// Take in any whitespaces to the left/right of `input[index]` and
// check if there is a line feed, in which case whitespaces needs to be kept.
let mut index_minus_ws = index;
for (i, grapheme) in input[0..=index].iter().enumerate().rev() {
if !trim_end && is_line_feed(grapheme) {
return SnippetState::Overflow(input[0..=i].join("").to_string(), i + 1);
} else if !is_whitespace(grapheme) {
index_minus_ws = i;
break;
}
}
let mut index_plus_ws = index;
for (i, grapheme) in input[index + 1..].iter().enumerate() {
if !trim_end && is_line_feed(grapheme) {
return SnippetState::Overflow(
input[0..=index + 1 + i].join("").to_string(),
index + 2 + i,
);
} else if !is_whitespace(grapheme) {
index_plus_ws = index + i;
break;
}
}
if trim_end {
SnippetState::LineEnd(
input[0..=index_minus_ws].join("").to_string(),
index_plus_ws + 1,
)
} else {
SnippetState::LineEnd(
input[0..=index_plus_ws].join("").to_string(),
index_plus_ws + 1,
)
}
};
// Find the position in input for breaking the string
match input[0..max_chars]
.iter()
.rposition(|grapheme| is_whitespace(grapheme))
{
// Found a whitespace and what is on its left side is big enough.
Some(index) if index >= MIN_STRING => break_at(index),
// No whitespace found, try looking for a punctuation instead
_ => match input[0..max_chars]
.iter()
.rposition(|grapheme| is_punctuation(grapheme))
{
// Found a punctuation and what is on its left side is big enough.
Some(index) if index >= MIN_STRING => break_at(index),
// Either no boundary character was found to the left of `input[max_chars]`, or the line
// got too small. We try searching for a boundary character to the right.
_ => match input[max_chars..]
.iter()
.position(|grapheme| is_whitespace(grapheme) || is_punctuation(grapheme))
{
// A boundary was found after the line limit
Some(index) => break_at(max_chars + index),
// No boundary to the right, the input cannot be broken
None => SnippetState::EndOfInput(input.join("").to_string()),
},
},
}
}
fn is_line_feed(grapheme: &str) -> bool {
grapheme.as_bytes()[0] == b'\n'
}
fn is_whitespace(grapheme: &str) -> bool {
grapheme.chars().all(|c| c.is_whitespace())
}
fn is_punctuation(grapheme: &str) -> bool {
match grapheme.as_bytes()[0] {
b':' | b',' | b';' | b'.' => true,
_ => false,
}
}
#[cfg(test)]
mod test {
use super::{break_string, rewrite_string, SnippetState, StringFormat};
use shape::{Indent, Shape};
use unicode_segmentation::UnicodeSegmentation;
#[test]
fn issue343() {
let config = Default::default();
let fmt = StringFormat::new(Shape::legacy(2, Indent::empty()), &config);
rewrite_string("eq_", &fmt);
}
#[test]
fn should_break_on_whitespace() {
let string = "Placerat felis. Mauris porta ante sagittis purus.";
let graphemes = UnicodeSegmentation::graphemes(&*string, false).collect::<Vec<&str>>();
assert_eq!(
break_string(20, false, &graphemes[..]),
SnippetState::LineEnd("Placerat felis. ".to_string(), 16)
);
assert_eq!(
break_string(20, true, &graphemes[..]),
SnippetState::LineEnd("Placerat felis.".to_string(), 16)
);
}
#[test]
fn should_break_on_punctuation() {
let string = "Placerat_felis._Mauris_porta_ante_sagittis_purus.";
let graphemes = UnicodeSegmentation::graphemes(&*string, false).collect::<Vec<&str>>();
assert_eq!(
break_string(20, false, &graphemes[..]),
SnippetState::LineEnd("Placerat_felis.".to_string(), 15)
);
}
#[test]
fn should_break_forward() {
let string = "Venenatis_tellus_vel_tellus. Aliquam aliquam dolor at justo.";
let graphemes = UnicodeSegmentation::graphemes(&*string, false).collect::<Vec<&str>>();
assert_eq!(
break_string(20, false, &graphemes[..]),
SnippetState::LineEnd("Venenatis_tellus_vel_tellus. ".to_string(), 29)
);
assert_eq!(
break_string(20, true, &graphemes[..]),
SnippetState::LineEnd("Venenatis_tellus_vel_tellus.".to_string(), 29)
);
}
#[test]
fn | () {
let string = "Venenatis_tellus_vel_tellus";
let graphemes = UnicodeSegmentation::graphemes(&*string, false).collect::<Vec<&str>>();
assert_eq!(
break_string(20, false, &graphemes[..]),
SnippetState::EndOfInput("Venenatis_tellus_vel_tellus".to_string())
);
}
#[test]
fn significant_whitespaces() {
let string = "Neque in sem. \n Pellentesque tellus augue.";
let graphemes = UnicodeSegmentation::graphemes(&*string, false).collect::<Vec<&str>>();
assert_eq!(
break_string(15, false, &graphemes[..]),
SnippetState::Overflow("Neque in sem. \n".to_string(), 20)
);
assert_eq!(
break_string(25, false, &graphemes[..]),
SnippetState::Overflow("Neque in sem. \n".to_string(), 20)
);
// if `StringFormat::line_end` is true, then the line feed does not matter anymore
assert_eq!(
break_string(15, true, &graphemes[..]),
SnippetState::LineEnd("Neque in sem.".to_string(), 26)
);
assert_eq!(
break_string(25, true, &graphemes[..]),
SnippetState::LineEnd("Neque in sem.".to_string(), 26)
);
}
#[test]
fn big_whitespace() {
let string = "Neque in sem. Pellentesque tellus augue.";
let graphemes = UnicodeSegmentation::graphemes(&*string, false).collect::<Vec<&str>>();
assert_eq!(
break_string(20, false, &graphemes[..]),
SnippetState::LineEnd("Neque in sem. ".to_string(), 25)
);
assert_eq!(
break_string(20, true, &graphemes[..]),
SnippetState::LineEnd("Neque in sem.".to_string(), 25)
);
}
}
| nothing_to_break | identifier_name |
string.rs | // Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Format string literals.
use regex::Regex;
use unicode_segmentation::UnicodeSegmentation;
use config::Config;
use shape::Shape;
use utils::wrap_str;
const MIN_STRING: usize = 10;
pub struct StringFormat<'a> {
pub opener: &'a str,
pub closer: &'a str,
pub line_start: &'a str,
pub line_end: &'a str,
pub shape: Shape,
pub trim_end: bool,
pub config: &'a Config,
}
impl<'a> StringFormat<'a> {
pub fn new(shape: Shape, config: &'a Config) -> StringFormat<'a> {
StringFormat {
opener: "\"",
closer: "\"",
line_start: " ",
line_end: "\\",
shape,
trim_end: false,
config,
}
}
/// Returns the maximum number of graphemes that is possible on a line while taking the
/// indentation into account.
///
/// If we cannot put at least a single character per line, the rewrite won't succeed.
fn max_chars_with_indent(&self) -> Option<usize> {
Some(
self.shape
.width
.checked_sub(self.opener.len() + self.line_end.len() + 1)?
+ 1,
)
}
/// Like max_chars_with_indent but the indentation is not substracted.
/// This allows to fit more graphemes from the string on a line when
/// SnippetState::Overflow.
fn max_chars_without_indent(&self) -> Option<usize> {
Some(self.config.max_width().checked_sub(self.line_end.len())?)
}
}
pub fn rewrite_string<'a>(orig: &str, fmt: &StringFormat<'a>) -> Option<String> {
let max_chars_with_indent = fmt.max_chars_with_indent()?;
let max_chars_without_indent = fmt.max_chars_without_indent()?;
let indent = fmt.shape.indent.to_string_with_newline(fmt.config);
// Strip line breaks.
// With this regex applied, all remaining whitespaces are significant
let strip_line_breaks_re = Regex::new(r"([^\\](\\\\)*)\\[\n\r][[:space:]]*").unwrap();
let stripped_str = strip_line_breaks_re.replace_all(orig, "$1");
let graphemes = UnicodeSegmentation::graphemes(&*stripped_str, false).collect::<Vec<&str>>();
// `cur_start` is the position in `orig` of the start of the current line.
let mut cur_start = 0;
let mut result = String::with_capacity(
stripped_str
.len()
.checked_next_power_of_two()
.unwrap_or(usize::max_value()),
);
result.push_str(fmt.opener);
// Snip a line at a time from `stripped_str` until it is used up. Push the snippet
// onto result.
let mut cur_max_chars = max_chars_with_indent;
loop {
// All the input starting at cur_start fits on the current line
if graphemes.len() - cur_start <= cur_max_chars {
result.push_str(&graphemes[cur_start..].join(""));
break;
}
// The input starting at cur_start needs to be broken
match break_string(cur_max_chars, fmt.trim_end, &graphemes[cur_start..]) {
SnippetState::LineEnd(line, len) => {
result.push_str(&line);
result.push_str(fmt.line_end);
result.push_str(&indent);
result.push_str(fmt.line_start);
cur_max_chars = max_chars_with_indent;
cur_start += len;
}
SnippetState::Overflow(line, len) => {
result.push_str(&line);
cur_max_chars = max_chars_without_indent;
cur_start += len;
}
SnippetState::EndOfInput(line) => {
result.push_str(&line);
break;
}
}
}
result.push_str(fmt.closer);
wrap_str(result, fmt.config.max_width(), fmt.shape)
}
/// Result of breaking a string so it fits in a line and the state it ended in.
/// The state informs about what to do with the snippet and how to continue the breaking process.
#[derive(Debug, PartialEq)]
enum SnippetState {
/// The input could not be broken and so rewriting the string is finished.
EndOfInput(String),
/// The input could be broken and the returned snippet should be ended with a
/// `[StringFormat::line_end]`. The next snippet needs to be indented.
LineEnd(String, usize),
/// The input could be broken but the returned snippet should not be ended with a
/// `[StringFormat::line_end]` because the whitespace is significant. Therefore, the next
/// snippet should not be indented.
Overflow(String, usize),
}
/// Break the input string at a boundary character around the offset `max_chars`. A boundary
/// character is either a punctuation or a whitespace.
fn break_string(max_chars: usize, trim_end: bool, input: &[&str]) -> SnippetState {
let break_at = |index /* grapheme at index is included */| {
// Take in any whitespaces to the left/right of `input[index]` and
// check if there is a line feed, in which case whitespaces needs to be kept.
let mut index_minus_ws = index;
for (i, grapheme) in input[0..=index].iter().enumerate().rev() {
if !trim_end && is_line_feed(grapheme) {
return SnippetState::Overflow(input[0..=i].join("").to_string(), i + 1);
} else if !is_whitespace(grapheme) {
index_minus_ws = i;
break;
}
}
let mut index_plus_ws = index;
for (i, grapheme) in input[index + 1..].iter().enumerate() {
if !trim_end && is_line_feed(grapheme) {
return SnippetState::Overflow(
input[0..=index + 1 + i].join("").to_string(),
index + 2 + i,
);
} else if !is_whitespace(grapheme) {
index_plus_ws = index + i;
break;
}
}
if trim_end {
SnippetState::LineEnd(
input[0..=index_minus_ws].join("").to_string(),
index_plus_ws + 1,
)
} else {
SnippetState::LineEnd(
input[0..=index_plus_ws].join("").to_string(),
index_plus_ws + 1,
)
}
};
// Find the position in input for breaking the string
match input[0..max_chars]
.iter()
.rposition(|grapheme| is_whitespace(grapheme))
{
// Found a whitespace and what is on its left side is big enough.
Some(index) if index >= MIN_STRING => break_at(index),
// No whitespace found, try looking for a punctuation instead
_ => match input[0..max_chars]
.iter()
.rposition(|grapheme| is_punctuation(grapheme))
{
// Found a punctuation and what is on its left side is big enough.
Some(index) if index >= MIN_STRING => break_at(index),
// Either no boundary character was found to the left of `input[max_chars]`, or the line
// got too small. We try searching for a boundary character to the right.
_ => match input[max_chars..]
.iter()
.position(|grapheme| is_whitespace(grapheme) || is_punctuation(grapheme))
{
// A boundary was found after the line limit
Some(index) => break_at(max_chars + index),
// No boundary to the right, the input cannot be broken
None => SnippetState::EndOfInput(input.join("").to_string()),
},
},
}
}
fn is_line_feed(grapheme: &str) -> bool {
grapheme.as_bytes()[0] == b'\n'
}
fn is_whitespace(grapheme: &str) -> bool {
grapheme.chars().all(|c| c.is_whitespace())
}
fn is_punctuation(grapheme: &str) -> bool {
match grapheme.as_bytes()[0] {
b':' | b',' | b';' | b'.' => true,
_ => false,
}
}
#[cfg(test)]
mod test {
use super::{break_string, rewrite_string, SnippetState, StringFormat};
use shape::{Indent, Shape};
use unicode_segmentation::UnicodeSegmentation;
#[test]
fn issue343() {
let config = Default::default();
let fmt = StringFormat::new(Shape::legacy(2, Indent::empty()), &config);
rewrite_string("eq_", &fmt);
}
#[test]
fn should_break_on_whitespace() {
let string = "Placerat felis. Mauris porta ante sagittis purus.";
let graphemes = UnicodeSegmentation::graphemes(&*string, false).collect::<Vec<&str>>();
assert_eq!(
break_string(20, false, &graphemes[..]),
SnippetState::LineEnd("Placerat felis. ".to_string(), 16)
);
assert_eq!(
break_string(20, true, &graphemes[..]),
SnippetState::LineEnd("Placerat felis.".to_string(), 16)
);
}
#[test]
fn should_break_on_punctuation() {
let string = "Placerat_felis._Mauris_porta_ante_sagittis_purus.";
let graphemes = UnicodeSegmentation::graphemes(&*string, false).collect::<Vec<&str>>();
assert_eq!(
break_string(20, false, &graphemes[..]),
SnippetState::LineEnd("Placerat_felis.".to_string(), 15)
);
}
#[test]
fn should_break_forward() {
let string = "Venenatis_tellus_vel_tellus. Aliquam aliquam dolor at justo.";
let graphemes = UnicodeSegmentation::graphemes(&*string, false).collect::<Vec<&str>>();
assert_eq!(
break_string(20, false, &graphemes[..]),
SnippetState::LineEnd("Venenatis_tellus_vel_tellus. ".to_string(), 29)
);
assert_eq!(
break_string(20, true, &graphemes[..]),
SnippetState::LineEnd("Venenatis_tellus_vel_tellus.".to_string(), 29)
);
}
#[test]
fn nothing_to_break() {
let string = "Venenatis_tellus_vel_tellus";
let graphemes = UnicodeSegmentation::graphemes(&*string, false).collect::<Vec<&str>>();
assert_eq!(
break_string(20, false, &graphemes[..]),
SnippetState::EndOfInput("Venenatis_tellus_vel_tellus".to_string())
);
}
#[test]
fn significant_whitespaces() |
#[test]
fn big_whitespace() {
let string = "Neque in sem. Pellentesque tellus augue.";
let graphemes = UnicodeSegmentation::graphemes(&*string, false).collect::<Vec<&str>>();
assert_eq!(
break_string(20, false, &graphemes[..]),
SnippetState::LineEnd("Neque in sem. ".to_string(), 25)
);
assert_eq!(
break_string(20, true, &graphemes[..]),
SnippetState::LineEnd("Neque in sem.".to_string(), 25)
);
}
}
| {
let string = "Neque in sem. \n Pellentesque tellus augue.";
let graphemes = UnicodeSegmentation::graphemes(&*string, false).collect::<Vec<&str>>();
assert_eq!(
break_string(15, false, &graphemes[..]),
SnippetState::Overflow("Neque in sem. \n".to_string(), 20)
);
assert_eq!(
break_string(25, false, &graphemes[..]),
SnippetState::Overflow("Neque in sem. \n".to_string(), 20)
);
// if `StringFormat::line_end` is true, then the line feed does not matter anymore
assert_eq!(
break_string(15, true, &graphemes[..]),
SnippetState::LineEnd("Neque in sem.".to_string(), 26)
);
assert_eq!(
break_string(25, true, &graphemes[..]),
SnippetState::LineEnd("Neque in sem.".to_string(), 26)
);
} | identifier_body |
string.rs | // Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Format string literals.
use regex::Regex;
use unicode_segmentation::UnicodeSegmentation;
use config::Config;
use shape::Shape;
use utils::wrap_str;
const MIN_STRING: usize = 10;
pub struct StringFormat<'a> {
pub opener: &'a str,
pub closer: &'a str,
pub line_start: &'a str,
pub line_end: &'a str,
pub shape: Shape,
pub trim_end: bool,
pub config: &'a Config,
}
impl<'a> StringFormat<'a> {
pub fn new(shape: Shape, config: &'a Config) -> StringFormat<'a> {
StringFormat {
opener: "\"",
closer: "\"",
line_start: " ",
line_end: "\\",
shape,
trim_end: false,
config,
}
}
/// Returns the maximum number of graphemes that is possible on a line while taking the
/// indentation into account.
///
/// If we cannot put at least a single character per line, the rewrite won't succeed.
fn max_chars_with_indent(&self) -> Option<usize> {
Some(
self.shape
.width | /// Like max_chars_with_indent but the indentation is not substracted.
/// This allows to fit more graphemes from the string on a line when
/// SnippetState::Overflow.
fn max_chars_without_indent(&self) -> Option<usize> {
Some(self.config.max_width().checked_sub(self.line_end.len())?)
}
}
pub fn rewrite_string<'a>(orig: &str, fmt: &StringFormat<'a>) -> Option<String> {
let max_chars_with_indent = fmt.max_chars_with_indent()?;
let max_chars_without_indent = fmt.max_chars_without_indent()?;
let indent = fmt.shape.indent.to_string_with_newline(fmt.config);
// Strip line breaks.
// With this regex applied, all remaining whitespaces are significant
let strip_line_breaks_re = Regex::new(r"([^\\](\\\\)*)\\[\n\r][[:space:]]*").unwrap();
let stripped_str = strip_line_breaks_re.replace_all(orig, "$1");
let graphemes = UnicodeSegmentation::graphemes(&*stripped_str, false).collect::<Vec<&str>>();
// `cur_start` is the position in `orig` of the start of the current line.
let mut cur_start = 0;
let mut result = String::with_capacity(
stripped_str
.len()
.checked_next_power_of_two()
.unwrap_or(usize::max_value()),
);
result.push_str(fmt.opener);
// Snip a line at a time from `stripped_str` until it is used up. Push the snippet
// onto result.
let mut cur_max_chars = max_chars_with_indent;
loop {
// All the input starting at cur_start fits on the current line
if graphemes.len() - cur_start <= cur_max_chars {
result.push_str(&graphemes[cur_start..].join(""));
break;
}
// The input starting at cur_start needs to be broken
match break_string(cur_max_chars, fmt.trim_end, &graphemes[cur_start..]) {
SnippetState::LineEnd(line, len) => {
result.push_str(&line);
result.push_str(fmt.line_end);
result.push_str(&indent);
result.push_str(fmt.line_start);
cur_max_chars = max_chars_with_indent;
cur_start += len;
}
SnippetState::Overflow(line, len) => {
result.push_str(&line);
cur_max_chars = max_chars_without_indent;
cur_start += len;
}
SnippetState::EndOfInput(line) => {
result.push_str(&line);
break;
}
}
}
result.push_str(fmt.closer);
wrap_str(result, fmt.config.max_width(), fmt.shape)
}
/// Result of breaking a string so it fits in a line and the state it ended in.
/// The state informs about what to do with the snippet and how to continue the breaking process.
#[derive(Debug, PartialEq)]
enum SnippetState {
/// The input could not be broken and so rewriting the string is finished.
EndOfInput(String),
/// The input could be broken and the returned snippet should be ended with a
/// `[StringFormat::line_end]`. The next snippet needs to be indented.
LineEnd(String, usize),
/// The input could be broken but the returned snippet should not be ended with a
/// `[StringFormat::line_end]` because the whitespace is significant. Therefore, the next
/// snippet should not be indented.
Overflow(String, usize),
}
/// Break the input string at a boundary character around the offset `max_chars`. A boundary
/// character is either a punctuation or a whitespace.
fn break_string(max_chars: usize, trim_end: bool, input: &[&str]) -> SnippetState {
let break_at = |index /* grapheme at index is included */| {
// Take in any whitespaces to the left/right of `input[index]` and
// check if there is a line feed, in which case whitespaces needs to be kept.
let mut index_minus_ws = index;
for (i, grapheme) in input[0..=index].iter().enumerate().rev() {
if !trim_end && is_line_feed(grapheme) {
return SnippetState::Overflow(input[0..=i].join("").to_string(), i + 1);
} else if !is_whitespace(grapheme) {
index_minus_ws = i;
break;
}
}
let mut index_plus_ws = index;
for (i, grapheme) in input[index + 1..].iter().enumerate() {
if !trim_end && is_line_feed(grapheme) {
return SnippetState::Overflow(
input[0..=index + 1 + i].join("").to_string(),
index + 2 + i,
);
} else if !is_whitespace(grapheme) {
index_plus_ws = index + i;
break;
}
}
if trim_end {
SnippetState::LineEnd(
input[0..=index_minus_ws].join("").to_string(),
index_plus_ws + 1,
)
} else {
SnippetState::LineEnd(
input[0..=index_plus_ws].join("").to_string(),
index_plus_ws + 1,
)
}
};
// Find the position in input for breaking the string
match input[0..max_chars]
.iter()
.rposition(|grapheme| is_whitespace(grapheme))
{
// Found a whitespace and what is on its left side is big enough.
Some(index) if index >= MIN_STRING => break_at(index),
// No whitespace found, try looking for a punctuation instead
_ => match input[0..max_chars]
.iter()
.rposition(|grapheme| is_punctuation(grapheme))
{
// Found a punctuation and what is on its left side is big enough.
Some(index) if index >= MIN_STRING => break_at(index),
// Either no boundary character was found to the left of `input[max_chars]`, or the line
// got too small. We try searching for a boundary character to the right.
_ => match input[max_chars..]
.iter()
.position(|grapheme| is_whitespace(grapheme) || is_punctuation(grapheme))
{
// A boundary was found after the line limit
Some(index) => break_at(max_chars + index),
// No boundary to the right, the input cannot be broken
None => SnippetState::EndOfInput(input.join("").to_string()),
},
},
}
}
fn is_line_feed(grapheme: &str) -> bool {
grapheme.as_bytes()[0] == b'\n'
}
fn is_whitespace(grapheme: &str) -> bool {
grapheme.chars().all(|c| c.is_whitespace())
}
fn is_punctuation(grapheme: &str) -> bool {
match grapheme.as_bytes()[0] {
b':' | b',' | b';' | b'.' => true,
_ => false,
}
}
#[cfg(test)]
mod test {
use super::{break_string, rewrite_string, SnippetState, StringFormat};
use shape::{Indent, Shape};
use unicode_segmentation::UnicodeSegmentation;
#[test]
fn issue343() {
let config = Default::default();
let fmt = StringFormat::new(Shape::legacy(2, Indent::empty()), &config);
rewrite_string("eq_", &fmt);
}
#[test]
fn should_break_on_whitespace() {
let string = "Placerat felis. Mauris porta ante sagittis purus.";
let graphemes = UnicodeSegmentation::graphemes(&*string, false).collect::<Vec<&str>>();
assert_eq!(
break_string(20, false, &graphemes[..]),
SnippetState::LineEnd("Placerat felis. ".to_string(), 16)
);
assert_eq!(
break_string(20, true, &graphemes[..]),
SnippetState::LineEnd("Placerat felis.".to_string(), 16)
);
}
#[test]
fn should_break_on_punctuation() {
let string = "Placerat_felis._Mauris_porta_ante_sagittis_purus.";
let graphemes = UnicodeSegmentation::graphemes(&*string, false).collect::<Vec<&str>>();
assert_eq!(
break_string(20, false, &graphemes[..]),
SnippetState::LineEnd("Placerat_felis.".to_string(), 15)
);
}
#[test]
fn should_break_forward() {
let string = "Venenatis_tellus_vel_tellus. Aliquam aliquam dolor at justo.";
let graphemes = UnicodeSegmentation::graphemes(&*string, false).collect::<Vec<&str>>();
assert_eq!(
break_string(20, false, &graphemes[..]),
SnippetState::LineEnd("Venenatis_tellus_vel_tellus. ".to_string(), 29)
);
assert_eq!(
break_string(20, true, &graphemes[..]),
SnippetState::LineEnd("Venenatis_tellus_vel_tellus.".to_string(), 29)
);
}
#[test]
fn nothing_to_break() {
let string = "Venenatis_tellus_vel_tellus";
let graphemes = UnicodeSegmentation::graphemes(&*string, false).collect::<Vec<&str>>();
assert_eq!(
break_string(20, false, &graphemes[..]),
SnippetState::EndOfInput("Venenatis_tellus_vel_tellus".to_string())
);
}
#[test]
fn significant_whitespaces() {
let string = "Neque in sem. \n Pellentesque tellus augue.";
let graphemes = UnicodeSegmentation::graphemes(&*string, false).collect::<Vec<&str>>();
assert_eq!(
break_string(15, false, &graphemes[..]),
SnippetState::Overflow("Neque in sem. \n".to_string(), 20)
);
assert_eq!(
break_string(25, false, &graphemes[..]),
SnippetState::Overflow("Neque in sem. \n".to_string(), 20)
);
// if `StringFormat::line_end` is true, then the line feed does not matter anymore
assert_eq!(
break_string(15, true, &graphemes[..]),
SnippetState::LineEnd("Neque in sem.".to_string(), 26)
);
assert_eq!(
break_string(25, true, &graphemes[..]),
SnippetState::LineEnd("Neque in sem.".to_string(), 26)
);
}
#[test]
fn big_whitespace() {
let string = "Neque in sem. Pellentesque tellus augue.";
let graphemes = UnicodeSegmentation::graphemes(&*string, false).collect::<Vec<&str>>();
assert_eq!(
break_string(20, false, &graphemes[..]),
SnippetState::LineEnd("Neque in sem. ".to_string(), 25)
);
assert_eq!(
break_string(20, true, &graphemes[..]),
SnippetState::LineEnd("Neque in sem.".to_string(), 25)
);
}
} | .checked_sub(self.opener.len() + self.line_end.len() + 1)?
+ 1,
)
}
| random_line_split |
server.rs | use std::{
marker::Unpin,
net::SocketAddr,
str::FromStr,
time::Duration,
};
use futures::{
future::{
BoxFuture,
Either,
},
select,
};
use tokio::{
net::signal::ctrl_c,
prelude::*,
runtime::Runtime,
sync::oneshot,
};
use bytes::{
Bytes,
BytesMut,
};
use crate::{
diagnostics::*,
error::Error,
receive::Message,
};
metrics! {
receive_ok,
receive_err,
process_ok,
process_err,
tcp_conn_accept,
tcp_conn_close,
tcp_conn_timeout,
tcp_msg_overflow
}
/**
Server configuration.
*/
#[derive(Debug, Clone)]
pub struct Config {
/**
The address to bind the server to.
*/
pub bind: Bind,
/**
The duration to keep client TCP connections alive for.
If the client doesn't complete a message within the period
then the connection will be closed.
*/
pub tcp_keep_alive_secs: u64,
/**
The maximum size of a single event before it'll be discarded.
*/
pub tcp_max_size_bytes: u64,
}
#[derive(Debug, Clone)]
pub struct Bind {
pub addr: String,
pub protocol: Protocol,
}
#[derive(Debug, Clone, Copy)]
pub enum Protocol {
Udp,
Tcp,
}
impl FromStr for Bind {
type Err = Error;
fn from_str(s: &str) -> Result<Self, Self::Err> {
match s.get(0..6) {
Some("tcp://") => Ok(Bind {
addr: s[6..].to_owned(),
protocol: Protocol::Tcp,
}),
Some("udp://") => Ok(Bind {
addr: s[6..].to_owned(),
protocol: Protocol::Udp,
}),
_ => Ok(Bind {
addr: s.to_owned(),
protocol: Protocol::Udp,
}),
}
}
}
impl Default for Config {
fn default() -> Self {
Config {
bind: Bind {
addr: "0.0.0.0:12201".to_owned(),
protocol: Protocol::Udp,
},
tcp_keep_alive_secs: 2 * 60, // 2 minutes
tcp_max_size_bytes: 1024 * 256, // 256kiB
}
}
}
/**
A GELF server.
*/
pub struct Server {
fut: BoxFuture<'static, ()>,
handle: Option<Handle>,
}
impl Server {
pub fn take_handle(&mut self) -> Option<Handle> {
self.handle.take()
}
pub fn run(self) -> Result<(), Error> {
// Run the server on a fresh runtime
// We attempt to shut this runtime down cleanly to release
// any used resources
let runtime = Runtime::new().expect("failed to start new Runtime");
runtime.block_on(self.fut);
runtime.shutdown_now();
Ok(())
}
}
/**
A handle to a running GELF server that can be used to interact with it
programmatically.
*/
pub struct Handle {
close: oneshot::Sender<()>,
}
impl Handle {
/**
Close the server.
*/
pub fn close(self) -> bool {
self.close.send(()).is_ok()
}
}
/**
Build a server to receive GELF messages and process them.
*/
pub fn build(
config: Config,
receive: impl FnMut(Bytes) -> Result<Option<Message>, Error> + Send + Sync + Unpin + Clone + 'static,
mut process: impl FnMut(Message) -> Result<(), Error> + Send + Sync + Unpin + Clone + 'static,
) -> Result<Server, Error> {
emit("Starting GELF server");
let addr = config.bind.addr.parse()?;
let (handle_tx, handle_rx) = oneshot::channel();
// Build a handle
let handle = Some(Handle { close: handle_tx });
let ctrl_c = ctrl_c()?;
let server = async move {
let incoming = match config.bind.protocol {
Protocol::Udp => {
let server = udp::Server::bind(&addr).await?.build(receive);
Either::Left(server)
}
Protocol::Tcp => {
let server = tcp::Server::bind(&addr).await?.build(
Duration::from_secs(config.tcp_keep_alive_secs),
config.tcp_max_size_bytes as usize,
receive,
);
Either::Right(server)
}
};
let mut close = handle_rx.fuse();
let mut ctrl_c = ctrl_c.fuse();
let mut incoming = incoming.fuse();
// NOTE: We don't use `?` here because we never want to carry results
// We always want to match them and deal with error cases directly
loop {
select! {
// A message that's ready to process
msg = incoming.next() => match msg {
// A complete message has been received
Some(Ok(Received::Complete(msg))) => {
increment!(server.receive_ok);
// Process the received message
match process(msg) {
Ok(()) => {
increment!(server.process_ok);
}
Err(err) => {
increment!(server.process_err);
emit_err(&err, "GELF processing failed");
}
}
},
// A chunk of a message has been received
Some(Ok(Received::Incomplete)) => {
continue;
},
// An error occurred receiving a chunk
Some(Err(err)) => {
increment!(server.receive_err);
emit_err(&err, "GELF processing failed");
},
None => {
unreachable!("receiver stream should never terminate")
},
},
// A termination signal from the programmatic handle
_ = close => {
emit("Handle closed; shutting down");
break;
},
// A termination signal from the environment
_ = ctrl_c.next() => {
emit("Termination signal received; shutting down");
break;
},
};
}
emit("Stopping GELF server");
Result::Ok::<(), Error>(())
};
Ok(Server {
fut: Box::pin(async move {
if let Err(err) = server.await {
emit_err(&err, "GELF server failed");
}
}),
handle,
})
}
enum Received {
Incomplete,
Complete(Message),
}
trait OptionMessageExt {
fn into_received(self) -> Option<Received>;
}
impl OptionMessageExt for Option<Message> {
fn into_received(self) -> Option<Received> {
match self {
Some(msg) => Some(Received::Complete(msg)),
None => Some(Received::Incomplete),
}
}
}
mod udp {
use super::*;
use tokio::{
codec::Decoder,
net::udp::{
UdpFramed,
UdpSocket,
},
};
pub(super) struct Server(UdpSocket);
impl Server {
pub(super) async fn bind(addr: &SocketAddr) -> Result<Self, Error> {
let sock = UdpSocket::bind(&addr).await?;
Ok(Server(sock))
}
pub(super) fn build(
self,
receive: impl FnMut(Bytes) -> Result<Option<Message>, Error> + Unpin,
) -> impl Stream<Item = Result<Received, Error>> {
emit("Setting up for UDP");
UdpFramed::new(self.0, Decode(receive)).map(|r| r.map(|(msg, _)| msg))
}
}
struct Decode<F>(F);
impl<F> Decoder for Decode<F>
where
F: FnMut(Bytes) -> Result<Option<Message>, Error> + Unpin,
{
type Item = Received;
type Error = Error;
fn decode(&mut self, src: &mut BytesMut) -> Result<Option<Self::Item>, Self::Error> {
// All datagrams are considered a valid message
let src = src.take().freeze();
Ok((self.0)(src)?.into_received())
}
}
}
mod tcp {
use super::*;
use std::{
cmp,
pin::Pin,
};
use futures::{
future,
stream::{
futures_unordered::FuturesUnordered,
Fuse,
Stream,
StreamFuture,
},
task::{
Context,
Poll,
},
};
use pin_utils::unsafe_pinned;
use tokio::{
codec::{
Decoder,
FramedRead,
},
net::tcp::TcpListener,
timer::Timeout,
};
pub(super) struct Server(TcpListener);
impl Server {
pub(super) async fn bind(addr: &SocketAddr) -> Result<Self, Error> {
let listener = TcpListener::bind(&addr).await?;
Ok(Server(listener))
}
pub(super) fn build(
self,
keep_alive: Duration,
max_size_bytes: usize,
receive: impl FnMut(Bytes) -> Result<Option<Message>, Error>
+ Send
+ Sync
+ Unpin
+ Clone
+ 'static,
) -> impl Stream<Item = Result<Received, Error>> {
emit("Setting up for TCP");
self.0
.incoming()
.filter_map(move |conn| {
match conn {
// The connection was successfully established
// Create a new protocol reader over it
// It'll get added to the connection pool
Ok(conn) => {
let decode = Decode::new(max_size_bytes, receive.clone());
let protocol = FramedRead::new(conn, decode);
// NOTE: The timeout stream wraps _the protocol_
// That means it'll close the connection if it doesn't
// produce a valid message within the timeframe, not just
// whether or not it writes to the stream
future::ready(Some(TimeoutStream::new(protocol, keep_alive)))
}
// The connection could not be established
// Just ignore it
Err(_) => future::ready(None),
}
})
.listen(1024)
}
}
struct Listen<S>
where
S: Stream,
S::Item: Stream,
{
accept: Fuse<S>,
connections: FuturesUnordered<StreamFuture<S::Item>>,
max: usize,
}
impl<S> Listen<S>
where
S: Stream,
S::Item: Stream,
{
unsafe_pinned!(accept: Fuse<S>);
unsafe_pinned!(connections: FuturesUnordered<StreamFuture<S::Item>>);
}
impl<S, T> Stream for Listen<S>
where
S: Stream + Unpin,
S::Item: Stream<Item = Result<T, Error>> + Unpin,
{
type Item = Result<T, Error>;
fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll<Option<Self::Item>> {
'poll_conns: loop {
// Fill up our accepted connections
'fill_conns: while self.connections.len() < self.max {
let conn = match self.as_mut().accept().poll_next(cx) {
Poll::Ready(Some(s)) => s.into_future(),
Poll::Ready(None) | Poll::Pending => break 'fill_conns,
};
self.connections.push(conn);
}
// Try polling the stream
// NOTE: We're assuming the unordered list will
// always make forward progress polling futures
// even if one future is particularly chatty
match self.as_mut().connections().poll_next(cx) {
// We have an item from a connection
Poll::Ready(Some((Some(item), conn))) => {
match item {
// A valid item was produced
// Return it and put the connection back in the pool.
Ok(item) => {
self.connections.push(conn.into_future());
return Poll::Ready(Some(Ok(item)));
}
// An error occurred, probably IO-related
// In this case the connection isn't returned to the pool.
// It's closed on drop and the error is returned.
Err(err) => {
return Poll::Ready(Some(Err(err.into())));
}
}
}
// A connection has closed
// Drop the connection and loop back
// This will mean attempting to accept a new connection
Poll::Ready(Some((None, _conn))) => continue 'poll_conns,
// The queue is empty or nothing is ready
Poll::Ready(None) | Poll::Pending => break 'poll_conns,
}
}
// If we've gotten this far, then there are no events for us to process
// and nothing was ready, so figure out if we're not done yet or if
// we've reached the end.
if self.accept.is_done() {
Poll::Ready(None)
} else {
Poll::Pending
}
}
}
trait StreamListenExt: Stream {
fn listen(self, max_connections: usize) -> Listen<Self>
where
Self: Sized + Unpin,
Self::Item: Stream + Unpin,
{
Listen {
accept: self.fuse(),
connections: FuturesUnordered::new(),
max: max_connections,
}
}
}
impl<S> StreamListenExt for S where S: Stream {}
struct Decode<F> {
max_size_bytes: usize,
read_head: usize,
discarding: bool,
receive: F,
}
impl<F> Decode<F> {
pub fn | (max_size_bytes: usize, receive: F) -> Self {
Decode {
read_head: 0,
discarding: false,
max_size_bytes,
receive,
}
}
}
impl<F> Decoder for Decode<F>
where
F: FnMut(Bytes) -> Result<Option<Message>, Error>,
{
type Item = Received;
type Error = Error;
fn decode(&mut self, src: &mut BytesMut) -> Result<Option<Self::Item>, Self::Error> {
'read_frame: loop {
let read_to = cmp::min(self.max_size_bytes.saturating_add(1), src.len());
// Messages are separated by null bytes
let sep_offset = src[self.read_head..].iter().position(|b| *b == b'\0');
match (self.discarding, sep_offset) {
// A delimiter was found
// Split it from the buffer and return
(false, Some(offset)) => {
let frame_end = offset + self.read_head;
// The message is technically sitting right there
// for us, but since it's bigger than our max capacity
// we still discard it
if frame_end > self.max_size_bytes {
increment!(server.tcp_msg_overflow);
self.discarding = true;
continue 'read_frame;
}
self.read_head = 0;
let src = src.split_to(frame_end + 1).freeze();
return Ok((self.receive)(src.slice_to(src.len() - 1))?.into_received());
}
// A delimiter wasn't found, but the incomplete
// message is too big. Start discarding the input
(false, None) if src.len() > self.max_size_bytes => {
increment!(server.tcp_msg_overflow);
self.discarding = true;
continue 'read_frame;
}
// A delimiter wasn't found
// Move the read head forward so we'll check
// from that position next time data arrives
(false, None) => {
self.read_head = read_to;
// As per the contract of `Decoder`, we return `None`
// here to indicate more data is needed to complete a frame
return Ok(None);
}
// We're discarding input and have reached the end of the message
// Advance the source buffer to the end of that message and try again
(true, Some(offset)) => {
src.advance(offset + self.read_head + 1);
self.discarding = false;
self.read_head = 0;
continue 'read_frame;
}
// We're discarding input but haven't reached the end of the message yet
(true, None) => {
src.advance(read_to);
self.read_head = 0;
if src.is_empty() {
// We still return `Ok` here, even though we have no intention
// of processing those bytes. Our maximum buffer size should still
// be limited by the initial capacity, since we're responsible for
// reserving additional capacity and aren't doing that
return Ok(None);
}
continue 'read_frame;
}
}
}
}
fn decode_eof(&mut self, src: &mut BytesMut) -> Result<Option<Self::Item>, Self::Error> {
Ok(match self.decode(src)? {
Some(frame) => Some(frame),
None => {
if src.is_empty() {
None
} else {
let src = src.take().freeze();
self.read_head = 0;
(self.receive)(src)?.into_received()
}
}
})
}
}
struct TimeoutStream<S> {
stream: Timeout<S>,
}
impl<S> TimeoutStream<S>
where
S: Stream,
{
fn new(stream: S, keep_alive: Duration) -> Self {
increment!(server.tcp_conn_accept);
TimeoutStream {
stream: Timeout::new(stream, keep_alive),
}
}
}
impl<S> Drop for TimeoutStream<S> {
fn drop(&mut self) {
increment!(server.tcp_conn_close);
}
}
impl<S> TimeoutStream<S> {
unsafe_pinned!(stream: Timeout<S>);
}
impl<S> Stream for TimeoutStream<S>
where
S: Stream,
{
type Item = S::Item;
fn poll_next(self: Pin<&mut Self>, cx: &mut Context) -> Poll<Option<Self::Item>> {
match self.stream().poll_next(cx) {
// The timeout has elapsed
Poll::Ready(Some(Err(_))) => {
increment!(server.tcp_conn_timeout);
Poll::Ready(None)
}
// The stream has produced an item
Poll::Ready(Some(Ok(item))) => Poll::Ready(Some(item)),
// The stream has completed
Poll::Ready(None) => Poll::Ready(None),
// The timeout hasn't elapsed and the stream hasn't produced an item
Poll::Pending => Poll::Pending,
}
}
}
}
| new | identifier_name |
server.rs | use std::{
marker::Unpin,
net::SocketAddr,
str::FromStr,
time::Duration,
};
use futures::{
future::{
BoxFuture,
Either,
},
select,
};
use tokio::{
net::signal::ctrl_c,
prelude::*,
runtime::Runtime,
sync::oneshot,
};
use bytes::{
Bytes,
BytesMut,
};
use crate::{
diagnostics::*,
error::Error,
receive::Message,
};
metrics! {
receive_ok,
receive_err,
process_ok,
process_err,
tcp_conn_accept,
tcp_conn_close,
tcp_conn_timeout,
tcp_msg_overflow
}
/**
Server configuration.
*/
#[derive(Debug, Clone)]
pub struct Config {
/**
The address to bind the server to.
*/
pub bind: Bind,
/**
The duration to keep client TCP connections alive for.
If the client doesn't complete a message within the period
then the connection will be closed.
*/
pub tcp_keep_alive_secs: u64,
/**
The maximum size of a single event before it'll be discarded.
*/
pub tcp_max_size_bytes: u64,
}
#[derive(Debug, Clone)]
pub struct Bind {
pub addr: String,
pub protocol: Protocol,
}
#[derive(Debug, Clone, Copy)]
pub enum Protocol {
Udp,
Tcp,
}
impl FromStr for Bind {
type Err = Error;
fn from_str(s: &str) -> Result<Self, Self::Err> {
match s.get(0..6) {
Some("tcp://") => Ok(Bind {
addr: s[6..].to_owned(),
protocol: Protocol::Tcp,
}),
Some("udp://") => Ok(Bind {
addr: s[6..].to_owned(),
protocol: Protocol::Udp,
}),
_ => Ok(Bind {
addr: s.to_owned(),
protocol: Protocol::Udp,
}),
}
}
}
impl Default for Config {
fn default() -> Self {
Config {
bind: Bind {
addr: "0.0.0.0:12201".to_owned(),
protocol: Protocol::Udp,
},
tcp_keep_alive_secs: 2 * 60, // 2 minutes
tcp_max_size_bytes: 1024 * 256, // 256kiB
}
}
}
/**
A GELF server.
*/
pub struct Server {
fut: BoxFuture<'static, ()>,
handle: Option<Handle>,
}
impl Server {
pub fn take_handle(&mut self) -> Option<Handle> {
self.handle.take()
}
pub fn run(self) -> Result<(), Error> {
// Run the server on a fresh runtime
// We attempt to shut this runtime down cleanly to release
// any used resources
let runtime = Runtime::new().expect("failed to start new Runtime");
runtime.block_on(self.fut);
runtime.shutdown_now();
Ok(())
}
}
/**
A handle to a running GELF server that can be used to interact with it
programmatically.
*/
pub struct Handle {
close: oneshot::Sender<()>,
}
impl Handle {
/**
Close the server.
*/
pub fn close(self) -> bool {
self.close.send(()).is_ok()
}
}
/**
Build a server to receive GELF messages and process them.
*/
pub fn build(
config: Config,
receive: impl FnMut(Bytes) -> Result<Option<Message>, Error> + Send + Sync + Unpin + Clone + 'static,
mut process: impl FnMut(Message) -> Result<(), Error> + Send + Sync + Unpin + Clone + 'static,
) -> Result<Server, Error> {
emit("Starting GELF server");
let addr = config.bind.addr.parse()?;
let (handle_tx, handle_rx) = oneshot::channel();
// Build a handle
let handle = Some(Handle { close: handle_tx });
let ctrl_c = ctrl_c()?;
let server = async move {
let incoming = match config.bind.protocol {
Protocol::Udp => {
let server = udp::Server::bind(&addr).await?.build(receive);
Either::Left(server)
}
Protocol::Tcp => {
let server = tcp::Server::bind(&addr).await?.build(
Duration::from_secs(config.tcp_keep_alive_secs),
config.tcp_max_size_bytes as usize,
receive,
);
Either::Right(server)
}
};
let mut close = handle_rx.fuse();
let mut ctrl_c = ctrl_c.fuse();
let mut incoming = incoming.fuse();
// NOTE: We don't use `?` here because we never want to carry results
// We always want to match them and deal with error cases directly
loop {
select! {
// A message that's ready to process
msg = incoming.next() => match msg {
// A complete message has been received
Some(Ok(Received::Complete(msg))) => {
increment!(server.receive_ok);
// Process the received message
match process(msg) {
Ok(()) => {
increment!(server.process_ok);
}
Err(err) => {
increment!(server.process_err);
emit_err(&err, "GELF processing failed");
}
}
},
// A chunk of a message has been received
Some(Ok(Received::Incomplete)) => {
continue;
},
// An error occurred receiving a chunk
Some(Err(err)) => {
increment!(server.receive_err);
emit_err(&err, "GELF processing failed");
},
None => {
unreachable!("receiver stream should never terminate")
},
},
// A termination signal from the programmatic handle
_ = close => {
emit("Handle closed; shutting down");
break;
},
// A termination signal from the environment
_ = ctrl_c.next() => {
emit("Termination signal received; shutting down");
break;
},
};
}
emit("Stopping GELF server");
Result::Ok::<(), Error>(())
};
Ok(Server {
fut: Box::pin(async move {
if let Err(err) = server.await {
emit_err(&err, "GELF server failed");
}
}),
handle,
})
}
enum Received {
Incomplete,
Complete(Message),
}
trait OptionMessageExt {
fn into_received(self) -> Option<Received>;
}
impl OptionMessageExt for Option<Message> {
fn into_received(self) -> Option<Received> {
match self {
Some(msg) => Some(Received::Complete(msg)),
None => Some(Received::Incomplete),
}
}
}
mod udp {
use super::*;
use tokio::{
codec::Decoder,
net::udp::{
UdpFramed,
UdpSocket,
},
};
pub(super) struct Server(UdpSocket);
impl Server {
pub(super) async fn bind(addr: &SocketAddr) -> Result<Self, Error> {
let sock = UdpSocket::bind(&addr).await?;
Ok(Server(sock))
}
pub(super) fn build(
self,
receive: impl FnMut(Bytes) -> Result<Option<Message>, Error> + Unpin,
) -> impl Stream<Item = Result<Received, Error>> |
}
struct Decode<F>(F);
impl<F> Decoder for Decode<F>
where
F: FnMut(Bytes) -> Result<Option<Message>, Error> + Unpin,
{
type Item = Received;
type Error = Error;
fn decode(&mut self, src: &mut BytesMut) -> Result<Option<Self::Item>, Self::Error> {
// All datagrams are considered a valid message
let src = src.take().freeze();
Ok((self.0)(src)?.into_received())
}
}
}
mod tcp {
use super::*;
use std::{
cmp,
pin::Pin,
};
use futures::{
future,
stream::{
futures_unordered::FuturesUnordered,
Fuse,
Stream,
StreamFuture,
},
task::{
Context,
Poll,
},
};
use pin_utils::unsafe_pinned;
use tokio::{
codec::{
Decoder,
FramedRead,
},
net::tcp::TcpListener,
timer::Timeout,
};
pub(super) struct Server(TcpListener);
impl Server {
pub(super) async fn bind(addr: &SocketAddr) -> Result<Self, Error> {
let listener = TcpListener::bind(&addr).await?;
Ok(Server(listener))
}
pub(super) fn build(
self,
keep_alive: Duration,
max_size_bytes: usize,
receive: impl FnMut(Bytes) -> Result<Option<Message>, Error>
+ Send
+ Sync
+ Unpin
+ Clone
+ 'static,
) -> impl Stream<Item = Result<Received, Error>> {
emit("Setting up for TCP");
self.0
.incoming()
.filter_map(move |conn| {
match conn {
// The connection was successfully established
// Create a new protocol reader over it
// It'll get added to the connection pool
Ok(conn) => {
let decode = Decode::new(max_size_bytes, receive.clone());
let protocol = FramedRead::new(conn, decode);
// NOTE: The timeout stream wraps _the protocol_
// That means it'll close the connection if it doesn't
// produce a valid message within the timeframe, not just
// whether or not it writes to the stream
future::ready(Some(TimeoutStream::new(protocol, keep_alive)))
}
// The connection could not be established
// Just ignore it
Err(_) => future::ready(None),
}
})
.listen(1024)
}
}
struct Listen<S>
where
S: Stream,
S::Item: Stream,
{
accept: Fuse<S>,
connections: FuturesUnordered<StreamFuture<S::Item>>,
max: usize,
}
impl<S> Listen<S>
where
S: Stream,
S::Item: Stream,
{
unsafe_pinned!(accept: Fuse<S>);
unsafe_pinned!(connections: FuturesUnordered<StreamFuture<S::Item>>);
}
impl<S, T> Stream for Listen<S>
where
S: Stream + Unpin,
S::Item: Stream<Item = Result<T, Error>> + Unpin,
{
type Item = Result<T, Error>;
fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll<Option<Self::Item>> {
'poll_conns: loop {
// Fill up our accepted connections
'fill_conns: while self.connections.len() < self.max {
let conn = match self.as_mut().accept().poll_next(cx) {
Poll::Ready(Some(s)) => s.into_future(),
Poll::Ready(None) | Poll::Pending => break 'fill_conns,
};
self.connections.push(conn);
}
// Try polling the stream
// NOTE: We're assuming the unordered list will
// always make forward progress polling futures
// even if one future is particularly chatty
match self.as_mut().connections().poll_next(cx) {
// We have an item from a connection
Poll::Ready(Some((Some(item), conn))) => {
match item {
// A valid item was produced
// Return it and put the connection back in the pool.
Ok(item) => {
self.connections.push(conn.into_future());
return Poll::Ready(Some(Ok(item)));
}
// An error occurred, probably IO-related
// In this case the connection isn't returned to the pool.
// It's closed on drop and the error is returned.
Err(err) => {
return Poll::Ready(Some(Err(err.into())));
}
}
}
// A connection has closed
// Drop the connection and loop back
// This will mean attempting to accept a new connection
Poll::Ready(Some((None, _conn))) => continue 'poll_conns,
// The queue is empty or nothing is ready
Poll::Ready(None) | Poll::Pending => break 'poll_conns,
}
}
// If we've gotten this far, then there are no events for us to process
// and nothing was ready, so figure out if we're not done yet or if
// we've reached the end.
if self.accept.is_done() {
Poll::Ready(None)
} else {
Poll::Pending
}
}
}
trait StreamListenExt: Stream {
fn listen(self, max_connections: usize) -> Listen<Self>
where
Self: Sized + Unpin,
Self::Item: Stream + Unpin,
{
Listen {
accept: self.fuse(),
connections: FuturesUnordered::new(),
max: max_connections,
}
}
}
impl<S> StreamListenExt for S where S: Stream {}
struct Decode<F> {
max_size_bytes: usize,
read_head: usize,
discarding: bool,
receive: F,
}
impl<F> Decode<F> {
pub fn new(max_size_bytes: usize, receive: F) -> Self {
Decode {
read_head: 0,
discarding: false,
max_size_bytes,
receive,
}
}
}
impl<F> Decoder for Decode<F>
where
F: FnMut(Bytes) -> Result<Option<Message>, Error>,
{
type Item = Received;
type Error = Error;
fn decode(&mut self, src: &mut BytesMut) -> Result<Option<Self::Item>, Self::Error> {
'read_frame: loop {
let read_to = cmp::min(self.max_size_bytes.saturating_add(1), src.len());
// Messages are separated by null bytes
let sep_offset = src[self.read_head..].iter().position(|b| *b == b'\0');
match (self.discarding, sep_offset) {
// A delimiter was found
// Split it from the buffer and return
(false, Some(offset)) => {
let frame_end = offset + self.read_head;
// The message is technically sitting right there
// for us, but since it's bigger than our max capacity
// we still discard it
if frame_end > self.max_size_bytes {
increment!(server.tcp_msg_overflow);
self.discarding = true;
continue 'read_frame;
}
self.read_head = 0;
let src = src.split_to(frame_end + 1).freeze();
return Ok((self.receive)(src.slice_to(src.len() - 1))?.into_received());
}
// A delimiter wasn't found, but the incomplete
// message is too big. Start discarding the input
(false, None) if src.len() > self.max_size_bytes => {
increment!(server.tcp_msg_overflow);
self.discarding = true;
continue 'read_frame;
}
// A delimiter wasn't found
// Move the read head forward so we'll check
// from that position next time data arrives
(false, None) => {
self.read_head = read_to;
// As per the contract of `Decoder`, we return `None`
// here to indicate more data is needed to complete a frame
return Ok(None);
}
// We're discarding input and have reached the end of the message
// Advance the source buffer to the end of that message and try again
(true, Some(offset)) => {
src.advance(offset + self.read_head + 1);
self.discarding = false;
self.read_head = 0;
continue 'read_frame;
}
// We're discarding input but haven't reached the end of the message yet
(true, None) => {
src.advance(read_to);
self.read_head = 0;
if src.is_empty() {
// We still return `Ok` here, even though we have no intention
// of processing those bytes. Our maximum buffer size should still
// be limited by the initial capacity, since we're responsible for
// reserving additional capacity and aren't doing that
return Ok(None);
}
continue 'read_frame;
}
}
}
}
fn decode_eof(&mut self, src: &mut BytesMut) -> Result<Option<Self::Item>, Self::Error> {
Ok(match self.decode(src)? {
Some(frame) => Some(frame),
None => {
if src.is_empty() {
None
} else {
let src = src.take().freeze();
self.read_head = 0;
(self.receive)(src)?.into_received()
}
}
})
}
}
struct TimeoutStream<S> {
stream: Timeout<S>,
}
impl<S> TimeoutStream<S>
where
S: Stream,
{
fn new(stream: S, keep_alive: Duration) -> Self {
increment!(server.tcp_conn_accept);
TimeoutStream {
stream: Timeout::new(stream, keep_alive),
}
}
}
impl<S> Drop for TimeoutStream<S> {
fn drop(&mut self) {
increment!(server.tcp_conn_close);
}
}
impl<S> TimeoutStream<S> {
unsafe_pinned!(stream: Timeout<S>);
}
impl<S> Stream for TimeoutStream<S>
where
S: Stream,
{
type Item = S::Item;
fn poll_next(self: Pin<&mut Self>, cx: &mut Context) -> Poll<Option<Self::Item>> {
match self.stream().poll_next(cx) {
// The timeout has elapsed
Poll::Ready(Some(Err(_))) => {
increment!(server.tcp_conn_timeout);
Poll::Ready(None)
}
// The stream has produced an item
Poll::Ready(Some(Ok(item))) => Poll::Ready(Some(item)),
// The stream has completed
Poll::Ready(None) => Poll::Ready(None),
// The timeout hasn't elapsed and the stream hasn't produced an item
Poll::Pending => Poll::Pending,
}
}
}
}
| {
emit("Setting up for UDP");
UdpFramed::new(self.0, Decode(receive)).map(|r| r.map(|(msg, _)| msg))
} | identifier_body |
server.rs | use std::{
marker::Unpin,
net::SocketAddr,
str::FromStr,
time::Duration,
};
use futures::{
future::{
BoxFuture,
Either,
},
select,
};
use tokio::{
net::signal::ctrl_c,
prelude::*,
runtime::Runtime,
sync::oneshot,
};
use bytes::{
Bytes,
BytesMut,
};
use crate::{
diagnostics::*,
error::Error,
receive::Message,
};
metrics! {
receive_ok,
receive_err,
process_ok,
process_err,
tcp_conn_accept,
tcp_conn_close,
tcp_conn_timeout,
tcp_msg_overflow
}
/**
Server configuration.
*/
#[derive(Debug, Clone)]
pub struct Config {
/**
The address to bind the server to.
*/
pub bind: Bind,
/**
The duration to keep client TCP connections alive for.
If the client doesn't complete a message within the period
then the connection will be closed.
*/
pub tcp_keep_alive_secs: u64,
/**
The maximum size of a single event before it'll be discarded.
*/
pub tcp_max_size_bytes: u64,
}
#[derive(Debug, Clone)]
pub struct Bind {
pub addr: String,
pub protocol: Protocol,
}
#[derive(Debug, Clone, Copy)]
pub enum Protocol {
Udp,
Tcp,
}
impl FromStr for Bind {
type Err = Error;
fn from_str(s: &str) -> Result<Self, Self::Err> {
match s.get(0..6) {
Some("tcp://") => Ok(Bind {
addr: s[6..].to_owned(),
protocol: Protocol::Tcp,
}),
Some("udp://") => Ok(Bind {
addr: s[6..].to_owned(),
protocol: Protocol::Udp,
}),
_ => Ok(Bind {
addr: s.to_owned(),
protocol: Protocol::Udp,
}),
}
}
}
impl Default for Config {
fn default() -> Self {
Config {
bind: Bind {
addr: "0.0.0.0:12201".to_owned(),
protocol: Protocol::Udp,
},
tcp_keep_alive_secs: 2 * 60, // 2 minutes
tcp_max_size_bytes: 1024 * 256, // 256kiB
}
}
}
/**
A GELF server.
*/
pub struct Server {
fut: BoxFuture<'static, ()>,
handle: Option<Handle>,
}
impl Server {
pub fn take_handle(&mut self) -> Option<Handle> {
self.handle.take()
}
pub fn run(self) -> Result<(), Error> {
// Run the server on a fresh runtime
// We attempt to shut this runtime down cleanly to release
// any used resources
let runtime = Runtime::new().expect("failed to start new Runtime");
runtime.block_on(self.fut);
runtime.shutdown_now();
Ok(())
}
}
/**
A handle to a running GELF server that can be used to interact with it
programmatically.
*/
pub struct Handle {
close: oneshot::Sender<()>,
}
impl Handle {
/**
Close the server.
*/
pub fn close(self) -> bool {
self.close.send(()).is_ok()
}
}
/**
Build a server to receive GELF messages and process them.
*/
pub fn build(
config: Config,
receive: impl FnMut(Bytes) -> Result<Option<Message>, Error> + Send + Sync + Unpin + Clone + 'static,
mut process: impl FnMut(Message) -> Result<(), Error> + Send + Sync + Unpin + Clone + 'static,
) -> Result<Server, Error> {
emit("Starting GELF server");
let addr = config.bind.addr.parse()?;
let (handle_tx, handle_rx) = oneshot::channel();
// Build a handle
let handle = Some(Handle { close: handle_tx });
let ctrl_c = ctrl_c()?;
let server = async move {
let incoming = match config.bind.protocol {
Protocol::Udp => {
let server = udp::Server::bind(&addr).await?.build(receive);
Either::Left(server)
}
Protocol::Tcp => {
let server = tcp::Server::bind(&addr).await?.build(
Duration::from_secs(config.tcp_keep_alive_secs),
config.tcp_max_size_bytes as usize,
receive,
);
Either::Right(server)
}
};
let mut close = handle_rx.fuse();
let mut ctrl_c = ctrl_c.fuse();
let mut incoming = incoming.fuse();
// NOTE: We don't use `?` here because we never want to carry results
// We always want to match them and deal with error cases directly
loop {
select! {
// A message that's ready to process
msg = incoming.next() => match msg {
// A complete message has been received | match process(msg) {
Ok(()) => {
increment!(server.process_ok);
}
Err(err) => {
increment!(server.process_err);
emit_err(&err, "GELF processing failed");
}
}
},
// A chunk of a message has been received
Some(Ok(Received::Incomplete)) => {
continue;
},
// An error occurred receiving a chunk
Some(Err(err)) => {
increment!(server.receive_err);
emit_err(&err, "GELF processing failed");
},
None => {
unreachable!("receiver stream should never terminate")
},
},
// A termination signal from the programmatic handle
_ = close => {
emit("Handle closed; shutting down");
break;
},
// A termination signal from the environment
_ = ctrl_c.next() => {
emit("Termination signal received; shutting down");
break;
},
};
}
emit("Stopping GELF server");
Result::Ok::<(), Error>(())
};
Ok(Server {
fut: Box::pin(async move {
if let Err(err) = server.await {
emit_err(&err, "GELF server failed");
}
}),
handle,
})
}
enum Received {
Incomplete,
Complete(Message),
}
trait OptionMessageExt {
fn into_received(self) -> Option<Received>;
}
impl OptionMessageExt for Option<Message> {
fn into_received(self) -> Option<Received> {
match self {
Some(msg) => Some(Received::Complete(msg)),
None => Some(Received::Incomplete),
}
}
}
mod udp {
use super::*;
use tokio::{
codec::Decoder,
net::udp::{
UdpFramed,
UdpSocket,
},
};
pub(super) struct Server(UdpSocket);
impl Server {
pub(super) async fn bind(addr: &SocketAddr) -> Result<Self, Error> {
let sock = UdpSocket::bind(&addr).await?;
Ok(Server(sock))
}
pub(super) fn build(
self,
receive: impl FnMut(Bytes) -> Result<Option<Message>, Error> + Unpin,
) -> impl Stream<Item = Result<Received, Error>> {
emit("Setting up for UDP");
UdpFramed::new(self.0, Decode(receive)).map(|r| r.map(|(msg, _)| msg))
}
}
struct Decode<F>(F);
impl<F> Decoder for Decode<F>
where
F: FnMut(Bytes) -> Result<Option<Message>, Error> + Unpin,
{
type Item = Received;
type Error = Error;
fn decode(&mut self, src: &mut BytesMut) -> Result<Option<Self::Item>, Self::Error> {
// All datagrams are considered a valid message
let src = src.take().freeze();
Ok((self.0)(src)?.into_received())
}
}
}
mod tcp {
use super::*;
use std::{
cmp,
pin::Pin,
};
use futures::{
future,
stream::{
futures_unordered::FuturesUnordered,
Fuse,
Stream,
StreamFuture,
},
task::{
Context,
Poll,
},
};
use pin_utils::unsafe_pinned;
use tokio::{
codec::{
Decoder,
FramedRead,
},
net::tcp::TcpListener,
timer::Timeout,
};
pub(super) struct Server(TcpListener);
impl Server {
pub(super) async fn bind(addr: &SocketAddr) -> Result<Self, Error> {
let listener = TcpListener::bind(&addr).await?;
Ok(Server(listener))
}
pub(super) fn build(
self,
keep_alive: Duration,
max_size_bytes: usize,
receive: impl FnMut(Bytes) -> Result<Option<Message>, Error>
+ Send
+ Sync
+ Unpin
+ Clone
+ 'static,
) -> impl Stream<Item = Result<Received, Error>> {
emit("Setting up for TCP");
self.0
.incoming()
.filter_map(move |conn| {
match conn {
// The connection was successfully established
// Create a new protocol reader over it
// It'll get added to the connection pool
Ok(conn) => {
let decode = Decode::new(max_size_bytes, receive.clone());
let protocol = FramedRead::new(conn, decode);
// NOTE: The timeout stream wraps _the protocol_
// That means it'll close the connection if it doesn't
// produce a valid message within the timeframe, not just
// whether or not it writes to the stream
future::ready(Some(TimeoutStream::new(protocol, keep_alive)))
}
// The connection could not be established
// Just ignore it
Err(_) => future::ready(None),
}
})
.listen(1024)
}
}
struct Listen<S>
where
S: Stream,
S::Item: Stream,
{
accept: Fuse<S>,
connections: FuturesUnordered<StreamFuture<S::Item>>,
max: usize,
}
impl<S> Listen<S>
where
S: Stream,
S::Item: Stream,
{
unsafe_pinned!(accept: Fuse<S>);
unsafe_pinned!(connections: FuturesUnordered<StreamFuture<S::Item>>);
}
impl<S, T> Stream for Listen<S>
where
S: Stream + Unpin,
S::Item: Stream<Item = Result<T, Error>> + Unpin,
{
type Item = Result<T, Error>;
fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll<Option<Self::Item>> {
'poll_conns: loop {
// Fill up our accepted connections
'fill_conns: while self.connections.len() < self.max {
let conn = match self.as_mut().accept().poll_next(cx) {
Poll::Ready(Some(s)) => s.into_future(),
Poll::Ready(None) | Poll::Pending => break 'fill_conns,
};
self.connections.push(conn);
}
// Try polling the stream
// NOTE: We're assuming the unordered list will
// always make forward progress polling futures
// even if one future is particularly chatty
match self.as_mut().connections().poll_next(cx) {
// We have an item from a connection
Poll::Ready(Some((Some(item), conn))) => {
match item {
// A valid item was produced
// Return it and put the connection back in the pool.
Ok(item) => {
self.connections.push(conn.into_future());
return Poll::Ready(Some(Ok(item)));
}
// An error occurred, probably IO-related
// In this case the connection isn't returned to the pool.
// It's closed on drop and the error is returned.
Err(err) => {
return Poll::Ready(Some(Err(err.into())));
}
}
}
// A connection has closed
// Drop the connection and loop back
// This will mean attempting to accept a new connection
Poll::Ready(Some((None, _conn))) => continue 'poll_conns,
// The queue is empty or nothing is ready
Poll::Ready(None) | Poll::Pending => break 'poll_conns,
}
}
// If we've gotten this far, then there are no events for us to process
// and nothing was ready, so figure out if we're not done yet or if
// we've reached the end.
if self.accept.is_done() {
Poll::Ready(None)
} else {
Poll::Pending
}
}
}
trait StreamListenExt: Stream {
fn listen(self, max_connections: usize) -> Listen<Self>
where
Self: Sized + Unpin,
Self::Item: Stream + Unpin,
{
Listen {
accept: self.fuse(),
connections: FuturesUnordered::new(),
max: max_connections,
}
}
}
impl<S> StreamListenExt for S where S: Stream {}
struct Decode<F> {
max_size_bytes: usize,
read_head: usize,
discarding: bool,
receive: F,
}
impl<F> Decode<F> {
pub fn new(max_size_bytes: usize, receive: F) -> Self {
Decode {
read_head: 0,
discarding: false,
max_size_bytes,
receive,
}
}
}
impl<F> Decoder for Decode<F>
where
F: FnMut(Bytes) -> Result<Option<Message>, Error>,
{
type Item = Received;
type Error = Error;
fn decode(&mut self, src: &mut BytesMut) -> Result<Option<Self::Item>, Self::Error> {
'read_frame: loop {
let read_to = cmp::min(self.max_size_bytes.saturating_add(1), src.len());
// Messages are separated by null bytes
let sep_offset = src[self.read_head..].iter().position(|b| *b == b'\0');
match (self.discarding, sep_offset) {
// A delimiter was found
// Split it from the buffer and return
(false, Some(offset)) => {
let frame_end = offset + self.read_head;
// The message is technically sitting right there
// for us, but since it's bigger than our max capacity
// we still discard it
if frame_end > self.max_size_bytes {
increment!(server.tcp_msg_overflow);
self.discarding = true;
continue 'read_frame;
}
self.read_head = 0;
let src = src.split_to(frame_end + 1).freeze();
return Ok((self.receive)(src.slice_to(src.len() - 1))?.into_received());
}
// A delimiter wasn't found, but the incomplete
// message is too big. Start discarding the input
(false, None) if src.len() > self.max_size_bytes => {
increment!(server.tcp_msg_overflow);
self.discarding = true;
continue 'read_frame;
}
// A delimiter wasn't found
// Move the read head forward so we'll check
// from that position next time data arrives
(false, None) => {
self.read_head = read_to;
// As per the contract of `Decoder`, we return `None`
// here to indicate more data is needed to complete a frame
return Ok(None);
}
// We're discarding input and have reached the end of the message
// Advance the source buffer to the end of that message and try again
(true, Some(offset)) => {
src.advance(offset + self.read_head + 1);
self.discarding = false;
self.read_head = 0;
continue 'read_frame;
}
// We're discarding input but haven't reached the end of the message yet
(true, None) => {
src.advance(read_to);
self.read_head = 0;
if src.is_empty() {
// We still return `Ok` here, even though we have no intention
// of processing those bytes. Our maximum buffer size should still
// be limited by the initial capacity, since we're responsible for
// reserving additional capacity and aren't doing that
return Ok(None);
}
continue 'read_frame;
}
}
}
}
fn decode_eof(&mut self, src: &mut BytesMut) -> Result<Option<Self::Item>, Self::Error> {
Ok(match self.decode(src)? {
Some(frame) => Some(frame),
None => {
if src.is_empty() {
None
} else {
let src = src.take().freeze();
self.read_head = 0;
(self.receive)(src)?.into_received()
}
}
})
}
}
struct TimeoutStream<S> {
stream: Timeout<S>,
}
impl<S> TimeoutStream<S>
where
S: Stream,
{
fn new(stream: S, keep_alive: Duration) -> Self {
increment!(server.tcp_conn_accept);
TimeoutStream {
stream: Timeout::new(stream, keep_alive),
}
}
}
impl<S> Drop for TimeoutStream<S> {
fn drop(&mut self) {
increment!(server.tcp_conn_close);
}
}
impl<S> TimeoutStream<S> {
unsafe_pinned!(stream: Timeout<S>);
}
impl<S> Stream for TimeoutStream<S>
where
S: Stream,
{
type Item = S::Item;
fn poll_next(self: Pin<&mut Self>, cx: &mut Context) -> Poll<Option<Self::Item>> {
match self.stream().poll_next(cx) {
// The timeout has elapsed
Poll::Ready(Some(Err(_))) => {
increment!(server.tcp_conn_timeout);
Poll::Ready(None)
}
// The stream has produced an item
Poll::Ready(Some(Ok(item))) => Poll::Ready(Some(item)),
// The stream has completed
Poll::Ready(None) => Poll::Ready(None),
// The timeout hasn't elapsed and the stream hasn't produced an item
Poll::Pending => Poll::Pending,
}
}
}
} | Some(Ok(Received::Complete(msg))) => {
increment!(server.receive_ok);
// Process the received message | random_line_split |
remote.rs | use super::*;
use tokio::io::{AsyncReadExt, AsyncWriteExt};
use tokio::io::{ReadHalf, WriteHalf};
use tokio::net::TcpStream;
use tracing::debug;
use tracing::{error, Instrument};
async fn direct_to_control(mut incoming: TcpStream) {
let mut control_socket =
match TcpStream::connect(format!("localhost:{}", CONFIG.control_port)).await {
Ok(s) => s,
Err(error) => {
tracing::warn!(?error, "failed to connect to local control server");
return;
}
};
let (mut control_r, mut control_w) = control_socket.split();
let (mut incoming_r, mut incoming_w) = incoming.split();
let join_1 = tokio::io::copy(&mut control_r, &mut incoming_w);
let join_2 = tokio::io::copy(&mut incoming_r, &mut control_w);
match futures::future::join(join_1, join_2).await {
(Ok(_), Ok(_)) => {}
(Err(error), _) | (_, Err(error)) => {
tracing::error!(?error, "directing stream to control failed");
}
}
}
#[tracing::instrument(skip(socket))]
pub async fn accept_connection(socket: TcpStream) {
// peek the host of the http request
// if health check, then handle it and return
let StreamWithPeekedHost {
mut socket,
host,
forwarded_for,
} = match peek_http_request_host(socket).await {
Some(s) => s,
None => return,
};
tracing::info!(%host, %forwarded_for, "new remote connection");
// parse the host string and find our client
if CONFIG.allowed_hosts.contains(&host) {
error!("redirect to homepage");
let _ = socket.write_all(HTTP_REDIRECT_RESPONSE).await;
return;
}
let host = match validate_host_prefix(&host) {
Some(sub_domain) => sub_domain,
None => {
error!("invalid host specified");
let _ = socket.write_all(HTTP_INVALID_HOST_RESPONSE).await;
return;
}
};
// Special case -- we redirect this tcp connection to the control server
if host.as_str() == "wormhole" {
direct_to_control(socket).await;
return;
}
// find the client listening for this host
let client = match Connections::find_by_host(&host) {
Some(client) => client.clone(),
None => {
// check other instances that may be serving this host
match network::instance_for_host(&host).await {
Ok((instance, _)) => {
network::proxy_stream(instance, socket).await;
return;
}
Err(network::Error::DoesNotServeHost) => {
error!(%host, "no tunnel found");
let _ = socket.write_all(HTTP_NOT_FOUND_RESPONSE).await;
return;
}
Err(error) => {
error!(%host, ?error, "failed to find instance");
let _ = socket.write_all(HTTP_ERROR_LOCATING_HOST_RESPONSE).await;
return;
}
}
}
};
// allocate a new stream for this request
let (active_stream, queue_rx) = ActiveStream::new(client.clone());
let stream_id = active_stream.id.clone();
tracing::debug!(
stream_id = %active_stream.id.to_string(),
"new stream connected"
);
let (stream, sink) = tokio::io::split(socket);
// add our stream
ACTIVE_STREAMS.insert(stream_id.clone(), active_stream.clone());
// read from socket, write to client
let span = observability::remote_trace("process_tcp_stream");
tokio::spawn(
async move {
process_tcp_stream(active_stream, stream).await;
}
.instrument(span),
);
// read from client, write to socket
let span = observability::remote_trace("tunnel_to_stream");
tokio::spawn(
async move {
tunnel_to_stream(host, stream_id, sink, queue_rx).await;
}
.instrument(span),
);
}
fn validate_host_prefix(host: &str) -> Option<String> {
let url = format!("http://{}", host);
let host = match url::Url::parse(&url)
.map(|u| u.host().map(|h| h.to_owned()))
.unwrap_or(None)
{
Some(domain) => domain.to_string(),
None => {
error!("invalid host header");
return None;
}
};
let domain_segments = host.split(".").collect::<Vec<&str>>();
let prefix = &domain_segments[0];
let remaining = &domain_segments[1..].join(".");
if CONFIG.allowed_hosts.contains(remaining) {
Some(prefix.to_string())
} else {
None
}
}
/// Response Constants
const HTTP_REDIRECT_RESPONSE:&'static [u8] = b"HTTP/1.1 301 Moved Permanently\r\nLocation: https://tunnelto.dev/\r\nContent-Length: 20\r\n\r\nhttps://tunnelto.dev";
const HTTP_INVALID_HOST_RESPONSE: &'static [u8] =
b"HTTP/1.1 400\r\nContent-Length: 23\r\n\r\nError: Invalid Hostname";
const HTTP_NOT_FOUND_RESPONSE: &'static [u8] =
b"HTTP/1.1 404\r\nContent-Length: 23\r\n\r\nError: Tunnel Not Found";
const HTTP_ERROR_LOCATING_HOST_RESPONSE: &'static [u8] =
b"HTTP/1.1 500\r\nContent-Length: 27\r\n\r\nError: Error finding tunnel";
const HTTP_TUNNEL_REFUSED_RESPONSE: &'static [u8] =
b"HTTP/1.1 500\r\nContent-Length: 32\r\n\r\nTunnel says: connection refused.";
const HTTP_OK_RESPONSE: &'static [u8] = b"HTTP/1.1 200 OK\r\nContent-Length: 2\r\n\r\nok";
const HEALTH_CHECK_PATH: &'static [u8] = b"/0xDEADBEEF_HEALTH_CHECK";
struct StreamWithPeekedHost {
socket: TcpStream,
host: String,
forwarded_for: String,
}
/// Filter incoming remote streams
#[tracing::instrument(skip(socket))]
async fn peek_http_request_host(mut socket: TcpStream) -> Option<StreamWithPeekedHost> {
/// Note we return out if the host header is not found
/// within the first 4kb of the request.
const MAX_HEADER_PEAK: usize = 4096;
let mut buf = vec![0; MAX_HEADER_PEAK]; //1kb
tracing::debug!("checking stream headers");
let n = match socket.peek(&mut buf).await {
Ok(n) => n,
Err(e) => {
error!("failed to read from tcp socket to determine host: {:?}", e);
return None;
}
};
// make sure we're not peeking the same header bytes
if n == 0 {
tracing::debug!("unable to peek header bytes");
return None;
}
tracing::debug!("peeked {} stream bytes ", n);
let mut headers = [httparse::EMPTY_HEADER; 64]; // 30 seems like a generous # of headers
let mut req = httparse::Request::new(&mut headers);
if let Err(e) = req.parse(&buf[..n]) {
error!("failed to parse incoming http bytes: {:?}", e);
return None;
}
// Handle the health check route
if req.path.map(|s| s.as_bytes()) == Some(HEALTH_CHECK_PATH) {
let _ = socket.write_all(HTTP_OK_RESPONSE).await.map_err(|e| {
error!("failed to write health_check: {:?}", e);
});
return None;
}
// get the ip addr in the header
let forwarded_for = if let Some(Ok(forwarded_for)) = req
.headers
.iter()
.filter(|h| h.name.to_lowercase() == "x-forwarded-for".to_string())
.map(|h| std::str::from_utf8(h.value))
.next()
{
forwarded_for.to_string()
} else {
String::default()
};
// look for a host header
if let Some(Ok(host)) = req
.headers
.iter()
.filter(|h| h.name.to_lowercase() == "host".to_string())
.map(|h| std::str::from_utf8(h.value))
.next()
{
tracing::info!(host=%host, path=%req.path.unwrap_or_default(), "peek request");
return Some(StreamWithPeekedHost {
socket,
host: host.to_string(),
forwarded_for,
});
}
tracing::info!("found no host header, dropping connection.");
None
}
/// Process Messages from the control path in & out of the remote stream
#[tracing::instrument(skip(tunnel_stream, tcp_stream))]
async fn process_tcp_stream(mut tunnel_stream: ActiveStream, mut tcp_stream: ReadHalf<TcpStream>) |
#[tracing::instrument(skip(sink, stream_id, queue))]
async fn tunnel_to_stream(
subdomain: String,
stream_id: StreamId,
mut sink: WriteHalf<TcpStream>,
mut queue: UnboundedReceiver<StreamMessage>,
) {
loop {
let result = queue.next().await;
let result = if let Some(message) = result {
match message {
StreamMessage::Data(data) => Some(data),
StreamMessage::TunnelRefused => {
tracing::debug!(?stream_id, "tunnel refused");
let _ = sink.write_all(HTTP_TUNNEL_REFUSED_RESPONSE).await;
None
}
StreamMessage::NoClientTunnel => {
tracing::info!(%subdomain, ?stream_id, "client tunnel not found");
let _ = sink.write_all(HTTP_NOT_FOUND_RESPONSE).await;
None
}
}
} else {
None
};
let data = match result {
Some(data) => data,
None => {
tracing::debug!("done tunneling to sink");
let _ = sink.shutdown().await.map_err(|_e| {
error!("error shutting down tcp stream");
});
ACTIVE_STREAMS.remove(&stream_id);
return;
}
};
let result = sink.write_all(&data).await;
if let Some(error) = result.err() {
tracing::warn!(?error, "stream closed, disconnecting");
return;
}
}
}
| {
// send initial control stream init to client
control_server::send_client_stream_init(tunnel_stream.clone()).await;
// now read from stream and forward to clients
let mut buf = [0; 1024];
loop {
// client is no longer connected
if Connections::get(&tunnel_stream.client.id).is_none() {
debug!("client disconnected, closing stream");
let _ = tunnel_stream.tx.send(StreamMessage::NoClientTunnel).await;
tunnel_stream.tx.close_channel();
return;
}
// read from stream
let n = match tcp_stream.read(&mut buf).await {
Ok(n) => n,
Err(e) => {
error!("failed to read from tcp socket: {:?}", e);
return;
}
};
if n == 0 {
debug!("stream ended");
let _ = tunnel_stream
.client
.tx
.send(ControlPacket::End(tunnel_stream.id.clone()))
.await
.map_err(|e| {
error!("failed to send end signal: {:?}", e);
});
return;
}
debug!("read {} bytes", n);
let data = &buf[..n];
let packet = ControlPacket::Data(tunnel_stream.id.clone(), data.to_vec());
match tunnel_stream.client.tx.send(packet.clone()).await {
Ok(_) => debug!(client_id = %tunnel_stream.client.id, "sent data packet to client"),
Err(_) => {
error!("failed to forward tcp packets to disconnected client. dropping client.");
Connections::remove(&tunnel_stream.client);
}
}
}
} | identifier_body |
remote.rs | use super::*;
use tokio::io::{AsyncReadExt, AsyncWriteExt};
use tokio::io::{ReadHalf, WriteHalf};
use tokio::net::TcpStream;
use tracing::debug;
use tracing::{error, Instrument};
async fn direct_to_control(mut incoming: TcpStream) {
let mut control_socket =
match TcpStream::connect(format!("localhost:{}", CONFIG.control_port)).await {
Ok(s) => s,
Err(error) => {
tracing::warn!(?error, "failed to connect to local control server");
return;
}
};
let (mut control_r, mut control_w) = control_socket.split();
let (mut incoming_r, mut incoming_w) = incoming.split();
let join_1 = tokio::io::copy(&mut control_r, &mut incoming_w);
let join_2 = tokio::io::copy(&mut incoming_r, &mut control_w);
match futures::future::join(join_1, join_2).await {
(Ok(_), Ok(_)) => {}
(Err(error), _) | (_, Err(error)) => {
tracing::error!(?error, "directing stream to control failed");
}
}
}
#[tracing::instrument(skip(socket))]
pub async fn accept_connection(socket: TcpStream) {
// peek the host of the http request
// if health check, then handle it and return
let StreamWithPeekedHost {
mut socket,
host,
forwarded_for,
} = match peek_http_request_host(socket).await {
Some(s) => s,
None => return,
};
tracing::info!(%host, %forwarded_for, "new remote connection");
// parse the host string and find our client
if CONFIG.allowed_hosts.contains(&host) {
error!("redirect to homepage");
let _ = socket.write_all(HTTP_REDIRECT_RESPONSE).await;
return;
}
let host = match validate_host_prefix(&host) {
Some(sub_domain) => sub_domain,
None => {
error!("invalid host specified");
let _ = socket.write_all(HTTP_INVALID_HOST_RESPONSE).await;
return;
}
};
// Special case -- we redirect this tcp connection to the control server
if host.as_str() == "wormhole" {
direct_to_control(socket).await;
return;
}
// find the client listening for this host
let client = match Connections::find_by_host(&host) {
Some(client) => client.clone(),
None => {
// check other instances that may be serving this host
match network::instance_for_host(&host).await {
Ok((instance, _)) => {
network::proxy_stream(instance, socket).await;
return;
}
Err(network::Error::DoesNotServeHost) => {
error!(%host, "no tunnel found");
let _ = socket.write_all(HTTP_NOT_FOUND_RESPONSE).await;
return;
}
Err(error) => {
error!(%host, ?error, "failed to find instance");
let _ = socket.write_all(HTTP_ERROR_LOCATING_HOST_RESPONSE).await;
return;
}
}
}
};
// allocate a new stream for this request
let (active_stream, queue_rx) = ActiveStream::new(client.clone());
let stream_id = active_stream.id.clone();
tracing::debug!(
stream_id = %active_stream.id.to_string(),
"new stream connected"
);
let (stream, sink) = tokio::io::split(socket);
// add our stream
ACTIVE_STREAMS.insert(stream_id.clone(), active_stream.clone());
// read from socket, write to client
let span = observability::remote_trace("process_tcp_stream");
tokio::spawn(
async move {
process_tcp_stream(active_stream, stream).await;
}
.instrument(span),
);
// read from client, write to socket
let span = observability::remote_trace("tunnel_to_stream");
tokio::spawn(
async move {
tunnel_to_stream(host, stream_id, sink, queue_rx).await;
}
.instrument(span),
);
}
fn validate_host_prefix(host: &str) -> Option<String> {
let url = format!("http://{}", host);
let host = match url::Url::parse(&url)
.map(|u| u.host().map(|h| h.to_owned()))
.unwrap_or(None)
{
Some(domain) => domain.to_string(),
None => {
error!("invalid host header");
return None;
}
};
let domain_segments = host.split(".").collect::<Vec<&str>>();
let prefix = &domain_segments[0];
let remaining = &domain_segments[1..].join(".");
if CONFIG.allowed_hosts.contains(remaining) {
Some(prefix.to_string())
} else {
None
}
}
/// Response Constants
const HTTP_REDIRECT_RESPONSE:&'static [u8] = b"HTTP/1.1 301 Moved Permanently\r\nLocation: https://tunnelto.dev/\r\nContent-Length: 20\r\n\r\nhttps://tunnelto.dev";
const HTTP_INVALID_HOST_RESPONSE: &'static [u8] =
b"HTTP/1.1 400\r\nContent-Length: 23\r\n\r\nError: Invalid Hostname";
const HTTP_NOT_FOUND_RESPONSE: &'static [u8] =
b"HTTP/1.1 404\r\nContent-Length: 23\r\n\r\nError: Tunnel Not Found";
const HTTP_ERROR_LOCATING_HOST_RESPONSE: &'static [u8] =
b"HTTP/1.1 500\r\nContent-Length: 27\r\n\r\nError: Error finding tunnel";
const HTTP_TUNNEL_REFUSED_RESPONSE: &'static [u8] =
b"HTTP/1.1 500\r\nContent-Length: 32\r\n\r\nTunnel says: connection refused.";
const HTTP_OK_RESPONSE: &'static [u8] = b"HTTP/1.1 200 OK\r\nContent-Length: 2\r\n\r\nok";
const HEALTH_CHECK_PATH: &'static [u8] = b"/0xDEADBEEF_HEALTH_CHECK";
struct StreamWithPeekedHost {
socket: TcpStream,
host: String,
forwarded_for: String,
}
/// Filter incoming remote streams
#[tracing::instrument(skip(socket))]
async fn peek_http_request_host(mut socket: TcpStream) -> Option<StreamWithPeekedHost> {
/// Note we return out if the host header is not found
/// within the first 4kb of the request.
const MAX_HEADER_PEAK: usize = 4096;
let mut buf = vec![0; MAX_HEADER_PEAK]; //1kb
tracing::debug!("checking stream headers");
let n = match socket.peek(&mut buf).await {
Ok(n) => n,
Err(e) => {
error!("failed to read from tcp socket to determine host: {:?}", e);
return None;
}
};
// make sure we're not peeking the same header bytes
if n == 0 {
tracing::debug!("unable to peek header bytes");
return None;
}
tracing::debug!("peeked {} stream bytes ", n);
let mut headers = [httparse::EMPTY_HEADER; 64]; // 30 seems like a generous # of headers
let mut req = httparse::Request::new(&mut headers);
if let Err(e) = req.parse(&buf[..n]) {
error!("failed to parse incoming http bytes: {:?}", e);
return None;
}
// Handle the health check route
if req.path.map(|s| s.as_bytes()) == Some(HEALTH_CHECK_PATH) {
let _ = socket.write_all(HTTP_OK_RESPONSE).await.map_err(|e| {
error!("failed to write health_check: {:?}", e);
});
return None;
}
// get the ip addr in the header
let forwarded_for = if let Some(Ok(forwarded_for)) = req
.headers
.iter()
.filter(|h| h.name.to_lowercase() == "x-forwarded-for".to_string())
.map(|h| std::str::from_utf8(h.value))
.next()
{
forwarded_for.to_string()
} else { | // look for a host header
if let Some(Ok(host)) = req
.headers
.iter()
.filter(|h| h.name.to_lowercase() == "host".to_string())
.map(|h| std::str::from_utf8(h.value))
.next()
{
tracing::info!(host=%host, path=%req.path.unwrap_or_default(), "peek request");
return Some(StreamWithPeekedHost {
socket,
host: host.to_string(),
forwarded_for,
});
}
tracing::info!("found no host header, dropping connection.");
None
}
/// Process Messages from the control path in & out of the remote stream
#[tracing::instrument(skip(tunnel_stream, tcp_stream))]
async fn process_tcp_stream(mut tunnel_stream: ActiveStream, mut tcp_stream: ReadHalf<TcpStream>) {
// send initial control stream init to client
control_server::send_client_stream_init(tunnel_stream.clone()).await;
// now read from stream and forward to clients
let mut buf = [0; 1024];
loop {
// client is no longer connected
if Connections::get(&tunnel_stream.client.id).is_none() {
debug!("client disconnected, closing stream");
let _ = tunnel_stream.tx.send(StreamMessage::NoClientTunnel).await;
tunnel_stream.tx.close_channel();
return;
}
// read from stream
let n = match tcp_stream.read(&mut buf).await {
Ok(n) => n,
Err(e) => {
error!("failed to read from tcp socket: {:?}", e);
return;
}
};
if n == 0 {
debug!("stream ended");
let _ = tunnel_stream
.client
.tx
.send(ControlPacket::End(tunnel_stream.id.clone()))
.await
.map_err(|e| {
error!("failed to send end signal: {:?}", e);
});
return;
}
debug!("read {} bytes", n);
let data = &buf[..n];
let packet = ControlPacket::Data(tunnel_stream.id.clone(), data.to_vec());
match tunnel_stream.client.tx.send(packet.clone()).await {
Ok(_) => debug!(client_id = %tunnel_stream.client.id, "sent data packet to client"),
Err(_) => {
error!("failed to forward tcp packets to disconnected client. dropping client.");
Connections::remove(&tunnel_stream.client);
}
}
}
}
#[tracing::instrument(skip(sink, stream_id, queue))]
async fn tunnel_to_stream(
subdomain: String,
stream_id: StreamId,
mut sink: WriteHalf<TcpStream>,
mut queue: UnboundedReceiver<StreamMessage>,
) {
loop {
let result = queue.next().await;
let result = if let Some(message) = result {
match message {
StreamMessage::Data(data) => Some(data),
StreamMessage::TunnelRefused => {
tracing::debug!(?stream_id, "tunnel refused");
let _ = sink.write_all(HTTP_TUNNEL_REFUSED_RESPONSE).await;
None
}
StreamMessage::NoClientTunnel => {
tracing::info!(%subdomain, ?stream_id, "client tunnel not found");
let _ = sink.write_all(HTTP_NOT_FOUND_RESPONSE).await;
None
}
}
} else {
None
};
let data = match result {
Some(data) => data,
None => {
tracing::debug!("done tunneling to sink");
let _ = sink.shutdown().await.map_err(|_e| {
error!("error shutting down tcp stream");
});
ACTIVE_STREAMS.remove(&stream_id);
return;
}
};
let result = sink.write_all(&data).await;
if let Some(error) = result.err() {
tracing::warn!(?error, "stream closed, disconnecting");
return;
}
}
} | String::default()
};
| random_line_split |
remote.rs | use super::*;
use tokio::io::{AsyncReadExt, AsyncWriteExt};
use tokio::io::{ReadHalf, WriteHalf};
use tokio::net::TcpStream;
use tracing::debug;
use tracing::{error, Instrument};
async fn direct_to_control(mut incoming: TcpStream) {
let mut control_socket =
match TcpStream::connect(format!("localhost:{}", CONFIG.control_port)).await {
Ok(s) => s,
Err(error) => {
tracing::warn!(?error, "failed to connect to local control server");
return;
}
};
let (mut control_r, mut control_w) = control_socket.split();
let (mut incoming_r, mut incoming_w) = incoming.split();
let join_1 = tokio::io::copy(&mut control_r, &mut incoming_w);
let join_2 = tokio::io::copy(&mut incoming_r, &mut control_w);
match futures::future::join(join_1, join_2).await {
(Ok(_), Ok(_)) => {}
(Err(error), _) | (_, Err(error)) => {
tracing::error!(?error, "directing stream to control failed");
}
}
}
#[tracing::instrument(skip(socket))]
pub async fn accept_connection(socket: TcpStream) {
// peek the host of the http request
// if health check, then handle it and return
let StreamWithPeekedHost {
mut socket,
host,
forwarded_for,
} = match peek_http_request_host(socket).await {
Some(s) => s,
None => return,
};
tracing::info!(%host, %forwarded_for, "new remote connection");
// parse the host string and find our client
if CONFIG.allowed_hosts.contains(&host) {
error!("redirect to homepage");
let _ = socket.write_all(HTTP_REDIRECT_RESPONSE).await;
return;
}
let host = match validate_host_prefix(&host) {
Some(sub_domain) => sub_domain,
None => {
error!("invalid host specified");
let _ = socket.write_all(HTTP_INVALID_HOST_RESPONSE).await;
return;
}
};
// Special case -- we redirect this tcp connection to the control server
if host.as_str() == "wormhole" {
direct_to_control(socket).await;
return;
}
// find the client listening for this host
let client = match Connections::find_by_host(&host) {
Some(client) => client.clone(),
None => {
// check other instances that may be serving this host
match network::instance_for_host(&host).await {
Ok((instance, _)) => {
network::proxy_stream(instance, socket).await;
return;
}
Err(network::Error::DoesNotServeHost) => {
error!(%host, "no tunnel found");
let _ = socket.write_all(HTTP_NOT_FOUND_RESPONSE).await;
return;
}
Err(error) => {
error!(%host, ?error, "failed to find instance");
let _ = socket.write_all(HTTP_ERROR_LOCATING_HOST_RESPONSE).await;
return;
}
}
}
};
// allocate a new stream for this request
let (active_stream, queue_rx) = ActiveStream::new(client.clone());
let stream_id = active_stream.id.clone();
tracing::debug!(
stream_id = %active_stream.id.to_string(),
"new stream connected"
);
let (stream, sink) = tokio::io::split(socket);
// add our stream
ACTIVE_STREAMS.insert(stream_id.clone(), active_stream.clone());
// read from socket, write to client
let span = observability::remote_trace("process_tcp_stream");
tokio::spawn(
async move {
process_tcp_stream(active_stream, stream).await;
}
.instrument(span),
);
// read from client, write to socket
let span = observability::remote_trace("tunnel_to_stream");
tokio::spawn(
async move {
tunnel_to_stream(host, stream_id, sink, queue_rx).await;
}
.instrument(span),
);
}
fn validate_host_prefix(host: &str) -> Option<String> {
let url = format!("http://{}", host);
let host = match url::Url::parse(&url)
.map(|u| u.host().map(|h| h.to_owned()))
.unwrap_or(None)
{
Some(domain) => domain.to_string(),
None => {
error!("invalid host header");
return None;
}
};
let domain_segments = host.split(".").collect::<Vec<&str>>();
let prefix = &domain_segments[0];
let remaining = &domain_segments[1..].join(".");
if CONFIG.allowed_hosts.contains(remaining) {
Some(prefix.to_string())
} else {
None
}
}
/// Response Constants
const HTTP_REDIRECT_RESPONSE:&'static [u8] = b"HTTP/1.1 301 Moved Permanently\r\nLocation: https://tunnelto.dev/\r\nContent-Length: 20\r\n\r\nhttps://tunnelto.dev";
const HTTP_INVALID_HOST_RESPONSE: &'static [u8] =
b"HTTP/1.1 400\r\nContent-Length: 23\r\n\r\nError: Invalid Hostname";
const HTTP_NOT_FOUND_RESPONSE: &'static [u8] =
b"HTTP/1.1 404\r\nContent-Length: 23\r\n\r\nError: Tunnel Not Found";
const HTTP_ERROR_LOCATING_HOST_RESPONSE: &'static [u8] =
b"HTTP/1.1 500\r\nContent-Length: 27\r\n\r\nError: Error finding tunnel";
const HTTP_TUNNEL_REFUSED_RESPONSE: &'static [u8] =
b"HTTP/1.1 500\r\nContent-Length: 32\r\n\r\nTunnel says: connection refused.";
const HTTP_OK_RESPONSE: &'static [u8] = b"HTTP/1.1 200 OK\r\nContent-Length: 2\r\n\r\nok";
const HEALTH_CHECK_PATH: &'static [u8] = b"/0xDEADBEEF_HEALTH_CHECK";
struct StreamWithPeekedHost {
socket: TcpStream,
host: String,
forwarded_for: String,
}
/// Filter incoming remote streams
#[tracing::instrument(skip(socket))]
async fn peek_http_request_host(mut socket: TcpStream) -> Option<StreamWithPeekedHost> {
/// Note we return out if the host header is not found
/// within the first 4kb of the request.
const MAX_HEADER_PEAK: usize = 4096;
let mut buf = vec![0; MAX_HEADER_PEAK]; //1kb
tracing::debug!("checking stream headers");
let n = match socket.peek(&mut buf).await {
Ok(n) => n,
Err(e) => {
error!("failed to read from tcp socket to determine host: {:?}", e);
return None;
}
};
// make sure we're not peeking the same header bytes
if n == 0 {
tracing::debug!("unable to peek header bytes");
return None;
}
tracing::debug!("peeked {} stream bytes ", n);
let mut headers = [httparse::EMPTY_HEADER; 64]; // 30 seems like a generous # of headers
let mut req = httparse::Request::new(&mut headers);
if let Err(e) = req.parse(&buf[..n]) {
error!("failed to parse incoming http bytes: {:?}", e);
return None;
}
// Handle the health check route
if req.path.map(|s| s.as_bytes()) == Some(HEALTH_CHECK_PATH) {
let _ = socket.write_all(HTTP_OK_RESPONSE).await.map_err(|e| {
error!("failed to write health_check: {:?}", e);
});
return None;
}
// get the ip addr in the header
let forwarded_for = if let Some(Ok(forwarded_for)) = req
.headers
.iter()
.filter(|h| h.name.to_lowercase() == "x-forwarded-for".to_string())
.map(|h| std::str::from_utf8(h.value))
.next()
{
forwarded_for.to_string()
} else {
String::default()
};
// look for a host header
if let Some(Ok(host)) = req
.headers
.iter()
.filter(|h| h.name.to_lowercase() == "host".to_string())
.map(|h| std::str::from_utf8(h.value))
.next()
{
tracing::info!(host=%host, path=%req.path.unwrap_or_default(), "peek request");
return Some(StreamWithPeekedHost {
socket,
host: host.to_string(),
forwarded_for,
});
}
tracing::info!("found no host header, dropping connection.");
None
}
/// Process Messages from the control path in & out of the remote stream
#[tracing::instrument(skip(tunnel_stream, tcp_stream))]
async fn | (mut tunnel_stream: ActiveStream, mut tcp_stream: ReadHalf<TcpStream>) {
// send initial control stream init to client
control_server::send_client_stream_init(tunnel_stream.clone()).await;
// now read from stream and forward to clients
let mut buf = [0; 1024];
loop {
// client is no longer connected
if Connections::get(&tunnel_stream.client.id).is_none() {
debug!("client disconnected, closing stream");
let _ = tunnel_stream.tx.send(StreamMessage::NoClientTunnel).await;
tunnel_stream.tx.close_channel();
return;
}
// read from stream
let n = match tcp_stream.read(&mut buf).await {
Ok(n) => n,
Err(e) => {
error!("failed to read from tcp socket: {:?}", e);
return;
}
};
if n == 0 {
debug!("stream ended");
let _ = tunnel_stream
.client
.tx
.send(ControlPacket::End(tunnel_stream.id.clone()))
.await
.map_err(|e| {
error!("failed to send end signal: {:?}", e);
});
return;
}
debug!("read {} bytes", n);
let data = &buf[..n];
let packet = ControlPacket::Data(tunnel_stream.id.clone(), data.to_vec());
match tunnel_stream.client.tx.send(packet.clone()).await {
Ok(_) => debug!(client_id = %tunnel_stream.client.id, "sent data packet to client"),
Err(_) => {
error!("failed to forward tcp packets to disconnected client. dropping client.");
Connections::remove(&tunnel_stream.client);
}
}
}
}
#[tracing::instrument(skip(sink, stream_id, queue))]
async fn tunnel_to_stream(
subdomain: String,
stream_id: StreamId,
mut sink: WriteHalf<TcpStream>,
mut queue: UnboundedReceiver<StreamMessage>,
) {
loop {
let result = queue.next().await;
let result = if let Some(message) = result {
match message {
StreamMessage::Data(data) => Some(data),
StreamMessage::TunnelRefused => {
tracing::debug!(?stream_id, "tunnel refused");
let _ = sink.write_all(HTTP_TUNNEL_REFUSED_RESPONSE).await;
None
}
StreamMessage::NoClientTunnel => {
tracing::info!(%subdomain, ?stream_id, "client tunnel not found");
let _ = sink.write_all(HTTP_NOT_FOUND_RESPONSE).await;
None
}
}
} else {
None
};
let data = match result {
Some(data) => data,
None => {
tracing::debug!("done tunneling to sink");
let _ = sink.shutdown().await.map_err(|_e| {
error!("error shutting down tcp stream");
});
ACTIVE_STREAMS.remove(&stream_id);
return;
}
};
let result = sink.write_all(&data).await;
if let Some(error) = result.err() {
tracing::warn!(?error, "stream closed, disconnecting");
return;
}
}
}
| process_tcp_stream | identifier_name |
file-record.ts | import { getIconFromExt, SvgIcon } from './icons';
import utils from './utils';
import { RGBA, ImageThumbnail, VideoThumbnail } from './utils';
interface Dimensions {
height: number;
width: number;
}
interface Options {
accept?: string;
maxSize?: string;
read: boolean;
thumbnailSize?: number;
averageColor?: boolean;
withCredentials?: boolean;
}
interface ErrorText {
common?: string;
type?: string;
size?: string;
upload?: string;
}
interface ErrorFlags {
common?: boolean;
type?: boolean;
size?: boolean;
upload?: false | string;
}
interface RawFileRecord {
url: string | ((value?: string) => string | undefined | Promise<FileRecord>);
urlResized: string | null;
src: () => any;
name: any;
lastModified: number;
sizeText: string;
size: number;
type: string;
ext: string;
color: string;
file: File;
progress: number | ((progress?: number) => number | void);
error?: false | ErrorFlags;
dimensions: Dimensions;
videoThumbnail: string;
imageColor: RGBA;
customName: string;
upload: UploadData;
}
interface DummyFile {
name: string;
size: number;
type: string;
lastModified: number;
lastModifiedDate: Date;
}
interface UploadData {
data: any;
error: string | false;
}
export { Dimensions, Options, RawFileRecord };
class FileRecord {
public static getFromRaw(
rawFileRecord: RawFileRecord,
options: Options,
isSync = false
): FileRecord | Promise<FileRecord> {
const fileRecord = new FileRecord(rawFileRecord, options);
const promise = fileRecord.setUrl(rawFileRecord.url as string);
rawFileRecord.progress = fileRecord.progress.bind(fileRecord); // convert it as a function
rawFileRecord.src = fileRecord.src.bind(fileRecord);
rawFileRecord.name = fileRecord.name.bind(fileRecord); // convert it as a function
if (isSync) {
return fileRecord;
}
return promise;
}
public static fromRaw(rawFileRecord: RawFileRecord, options: Options): Promise<FileRecord> {
return FileRecord.getFromRaw(rawFileRecord, options, false) as Promise<FileRecord>;
}
public static fromRawSync(rawFileRecord: RawFileRecord, options: Options): FileRecord {
return FileRecord.getFromRaw(rawFileRecord, options, true) as FileRecord;
}
public static fromRawArray(rawFileRecords: RawFileRecord[], options: Options): Promise<FileRecord[]> {
const promises: Array<Promise<FileRecord>> = [];
for (const rawFileRecord of rawFileRecords) {
promises.push(FileRecord.fromRaw(rawFileRecord, options));
}
return Promise.all(promises);
}
public static toRawArray(fileRecords: FileRecord[]): RawFileRecord[] {
const rawFileRecords: RawFileRecord[] = [];
for (const fileRecord of fileRecords) {
rawFileRecords.push(fileRecord.toRaw());
}
return rawFileRecords;
}
public static readFile(fileRecord: FileRecord): Promise<FileRecord> {
return new Promise((resolve, reject) => {
if (!fileRecord.read) {
fileRecord.setUrl(null).then(
() => {
resolve(fileRecord);
},
(err) => {
// ignore error
resolve(fileRecord);
}
);
return;
}
utils.getDataURL(fileRecord.file).then((dataUrl) => {
fileRecord.setUrl(dataUrl).then(() => {
resolve(fileRecord);
}, reject);
}, reject);
});
}
public static readFiles(fileRecords: FileRecord[]): Promise<FileRecord[]> {
const promises: Array<Promise<FileRecord>> = [];
for (const fileRecord of fileRecords) {
promises.push(FileRecord.readFile(fileRecord));
}
return Promise.all(promises);
}
public urlValue: null | string = null;
public urlResized: null | string = null;
public image: HTMLImageElement | {} = {};
public isPlayingAv: boolean = false;
public oldFileName: string | null = null;
public oldCustomName: string | null = null;
public upload: UploadData = { data: null, error: false };
public raw: RawFileRecord;
public progressInternal: number;
public accept?: string;
public dimensions: Dimensions;
public error: false | ErrorFlags;
public file: File;
public height: undefined | number | string;
public width: undefined | number | string;
public id: string;
public imageColor?: RGBA;
public lastKnownSrc: null | string;
public maxSize?: string;
public options: Options;
public read: boolean;
public thumbnailSize: number;
public videoThumbnail: any;
public customName: any;
public xhr?: XMLHttpRequest;
public xhrQueue?: () => any;
public stopAv?: (() => any) | null;
public tusUpload?: any;
public calculateAverageColor: boolean;
public constructor(data: RawFileRecord, options: Options) {
this.urlValue = null;
this.urlResized = null;
this.lastKnownSrc = null;
this.image = {};
this.isPlayingAv = false;
this.oldFileName = null;
this.oldCustomName = null;
this.raw = data;
this.file = data.file instanceof File ? data.file : (this.createDummyFile(data) as any);
this.progressInternal = !isNaN(data.progress as number) ? (data.progress as number) : 0;
// this.width = FileRecord.defaultWidth;
// this.height = FileRecord.defaultHeight;
this.thumbnailSize = options.thumbnailSize || 360;
this.read = !!options.read;
this.dimensions = data.dimensions || { width: 0, height: 0 };
this.dimensions.width = this.dimensions.width || 0;
this.dimensions.height = this.dimensions.height || 0;
this.error = data.error || false;
this.options = options;
this.maxSize = options.maxSize;
this.accept = options.accept;
this.id = Math.random() + ':' + new Date().getTime();
this.videoThumbnail = data.videoThumbnail;
this.imageColor = data.imageColor;
this.customName = data.customName;
this.calculateAverageColor = options.averageColor !== undefined ? options.averageColor : true;
this.validate();
}
// populate(data, options = {}) {}
public createDummyFile(data: RawFileRecord): DummyFile {
const file: DummyFile = {} as DummyFile;
file.lastModified = data.lastModified;
const d = new Date();
if (file.lastModified) {
d.setTime(file.lastModified);
}
file.lastModifiedDate = d;
file.name = typeof data.name === 'function' ? data.name() : data.name;
file.size = data.size;
file.type = data.type;
return file;
}
public hasProgress(): boolean {
return !isNaN(this.progressInternal); // && this._progress <= 100;
}
public progress(value?: number): number | void {
if (value !== undefined) {
this.progressInternal = value;
return;
}
return this.progressInternal || 0;
}
public url(value?: string): string | undefined | Promise<this> {
if (value !== undefined) {
return this.setUrl(value);
}
return this.urlValue || undefined;
}
public src(): string {
if (this.isImage()) {
return this.urlResized || this.urlValue || (this.file as any).url;
}
if (this.isPlayableVideo()) {
return this.videoThumbnail || '';
}
return '';
}
public size(): string {
if (!this.file) {
return '';
}
return utils.getSizeFormatted(this.file.size);
}
public ext(): string {
if (this.file && this.file.name.indexOf('.') !== -1) {
return (this.file.name as any).split('.').pop();
}
return '?';
// return this.file.type.split('/').shift();
}
public name(withoutExt?: boolean): string {
const ext = this.ext();
if (this.customName) {
return this.customName + (withoutExt ? '' : ext !== '?' ? '.' + ext : '');
}
const name = this.file && this.file.name;
if (withoutExt) {
if (ext !== '?') {
return name.substr(0, name.length - (ext.length + 1));
}
}
return name;
}
public isDarkColor(): boolean {
if (this.imageColor) {
const rgb = this.imageColor;
const darkPoint = 20;
return rgb[0] <= darkPoint && rgb[1] <= darkPoint && rgb[2] <= darkPoint;
}
return false;
}
public color(): string {
if (this.imageColor) {
const rgb = this.imageColor;
return 'rgb(' + rgb[0] + ', ' + rgb[1] + ', ' + rgb[2] + ')';
}
if (this.isImage()) {
return 'transparent';
}
const ext = this.ext();
const svgIcon = this.icon();
// var svgIcon = getIconFromExt(ext);
if (svgIcon.color) {
return svgIcon.color;
}
return utils.getColorForText(ext);
}
public isImage(): boolean {
return this.file && !!this.file.type.match(/image((?!vnd).)*$/i);
}
public isVideo(): boolean {
return this.file && this.file.type.indexOf('video') !== -1;
}
public isPlayableVideo(): boolean {
return this.icon().category === 'video-playable';
}
public isAudio(): boolean {
return this.file && this.file.type.indexOf('audio') !== -1;
}
public isPlayableAudio(): boolean {
return this.icon().category === 'audio-playable';
}
public isText(): boolean {
return this.file && this.file.type.indexOf('text') !== -1;
}
public setUrl(url: string | null): Promise<this> {
this.urlValue = url;
return new Promise((resolve, reject) => {
if (this.isImage()) {
this.resizeImage().then(
() => {
resolve(this);
},
(err) => {
resolve(this);
}
);
return;
}
resolve(this);
});
}
public | (resized: ImageThumbnail | null) {
if (!resized) {
return;
}
this.urlResized = resized.url;
this.image = resized.image;
if (resized.image && resized.image.width && resized.image.height) {
this.dimensions.width = resized.image.width;
this.dimensions.height = resized.image.height;
}
this.lastKnownSrc = this.urlResized;
this.imageColor = resized.color;
}
public resizeImage(): Promise<this> {
return new Promise((resolve, reject) => {
utils
.resizeImage(this.thumbnailSize, this.file, this.urlValue as string, this.calculateAverageColor)
.then((resized) => {
this.imageResized(resized);
resolve(this);
})
.catch(reject);
});
}
public icon(): SvgIcon {
const ext = this.ext();
const svgIcon = getIconFromExt(ext);
return svgIcon;
}
public getErrorMessage(errorText?: ErrorText): string {
const error = this.error;
if (!error) {
return '';
}
errorText = errorText || {};
errorText = {
common: errorText.common || 'Invalid file.',
type: errorText.type || 'Invalid file type.',
size: errorText.size || 'Files should not exceed ' + this.maxSize + ' in size',
};
if (error.type) {
return errorText.type as string;
} else if (error.size) {
return errorText.size as string;
} else if (error.upload) {
return this.upload.error ? this.upload.error : error.upload;
}
return errorText.common as string;
}
public toRaw(): RawFileRecord {
const raw = this.raw || ({} as RawFileRecord);
// raw.url = this.urlValue;
raw.url = this.url.bind(this);
raw.urlResized = this.urlResized;
raw.src = this.src.bind(this);
raw.name = this.name.bind(this);
raw.lastModified = this.file.lastModified;
raw.sizeText = this.size();
raw.size = this.file.size;
raw.type = this.file.type;
raw.ext = this.ext();
raw.color = this.color();
raw.file = this.file;
raw.progress = this.progress.bind(this); // pass it as a function
raw.upload = this.upload;
if (!('error' in raw)) {
Object.defineProperty(raw, 'error', {
get: () => {
return this.error;
},
});
}
raw.dimensions = this.dimensions;
return raw;
}
public validate(): void {
const validType = utils.validateType(this.file, this.accept);
const validSize = utils.validateSize(this.file, this.maxSize as string);
if (!validType || !validSize) {
this.error = {
type: !validType,
size: !validSize,
};
} else {
this.error = false;
}
}
}
export default FileRecord;
| imageResized | identifier_name |
file-record.ts | import { getIconFromExt, SvgIcon } from './icons';
import utils from './utils';
import { RGBA, ImageThumbnail, VideoThumbnail } from './utils';
interface Dimensions {
height: number;
width: number;
}
interface Options {
accept?: string;
maxSize?: string;
read: boolean;
thumbnailSize?: number;
averageColor?: boolean;
withCredentials?: boolean;
}
interface ErrorText {
common?: string;
type?: string;
size?: string;
upload?: string;
}
interface ErrorFlags {
common?: boolean;
type?: boolean;
size?: boolean;
upload?: false | string;
}
interface RawFileRecord {
url: string | ((value?: string) => string | undefined | Promise<FileRecord>);
urlResized: string | null;
src: () => any;
name: any;
lastModified: number;
sizeText: string;
size: number;
type: string;
ext: string;
color: string;
file: File;
progress: number | ((progress?: number) => number | void);
error?: false | ErrorFlags;
dimensions: Dimensions;
videoThumbnail: string;
imageColor: RGBA;
customName: string;
upload: UploadData;
}
interface DummyFile {
name: string;
size: number;
type: string;
lastModified: number;
lastModifiedDate: Date;
}
interface UploadData {
data: any;
error: string | false;
}
export { Dimensions, Options, RawFileRecord };
class FileRecord {
public static getFromRaw(
rawFileRecord: RawFileRecord,
options: Options,
isSync = false
): FileRecord | Promise<FileRecord> {
const fileRecord = new FileRecord(rawFileRecord, options);
const promise = fileRecord.setUrl(rawFileRecord.url as string);
rawFileRecord.progress = fileRecord.progress.bind(fileRecord); // convert it as a function
rawFileRecord.src = fileRecord.src.bind(fileRecord);
rawFileRecord.name = fileRecord.name.bind(fileRecord); // convert it as a function
if (isSync) {
return fileRecord;
}
return promise;
}
public static fromRaw(rawFileRecord: RawFileRecord, options: Options): Promise<FileRecord> {
return FileRecord.getFromRaw(rawFileRecord, options, false) as Promise<FileRecord>;
}
public static fromRawSync(rawFileRecord: RawFileRecord, options: Options): FileRecord {
return FileRecord.getFromRaw(rawFileRecord, options, true) as FileRecord;
}
public static fromRawArray(rawFileRecords: RawFileRecord[], options: Options): Promise<FileRecord[]> {
const promises: Array<Promise<FileRecord>> = [];
for (const rawFileRecord of rawFileRecords) {
promises.push(FileRecord.fromRaw(rawFileRecord, options));
}
return Promise.all(promises);
}
public static toRawArray(fileRecords: FileRecord[]): RawFileRecord[] {
const rawFileRecords: RawFileRecord[] = [];
for (const fileRecord of fileRecords) {
rawFileRecords.push(fileRecord.toRaw());
}
return rawFileRecords;
}
public static readFile(fileRecord: FileRecord): Promise<FileRecord> {
return new Promise((resolve, reject) => {
if (!fileRecord.read) {
fileRecord.setUrl(null).then(
() => {
resolve(fileRecord);
},
(err) => {
// ignore error
resolve(fileRecord);
}
);
return;
}
utils.getDataURL(fileRecord.file).then((dataUrl) => {
fileRecord.setUrl(dataUrl).then(() => {
resolve(fileRecord);
}, reject);
}, reject);
});
}
public static readFiles(fileRecords: FileRecord[]): Promise<FileRecord[]> {
const promises: Array<Promise<FileRecord>> = [];
for (const fileRecord of fileRecords) {
promises.push(FileRecord.readFile(fileRecord));
}
return Promise.all(promises);
}
public urlValue: null | string = null;
public urlResized: null | string = null;
public image: HTMLImageElement | {} = {};
public isPlayingAv: boolean = false;
public oldFileName: string | null = null;
public oldCustomName: string | null = null;
public upload: UploadData = { data: null, error: false };
public raw: RawFileRecord;
public progressInternal: number;
public accept?: string;
public dimensions: Dimensions;
public error: false | ErrorFlags;
public file: File;
public height: undefined | number | string;
public width: undefined | number | string;
public id: string;
public imageColor?: RGBA;
public lastKnownSrc: null | string;
public maxSize?: string;
public options: Options;
public read: boolean;
public thumbnailSize: number;
public videoThumbnail: any;
public customName: any;
public xhr?: XMLHttpRequest;
public xhrQueue?: () => any;
public stopAv?: (() => any) | null;
public tusUpload?: any;
public calculateAverageColor: boolean;
public constructor(data: RawFileRecord, options: Options) {
this.urlValue = null;
this.urlResized = null;
this.lastKnownSrc = null;
this.image = {};
this.isPlayingAv = false;
this.oldFileName = null;
this.oldCustomName = null;
this.raw = data;
this.file = data.file instanceof File ? data.file : (this.createDummyFile(data) as any);
this.progressInternal = !isNaN(data.progress as number) ? (data.progress as number) : 0;
// this.width = FileRecord.defaultWidth;
// this.height = FileRecord.defaultHeight;
this.thumbnailSize = options.thumbnailSize || 360;
this.read = !!options.read;
this.dimensions = data.dimensions || { width: 0, height: 0 };
this.dimensions.width = this.dimensions.width || 0;
this.dimensions.height = this.dimensions.height || 0;
this.error = data.error || false;
this.options = options;
this.maxSize = options.maxSize;
this.accept = options.accept;
this.id = Math.random() + ':' + new Date().getTime();
this.videoThumbnail = data.videoThumbnail;
this.imageColor = data.imageColor;
this.customName = data.customName;
this.calculateAverageColor = options.averageColor !== undefined ? options.averageColor : true;
this.validate();
}
// populate(data, options = {}) {}
public createDummyFile(data: RawFileRecord): DummyFile {
const file: DummyFile = {} as DummyFile;
file.lastModified = data.lastModified;
const d = new Date();
if (file.lastModified) {
d.setTime(file.lastModified);
}
file.lastModifiedDate = d;
file.name = typeof data.name === 'function' ? data.name() : data.name;
file.size = data.size;
file.type = data.type;
return file;
}
public hasProgress(): boolean {
return !isNaN(this.progressInternal); // && this._progress <= 100;
}
public progress(value?: number): number | void {
if (value !== undefined) {
this.progressInternal = value;
return;
}
return this.progressInternal || 0;
}
public url(value?: string): string | undefined | Promise<this> {
if (value !== undefined) {
return this.setUrl(value);
}
return this.urlValue || undefined;
}
public src(): string {
if (this.isImage()) {
return this.urlResized || this.urlValue || (this.file as any).url;
}
if (this.isPlayableVideo()) {
return this.videoThumbnail || '';
}
return '';
}
public size(): string {
if (!this.file) {
return '';
}
return utils.getSizeFormatted(this.file.size);
}
public ext(): string {
if (this.file && this.file.name.indexOf('.') !== -1) {
return (this.file.name as any).split('.').pop();
}
return '?';
// return this.file.type.split('/').shift();
}
public name(withoutExt?: boolean): string {
const ext = this.ext();
if (this.customName) {
return this.customName + (withoutExt ? '' : ext !== '?' ? '.' + ext : '');
}
const name = this.file && this.file.name;
if (withoutExt) {
if (ext !== '?') {
return name.substr(0, name.length - (ext.length + 1));
}
}
return name;
}
public isDarkColor(): boolean {
if (this.imageColor) {
const rgb = this.imageColor;
const darkPoint = 20;
return rgb[0] <= darkPoint && rgb[1] <= darkPoint && rgb[2] <= darkPoint;
}
return false;
}
public color(): string {
if (this.imageColor) {
const rgb = this.imageColor;
return 'rgb(' + rgb[0] + ', ' + rgb[1] + ', ' + rgb[2] + ')';
}
if (this.isImage()) {
return 'transparent';
}
const ext = this.ext();
const svgIcon = this.icon();
// var svgIcon = getIconFromExt(ext);
if (svgIcon.color) {
return svgIcon.color;
}
return utils.getColorForText(ext);
}
public isImage(): boolean {
return this.file && !!this.file.type.match(/image((?!vnd).)*$/i); |
public isPlayableVideo(): boolean {
return this.icon().category === 'video-playable';
}
public isAudio(): boolean {
return this.file && this.file.type.indexOf('audio') !== -1;
}
public isPlayableAudio(): boolean {
return this.icon().category === 'audio-playable';
}
public isText(): boolean {
return this.file && this.file.type.indexOf('text') !== -1;
}
public setUrl(url: string | null): Promise<this> {
this.urlValue = url;
return new Promise((resolve, reject) => {
if (this.isImage()) {
this.resizeImage().then(
() => {
resolve(this);
},
(err) => {
resolve(this);
}
);
return;
}
resolve(this);
});
}
public imageResized(resized: ImageThumbnail | null) {
if (!resized) {
return;
}
this.urlResized = resized.url;
this.image = resized.image;
if (resized.image && resized.image.width && resized.image.height) {
this.dimensions.width = resized.image.width;
this.dimensions.height = resized.image.height;
}
this.lastKnownSrc = this.urlResized;
this.imageColor = resized.color;
}
public resizeImage(): Promise<this> {
return new Promise((resolve, reject) => {
utils
.resizeImage(this.thumbnailSize, this.file, this.urlValue as string, this.calculateAverageColor)
.then((resized) => {
this.imageResized(resized);
resolve(this);
})
.catch(reject);
});
}
public icon(): SvgIcon {
const ext = this.ext();
const svgIcon = getIconFromExt(ext);
return svgIcon;
}
public getErrorMessage(errorText?: ErrorText): string {
const error = this.error;
if (!error) {
return '';
}
errorText = errorText || {};
errorText = {
common: errorText.common || 'Invalid file.',
type: errorText.type || 'Invalid file type.',
size: errorText.size || 'Files should not exceed ' + this.maxSize + ' in size',
};
if (error.type) {
return errorText.type as string;
} else if (error.size) {
return errorText.size as string;
} else if (error.upload) {
return this.upload.error ? this.upload.error : error.upload;
}
return errorText.common as string;
}
public toRaw(): RawFileRecord {
const raw = this.raw || ({} as RawFileRecord);
// raw.url = this.urlValue;
raw.url = this.url.bind(this);
raw.urlResized = this.urlResized;
raw.src = this.src.bind(this);
raw.name = this.name.bind(this);
raw.lastModified = this.file.lastModified;
raw.sizeText = this.size();
raw.size = this.file.size;
raw.type = this.file.type;
raw.ext = this.ext();
raw.color = this.color();
raw.file = this.file;
raw.progress = this.progress.bind(this); // pass it as a function
raw.upload = this.upload;
if (!('error' in raw)) {
Object.defineProperty(raw, 'error', {
get: () => {
return this.error;
},
});
}
raw.dimensions = this.dimensions;
return raw;
}
public validate(): void {
const validType = utils.validateType(this.file, this.accept);
const validSize = utils.validateSize(this.file, this.maxSize as string);
if (!validType || !validSize) {
this.error = {
type: !validType,
size: !validSize,
};
} else {
this.error = false;
}
}
}
export default FileRecord; | }
public isVideo(): boolean {
return this.file && this.file.type.indexOf('video') !== -1;
} | random_line_split |
file-record.ts | import { getIconFromExt, SvgIcon } from './icons';
import utils from './utils';
import { RGBA, ImageThumbnail, VideoThumbnail } from './utils';
interface Dimensions {
height: number;
width: number;
}
interface Options {
accept?: string;
maxSize?: string;
read: boolean;
thumbnailSize?: number;
averageColor?: boolean;
withCredentials?: boolean;
}
interface ErrorText {
common?: string;
type?: string;
size?: string;
upload?: string;
}
interface ErrorFlags {
common?: boolean;
type?: boolean;
size?: boolean;
upload?: false | string;
}
interface RawFileRecord {
url: string | ((value?: string) => string | undefined | Promise<FileRecord>);
urlResized: string | null;
src: () => any;
name: any;
lastModified: number;
sizeText: string;
size: number;
type: string;
ext: string;
color: string;
file: File;
progress: number | ((progress?: number) => number | void);
error?: false | ErrorFlags;
dimensions: Dimensions;
videoThumbnail: string;
imageColor: RGBA;
customName: string;
upload: UploadData;
}
interface DummyFile {
name: string;
size: number;
type: string;
lastModified: number;
lastModifiedDate: Date;
}
interface UploadData {
data: any;
error: string | false;
}
export { Dimensions, Options, RawFileRecord };
class FileRecord {
public static getFromRaw(
rawFileRecord: RawFileRecord,
options: Options,
isSync = false
): FileRecord | Promise<FileRecord> {
const fileRecord = new FileRecord(rawFileRecord, options);
const promise = fileRecord.setUrl(rawFileRecord.url as string);
rawFileRecord.progress = fileRecord.progress.bind(fileRecord); // convert it as a function
rawFileRecord.src = fileRecord.src.bind(fileRecord);
rawFileRecord.name = fileRecord.name.bind(fileRecord); // convert it as a function
if (isSync) {
return fileRecord;
}
return promise;
}
public static fromRaw(rawFileRecord: RawFileRecord, options: Options): Promise<FileRecord> {
return FileRecord.getFromRaw(rawFileRecord, options, false) as Promise<FileRecord>;
}
public static fromRawSync(rawFileRecord: RawFileRecord, options: Options): FileRecord {
return FileRecord.getFromRaw(rawFileRecord, options, true) as FileRecord;
}
public static fromRawArray(rawFileRecords: RawFileRecord[], options: Options): Promise<FileRecord[]> {
const promises: Array<Promise<FileRecord>> = [];
for (const rawFileRecord of rawFileRecords) {
promises.push(FileRecord.fromRaw(rawFileRecord, options));
}
return Promise.all(promises);
}
public static toRawArray(fileRecords: FileRecord[]): RawFileRecord[] {
const rawFileRecords: RawFileRecord[] = [];
for (const fileRecord of fileRecords) {
rawFileRecords.push(fileRecord.toRaw());
}
return rawFileRecords;
}
public static readFile(fileRecord: FileRecord): Promise<FileRecord> {
return new Promise((resolve, reject) => {
if (!fileRecord.read) {
fileRecord.setUrl(null).then(
() => {
resolve(fileRecord);
},
(err) => {
// ignore error
resolve(fileRecord);
}
);
return;
}
utils.getDataURL(fileRecord.file).then((dataUrl) => {
fileRecord.setUrl(dataUrl).then(() => {
resolve(fileRecord);
}, reject);
}, reject);
});
}
public static readFiles(fileRecords: FileRecord[]): Promise<FileRecord[]> {
const promises: Array<Promise<FileRecord>> = [];
for (const fileRecord of fileRecords) {
promises.push(FileRecord.readFile(fileRecord));
}
return Promise.all(promises);
}
public urlValue: null | string = null;
public urlResized: null | string = null;
public image: HTMLImageElement | {} = {};
public isPlayingAv: boolean = false;
public oldFileName: string | null = null;
public oldCustomName: string | null = null;
public upload: UploadData = { data: null, error: false };
public raw: RawFileRecord;
public progressInternal: number;
public accept?: string;
public dimensions: Dimensions;
public error: false | ErrorFlags;
public file: File;
public height: undefined | number | string;
public width: undefined | number | string;
public id: string;
public imageColor?: RGBA;
public lastKnownSrc: null | string;
public maxSize?: string;
public options: Options;
public read: boolean;
public thumbnailSize: number;
public videoThumbnail: any;
public customName: any;
public xhr?: XMLHttpRequest;
public xhrQueue?: () => any;
public stopAv?: (() => any) | null;
public tusUpload?: any;
public calculateAverageColor: boolean;
public constructor(data: RawFileRecord, options: Options) {
this.urlValue = null;
this.urlResized = null;
this.lastKnownSrc = null;
this.image = {};
this.isPlayingAv = false;
this.oldFileName = null;
this.oldCustomName = null;
this.raw = data;
this.file = data.file instanceof File ? data.file : (this.createDummyFile(data) as any);
this.progressInternal = !isNaN(data.progress as number) ? (data.progress as number) : 0;
// this.width = FileRecord.defaultWidth;
// this.height = FileRecord.defaultHeight;
this.thumbnailSize = options.thumbnailSize || 360;
this.read = !!options.read;
this.dimensions = data.dimensions || { width: 0, height: 0 };
this.dimensions.width = this.dimensions.width || 0;
this.dimensions.height = this.dimensions.height || 0;
this.error = data.error || false;
this.options = options;
this.maxSize = options.maxSize;
this.accept = options.accept;
this.id = Math.random() + ':' + new Date().getTime();
this.videoThumbnail = data.videoThumbnail;
this.imageColor = data.imageColor;
this.customName = data.customName;
this.calculateAverageColor = options.averageColor !== undefined ? options.averageColor : true;
this.validate();
}
// populate(data, options = {}) {}
public createDummyFile(data: RawFileRecord): DummyFile {
const file: DummyFile = {} as DummyFile;
file.lastModified = data.lastModified;
const d = new Date();
if (file.lastModified) {
d.setTime(file.lastModified);
}
file.lastModifiedDate = d;
file.name = typeof data.name === 'function' ? data.name() : data.name;
file.size = data.size;
file.type = data.type;
return file;
}
public hasProgress(): boolean {
return !isNaN(this.progressInternal); // && this._progress <= 100;
}
public progress(value?: number): number | void {
if (value !== undefined) {
this.progressInternal = value;
return;
}
return this.progressInternal || 0;
}
public url(value?: string): string | undefined | Promise<this> {
if (value !== undefined) {
return this.setUrl(value);
}
return this.urlValue || undefined;
}
public src(): string {
if (this.isImage()) {
return this.urlResized || this.urlValue || (this.file as any).url;
}
if (this.isPlayableVideo()) {
return this.videoThumbnail || '';
}
return '';
}
public size(): string {
if (!this.file) {
return '';
}
return utils.getSizeFormatted(this.file.size);
}
public ext(): string {
if (this.file && this.file.name.indexOf('.') !== -1) {
return (this.file.name as any).split('.').pop();
}
return '?';
// return this.file.type.split('/').shift();
}
public name(withoutExt?: boolean): string {
const ext = this.ext();
if (this.customName) {
return this.customName + (withoutExt ? '' : ext !== '?' ? '.' + ext : '');
}
const name = this.file && this.file.name;
if (withoutExt) {
if (ext !== '?') {
return name.substr(0, name.length - (ext.length + 1));
}
}
return name;
}
public isDarkColor(): boolean {
if (this.imageColor) {
const rgb = this.imageColor;
const darkPoint = 20;
return rgb[0] <= darkPoint && rgb[1] <= darkPoint && rgb[2] <= darkPoint;
}
return false;
}
public color(): string {
if (this.imageColor) {
const rgb = this.imageColor;
return 'rgb(' + rgb[0] + ', ' + rgb[1] + ', ' + rgb[2] + ')';
}
if (this.isImage()) {
return 'transparent';
}
const ext = this.ext();
const svgIcon = this.icon();
// var svgIcon = getIconFromExt(ext);
if (svgIcon.color) {
return svgIcon.color;
}
return utils.getColorForText(ext);
}
public isImage(): boolean {
return this.file && !!this.file.type.match(/image((?!vnd).)*$/i);
}
public isVideo(): boolean {
return this.file && this.file.type.indexOf('video') !== -1;
}
public isPlayableVideo(): boolean {
return this.icon().category === 'video-playable';
}
public isAudio(): boolean {
return this.file && this.file.type.indexOf('audio') !== -1;
}
public isPlayableAudio(): boolean {
return this.icon().category === 'audio-playable';
}
public isText(): boolean {
return this.file && this.file.type.indexOf('text') !== -1;
}
public setUrl(url: string | null): Promise<this> |
public imageResized(resized: ImageThumbnail | null) {
if (!resized) {
return;
}
this.urlResized = resized.url;
this.image = resized.image;
if (resized.image && resized.image.width && resized.image.height) {
this.dimensions.width = resized.image.width;
this.dimensions.height = resized.image.height;
}
this.lastKnownSrc = this.urlResized;
this.imageColor = resized.color;
}
public resizeImage(): Promise<this> {
return new Promise((resolve, reject) => {
utils
.resizeImage(this.thumbnailSize, this.file, this.urlValue as string, this.calculateAverageColor)
.then((resized) => {
this.imageResized(resized);
resolve(this);
})
.catch(reject);
});
}
public icon(): SvgIcon {
const ext = this.ext();
const svgIcon = getIconFromExt(ext);
return svgIcon;
}
public getErrorMessage(errorText?: ErrorText): string {
const error = this.error;
if (!error) {
return '';
}
errorText = errorText || {};
errorText = {
common: errorText.common || 'Invalid file.',
type: errorText.type || 'Invalid file type.',
size: errorText.size || 'Files should not exceed ' + this.maxSize + ' in size',
};
if (error.type) {
return errorText.type as string;
} else if (error.size) {
return errorText.size as string;
} else if (error.upload) {
return this.upload.error ? this.upload.error : error.upload;
}
return errorText.common as string;
}
public toRaw(): RawFileRecord {
const raw = this.raw || ({} as RawFileRecord);
// raw.url = this.urlValue;
raw.url = this.url.bind(this);
raw.urlResized = this.urlResized;
raw.src = this.src.bind(this);
raw.name = this.name.bind(this);
raw.lastModified = this.file.lastModified;
raw.sizeText = this.size();
raw.size = this.file.size;
raw.type = this.file.type;
raw.ext = this.ext();
raw.color = this.color();
raw.file = this.file;
raw.progress = this.progress.bind(this); // pass it as a function
raw.upload = this.upload;
if (!('error' in raw)) {
Object.defineProperty(raw, 'error', {
get: () => {
return this.error;
},
});
}
raw.dimensions = this.dimensions;
return raw;
}
public validate(): void {
const validType = utils.validateType(this.file, this.accept);
const validSize = utils.validateSize(this.file, this.maxSize as string);
if (!validType || !validSize) {
this.error = {
type: !validType,
size: !validSize,
};
} else {
this.error = false;
}
}
}
export default FileRecord;
| {
this.urlValue = url;
return new Promise((resolve, reject) => {
if (this.isImage()) {
this.resizeImage().then(
() => {
resolve(this);
},
(err) => {
resolve(this);
}
);
return;
}
resolve(this);
});
} | identifier_body |
file-record.ts | import { getIconFromExt, SvgIcon } from './icons';
import utils from './utils';
import { RGBA, ImageThumbnail, VideoThumbnail } from './utils';
interface Dimensions {
height: number;
width: number;
}
interface Options {
accept?: string;
maxSize?: string;
read: boolean;
thumbnailSize?: number;
averageColor?: boolean;
withCredentials?: boolean;
}
interface ErrorText {
common?: string;
type?: string;
size?: string;
upload?: string;
}
interface ErrorFlags {
common?: boolean;
type?: boolean;
size?: boolean;
upload?: false | string;
}
interface RawFileRecord {
url: string | ((value?: string) => string | undefined | Promise<FileRecord>);
urlResized: string | null;
src: () => any;
name: any;
lastModified: number;
sizeText: string;
size: number;
type: string;
ext: string;
color: string;
file: File;
progress: number | ((progress?: number) => number | void);
error?: false | ErrorFlags;
dimensions: Dimensions;
videoThumbnail: string;
imageColor: RGBA;
customName: string;
upload: UploadData;
}
interface DummyFile {
name: string;
size: number;
type: string;
lastModified: number;
lastModifiedDate: Date;
}
interface UploadData {
data: any;
error: string | false;
}
export { Dimensions, Options, RawFileRecord };
class FileRecord {
public static getFromRaw(
rawFileRecord: RawFileRecord,
options: Options,
isSync = false
): FileRecord | Promise<FileRecord> {
const fileRecord = new FileRecord(rawFileRecord, options);
const promise = fileRecord.setUrl(rawFileRecord.url as string);
rawFileRecord.progress = fileRecord.progress.bind(fileRecord); // convert it as a function
rawFileRecord.src = fileRecord.src.bind(fileRecord);
rawFileRecord.name = fileRecord.name.bind(fileRecord); // convert it as a function
if (isSync) {
return fileRecord;
}
return promise;
}
public static fromRaw(rawFileRecord: RawFileRecord, options: Options): Promise<FileRecord> {
return FileRecord.getFromRaw(rawFileRecord, options, false) as Promise<FileRecord>;
}
public static fromRawSync(rawFileRecord: RawFileRecord, options: Options): FileRecord {
return FileRecord.getFromRaw(rawFileRecord, options, true) as FileRecord;
}
public static fromRawArray(rawFileRecords: RawFileRecord[], options: Options): Promise<FileRecord[]> {
const promises: Array<Promise<FileRecord>> = [];
for (const rawFileRecord of rawFileRecords) {
promises.push(FileRecord.fromRaw(rawFileRecord, options));
}
return Promise.all(promises);
}
public static toRawArray(fileRecords: FileRecord[]): RawFileRecord[] {
const rawFileRecords: RawFileRecord[] = [];
for (const fileRecord of fileRecords) {
rawFileRecords.push(fileRecord.toRaw());
}
return rawFileRecords;
}
public static readFile(fileRecord: FileRecord): Promise<FileRecord> {
return new Promise((resolve, reject) => {
if (!fileRecord.read) {
fileRecord.setUrl(null).then(
() => {
resolve(fileRecord);
},
(err) => {
// ignore error
resolve(fileRecord);
}
);
return;
}
utils.getDataURL(fileRecord.file).then((dataUrl) => {
fileRecord.setUrl(dataUrl).then(() => {
resolve(fileRecord);
}, reject);
}, reject);
});
}
public static readFiles(fileRecords: FileRecord[]): Promise<FileRecord[]> {
const promises: Array<Promise<FileRecord>> = [];
for (const fileRecord of fileRecords) {
promises.push(FileRecord.readFile(fileRecord));
}
return Promise.all(promises);
}
public urlValue: null | string = null;
public urlResized: null | string = null;
public image: HTMLImageElement | {} = {};
public isPlayingAv: boolean = false;
public oldFileName: string | null = null;
public oldCustomName: string | null = null;
public upload: UploadData = { data: null, error: false };
public raw: RawFileRecord;
public progressInternal: number;
public accept?: string;
public dimensions: Dimensions;
public error: false | ErrorFlags;
public file: File;
public height: undefined | number | string;
public width: undefined | number | string;
public id: string;
public imageColor?: RGBA;
public lastKnownSrc: null | string;
public maxSize?: string;
public options: Options;
public read: boolean;
public thumbnailSize: number;
public videoThumbnail: any;
public customName: any;
public xhr?: XMLHttpRequest;
public xhrQueue?: () => any;
public stopAv?: (() => any) | null;
public tusUpload?: any;
public calculateAverageColor: boolean;
public constructor(data: RawFileRecord, options: Options) {
this.urlValue = null;
this.urlResized = null;
this.lastKnownSrc = null;
this.image = {};
this.isPlayingAv = false;
this.oldFileName = null;
this.oldCustomName = null;
this.raw = data;
this.file = data.file instanceof File ? data.file : (this.createDummyFile(data) as any);
this.progressInternal = !isNaN(data.progress as number) ? (data.progress as number) : 0;
// this.width = FileRecord.defaultWidth;
// this.height = FileRecord.defaultHeight;
this.thumbnailSize = options.thumbnailSize || 360;
this.read = !!options.read;
this.dimensions = data.dimensions || { width: 0, height: 0 };
this.dimensions.width = this.dimensions.width || 0;
this.dimensions.height = this.dimensions.height || 0;
this.error = data.error || false;
this.options = options;
this.maxSize = options.maxSize;
this.accept = options.accept;
this.id = Math.random() + ':' + new Date().getTime();
this.videoThumbnail = data.videoThumbnail;
this.imageColor = data.imageColor;
this.customName = data.customName;
this.calculateAverageColor = options.averageColor !== undefined ? options.averageColor : true;
this.validate();
}
// populate(data, options = {}) {}
public createDummyFile(data: RawFileRecord): DummyFile {
const file: DummyFile = {} as DummyFile;
file.lastModified = data.lastModified;
const d = new Date();
if (file.lastModified) {
d.setTime(file.lastModified);
}
file.lastModifiedDate = d;
file.name = typeof data.name === 'function' ? data.name() : data.name;
file.size = data.size;
file.type = data.type;
return file;
}
public hasProgress(): boolean {
return !isNaN(this.progressInternal); // && this._progress <= 100;
}
public progress(value?: number): number | void {
if (value !== undefined) |
return this.progressInternal || 0;
}
public url(value?: string): string | undefined | Promise<this> {
if (value !== undefined) {
return this.setUrl(value);
}
return this.urlValue || undefined;
}
public src(): string {
if (this.isImage()) {
return this.urlResized || this.urlValue || (this.file as any).url;
}
if (this.isPlayableVideo()) {
return this.videoThumbnail || '';
}
return '';
}
public size(): string {
if (!this.file) {
return '';
}
return utils.getSizeFormatted(this.file.size);
}
public ext(): string {
if (this.file && this.file.name.indexOf('.') !== -1) {
return (this.file.name as any).split('.').pop();
}
return '?';
// return this.file.type.split('/').shift();
}
public name(withoutExt?: boolean): string {
const ext = this.ext();
if (this.customName) {
return this.customName + (withoutExt ? '' : ext !== '?' ? '.' + ext : '');
}
const name = this.file && this.file.name;
if (withoutExt) {
if (ext !== '?') {
return name.substr(0, name.length - (ext.length + 1));
}
}
return name;
}
public isDarkColor(): boolean {
if (this.imageColor) {
const rgb = this.imageColor;
const darkPoint = 20;
return rgb[0] <= darkPoint && rgb[1] <= darkPoint && rgb[2] <= darkPoint;
}
return false;
}
public color(): string {
if (this.imageColor) {
const rgb = this.imageColor;
return 'rgb(' + rgb[0] + ', ' + rgb[1] + ', ' + rgb[2] + ')';
}
if (this.isImage()) {
return 'transparent';
}
const ext = this.ext();
const svgIcon = this.icon();
// var svgIcon = getIconFromExt(ext);
if (svgIcon.color) {
return svgIcon.color;
}
return utils.getColorForText(ext);
}
public isImage(): boolean {
return this.file && !!this.file.type.match(/image((?!vnd).)*$/i);
}
public isVideo(): boolean {
return this.file && this.file.type.indexOf('video') !== -1;
}
public isPlayableVideo(): boolean {
return this.icon().category === 'video-playable';
}
public isAudio(): boolean {
return this.file && this.file.type.indexOf('audio') !== -1;
}
public isPlayableAudio(): boolean {
return this.icon().category === 'audio-playable';
}
public isText(): boolean {
return this.file && this.file.type.indexOf('text') !== -1;
}
public setUrl(url: string | null): Promise<this> {
this.urlValue = url;
return new Promise((resolve, reject) => {
if (this.isImage()) {
this.resizeImage().then(
() => {
resolve(this);
},
(err) => {
resolve(this);
}
);
return;
}
resolve(this);
});
}
public imageResized(resized: ImageThumbnail | null) {
if (!resized) {
return;
}
this.urlResized = resized.url;
this.image = resized.image;
if (resized.image && resized.image.width && resized.image.height) {
this.dimensions.width = resized.image.width;
this.dimensions.height = resized.image.height;
}
this.lastKnownSrc = this.urlResized;
this.imageColor = resized.color;
}
public resizeImage(): Promise<this> {
return new Promise((resolve, reject) => {
utils
.resizeImage(this.thumbnailSize, this.file, this.urlValue as string, this.calculateAverageColor)
.then((resized) => {
this.imageResized(resized);
resolve(this);
})
.catch(reject);
});
}
public icon(): SvgIcon {
const ext = this.ext();
const svgIcon = getIconFromExt(ext);
return svgIcon;
}
public getErrorMessage(errorText?: ErrorText): string {
const error = this.error;
if (!error) {
return '';
}
errorText = errorText || {};
errorText = {
common: errorText.common || 'Invalid file.',
type: errorText.type || 'Invalid file type.',
size: errorText.size || 'Files should not exceed ' + this.maxSize + ' in size',
};
if (error.type) {
return errorText.type as string;
} else if (error.size) {
return errorText.size as string;
} else if (error.upload) {
return this.upload.error ? this.upload.error : error.upload;
}
return errorText.common as string;
}
public toRaw(): RawFileRecord {
const raw = this.raw || ({} as RawFileRecord);
// raw.url = this.urlValue;
raw.url = this.url.bind(this);
raw.urlResized = this.urlResized;
raw.src = this.src.bind(this);
raw.name = this.name.bind(this);
raw.lastModified = this.file.lastModified;
raw.sizeText = this.size();
raw.size = this.file.size;
raw.type = this.file.type;
raw.ext = this.ext();
raw.color = this.color();
raw.file = this.file;
raw.progress = this.progress.bind(this); // pass it as a function
raw.upload = this.upload;
if (!('error' in raw)) {
Object.defineProperty(raw, 'error', {
get: () => {
return this.error;
},
});
}
raw.dimensions = this.dimensions;
return raw;
}
public validate(): void {
const validType = utils.validateType(this.file, this.accept);
const validSize = utils.validateSize(this.file, this.maxSize as string);
if (!validType || !validSize) {
this.error = {
type: !validType,
size: !validSize,
};
} else {
this.error = false;
}
}
}
export default FileRecord;
| {
this.progressInternal = value;
return;
} | conditional_block |
script.js | var mov = 0,
physics,
lastFrame = new Date().getTime(),
beams = {obj:[], img:new Image()},
player = {obj:null, hits:0, life:3, balls:[new Image(), new Image(), new Image(), new Image()], openBall: new Image(), scores: {pt:0, mt: 0}, needScore:1000},
walls = {},
coins = {obj:[], img:[]},
destroyObj = [],
btnActions,
is_started = false;
var docBody = document.getElementById('container'),
scoresObj = document.getElementById('scores'),
scoreMtObj = scoresObj.querySelector('.mt'),
scorePtObj = scoresObj.querySelector('.pt'),
pauseObj = document.getElementById('pause-menu'),
btnObj = document.querySelectorAll('button'),
loaderObj = document.getElementById('loader'),
menuObj = document.getElementById('game-menu'),
overObj = document.getElementById('over-menu'),
cvObj = document.getElementById("canvas"),
bangObj = document.getElementById("bang"),
tipObj = document.getElementById("tip-menu");
(function() {
var b2Vec2 = Box2D.Common.Math.b2Vec2,
b2BodyDef = Box2D.Dynamics.b2BodyDef,
b2Body = Box2D.Dynamics.b2Body,
b2FixtureDef = Box2D.Dynamics.b2FixtureDef,
b2Fixture = Box2D.Dynamics.b2Fixture,
b2World = Box2D.Dynamics.b2World,
b2MassData = Box2D.Collision.Shapes.b2MassData,
b2PolygonShape = Box2D.Collision.Shapes.b2PolygonShape,
b2CircleShape = Box2D.Collision.Shapes.b2CircleShape,
b2DebugDraw = Box2D.Dynamics.b2DebugDraw;
var Physics = window.Physics = function(element,scale) {
var gravity = new b2Vec2(0,9.8);
this.world = new b2World(gravity, true);
this.element = element;
this.context = element.getContext("2d");
this.scale = scale || 20;
this.dtRemaining = 0;
this.stepAmount = 1/60;
this.isPause = false;
this.gaveOver = false;
};
Physics.prototype.debug = function() {
this.debugDraw = new b2DebugDraw();
this.debugDraw.SetSprite(this.context);
this.debugDraw.SetDrawScale(this.scale);
this.debugDraw.SetFillAlpha(0.3);
this.debugDraw.SetLineThickness(1.0);
this.debugDraw.SetFlags(b2DebugDraw.e_shapeBit | b2DebugDraw.e_jointBit);
this.world.SetDebugDraw(this.debugDraw);
};
Physics.prototype.step = function(dt) {
if(this.isPause) return false;
this.dtRemaining += dt;
while(this.dtRemaining > this.stepAmount) {
this.dtRemaining -= this.stepAmount;
this.world.Step(this.stepAmount,
10, // velocity iterations
10);// position iterations
}
if(this.debugDraw) {
this.world.DrawDebugData();
} else {
var obj = this.world.GetBodyList();
for (var i in destroyObj) {
this.world.DestroyBody(destroyObj[i]);
}
// Reset the array
destroyObj.length = 0;
this.context.setTransform(1,0,0,1,0,0);//reset the transform matrix as it is cumulative
this.context.clearRect(0,0,this.element.width,this.element.height);
var v = player.obj.GetPosition();
var posX = -v.x * this.scale + this.element.width / 2;
var posY = -v.y * this.scale + this.element.height / 2;
scoresObj.style.left = -posX + (this.element.width / 2) - 70 +'px';
if(v.x < (this.element.width/this.scale)/2){
posX = 0;
}else{
scoresObj.style.left = (screen.availWidth/2) - 70 + 'px';
}
scoresObj.style.top = -posY + (this.element.height / 2) +'px';
this.context.translate(posX, 0); //posY
this.context.save();
this.context.scale(this.scale,this.scale);
while(obj) {
var body = obj.GetUserData();
if(body) {
body.draw(this.context);
}
obj = obj.GetNext();
}
this.context.restore();
}
};
Physics.prototype.click = function(callback) {
var self = this;
function handleClick(e) {
e.preventDefault();
var point = {
x: (e.offsetX || e.layerX) / self.scale,
y: (e.offsetY || e.layerY) / self.scale
};
self.world.QueryPoint(function(fixture) {
callback(fixture.GetBody(), fixture, point);
}, point);
}
};
Physics.prototype.collision = function() {
this.listener = new Box2D.Dynamics.b2ContactListener();
this.listener.BeginContact = function(contact,impulse) {
if(physics.getGaveOver()) return false;
if(contact.GetFixtureB().GetBody().GetUserData().details.userData.name == 'coins'){
destroyObj.push(contact.GetFixtureB().GetBody());
var x = player.obj.GetUserData();
x.details.image = player.openBall;
player.obj.SetUserData(x);
player.scores.pt++;
scorePtObj.innerText = player.scores.pt;
//if(player.scores.pt >= )
}
if(contact.GetFixtureA().GetBody().GetUserData().details.userData.name == 'pillar' || contact.GetFixtureA().GetBody().GetUserData().details.userData.name == 'wall'){
//return false;
if(physics.getGaveOver()) return false;
if(contact.GetFixtureA().GetBody().GetUserData().details.userData.name == 'pillar')
player.hits++;
else
player.hits = player.life; //set game over
btnActions.speeder = 0;
var x = player.obj.GetUserData();
x.details.image = player.balls[player.hits]; //openBall
x.fixtureDef.density = 1;
x.fixtureDef.restitution = 1;
x.fixtureDef.friction = 4
player.obj.SetUserData(x);
showHiteffect();
if(player.hits >= player.life){
physics.setGaveOver();
window.removeEventListener("keydown", btnActions.keyActions, false);
overObj.style.display = 'block';
var collectedObj = overObj.querySelector('.collected');
collectedObj.innerHTML = 'Fruits Collected: <span>'+player.scores.pt+'</span>';
collectedObj.style.display = 'block';
return false;
}
}
}
this.listener.EndContact = function (contact) {
if(contact.GetFixtureB().GetBody().GetUserData().details.userData.name == 'coins'){
//physics.world.DestroyBody(contact.GetFixtureB().GetBody());
setTimeout(function(){
var x = player.obj.GetUserData();
x.details.image = player.balls[player.hits];
player.obj.SetUserData(x);
}, 200);
}
};
this.listener.PreSolve = function (contact, oldManifold) {
//console.log('hit')
};
this.listener.PostSolve = function(contact,impulse) {
var bodyA = contact.GetFixtureA().GetBody().GetUserData(),
bodyB = contact.GetFixtureB().GetBody().GetUserData();
if(bodyA.contact) { bodyA.contact(contact,impulse,true) }
if(bodyB.contact) { bodyB.contact(contact,impulse,false) }
//console.log('XXXXXX');console.log(contact);console.log(bodyA);console.log(bodyB);console.log('XXXXXX');
};
this.world.SetContactListener(this.listener);
};
Physics.prototype.resume = function() {
this.isPause = false;
}
Physics.prototype.pause = function() {
this.isPause = true;
}
Physics.prototype.setGaveOver = function() {
this.gaveOver = true;
}
Physics.prototype.getGaveOver = function() {
return this.gaveOver;
}
Physics.prototype.getPlayStatus = function() {
return this.isPause;
}
var Body = window.Body = function(physics,details) {
this.details = details = details || {};
// Create the definition
this.definition = new b2BodyDef();
// Set up the definition
for(var k in this.definitionDefaults) {
this.definition[k] = details[k] || this.definitionDefaults[k];
}
this.definition.position = new b2Vec2(details.x || 0, details.y || 0);
this.definition.linearVelocity = new b2Vec2(details.vx || 0, details.vy || 0);
this.definition.userData = this;
this.definition.type = details.type == "static" ? b2Body.b2_staticBody : b2Body.b2_dynamicBody;
// Create the Body
this.body = physics.world.CreateBody(this.definition);
// Create the fixture
this.fixtureDef = new b2FixtureDef();
for(var l in this.fixtureDefaults) {
this.fixtureDef[l] = details[l] || this.fixtureDefaults[l];
}
details.shape = details.shape || this.defaults.shape;
switch(details.shape) {
case "circle":
details.radius = details.radius || this.defaults.radius;
this.fixtureDef.shape = new b2CircleShape(details.radius);
break;
case "circle2":
details.radius = details.radius || this.defaults.radius;
this.fixtureDef.shape = new b2CircleShape(details.radius);
this.fixtureDef.isSensor = true;
/*coin.fixtureDef.friction = 0;
coin.fixtureDef.density = 0;
coin.fixtureDef.restitution = 0;
coin.fixtureDef.filter.categoryBits = 4;
coin.fixtureDef.filter.maskBits = 9;*/
break;
case "polygon":
this.fixtureDef.shape = new b2PolygonShape();
this.fixtureDef.shape.SetAsArray(details.points,details.points.length);
break;
case "block":
default:
details.width = details.width || this.defaults.width;
details.height = details.height || this.defaults.height;
this.fixtureDef.shape = new b2PolygonShape();
this.fixtureDef.shape.SetAsBox(details.width/2, details.height/2);
if(details.sensor) this.fixtureDef.isSensor = true;
break;
}
this.body.CreateFixture(this.fixtureDef);
};
Body.prototype.defaults = {
shape: "block",
width: 4,
height: 4,
radius: 1
};
Body.prototype.fixtureDefaults = {
density: 2,
friction: 1,
restitution: 0.2
};
Body.prototype.definitionDefaults = {
active: true,
allowSleep: true,
angle: 0,
angularVelocity: 0,
awake: true,
bullet: false,
fixedRotation: false
};
Body.prototype.draw = function(context) {
var pos = this.body.GetPosition(),
angle = this.body.GetAngle();
context.save();
context.translate(pos.x,pos.y);
context.rotate(angle);
if(this.details.color) {
context.fillStyle = this.details.color;
switch(this.details.shape) {
case "circle":
context.beginPath();
context.arc(0,0,this.details.radius,0,Math.PI*2);
context.fill();
break;
case "circle2":
context.beginPath();
context.arc(0,0,this.details.radius,0,Math.PI*2);
context.fill();
break;
case "polygon":
var points = this.details.points;
context.beginPath();
context.moveTo(points[0].x,points[0].y);
for(var i=1;i<points.length;i++) {
context.lineTo(points[i].x,points[i].y);
}
context.fill();
break;
case "block":
context.fillRect(-this.details.width/2,
-this.details.height/2,
this.details.width,
this.details.height);
default:
break;
}
}
if(this.details.image) {
context.drawImage(this.details.image,
-this.details.width/2,
-this.details.height/2,
this.details.width,
this.details.height);
}
context.restore();
}
window.gameLoop = function() {
var tm = new Date().getTime();
requestAnimationFrame(gameLoop);
var dt = (tm - lastFrame) / 1000;
if(dt > 1/15) { dt = 1/15; }
physics.step(dt);
lastFrame = tm;
player.scores.mt = player.scores.mt+dt;
scoreMtObj.innerText = Math.round(player.scores.mt*10);
};
function createWorld() {
physics = window.physics = new Physics(cvObj);
physics.collision();
var inner_width = physics.element.width / physics.scale;
var inner_height = physics.element.height / physics.scale;
setPillarsAndWalls(physics);
setCoins(physics);
player.obj = new Body(physics, {shape: 'circle', image:player.balls[player.hits], x: 5, y: 20, width: 2, height:2, radius:1, userData:{name:'player'} }).body;
/* setInterval(function(){
//btnActions.keyActions();
var im = {x : 10.0, y : 1.0}
player.obj.ApplyImpulse(im, player.obj.GetPosition());
}, 100); */
/*Event Bindings*/
//window.addEventListener("keydown", btnActions.keyActions, false);
for(var i=0; i<btnObj.length; i++){
btnObj[i].addEventListener("click", function(e){
switch(this.getAttribute('data-action')){
case 'resume':
btnActions.pauseOrResume();
break;
case 'start':
menuObj.style.display = 'none';
tipObj.style.display = 'block';
btnActions.pauseOrResume(true);
/*Event Bindings*/
window.addEventListener("keydown", btnActions.keyActions, false);
break;
}
});
}
}
btnActions = {
speeder:0,
keyActions: function(e){
if(e && e.which == 27){
btnActions.pauseOrResume();
return false;
}
if(is_started && physics.getPlayStatus()) return false;
if(player.hits == player.life){
return false;
}
if(e && !is_started){
tipObj.style.display = 'none';
physics.resume();
is_started = true;
}
var vel = player.obj.GetLinearVelocity();
vel.x = (player.hits) ? 10 - (player.hits*2) : 10;
btnActions.speeder = btnActions.speeder+0.2;
vel.x = vel.x+btnActions.speeder++;
vel.y = -10;
player.obj.SetLinearVelocity(vel);
var im = {x : 24.0, y : 0.0}
player.obj.ApplyImpulse(im, player.obj.GetPosition());
//console.log(player.obj)
},
pauseOrResume: function(is_pause){
if(!physics.getPlayStatus()){
physics.pause();
pauseObj.style.display = 'block';
}else{
physics.resume();
pauseObj.style.display = 'none';
}
if(is_pause){
physics.pause();
return false;
}
}
}
function setPillarsAndWalls(physics){
var inner_height = physics.element.height/physics.scale;
var wt = 4, orig_ht = ht = inner_height/1.4;
var bool = 1;
var x, y, pad = 1;
var arr = [];
for (var i = -6 ; i <= ht-10; i++) {
arr.push(i);
}
//arr[Math.floor(Math.random()*arr.length)];
for (var i = 5; i < 3000; i++){
ht = ht - arr[Math.floor(Math.random()*arr.length)];
//console.log('ov: '+ht)
if(ht > orig_ht){
ht = orig_ht;
}else if(ht <= 15){
ht = 15;
}
//console.log('nv: '+ht)
if(bool){
x = (wt+pad)*(i);
y = 0;
bool = 0;
}else{
x = (wt+pad)*(i-1);
y = inner_height; //25
bool = 1;
}
beams.obj.push(new Body(physics, { image: beams.img, type: "static", x: x, y: y, height: ht, width: wt, userData:{name:'pillar'}, sensor: false }));
}
var beamWidth = beams.obj[beams.obj.length-1].details.x+8;
// Create some walls
walls.left = new Body(physics, { color: "rgb(93, 198, 250)", type: "static", x: 0, y: 0, height: physics.element.height, width: 0.5, userData:{name:'wall'} });
walls.right = new Body(physics, { color: "red", type: "static", x: beamWidth, y: 0, height: physics.element.height, width: 0.5, userData:{name:'wall'}});
walls.top = new Body(physics, { color: "rgb(93, 198, 250)", type: "static", x: 0, y: 0, height: 0.5, width: beamWidth, userData:{name:'wall'} });
walls.bottom = new Body(physics, { color: "rgb(72, 76, 77)", type: "static", x: beamWidth/2, y:inner_height, height: 0.5, width: beamWidth, userData:{name:'wall'} });
}
function setCoins(physics) |
function init() {
var preloader = [];
var imgArray = ['images/flappy-pacman-logo.png', 'images/log.png', 'images/smily-40.png', 'images/smily-40-1.png', 'images/smily-40-2.png', 'images/smily-40-3.png', 'images/coins/c1.png', 'images/coins/c2.png', 'images/coins/c3.png', 'images/coins/c4.png', 'images/coins/c5.png', 'images/coins/c6.png', 'images/coins/c7.png', 'images/coins/c8.png', 'images/coins/c9.png', 'images/coins/c10.png', 'images/coins/c11.png', 'images/coins/c12.png', 'images/coins/c13.png', 'images/coins/c14.png', 'images/coins/c15.png', 'images/coins/c16.png', 'images/coins/c17.png', 'images/coins/c18.png', 'images/coins/c19.png', 'images/coins/c20.png', 'images/coins/c21.png', 'images/bang-hit.png'];
var $i = 0;
function loadImg(){
preloader[$i] = new Image();
preloader[$i].src = imgArray[$i];
preloader[$i].onload = function(){
$i++;
if($i == imgArray.length){
for(var i = 0; i<=21-1; i++){
coins.img[i] = new Image();
coins.img[i].src = 'images/coins/c'+(i+1)+'.png';
}
beams.img.src = 'images/log.png';
player.balls[0].src = 'images/smily-40.png';
player.balls[1].src = 'images/smily-40-1.png';
player.balls[2].src = 'images/smily-40-2.png';
player.balls[3].src = 'images/smily-40-3.png';
player.openBall.src = 'images/smily-40-eat.png';
createWorld();
requestAnimationFrame(gameLoop);
setTimeout(function(){physics.pause();}, 1000);
loaderObj.style.display = 'none';
menuObj.style.display = 'block';
return true;
}
loadImg();
}
}
loadImg();
}
window.addEventListener("load",init);
}());
function showHiteffect(){
var v = player.obj.GetPosition();
var posX = -v.x * physics.scale + physics.element.width / 2;
var posY = -v.y * physics.scale + physics.element.height / 2;
bangObj.style.display = 'block';
bangObj.style.left = -posX + (physics.element.width / 2) - 10+'px';
if(v.x < (physics.element.width/physics.scale)/2){
posX = 0;
}else{
bangObj.style.left = (screen.availWidth/2) - 10 + 'px';
}
bangObj.style.top = -posY + (physics.element.height / 2) - 20 +'px';
setTimeout(function(){bangObj.style.display = 'none';}, 200);
}
function setWindowSize(){
var cloud = document.getElementById("sky-layer");
docBody.style.width = window.screen.availWidth + 'px';
docBody.style.height = window.screen.availHeight-61 + 'px';
cvObj.width = window.screen.availWidth;
cvObj.height = window.screen.availHeight-61;
cloud.style.width = cvObj.width + 'px';
cloud.style.height = cvObj.height + 'px';
}
setWindowSize();
// Lastly, add in the `requestAnimationFrame` shim, if necessary. Does nothing
// if `requestAnimationFrame` is already on the `window` object.
(function() {
var lastTime = 0;
var vendors = ['ms', 'moz', 'webkit', 'o'];
for(var x = 0; x < vendors.length && !window.requestAnimationFrame; ++x) {
window.requestAnimationFrame = window[vendors[x]+'RequestAnimationFrame'];
window.cancelAnimationFrame = window[vendors[x]+'CancelAnimationFrame'] || window[vendors[x]+'CancelRequestAnimationFrame'];
}
if (!window.requestAnimationFrame) {
window.requestAnimationFrame = function(callback, element) {
var currTime = new Date().getTime();
var timeToCall = Math.max(0, 16 - (currTime - lastTime));
var id = window.setTimeout(function() { callback(currTime + timeToCall); }, timeToCall);
lastTime = currTime + timeToCall;
return id;
};
}
if (!window.cancelAnimationFrame) {
window.cancelAnimationFrame = function(id) {
clearTimeout(id);
};
}
}()); | {
var x, y;
var counter = 0;
// 100 iterations
var increase = Math.PI * 2 / 100;
for (var i = 25; i <= 15000; i +=6 ) {
x = i;
y = Math.sin(counter) / 2 + 18;
counter += increase * i;
var coin = new Body(physics, { shape: 'circle2', image: coins.img[Math.floor(Math.random()*coins.img.length)], type: "static", x: x, y: y, height: 1.4, width: 1.4, radius:1.4/2, userData:{name:'coins', value:1, i:i}});
coins.obj.push(coin);
}
} | identifier_body |
script.js | var mov = 0,
physics,
lastFrame = new Date().getTime(),
beams = {obj:[], img:new Image()},
player = {obj:null, hits:0, life:3, balls:[new Image(), new Image(), new Image(), new Image()], openBall: new Image(), scores: {pt:0, mt: 0}, needScore:1000},
walls = {},
coins = {obj:[], img:[]},
destroyObj = [],
btnActions,
is_started = false;
var docBody = document.getElementById('container'),
scoresObj = document.getElementById('scores'),
scoreMtObj = scoresObj.querySelector('.mt'),
scorePtObj = scoresObj.querySelector('.pt'),
pauseObj = document.getElementById('pause-menu'),
btnObj = document.querySelectorAll('button'),
loaderObj = document.getElementById('loader'),
menuObj = document.getElementById('game-menu'),
overObj = document.getElementById('over-menu'),
cvObj = document.getElementById("canvas"),
bangObj = document.getElementById("bang"),
tipObj = document.getElementById("tip-menu");
(function() {
var b2Vec2 = Box2D.Common.Math.b2Vec2,
b2BodyDef = Box2D.Dynamics.b2BodyDef,
b2Body = Box2D.Dynamics.b2Body,
b2FixtureDef = Box2D.Dynamics.b2FixtureDef,
b2Fixture = Box2D.Dynamics.b2Fixture,
b2World = Box2D.Dynamics.b2World,
b2MassData = Box2D.Collision.Shapes.b2MassData,
b2PolygonShape = Box2D.Collision.Shapes.b2PolygonShape,
b2CircleShape = Box2D.Collision.Shapes.b2CircleShape,
b2DebugDraw = Box2D.Dynamics.b2DebugDraw;
var Physics = window.Physics = function(element,scale) {
var gravity = new b2Vec2(0,9.8);
this.world = new b2World(gravity, true);
this.element = element;
this.context = element.getContext("2d");
this.scale = scale || 20;
this.dtRemaining = 0;
this.stepAmount = 1/60;
this.isPause = false;
this.gaveOver = false;
};
Physics.prototype.debug = function() {
this.debugDraw = new b2DebugDraw();
this.debugDraw.SetSprite(this.context);
this.debugDraw.SetDrawScale(this.scale);
this.debugDraw.SetFillAlpha(0.3);
this.debugDraw.SetLineThickness(1.0);
this.debugDraw.SetFlags(b2DebugDraw.e_shapeBit | b2DebugDraw.e_jointBit);
this.world.SetDebugDraw(this.debugDraw);
};
Physics.prototype.step = function(dt) {
if(this.isPause) return false;
this.dtRemaining += dt;
while(this.dtRemaining > this.stepAmount) {
this.dtRemaining -= this.stepAmount;
this.world.Step(this.stepAmount,
10, // velocity iterations
10);// position iterations
}
if(this.debugDraw) {
this.world.DrawDebugData();
} else {
var obj = this.world.GetBodyList();
for (var i in destroyObj) {
this.world.DestroyBody(destroyObj[i]);
}
// Reset the array
destroyObj.length = 0;
this.context.setTransform(1,0,0,1,0,0);//reset the transform matrix as it is cumulative
this.context.clearRect(0,0,this.element.width,this.element.height);
var v = player.obj.GetPosition();
var posX = -v.x * this.scale + this.element.width / 2;
var posY = -v.y * this.scale + this.element.height / 2;
scoresObj.style.left = -posX + (this.element.width / 2) - 70 +'px';
if(v.x < (this.element.width/this.scale)/2){
posX = 0;
}else{
scoresObj.style.left = (screen.availWidth/2) - 70 + 'px';
}
scoresObj.style.top = -posY + (this.element.height / 2) +'px';
this.context.translate(posX, 0); //posY
this.context.save();
this.context.scale(this.scale,this.scale);
while(obj) {
var body = obj.GetUserData();
if(body) {
body.draw(this.context);
}
obj = obj.GetNext();
}
this.context.restore();
}
};
Physics.prototype.click = function(callback) {
var self = this;
function handleClick(e) {
e.preventDefault();
var point = {
x: (e.offsetX || e.layerX) / self.scale,
y: (e.offsetY || e.layerY) / self.scale
};
self.world.QueryPoint(function(fixture) {
callback(fixture.GetBody(), fixture, point);
}, point);
}
};
Physics.prototype.collision = function() {
this.listener = new Box2D.Dynamics.b2ContactListener();
this.listener.BeginContact = function(contact,impulse) {
if(physics.getGaveOver()) return false;
if(contact.GetFixtureB().GetBody().GetUserData().details.userData.name == 'coins'){
destroyObj.push(contact.GetFixtureB().GetBody());
var x = player.obj.GetUserData();
x.details.image = player.openBall;
player.obj.SetUserData(x);
player.scores.pt++;
scorePtObj.innerText = player.scores.pt;
//if(player.scores.pt >= )
}
if(contact.GetFixtureA().GetBody().GetUserData().details.userData.name == 'pillar' || contact.GetFixtureA().GetBody().GetUserData().details.userData.name == 'wall'){
//return false;
if(physics.getGaveOver()) return false;
if(contact.GetFixtureA().GetBody().GetUserData().details.userData.name == 'pillar')
player.hits++;
else
player.hits = player.life; //set game over
btnActions.speeder = 0;
var x = player.obj.GetUserData();
x.details.image = player.balls[player.hits]; //openBall
x.fixtureDef.density = 1;
x.fixtureDef.restitution = 1;
x.fixtureDef.friction = 4
player.obj.SetUserData(x);
showHiteffect();
if(player.hits >= player.life){
physics.setGaveOver();
window.removeEventListener("keydown", btnActions.keyActions, false);
overObj.style.display = 'block';
var collectedObj = overObj.querySelector('.collected');
collectedObj.innerHTML = 'Fruits Collected: <span>'+player.scores.pt+'</span>';
collectedObj.style.display = 'block';
return false;
}
}
}
this.listener.EndContact = function (contact) {
if(contact.GetFixtureB().GetBody().GetUserData().details.userData.name == 'coins'){
//physics.world.DestroyBody(contact.GetFixtureB().GetBody());
setTimeout(function(){
var x = player.obj.GetUserData();
x.details.image = player.balls[player.hits];
player.obj.SetUserData(x);
}, 200);
}
};
this.listener.PreSolve = function (contact, oldManifold) {
//console.log('hit')
};
this.listener.PostSolve = function(contact,impulse) {
var bodyA = contact.GetFixtureA().GetBody().GetUserData(),
bodyB = contact.GetFixtureB().GetBody().GetUserData();
if(bodyA.contact) { bodyA.contact(contact,impulse,true) }
if(bodyB.contact) { bodyB.contact(contact,impulse,false) }
//console.log('XXXXXX');console.log(contact);console.log(bodyA);console.log(bodyB);console.log('XXXXXX');
};
this.world.SetContactListener(this.listener);
};
Physics.prototype.resume = function() {
this.isPause = false;
}
Physics.prototype.pause = function() {
this.isPause = true;
}
| this.gaveOver = true;
}
Physics.prototype.getGaveOver = function() {
return this.gaveOver;
}
Physics.prototype.getPlayStatus = function() {
return this.isPause;
}
var Body = window.Body = function(physics,details) {
this.details = details = details || {};
// Create the definition
this.definition = new b2BodyDef();
// Set up the definition
for(var k in this.definitionDefaults) {
this.definition[k] = details[k] || this.definitionDefaults[k];
}
this.definition.position = new b2Vec2(details.x || 0, details.y || 0);
this.definition.linearVelocity = new b2Vec2(details.vx || 0, details.vy || 0);
this.definition.userData = this;
this.definition.type = details.type == "static" ? b2Body.b2_staticBody : b2Body.b2_dynamicBody;
// Create the Body
this.body = physics.world.CreateBody(this.definition);
// Create the fixture
this.fixtureDef = new b2FixtureDef();
for(var l in this.fixtureDefaults) {
this.fixtureDef[l] = details[l] || this.fixtureDefaults[l];
}
details.shape = details.shape || this.defaults.shape;
switch(details.shape) {
case "circle":
details.radius = details.radius || this.defaults.radius;
this.fixtureDef.shape = new b2CircleShape(details.radius);
break;
case "circle2":
details.radius = details.radius || this.defaults.radius;
this.fixtureDef.shape = new b2CircleShape(details.radius);
this.fixtureDef.isSensor = true;
/*coin.fixtureDef.friction = 0;
coin.fixtureDef.density = 0;
coin.fixtureDef.restitution = 0;
coin.fixtureDef.filter.categoryBits = 4;
coin.fixtureDef.filter.maskBits = 9;*/
break;
case "polygon":
this.fixtureDef.shape = new b2PolygonShape();
this.fixtureDef.shape.SetAsArray(details.points,details.points.length);
break;
case "block":
default:
details.width = details.width || this.defaults.width;
details.height = details.height || this.defaults.height;
this.fixtureDef.shape = new b2PolygonShape();
this.fixtureDef.shape.SetAsBox(details.width/2, details.height/2);
if(details.sensor) this.fixtureDef.isSensor = true;
break;
}
this.body.CreateFixture(this.fixtureDef);
};
Body.prototype.defaults = {
shape: "block",
width: 4,
height: 4,
radius: 1
};
Body.prototype.fixtureDefaults = {
density: 2,
friction: 1,
restitution: 0.2
};
Body.prototype.definitionDefaults = {
active: true,
allowSleep: true,
angle: 0,
angularVelocity: 0,
awake: true,
bullet: false,
fixedRotation: false
};
Body.prototype.draw = function(context) {
var pos = this.body.GetPosition(),
angle = this.body.GetAngle();
context.save();
context.translate(pos.x,pos.y);
context.rotate(angle);
if(this.details.color) {
context.fillStyle = this.details.color;
switch(this.details.shape) {
case "circle":
context.beginPath();
context.arc(0,0,this.details.radius,0,Math.PI*2);
context.fill();
break;
case "circle2":
context.beginPath();
context.arc(0,0,this.details.radius,0,Math.PI*2);
context.fill();
break;
case "polygon":
var points = this.details.points;
context.beginPath();
context.moveTo(points[0].x,points[0].y);
for(var i=1;i<points.length;i++) {
context.lineTo(points[i].x,points[i].y);
}
context.fill();
break;
case "block":
context.fillRect(-this.details.width/2,
-this.details.height/2,
this.details.width,
this.details.height);
default:
break;
}
}
if(this.details.image) {
context.drawImage(this.details.image,
-this.details.width/2,
-this.details.height/2,
this.details.width,
this.details.height);
}
context.restore();
}
window.gameLoop = function() {
var tm = new Date().getTime();
requestAnimationFrame(gameLoop);
var dt = (tm - lastFrame) / 1000;
if(dt > 1/15) { dt = 1/15; }
physics.step(dt);
lastFrame = tm;
player.scores.mt = player.scores.mt+dt;
scoreMtObj.innerText = Math.round(player.scores.mt*10);
};
function createWorld() {
physics = window.physics = new Physics(cvObj);
physics.collision();
var inner_width = physics.element.width / physics.scale;
var inner_height = physics.element.height / physics.scale;
setPillarsAndWalls(physics);
setCoins(physics);
player.obj = new Body(physics, {shape: 'circle', image:player.balls[player.hits], x: 5, y: 20, width: 2, height:2, radius:1, userData:{name:'player'} }).body;
/* setInterval(function(){
//btnActions.keyActions();
var im = {x : 10.0, y : 1.0}
player.obj.ApplyImpulse(im, player.obj.GetPosition());
}, 100); */
/*Event Bindings*/
//window.addEventListener("keydown", btnActions.keyActions, false);
for(var i=0; i<btnObj.length; i++){
btnObj[i].addEventListener("click", function(e){
switch(this.getAttribute('data-action')){
case 'resume':
btnActions.pauseOrResume();
break;
case 'start':
menuObj.style.display = 'none';
tipObj.style.display = 'block';
btnActions.pauseOrResume(true);
/*Event Bindings*/
window.addEventListener("keydown", btnActions.keyActions, false);
break;
}
});
}
}
btnActions = {
speeder:0,
keyActions: function(e){
if(e && e.which == 27){
btnActions.pauseOrResume();
return false;
}
if(is_started && physics.getPlayStatus()) return false;
if(player.hits == player.life){
return false;
}
if(e && !is_started){
tipObj.style.display = 'none';
physics.resume();
is_started = true;
}
var vel = player.obj.GetLinearVelocity();
vel.x = (player.hits) ? 10 - (player.hits*2) : 10;
btnActions.speeder = btnActions.speeder+0.2;
vel.x = vel.x+btnActions.speeder++;
vel.y = -10;
player.obj.SetLinearVelocity(vel);
var im = {x : 24.0, y : 0.0}
player.obj.ApplyImpulse(im, player.obj.GetPosition());
//console.log(player.obj)
},
pauseOrResume: function(is_pause){
if(!physics.getPlayStatus()){
physics.pause();
pauseObj.style.display = 'block';
}else{
physics.resume();
pauseObj.style.display = 'none';
}
if(is_pause){
physics.pause();
return false;
}
}
}
function setPillarsAndWalls(physics){
var inner_height = physics.element.height/physics.scale;
var wt = 4, orig_ht = ht = inner_height/1.4;
var bool = 1;
var x, y, pad = 1;
var arr = [];
for (var i = -6 ; i <= ht-10; i++) {
arr.push(i);
}
//arr[Math.floor(Math.random()*arr.length)];
for (var i = 5; i < 3000; i++){
ht = ht - arr[Math.floor(Math.random()*arr.length)];
//console.log('ov: '+ht)
if(ht > orig_ht){
ht = orig_ht;
}else if(ht <= 15){
ht = 15;
}
//console.log('nv: '+ht)
if(bool){
x = (wt+pad)*(i);
y = 0;
bool = 0;
}else{
x = (wt+pad)*(i-1);
y = inner_height; //25
bool = 1;
}
beams.obj.push(new Body(physics, { image: beams.img, type: "static", x: x, y: y, height: ht, width: wt, userData:{name:'pillar'}, sensor: false }));
}
var beamWidth = beams.obj[beams.obj.length-1].details.x+8;
// Create some walls
walls.left = new Body(physics, { color: "rgb(93, 198, 250)", type: "static", x: 0, y: 0, height: physics.element.height, width: 0.5, userData:{name:'wall'} });
walls.right = new Body(physics, { color: "red", type: "static", x: beamWidth, y: 0, height: physics.element.height, width: 0.5, userData:{name:'wall'}});
walls.top = new Body(physics, { color: "rgb(93, 198, 250)", type: "static", x: 0, y: 0, height: 0.5, width: beamWidth, userData:{name:'wall'} });
walls.bottom = new Body(physics, { color: "rgb(72, 76, 77)", type: "static", x: beamWidth/2, y:inner_height, height: 0.5, width: beamWidth, userData:{name:'wall'} });
}
function setCoins(physics){
var x, y;
var counter = 0;
// 100 iterations
var increase = Math.PI * 2 / 100;
for (var i = 25; i <= 15000; i +=6 ) {
x = i;
y = Math.sin(counter) / 2 + 18;
counter += increase * i;
var coin = new Body(physics, { shape: 'circle2', image: coins.img[Math.floor(Math.random()*coins.img.length)], type: "static", x: x, y: y, height: 1.4, width: 1.4, radius:1.4/2, userData:{name:'coins', value:1, i:i}});
coins.obj.push(coin);
}
}
function init() {
var preloader = [];
var imgArray = ['images/flappy-pacman-logo.png', 'images/log.png', 'images/smily-40.png', 'images/smily-40-1.png', 'images/smily-40-2.png', 'images/smily-40-3.png', 'images/coins/c1.png', 'images/coins/c2.png', 'images/coins/c3.png', 'images/coins/c4.png', 'images/coins/c5.png', 'images/coins/c6.png', 'images/coins/c7.png', 'images/coins/c8.png', 'images/coins/c9.png', 'images/coins/c10.png', 'images/coins/c11.png', 'images/coins/c12.png', 'images/coins/c13.png', 'images/coins/c14.png', 'images/coins/c15.png', 'images/coins/c16.png', 'images/coins/c17.png', 'images/coins/c18.png', 'images/coins/c19.png', 'images/coins/c20.png', 'images/coins/c21.png', 'images/bang-hit.png'];
var $i = 0;
function loadImg(){
preloader[$i] = new Image();
preloader[$i].src = imgArray[$i];
preloader[$i].onload = function(){
$i++;
if($i == imgArray.length){
for(var i = 0; i<=21-1; i++){
coins.img[i] = new Image();
coins.img[i].src = 'images/coins/c'+(i+1)+'.png';
}
beams.img.src = 'images/log.png';
player.balls[0].src = 'images/smily-40.png';
player.balls[1].src = 'images/smily-40-1.png';
player.balls[2].src = 'images/smily-40-2.png';
player.balls[3].src = 'images/smily-40-3.png';
player.openBall.src = 'images/smily-40-eat.png';
createWorld();
requestAnimationFrame(gameLoop);
setTimeout(function(){physics.pause();}, 1000);
loaderObj.style.display = 'none';
menuObj.style.display = 'block';
return true;
}
loadImg();
}
}
loadImg();
}
window.addEventListener("load",init);
}());
function showHiteffect(){
var v = player.obj.GetPosition();
var posX = -v.x * physics.scale + physics.element.width / 2;
var posY = -v.y * physics.scale + physics.element.height / 2;
bangObj.style.display = 'block';
bangObj.style.left = -posX + (physics.element.width / 2) - 10+'px';
if(v.x < (physics.element.width/physics.scale)/2){
posX = 0;
}else{
bangObj.style.left = (screen.availWidth/2) - 10 + 'px';
}
bangObj.style.top = -posY + (physics.element.height / 2) - 20 +'px';
setTimeout(function(){bangObj.style.display = 'none';}, 200);
}
function setWindowSize(){
var cloud = document.getElementById("sky-layer");
docBody.style.width = window.screen.availWidth + 'px';
docBody.style.height = window.screen.availHeight-61 + 'px';
cvObj.width = window.screen.availWidth;
cvObj.height = window.screen.availHeight-61;
cloud.style.width = cvObj.width + 'px';
cloud.style.height = cvObj.height + 'px';
}
setWindowSize();
// Lastly, add in the `requestAnimationFrame` shim, if necessary. Does nothing
// if `requestAnimationFrame` is already on the `window` object.
(function() {
var lastTime = 0;
var vendors = ['ms', 'moz', 'webkit', 'o'];
for(var x = 0; x < vendors.length && !window.requestAnimationFrame; ++x) {
window.requestAnimationFrame = window[vendors[x]+'RequestAnimationFrame'];
window.cancelAnimationFrame = window[vendors[x]+'CancelAnimationFrame'] || window[vendors[x]+'CancelRequestAnimationFrame'];
}
if (!window.requestAnimationFrame) {
window.requestAnimationFrame = function(callback, element) {
var currTime = new Date().getTime();
var timeToCall = Math.max(0, 16 - (currTime - lastTime));
var id = window.setTimeout(function() { callback(currTime + timeToCall); }, timeToCall);
lastTime = currTime + timeToCall;
return id;
};
}
if (!window.cancelAnimationFrame) {
window.cancelAnimationFrame = function(id) {
clearTimeout(id);
};
}
}()); | Physics.prototype.setGaveOver = function() { | random_line_split |
script.js | var mov = 0,
physics,
lastFrame = new Date().getTime(),
beams = {obj:[], img:new Image()},
player = {obj:null, hits:0, life:3, balls:[new Image(), new Image(), new Image(), new Image()], openBall: new Image(), scores: {pt:0, mt: 0}, needScore:1000},
walls = {},
coins = {obj:[], img:[]},
destroyObj = [],
btnActions,
is_started = false;
var docBody = document.getElementById('container'),
scoresObj = document.getElementById('scores'),
scoreMtObj = scoresObj.querySelector('.mt'),
scorePtObj = scoresObj.querySelector('.pt'),
pauseObj = document.getElementById('pause-menu'),
btnObj = document.querySelectorAll('button'),
loaderObj = document.getElementById('loader'),
menuObj = document.getElementById('game-menu'),
overObj = document.getElementById('over-menu'),
cvObj = document.getElementById("canvas"),
bangObj = document.getElementById("bang"),
tipObj = document.getElementById("tip-menu");
(function() {
var b2Vec2 = Box2D.Common.Math.b2Vec2,
b2BodyDef = Box2D.Dynamics.b2BodyDef,
b2Body = Box2D.Dynamics.b2Body,
b2FixtureDef = Box2D.Dynamics.b2FixtureDef,
b2Fixture = Box2D.Dynamics.b2Fixture,
b2World = Box2D.Dynamics.b2World,
b2MassData = Box2D.Collision.Shapes.b2MassData,
b2PolygonShape = Box2D.Collision.Shapes.b2PolygonShape,
b2CircleShape = Box2D.Collision.Shapes.b2CircleShape,
b2DebugDraw = Box2D.Dynamics.b2DebugDraw;
var Physics = window.Physics = function(element,scale) {
var gravity = new b2Vec2(0,9.8);
this.world = new b2World(gravity, true);
this.element = element;
this.context = element.getContext("2d");
this.scale = scale || 20;
this.dtRemaining = 0;
this.stepAmount = 1/60;
this.isPause = false;
this.gaveOver = false;
};
Physics.prototype.debug = function() {
this.debugDraw = new b2DebugDraw();
this.debugDraw.SetSprite(this.context);
this.debugDraw.SetDrawScale(this.scale);
this.debugDraw.SetFillAlpha(0.3);
this.debugDraw.SetLineThickness(1.0);
this.debugDraw.SetFlags(b2DebugDraw.e_shapeBit | b2DebugDraw.e_jointBit);
this.world.SetDebugDraw(this.debugDraw);
};
Physics.prototype.step = function(dt) {
if(this.isPause) return false;
this.dtRemaining += dt;
while(this.dtRemaining > this.stepAmount) {
this.dtRemaining -= this.stepAmount;
this.world.Step(this.stepAmount,
10, // velocity iterations
10);// position iterations
}
if(this.debugDraw) {
this.world.DrawDebugData();
} else {
var obj = this.world.GetBodyList();
for (var i in destroyObj) {
this.world.DestroyBody(destroyObj[i]);
}
// Reset the array
destroyObj.length = 0;
this.context.setTransform(1,0,0,1,0,0);//reset the transform matrix as it is cumulative
this.context.clearRect(0,0,this.element.width,this.element.height);
var v = player.obj.GetPosition();
var posX = -v.x * this.scale + this.element.width / 2;
var posY = -v.y * this.scale + this.element.height / 2;
scoresObj.style.left = -posX + (this.element.width / 2) - 70 +'px';
if(v.x < (this.element.width/this.scale)/2){
posX = 0;
}else{
scoresObj.style.left = (screen.availWidth/2) - 70 + 'px';
}
scoresObj.style.top = -posY + (this.element.height / 2) +'px';
this.context.translate(posX, 0); //posY
this.context.save();
this.context.scale(this.scale,this.scale);
while(obj) {
var body = obj.GetUserData();
if(body) {
body.draw(this.context);
}
obj = obj.GetNext();
}
this.context.restore();
}
};
Physics.prototype.click = function(callback) {
var self = this;
function handleClick(e) {
e.preventDefault();
var point = {
x: (e.offsetX || e.layerX) / self.scale,
y: (e.offsetY || e.layerY) / self.scale
};
self.world.QueryPoint(function(fixture) {
callback(fixture.GetBody(), fixture, point);
}, point);
}
};
Physics.prototype.collision = function() {
this.listener = new Box2D.Dynamics.b2ContactListener();
this.listener.BeginContact = function(contact,impulse) {
if(physics.getGaveOver()) return false;
if(contact.GetFixtureB().GetBody().GetUserData().details.userData.name == 'coins'){
destroyObj.push(contact.GetFixtureB().GetBody());
var x = player.obj.GetUserData();
x.details.image = player.openBall;
player.obj.SetUserData(x);
player.scores.pt++;
scorePtObj.innerText = player.scores.pt;
//if(player.scores.pt >= )
}
if(contact.GetFixtureA().GetBody().GetUserData().details.userData.name == 'pillar' || contact.GetFixtureA().GetBody().GetUserData().details.userData.name == 'wall'){
//return false;
if(physics.getGaveOver()) return false;
if(contact.GetFixtureA().GetBody().GetUserData().details.userData.name == 'pillar')
player.hits++;
else
player.hits = player.life; //set game over
btnActions.speeder = 0;
var x = player.obj.GetUserData();
x.details.image = player.balls[player.hits]; //openBall
x.fixtureDef.density = 1;
x.fixtureDef.restitution = 1;
x.fixtureDef.friction = 4
player.obj.SetUserData(x);
showHiteffect();
if(player.hits >= player.life){
physics.setGaveOver();
window.removeEventListener("keydown", btnActions.keyActions, false);
overObj.style.display = 'block';
var collectedObj = overObj.querySelector('.collected');
collectedObj.innerHTML = 'Fruits Collected: <span>'+player.scores.pt+'</span>';
collectedObj.style.display = 'block';
return false;
}
}
}
this.listener.EndContact = function (contact) {
if(contact.GetFixtureB().GetBody().GetUserData().details.userData.name == 'coins'){
//physics.world.DestroyBody(contact.GetFixtureB().GetBody());
setTimeout(function(){
var x = player.obj.GetUserData();
x.details.image = player.balls[player.hits];
player.obj.SetUserData(x);
}, 200);
}
};
this.listener.PreSolve = function (contact, oldManifold) {
//console.log('hit')
};
this.listener.PostSolve = function(contact,impulse) {
var bodyA = contact.GetFixtureA().GetBody().GetUserData(),
bodyB = contact.GetFixtureB().GetBody().GetUserData();
if(bodyA.contact) { bodyA.contact(contact,impulse,true) }
if(bodyB.contact) { bodyB.contact(contact,impulse,false) }
//console.log('XXXXXX');console.log(contact);console.log(bodyA);console.log(bodyB);console.log('XXXXXX');
};
this.world.SetContactListener(this.listener);
};
Physics.prototype.resume = function() {
this.isPause = false;
}
Physics.prototype.pause = function() {
this.isPause = true;
}
Physics.prototype.setGaveOver = function() {
this.gaveOver = true;
}
Physics.prototype.getGaveOver = function() {
return this.gaveOver;
}
Physics.prototype.getPlayStatus = function() {
return this.isPause;
}
var Body = window.Body = function(physics,details) {
this.details = details = details || {};
// Create the definition
this.definition = new b2BodyDef();
// Set up the definition
for(var k in this.definitionDefaults) {
this.definition[k] = details[k] || this.definitionDefaults[k];
}
this.definition.position = new b2Vec2(details.x || 0, details.y || 0);
this.definition.linearVelocity = new b2Vec2(details.vx || 0, details.vy || 0);
this.definition.userData = this;
this.definition.type = details.type == "static" ? b2Body.b2_staticBody : b2Body.b2_dynamicBody;
// Create the Body
this.body = physics.world.CreateBody(this.definition);
// Create the fixture
this.fixtureDef = new b2FixtureDef();
for(var l in this.fixtureDefaults) {
this.fixtureDef[l] = details[l] || this.fixtureDefaults[l];
}
details.shape = details.shape || this.defaults.shape;
switch(details.shape) {
case "circle":
details.radius = details.radius || this.defaults.radius;
this.fixtureDef.shape = new b2CircleShape(details.radius);
break;
case "circle2":
details.radius = details.radius || this.defaults.radius;
this.fixtureDef.shape = new b2CircleShape(details.radius);
this.fixtureDef.isSensor = true;
/*coin.fixtureDef.friction = 0;
coin.fixtureDef.density = 0;
coin.fixtureDef.restitution = 0;
coin.fixtureDef.filter.categoryBits = 4;
coin.fixtureDef.filter.maskBits = 9;*/
break;
case "polygon":
this.fixtureDef.shape = new b2PolygonShape();
this.fixtureDef.shape.SetAsArray(details.points,details.points.length);
break;
case "block":
default:
details.width = details.width || this.defaults.width;
details.height = details.height || this.defaults.height;
this.fixtureDef.shape = new b2PolygonShape();
this.fixtureDef.shape.SetAsBox(details.width/2, details.height/2);
if(details.sensor) this.fixtureDef.isSensor = true;
break;
}
this.body.CreateFixture(this.fixtureDef);
};
Body.prototype.defaults = {
shape: "block",
width: 4,
height: 4,
radius: 1
};
Body.prototype.fixtureDefaults = {
density: 2,
friction: 1,
restitution: 0.2
};
Body.prototype.definitionDefaults = {
active: true,
allowSleep: true,
angle: 0,
angularVelocity: 0,
awake: true,
bullet: false,
fixedRotation: false
};
Body.prototype.draw = function(context) {
var pos = this.body.GetPosition(),
angle = this.body.GetAngle();
context.save();
context.translate(pos.x,pos.y);
context.rotate(angle);
if(this.details.color) {
context.fillStyle = this.details.color;
switch(this.details.shape) {
case "circle":
context.beginPath();
context.arc(0,0,this.details.radius,0,Math.PI*2);
context.fill();
break;
case "circle2":
context.beginPath();
context.arc(0,0,this.details.radius,0,Math.PI*2);
context.fill();
break;
case "polygon":
var points = this.details.points;
context.beginPath();
context.moveTo(points[0].x,points[0].y);
for(var i=1;i<points.length;i++) {
context.lineTo(points[i].x,points[i].y);
}
context.fill();
break;
case "block":
context.fillRect(-this.details.width/2,
-this.details.height/2,
this.details.width,
this.details.height);
default:
break;
}
}
if(this.details.image) {
context.drawImage(this.details.image,
-this.details.width/2,
-this.details.height/2,
this.details.width,
this.details.height);
}
context.restore();
}
window.gameLoop = function() {
var tm = new Date().getTime();
requestAnimationFrame(gameLoop);
var dt = (tm - lastFrame) / 1000;
if(dt > 1/15) { dt = 1/15; }
physics.step(dt);
lastFrame = tm;
player.scores.mt = player.scores.mt+dt;
scoreMtObj.innerText = Math.round(player.scores.mt*10);
};
function createWorld() {
physics = window.physics = new Physics(cvObj);
physics.collision();
var inner_width = physics.element.width / physics.scale;
var inner_height = physics.element.height / physics.scale;
setPillarsAndWalls(physics);
setCoins(physics);
player.obj = new Body(physics, {shape: 'circle', image:player.balls[player.hits], x: 5, y: 20, width: 2, height:2, radius:1, userData:{name:'player'} }).body;
/* setInterval(function(){
//btnActions.keyActions();
var im = {x : 10.0, y : 1.0}
player.obj.ApplyImpulse(im, player.obj.GetPosition());
}, 100); */
/*Event Bindings*/
//window.addEventListener("keydown", btnActions.keyActions, false);
for(var i=0; i<btnObj.length; i++){
btnObj[i].addEventListener("click", function(e){
switch(this.getAttribute('data-action')){
case 'resume':
btnActions.pauseOrResume();
break;
case 'start':
menuObj.style.display = 'none';
tipObj.style.display = 'block';
btnActions.pauseOrResume(true);
/*Event Bindings*/
window.addEventListener("keydown", btnActions.keyActions, false);
break;
}
});
}
}
btnActions = {
speeder:0,
keyActions: function(e){
if(e && e.which == 27){
btnActions.pauseOrResume();
return false;
}
if(is_started && physics.getPlayStatus()) return false;
if(player.hits == player.life){
return false;
}
if(e && !is_started){
tipObj.style.display = 'none';
physics.resume();
is_started = true;
}
var vel = player.obj.GetLinearVelocity();
vel.x = (player.hits) ? 10 - (player.hits*2) : 10;
btnActions.speeder = btnActions.speeder+0.2;
vel.x = vel.x+btnActions.speeder++;
vel.y = -10;
player.obj.SetLinearVelocity(vel);
var im = {x : 24.0, y : 0.0}
player.obj.ApplyImpulse(im, player.obj.GetPosition());
//console.log(player.obj)
},
pauseOrResume: function(is_pause){
if(!physics.getPlayStatus()){
physics.pause();
pauseObj.style.display = 'block';
}else{
physics.resume();
pauseObj.style.display = 'none';
}
if(is_pause){
physics.pause();
return false;
}
}
}
function setPillarsAndWalls(physics){
var inner_height = physics.element.height/physics.scale;
var wt = 4, orig_ht = ht = inner_height/1.4;
var bool = 1;
var x, y, pad = 1;
var arr = [];
for (var i = -6 ; i <= ht-10; i++) {
arr.push(i);
}
//arr[Math.floor(Math.random()*arr.length)];
for (var i = 5; i < 3000; i++){
ht = ht - arr[Math.floor(Math.random()*arr.length)];
//console.log('ov: '+ht)
if(ht > orig_ht){
ht = orig_ht;
}else if(ht <= 15){
ht = 15;
}
//console.log('nv: '+ht)
if(bool){
x = (wt+pad)*(i);
y = 0;
bool = 0;
}else{
x = (wt+pad)*(i-1);
y = inner_height; //25
bool = 1;
}
beams.obj.push(new Body(physics, { image: beams.img, type: "static", x: x, y: y, height: ht, width: wt, userData:{name:'pillar'}, sensor: false }));
}
var beamWidth = beams.obj[beams.obj.length-1].details.x+8;
// Create some walls
walls.left = new Body(physics, { color: "rgb(93, 198, 250)", type: "static", x: 0, y: 0, height: physics.element.height, width: 0.5, userData:{name:'wall'} });
walls.right = new Body(physics, { color: "red", type: "static", x: beamWidth, y: 0, height: physics.element.height, width: 0.5, userData:{name:'wall'}});
walls.top = new Body(physics, { color: "rgb(93, 198, 250)", type: "static", x: 0, y: 0, height: 0.5, width: beamWidth, userData:{name:'wall'} });
walls.bottom = new Body(physics, { color: "rgb(72, 76, 77)", type: "static", x: beamWidth/2, y:inner_height, height: 0.5, width: beamWidth, userData:{name:'wall'} });
}
function setCoins(physics){
var x, y;
var counter = 0;
// 100 iterations
var increase = Math.PI * 2 / 100;
for (var i = 25; i <= 15000; i +=6 ) {
x = i;
y = Math.sin(counter) / 2 + 18;
counter += increase * i;
var coin = new Body(physics, { shape: 'circle2', image: coins.img[Math.floor(Math.random()*coins.img.length)], type: "static", x: x, y: y, height: 1.4, width: 1.4, radius:1.4/2, userData:{name:'coins', value:1, i:i}});
coins.obj.push(coin);
}
}
function init() {
var preloader = [];
var imgArray = ['images/flappy-pacman-logo.png', 'images/log.png', 'images/smily-40.png', 'images/smily-40-1.png', 'images/smily-40-2.png', 'images/smily-40-3.png', 'images/coins/c1.png', 'images/coins/c2.png', 'images/coins/c3.png', 'images/coins/c4.png', 'images/coins/c5.png', 'images/coins/c6.png', 'images/coins/c7.png', 'images/coins/c8.png', 'images/coins/c9.png', 'images/coins/c10.png', 'images/coins/c11.png', 'images/coins/c12.png', 'images/coins/c13.png', 'images/coins/c14.png', 'images/coins/c15.png', 'images/coins/c16.png', 'images/coins/c17.png', 'images/coins/c18.png', 'images/coins/c19.png', 'images/coins/c20.png', 'images/coins/c21.png', 'images/bang-hit.png'];
var $i = 0;
function loadImg(){
preloader[$i] = new Image();
preloader[$i].src = imgArray[$i];
preloader[$i].onload = function(){
$i++;
if($i == imgArray.length){
for(var i = 0; i<=21-1; i++){
coins.img[i] = new Image();
coins.img[i].src = 'images/coins/c'+(i+1)+'.png';
}
beams.img.src = 'images/log.png';
player.balls[0].src = 'images/smily-40.png';
player.balls[1].src = 'images/smily-40-1.png';
player.balls[2].src = 'images/smily-40-2.png';
player.balls[3].src = 'images/smily-40-3.png';
player.openBall.src = 'images/smily-40-eat.png';
createWorld();
requestAnimationFrame(gameLoop);
setTimeout(function(){physics.pause();}, 1000);
loaderObj.style.display = 'none';
menuObj.style.display = 'block';
return true;
}
loadImg();
}
}
loadImg();
}
window.addEventListener("load",init);
}());
function | (){
var v = player.obj.GetPosition();
var posX = -v.x * physics.scale + physics.element.width / 2;
var posY = -v.y * physics.scale + physics.element.height / 2;
bangObj.style.display = 'block';
bangObj.style.left = -posX + (physics.element.width / 2) - 10+'px';
if(v.x < (physics.element.width/physics.scale)/2){
posX = 0;
}else{
bangObj.style.left = (screen.availWidth/2) - 10 + 'px';
}
bangObj.style.top = -posY + (physics.element.height / 2) - 20 +'px';
setTimeout(function(){bangObj.style.display = 'none';}, 200);
}
function setWindowSize(){
var cloud = document.getElementById("sky-layer");
docBody.style.width = window.screen.availWidth + 'px';
docBody.style.height = window.screen.availHeight-61 + 'px';
cvObj.width = window.screen.availWidth;
cvObj.height = window.screen.availHeight-61;
cloud.style.width = cvObj.width + 'px';
cloud.style.height = cvObj.height + 'px';
}
setWindowSize();
// Lastly, add in the `requestAnimationFrame` shim, if necessary. Does nothing
// if `requestAnimationFrame` is already on the `window` object.
(function() {
var lastTime = 0;
var vendors = ['ms', 'moz', 'webkit', 'o'];
for(var x = 0; x < vendors.length && !window.requestAnimationFrame; ++x) {
window.requestAnimationFrame = window[vendors[x]+'RequestAnimationFrame'];
window.cancelAnimationFrame = window[vendors[x]+'CancelAnimationFrame'] || window[vendors[x]+'CancelRequestAnimationFrame'];
}
if (!window.requestAnimationFrame) {
window.requestAnimationFrame = function(callback, element) {
var currTime = new Date().getTime();
var timeToCall = Math.max(0, 16 - (currTime - lastTime));
var id = window.setTimeout(function() { callback(currTime + timeToCall); }, timeToCall);
lastTime = currTime + timeToCall;
return id;
};
}
if (!window.cancelAnimationFrame) {
window.cancelAnimationFrame = function(id) {
clearTimeout(id);
};
}
}()); | showHiteffect | identifier_name |
script.js | var mov = 0,
physics,
lastFrame = new Date().getTime(),
beams = {obj:[], img:new Image()},
player = {obj:null, hits:0, life:3, balls:[new Image(), new Image(), new Image(), new Image()], openBall: new Image(), scores: {pt:0, mt: 0}, needScore:1000},
walls = {},
coins = {obj:[], img:[]},
destroyObj = [],
btnActions,
is_started = false;
var docBody = document.getElementById('container'),
scoresObj = document.getElementById('scores'),
scoreMtObj = scoresObj.querySelector('.mt'),
scorePtObj = scoresObj.querySelector('.pt'),
pauseObj = document.getElementById('pause-menu'),
btnObj = document.querySelectorAll('button'),
loaderObj = document.getElementById('loader'),
menuObj = document.getElementById('game-menu'),
overObj = document.getElementById('over-menu'),
cvObj = document.getElementById("canvas"),
bangObj = document.getElementById("bang"),
tipObj = document.getElementById("tip-menu");
(function() {
var b2Vec2 = Box2D.Common.Math.b2Vec2,
b2BodyDef = Box2D.Dynamics.b2BodyDef,
b2Body = Box2D.Dynamics.b2Body,
b2FixtureDef = Box2D.Dynamics.b2FixtureDef,
b2Fixture = Box2D.Dynamics.b2Fixture,
b2World = Box2D.Dynamics.b2World,
b2MassData = Box2D.Collision.Shapes.b2MassData,
b2PolygonShape = Box2D.Collision.Shapes.b2PolygonShape,
b2CircleShape = Box2D.Collision.Shapes.b2CircleShape,
b2DebugDraw = Box2D.Dynamics.b2DebugDraw;
var Physics = window.Physics = function(element,scale) {
var gravity = new b2Vec2(0,9.8);
this.world = new b2World(gravity, true);
this.element = element;
this.context = element.getContext("2d");
this.scale = scale || 20;
this.dtRemaining = 0;
this.stepAmount = 1/60;
this.isPause = false;
this.gaveOver = false;
};
Physics.prototype.debug = function() {
this.debugDraw = new b2DebugDraw();
this.debugDraw.SetSprite(this.context);
this.debugDraw.SetDrawScale(this.scale);
this.debugDraw.SetFillAlpha(0.3);
this.debugDraw.SetLineThickness(1.0);
this.debugDraw.SetFlags(b2DebugDraw.e_shapeBit | b2DebugDraw.e_jointBit);
this.world.SetDebugDraw(this.debugDraw);
};
Physics.prototype.step = function(dt) {
if(this.isPause) return false;
this.dtRemaining += dt;
while(this.dtRemaining > this.stepAmount) {
this.dtRemaining -= this.stepAmount;
this.world.Step(this.stepAmount,
10, // velocity iterations
10);// position iterations
}
if(this.debugDraw) {
this.world.DrawDebugData();
} else {
var obj = this.world.GetBodyList();
for (var i in destroyObj) {
this.world.DestroyBody(destroyObj[i]);
}
// Reset the array
destroyObj.length = 0;
this.context.setTransform(1,0,0,1,0,0);//reset the transform matrix as it is cumulative
this.context.clearRect(0,0,this.element.width,this.element.height);
var v = player.obj.GetPosition();
var posX = -v.x * this.scale + this.element.width / 2;
var posY = -v.y * this.scale + this.element.height / 2;
scoresObj.style.left = -posX + (this.element.width / 2) - 70 +'px';
if(v.x < (this.element.width/this.scale)/2){
posX = 0;
}else{
scoresObj.style.left = (screen.availWidth/2) - 70 + 'px';
}
scoresObj.style.top = -posY + (this.element.height / 2) +'px';
this.context.translate(posX, 0); //posY
this.context.save();
this.context.scale(this.scale,this.scale);
while(obj) {
var body = obj.GetUserData();
if(body) {
body.draw(this.context);
}
obj = obj.GetNext();
}
this.context.restore();
}
};
Physics.prototype.click = function(callback) {
var self = this;
function handleClick(e) {
e.preventDefault();
var point = {
x: (e.offsetX || e.layerX) / self.scale,
y: (e.offsetY || e.layerY) / self.scale
};
self.world.QueryPoint(function(fixture) {
callback(fixture.GetBody(), fixture, point);
}, point);
}
};
Physics.prototype.collision = function() {
this.listener = new Box2D.Dynamics.b2ContactListener();
this.listener.BeginContact = function(contact,impulse) {
if(physics.getGaveOver()) return false;
if(contact.GetFixtureB().GetBody().GetUserData().details.userData.name == 'coins'){
destroyObj.push(contact.GetFixtureB().GetBody());
var x = player.obj.GetUserData();
x.details.image = player.openBall;
player.obj.SetUserData(x);
player.scores.pt++;
scorePtObj.innerText = player.scores.pt;
//if(player.scores.pt >= )
}
if(contact.GetFixtureA().GetBody().GetUserData().details.userData.name == 'pillar' || contact.GetFixtureA().GetBody().GetUserData().details.userData.name == 'wall'){
//return false;
if(physics.getGaveOver()) return false;
if(contact.GetFixtureA().GetBody().GetUserData().details.userData.name == 'pillar')
player.hits++;
else
player.hits = player.life; //set game over
btnActions.speeder = 0;
var x = player.obj.GetUserData();
x.details.image = player.balls[player.hits]; //openBall
x.fixtureDef.density = 1;
x.fixtureDef.restitution = 1;
x.fixtureDef.friction = 4
player.obj.SetUserData(x);
showHiteffect();
if(player.hits >= player.life){
physics.setGaveOver();
window.removeEventListener("keydown", btnActions.keyActions, false);
overObj.style.display = 'block';
var collectedObj = overObj.querySelector('.collected');
collectedObj.innerHTML = 'Fruits Collected: <span>'+player.scores.pt+'</span>';
collectedObj.style.display = 'block';
return false;
}
}
}
this.listener.EndContact = function (contact) {
if(contact.GetFixtureB().GetBody().GetUserData().details.userData.name == 'coins'){
//physics.world.DestroyBody(contact.GetFixtureB().GetBody());
setTimeout(function(){
var x = player.obj.GetUserData();
x.details.image = player.balls[player.hits];
player.obj.SetUserData(x);
}, 200);
}
};
this.listener.PreSolve = function (contact, oldManifold) {
//console.log('hit')
};
this.listener.PostSolve = function(contact,impulse) {
var bodyA = contact.GetFixtureA().GetBody().GetUserData(),
bodyB = contact.GetFixtureB().GetBody().GetUserData();
if(bodyA.contact) { bodyA.contact(contact,impulse,true) }
if(bodyB.contact) { bodyB.contact(contact,impulse,false) }
//console.log('XXXXXX');console.log(contact);console.log(bodyA);console.log(bodyB);console.log('XXXXXX');
};
this.world.SetContactListener(this.listener);
};
Physics.prototype.resume = function() {
this.isPause = false;
}
Physics.prototype.pause = function() {
this.isPause = true;
}
Physics.prototype.setGaveOver = function() {
this.gaveOver = true;
}
Physics.prototype.getGaveOver = function() {
return this.gaveOver;
}
Physics.prototype.getPlayStatus = function() {
return this.isPause;
}
var Body = window.Body = function(physics,details) {
this.details = details = details || {};
// Create the definition
this.definition = new b2BodyDef();
// Set up the definition
for(var k in this.definitionDefaults) {
this.definition[k] = details[k] || this.definitionDefaults[k];
}
this.definition.position = new b2Vec2(details.x || 0, details.y || 0);
this.definition.linearVelocity = new b2Vec2(details.vx || 0, details.vy || 0);
this.definition.userData = this;
this.definition.type = details.type == "static" ? b2Body.b2_staticBody : b2Body.b2_dynamicBody;
// Create the Body
this.body = physics.world.CreateBody(this.definition);
// Create the fixture
this.fixtureDef = new b2FixtureDef();
for(var l in this.fixtureDefaults) {
this.fixtureDef[l] = details[l] || this.fixtureDefaults[l];
}
details.shape = details.shape || this.defaults.shape;
switch(details.shape) {
case "circle":
details.radius = details.radius || this.defaults.radius;
this.fixtureDef.shape = new b2CircleShape(details.radius);
break;
case "circle2":
details.radius = details.radius || this.defaults.radius;
this.fixtureDef.shape = new b2CircleShape(details.radius);
this.fixtureDef.isSensor = true;
/*coin.fixtureDef.friction = 0;
coin.fixtureDef.density = 0;
coin.fixtureDef.restitution = 0;
coin.fixtureDef.filter.categoryBits = 4;
coin.fixtureDef.filter.maskBits = 9;*/
break;
case "polygon":
this.fixtureDef.shape = new b2PolygonShape();
this.fixtureDef.shape.SetAsArray(details.points,details.points.length);
break;
case "block":
default:
details.width = details.width || this.defaults.width;
details.height = details.height || this.defaults.height;
this.fixtureDef.shape = new b2PolygonShape();
this.fixtureDef.shape.SetAsBox(details.width/2, details.height/2);
if(details.sensor) this.fixtureDef.isSensor = true;
break;
}
this.body.CreateFixture(this.fixtureDef);
};
Body.prototype.defaults = {
shape: "block",
width: 4,
height: 4,
radius: 1
};
Body.prototype.fixtureDefaults = {
density: 2,
friction: 1,
restitution: 0.2
};
Body.prototype.definitionDefaults = {
active: true,
allowSleep: true,
angle: 0,
angularVelocity: 0,
awake: true,
bullet: false,
fixedRotation: false
};
Body.prototype.draw = function(context) {
var pos = this.body.GetPosition(),
angle = this.body.GetAngle();
context.save();
context.translate(pos.x,pos.y);
context.rotate(angle);
if(this.details.color) {
context.fillStyle = this.details.color;
switch(this.details.shape) {
case "circle":
context.beginPath();
context.arc(0,0,this.details.radius,0,Math.PI*2);
context.fill();
break;
case "circle2":
context.beginPath();
context.arc(0,0,this.details.radius,0,Math.PI*2);
context.fill();
break;
case "polygon":
var points = this.details.points;
context.beginPath();
context.moveTo(points[0].x,points[0].y);
for(var i=1;i<points.length;i++) {
context.lineTo(points[i].x,points[i].y);
}
context.fill();
break;
case "block":
context.fillRect(-this.details.width/2,
-this.details.height/2,
this.details.width,
this.details.height);
default:
break;
}
}
if(this.details.image) |
context.restore();
}
window.gameLoop = function() {
var tm = new Date().getTime();
requestAnimationFrame(gameLoop);
var dt = (tm - lastFrame) / 1000;
if(dt > 1/15) { dt = 1/15; }
physics.step(dt);
lastFrame = tm;
player.scores.mt = player.scores.mt+dt;
scoreMtObj.innerText = Math.round(player.scores.mt*10);
};
function createWorld() {
physics = window.physics = new Physics(cvObj);
physics.collision();
var inner_width = physics.element.width / physics.scale;
var inner_height = physics.element.height / physics.scale;
setPillarsAndWalls(physics);
setCoins(physics);
player.obj = new Body(physics, {shape: 'circle', image:player.balls[player.hits], x: 5, y: 20, width: 2, height:2, radius:1, userData:{name:'player'} }).body;
/* setInterval(function(){
//btnActions.keyActions();
var im = {x : 10.0, y : 1.0}
player.obj.ApplyImpulse(im, player.obj.GetPosition());
}, 100); */
/*Event Bindings*/
//window.addEventListener("keydown", btnActions.keyActions, false);
for(var i=0; i<btnObj.length; i++){
btnObj[i].addEventListener("click", function(e){
switch(this.getAttribute('data-action')){
case 'resume':
btnActions.pauseOrResume();
break;
case 'start':
menuObj.style.display = 'none';
tipObj.style.display = 'block';
btnActions.pauseOrResume(true);
/*Event Bindings*/
window.addEventListener("keydown", btnActions.keyActions, false);
break;
}
});
}
}
btnActions = {
speeder:0,
keyActions: function(e){
if(e && e.which == 27){
btnActions.pauseOrResume();
return false;
}
if(is_started && physics.getPlayStatus()) return false;
if(player.hits == player.life){
return false;
}
if(e && !is_started){
tipObj.style.display = 'none';
physics.resume();
is_started = true;
}
var vel = player.obj.GetLinearVelocity();
vel.x = (player.hits) ? 10 - (player.hits*2) : 10;
btnActions.speeder = btnActions.speeder+0.2;
vel.x = vel.x+btnActions.speeder++;
vel.y = -10;
player.obj.SetLinearVelocity(vel);
var im = {x : 24.0, y : 0.0}
player.obj.ApplyImpulse(im, player.obj.GetPosition());
//console.log(player.obj)
},
pauseOrResume: function(is_pause){
if(!physics.getPlayStatus()){
physics.pause();
pauseObj.style.display = 'block';
}else{
physics.resume();
pauseObj.style.display = 'none';
}
if(is_pause){
physics.pause();
return false;
}
}
}
function setPillarsAndWalls(physics){
var inner_height = physics.element.height/physics.scale;
var wt = 4, orig_ht = ht = inner_height/1.4;
var bool = 1;
var x, y, pad = 1;
var arr = [];
for (var i = -6 ; i <= ht-10; i++) {
arr.push(i);
}
//arr[Math.floor(Math.random()*arr.length)];
for (var i = 5; i < 3000; i++){
ht = ht - arr[Math.floor(Math.random()*arr.length)];
//console.log('ov: '+ht)
if(ht > orig_ht){
ht = orig_ht;
}else if(ht <= 15){
ht = 15;
}
//console.log('nv: '+ht)
if(bool){
x = (wt+pad)*(i);
y = 0;
bool = 0;
}else{
x = (wt+pad)*(i-1);
y = inner_height; //25
bool = 1;
}
beams.obj.push(new Body(physics, { image: beams.img, type: "static", x: x, y: y, height: ht, width: wt, userData:{name:'pillar'}, sensor: false }));
}
var beamWidth = beams.obj[beams.obj.length-1].details.x+8;
// Create some walls
walls.left = new Body(physics, { color: "rgb(93, 198, 250)", type: "static", x: 0, y: 0, height: physics.element.height, width: 0.5, userData:{name:'wall'} });
walls.right = new Body(physics, { color: "red", type: "static", x: beamWidth, y: 0, height: physics.element.height, width: 0.5, userData:{name:'wall'}});
walls.top = new Body(physics, { color: "rgb(93, 198, 250)", type: "static", x: 0, y: 0, height: 0.5, width: beamWidth, userData:{name:'wall'} });
walls.bottom = new Body(physics, { color: "rgb(72, 76, 77)", type: "static", x: beamWidth/2, y:inner_height, height: 0.5, width: beamWidth, userData:{name:'wall'} });
}
function setCoins(physics){
var x, y;
var counter = 0;
// 100 iterations
var increase = Math.PI * 2 / 100;
for (var i = 25; i <= 15000; i +=6 ) {
x = i;
y = Math.sin(counter) / 2 + 18;
counter += increase * i;
var coin = new Body(physics, { shape: 'circle2', image: coins.img[Math.floor(Math.random()*coins.img.length)], type: "static", x: x, y: y, height: 1.4, width: 1.4, radius:1.4/2, userData:{name:'coins', value:1, i:i}});
coins.obj.push(coin);
}
}
function init() {
var preloader = [];
var imgArray = ['images/flappy-pacman-logo.png', 'images/log.png', 'images/smily-40.png', 'images/smily-40-1.png', 'images/smily-40-2.png', 'images/smily-40-3.png', 'images/coins/c1.png', 'images/coins/c2.png', 'images/coins/c3.png', 'images/coins/c4.png', 'images/coins/c5.png', 'images/coins/c6.png', 'images/coins/c7.png', 'images/coins/c8.png', 'images/coins/c9.png', 'images/coins/c10.png', 'images/coins/c11.png', 'images/coins/c12.png', 'images/coins/c13.png', 'images/coins/c14.png', 'images/coins/c15.png', 'images/coins/c16.png', 'images/coins/c17.png', 'images/coins/c18.png', 'images/coins/c19.png', 'images/coins/c20.png', 'images/coins/c21.png', 'images/bang-hit.png'];
var $i = 0;
function loadImg(){
preloader[$i] = new Image();
preloader[$i].src = imgArray[$i];
preloader[$i].onload = function(){
$i++;
if($i == imgArray.length){
for(var i = 0; i<=21-1; i++){
coins.img[i] = new Image();
coins.img[i].src = 'images/coins/c'+(i+1)+'.png';
}
beams.img.src = 'images/log.png';
player.balls[0].src = 'images/smily-40.png';
player.balls[1].src = 'images/smily-40-1.png';
player.balls[2].src = 'images/smily-40-2.png';
player.balls[3].src = 'images/smily-40-3.png';
player.openBall.src = 'images/smily-40-eat.png';
createWorld();
requestAnimationFrame(gameLoop);
setTimeout(function(){physics.pause();}, 1000);
loaderObj.style.display = 'none';
menuObj.style.display = 'block';
return true;
}
loadImg();
}
}
loadImg();
}
window.addEventListener("load",init);
}());
function showHiteffect(){
var v = player.obj.GetPosition();
var posX = -v.x * physics.scale + physics.element.width / 2;
var posY = -v.y * physics.scale + physics.element.height / 2;
bangObj.style.display = 'block';
bangObj.style.left = -posX + (physics.element.width / 2) - 10+'px';
if(v.x < (physics.element.width/physics.scale)/2){
posX = 0;
}else{
bangObj.style.left = (screen.availWidth/2) - 10 + 'px';
}
bangObj.style.top = -posY + (physics.element.height / 2) - 20 +'px';
setTimeout(function(){bangObj.style.display = 'none';}, 200);
}
function setWindowSize(){
var cloud = document.getElementById("sky-layer");
docBody.style.width = window.screen.availWidth + 'px';
docBody.style.height = window.screen.availHeight-61 + 'px';
cvObj.width = window.screen.availWidth;
cvObj.height = window.screen.availHeight-61;
cloud.style.width = cvObj.width + 'px';
cloud.style.height = cvObj.height + 'px';
}
setWindowSize();
// Lastly, add in the `requestAnimationFrame` shim, if necessary. Does nothing
// if `requestAnimationFrame` is already on the `window` object.
(function() {
var lastTime = 0;
var vendors = ['ms', 'moz', 'webkit', 'o'];
for(var x = 0; x < vendors.length && !window.requestAnimationFrame; ++x) {
window.requestAnimationFrame = window[vendors[x]+'RequestAnimationFrame'];
window.cancelAnimationFrame = window[vendors[x]+'CancelAnimationFrame'] || window[vendors[x]+'CancelRequestAnimationFrame'];
}
if (!window.requestAnimationFrame) {
window.requestAnimationFrame = function(callback, element) {
var currTime = new Date().getTime();
var timeToCall = Math.max(0, 16 - (currTime - lastTime));
var id = window.setTimeout(function() { callback(currTime + timeToCall); }, timeToCall);
lastTime = currTime + timeToCall;
return id;
};
}
if (!window.cancelAnimationFrame) {
window.cancelAnimationFrame = function(id) {
clearTimeout(id);
};
}
}()); | {
context.drawImage(this.details.image,
-this.details.width/2,
-this.details.height/2,
this.details.width,
this.details.height);
} | conditional_block |
mining_test.go | // Copyright (c) 2018-2020 The asimov developers
// Copyright (c) 2013-2017 The btcsuite developers
// Use of this source code is governed by an ISC
// license that can be found in the LICENSE file.
package mining
import (
"container/heap"
"github.com/AsimovNetwork/asimov/asiutil"
"github.com/AsimovNetwork/asimov/blockchain"
"github.com/AsimovNetwork/asimov/blockchain/txo"
"github.com/AsimovNetwork/asimov/chaincfg"
"github.com/AsimovNetwork/asimov/common"
"github.com/AsimovNetwork/asimov/common/address"
"github.com/AsimovNetwork/asimov/crypto"
"github.com/AsimovNetwork/asimov/protos"
"math"
"math/rand"
"testing"
"time"
)
// TestTxPriceHeap ensures the priority queue for transaction fees and
// priorities works as expected.
func TestTxPriceHeap(t *testing.T) {
// Create some fake priority items that exercise the expected sort
// edge conditions.
testItems := []*TxPrioItem{
{gasPrice: 5678,},
{gasPrice: 1234,},
{gasPrice: 10001,},
{gasPrice: 0,},
}
// Add random data in addition to the edge conditions already manually
// specified.
randSeed := rand.Int63()
defer func() {
if t.Failed() {
t.Logf("Random numbers using seed: %v", randSeed)
}
}()
prng := rand.New(rand.NewSource(randSeed))
for i := 0; i < 1000; i++ {
testItems = append(testItems, &TxPrioItem{
gasPrice: prng.Float64() * 10000,
})
}
// Test sorting by fee per KB then priority.
var highest *TxPrioItem
priorityQueue := NewTxPriorityQueue(len(testItems))
for i := 0; i < len(testItems); i++ {
prioItem := testItems[i]
if highest == nil {
highest = prioItem
}
if prioItem.gasPrice >= highest.gasPrice {
highest = prioItem
}
heap.Push(priorityQueue, prioItem)
}
for i := 0; i < len(testItems); i++ {
prioItem := heap.Pop(priorityQueue).(*TxPrioItem)
if prioItem.gasPrice > highest.gasPrice {
t.Fatalf("fee sort: item (fee per KB: %v) higher than than prev "+
"(fee per KB: %v)", prioItem.gasPrice, highest.gasPrice, )
}
highest = prioItem
}
}
func TestCreateCoinbaseTx(t *testing.T) {
privKey, _ := crypto.NewPrivateKey(crypto.S256())
pkaddr, _ := address.NewAddressPubKey(privKey.PubKey().SerializeCompressed())
addr := pkaddr.AddressPubKeyHash()
tests := []struct {
validater common.IAddress
height int32
wantErr bool
}{
{
pkaddr,
1,
false,
}, {
addr,
1,
false,
}, {
&common.Address{},
1,
true,
},
}
t.Logf("Running %d tests", len(tests))
for i, test := range tests {
_, _, err := CreateCoinbaseTx(&chaincfg.MainNetParams, test.height, test.validater, nil)
if test.wantErr != (err != nil) {
t.Errorf("tests #%d error %v", i, err)
}
}
}
func TestNewBlockTemplate(t *testing.T) {
policy := Policy{
BlockProductedTimeOut: chaincfg.DefaultBlockProductedTimeOut,
TxConnectTimeOut: chaincfg.DefaultTxConnectTimeOut,
UtxoValidateTimeOut: chaincfg.DefaultUtxoValidateTimeOut,
}
chain, teardownFunc, err := newFakeChain(&chaincfg.MainNetParams)
if err != nil {
t.Error("newFakeChain error: ", err)
return
}
fakeTxSource := &fakeTxSource{make(map[common.Hash]*TxDesc)}
fakeSigSource := &fakeSigSource{make([]*asiutil.BlockSign, 0)}
g := NewBlkTmplGenerator(
&policy,
fakeTxSource,
fakeSigSource,
chain,
)
defer teardownFunc()
global_view := txo.NewUtxoViewpoint()
g.FetchUtxoView = func(tx *asiutil.Tx, dolock bool) (viewpoint *txo.UtxoViewpoint, e error) {
neededSet := make(map[protos.OutPoint]struct{})
prevOut := protos.OutPoint{Hash: *tx.Hash()}
for txOutIdx := range tx.MsgTx().TxOut {
prevOut.Index = uint32(txOutIdx)
neededSet[prevOut] = struct{}{}
}
if !blockchain.IsCoinBase(tx) {
for _, txIn := range tx.MsgTx().TxIn {
neededSet[txIn.PreviousOutPoint] = struct{}{}
}
}
// Request the utxos from the point of view of the end of the main
// chain.
view := txo.NewUtxoViewpoint()
for k, _ := range neededSet {
view.AddEntry(k,global_view.LookupEntry(k))
}
return view, nil
}
invaildAsset := protos.NewAsset(0, 0, 1)
keys := []*crypto.Account{}
for i := 0; i < 16; i++ {
privKey, _ := crypto.NewPrivateKey(crypto.S256())
pkaddr, _ := address.NewAddressPubKey(privKey.PubKey().SerializeCompressed())
addr := pkaddr.AddressPubKeyHash()
keys = append(keys, &crypto.Account {*privKey, *privKey.PubKey(),addr})
}
fakeTxs := TxDescList{
{Tx: createFakeTx([]*fakeIn{
{
keys[0], 1e8, &asiutil.AsimovAsset, 0, false, 0, common.HexToHash("1"),
},
}, []*fakeOut{
{
keys[1].Address, 1e8 - 1e4, &asiutil.AsimovAsset,
},
}, global_view), GasPrice: 1},
{Tx: createFakeTx([]*fakeIn{
{
keys[0], 1e8, &asiutil.AsimovAsset, 1, false, 0, common.HexToHash("1"),
},
}, []*fakeOut{
{
keys[1].Address, 1e8, &asiutil.AsimovAsset,
},
}, global_view), GasPrice: 2},
{Tx: createFakeTx([]*fakeIn{
{
keys[0], 1e18, &asiutil.AsimovAsset, 0, false, 0, common.HexToHash("2"),
}, {
keys[0], 1e4, &asiutil.AsimovAsset, 1, false, 0, common.HexToHash("3"),
},
}, []*fakeOut{
{
keys[0].Address, 1e18 - 1e12, &asiutil.AsimovAsset,
},
}, global_view), GasPrice: 3},
{Tx: createFakeTx([]*fakeIn{
{
keys[0], 1234567890, &asiutil.AsimovAsset, 3, false, 0, common.HexToHash("4"),
}, {
keys[1], 1e6, &asiutil.AsimovAsset, 5, false, 0, common.HexToHash("4"),
}, {
keys[3], 1e4, &asiutil.AsimovAsset, 8, false, 0, common.HexToHash("5"),
},
}, []*fakeOut{
{
keys[2].Address, 1234567890 + 1e6, &asiutil.AsimovAsset,
},
}, global_view), GasPrice: 4},
{Tx: createFakeTx([]*fakeIn{
{
keys[0], 1e4, &asiutil.AsimovAsset, 0, false, 0, common.HexToHash("5"),
},
}, []*fakeOut{
{
keys[0].Address, 1e3, &asiutil.AsimovAsset,
}, {
keys[2].Address, 1e3, &asiutil.AsimovAsset,
}, {
keys[1].Address, 1e3, &asiutil.AsimovAsset,
}, {
keys[1].Address, 1e3, &asiutil.AsimovAsset,
}, {
keys[0].Address, 6e3 - 1, &asiutil.AsimovAsset,
},
}, global_view), GasPrice: 5},
{Tx: createFakeTx([]*fakeIn{
{
keys[0], 1, &asiutil.AsimovAsset, 0, false, 0, common.HexToHash("6"),
}, {
keys[1], 1e6, &asiutil.AsimovAsset, 1, false, 0, common.HexToHash("6"),
}, {
keys[2], 1e4, &asiutil.AsimovAsset, 2, false, 0, common.HexToHash("6"),
}, {
keys[2], 1e4, &asiutil.AsimovAsset, 2, false, 0, common.HexToHash("7"),
}, {
keys[3], 1e4, &asiutil.AsimovAsset, 4, false, 0, common.HexToHash("7"),
},
}, []*fakeOut{
{
keys[2].Address, 1e6, &asiutil.AsimovAsset,
}, {
keys[2].Address, 1e4 - 1, &asiutil.AsimovAsset,
}, {
keys[4].Address, 1e4, &asiutil.AsimovAsset,
}, {
keys[5].Address, 1e3, &asiutil.AsimovAsset,
}, {
keys[5].Address, 1e3, &asiutil.AsimovAsset,
}, {
keys[6].Address, 8e3 - 1, &asiutil.AsimovAsset,
},
}, global_view), GasPrice: 6},
}
//create tx depend last tx
fakeTxs = append(fakeTxs, &TxDesc{Tx: createFakeTx([]*fakeIn{
{
keys[5], 1e3, &asiutil.AsimovAsset, 4, false, 0x7FFFFFFF, *fakeTxs[len(fakeTxs)-1].Tx.Hash(),
},
}, []*fakeOut{
{
keys[0].Address, 1e3 - 2, &asiutil.AsimovAsset,
},
}, nil), GasPrice: 7})
invalidFakeTxs := TxDescList{
{Tx: createFakeTx([]*fakeIn{
{
keys[0], 1e8, &asiutil.AsimovAsset, 0, false, 0, common.HexToHash("1"),
},
}, []*fakeOut{
{
keys[1].Address, 1e8 - 1e4, &asiutil.AsimovAsset,
},
}, global_view), GasPrice: 1},
{Tx: createFakeTx([]*fakeIn{
{
keys[0], 1e8, &asiutil.AsimovAsset, math.MaxUint32, true, 0, common.HexToHash("0"),
},
}, []*fakeOut{
{
keys[1].Address, 1e8 - 1, &asiutil.AsimovAsset,
},
}, global_view), GasPrice: 1},
{Tx: createFakeTx([]*fakeIn{
{
keys[0], 1e8, &asiutil.AsimovAsset, 0, false, 0, common.HexToHash("8"),
},
}, []*fakeOut{
{
keys[1].Address, 1e8 + 1, &asiutil.AsimovAsset,
},
}, global_view), GasPrice: 1},
{Tx: createFakeTx([]*fakeIn{
{
keys[0], 1e8, &asiutil.AsimovAsset, 3, false, 0, common.HexToHash("8"),
},
}, []*fakeOut{
{
keys[1].Address, 1e8 - 1, invaildAsset,
},
}, global_view), GasPrice: 1},
{Tx: createFakeTx([]*fakeIn{
{
keys[0], 1e8, invaildAsset, 4, false, 0, common.HexToHash("8"),
},
}, []*fakeOut{
{
keys[1].Address, 1e8 - 1, invaildAsset,
},
}, global_view), GasPrice: 1},
{Tx: createFakeTx([]*fakeIn{
{
keys[0], 1e8, &asiutil.AsimovAsset, 5, false, 0, common.HexToHash("8"),
},
}, []*fakeOut{
{
keys[1].Address, 1e8 - 1, &asiutil.AsimovAsset,
},
}, nil), GasPrice: 1},
}
getFees := func(amounts int64) map[protos.Asset]int64 {
res := make(map[protos.Asset]int64)
res[asiutil.AsimovAsset] = amounts
return res
}
privateKey := "0xd0f0461b7b4d26cf370e6c73b58ef7fa26e8e30853a8cee901ed42cf0879cb6e"
account,_ := crypto.NewAccount(privateKey)
tests := []struct {
validator *crypto.Account
gasFloor uint64
gasCeil uint64
round uint32
slot uint16
txs TxDescList
wantTx []*common.Hash
wantFees map[protos.Asset]int64
wantOpCosts []int64
wantWeight uint16
wantErr bool
}{
{
account, 160000000, 160000000, 1, 0, TxDescList{},
[]*common.Hash{},
make(map[protos.Asset]int64),
[]int64{1}, 720, false,
}, {
account, 160000000, 160000000, 1, 0, fakeTxs[0:1],
[]*common.Hash{fakeTxs[0].Tx.Hash()},
getFees(1e4),
[]int64{1, 1}, 720, false,
}, {
account, 160000000, 160000000, 1, 0, fakeTxs[1:7],
[]*common.Hash{fakeTxs[5].Tx.Hash(), fakeTxs[6].Tx.Hash(), fakeTxs[4].Tx.Hash(), fakeTxs[3].Tx.Hash(), fakeTxs[2].Tx.Hash(), fakeTxs[1].Tx.Hash()},
getFees(1 + 1 + 1e12 + 1e4 + 1 + 1e4 + 3),
[]int64{1, 6, 1, 5, 1, 1, 1}, 720, false,
}, {
account, 160000000, 160000000, 1, 0, invalidFakeTxs,
[]*common.Hash{},
make(map[protos.Asset]int64),
[]int64{1}, 720, false,
}, {
keys[0], 160000000, 160000000, 1, 0, TxDescList{},
[]*common.Hash{},
make(map[protos.Asset]int64),
[]int64{1}, 0, true,
},
}
t.Logf("Running %d tests", len(tests))
for i, test := range tests {
fakeTxSource.clear()
for _, v := range test.txs {
fakeTxSource.push(v)
}
template, err := g.ProduceNewBlock(test.validator, test.gasFloor, test.gasCeil,
time.Now().Unix(), test.round, test.slot, 5*100000)
if err != nil {
if test.wantErr != true {
t.Errorf("tests #%d error %v", i, err)
}
continue
}
block := template.Block
txs := block.MsgBlock().Transactions
if block.MsgBlock().Header.CoinBase != *test.validator.Address ||
block.MsgBlock().Header.Round != test.round ||
block.MsgBlock().Header.SlotIndex != test.slot ||
block.MsgBlock().Header.Weight != test.wantWeight {
t.Errorf("tests #%d Coinbase: %v ,Round: %v ,Slot: %v Weight: %v",
i, block.MsgBlock().Header.CoinBase, block.MsgBlock().Header.Round, block.MsgBlock().Header.SlotIndex, block.MsgBlock().Header.Weight)
}
outTxEqual := func(ltxs []*protos.MsgTx, rtxs []*common.Hash) bool {
if len(ltxs) != len(rtxs) |
for k, v := range ltxs {
if v.TxHash() != *rtxs[k] {
return false
}
}
return true
}
t.Log(i)
for _, v := range txs {
t.Log(v.TxHash())
}
t.Log(test.wantTx)
if !outTxEqual(txs[:len(txs)-1], test.wantTx) {
t.Errorf("tests #%d out tx error, txlen %d, want tx: %v", i, len(txs), test.wantTx)
}
feesEqual := func(outs []*protos.TxOut, r map[protos.Asset]int64) bool {
for _, out := range outs {
if out.Asset != asiutil.AsimovAsset {
if out.Value != r[out.Asset]{
return false
}
}
}
return true
}
coinbase := txs[len(txs)-1]
if !feesEqual(coinbase.TxOut, test.wantFees) {
t.Errorf("tests #%d fees error,coinbase out: %v ,want fees: %v", i, coinbase.TxOut, test.wantFees)
}
}
}
| {
return false
} | conditional_block |
mining_test.go | // Copyright (c) 2018-2020 The asimov developers
// Copyright (c) 2013-2017 The btcsuite developers
// Use of this source code is governed by an ISC
// license that can be found in the LICENSE file.
package mining
import (
"container/heap"
"github.com/AsimovNetwork/asimov/asiutil"
"github.com/AsimovNetwork/asimov/blockchain"
"github.com/AsimovNetwork/asimov/blockchain/txo"
"github.com/AsimovNetwork/asimov/chaincfg"
"github.com/AsimovNetwork/asimov/common"
"github.com/AsimovNetwork/asimov/common/address"
"github.com/AsimovNetwork/asimov/crypto"
"github.com/AsimovNetwork/asimov/protos"
"math"
"math/rand"
"testing"
"time"
)
// TestTxPriceHeap ensures the priority queue for transaction fees and
// priorities works as expected.
func | (t *testing.T) {
// Create some fake priority items that exercise the expected sort
// edge conditions.
testItems := []*TxPrioItem{
{gasPrice: 5678,},
{gasPrice: 1234,},
{gasPrice: 10001,},
{gasPrice: 0,},
}
// Add random data in addition to the edge conditions already manually
// specified.
randSeed := rand.Int63()
defer func() {
if t.Failed() {
t.Logf("Random numbers using seed: %v", randSeed)
}
}()
prng := rand.New(rand.NewSource(randSeed))
for i := 0; i < 1000; i++ {
testItems = append(testItems, &TxPrioItem{
gasPrice: prng.Float64() * 10000,
})
}
// Test sorting by fee per KB then priority.
var highest *TxPrioItem
priorityQueue := NewTxPriorityQueue(len(testItems))
for i := 0; i < len(testItems); i++ {
prioItem := testItems[i]
if highest == nil {
highest = prioItem
}
if prioItem.gasPrice >= highest.gasPrice {
highest = prioItem
}
heap.Push(priorityQueue, prioItem)
}
for i := 0; i < len(testItems); i++ {
prioItem := heap.Pop(priorityQueue).(*TxPrioItem)
if prioItem.gasPrice > highest.gasPrice {
t.Fatalf("fee sort: item (fee per KB: %v) higher than than prev "+
"(fee per KB: %v)", prioItem.gasPrice, highest.gasPrice, )
}
highest = prioItem
}
}
func TestCreateCoinbaseTx(t *testing.T) {
privKey, _ := crypto.NewPrivateKey(crypto.S256())
pkaddr, _ := address.NewAddressPubKey(privKey.PubKey().SerializeCompressed())
addr := pkaddr.AddressPubKeyHash()
tests := []struct {
validater common.IAddress
height int32
wantErr bool
}{
{
pkaddr,
1,
false,
}, {
addr,
1,
false,
}, {
&common.Address{},
1,
true,
},
}
t.Logf("Running %d tests", len(tests))
for i, test := range tests {
_, _, err := CreateCoinbaseTx(&chaincfg.MainNetParams, test.height, test.validater, nil)
if test.wantErr != (err != nil) {
t.Errorf("tests #%d error %v", i, err)
}
}
}
func TestNewBlockTemplate(t *testing.T) {
policy := Policy{
BlockProductedTimeOut: chaincfg.DefaultBlockProductedTimeOut,
TxConnectTimeOut: chaincfg.DefaultTxConnectTimeOut,
UtxoValidateTimeOut: chaincfg.DefaultUtxoValidateTimeOut,
}
chain, teardownFunc, err := newFakeChain(&chaincfg.MainNetParams)
if err != nil {
t.Error("newFakeChain error: ", err)
return
}
fakeTxSource := &fakeTxSource{make(map[common.Hash]*TxDesc)}
fakeSigSource := &fakeSigSource{make([]*asiutil.BlockSign, 0)}
g := NewBlkTmplGenerator(
&policy,
fakeTxSource,
fakeSigSource,
chain,
)
defer teardownFunc()
global_view := txo.NewUtxoViewpoint()
g.FetchUtxoView = func(tx *asiutil.Tx, dolock bool) (viewpoint *txo.UtxoViewpoint, e error) {
neededSet := make(map[protos.OutPoint]struct{})
prevOut := protos.OutPoint{Hash: *tx.Hash()}
for txOutIdx := range tx.MsgTx().TxOut {
prevOut.Index = uint32(txOutIdx)
neededSet[prevOut] = struct{}{}
}
if !blockchain.IsCoinBase(tx) {
for _, txIn := range tx.MsgTx().TxIn {
neededSet[txIn.PreviousOutPoint] = struct{}{}
}
}
// Request the utxos from the point of view of the end of the main
// chain.
view := txo.NewUtxoViewpoint()
for k, _ := range neededSet {
view.AddEntry(k,global_view.LookupEntry(k))
}
return view, nil
}
invaildAsset := protos.NewAsset(0, 0, 1)
keys := []*crypto.Account{}
for i := 0; i < 16; i++ {
privKey, _ := crypto.NewPrivateKey(crypto.S256())
pkaddr, _ := address.NewAddressPubKey(privKey.PubKey().SerializeCompressed())
addr := pkaddr.AddressPubKeyHash()
keys = append(keys, &crypto.Account {*privKey, *privKey.PubKey(),addr})
}
fakeTxs := TxDescList{
{Tx: createFakeTx([]*fakeIn{
{
keys[0], 1e8, &asiutil.AsimovAsset, 0, false, 0, common.HexToHash("1"),
},
}, []*fakeOut{
{
keys[1].Address, 1e8 - 1e4, &asiutil.AsimovAsset,
},
}, global_view), GasPrice: 1},
{Tx: createFakeTx([]*fakeIn{
{
keys[0], 1e8, &asiutil.AsimovAsset, 1, false, 0, common.HexToHash("1"),
},
}, []*fakeOut{
{
keys[1].Address, 1e8, &asiutil.AsimovAsset,
},
}, global_view), GasPrice: 2},
{Tx: createFakeTx([]*fakeIn{
{
keys[0], 1e18, &asiutil.AsimovAsset, 0, false, 0, common.HexToHash("2"),
}, {
keys[0], 1e4, &asiutil.AsimovAsset, 1, false, 0, common.HexToHash("3"),
},
}, []*fakeOut{
{
keys[0].Address, 1e18 - 1e12, &asiutil.AsimovAsset,
},
}, global_view), GasPrice: 3},
{Tx: createFakeTx([]*fakeIn{
{
keys[0], 1234567890, &asiutil.AsimovAsset, 3, false, 0, common.HexToHash("4"),
}, {
keys[1], 1e6, &asiutil.AsimovAsset, 5, false, 0, common.HexToHash("4"),
}, {
keys[3], 1e4, &asiutil.AsimovAsset, 8, false, 0, common.HexToHash("5"),
},
}, []*fakeOut{
{
keys[2].Address, 1234567890 + 1e6, &asiutil.AsimovAsset,
},
}, global_view), GasPrice: 4},
{Tx: createFakeTx([]*fakeIn{
{
keys[0], 1e4, &asiutil.AsimovAsset, 0, false, 0, common.HexToHash("5"),
},
}, []*fakeOut{
{
keys[0].Address, 1e3, &asiutil.AsimovAsset,
}, {
keys[2].Address, 1e3, &asiutil.AsimovAsset,
}, {
keys[1].Address, 1e3, &asiutil.AsimovAsset,
}, {
keys[1].Address, 1e3, &asiutil.AsimovAsset,
}, {
keys[0].Address, 6e3 - 1, &asiutil.AsimovAsset,
},
}, global_view), GasPrice: 5},
{Tx: createFakeTx([]*fakeIn{
{
keys[0], 1, &asiutil.AsimovAsset, 0, false, 0, common.HexToHash("6"),
}, {
keys[1], 1e6, &asiutil.AsimovAsset, 1, false, 0, common.HexToHash("6"),
}, {
keys[2], 1e4, &asiutil.AsimovAsset, 2, false, 0, common.HexToHash("6"),
}, {
keys[2], 1e4, &asiutil.AsimovAsset, 2, false, 0, common.HexToHash("7"),
}, {
keys[3], 1e4, &asiutil.AsimovAsset, 4, false, 0, common.HexToHash("7"),
},
}, []*fakeOut{
{
keys[2].Address, 1e6, &asiutil.AsimovAsset,
}, {
keys[2].Address, 1e4 - 1, &asiutil.AsimovAsset,
}, {
keys[4].Address, 1e4, &asiutil.AsimovAsset,
}, {
keys[5].Address, 1e3, &asiutil.AsimovAsset,
}, {
keys[5].Address, 1e3, &asiutil.AsimovAsset,
}, {
keys[6].Address, 8e3 - 1, &asiutil.AsimovAsset,
},
}, global_view), GasPrice: 6},
}
//create tx depend last tx
fakeTxs = append(fakeTxs, &TxDesc{Tx: createFakeTx([]*fakeIn{
{
keys[5], 1e3, &asiutil.AsimovAsset, 4, false, 0x7FFFFFFF, *fakeTxs[len(fakeTxs)-1].Tx.Hash(),
},
}, []*fakeOut{
{
keys[0].Address, 1e3 - 2, &asiutil.AsimovAsset,
},
}, nil), GasPrice: 7})
invalidFakeTxs := TxDescList{
{Tx: createFakeTx([]*fakeIn{
{
keys[0], 1e8, &asiutil.AsimovAsset, 0, false, 0, common.HexToHash("1"),
},
}, []*fakeOut{
{
keys[1].Address, 1e8 - 1e4, &asiutil.AsimovAsset,
},
}, global_view), GasPrice: 1},
{Tx: createFakeTx([]*fakeIn{
{
keys[0], 1e8, &asiutil.AsimovAsset, math.MaxUint32, true, 0, common.HexToHash("0"),
},
}, []*fakeOut{
{
keys[1].Address, 1e8 - 1, &asiutil.AsimovAsset,
},
}, global_view), GasPrice: 1},
{Tx: createFakeTx([]*fakeIn{
{
keys[0], 1e8, &asiutil.AsimovAsset, 0, false, 0, common.HexToHash("8"),
},
}, []*fakeOut{
{
keys[1].Address, 1e8 + 1, &asiutil.AsimovAsset,
},
}, global_view), GasPrice: 1},
{Tx: createFakeTx([]*fakeIn{
{
keys[0], 1e8, &asiutil.AsimovAsset, 3, false, 0, common.HexToHash("8"),
},
}, []*fakeOut{
{
keys[1].Address, 1e8 - 1, invaildAsset,
},
}, global_view), GasPrice: 1},
{Tx: createFakeTx([]*fakeIn{
{
keys[0], 1e8, invaildAsset, 4, false, 0, common.HexToHash("8"),
},
}, []*fakeOut{
{
keys[1].Address, 1e8 - 1, invaildAsset,
},
}, global_view), GasPrice: 1},
{Tx: createFakeTx([]*fakeIn{
{
keys[0], 1e8, &asiutil.AsimovAsset, 5, false, 0, common.HexToHash("8"),
},
}, []*fakeOut{
{
keys[1].Address, 1e8 - 1, &asiutil.AsimovAsset,
},
}, nil), GasPrice: 1},
}
getFees := func(amounts int64) map[protos.Asset]int64 {
res := make(map[protos.Asset]int64)
res[asiutil.AsimovAsset] = amounts
return res
}
privateKey := "0xd0f0461b7b4d26cf370e6c73b58ef7fa26e8e30853a8cee901ed42cf0879cb6e"
account,_ := crypto.NewAccount(privateKey)
tests := []struct {
validator *crypto.Account
gasFloor uint64
gasCeil uint64
round uint32
slot uint16
txs TxDescList
wantTx []*common.Hash
wantFees map[protos.Asset]int64
wantOpCosts []int64
wantWeight uint16
wantErr bool
}{
{
account, 160000000, 160000000, 1, 0, TxDescList{},
[]*common.Hash{},
make(map[protos.Asset]int64),
[]int64{1}, 720, false,
}, {
account, 160000000, 160000000, 1, 0, fakeTxs[0:1],
[]*common.Hash{fakeTxs[0].Tx.Hash()},
getFees(1e4),
[]int64{1, 1}, 720, false,
}, {
account, 160000000, 160000000, 1, 0, fakeTxs[1:7],
[]*common.Hash{fakeTxs[5].Tx.Hash(), fakeTxs[6].Tx.Hash(), fakeTxs[4].Tx.Hash(), fakeTxs[3].Tx.Hash(), fakeTxs[2].Tx.Hash(), fakeTxs[1].Tx.Hash()},
getFees(1 + 1 + 1e12 + 1e4 + 1 + 1e4 + 3),
[]int64{1, 6, 1, 5, 1, 1, 1}, 720, false,
}, {
account, 160000000, 160000000, 1, 0, invalidFakeTxs,
[]*common.Hash{},
make(map[protos.Asset]int64),
[]int64{1}, 720, false,
}, {
keys[0], 160000000, 160000000, 1, 0, TxDescList{},
[]*common.Hash{},
make(map[protos.Asset]int64),
[]int64{1}, 0, true,
},
}
t.Logf("Running %d tests", len(tests))
for i, test := range tests {
fakeTxSource.clear()
for _, v := range test.txs {
fakeTxSource.push(v)
}
template, err := g.ProduceNewBlock(test.validator, test.gasFloor, test.gasCeil,
time.Now().Unix(), test.round, test.slot, 5*100000)
if err != nil {
if test.wantErr != true {
t.Errorf("tests #%d error %v", i, err)
}
continue
}
block := template.Block
txs := block.MsgBlock().Transactions
if block.MsgBlock().Header.CoinBase != *test.validator.Address ||
block.MsgBlock().Header.Round != test.round ||
block.MsgBlock().Header.SlotIndex != test.slot ||
block.MsgBlock().Header.Weight != test.wantWeight {
t.Errorf("tests #%d Coinbase: %v ,Round: %v ,Slot: %v Weight: %v",
i, block.MsgBlock().Header.CoinBase, block.MsgBlock().Header.Round, block.MsgBlock().Header.SlotIndex, block.MsgBlock().Header.Weight)
}
outTxEqual := func(ltxs []*protos.MsgTx, rtxs []*common.Hash) bool {
if len(ltxs) != len(rtxs) {
return false
}
for k, v := range ltxs {
if v.TxHash() != *rtxs[k] {
return false
}
}
return true
}
t.Log(i)
for _, v := range txs {
t.Log(v.TxHash())
}
t.Log(test.wantTx)
if !outTxEqual(txs[:len(txs)-1], test.wantTx) {
t.Errorf("tests #%d out tx error, txlen %d, want tx: %v", i, len(txs), test.wantTx)
}
feesEqual := func(outs []*protos.TxOut, r map[protos.Asset]int64) bool {
for _, out := range outs {
if out.Asset != asiutil.AsimovAsset {
if out.Value != r[out.Asset]{
return false
}
}
}
return true
}
coinbase := txs[len(txs)-1]
if !feesEqual(coinbase.TxOut, test.wantFees) {
t.Errorf("tests #%d fees error,coinbase out: %v ,want fees: %v", i, coinbase.TxOut, test.wantFees)
}
}
}
| TestTxPriceHeap | identifier_name |
mining_test.go | // Copyright (c) 2018-2020 The asimov developers
// Copyright (c) 2013-2017 The btcsuite developers
// Use of this source code is governed by an ISC
// license that can be found in the LICENSE file.
package mining
import (
"container/heap"
"github.com/AsimovNetwork/asimov/asiutil"
"github.com/AsimovNetwork/asimov/blockchain"
"github.com/AsimovNetwork/asimov/blockchain/txo"
"github.com/AsimovNetwork/asimov/chaincfg"
"github.com/AsimovNetwork/asimov/common"
"github.com/AsimovNetwork/asimov/common/address"
"github.com/AsimovNetwork/asimov/crypto"
"github.com/AsimovNetwork/asimov/protos"
"math"
"math/rand"
"testing"
"time"
)
// TestTxPriceHeap ensures the priority queue for transaction fees and
// priorities works as expected.
func TestTxPriceHeap(t *testing.T) {
// Create some fake priority items that exercise the expected sort
// edge conditions.
testItems := []*TxPrioItem{
{gasPrice: 5678,},
{gasPrice: 1234,},
{gasPrice: 10001,},
{gasPrice: 0,},
}
// Add random data in addition to the edge conditions already manually
// specified.
randSeed := rand.Int63()
defer func() {
if t.Failed() {
t.Logf("Random numbers using seed: %v", randSeed)
}
}()
prng := rand.New(rand.NewSource(randSeed))
for i := 0; i < 1000; i++ {
testItems = append(testItems, &TxPrioItem{
gasPrice: prng.Float64() * 10000,
})
}
// Test sorting by fee per KB then priority.
var highest *TxPrioItem
priorityQueue := NewTxPriorityQueue(len(testItems))
for i := 0; i < len(testItems); i++ {
prioItem := testItems[i]
if highest == nil {
highest = prioItem
}
if prioItem.gasPrice >= highest.gasPrice {
highest = prioItem
}
heap.Push(priorityQueue, prioItem)
}
for i := 0; i < len(testItems); i++ {
prioItem := heap.Pop(priorityQueue).(*TxPrioItem)
if prioItem.gasPrice > highest.gasPrice {
t.Fatalf("fee sort: item (fee per KB: %v) higher than than prev "+
"(fee per KB: %v)", prioItem.gasPrice, highest.gasPrice, )
}
highest = prioItem
}
}
func TestCreateCoinbaseTx(t *testing.T) |
func TestNewBlockTemplate(t *testing.T) {
policy := Policy{
BlockProductedTimeOut: chaincfg.DefaultBlockProductedTimeOut,
TxConnectTimeOut: chaincfg.DefaultTxConnectTimeOut,
UtxoValidateTimeOut: chaincfg.DefaultUtxoValidateTimeOut,
}
chain, teardownFunc, err := newFakeChain(&chaincfg.MainNetParams)
if err != nil {
t.Error("newFakeChain error: ", err)
return
}
fakeTxSource := &fakeTxSource{make(map[common.Hash]*TxDesc)}
fakeSigSource := &fakeSigSource{make([]*asiutil.BlockSign, 0)}
g := NewBlkTmplGenerator(
&policy,
fakeTxSource,
fakeSigSource,
chain,
)
defer teardownFunc()
global_view := txo.NewUtxoViewpoint()
g.FetchUtxoView = func(tx *asiutil.Tx, dolock bool) (viewpoint *txo.UtxoViewpoint, e error) {
neededSet := make(map[protos.OutPoint]struct{})
prevOut := protos.OutPoint{Hash: *tx.Hash()}
for txOutIdx := range tx.MsgTx().TxOut {
prevOut.Index = uint32(txOutIdx)
neededSet[prevOut] = struct{}{}
}
if !blockchain.IsCoinBase(tx) {
for _, txIn := range tx.MsgTx().TxIn {
neededSet[txIn.PreviousOutPoint] = struct{}{}
}
}
// Request the utxos from the point of view of the end of the main
// chain.
view := txo.NewUtxoViewpoint()
for k, _ := range neededSet {
view.AddEntry(k,global_view.LookupEntry(k))
}
return view, nil
}
invaildAsset := protos.NewAsset(0, 0, 1)
keys := []*crypto.Account{}
for i := 0; i < 16; i++ {
privKey, _ := crypto.NewPrivateKey(crypto.S256())
pkaddr, _ := address.NewAddressPubKey(privKey.PubKey().SerializeCompressed())
addr := pkaddr.AddressPubKeyHash()
keys = append(keys, &crypto.Account {*privKey, *privKey.PubKey(),addr})
}
fakeTxs := TxDescList{
{Tx: createFakeTx([]*fakeIn{
{
keys[0], 1e8, &asiutil.AsimovAsset, 0, false, 0, common.HexToHash("1"),
},
}, []*fakeOut{
{
keys[1].Address, 1e8 - 1e4, &asiutil.AsimovAsset,
},
}, global_view), GasPrice: 1},
{Tx: createFakeTx([]*fakeIn{
{
keys[0], 1e8, &asiutil.AsimovAsset, 1, false, 0, common.HexToHash("1"),
},
}, []*fakeOut{
{
keys[1].Address, 1e8, &asiutil.AsimovAsset,
},
}, global_view), GasPrice: 2},
{Tx: createFakeTx([]*fakeIn{
{
keys[0], 1e18, &asiutil.AsimovAsset, 0, false, 0, common.HexToHash("2"),
}, {
keys[0], 1e4, &asiutil.AsimovAsset, 1, false, 0, common.HexToHash("3"),
},
}, []*fakeOut{
{
keys[0].Address, 1e18 - 1e12, &asiutil.AsimovAsset,
},
}, global_view), GasPrice: 3},
{Tx: createFakeTx([]*fakeIn{
{
keys[0], 1234567890, &asiutil.AsimovAsset, 3, false, 0, common.HexToHash("4"),
}, {
keys[1], 1e6, &asiutil.AsimovAsset, 5, false, 0, common.HexToHash("4"),
}, {
keys[3], 1e4, &asiutil.AsimovAsset, 8, false, 0, common.HexToHash("5"),
},
}, []*fakeOut{
{
keys[2].Address, 1234567890 + 1e6, &asiutil.AsimovAsset,
},
}, global_view), GasPrice: 4},
{Tx: createFakeTx([]*fakeIn{
{
keys[0], 1e4, &asiutil.AsimovAsset, 0, false, 0, common.HexToHash("5"),
},
}, []*fakeOut{
{
keys[0].Address, 1e3, &asiutil.AsimovAsset,
}, {
keys[2].Address, 1e3, &asiutil.AsimovAsset,
}, {
keys[1].Address, 1e3, &asiutil.AsimovAsset,
}, {
keys[1].Address, 1e3, &asiutil.AsimovAsset,
}, {
keys[0].Address, 6e3 - 1, &asiutil.AsimovAsset,
},
}, global_view), GasPrice: 5},
{Tx: createFakeTx([]*fakeIn{
{
keys[0], 1, &asiutil.AsimovAsset, 0, false, 0, common.HexToHash("6"),
}, {
keys[1], 1e6, &asiutil.AsimovAsset, 1, false, 0, common.HexToHash("6"),
}, {
keys[2], 1e4, &asiutil.AsimovAsset, 2, false, 0, common.HexToHash("6"),
}, {
keys[2], 1e4, &asiutil.AsimovAsset, 2, false, 0, common.HexToHash("7"),
}, {
keys[3], 1e4, &asiutil.AsimovAsset, 4, false, 0, common.HexToHash("7"),
},
}, []*fakeOut{
{
keys[2].Address, 1e6, &asiutil.AsimovAsset,
}, {
keys[2].Address, 1e4 - 1, &asiutil.AsimovAsset,
}, {
keys[4].Address, 1e4, &asiutil.AsimovAsset,
}, {
keys[5].Address, 1e3, &asiutil.AsimovAsset,
}, {
keys[5].Address, 1e3, &asiutil.AsimovAsset,
}, {
keys[6].Address, 8e3 - 1, &asiutil.AsimovAsset,
},
}, global_view), GasPrice: 6},
}
//create tx depend last tx
fakeTxs = append(fakeTxs, &TxDesc{Tx: createFakeTx([]*fakeIn{
{
keys[5], 1e3, &asiutil.AsimovAsset, 4, false, 0x7FFFFFFF, *fakeTxs[len(fakeTxs)-1].Tx.Hash(),
},
}, []*fakeOut{
{
keys[0].Address, 1e3 - 2, &asiutil.AsimovAsset,
},
}, nil), GasPrice: 7})
invalidFakeTxs := TxDescList{
{Tx: createFakeTx([]*fakeIn{
{
keys[0], 1e8, &asiutil.AsimovAsset, 0, false, 0, common.HexToHash("1"),
},
}, []*fakeOut{
{
keys[1].Address, 1e8 - 1e4, &asiutil.AsimovAsset,
},
}, global_view), GasPrice: 1},
{Tx: createFakeTx([]*fakeIn{
{
keys[0], 1e8, &asiutil.AsimovAsset, math.MaxUint32, true, 0, common.HexToHash("0"),
},
}, []*fakeOut{
{
keys[1].Address, 1e8 - 1, &asiutil.AsimovAsset,
},
}, global_view), GasPrice: 1},
{Tx: createFakeTx([]*fakeIn{
{
keys[0], 1e8, &asiutil.AsimovAsset, 0, false, 0, common.HexToHash("8"),
},
}, []*fakeOut{
{
keys[1].Address, 1e8 + 1, &asiutil.AsimovAsset,
},
}, global_view), GasPrice: 1},
{Tx: createFakeTx([]*fakeIn{
{
keys[0], 1e8, &asiutil.AsimovAsset, 3, false, 0, common.HexToHash("8"),
},
}, []*fakeOut{
{
keys[1].Address, 1e8 - 1, invaildAsset,
},
}, global_view), GasPrice: 1},
{Tx: createFakeTx([]*fakeIn{
{
keys[0], 1e8, invaildAsset, 4, false, 0, common.HexToHash("8"),
},
}, []*fakeOut{
{
keys[1].Address, 1e8 - 1, invaildAsset,
},
}, global_view), GasPrice: 1},
{Tx: createFakeTx([]*fakeIn{
{
keys[0], 1e8, &asiutil.AsimovAsset, 5, false, 0, common.HexToHash("8"),
},
}, []*fakeOut{
{
keys[1].Address, 1e8 - 1, &asiutil.AsimovAsset,
},
}, nil), GasPrice: 1},
}
getFees := func(amounts int64) map[protos.Asset]int64 {
res := make(map[protos.Asset]int64)
res[asiutil.AsimovAsset] = amounts
return res
}
privateKey := "0xd0f0461b7b4d26cf370e6c73b58ef7fa26e8e30853a8cee901ed42cf0879cb6e"
account,_ := crypto.NewAccount(privateKey)
tests := []struct {
validator *crypto.Account
gasFloor uint64
gasCeil uint64
round uint32
slot uint16
txs TxDescList
wantTx []*common.Hash
wantFees map[protos.Asset]int64
wantOpCosts []int64
wantWeight uint16
wantErr bool
}{
{
account, 160000000, 160000000, 1, 0, TxDescList{},
[]*common.Hash{},
make(map[protos.Asset]int64),
[]int64{1}, 720, false,
}, {
account, 160000000, 160000000, 1, 0, fakeTxs[0:1],
[]*common.Hash{fakeTxs[0].Tx.Hash()},
getFees(1e4),
[]int64{1, 1}, 720, false,
}, {
account, 160000000, 160000000, 1, 0, fakeTxs[1:7],
[]*common.Hash{fakeTxs[5].Tx.Hash(), fakeTxs[6].Tx.Hash(), fakeTxs[4].Tx.Hash(), fakeTxs[3].Tx.Hash(), fakeTxs[2].Tx.Hash(), fakeTxs[1].Tx.Hash()},
getFees(1 + 1 + 1e12 + 1e4 + 1 + 1e4 + 3),
[]int64{1, 6, 1, 5, 1, 1, 1}, 720, false,
}, {
account, 160000000, 160000000, 1, 0, invalidFakeTxs,
[]*common.Hash{},
make(map[protos.Asset]int64),
[]int64{1}, 720, false,
}, {
keys[0], 160000000, 160000000, 1, 0, TxDescList{},
[]*common.Hash{},
make(map[protos.Asset]int64),
[]int64{1}, 0, true,
},
}
t.Logf("Running %d tests", len(tests))
for i, test := range tests {
fakeTxSource.clear()
for _, v := range test.txs {
fakeTxSource.push(v)
}
template, err := g.ProduceNewBlock(test.validator, test.gasFloor, test.gasCeil,
time.Now().Unix(), test.round, test.slot, 5*100000)
if err != nil {
if test.wantErr != true {
t.Errorf("tests #%d error %v", i, err)
}
continue
}
block := template.Block
txs := block.MsgBlock().Transactions
if block.MsgBlock().Header.CoinBase != *test.validator.Address ||
block.MsgBlock().Header.Round != test.round ||
block.MsgBlock().Header.SlotIndex != test.slot ||
block.MsgBlock().Header.Weight != test.wantWeight {
t.Errorf("tests #%d Coinbase: %v ,Round: %v ,Slot: %v Weight: %v",
i, block.MsgBlock().Header.CoinBase, block.MsgBlock().Header.Round, block.MsgBlock().Header.SlotIndex, block.MsgBlock().Header.Weight)
}
outTxEqual := func(ltxs []*protos.MsgTx, rtxs []*common.Hash) bool {
if len(ltxs) != len(rtxs) {
return false
}
for k, v := range ltxs {
if v.TxHash() != *rtxs[k] {
return false
}
}
return true
}
t.Log(i)
for _, v := range txs {
t.Log(v.TxHash())
}
t.Log(test.wantTx)
if !outTxEqual(txs[:len(txs)-1], test.wantTx) {
t.Errorf("tests #%d out tx error, txlen %d, want tx: %v", i, len(txs), test.wantTx)
}
feesEqual := func(outs []*protos.TxOut, r map[protos.Asset]int64) bool {
for _, out := range outs {
if out.Asset != asiutil.AsimovAsset {
if out.Value != r[out.Asset]{
return false
}
}
}
return true
}
coinbase := txs[len(txs)-1]
if !feesEqual(coinbase.TxOut, test.wantFees) {
t.Errorf("tests #%d fees error,coinbase out: %v ,want fees: %v", i, coinbase.TxOut, test.wantFees)
}
}
}
| {
privKey, _ := crypto.NewPrivateKey(crypto.S256())
pkaddr, _ := address.NewAddressPubKey(privKey.PubKey().SerializeCompressed())
addr := pkaddr.AddressPubKeyHash()
tests := []struct {
validater common.IAddress
height int32
wantErr bool
}{
{
pkaddr,
1,
false,
}, {
addr,
1,
false,
}, {
&common.Address{},
1,
true,
},
}
t.Logf("Running %d tests", len(tests))
for i, test := range tests {
_, _, err := CreateCoinbaseTx(&chaincfg.MainNetParams, test.height, test.validater, nil)
if test.wantErr != (err != nil) {
t.Errorf("tests #%d error %v", i, err)
}
}
} | identifier_body |
mining_test.go | // Copyright (c) 2018-2020 The asimov developers
// Copyright (c) 2013-2017 The btcsuite developers
// Use of this source code is governed by an ISC
// license that can be found in the LICENSE file.
package mining
import (
"container/heap"
"github.com/AsimovNetwork/asimov/asiutil"
"github.com/AsimovNetwork/asimov/blockchain"
"github.com/AsimovNetwork/asimov/blockchain/txo"
"github.com/AsimovNetwork/asimov/chaincfg"
"github.com/AsimovNetwork/asimov/common"
"github.com/AsimovNetwork/asimov/common/address"
"github.com/AsimovNetwork/asimov/crypto"
"github.com/AsimovNetwork/asimov/protos"
"math"
"math/rand"
"testing"
"time"
)
// TestTxPriceHeap ensures the priority queue for transaction fees and
// priorities works as expected.
func TestTxPriceHeap(t *testing.T) {
// Create some fake priority items that exercise the expected sort
// edge conditions.
testItems := []*TxPrioItem{
{gasPrice: 5678,},
{gasPrice: 1234,},
{gasPrice: 10001,},
{gasPrice: 0,},
}
// Add random data in addition to the edge conditions already manually
// specified.
randSeed := rand.Int63()
defer func() {
if t.Failed() {
t.Logf("Random numbers using seed: %v", randSeed)
}
}()
prng := rand.New(rand.NewSource(randSeed))
for i := 0; i < 1000; i++ {
testItems = append(testItems, &TxPrioItem{
gasPrice: prng.Float64() * 10000,
})
}
// Test sorting by fee per KB then priority.
var highest *TxPrioItem
priorityQueue := NewTxPriorityQueue(len(testItems))
for i := 0; i < len(testItems); i++ {
prioItem := testItems[i]
if highest == nil {
highest = prioItem
}
if prioItem.gasPrice >= highest.gasPrice {
highest = prioItem
}
heap.Push(priorityQueue, prioItem)
}
for i := 0; i < len(testItems); i++ {
prioItem := heap.Pop(priorityQueue).(*TxPrioItem)
if prioItem.gasPrice > highest.gasPrice {
t.Fatalf("fee sort: item (fee per KB: %v) higher than than prev "+
"(fee per KB: %v)", prioItem.gasPrice, highest.gasPrice, )
}
highest = prioItem
}
}
func TestCreateCoinbaseTx(t *testing.T) {
privKey, _ := crypto.NewPrivateKey(crypto.S256())
pkaddr, _ := address.NewAddressPubKey(privKey.PubKey().SerializeCompressed())
addr := pkaddr.AddressPubKeyHash()
tests := []struct {
validater common.IAddress
height int32
wantErr bool
}{
{ | addr,
1,
false,
}, {
&common.Address{},
1,
true,
},
}
t.Logf("Running %d tests", len(tests))
for i, test := range tests {
_, _, err := CreateCoinbaseTx(&chaincfg.MainNetParams, test.height, test.validater, nil)
if test.wantErr != (err != nil) {
t.Errorf("tests #%d error %v", i, err)
}
}
}
func TestNewBlockTemplate(t *testing.T) {
policy := Policy{
BlockProductedTimeOut: chaincfg.DefaultBlockProductedTimeOut,
TxConnectTimeOut: chaincfg.DefaultTxConnectTimeOut,
UtxoValidateTimeOut: chaincfg.DefaultUtxoValidateTimeOut,
}
chain, teardownFunc, err := newFakeChain(&chaincfg.MainNetParams)
if err != nil {
t.Error("newFakeChain error: ", err)
return
}
fakeTxSource := &fakeTxSource{make(map[common.Hash]*TxDesc)}
fakeSigSource := &fakeSigSource{make([]*asiutil.BlockSign, 0)}
g := NewBlkTmplGenerator(
&policy,
fakeTxSource,
fakeSigSource,
chain,
)
defer teardownFunc()
global_view := txo.NewUtxoViewpoint()
g.FetchUtxoView = func(tx *asiutil.Tx, dolock bool) (viewpoint *txo.UtxoViewpoint, e error) {
neededSet := make(map[protos.OutPoint]struct{})
prevOut := protos.OutPoint{Hash: *tx.Hash()}
for txOutIdx := range tx.MsgTx().TxOut {
prevOut.Index = uint32(txOutIdx)
neededSet[prevOut] = struct{}{}
}
if !blockchain.IsCoinBase(tx) {
for _, txIn := range tx.MsgTx().TxIn {
neededSet[txIn.PreviousOutPoint] = struct{}{}
}
}
// Request the utxos from the point of view of the end of the main
// chain.
view := txo.NewUtxoViewpoint()
for k, _ := range neededSet {
view.AddEntry(k,global_view.LookupEntry(k))
}
return view, nil
}
invaildAsset := protos.NewAsset(0, 0, 1)
keys := []*crypto.Account{}
for i := 0; i < 16; i++ {
privKey, _ := crypto.NewPrivateKey(crypto.S256())
pkaddr, _ := address.NewAddressPubKey(privKey.PubKey().SerializeCompressed())
addr := pkaddr.AddressPubKeyHash()
keys = append(keys, &crypto.Account {*privKey, *privKey.PubKey(),addr})
}
fakeTxs := TxDescList{
{Tx: createFakeTx([]*fakeIn{
{
keys[0], 1e8, &asiutil.AsimovAsset, 0, false, 0, common.HexToHash("1"),
},
}, []*fakeOut{
{
keys[1].Address, 1e8 - 1e4, &asiutil.AsimovAsset,
},
}, global_view), GasPrice: 1},
{Tx: createFakeTx([]*fakeIn{
{
keys[0], 1e8, &asiutil.AsimovAsset, 1, false, 0, common.HexToHash("1"),
},
}, []*fakeOut{
{
keys[1].Address, 1e8, &asiutil.AsimovAsset,
},
}, global_view), GasPrice: 2},
{Tx: createFakeTx([]*fakeIn{
{
keys[0], 1e18, &asiutil.AsimovAsset, 0, false, 0, common.HexToHash("2"),
}, {
keys[0], 1e4, &asiutil.AsimovAsset, 1, false, 0, common.HexToHash("3"),
},
}, []*fakeOut{
{
keys[0].Address, 1e18 - 1e12, &asiutil.AsimovAsset,
},
}, global_view), GasPrice: 3},
{Tx: createFakeTx([]*fakeIn{
{
keys[0], 1234567890, &asiutil.AsimovAsset, 3, false, 0, common.HexToHash("4"),
}, {
keys[1], 1e6, &asiutil.AsimovAsset, 5, false, 0, common.HexToHash("4"),
}, {
keys[3], 1e4, &asiutil.AsimovAsset, 8, false, 0, common.HexToHash("5"),
},
}, []*fakeOut{
{
keys[2].Address, 1234567890 + 1e6, &asiutil.AsimovAsset,
},
}, global_view), GasPrice: 4},
{Tx: createFakeTx([]*fakeIn{
{
keys[0], 1e4, &asiutil.AsimovAsset, 0, false, 0, common.HexToHash("5"),
},
}, []*fakeOut{
{
keys[0].Address, 1e3, &asiutil.AsimovAsset,
}, {
keys[2].Address, 1e3, &asiutil.AsimovAsset,
}, {
keys[1].Address, 1e3, &asiutil.AsimovAsset,
}, {
keys[1].Address, 1e3, &asiutil.AsimovAsset,
}, {
keys[0].Address, 6e3 - 1, &asiutil.AsimovAsset,
},
}, global_view), GasPrice: 5},
{Tx: createFakeTx([]*fakeIn{
{
keys[0], 1, &asiutil.AsimovAsset, 0, false, 0, common.HexToHash("6"),
}, {
keys[1], 1e6, &asiutil.AsimovAsset, 1, false, 0, common.HexToHash("6"),
}, {
keys[2], 1e4, &asiutil.AsimovAsset, 2, false, 0, common.HexToHash("6"),
}, {
keys[2], 1e4, &asiutil.AsimovAsset, 2, false, 0, common.HexToHash("7"),
}, {
keys[3], 1e4, &asiutil.AsimovAsset, 4, false, 0, common.HexToHash("7"),
},
}, []*fakeOut{
{
keys[2].Address, 1e6, &asiutil.AsimovAsset,
}, {
keys[2].Address, 1e4 - 1, &asiutil.AsimovAsset,
}, {
keys[4].Address, 1e4, &asiutil.AsimovAsset,
}, {
keys[5].Address, 1e3, &asiutil.AsimovAsset,
}, {
keys[5].Address, 1e3, &asiutil.AsimovAsset,
}, {
keys[6].Address, 8e3 - 1, &asiutil.AsimovAsset,
},
}, global_view), GasPrice: 6},
}
//create tx depend last tx
fakeTxs = append(fakeTxs, &TxDesc{Tx: createFakeTx([]*fakeIn{
{
keys[5], 1e3, &asiutil.AsimovAsset, 4, false, 0x7FFFFFFF, *fakeTxs[len(fakeTxs)-1].Tx.Hash(),
},
}, []*fakeOut{
{
keys[0].Address, 1e3 - 2, &asiutil.AsimovAsset,
},
}, nil), GasPrice: 7})
invalidFakeTxs := TxDescList{
{Tx: createFakeTx([]*fakeIn{
{
keys[0], 1e8, &asiutil.AsimovAsset, 0, false, 0, common.HexToHash("1"),
},
}, []*fakeOut{
{
keys[1].Address, 1e8 - 1e4, &asiutil.AsimovAsset,
},
}, global_view), GasPrice: 1},
{Tx: createFakeTx([]*fakeIn{
{
keys[0], 1e8, &asiutil.AsimovAsset, math.MaxUint32, true, 0, common.HexToHash("0"),
},
}, []*fakeOut{
{
keys[1].Address, 1e8 - 1, &asiutil.AsimovAsset,
},
}, global_view), GasPrice: 1},
{Tx: createFakeTx([]*fakeIn{
{
keys[0], 1e8, &asiutil.AsimovAsset, 0, false, 0, common.HexToHash("8"),
},
}, []*fakeOut{
{
keys[1].Address, 1e8 + 1, &asiutil.AsimovAsset,
},
}, global_view), GasPrice: 1},
{Tx: createFakeTx([]*fakeIn{
{
keys[0], 1e8, &asiutil.AsimovAsset, 3, false, 0, common.HexToHash("8"),
},
}, []*fakeOut{
{
keys[1].Address, 1e8 - 1, invaildAsset,
},
}, global_view), GasPrice: 1},
{Tx: createFakeTx([]*fakeIn{
{
keys[0], 1e8, invaildAsset, 4, false, 0, common.HexToHash("8"),
},
}, []*fakeOut{
{
keys[1].Address, 1e8 - 1, invaildAsset,
},
}, global_view), GasPrice: 1},
{Tx: createFakeTx([]*fakeIn{
{
keys[0], 1e8, &asiutil.AsimovAsset, 5, false, 0, common.HexToHash("8"),
},
}, []*fakeOut{
{
keys[1].Address, 1e8 - 1, &asiutil.AsimovAsset,
},
}, nil), GasPrice: 1},
}
getFees := func(amounts int64) map[protos.Asset]int64 {
res := make(map[protos.Asset]int64)
res[asiutil.AsimovAsset] = amounts
return res
}
privateKey := "0xd0f0461b7b4d26cf370e6c73b58ef7fa26e8e30853a8cee901ed42cf0879cb6e"
account,_ := crypto.NewAccount(privateKey)
tests := []struct {
validator *crypto.Account
gasFloor uint64
gasCeil uint64
round uint32
slot uint16
txs TxDescList
wantTx []*common.Hash
wantFees map[protos.Asset]int64
wantOpCosts []int64
wantWeight uint16
wantErr bool
}{
{
account, 160000000, 160000000, 1, 0, TxDescList{},
[]*common.Hash{},
make(map[protos.Asset]int64),
[]int64{1}, 720, false,
}, {
account, 160000000, 160000000, 1, 0, fakeTxs[0:1],
[]*common.Hash{fakeTxs[0].Tx.Hash()},
getFees(1e4),
[]int64{1, 1}, 720, false,
}, {
account, 160000000, 160000000, 1, 0, fakeTxs[1:7],
[]*common.Hash{fakeTxs[5].Tx.Hash(), fakeTxs[6].Tx.Hash(), fakeTxs[4].Tx.Hash(), fakeTxs[3].Tx.Hash(), fakeTxs[2].Tx.Hash(), fakeTxs[1].Tx.Hash()},
getFees(1 + 1 + 1e12 + 1e4 + 1 + 1e4 + 3),
[]int64{1, 6, 1, 5, 1, 1, 1}, 720, false,
}, {
account, 160000000, 160000000, 1, 0, invalidFakeTxs,
[]*common.Hash{},
make(map[protos.Asset]int64),
[]int64{1}, 720, false,
}, {
keys[0], 160000000, 160000000, 1, 0, TxDescList{},
[]*common.Hash{},
make(map[protos.Asset]int64),
[]int64{1}, 0, true,
},
}
t.Logf("Running %d tests", len(tests))
for i, test := range tests {
fakeTxSource.clear()
for _, v := range test.txs {
fakeTxSource.push(v)
}
template, err := g.ProduceNewBlock(test.validator, test.gasFloor, test.gasCeil,
time.Now().Unix(), test.round, test.slot, 5*100000)
if err != nil {
if test.wantErr != true {
t.Errorf("tests #%d error %v", i, err)
}
continue
}
block := template.Block
txs := block.MsgBlock().Transactions
if block.MsgBlock().Header.CoinBase != *test.validator.Address ||
block.MsgBlock().Header.Round != test.round ||
block.MsgBlock().Header.SlotIndex != test.slot ||
block.MsgBlock().Header.Weight != test.wantWeight {
t.Errorf("tests #%d Coinbase: %v ,Round: %v ,Slot: %v Weight: %v",
i, block.MsgBlock().Header.CoinBase, block.MsgBlock().Header.Round, block.MsgBlock().Header.SlotIndex, block.MsgBlock().Header.Weight)
}
outTxEqual := func(ltxs []*protos.MsgTx, rtxs []*common.Hash) bool {
if len(ltxs) != len(rtxs) {
return false
}
for k, v := range ltxs {
if v.TxHash() != *rtxs[k] {
return false
}
}
return true
}
t.Log(i)
for _, v := range txs {
t.Log(v.TxHash())
}
t.Log(test.wantTx)
if !outTxEqual(txs[:len(txs)-1], test.wantTx) {
t.Errorf("tests #%d out tx error, txlen %d, want tx: %v", i, len(txs), test.wantTx)
}
feesEqual := func(outs []*protos.TxOut, r map[protos.Asset]int64) bool {
for _, out := range outs {
if out.Asset != asiutil.AsimovAsset {
if out.Value != r[out.Asset]{
return false
}
}
}
return true
}
coinbase := txs[len(txs)-1]
if !feesEqual(coinbase.TxOut, test.wantFees) {
t.Errorf("tests #%d fees error,coinbase out: %v ,want fees: %v", i, coinbase.TxOut, test.wantFees)
}
}
} | pkaddr,
1,
false,
}, { | random_line_split |
__init__.py | #!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import crypten.communicator as comm
import crypten.mpc # noqa: F401
import crypten.nn # noqa: F401
import torch
# other imports:
from . import debug
from .cryptensor import CrypTensor
from .mpc import ptype
def init():
comm._init(use_threads=False, init_ttp=crypten.mpc.ttp_required())
if comm.get().get_rank() < comm.get().get_world_size():
_setup_przs()
if crypten.mpc.ttp_required():
crypten.mpc.provider.ttp_provider.TTPClient._init()
def init_thread(rank, world_size):
comm._init(use_threads=True, rank=rank, world_size=world_size)
_setup_przs()
def uninit():
return comm.uninit()
def is_initialized():
return comm.is_initialized()
# the different private type attributes of an mpc encrypted tensor
arithmetic = ptype.arithmetic
binary = ptype.binary
def print_communication_stats():
comm.get().print_communication_stats()
def reset_communication_stats():
comm.get().reset_communication_stats()
# Set backend
__SUPPORTED_BACKENDS = [crypten.mpc]
__default_backend = __SUPPORTED_BACKENDS[0]
def set_default_backend(new_default_backend):
"""Sets the default cryptensor backend (mpc, he)"""
global __default_backend
assert new_default_backend in __SUPPORTED_BACKENDS, (
"Backend %s is not supported" % new_default_backend
)
__default_backend = new_default_backend
def get_default_backend():
"""Returns the default cryptensor backend (mpc, he)"""
return __default_backend
def cryptensor(*args, backend=None, **kwargs):
"""
Factory function to return encrypted tensor of given backend.
"""
if backend is None:
backend = get_default_backend()
if backend == crypten.mpc:
return backend.MPCTensor(*args, **kwargs)
else:
raise TypeError("Backend %s is not supported" % backend)
def is_encrypted_tensor(obj):
"""
Returns True if obj is an encrypted tensor.
"""
return isinstance(obj, CrypTensor)
def _setup_przs():
"""
Generate shared random seeds to generate pseudo-random sharings of
zero. The random seeds are shared such that each process shares
one seed with the previous rank process and one with the next rank.
This allows for the generation of `n` random values, each known to
exactly two of the `n` parties.
For arithmetic sharing, one of these parties will add the number
while the other subtracts it, allowing for the generation of a
pseudo-random sharing of zero. (This can be done for binary
sharing using bitwise-xor rather than addition / subtraction)
"""
# Initialize RNG Generators
comm.get().g0 = torch.Generator()
comm.get().g1 = torch.Generator()
# Generate random seeds for Generators
# NOTE: Chosen seed can be any number, but we choose as a random 64-bit
# integer here so other parties cannot guess its value.
# We sometimes get here from a forked process, which causes all parties
# to have the same RNG state. Reset the seed to make sure RNG streams
# are different in all the parties. We use numpy's random here since
# setting its seed to None will produce different seeds even from
# forked processes.
import numpy
numpy.random.seed(seed=None)
next_seed = torch.tensor(numpy.random.randint(-2 ** 63, 2 ** 63 - 1, (1,)))
prev_seed = torch.LongTensor([0]) # placeholder
# Send random seed to next party, receive random seed from prev party
world_size = comm.get().get_world_size()
rank = comm.get().get_rank()
if world_size >= 2: # Otherwise sending seeds will segfault.
next_rank = (rank + 1) % world_size
prev_rank = (next_rank - 2) % world_size
req0 = comm.get().isend(tensor=next_seed, dst=next_rank)
req1 = comm.get().irecv(tensor=prev_seed, src=prev_rank)
req0.wait()
req1.wait()
else:
prev_seed = next_seed
# Seed Generators
comm.get().g0.manual_seed(next_seed.item())
comm.get().g1.manual_seed(prev_seed.item())
def __validate_model(loaded_model, dummy_model):
"""Validates that two models have the same architecture"""
loaded_modules = [loaded_model]
dummy_modules = [dummy_model]
valid = torch.tensor(1, dtype=torch.long)
try:
while len(loaded_modules) > 0:
loaded_module = loaded_modules.pop(0)
dummy_module = dummy_modules.pop(0)
# Assert modules have the same number of parameters
loaded_params = [param for param in loaded_module.parameters()]
dummy_params = [param for param in dummy_module.parameters()]
assert len(loaded_params) == len(dummy_params)
for i, param in enumerate(loaded_params):
assert param.size() == dummy_params[i].size()
# Assert that modules have the same number of sub-modules
loaded_module_modules = [mod for mod in loaded_module.modules()][1:]
dummy_module_modules = [mod for mod in dummy_module.modules()][1:]
loaded_modules.extend(loaded_module_modules)
dummy_modules.extend(dummy_module_modules)
assert len(loaded_modules) == len(dummy_modules)
except AssertionError:
valid = torch.tensor(0, dtype=torch.long)
return valid
def load(f, encrypted=False, dummy_model=None, src=0, **kwargs):
"""
Loads an object saved with `torch.save()` or `crypten.save()`.
Args:
f: a file-like object (has to implement `read()`, `readline()`,
`tell()`, and `seek()`), or a string containing a file name
encrypted: Determines whether crypten should load an encrypted tensor
or a plaintext torch tensor.
dummy_model: Takes a model architecture to fill with the loaded model
(on the `src` party only). Non-source parties will return the
`dummy_model` input (with data unchanged). Loading a model will
assert the correctness of the model architecture provided against
the model loaded. This argument is ignored if the file loaded is
a tensor.
src: Determines the source of the tensor. If `src` is None, each
party will attempt to read in the specified file. If `src` is
specified, the source party will read the tensor from
"""
if encrypted:
raise NotImplementedError("Loading encrypted tensors is not yet supported")
else:
assert isinstance(src, int), "Load failed: src argument must be an integer"
assert (
src >= 0 and src < comm.get().get_world_size()
), "Load failed: src must be in [0, world_size)"
# TODO: Use send_obj and recv_obj to send modules without requiring a
# dummy_model
# source party
if comm.get().get_rank() == src:
result = torch.load(f, **kwargs)
# file contains torch.tensor
if torch.is_tensor(result):
# Broadcast load type
load_type = torch.tensor(0, dtype=torch.long)
comm.get().broadcast(load_type, src=src)
# Broadcast size to other parties.
dim = torch.tensor(result.dim(), dtype=torch.long)
size = torch.tensor(result.size(), dtype=torch.long)
comm.get().broadcast(dim, src=src)
comm.get().broadcast(size, src=src)
result = cryptensor(result, src=src)
# file contains torch module
elif isinstance(result, torch.nn.Module):
# Broadcast load type
load_type = torch.tensor(1, dtype=torch.long)
comm.get().broadcast(load_type, src=src)
# Assert that dummy_model is provided
assert dummy_model is not None and isinstance(
dummy_model, torch.nn.Module
), "dummy model must be provided when loading a model"
# Assert that model architectures are the same
valid = __validate_model(result, dummy_model)
comm.get().broadcast(valid, src=src) # Broadcast validation
assert valid.item(), "Model architecture does not match loaded module"
result.src = src
# file contains unrecognized type
else:
# Broadcast load type
load_type = torch.tensor(-1, dtype=torch.long)
comm.get().broadcast(load_type, src=src)
# raise error
raise TypeError("Unrecognized load type %s" % type(result))
# Non-source party
else:
# Receive load type from source party
load_type = torch.tensor(-1, dtype=torch.long)
comm.get().broadcast(load_type, src=src)
# Load in tensor
if load_type.item() == 0:
# Receive size from source party
dim = torch.empty(size=(), dtype=torch.long)
comm.get().broadcast(dim, src=src)
size = torch.empty(size=(dim.item(),), dtype=torch.long)
comm.get().broadcast(size, src=src)
result = cryptensor(torch.empty(size=tuple(size.tolist())), src=src)
# Load module using dummy_model
elif load_type.item() == 1:
# Assert dummy_model is given
assert dummy_model is not None and isinstance(
dummy_model, torch.nn.Module
), "dummy model must be provided when loading a model"
result = dummy_model
# Receive model architecture validation
valid = torch.tensor(1, dtype=torch.long)
comm.get().broadcast(valid, src=src)
assert valid.item(), "Model architecture does not match loaded module"
result.src = src
else:
raise TypeError("Unrecognized load type on src")
# TODO: Encrypt modules before returning them
return result
def save(obj, f, src=0, **kwargs):
|
def where(condition, input, other):
"""
Return a tensor of elements selected from either `input` or `other`, depending
on `condition`.
"""
if is_encrypted_tensor(condition):
return condition * input + (1 - condition) * other
elif torch.is_tensor(condition):
condition = condition.float()
return input * condition + other * (1 - condition)
def cat(tensors, dim=0):
assert isinstance(tensors, list), "input to cat must be a list"
if len(tensors) == 1:
return tensors[0]
from .autograd_cryptensor import AutogradCrypTensor
if any(isinstance(t, AutogradCrypTensor) for t in tensors):
if not isinstance(tensors[0], AutogradCrypTensor):
tensors[0] = AutogradCrypTensor(tensors[0], requires_grad=False)
return tensors[0].cat(*tensors[1:], dim=dim)
else:
return get_default_backend().cat(tensors, dim=dim)
def stack(tensors, dim=0):
assert isinstance(tensors, list), "input to stack must be a list"
if len(tensors) == 1:
return tensors[0].unsqueeze(dim)
from .autograd_cryptensor import AutogradCrypTensor
if any(isinstance(t, AutogradCrypTensor) for t in tensors):
if not isinstance(tensors[0], AutogradCrypTensor):
tensors[0] = AutogradCrypTensor(tensors[0], requires_grad=False)
return tensors[0].stack(*tensors[1:], dim=dim)
else:
return get_default_backend().stack(tensors, dim=dim)
# Top level tensor functions
__PASSTHROUGH_FUNCTIONS = ["bernoulli", "rand", "randperm"]
def __add_top_level_function(func_name):
def _passthrough_function(*args, backend=None, **kwargs):
if backend is None:
backend = get_default_backend()
return getattr(backend, func_name)(*args, **kwargs)
globals()[func_name] = _passthrough_function
for func in __PASSTHROUGH_FUNCTIONS:
__add_top_level_function(func)
# expose classes and functions in package:
__all__ = ["CrypTensor", "debug", "init", "init_thread", "mpc", "nn", "uninit"]
| """
Saves a CrypTensor or PyTorch tensor to a file.
Args:
obj: The CrypTensor or PyTorch tensor to be saved
f: a file-like object (has to implement `read()`, `readline()`,
`tell()`, and `seek()`), or a string containing a file name
src: The source party that writes data to the specified file.
"""
if is_encrypted_tensor(obj):
raise NotImplementedError("Saving encrypted tensors is not yet supported")
else:
assert isinstance(src, int), "Save failed: src must be an integer"
assert (
src >= 0 and src < comm.get().get_world_size()
), "Save failed: src must be an integer in [0, world_size)"
if comm.get().get_rank() == src:
torch.save(obj, f, **kwargs)
# Implement barrier to avoid race conditions that require file to exist
comm.get().barrier() | identifier_body |
__init__.py | #!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import crypten.communicator as comm
import crypten.mpc # noqa: F401
import crypten.nn # noqa: F401
import torch
# other imports:
from . import debug
from .cryptensor import CrypTensor
from .mpc import ptype
def init():
comm._init(use_threads=False, init_ttp=crypten.mpc.ttp_required())
if comm.get().get_rank() < comm.get().get_world_size():
_setup_przs()
if crypten.mpc.ttp_required():
crypten.mpc.provider.ttp_provider.TTPClient._init()
def init_thread(rank, world_size):
comm._init(use_threads=True, rank=rank, world_size=world_size)
_setup_przs()
def uninit():
return comm.uninit()
def is_initialized():
return comm.is_initialized()
# the different private type attributes of an mpc encrypted tensor
arithmetic = ptype.arithmetic
binary = ptype.binary
def print_communication_stats():
comm.get().print_communication_stats()
def reset_communication_stats():
comm.get().reset_communication_stats()
# Set backend
__SUPPORTED_BACKENDS = [crypten.mpc]
__default_backend = __SUPPORTED_BACKENDS[0]
def set_default_backend(new_default_backend):
"""Sets the default cryptensor backend (mpc, he)"""
global __default_backend
assert new_default_backend in __SUPPORTED_BACKENDS, (
"Backend %s is not supported" % new_default_backend
)
__default_backend = new_default_backend
def get_default_backend():
"""Returns the default cryptensor backend (mpc, he)"""
return __default_backend
def cryptensor(*args, backend=None, **kwargs):
"""
Factory function to return encrypted tensor of given backend.
"""
if backend is None:
backend = get_default_backend()
if backend == crypten.mpc:
return backend.MPCTensor(*args, **kwargs)
else:
raise TypeError("Backend %s is not supported" % backend)
def is_encrypted_tensor(obj):
"""
Returns True if obj is an encrypted tensor.
"""
return isinstance(obj, CrypTensor)
def _setup_przs():
"""
Generate shared random seeds to generate pseudo-random sharings of
zero. The random seeds are shared such that each process shares
one seed with the previous rank process and one with the next rank.
This allows for the generation of `n` random values, each known to
exactly two of the `n` parties.
For arithmetic sharing, one of these parties will add the number
while the other subtracts it, allowing for the generation of a
pseudo-random sharing of zero. (This can be done for binary
sharing using bitwise-xor rather than addition / subtraction)
"""
# Initialize RNG Generators
comm.get().g0 = torch.Generator()
comm.get().g1 = torch.Generator()
# Generate random seeds for Generators
# NOTE: Chosen seed can be any number, but we choose as a random 64-bit
# integer here so other parties cannot guess its value.
# We sometimes get here from a forked process, which causes all parties
# to have the same RNG state. Reset the seed to make sure RNG streams
# are different in all the parties. We use numpy's random here since
# setting its seed to None will produce different seeds even from
# forked processes.
import numpy
numpy.random.seed(seed=None)
next_seed = torch.tensor(numpy.random.randint(-2 ** 63, 2 ** 63 - 1, (1,)))
prev_seed = torch.LongTensor([0]) # placeholder
# Send random seed to next party, receive random seed from prev party
world_size = comm.get().get_world_size()
rank = comm.get().get_rank()
if world_size >= 2: # Otherwise sending seeds will segfault.
next_rank = (rank + 1) % world_size
prev_rank = (next_rank - 2) % world_size
req0 = comm.get().isend(tensor=next_seed, dst=next_rank)
req1 = comm.get().irecv(tensor=prev_seed, src=prev_rank)
req0.wait()
req1.wait()
else:
prev_seed = next_seed
# Seed Generators
comm.get().g0.manual_seed(next_seed.item())
comm.get().g1.manual_seed(prev_seed.item())
def __validate_model(loaded_model, dummy_model):
"""Validates that two models have the same architecture"""
loaded_modules = [loaded_model]
dummy_modules = [dummy_model]
valid = torch.tensor(1, dtype=torch.long)
try:
while len(loaded_modules) > 0:
|
except AssertionError:
valid = torch.tensor(0, dtype=torch.long)
return valid
def load(f, encrypted=False, dummy_model=None, src=0, **kwargs):
"""
Loads an object saved with `torch.save()` or `crypten.save()`.
Args:
f: a file-like object (has to implement `read()`, `readline()`,
`tell()`, and `seek()`), or a string containing a file name
encrypted: Determines whether crypten should load an encrypted tensor
or a plaintext torch tensor.
dummy_model: Takes a model architecture to fill with the loaded model
(on the `src` party only). Non-source parties will return the
`dummy_model` input (with data unchanged). Loading a model will
assert the correctness of the model architecture provided against
the model loaded. This argument is ignored if the file loaded is
a tensor.
src: Determines the source of the tensor. If `src` is None, each
party will attempt to read in the specified file. If `src` is
specified, the source party will read the tensor from
"""
if encrypted:
raise NotImplementedError("Loading encrypted tensors is not yet supported")
else:
assert isinstance(src, int), "Load failed: src argument must be an integer"
assert (
src >= 0 and src < comm.get().get_world_size()
), "Load failed: src must be in [0, world_size)"
# TODO: Use send_obj and recv_obj to send modules without requiring a
# dummy_model
# source party
if comm.get().get_rank() == src:
result = torch.load(f, **kwargs)
# file contains torch.tensor
if torch.is_tensor(result):
# Broadcast load type
load_type = torch.tensor(0, dtype=torch.long)
comm.get().broadcast(load_type, src=src)
# Broadcast size to other parties.
dim = torch.tensor(result.dim(), dtype=torch.long)
size = torch.tensor(result.size(), dtype=torch.long)
comm.get().broadcast(dim, src=src)
comm.get().broadcast(size, src=src)
result = cryptensor(result, src=src)
# file contains torch module
elif isinstance(result, torch.nn.Module):
# Broadcast load type
load_type = torch.tensor(1, dtype=torch.long)
comm.get().broadcast(load_type, src=src)
# Assert that dummy_model is provided
assert dummy_model is not None and isinstance(
dummy_model, torch.nn.Module
), "dummy model must be provided when loading a model"
# Assert that model architectures are the same
valid = __validate_model(result, dummy_model)
comm.get().broadcast(valid, src=src) # Broadcast validation
assert valid.item(), "Model architecture does not match loaded module"
result.src = src
# file contains unrecognized type
else:
# Broadcast load type
load_type = torch.tensor(-1, dtype=torch.long)
comm.get().broadcast(load_type, src=src)
# raise error
raise TypeError("Unrecognized load type %s" % type(result))
# Non-source party
else:
# Receive load type from source party
load_type = torch.tensor(-1, dtype=torch.long)
comm.get().broadcast(load_type, src=src)
# Load in tensor
if load_type.item() == 0:
# Receive size from source party
dim = torch.empty(size=(), dtype=torch.long)
comm.get().broadcast(dim, src=src)
size = torch.empty(size=(dim.item(),), dtype=torch.long)
comm.get().broadcast(size, src=src)
result = cryptensor(torch.empty(size=tuple(size.tolist())), src=src)
# Load module using dummy_model
elif load_type.item() == 1:
# Assert dummy_model is given
assert dummy_model is not None and isinstance(
dummy_model, torch.nn.Module
), "dummy model must be provided when loading a model"
result = dummy_model
# Receive model architecture validation
valid = torch.tensor(1, dtype=torch.long)
comm.get().broadcast(valid, src=src)
assert valid.item(), "Model architecture does not match loaded module"
result.src = src
else:
raise TypeError("Unrecognized load type on src")
# TODO: Encrypt modules before returning them
return result
def save(obj, f, src=0, **kwargs):
"""
Saves a CrypTensor or PyTorch tensor to a file.
Args:
obj: The CrypTensor or PyTorch tensor to be saved
f: a file-like object (has to implement `read()`, `readline()`,
`tell()`, and `seek()`), or a string containing a file name
src: The source party that writes data to the specified file.
"""
if is_encrypted_tensor(obj):
raise NotImplementedError("Saving encrypted tensors is not yet supported")
else:
assert isinstance(src, int), "Save failed: src must be an integer"
assert (
src >= 0 and src < comm.get().get_world_size()
), "Save failed: src must be an integer in [0, world_size)"
if comm.get().get_rank() == src:
torch.save(obj, f, **kwargs)
# Implement barrier to avoid race conditions that require file to exist
comm.get().barrier()
def where(condition, input, other):
"""
Return a tensor of elements selected from either `input` or `other`, depending
on `condition`.
"""
if is_encrypted_tensor(condition):
return condition * input + (1 - condition) * other
elif torch.is_tensor(condition):
condition = condition.float()
return input * condition + other * (1 - condition)
def cat(tensors, dim=0):
assert isinstance(tensors, list), "input to cat must be a list"
if len(tensors) == 1:
return tensors[0]
from .autograd_cryptensor import AutogradCrypTensor
if any(isinstance(t, AutogradCrypTensor) for t in tensors):
if not isinstance(tensors[0], AutogradCrypTensor):
tensors[0] = AutogradCrypTensor(tensors[0], requires_grad=False)
return tensors[0].cat(*tensors[1:], dim=dim)
else:
return get_default_backend().cat(tensors, dim=dim)
def stack(tensors, dim=0):
assert isinstance(tensors, list), "input to stack must be a list"
if len(tensors) == 1:
return tensors[0].unsqueeze(dim)
from .autograd_cryptensor import AutogradCrypTensor
if any(isinstance(t, AutogradCrypTensor) for t in tensors):
if not isinstance(tensors[0], AutogradCrypTensor):
tensors[0] = AutogradCrypTensor(tensors[0], requires_grad=False)
return tensors[0].stack(*tensors[1:], dim=dim)
else:
return get_default_backend().stack(tensors, dim=dim)
# Top level tensor functions
__PASSTHROUGH_FUNCTIONS = ["bernoulli", "rand", "randperm"]
def __add_top_level_function(func_name):
def _passthrough_function(*args, backend=None, **kwargs):
if backend is None:
backend = get_default_backend()
return getattr(backend, func_name)(*args, **kwargs)
globals()[func_name] = _passthrough_function
for func in __PASSTHROUGH_FUNCTIONS:
__add_top_level_function(func)
# expose classes and functions in package:
__all__ = ["CrypTensor", "debug", "init", "init_thread", "mpc", "nn", "uninit"]
| loaded_module = loaded_modules.pop(0)
dummy_module = dummy_modules.pop(0)
# Assert modules have the same number of parameters
loaded_params = [param for param in loaded_module.parameters()]
dummy_params = [param for param in dummy_module.parameters()]
assert len(loaded_params) == len(dummy_params)
for i, param in enumerate(loaded_params):
assert param.size() == dummy_params[i].size()
# Assert that modules have the same number of sub-modules
loaded_module_modules = [mod for mod in loaded_module.modules()][1:]
dummy_module_modules = [mod for mod in dummy_module.modules()][1:]
loaded_modules.extend(loaded_module_modules)
dummy_modules.extend(dummy_module_modules)
assert len(loaded_modules) == len(dummy_modules) | conditional_block |
__init__.py | #!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import crypten.communicator as comm
import crypten.mpc # noqa: F401
import crypten.nn # noqa: F401
import torch
# other imports:
from . import debug
from .cryptensor import CrypTensor
from .mpc import ptype
def init():
comm._init(use_threads=False, init_ttp=crypten.mpc.ttp_required())
if comm.get().get_rank() < comm.get().get_world_size():
_setup_przs()
if crypten.mpc.ttp_required():
crypten.mpc.provider.ttp_provider.TTPClient._init()
def init_thread(rank, world_size):
comm._init(use_threads=True, rank=rank, world_size=world_size)
_setup_przs()
def uninit():
return comm.uninit()
def is_initialized():
return comm.is_initialized()
# the different private type attributes of an mpc encrypted tensor
arithmetic = ptype.arithmetic
binary = ptype.binary
def print_communication_stats():
comm.get().print_communication_stats()
def reset_communication_stats():
comm.get().reset_communication_stats()
# Set backend
__SUPPORTED_BACKENDS = [crypten.mpc]
__default_backend = __SUPPORTED_BACKENDS[0]
def set_default_backend(new_default_backend):
"""Sets the default cryptensor backend (mpc, he)"""
global __default_backend
assert new_default_backend in __SUPPORTED_BACKENDS, (
"Backend %s is not supported" % new_default_backend
)
__default_backend = new_default_backend
def get_default_backend():
"""Returns the default cryptensor backend (mpc, he)"""
return __default_backend
def | (*args, backend=None, **kwargs):
"""
Factory function to return encrypted tensor of given backend.
"""
if backend is None:
backend = get_default_backend()
if backend == crypten.mpc:
return backend.MPCTensor(*args, **kwargs)
else:
raise TypeError("Backend %s is not supported" % backend)
def is_encrypted_tensor(obj):
"""
Returns True if obj is an encrypted tensor.
"""
return isinstance(obj, CrypTensor)
def _setup_przs():
"""
Generate shared random seeds to generate pseudo-random sharings of
zero. The random seeds are shared such that each process shares
one seed with the previous rank process and one with the next rank.
This allows for the generation of `n` random values, each known to
exactly two of the `n` parties.
For arithmetic sharing, one of these parties will add the number
while the other subtracts it, allowing for the generation of a
pseudo-random sharing of zero. (This can be done for binary
sharing using bitwise-xor rather than addition / subtraction)
"""
# Initialize RNG Generators
comm.get().g0 = torch.Generator()
comm.get().g1 = torch.Generator()
# Generate random seeds for Generators
# NOTE: Chosen seed can be any number, but we choose as a random 64-bit
# integer here so other parties cannot guess its value.
# We sometimes get here from a forked process, which causes all parties
# to have the same RNG state. Reset the seed to make sure RNG streams
# are different in all the parties. We use numpy's random here since
# setting its seed to None will produce different seeds even from
# forked processes.
import numpy
numpy.random.seed(seed=None)
next_seed = torch.tensor(numpy.random.randint(-2 ** 63, 2 ** 63 - 1, (1,)))
prev_seed = torch.LongTensor([0]) # placeholder
# Send random seed to next party, receive random seed from prev party
world_size = comm.get().get_world_size()
rank = comm.get().get_rank()
if world_size >= 2: # Otherwise sending seeds will segfault.
next_rank = (rank + 1) % world_size
prev_rank = (next_rank - 2) % world_size
req0 = comm.get().isend(tensor=next_seed, dst=next_rank)
req1 = comm.get().irecv(tensor=prev_seed, src=prev_rank)
req0.wait()
req1.wait()
else:
prev_seed = next_seed
# Seed Generators
comm.get().g0.manual_seed(next_seed.item())
comm.get().g1.manual_seed(prev_seed.item())
def __validate_model(loaded_model, dummy_model):
"""Validates that two models have the same architecture"""
loaded_modules = [loaded_model]
dummy_modules = [dummy_model]
valid = torch.tensor(1, dtype=torch.long)
try:
while len(loaded_modules) > 0:
loaded_module = loaded_modules.pop(0)
dummy_module = dummy_modules.pop(0)
# Assert modules have the same number of parameters
loaded_params = [param for param in loaded_module.parameters()]
dummy_params = [param for param in dummy_module.parameters()]
assert len(loaded_params) == len(dummy_params)
for i, param in enumerate(loaded_params):
assert param.size() == dummy_params[i].size()
# Assert that modules have the same number of sub-modules
loaded_module_modules = [mod for mod in loaded_module.modules()][1:]
dummy_module_modules = [mod for mod in dummy_module.modules()][1:]
loaded_modules.extend(loaded_module_modules)
dummy_modules.extend(dummy_module_modules)
assert len(loaded_modules) == len(dummy_modules)
except AssertionError:
valid = torch.tensor(0, dtype=torch.long)
return valid
def load(f, encrypted=False, dummy_model=None, src=0, **kwargs):
"""
Loads an object saved with `torch.save()` or `crypten.save()`.
Args:
f: a file-like object (has to implement `read()`, `readline()`,
`tell()`, and `seek()`), or a string containing a file name
encrypted: Determines whether crypten should load an encrypted tensor
or a plaintext torch tensor.
dummy_model: Takes a model architecture to fill with the loaded model
(on the `src` party only). Non-source parties will return the
`dummy_model` input (with data unchanged). Loading a model will
assert the correctness of the model architecture provided against
the model loaded. This argument is ignored if the file loaded is
a tensor.
src: Determines the source of the tensor. If `src` is None, each
party will attempt to read in the specified file. If `src` is
specified, the source party will read the tensor from
"""
if encrypted:
raise NotImplementedError("Loading encrypted tensors is not yet supported")
else:
assert isinstance(src, int), "Load failed: src argument must be an integer"
assert (
src >= 0 and src < comm.get().get_world_size()
), "Load failed: src must be in [0, world_size)"
# TODO: Use send_obj and recv_obj to send modules without requiring a
# dummy_model
# source party
if comm.get().get_rank() == src:
result = torch.load(f, **kwargs)
# file contains torch.tensor
if torch.is_tensor(result):
# Broadcast load type
load_type = torch.tensor(0, dtype=torch.long)
comm.get().broadcast(load_type, src=src)
# Broadcast size to other parties.
dim = torch.tensor(result.dim(), dtype=torch.long)
size = torch.tensor(result.size(), dtype=torch.long)
comm.get().broadcast(dim, src=src)
comm.get().broadcast(size, src=src)
result = cryptensor(result, src=src)
# file contains torch module
elif isinstance(result, torch.nn.Module):
# Broadcast load type
load_type = torch.tensor(1, dtype=torch.long)
comm.get().broadcast(load_type, src=src)
# Assert that dummy_model is provided
assert dummy_model is not None and isinstance(
dummy_model, torch.nn.Module
), "dummy model must be provided when loading a model"
# Assert that model architectures are the same
valid = __validate_model(result, dummy_model)
comm.get().broadcast(valid, src=src) # Broadcast validation
assert valid.item(), "Model architecture does not match loaded module"
result.src = src
# file contains unrecognized type
else:
# Broadcast load type
load_type = torch.tensor(-1, dtype=torch.long)
comm.get().broadcast(load_type, src=src)
# raise error
raise TypeError("Unrecognized load type %s" % type(result))
# Non-source party
else:
# Receive load type from source party
load_type = torch.tensor(-1, dtype=torch.long)
comm.get().broadcast(load_type, src=src)
# Load in tensor
if load_type.item() == 0:
# Receive size from source party
dim = torch.empty(size=(), dtype=torch.long)
comm.get().broadcast(dim, src=src)
size = torch.empty(size=(dim.item(),), dtype=torch.long)
comm.get().broadcast(size, src=src)
result = cryptensor(torch.empty(size=tuple(size.tolist())), src=src)
# Load module using dummy_model
elif load_type.item() == 1:
# Assert dummy_model is given
assert dummy_model is not None and isinstance(
dummy_model, torch.nn.Module
), "dummy model must be provided when loading a model"
result = dummy_model
# Receive model architecture validation
valid = torch.tensor(1, dtype=torch.long)
comm.get().broadcast(valid, src=src)
assert valid.item(), "Model architecture does not match loaded module"
result.src = src
else:
raise TypeError("Unrecognized load type on src")
# TODO: Encrypt modules before returning them
return result
def save(obj, f, src=0, **kwargs):
"""
Saves a CrypTensor or PyTorch tensor to a file.
Args:
obj: The CrypTensor or PyTorch tensor to be saved
f: a file-like object (has to implement `read()`, `readline()`,
`tell()`, and `seek()`), or a string containing a file name
src: The source party that writes data to the specified file.
"""
if is_encrypted_tensor(obj):
raise NotImplementedError("Saving encrypted tensors is not yet supported")
else:
assert isinstance(src, int), "Save failed: src must be an integer"
assert (
src >= 0 and src < comm.get().get_world_size()
), "Save failed: src must be an integer in [0, world_size)"
if comm.get().get_rank() == src:
torch.save(obj, f, **kwargs)
# Implement barrier to avoid race conditions that require file to exist
comm.get().barrier()
def where(condition, input, other):
"""
Return a tensor of elements selected from either `input` or `other`, depending
on `condition`.
"""
if is_encrypted_tensor(condition):
return condition * input + (1 - condition) * other
elif torch.is_tensor(condition):
condition = condition.float()
return input * condition + other * (1 - condition)
def cat(tensors, dim=0):
assert isinstance(tensors, list), "input to cat must be a list"
if len(tensors) == 1:
return tensors[0]
from .autograd_cryptensor import AutogradCrypTensor
if any(isinstance(t, AutogradCrypTensor) for t in tensors):
if not isinstance(tensors[0], AutogradCrypTensor):
tensors[0] = AutogradCrypTensor(tensors[0], requires_grad=False)
return tensors[0].cat(*tensors[1:], dim=dim)
else:
return get_default_backend().cat(tensors, dim=dim)
def stack(tensors, dim=0):
assert isinstance(tensors, list), "input to stack must be a list"
if len(tensors) == 1:
return tensors[0].unsqueeze(dim)
from .autograd_cryptensor import AutogradCrypTensor
if any(isinstance(t, AutogradCrypTensor) for t in tensors):
if not isinstance(tensors[0], AutogradCrypTensor):
tensors[0] = AutogradCrypTensor(tensors[0], requires_grad=False)
return tensors[0].stack(*tensors[1:], dim=dim)
else:
return get_default_backend().stack(tensors, dim=dim)
# Top level tensor functions
__PASSTHROUGH_FUNCTIONS = ["bernoulli", "rand", "randperm"]
def __add_top_level_function(func_name):
def _passthrough_function(*args, backend=None, **kwargs):
if backend is None:
backend = get_default_backend()
return getattr(backend, func_name)(*args, **kwargs)
globals()[func_name] = _passthrough_function
for func in __PASSTHROUGH_FUNCTIONS:
__add_top_level_function(func)
# expose classes and functions in package:
__all__ = ["CrypTensor", "debug", "init", "init_thread", "mpc", "nn", "uninit"]
| cryptensor | identifier_name |
__init__.py | #!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import crypten.communicator as comm | import crypten.nn # noqa: F401
import torch
# other imports:
from . import debug
from .cryptensor import CrypTensor
from .mpc import ptype
def init():
comm._init(use_threads=False, init_ttp=crypten.mpc.ttp_required())
if comm.get().get_rank() < comm.get().get_world_size():
_setup_przs()
if crypten.mpc.ttp_required():
crypten.mpc.provider.ttp_provider.TTPClient._init()
def init_thread(rank, world_size):
comm._init(use_threads=True, rank=rank, world_size=world_size)
_setup_przs()
def uninit():
return comm.uninit()
def is_initialized():
return comm.is_initialized()
# the different private type attributes of an mpc encrypted tensor
arithmetic = ptype.arithmetic
binary = ptype.binary
def print_communication_stats():
comm.get().print_communication_stats()
def reset_communication_stats():
comm.get().reset_communication_stats()
# Set backend
__SUPPORTED_BACKENDS = [crypten.mpc]
__default_backend = __SUPPORTED_BACKENDS[0]
def set_default_backend(new_default_backend):
"""Sets the default cryptensor backend (mpc, he)"""
global __default_backend
assert new_default_backend in __SUPPORTED_BACKENDS, (
"Backend %s is not supported" % new_default_backend
)
__default_backend = new_default_backend
def get_default_backend():
"""Returns the default cryptensor backend (mpc, he)"""
return __default_backend
def cryptensor(*args, backend=None, **kwargs):
"""
Factory function to return encrypted tensor of given backend.
"""
if backend is None:
backend = get_default_backend()
if backend == crypten.mpc:
return backend.MPCTensor(*args, **kwargs)
else:
raise TypeError("Backend %s is not supported" % backend)
def is_encrypted_tensor(obj):
"""
Returns True if obj is an encrypted tensor.
"""
return isinstance(obj, CrypTensor)
def _setup_przs():
"""
Generate shared random seeds to generate pseudo-random sharings of
zero. The random seeds are shared such that each process shares
one seed with the previous rank process and one with the next rank.
This allows for the generation of `n` random values, each known to
exactly two of the `n` parties.
For arithmetic sharing, one of these parties will add the number
while the other subtracts it, allowing for the generation of a
pseudo-random sharing of zero. (This can be done for binary
sharing using bitwise-xor rather than addition / subtraction)
"""
# Initialize RNG Generators
comm.get().g0 = torch.Generator()
comm.get().g1 = torch.Generator()
# Generate random seeds for Generators
# NOTE: Chosen seed can be any number, but we choose as a random 64-bit
# integer here so other parties cannot guess its value.
# We sometimes get here from a forked process, which causes all parties
# to have the same RNG state. Reset the seed to make sure RNG streams
# are different in all the parties. We use numpy's random here since
# setting its seed to None will produce different seeds even from
# forked processes.
import numpy
numpy.random.seed(seed=None)
next_seed = torch.tensor(numpy.random.randint(-2 ** 63, 2 ** 63 - 1, (1,)))
prev_seed = torch.LongTensor([0]) # placeholder
# Send random seed to next party, receive random seed from prev party
world_size = comm.get().get_world_size()
rank = comm.get().get_rank()
if world_size >= 2: # Otherwise sending seeds will segfault.
next_rank = (rank + 1) % world_size
prev_rank = (next_rank - 2) % world_size
req0 = comm.get().isend(tensor=next_seed, dst=next_rank)
req1 = comm.get().irecv(tensor=prev_seed, src=prev_rank)
req0.wait()
req1.wait()
else:
prev_seed = next_seed
# Seed Generators
comm.get().g0.manual_seed(next_seed.item())
comm.get().g1.manual_seed(prev_seed.item())
def __validate_model(loaded_model, dummy_model):
"""Validates that two models have the same architecture"""
loaded_modules = [loaded_model]
dummy_modules = [dummy_model]
valid = torch.tensor(1, dtype=torch.long)
try:
while len(loaded_modules) > 0:
loaded_module = loaded_modules.pop(0)
dummy_module = dummy_modules.pop(0)
# Assert modules have the same number of parameters
loaded_params = [param for param in loaded_module.parameters()]
dummy_params = [param for param in dummy_module.parameters()]
assert len(loaded_params) == len(dummy_params)
for i, param in enumerate(loaded_params):
assert param.size() == dummy_params[i].size()
# Assert that modules have the same number of sub-modules
loaded_module_modules = [mod for mod in loaded_module.modules()][1:]
dummy_module_modules = [mod for mod in dummy_module.modules()][1:]
loaded_modules.extend(loaded_module_modules)
dummy_modules.extend(dummy_module_modules)
assert len(loaded_modules) == len(dummy_modules)
except AssertionError:
valid = torch.tensor(0, dtype=torch.long)
return valid
def load(f, encrypted=False, dummy_model=None, src=0, **kwargs):
"""
Loads an object saved with `torch.save()` or `crypten.save()`.
Args:
f: a file-like object (has to implement `read()`, `readline()`,
`tell()`, and `seek()`), or a string containing a file name
encrypted: Determines whether crypten should load an encrypted tensor
or a plaintext torch tensor.
dummy_model: Takes a model architecture to fill with the loaded model
(on the `src` party only). Non-source parties will return the
`dummy_model` input (with data unchanged). Loading a model will
assert the correctness of the model architecture provided against
the model loaded. This argument is ignored if the file loaded is
a tensor.
src: Determines the source of the tensor. If `src` is None, each
party will attempt to read in the specified file. If `src` is
specified, the source party will read the tensor from
"""
if encrypted:
raise NotImplementedError("Loading encrypted tensors is not yet supported")
else:
assert isinstance(src, int), "Load failed: src argument must be an integer"
assert (
src >= 0 and src < comm.get().get_world_size()
), "Load failed: src must be in [0, world_size)"
# TODO: Use send_obj and recv_obj to send modules without requiring a
# dummy_model
# source party
if comm.get().get_rank() == src:
result = torch.load(f, **kwargs)
# file contains torch.tensor
if torch.is_tensor(result):
# Broadcast load type
load_type = torch.tensor(0, dtype=torch.long)
comm.get().broadcast(load_type, src=src)
# Broadcast size to other parties.
dim = torch.tensor(result.dim(), dtype=torch.long)
size = torch.tensor(result.size(), dtype=torch.long)
comm.get().broadcast(dim, src=src)
comm.get().broadcast(size, src=src)
result = cryptensor(result, src=src)
# file contains torch module
elif isinstance(result, torch.nn.Module):
# Broadcast load type
load_type = torch.tensor(1, dtype=torch.long)
comm.get().broadcast(load_type, src=src)
# Assert that dummy_model is provided
assert dummy_model is not None and isinstance(
dummy_model, torch.nn.Module
), "dummy model must be provided when loading a model"
# Assert that model architectures are the same
valid = __validate_model(result, dummy_model)
comm.get().broadcast(valid, src=src) # Broadcast validation
assert valid.item(), "Model architecture does not match loaded module"
result.src = src
# file contains unrecognized type
else:
# Broadcast load type
load_type = torch.tensor(-1, dtype=torch.long)
comm.get().broadcast(load_type, src=src)
# raise error
raise TypeError("Unrecognized load type %s" % type(result))
# Non-source party
else:
# Receive load type from source party
load_type = torch.tensor(-1, dtype=torch.long)
comm.get().broadcast(load_type, src=src)
# Load in tensor
if load_type.item() == 0:
# Receive size from source party
dim = torch.empty(size=(), dtype=torch.long)
comm.get().broadcast(dim, src=src)
size = torch.empty(size=(dim.item(),), dtype=torch.long)
comm.get().broadcast(size, src=src)
result = cryptensor(torch.empty(size=tuple(size.tolist())), src=src)
# Load module using dummy_model
elif load_type.item() == 1:
# Assert dummy_model is given
assert dummy_model is not None and isinstance(
dummy_model, torch.nn.Module
), "dummy model must be provided when loading a model"
result = dummy_model
# Receive model architecture validation
valid = torch.tensor(1, dtype=torch.long)
comm.get().broadcast(valid, src=src)
assert valid.item(), "Model architecture does not match loaded module"
result.src = src
else:
raise TypeError("Unrecognized load type on src")
# TODO: Encrypt modules before returning them
return result
def save(obj, f, src=0, **kwargs):
"""
Saves a CrypTensor or PyTorch tensor to a file.
Args:
obj: The CrypTensor or PyTorch tensor to be saved
f: a file-like object (has to implement `read()`, `readline()`,
`tell()`, and `seek()`), or a string containing a file name
src: The source party that writes data to the specified file.
"""
if is_encrypted_tensor(obj):
raise NotImplementedError("Saving encrypted tensors is not yet supported")
else:
assert isinstance(src, int), "Save failed: src must be an integer"
assert (
src >= 0 and src < comm.get().get_world_size()
), "Save failed: src must be an integer in [0, world_size)"
if comm.get().get_rank() == src:
torch.save(obj, f, **kwargs)
# Implement barrier to avoid race conditions that require file to exist
comm.get().barrier()
def where(condition, input, other):
"""
Return a tensor of elements selected from either `input` or `other`, depending
on `condition`.
"""
if is_encrypted_tensor(condition):
return condition * input + (1 - condition) * other
elif torch.is_tensor(condition):
condition = condition.float()
return input * condition + other * (1 - condition)
def cat(tensors, dim=0):
assert isinstance(tensors, list), "input to cat must be a list"
if len(tensors) == 1:
return tensors[0]
from .autograd_cryptensor import AutogradCrypTensor
if any(isinstance(t, AutogradCrypTensor) for t in tensors):
if not isinstance(tensors[0], AutogradCrypTensor):
tensors[0] = AutogradCrypTensor(tensors[0], requires_grad=False)
return tensors[0].cat(*tensors[1:], dim=dim)
else:
return get_default_backend().cat(tensors, dim=dim)
def stack(tensors, dim=0):
assert isinstance(tensors, list), "input to stack must be a list"
if len(tensors) == 1:
return tensors[0].unsqueeze(dim)
from .autograd_cryptensor import AutogradCrypTensor
if any(isinstance(t, AutogradCrypTensor) for t in tensors):
if not isinstance(tensors[0], AutogradCrypTensor):
tensors[0] = AutogradCrypTensor(tensors[0], requires_grad=False)
return tensors[0].stack(*tensors[1:], dim=dim)
else:
return get_default_backend().stack(tensors, dim=dim)
# Top level tensor functions
__PASSTHROUGH_FUNCTIONS = ["bernoulli", "rand", "randperm"]
def __add_top_level_function(func_name):
def _passthrough_function(*args, backend=None, **kwargs):
if backend is None:
backend = get_default_backend()
return getattr(backend, func_name)(*args, **kwargs)
globals()[func_name] = _passthrough_function
for func in __PASSTHROUGH_FUNCTIONS:
__add_top_level_function(func)
# expose classes and functions in package:
__all__ = ["CrypTensor", "debug", "init", "init_thread", "mpc", "nn", "uninit"] | import crypten.mpc # noqa: F401 | random_line_split |
debugvm.py | #!/usr/bin/python3
from dearpygui.core import *
from dearpygui.simple import *
from disasm import DebugDis
import json, sys, random
from collections import OrderedDict
from vm import *
import systemtime
FontSize = 5
Windows = {}
CharW = 1
CharH = 1
SYSTIME = systemtime.SystemTime()
VMCPU = CPU(SYSTIME)
PREFS_FILE_NAME = 'prefs.debugvm.json'
UI_FILE_NAME = 'ui.debugvm.json'
FILETOLOAD = "froths/ShotSequencer.debug"
def charW(x):
return int(round(x*CharW))
def charH(x):
return int(round(x*CharH))
def add_default_prefs():
add_value("Display DPI", 160.0)
add_value("Font height (mm)", 4.0)
def set_from_prefs_dict(prefs):
print("Loading prefs from prefs.json: ")
for i in prefs:
set_value(i, prefs[i])
print(" %s: %s" % (i, prefs[i]))
def save_prefs():
prefs = OrderedDict()
dpi = get_value("Display DPI")
height = get_value("Font height (mm)")
prefs["Display DPI"] = dpi
prefs["Font height (mm)"] = height
with open(PREFS_FILE_NAME, 'w') as outfile:
json.dump(prefs, outfile, indent=2)
outfile.close()
def load_prefs():
try:
with open(PREFS_FILE_NAME, 'r') as infile:
prefs = json.load(infile)
set_from_prefs_dict(prefs)
except FileNotFoundError:
# No prefs file, so save defaults to create new prefs file
save_prefs()
def save_ui():
wlist = get_windows()
wlist.remove('filedialog')
vp = {"ViewportSize" : get_main_window_size()}
config = [vp]
for win in wlist:
config.append(get_item_configuration(win))
with open(UI_FILE_NAME, 'w') as outfile:
json.dump(config, outfile, indent=2)
def cb_save_ui(sender, data):
save_ui()
def delkeys(dict, keys):
for key in keys:
try:
del dict[key]
except KeyError:
pass
def restore_ui():
try:
with open(UI_FILE_NAME, 'r') as infile:
config = json.load(infile)
viewportinfo, config = config[0], config[1:]
set_main_window_size(viewportinfo['ViewportSize'][0], viewportinfo['ViewportSize'][1])
for win in config:
name = win['name']
if does_item_exist(name):
#print(f"Config for {name} : {win}\n")
delkeys(win, ["source", "tip", "enabled", "menubar"])
iconfig = {}
for i in ("x_pos", "y_pos", "width", "height", "enabled", "show"):
if i in win:
iconfig[i] = win[i]
#print(win)
configure_item(name, **iconfig)
except FileNotFoundError:
pass
def cb_restore_ui(sender, data):
restore_ui()
def log_callback(sender, data):
log_debug(f"{sender} ran a callback its value is {get_value(sender)}")
def setupFonts():
dpi = get_value("Display DPI")
height = get_value("Font height (mm)")
# Use Mononoki font, 0.25 inches high
global FontSize
FontSize = int(round(height/25.4*dpi))
add_additional_font("fonts/mononoki-Regular.ttf", FontSize)
#add_additional_font("fonts/FiraCode-Regular.otf", FontSize)
#add_additional_font("fonts/Inconsolata.otf", FontSize)
#add_additional_font("fonts/ProggyClean.ttf", FontSize)
#add_additional_font("fonts/TerminusTTF-4.46.0.ttf", FontSize)
def cb_set_display_DPI(sender, data):
print(sender, data)
setupFonts()
def cb_set_font_height(sender, data):
print(sender, data)
setupFonts()
def callback_size_prefs(sender, data):
with window("Display Preferences", autosize=True):
add_drag_float("Display DPI", callback=cb_set_display_DPI,
default_value=160,
min_value=10,
max_value=300,
clamped=True,
)
add_drag_float("Font height (mm)", callback=cb_set_font_height,
default_value=4.0,
min_value=1.0,
max_value=20.0,
clamped=True
)
def cb_load_data(sender, data):
print(sender, data)
def callback_load_debug(sender, data):
open_file_dialog(callback=cb_load_data, extensions = ".debug") # Works great
def update_cpu_views():
if "CallStack" in Windows:
Windows["CallStack"].updateDisplay()
if "Stack" in Windows:
Windows["Stack"].updateDisplay()
if "CPUInfo" in Windows:
Windows["CPUInfo"].updateDisplay()
if "Program" in Windows:
Windows["Program"].updateDisplay()
if "Memory" in Windows:
Windows["Memory"].updateDisplay()
def cb_run(sender, data):
if "Program" not in Windows:
return
try:
VMCPU.step(ignorebp=VMCPU.PC)
update_cpu_views()
while True:
VMCPU.step()
if random.random() < 0.01: # Don't update UI every step... can be slow
update_cpu_views()
except VMCPUStopped:
update_cpu_views()
except VMCPUBreakpoint:
update_cpu_views()
def cb_step(sender, data):
if "Program" not in Windows:
return
try:
VMCPU.step(ignorebp=VMCPU.PC)
update_cpu_views()
except VMCPUStopped:
update_cpu_views()
except VMCPUBreakpoint:
update_cpu_views()
def cb_out(sender, data):
if "Program" not in Windows:
return
try:
while VMCPU.getCurrentOpcodeName()!= ";":
VMCPU.step(ignorebp=VMCPU.PC)
update_cpu_views()
VMCPU.step(ignorebp=VMCPU.PC)
update_cpu_views()
except VMCPUStopped:
update_cpu_views()
except VMCPUBreakpoint:
update_cpu_views()
def cb_lstep(sender, data):
# Step opcode execution until associated source line changes
if "Program" not in Windows:
return
try:
editor = Windows["Program"]
currentpc = VMCPU.PC
currentline = editor.D.getSourceLineForAddr(VMCPU.PC)
line = currentline
while (line == currentline):
VMCPU.step(ignorebp=currentpc)
update_cpu_views()
line = editor.D.getSourceLineForAddr(VMCPU.PC)
except VMCPUStopped:
update_cpu_views()
except VMCPUBreakpoint:
update_cpu_views()
# def step_opcode():
# addr = VMCPU.PC
# nextaddr = VMCPU.nextOpcodeAddr(addr)
# while VMCPU.PC != nextaddr:
# VMCPU.step()
def cb_nextl(sender, data):
# Step opcode execution until source line increments, ignoring subroutines
if "Program" not in Windows:
return
try:
editor = Windows["Program"]
currentop = VMCPU.getOpcodeName(VMCPU.ROM[VMCPU.PC])
if currentop == ';':
cb_step(sender, data)
return
# TODO: FOR is going to create some interesting corner cases. Will deal with it later.
currentpc = VMCPU.PC
currentline = editor.D.getSourceLineForAddr(VMCPU.PC)
line = currentline
nextlineaddr = editor.D.getAddrForSourceLine(currentline+1)
while line == currentline:
while VMCPU.PC != nextlineaddr:
VMCPU.step(ignorebp=currentpc)
if random.random() < 0.05:
update_cpu_views() # Update UI 5% of the time
update_cpu_views()
line = editor.D.getSourceLineForAddr(VMCPU.PC)
except VMCPUStopped:
update_cpu_views()
except VMCPUBreakpoint:
update_cpu_views()
def cb_over(sender, data):
# Step opcode execution until source line changes, without viewing subroutines
if "Program" not in Windows:
return
try:
editor = Windows["Program"]
currentop = VMCPU.getOpcodeName(VMCPU.ROM[VMCPU.PC])
if currentop == ';':
cb_step(sender, data)
return
currentpc = VMCPU.PC
currentline = editor.D.getSourceLineForAddr(VMCPU.PC)
line = currentline
while line == currentline:
if VMCPU.isCurrentOpCall():
# It's a call to a subroutine. Run until we get out.
nextlineaddr = editor.D.getAddrForSourceLine(line+1)
while VMCPU.PC != nextlineaddr:
VMCPU.step(ignorebp=currentpc)
else:
# Not a call. Just execute an opcode
VMCPU.step(ignorebp=currentpc)
update_cpu_views()
line = editor.D.getSourceLineForAddr(VMCPU.PC)
except VMCPUStopped:
update_cpu_views()
except VMCPUBreakpoint:
update_cpu_views()
def cb_shot(sender, data):
SYSTIME.reset()
VMCPU.reset()
VMCPU.moveToWord("RunShot")
update_cpu_views()
def cb_idle(sender, data):
VMCPU.reset()
VMCPU.moveToWord("Idle")
update_cpu_views()
def cb_halt(sender, data):
VMCPU.reset()
VMCPU.moveToWord("Halt")
update_cpu_views()
def add_controls():
if does_item_exist("Controls"):
delete_item("Controls")
with window("Controls", autosize=True, x_pos=0, y_pos=0):
with group("Buttons1", horizontal=True):
w = charW(6)
add_button("STEP", width=w, callback=cb_step, tip="Run one instruction")
add_button("STEPL", width=w, callback=cb_lstep, tip="Run one source line of code")
add_button("NEXTL", width=w, callback=cb_nextl, tip="Run until next source line of code")
with group("Buttons2", horizontal=True):
add_button("OVER", width=w, callback=cb_over, tip="Run one line of code, don't show subroutines")
add_button("OUT", width=w, callback=cb_out, tip="Run until ';' is executed")
add_button("RUN", width=w, callback=cb_run, tip="Run until completion, or a breakpoint")
with group("Buttons3", horizontal=True):
add_button("SHOT", width=w, callback=cb_shot, tip="Move to 'RunShot'")
add_button("IDLE", width=w, callback=cb_idle, tip="Move to 'Idle'")
add_button("HALT", width=w, callback=cb_halt, tip="Move to 'Halt'")
for item in get_item_children("Controls"):
set_item_style_var(item, mvGuiStyleVar_FrameRounding, [charH(1)*0.3])
set_item_style_var(item, mvGuiStyleVar_FramePadding, [charW(1)*0.3, 1])
def add_editor():
if does_item_exist("Program"):
del Windows["Program"]
delete_item("Program")
Windows["Program"] = Editor(FILETOLOAD)
def cb_add_controls(sender, data):
add_controls()
def cb_add_editor(sender, data):
add_editor()
def cb_nop(sender, data):
pass
def hsv_to_rgb(h: float, s: float, v: float, a:float) -> (float, float, float, float):
if s == 0.0: return (v, v, v, 255*a)
i = int(h*6.)
f = (h*6.)-i; p,q,t = v*(1.-s), v*(1.-s*f), v*(1.-s*(1.-f)); i%=6
if i == 0: return (255*v, 255*t, 255*p, 255*a)
if i == 1: return (255*q, 255*v, 255*p, 255*a)
if i == 2: return (255*p, 255*v, 255*t, 255*a)
if i == 3: return (255*p, 255*q, 255*v, 255*a)
if i == 4: return (255*t, 255*p, 255*v, 255*a)
if i == 5: return (255*v, 255*p, 255*q, 255*a)
class Editor:
def __init__(self, filename):
self.D = DebugDis(filename)
self.TextLines = self.D.SourceLines
self.addLines()
self.Selected = None
def selectMemAddr(self, addr):
"""
Highlight the line of code associated with the CPU program counter
"""
oldaddr = self.Selected
if oldaddr != None:
sl = self.D.getSourceLineForAddr(oldaddr)
item = f"SourceL{sl}"
#for item in get_item_children(f"SourceG{sl}"):
set_item_color(item, mvGuiCol_Button, [0,0,0,0])
self.Selected = addr
if self.Selected != None:
sl = self.D.getSourceLineForAddr(addr)
#for item in get_item_children(f"SourceG{sl}"):
item = f"SourceL{sl}"
set_item_color(item, mvGuiCol_Button, hsv_to_rgb(4/7.0, 0.8, 0.8, 1.0))
#set_item_color(f"SourceLNG{sl}", mvGuiCol_Text, [155,0,75,175])
#configure_item(f"SourceL{sl}", enabled=True)
#print(get_item_configuration(f"SourceL{sl}"))
def updateDisplay(self):
self.selectMemAddr(VMCPU.PC)
def cb_addr_click(self, sender, data):
#print(sender, data)
VMCPU.toggleBP(data)
item = f"SourceLN{self.D.getSourceLineForAddr(data)}"
i = 4
hovercol = hsv_to_rgb(i/7.0, 0.7, 0.7, 0.3)
if VMCPU.isBP(data):
set_item_color(item, mvGuiCol_Button, hsv_to_rgb(i/7.0, 0.8, 0.8, 1.0))
set_item_color(item, mvGuiCol_ButtonHovered, hsv_to_rgb(i/7.0, 0.8, 0.8, 1.0))
configure_item(item, tip="Breakpoint at Addr %d" % data)
else:
set_item_color(item, mvGuiCol_Button, [0,0,0,0])
set_item_color(item, mvGuiCol_ButtonHovered, hovercol)
configure_item(item, tip="")
def addLine(self, name, count, field1, field2, padto, cb, cb_data):
field2 = field2 + (' '*(padto-len(field2))) + ' '
with group(f"{name}G{count}", horizontal=True):
add_button(f"{name}LN{count}", label = field1, callback=cb, callback_data=cb_data)
if field2 == '':
add_button(f"{name}L{count}", label = ' ')
else:
add_button(f"{name}L{count}", label = field2)
i = 4
hovercol = hsv_to_rgb(i/7.0, 0.7, 0.7, 0.3)
for item in get_item_children(f"{name}G{count}"):
set_item_color(item, mvGuiCol_Button, [0,0,0,0])
set_item_color(item, mvGuiCol_ButtonHovered, hovercol)
set_item_color(item, mvGuiCol_ButtonActive, hsv_to_rgb(i/7.0, 0.8, 0.8, 1.0))
set_item_style_var(item, mvGuiStyleVar_FrameRounding, [2])
set_item_style_var(item, mvGuiStyleVar_FramePadding, [1, 1])
def addLines(self):
longestline = max(len(x.rstrip()) for x in self.TextLines)
with window("Program", x_pos=400, y_pos=200, width=charW(longestline+12), height=charH(40), no_scrollbar=True):
with tab_bar("ProgramTab"):
with tab("Source"):
with child("SourceChild", autosize_x=True, autosize_y=True):
for i, line in enumerate(self.TextLines, start=1):
addr = self.D.getAddrForSourceLine(i)
self.addLine("Source", i, "%5d" % i, line, longestline, self.cb_addr_click, addr)
with tab("Opcodes"):
memdump = self.D.dumpOpcodes()
for i, op in enumerate(memdump):
addr = op[0]
with group(f"opcodesLG{addr}", horizontal=True):
add_text(f"opcodeAddr{addr}", default_value= "%5d" % op[0])
add_text(f"opcodeBytes{addr}", default_value= " ".join([ "%02X" % x for x in op[1]]))
if op[2]:
add_text(f"opcodeval{i}", default_value= ("%d" % op[2]))
add_text(f"opcodesym{i}", default_value='('+op[3]+')')
else:
add_text(f"opcodesym{i}", default_value=op[3])
class MemoryDisplay:
def __init__(self, cpu, name):
self.Name = name
self.CPU = cpu
self.createDisplay()
def getFloatAddrInfo(self, a):
"""
Given an address, return the value at that address, and it's symbol
"""
return self.CPU.memFetch(SEntry(a, None))
def getByteAddrInfo(self, a):
"""
Given an address, return the value at that address, and it's symbol
"""
return self.CPU.memFetchB(SEntry(a, None))
def | (self):
wl = self.CPU.getMemWriteList()
if len(wl):
for addr, writelen in wl:
for byteaddr in range(addr, addr+writelen):
val, sym = self.getByteAddrInfo(byteaddr)
val = int(round(val))
set_value(f"{self.Name}bval_{byteaddr}", "%02X %4d" % (val, val))
set_value(f"{self.Name}bsym_{byteaddr}", "%s" % sym)
if (byteaddr % 4) == 0:
val, sym = self.getFloatAddrInfo(byteaddr)
set_value(f"{self.Name}fval_{byteaddr}", "%12.6f" % val)
set_value(f"{self.Name}fsym_{byteaddr}", "%s" % sym)
self.CPU.clearMemWriteList()
def createDisplay(self):
with window(self.Name):
with child(f"{self.Name}child", width=charW(100), height=charH(16), border=False):
with managed_columns(f"{self.Name}mc", 2):
with group(f"{self.Name}left"):
for byteaddr in range(256):
# 4 Bytes and a Float
# Horizontal double column, 4 bytes on the left, float on the right
with group(f"{self.Name}bline_{byteaddr}", horizontal=True):
val, sym = self.getByteAddrInfo(byteaddr)
val = int(round(val))
add_text(f"{self.Name}byte_{byteaddr}", default_value="%05d %04x" % (byteaddr, byteaddr))
add_text(f"{self.Name}bval_{byteaddr}", default_value="%02X %4d" % (val, val))
add_text(f"{self.Name}bsym_{byteaddr}", default_value="%s" % (sym,))
with group(f"{self.Name}right"):
for addr in range(0, 256):
if (addr % 4) == 0:
with group(f"{self.Name}fline_{addr}", horizontal=True):
val, sym = self.getFloatAddrInfo(addr)
#add_text(f"{self.Name}float_{addr}", default_value="%05d %04x" % (addr, addr))
add_text(f"{self.Name}fval_{addr}", default_value="%12.6f" % (val,))
add_text(f"{self.Name}fsym_{addr}", default_value="%s" % (sym,))
else:
with group(f"{self.Name}spacerg_{addr}", horizontal=True):
add_text(f"{self.Name}spacerl_{addr}", default_value=' ')
class StackDisplay:
def __init__(self, name, stack):
self.Stack = stack
self.Name = name
self.createDisplay()
def getStackVal(self, pos):
if len(self.Stack) > pos:
return self.Stack.read(pos)
else:
return None
def updateDisplay(self):
if self.Stack.Changed:
for i in range(64):
sv = self.getStackVal(i)
if sv != None:
#print(get_item_configuration(f"{self.Name}val_{i}"))
configure_item(f"{self.Name}val_{i}", label=("%12.6f" % self.getStackVal(i).float))
set_value(f"{self.Name}sym_{i}", self.getStackVal(i).symbol)
configure_item(f"{self.Name}sym_{i}", tip=self.getStackVal(i).symbol)
else:
configure_item(f"{self.Name}val_{i}", label="------------")
set_value(f"{self.Name}sym_{i}", '')
configure_item(f"{self.Name}sym_{i}", tip='')
def createDisplay(self):
with window(self.Name, autosize=True):
with child(f"{self.Name}child", width=charW(40), height=charH(16), border=False):
for i in range(64):
with group(f"{self.Name}group_{i}", horizontal=True):
add_text(f"{self.Name}pos_{i}", default_value="%02d" % i)
sv = self.getStackVal(i)
if sv != None:
with tree_node(f"{self.Name}val_{i}", label="%12.6f" % self.getStackVal(i).float, default_open=True):
add_text(f"{self.Name}sym_{i}", default_value=self.getStackVal(i).symbol)
else:
with tree_node(f"{self.Name}val_{i}", label="------------", default_open=True):
add_text(f"{self.Name}sym_{i}", default_value='')
def add_stack(stack, name):
if does_item_exist(name):
del Windows[name]
delete_item(name)
Windows[name] = StackDisplay(name, stack)
def add_mem(cpu, name):
if does_item_exist(name):
del Windows[name]
delete_item(name)
Windows[name] = MemoryDisplay(cpu, name)
class CPUInfo:
def __init__(self, cpu, name):
self.Name = name
self.CPU = cpu
self.createDisplay()
def updateDisplay(self):
set_value(f"{self.Name}PC", "PC: %05d" % self.CPU.PC)
set_value(f"{self.Name}Cycles", "Cycles: %06d" % self.CPU.Cycles)
def createDisplay(self):
with window(self.Name, autosize=True):
with child(f"{self.Name}child", width=charW(16), height=charH(3)):
with group(f"{self.Name}group"):
add_text(f"{self.Name}PC", default_value="PC: %05d" % self.CPU.PC)
add_text(f"{self.Name}Cycles", default_value="Cycles: %06d" % self.CPU.Cycles)
def add_cpu_info(cpu, name):
if does_item_exist(name):
del Windows[name]
delete_item(name)
Windows[name] = CPUInfo(cpu, name)
def fix_window_positions():
wp = get_style_frame_padding()
mbw, mbh = [int(x) for x in get_item_rect_size("MenuBar")]
windows = get_windows()
windows = [x for x in windows if x != "Main Window"]
for i in windows:
x, y = [int(x) for x in get_window_pos(i)]
fix = False
if x < 0:
x = 0
fix = True
if y < mbh:
y = mbh+int(wp[1])
fix = True
if fix:
set_window_pos(i, x, y)
def cb_mouse_release(sender, data):
fix_window_positions()
def cb_close(sender, data):
set_mouse_release_callback(None)
set_render_callback(None)
def add_de1graph():
with window("Graphs"):
add_plot("DE1", x_axis_name="Time/[s]", y_axis_name="Pressure",
yaxis2=True,
yaxis3=True
)
def setup_UI(sender, data):
global CharW
global CharH
x, y = get_item_rect_size("CharRuler")
#print(x,y)
CharW = float(x/100)
CharH = float(y/10)
#print(f"Character width is: {CharW}")
#print(f"Character height is: {CharH}")
delete_item("CharRuler")
add_controls()
add_editor()
add_stack(VMCPU.CallStack, "CallStack")
add_stack(VMCPU.Stack, "Stack")
add_cpu_info(VMCPU, "CPUInfo")
add_mem(VMCPU, "Memory")
VMCPU.loadDebug(FILETOLOAD)
VMCPU.moveToWord("RunShot")
update_cpu_views()
set_main_window_title("Dalgona Debugger")
set_item_color("Main Window", mvGuiCol_WindowBg, [128, 128, 128, 0])
set_style_global_alpha(1.0)
set_mouse_release_callback(cb_mouse_release)
restore_ui()
fix_window_positions()
def main():
add_default_prefs()
load_prefs()
set_theme("Dark")
setupFonts()
#enable_docking(dock_space=True, shift_only=False)
#set_style_window_rounding(charH(0.25))
#set_style_frame_rounding(charH(0.25))
with window("Main Window", label="Espresso Forth Debugger", width=160, height=120, on_close=cb_close):
with menu_bar("MenuBar"):
with menu("File"):
add_menu_item("Load Debug", callback=callback_load_debug)
with menu("Windows"):
add_menu_item("Save Layout", label="Save Layout", callback=cb_save_ui)
add_menu_item("Restore Layout", label="Restore Layout", callback=cb_restore_ui)
add_menu_item("ControlsMI", label="Controls", callback=cb_add_controls)
add_menu_item("EditorMI", label="Editor", callback=cb_add_editor)
with menu("Extras"):
add_menu_item("Show Logger", callback=show_logger)
add_menu_item("Show About", callback=show_about)
add_menu_item("Show Metrics", callback=show_metrics)
add_menu_item("Show Documentation", callback=show_documentation)
add_menu_item("Show Debug", callback=show_debug)
add_menu_item("Show Style Editor", callback=show_style_editor)
add_text("CharRuler", default_value = ("\n".join(['H'*100]*10))) #, color=[0,0,0,0])
add_de1graph()
set_start_callback(setup_UI)
try:
with open(UI_FILE_NAME, 'r') as infile:
config = json.load(infile)
viewportinfo, config = config[0], config[1:]
set_main_window_size(viewportinfo['ViewportSize'][0], viewportinfo['ViewportSize'][1])
except FileNotFoundError:
pass
start_dearpygui(primary_window="Main Window")
if __name__ == '__main__':
main() | updateDisplay | identifier_name |
debugvm.py | #!/usr/bin/python3
from dearpygui.core import *
from dearpygui.simple import *
from disasm import DebugDis
import json, sys, random
from collections import OrderedDict
from vm import *
import systemtime
FontSize = 5
Windows = {}
CharW = 1
CharH = 1
SYSTIME = systemtime.SystemTime()
VMCPU = CPU(SYSTIME)
PREFS_FILE_NAME = 'prefs.debugvm.json'
UI_FILE_NAME = 'ui.debugvm.json'
FILETOLOAD = "froths/ShotSequencer.debug"
def charW(x):
return int(round(x*CharW))
def charH(x):
return int(round(x*CharH))
def add_default_prefs():
add_value("Display DPI", 160.0)
add_value("Font height (mm)", 4.0)
def set_from_prefs_dict(prefs):
print("Loading prefs from prefs.json: ")
for i in prefs:
set_value(i, prefs[i])
print(" %s: %s" % (i, prefs[i]))
def save_prefs():
prefs = OrderedDict()
dpi = get_value("Display DPI")
height = get_value("Font height (mm)")
prefs["Display DPI"] = dpi
prefs["Font height (mm)"] = height
with open(PREFS_FILE_NAME, 'w') as outfile:
json.dump(prefs, outfile, indent=2)
outfile.close()
def load_prefs():
try:
with open(PREFS_FILE_NAME, 'r') as infile:
prefs = json.load(infile)
set_from_prefs_dict(prefs)
except FileNotFoundError:
# No prefs file, so save defaults to create new prefs file
save_prefs()
def save_ui():
wlist = get_windows()
wlist.remove('filedialog')
vp = {"ViewportSize" : get_main_window_size()}
config = [vp]
for win in wlist:
config.append(get_item_configuration(win))
with open(UI_FILE_NAME, 'w') as outfile:
json.dump(config, outfile, indent=2)
def cb_save_ui(sender, data):
save_ui()
def delkeys(dict, keys):
for key in keys:
try:
del dict[key]
except KeyError:
pass
def restore_ui():
try:
with open(UI_FILE_NAME, 'r') as infile:
config = json.load(infile)
viewportinfo, config = config[0], config[1:]
set_main_window_size(viewportinfo['ViewportSize'][0], viewportinfo['ViewportSize'][1])
for win in config:
name = win['name']
if does_item_exist(name):
#print(f"Config for {name} : {win}\n")
delkeys(win, ["source", "tip", "enabled", "menubar"])
iconfig = {}
for i in ("x_pos", "y_pos", "width", "height", "enabled", "show"):
if i in win:
iconfig[i] = win[i]
#print(win)
configure_item(name, **iconfig)
except FileNotFoundError:
pass
def cb_restore_ui(sender, data):
restore_ui()
def log_callback(sender, data):
log_debug(f"{sender} ran a callback its value is {get_value(sender)}")
def setupFonts():
dpi = get_value("Display DPI")
height = get_value("Font height (mm)")
# Use Mononoki font, 0.25 inches high
global FontSize
FontSize = int(round(height/25.4*dpi))
add_additional_font("fonts/mononoki-Regular.ttf", FontSize)
#add_additional_font("fonts/FiraCode-Regular.otf", FontSize)
#add_additional_font("fonts/Inconsolata.otf", FontSize)
#add_additional_font("fonts/ProggyClean.ttf", FontSize)
#add_additional_font("fonts/TerminusTTF-4.46.0.ttf", FontSize)
def cb_set_display_DPI(sender, data):
print(sender, data)
setupFonts()
def cb_set_font_height(sender, data):
print(sender, data)
setupFonts()
def callback_size_prefs(sender, data):
with window("Display Preferences", autosize=True):
add_drag_float("Display DPI", callback=cb_set_display_DPI,
default_value=160,
min_value=10,
max_value=300,
clamped=True,
)
add_drag_float("Font height (mm)", callback=cb_set_font_height,
default_value=4.0,
min_value=1.0,
max_value=20.0,
clamped=True
)
def cb_load_data(sender, data):
print(sender, data)
def callback_load_debug(sender, data):
open_file_dialog(callback=cb_load_data, extensions = ".debug") # Works great
def update_cpu_views():
if "CallStack" in Windows:
Windows["CallStack"].updateDisplay()
if "Stack" in Windows:
Windows["Stack"].updateDisplay()
if "CPUInfo" in Windows:
Windows["CPUInfo"].updateDisplay()
if "Program" in Windows:
Windows["Program"].updateDisplay()
if "Memory" in Windows:
Windows["Memory"].updateDisplay()
def cb_run(sender, data):
if "Program" not in Windows:
return
try:
VMCPU.step(ignorebp=VMCPU.PC)
update_cpu_views()
while True:
VMCPU.step()
if random.random() < 0.01: # Don't update UI every step... can be slow
update_cpu_views()
except VMCPUStopped:
update_cpu_views()
except VMCPUBreakpoint:
update_cpu_views()
def cb_step(sender, data):
if "Program" not in Windows:
return
try:
VMCPU.step(ignorebp=VMCPU.PC)
update_cpu_views()
except VMCPUStopped:
update_cpu_views()
except VMCPUBreakpoint:
update_cpu_views()
def cb_out(sender, data):
if "Program" not in Windows:
return
try:
while VMCPU.getCurrentOpcodeName()!= ";":
VMCPU.step(ignorebp=VMCPU.PC)
update_cpu_views()
VMCPU.step(ignorebp=VMCPU.PC)
update_cpu_views()
except VMCPUStopped:
update_cpu_views()
except VMCPUBreakpoint:
update_cpu_views()
def cb_lstep(sender, data):
# Step opcode execution until associated source line changes
if "Program" not in Windows:
return
try:
editor = Windows["Program"]
currentpc = VMCPU.PC
currentline = editor.D.getSourceLineForAddr(VMCPU.PC)
line = currentline
while (line == currentline):
VMCPU.step(ignorebp=currentpc)
update_cpu_views()
line = editor.D.getSourceLineForAddr(VMCPU.PC)
except VMCPUStopped:
update_cpu_views()
except VMCPUBreakpoint:
update_cpu_views()
# def step_opcode():
# addr = VMCPU.PC
# nextaddr = VMCPU.nextOpcodeAddr(addr)
# while VMCPU.PC != nextaddr:
# VMCPU.step()
def cb_nextl(sender, data):
# Step opcode execution until source line increments, ignoring subroutines
if "Program" not in Windows:
return
try:
editor = Windows["Program"]
currentop = VMCPU.getOpcodeName(VMCPU.ROM[VMCPU.PC])
if currentop == ';':
cb_step(sender, data)
return
# TODO: FOR is going to create some interesting corner cases. Will deal with it later.
currentpc = VMCPU.PC
currentline = editor.D.getSourceLineForAddr(VMCPU.PC)
line = currentline
nextlineaddr = editor.D.getAddrForSourceLine(currentline+1)
while line == currentline:
while VMCPU.PC != nextlineaddr:
VMCPU.step(ignorebp=currentpc)
if random.random() < 0.05:
update_cpu_views() # Update UI 5% of the time
update_cpu_views()
line = editor.D.getSourceLineForAddr(VMCPU.PC)
except VMCPUStopped:
update_cpu_views()
except VMCPUBreakpoint:
update_cpu_views()
def cb_over(sender, data):
# Step opcode execution until source line changes, without viewing subroutines
if "Program" not in Windows:
return
try:
editor = Windows["Program"]
currentop = VMCPU.getOpcodeName(VMCPU.ROM[VMCPU.PC])
if currentop == ';':
cb_step(sender, data)
return
currentpc = VMCPU.PC
currentline = editor.D.getSourceLineForAddr(VMCPU.PC)
line = currentline
while line == currentline:
if VMCPU.isCurrentOpCall():
# It's a call to a subroutine. Run until we get out.
nextlineaddr = editor.D.getAddrForSourceLine(line+1)
while VMCPU.PC != nextlineaddr:
VMCPU.step(ignorebp=currentpc)
else:
# Not a call. Just execute an opcode
VMCPU.step(ignorebp=currentpc)
update_cpu_views()
line = editor.D.getSourceLineForAddr(VMCPU.PC)
except VMCPUStopped:
update_cpu_views()
except VMCPUBreakpoint:
update_cpu_views()
def cb_shot(sender, data):
SYSTIME.reset()
VMCPU.reset()
VMCPU.moveToWord("RunShot")
update_cpu_views()
def cb_idle(sender, data):
VMCPU.reset()
VMCPU.moveToWord("Idle")
update_cpu_views()
def cb_halt(sender, data):
VMCPU.reset()
VMCPU.moveToWord("Halt")
update_cpu_views()
def add_controls():
if does_item_exist("Controls"):
|
with window("Controls", autosize=True, x_pos=0, y_pos=0):
with group("Buttons1", horizontal=True):
w = charW(6)
add_button("STEP", width=w, callback=cb_step, tip="Run one instruction")
add_button("STEPL", width=w, callback=cb_lstep, tip="Run one source line of code")
add_button("NEXTL", width=w, callback=cb_nextl, tip="Run until next source line of code")
with group("Buttons2", horizontal=True):
add_button("OVER", width=w, callback=cb_over, tip="Run one line of code, don't show subroutines")
add_button("OUT", width=w, callback=cb_out, tip="Run until ';' is executed")
add_button("RUN", width=w, callback=cb_run, tip="Run until completion, or a breakpoint")
with group("Buttons3", horizontal=True):
add_button("SHOT", width=w, callback=cb_shot, tip="Move to 'RunShot'")
add_button("IDLE", width=w, callback=cb_idle, tip="Move to 'Idle'")
add_button("HALT", width=w, callback=cb_halt, tip="Move to 'Halt'")
for item in get_item_children("Controls"):
set_item_style_var(item, mvGuiStyleVar_FrameRounding, [charH(1)*0.3])
set_item_style_var(item, mvGuiStyleVar_FramePadding, [charW(1)*0.3, 1])
def add_editor():
if does_item_exist("Program"):
del Windows["Program"]
delete_item("Program")
Windows["Program"] = Editor(FILETOLOAD)
def cb_add_controls(sender, data):
add_controls()
def cb_add_editor(sender, data):
add_editor()
def cb_nop(sender, data):
pass
def hsv_to_rgb(h: float, s: float, v: float, a:float) -> (float, float, float, float):
if s == 0.0: return (v, v, v, 255*a)
i = int(h*6.)
f = (h*6.)-i; p,q,t = v*(1.-s), v*(1.-s*f), v*(1.-s*(1.-f)); i%=6
if i == 0: return (255*v, 255*t, 255*p, 255*a)
if i == 1: return (255*q, 255*v, 255*p, 255*a)
if i == 2: return (255*p, 255*v, 255*t, 255*a)
if i == 3: return (255*p, 255*q, 255*v, 255*a)
if i == 4: return (255*t, 255*p, 255*v, 255*a)
if i == 5: return (255*v, 255*p, 255*q, 255*a)
class Editor:
def __init__(self, filename):
self.D = DebugDis(filename)
self.TextLines = self.D.SourceLines
self.addLines()
self.Selected = None
def selectMemAddr(self, addr):
"""
Highlight the line of code associated with the CPU program counter
"""
oldaddr = self.Selected
if oldaddr != None:
sl = self.D.getSourceLineForAddr(oldaddr)
item = f"SourceL{sl}"
#for item in get_item_children(f"SourceG{sl}"):
set_item_color(item, mvGuiCol_Button, [0,0,0,0])
self.Selected = addr
if self.Selected != None:
sl = self.D.getSourceLineForAddr(addr)
#for item in get_item_children(f"SourceG{sl}"):
item = f"SourceL{sl}"
set_item_color(item, mvGuiCol_Button, hsv_to_rgb(4/7.0, 0.8, 0.8, 1.0))
#set_item_color(f"SourceLNG{sl}", mvGuiCol_Text, [155,0,75,175])
#configure_item(f"SourceL{sl}", enabled=True)
#print(get_item_configuration(f"SourceL{sl}"))
def updateDisplay(self):
self.selectMemAddr(VMCPU.PC)
def cb_addr_click(self, sender, data):
#print(sender, data)
VMCPU.toggleBP(data)
item = f"SourceLN{self.D.getSourceLineForAddr(data)}"
i = 4
hovercol = hsv_to_rgb(i/7.0, 0.7, 0.7, 0.3)
if VMCPU.isBP(data):
set_item_color(item, mvGuiCol_Button, hsv_to_rgb(i/7.0, 0.8, 0.8, 1.0))
set_item_color(item, mvGuiCol_ButtonHovered, hsv_to_rgb(i/7.0, 0.8, 0.8, 1.0))
configure_item(item, tip="Breakpoint at Addr %d" % data)
else:
set_item_color(item, mvGuiCol_Button, [0,0,0,0])
set_item_color(item, mvGuiCol_ButtonHovered, hovercol)
configure_item(item, tip="")
def addLine(self, name, count, field1, field2, padto, cb, cb_data):
field2 = field2 + (' '*(padto-len(field2))) + ' '
with group(f"{name}G{count}", horizontal=True):
add_button(f"{name}LN{count}", label = field1, callback=cb, callback_data=cb_data)
if field2 == '':
add_button(f"{name}L{count}", label = ' ')
else:
add_button(f"{name}L{count}", label = field2)
i = 4
hovercol = hsv_to_rgb(i/7.0, 0.7, 0.7, 0.3)
for item in get_item_children(f"{name}G{count}"):
set_item_color(item, mvGuiCol_Button, [0,0,0,0])
set_item_color(item, mvGuiCol_ButtonHovered, hovercol)
set_item_color(item, mvGuiCol_ButtonActive, hsv_to_rgb(i/7.0, 0.8, 0.8, 1.0))
set_item_style_var(item, mvGuiStyleVar_FrameRounding, [2])
set_item_style_var(item, mvGuiStyleVar_FramePadding, [1, 1])
def addLines(self):
longestline = max(len(x.rstrip()) for x in self.TextLines)
with window("Program", x_pos=400, y_pos=200, width=charW(longestline+12), height=charH(40), no_scrollbar=True):
with tab_bar("ProgramTab"):
with tab("Source"):
with child("SourceChild", autosize_x=True, autosize_y=True):
for i, line in enumerate(self.TextLines, start=1):
addr = self.D.getAddrForSourceLine(i)
self.addLine("Source", i, "%5d" % i, line, longestline, self.cb_addr_click, addr)
with tab("Opcodes"):
memdump = self.D.dumpOpcodes()
for i, op in enumerate(memdump):
addr = op[0]
with group(f"opcodesLG{addr}", horizontal=True):
add_text(f"opcodeAddr{addr}", default_value= "%5d" % op[0])
add_text(f"opcodeBytes{addr}", default_value= " ".join([ "%02X" % x for x in op[1]]))
if op[2]:
add_text(f"opcodeval{i}", default_value= ("%d" % op[2]))
add_text(f"opcodesym{i}", default_value='('+op[3]+')')
else:
add_text(f"opcodesym{i}", default_value=op[3])
class MemoryDisplay:
def __init__(self, cpu, name):
self.Name = name
self.CPU = cpu
self.createDisplay()
def getFloatAddrInfo(self, a):
"""
Given an address, return the value at that address, and it's symbol
"""
return self.CPU.memFetch(SEntry(a, None))
def getByteAddrInfo(self, a):
"""
Given an address, return the value at that address, and it's symbol
"""
return self.CPU.memFetchB(SEntry(a, None))
def updateDisplay(self):
wl = self.CPU.getMemWriteList()
if len(wl):
for addr, writelen in wl:
for byteaddr in range(addr, addr+writelen):
val, sym = self.getByteAddrInfo(byteaddr)
val = int(round(val))
set_value(f"{self.Name}bval_{byteaddr}", "%02X %4d" % (val, val))
set_value(f"{self.Name}bsym_{byteaddr}", "%s" % sym)
if (byteaddr % 4) == 0:
val, sym = self.getFloatAddrInfo(byteaddr)
set_value(f"{self.Name}fval_{byteaddr}", "%12.6f" % val)
set_value(f"{self.Name}fsym_{byteaddr}", "%s" % sym)
self.CPU.clearMemWriteList()
def createDisplay(self):
with window(self.Name):
with child(f"{self.Name}child", width=charW(100), height=charH(16), border=False):
with managed_columns(f"{self.Name}mc", 2):
with group(f"{self.Name}left"):
for byteaddr in range(256):
# 4 Bytes and a Float
# Horizontal double column, 4 bytes on the left, float on the right
with group(f"{self.Name}bline_{byteaddr}", horizontal=True):
val, sym = self.getByteAddrInfo(byteaddr)
val = int(round(val))
add_text(f"{self.Name}byte_{byteaddr}", default_value="%05d %04x" % (byteaddr, byteaddr))
add_text(f"{self.Name}bval_{byteaddr}", default_value="%02X %4d" % (val, val))
add_text(f"{self.Name}bsym_{byteaddr}", default_value="%s" % (sym,))
with group(f"{self.Name}right"):
for addr in range(0, 256):
if (addr % 4) == 0:
with group(f"{self.Name}fline_{addr}", horizontal=True):
val, sym = self.getFloatAddrInfo(addr)
#add_text(f"{self.Name}float_{addr}", default_value="%05d %04x" % (addr, addr))
add_text(f"{self.Name}fval_{addr}", default_value="%12.6f" % (val,))
add_text(f"{self.Name}fsym_{addr}", default_value="%s" % (sym,))
else:
with group(f"{self.Name}spacerg_{addr}", horizontal=True):
add_text(f"{self.Name}spacerl_{addr}", default_value=' ')
class StackDisplay:
def __init__(self, name, stack):
self.Stack = stack
self.Name = name
self.createDisplay()
def getStackVal(self, pos):
if len(self.Stack) > pos:
return self.Stack.read(pos)
else:
return None
def updateDisplay(self):
if self.Stack.Changed:
for i in range(64):
sv = self.getStackVal(i)
if sv != None:
#print(get_item_configuration(f"{self.Name}val_{i}"))
configure_item(f"{self.Name}val_{i}", label=("%12.6f" % self.getStackVal(i).float))
set_value(f"{self.Name}sym_{i}", self.getStackVal(i).symbol)
configure_item(f"{self.Name}sym_{i}", tip=self.getStackVal(i).symbol)
else:
configure_item(f"{self.Name}val_{i}", label="------------")
set_value(f"{self.Name}sym_{i}", '')
configure_item(f"{self.Name}sym_{i}", tip='')
def createDisplay(self):
with window(self.Name, autosize=True):
with child(f"{self.Name}child", width=charW(40), height=charH(16), border=False):
for i in range(64):
with group(f"{self.Name}group_{i}", horizontal=True):
add_text(f"{self.Name}pos_{i}", default_value="%02d" % i)
sv = self.getStackVal(i)
if sv != None:
with tree_node(f"{self.Name}val_{i}", label="%12.6f" % self.getStackVal(i).float, default_open=True):
add_text(f"{self.Name}sym_{i}", default_value=self.getStackVal(i).symbol)
else:
with tree_node(f"{self.Name}val_{i}", label="------------", default_open=True):
add_text(f"{self.Name}sym_{i}", default_value='')
def add_stack(stack, name):
if does_item_exist(name):
del Windows[name]
delete_item(name)
Windows[name] = StackDisplay(name, stack)
def add_mem(cpu, name):
if does_item_exist(name):
del Windows[name]
delete_item(name)
Windows[name] = MemoryDisplay(cpu, name)
class CPUInfo:
def __init__(self, cpu, name):
self.Name = name
self.CPU = cpu
self.createDisplay()
def updateDisplay(self):
set_value(f"{self.Name}PC", "PC: %05d" % self.CPU.PC)
set_value(f"{self.Name}Cycles", "Cycles: %06d" % self.CPU.Cycles)
def createDisplay(self):
with window(self.Name, autosize=True):
with child(f"{self.Name}child", width=charW(16), height=charH(3)):
with group(f"{self.Name}group"):
add_text(f"{self.Name}PC", default_value="PC: %05d" % self.CPU.PC)
add_text(f"{self.Name}Cycles", default_value="Cycles: %06d" % self.CPU.Cycles)
def add_cpu_info(cpu, name):
if does_item_exist(name):
del Windows[name]
delete_item(name)
Windows[name] = CPUInfo(cpu, name)
def fix_window_positions():
wp = get_style_frame_padding()
mbw, mbh = [int(x) for x in get_item_rect_size("MenuBar")]
windows = get_windows()
windows = [x for x in windows if x != "Main Window"]
for i in windows:
x, y = [int(x) for x in get_window_pos(i)]
fix = False
if x < 0:
x = 0
fix = True
if y < mbh:
y = mbh+int(wp[1])
fix = True
if fix:
set_window_pos(i, x, y)
def cb_mouse_release(sender, data):
fix_window_positions()
def cb_close(sender, data):
set_mouse_release_callback(None)
set_render_callback(None)
def add_de1graph():
with window("Graphs"):
add_plot("DE1", x_axis_name="Time/[s]", y_axis_name="Pressure",
yaxis2=True,
yaxis3=True
)
def setup_UI(sender, data):
global CharW
global CharH
x, y = get_item_rect_size("CharRuler")
#print(x,y)
CharW = float(x/100)
CharH = float(y/10)
#print(f"Character width is: {CharW}")
#print(f"Character height is: {CharH}")
delete_item("CharRuler")
add_controls()
add_editor()
add_stack(VMCPU.CallStack, "CallStack")
add_stack(VMCPU.Stack, "Stack")
add_cpu_info(VMCPU, "CPUInfo")
add_mem(VMCPU, "Memory")
VMCPU.loadDebug(FILETOLOAD)
VMCPU.moveToWord("RunShot")
update_cpu_views()
set_main_window_title("Dalgona Debugger")
set_item_color("Main Window", mvGuiCol_WindowBg, [128, 128, 128, 0])
set_style_global_alpha(1.0)
set_mouse_release_callback(cb_mouse_release)
restore_ui()
fix_window_positions()
def main():
add_default_prefs()
load_prefs()
set_theme("Dark")
setupFonts()
#enable_docking(dock_space=True, shift_only=False)
#set_style_window_rounding(charH(0.25))
#set_style_frame_rounding(charH(0.25))
with window("Main Window", label="Espresso Forth Debugger", width=160, height=120, on_close=cb_close):
with menu_bar("MenuBar"):
with menu("File"):
add_menu_item("Load Debug", callback=callback_load_debug)
with menu("Windows"):
add_menu_item("Save Layout", label="Save Layout", callback=cb_save_ui)
add_menu_item("Restore Layout", label="Restore Layout", callback=cb_restore_ui)
add_menu_item("ControlsMI", label="Controls", callback=cb_add_controls)
add_menu_item("EditorMI", label="Editor", callback=cb_add_editor)
with menu("Extras"):
add_menu_item("Show Logger", callback=show_logger)
add_menu_item("Show About", callback=show_about)
add_menu_item("Show Metrics", callback=show_metrics)
add_menu_item("Show Documentation", callback=show_documentation)
add_menu_item("Show Debug", callback=show_debug)
add_menu_item("Show Style Editor", callback=show_style_editor)
add_text("CharRuler", default_value = ("\n".join(['H'*100]*10))) #, color=[0,0,0,0])
add_de1graph()
set_start_callback(setup_UI)
try:
with open(UI_FILE_NAME, 'r') as infile:
config = json.load(infile)
viewportinfo, config = config[0], config[1:]
set_main_window_size(viewportinfo['ViewportSize'][0], viewportinfo['ViewportSize'][1])
except FileNotFoundError:
pass
start_dearpygui(primary_window="Main Window")
if __name__ == '__main__':
main() | delete_item("Controls") | conditional_block |
debugvm.py | #!/usr/bin/python3
from dearpygui.core import *
from dearpygui.simple import *
from disasm import DebugDis
import json, sys, random
from collections import OrderedDict
from vm import *
import systemtime
FontSize = 5
Windows = {}
CharW = 1
CharH = 1
SYSTIME = systemtime.SystemTime()
VMCPU = CPU(SYSTIME)
PREFS_FILE_NAME = 'prefs.debugvm.json'
UI_FILE_NAME = 'ui.debugvm.json'
FILETOLOAD = "froths/ShotSequencer.debug"
def charW(x):
return int(round(x*CharW))
def charH(x):
return int(round(x*CharH))
def add_default_prefs():
add_value("Display DPI", 160.0)
add_value("Font height (mm)", 4.0)
def set_from_prefs_dict(prefs):
print("Loading prefs from prefs.json: ")
for i in prefs:
set_value(i, prefs[i])
print(" %s: %s" % (i, prefs[i]))
def save_prefs():
prefs = OrderedDict()
dpi = get_value("Display DPI")
height = get_value("Font height (mm)")
prefs["Display DPI"] = dpi
prefs["Font height (mm)"] = height
with open(PREFS_FILE_NAME, 'w') as outfile:
json.dump(prefs, outfile, indent=2)
outfile.close()
def load_prefs():
try:
with open(PREFS_FILE_NAME, 'r') as infile:
prefs = json.load(infile)
set_from_prefs_dict(prefs)
except FileNotFoundError:
# No prefs file, so save defaults to create new prefs file
save_prefs()
def save_ui():
wlist = get_windows()
wlist.remove('filedialog')
vp = {"ViewportSize" : get_main_window_size()}
config = [vp]
for win in wlist:
config.append(get_item_configuration(win))
with open(UI_FILE_NAME, 'w') as outfile:
json.dump(config, outfile, indent=2)
def cb_save_ui(sender, data):
save_ui()
def delkeys(dict, keys):
for key in keys:
try:
del dict[key]
except KeyError:
pass
def restore_ui():
try:
with open(UI_FILE_NAME, 'r') as infile:
config = json.load(infile)
viewportinfo, config = config[0], config[1:]
set_main_window_size(viewportinfo['ViewportSize'][0], viewportinfo['ViewportSize'][1])
for win in config:
name = win['name']
if does_item_exist(name):
#print(f"Config for {name} : {win}\n")
delkeys(win, ["source", "tip", "enabled", "menubar"])
iconfig = {}
for i in ("x_pos", "y_pos", "width", "height", "enabled", "show"):
if i in win:
iconfig[i] = win[i]
#print(win)
configure_item(name, **iconfig)
except FileNotFoundError:
pass
def cb_restore_ui(sender, data):
restore_ui()
def log_callback(sender, data):
log_debug(f"{sender} ran a callback its value is {get_value(sender)}")
def setupFonts():
dpi = get_value("Display DPI")
height = get_value("Font height (mm)")
# Use Mononoki font, 0.25 inches high
global FontSize
FontSize = int(round(height/25.4*dpi))
add_additional_font("fonts/mononoki-Regular.ttf", FontSize)
#add_additional_font("fonts/FiraCode-Regular.otf", FontSize)
#add_additional_font("fonts/Inconsolata.otf", FontSize)
#add_additional_font("fonts/ProggyClean.ttf", FontSize)
#add_additional_font("fonts/TerminusTTF-4.46.0.ttf", FontSize)
def cb_set_display_DPI(sender, data):
print(sender, data)
setupFonts()
def cb_set_font_height(sender, data):
print(sender, data)
setupFonts()
def callback_size_prefs(sender, data):
with window("Display Preferences", autosize=True):
add_drag_float("Display DPI", callback=cb_set_display_DPI,
default_value=160,
min_value=10,
max_value=300,
clamped=True,
)
add_drag_float("Font height (mm)", callback=cb_set_font_height,
default_value=4.0,
min_value=1.0,
max_value=20.0,
clamped=True
)
def cb_load_data(sender, data):
print(sender, data)
def callback_load_debug(sender, data):
open_file_dialog(callback=cb_load_data, extensions = ".debug") # Works great
def update_cpu_views():
if "CallStack" in Windows:
Windows["CallStack"].updateDisplay()
if "Stack" in Windows:
Windows["Stack"].updateDisplay()
if "CPUInfo" in Windows:
Windows["CPUInfo"].updateDisplay()
if "Program" in Windows:
Windows["Program"].updateDisplay()
if "Memory" in Windows:
Windows["Memory"].updateDisplay()
def cb_run(sender, data):
if "Program" not in Windows:
return
try:
VMCPU.step(ignorebp=VMCPU.PC)
update_cpu_views()
while True:
VMCPU.step()
if random.random() < 0.01: # Don't update UI every step... can be slow
update_cpu_views()
except VMCPUStopped:
update_cpu_views()
except VMCPUBreakpoint:
update_cpu_views()
def cb_step(sender, data):
if "Program" not in Windows:
return
try:
VMCPU.step(ignorebp=VMCPU.PC)
update_cpu_views()
except VMCPUStopped:
update_cpu_views()
except VMCPUBreakpoint:
update_cpu_views()
def cb_out(sender, data):
if "Program" not in Windows:
return
try:
while VMCPU.getCurrentOpcodeName()!= ";":
VMCPU.step(ignorebp=VMCPU.PC)
update_cpu_views()
VMCPU.step(ignorebp=VMCPU.PC)
update_cpu_views()
except VMCPUStopped:
update_cpu_views()
except VMCPUBreakpoint:
update_cpu_views()
def cb_lstep(sender, data):
# Step opcode execution until associated source line changes
if "Program" not in Windows:
return
try:
editor = Windows["Program"]
currentpc = VMCPU.PC
currentline = editor.D.getSourceLineForAddr(VMCPU.PC)
line = currentline
while (line == currentline):
VMCPU.step(ignorebp=currentpc)
update_cpu_views()
line = editor.D.getSourceLineForAddr(VMCPU.PC)
except VMCPUStopped:
update_cpu_views()
except VMCPUBreakpoint:
update_cpu_views()
# def step_opcode():
# addr = VMCPU.PC
# nextaddr = VMCPU.nextOpcodeAddr(addr)
# while VMCPU.PC != nextaddr:
# VMCPU.step()
def cb_nextl(sender, data):
# Step opcode execution until source line increments, ignoring subroutines
if "Program" not in Windows:
return
try:
editor = Windows["Program"]
currentop = VMCPU.getOpcodeName(VMCPU.ROM[VMCPU.PC])
if currentop == ';':
cb_step(sender, data)
return
# TODO: FOR is going to create some interesting corner cases. Will deal with it later.
currentpc = VMCPU.PC
currentline = editor.D.getSourceLineForAddr(VMCPU.PC)
line = currentline
nextlineaddr = editor.D.getAddrForSourceLine(currentline+1)
while line == currentline:
while VMCPU.PC != nextlineaddr:
VMCPU.step(ignorebp=currentpc)
if random.random() < 0.05:
update_cpu_views() # Update UI 5% of the time
update_cpu_views()
line = editor.D.getSourceLineForAddr(VMCPU.PC)
except VMCPUStopped:
update_cpu_views()
except VMCPUBreakpoint:
update_cpu_views()
def cb_over(sender, data):
# Step opcode execution until source line changes, without viewing subroutines
if "Program" not in Windows:
return
try:
editor = Windows["Program"]
currentop = VMCPU.getOpcodeName(VMCPU.ROM[VMCPU.PC])
if currentop == ';':
cb_step(sender, data)
return
currentpc = VMCPU.PC
currentline = editor.D.getSourceLineForAddr(VMCPU.PC)
line = currentline
while line == currentline:
if VMCPU.isCurrentOpCall():
# It's a call to a subroutine. Run until we get out.
nextlineaddr = editor.D.getAddrForSourceLine(line+1)
while VMCPU.PC != nextlineaddr:
VMCPU.step(ignorebp=currentpc)
else:
# Not a call. Just execute an opcode
VMCPU.step(ignorebp=currentpc)
update_cpu_views()
line = editor.D.getSourceLineForAddr(VMCPU.PC)
except VMCPUStopped:
update_cpu_views()
except VMCPUBreakpoint:
update_cpu_views()
def cb_shot(sender, data):
SYSTIME.reset()
VMCPU.reset()
VMCPU.moveToWord("RunShot")
update_cpu_views()
def cb_idle(sender, data):
VMCPU.reset()
VMCPU.moveToWord("Idle")
update_cpu_views()
def cb_halt(sender, data):
VMCPU.reset()
VMCPU.moveToWord("Halt")
update_cpu_views()
def add_controls():
if does_item_exist("Controls"):
delete_item("Controls")
with window("Controls", autosize=True, x_pos=0, y_pos=0):
with group("Buttons1", horizontal=True):
w = charW(6)
add_button("STEP", width=w, callback=cb_step, tip="Run one instruction")
add_button("STEPL", width=w, callback=cb_lstep, tip="Run one source line of code")
add_button("NEXTL", width=w, callback=cb_nextl, tip="Run until next source line of code")
with group("Buttons2", horizontal=True):
add_button("OVER", width=w, callback=cb_over, tip="Run one line of code, don't show subroutines")
add_button("OUT", width=w, callback=cb_out, tip="Run until ';' is executed")
add_button("RUN", width=w, callback=cb_run, tip="Run until completion, or a breakpoint")
with group("Buttons3", horizontal=True):
add_button("SHOT", width=w, callback=cb_shot, tip="Move to 'RunShot'")
add_button("IDLE", width=w, callback=cb_idle, tip="Move to 'Idle'")
add_button("HALT", width=w, callback=cb_halt, tip="Move to 'Halt'")
for item in get_item_children("Controls"):
set_item_style_var(item, mvGuiStyleVar_FrameRounding, [charH(1)*0.3])
set_item_style_var(item, mvGuiStyleVar_FramePadding, [charW(1)*0.3, 1])
def add_editor():
if does_item_exist("Program"):
del Windows["Program"]
delete_item("Program")
Windows["Program"] = Editor(FILETOLOAD)
def cb_add_controls(sender, data):
add_controls()
def cb_add_editor(sender, data):
add_editor()
def cb_nop(sender, data):
pass
def hsv_to_rgb(h: float, s: float, v: float, a:float) -> (float, float, float, float):
if s == 0.0: return (v, v, v, 255*a)
i = int(h*6.)
f = (h*6.)-i; p,q,t = v*(1.-s), v*(1.-s*f), v*(1.-s*(1.-f)); i%=6
if i == 0: return (255*v, 255*t, 255*p, 255*a)
if i == 1: return (255*q, 255*v, 255*p, 255*a)
if i == 2: return (255*p, 255*v, 255*t, 255*a)
if i == 3: return (255*p, 255*q, 255*v, 255*a)
if i == 4: return (255*t, 255*p, 255*v, 255*a)
if i == 5: return (255*v, 255*p, 255*q, 255*a)
class Editor:
def __init__(self, filename):
self.D = DebugDis(filename)
self.TextLines = self.D.SourceLines
self.addLines()
self.Selected = None
def selectMemAddr(self, addr):
"""
Highlight the line of code associated with the CPU program counter
"""
oldaddr = self.Selected
if oldaddr != None:
sl = self.D.getSourceLineForAddr(oldaddr)
item = f"SourceL{sl}"
#for item in get_item_children(f"SourceG{sl}"):
set_item_color(item, mvGuiCol_Button, [0,0,0,0])
self.Selected = addr
if self.Selected != None:
sl = self.D.getSourceLineForAddr(addr)
#for item in get_item_children(f"SourceG{sl}"):
item = f"SourceL{sl}"
set_item_color(item, mvGuiCol_Button, hsv_to_rgb(4/7.0, 0.8, 0.8, 1.0))
#set_item_color(f"SourceLNG{sl}", mvGuiCol_Text, [155,0,75,175])
#configure_item(f"SourceL{sl}", enabled=True)
#print(get_item_configuration(f"SourceL{sl}"))
def updateDisplay(self):
self.selectMemAddr(VMCPU.PC)
def cb_addr_click(self, sender, data):
#print(sender, data)
VMCPU.toggleBP(data)
item = f"SourceLN{self.D.getSourceLineForAddr(data)}"
i = 4
hovercol = hsv_to_rgb(i/7.0, 0.7, 0.7, 0.3)
if VMCPU.isBP(data):
set_item_color(item, mvGuiCol_Button, hsv_to_rgb(i/7.0, 0.8, 0.8, 1.0))
set_item_color(item, mvGuiCol_ButtonHovered, hsv_to_rgb(i/7.0, 0.8, 0.8, 1.0))
configure_item(item, tip="Breakpoint at Addr %d" % data)
else:
set_item_color(item, mvGuiCol_Button, [0,0,0,0])
set_item_color(item, mvGuiCol_ButtonHovered, hovercol)
configure_item(item, tip="")
def addLine(self, name, count, field1, field2, padto, cb, cb_data):
field2 = field2 + (' '*(padto-len(field2))) + ' '
with group(f"{name}G{count}", horizontal=True):
add_button(f"{name}LN{count}", label = field1, callback=cb, callback_data=cb_data)
if field2 == '':
add_button(f"{name}L{count}", label = ' ')
else:
add_button(f"{name}L{count}", label = field2)
i = 4
hovercol = hsv_to_rgb(i/7.0, 0.7, 0.7, 0.3)
for item in get_item_children(f"{name}G{count}"):
set_item_color(item, mvGuiCol_Button, [0,0,0,0])
set_item_color(item, mvGuiCol_ButtonHovered, hovercol)
set_item_color(item, mvGuiCol_ButtonActive, hsv_to_rgb(i/7.0, 0.8, 0.8, 1.0))
set_item_style_var(item, mvGuiStyleVar_FrameRounding, [2])
set_item_style_var(item, mvGuiStyleVar_FramePadding, [1, 1])
def addLines(self):
longestline = max(len(x.rstrip()) for x in self.TextLines)
with window("Program", x_pos=400, y_pos=200, width=charW(longestline+12), height=charH(40), no_scrollbar=True):
with tab_bar("ProgramTab"):
with tab("Source"):
with child("SourceChild", autosize_x=True, autosize_y=True):
for i, line in enumerate(self.TextLines, start=1):
addr = self.D.getAddrForSourceLine(i)
self.addLine("Source", i, "%5d" % i, line, longestline, self.cb_addr_click, addr)
with tab("Opcodes"):
memdump = self.D.dumpOpcodes()
for i, op in enumerate(memdump):
addr = op[0]
with group(f"opcodesLG{addr}", horizontal=True):
add_text(f"opcodeAddr{addr}", default_value= "%5d" % op[0])
add_text(f"opcodeBytes{addr}", default_value= " ".join([ "%02X" % x for x in op[1]]))
if op[2]:
add_text(f"opcodeval{i}", default_value= ("%d" % op[2]))
add_text(f"opcodesym{i}", default_value='('+op[3]+')')
else:
add_text(f"opcodesym{i}", default_value=op[3])
class MemoryDisplay:
def __init__(self, cpu, name):
self.Name = name
self.CPU = cpu
self.createDisplay()
def getFloatAddrInfo(self, a):
"""
Given an address, return the value at that address, and it's symbol
"""
return self.CPU.memFetch(SEntry(a, None))
def getByteAddrInfo(self, a):
"""
Given an address, return the value at that address, and it's symbol
"""
return self.CPU.memFetchB(SEntry(a, None))
def updateDisplay(self):
wl = self.CPU.getMemWriteList()
if len(wl):
for addr, writelen in wl:
for byteaddr in range(addr, addr+writelen):
val, sym = self.getByteAddrInfo(byteaddr)
val = int(round(val))
set_value(f"{self.Name}bval_{byteaddr}", "%02X %4d" % (val, val))
set_value(f"{self.Name}bsym_{byteaddr}", "%s" % sym)
if (byteaddr % 4) == 0:
val, sym = self.getFloatAddrInfo(byteaddr)
set_value(f"{self.Name}fval_{byteaddr}", "%12.6f" % val)
set_value(f"{self.Name}fsym_{byteaddr}", "%s" % sym)
self.CPU.clearMemWriteList()
def createDisplay(self):
with window(self.Name):
with child(f"{self.Name}child", width=charW(100), height=charH(16), border=False):
with managed_columns(f"{self.Name}mc", 2):
with group(f"{self.Name}left"):
for byteaddr in range(256):
# 4 Bytes and a Float
# Horizontal double column, 4 bytes on the left, float on the right
with group(f"{self.Name}bline_{byteaddr}", horizontal=True):
val, sym = self.getByteAddrInfo(byteaddr)
val = int(round(val))
add_text(f"{self.Name}byte_{byteaddr}", default_value="%05d %04x" % (byteaddr, byteaddr))
add_text(f"{self.Name}bval_{byteaddr}", default_value="%02X %4d" % (val, val))
add_text(f"{self.Name}bsym_{byteaddr}", default_value="%s" % (sym,))
with group(f"{self.Name}right"):
for addr in range(0, 256):
if (addr % 4) == 0:
with group(f"{self.Name}fline_{addr}", horizontal=True):
val, sym = self.getFloatAddrInfo(addr)
#add_text(f"{self.Name}float_{addr}", default_value="%05d %04x" % (addr, addr))
add_text(f"{self.Name}fval_{addr}", default_value="%12.6f" % (val,))
add_text(f"{self.Name}fsym_{addr}", default_value="%s" % (sym,))
else:
with group(f"{self.Name}spacerg_{addr}", horizontal=True):
add_text(f"{self.Name}spacerl_{addr}", default_value=' ')
class StackDisplay:
def __init__(self, name, stack):
self.Stack = stack
self.Name = name
self.createDisplay()
def getStackVal(self, pos):
if len(self.Stack) > pos:
return self.Stack.read(pos)
else:
return None
def updateDisplay(self):
if self.Stack.Changed:
for i in range(64):
sv = self.getStackVal(i)
if sv != None:
#print(get_item_configuration(f"{self.Name}val_{i}"))
configure_item(f"{self.Name}val_{i}", label=("%12.6f" % self.getStackVal(i).float))
set_value(f"{self.Name}sym_{i}", self.getStackVal(i).symbol)
configure_item(f"{self.Name}sym_{i}", tip=self.getStackVal(i).symbol)
else:
configure_item(f"{self.Name}val_{i}", label="------------")
set_value(f"{self.Name}sym_{i}", '')
configure_item(f"{self.Name}sym_{i}", tip='')
def createDisplay(self):
with window(self.Name, autosize=True):
with child(f"{self.Name}child", width=charW(40), height=charH(16), border=False):
for i in range(64):
with group(f"{self.Name}group_{i}", horizontal=True):
add_text(f"{self.Name}pos_{i}", default_value="%02d" % i)
sv = self.getStackVal(i)
if sv != None:
with tree_node(f"{self.Name}val_{i}", label="%12.6f" % self.getStackVal(i).float, default_open=True):
add_text(f"{self.Name}sym_{i}", default_value=self.getStackVal(i).symbol)
else:
with tree_node(f"{self.Name}val_{i}", label="------------", default_open=True):
add_text(f"{self.Name}sym_{i}", default_value='')
def add_stack(stack, name):
|
def add_mem(cpu, name):
if does_item_exist(name):
del Windows[name]
delete_item(name)
Windows[name] = MemoryDisplay(cpu, name)
class CPUInfo:
def __init__(self, cpu, name):
self.Name = name
self.CPU = cpu
self.createDisplay()
def updateDisplay(self):
set_value(f"{self.Name}PC", "PC: %05d" % self.CPU.PC)
set_value(f"{self.Name}Cycles", "Cycles: %06d" % self.CPU.Cycles)
def createDisplay(self):
with window(self.Name, autosize=True):
with child(f"{self.Name}child", width=charW(16), height=charH(3)):
with group(f"{self.Name}group"):
add_text(f"{self.Name}PC", default_value="PC: %05d" % self.CPU.PC)
add_text(f"{self.Name}Cycles", default_value="Cycles: %06d" % self.CPU.Cycles)
def add_cpu_info(cpu, name):
if does_item_exist(name):
del Windows[name]
delete_item(name)
Windows[name] = CPUInfo(cpu, name)
def fix_window_positions():
wp = get_style_frame_padding()
mbw, mbh = [int(x) for x in get_item_rect_size("MenuBar")]
windows = get_windows()
windows = [x for x in windows if x != "Main Window"]
for i in windows:
x, y = [int(x) for x in get_window_pos(i)]
fix = False
if x < 0:
x = 0
fix = True
if y < mbh:
y = mbh+int(wp[1])
fix = True
if fix:
set_window_pos(i, x, y)
def cb_mouse_release(sender, data):
fix_window_positions()
def cb_close(sender, data):
set_mouse_release_callback(None)
set_render_callback(None)
def add_de1graph():
with window("Graphs"):
add_plot("DE1", x_axis_name="Time/[s]", y_axis_name="Pressure",
yaxis2=True,
yaxis3=True
)
def setup_UI(sender, data):
global CharW
global CharH
x, y = get_item_rect_size("CharRuler")
#print(x,y)
CharW = float(x/100)
CharH = float(y/10)
#print(f"Character width is: {CharW}")
#print(f"Character height is: {CharH}")
delete_item("CharRuler")
add_controls()
add_editor()
add_stack(VMCPU.CallStack, "CallStack")
add_stack(VMCPU.Stack, "Stack")
add_cpu_info(VMCPU, "CPUInfo")
add_mem(VMCPU, "Memory")
VMCPU.loadDebug(FILETOLOAD)
VMCPU.moveToWord("RunShot")
update_cpu_views()
set_main_window_title("Dalgona Debugger")
set_item_color("Main Window", mvGuiCol_WindowBg, [128, 128, 128, 0])
set_style_global_alpha(1.0)
set_mouse_release_callback(cb_mouse_release)
restore_ui()
fix_window_positions()
def main():
add_default_prefs()
load_prefs()
set_theme("Dark")
setupFonts()
#enable_docking(dock_space=True, shift_only=False)
#set_style_window_rounding(charH(0.25))
#set_style_frame_rounding(charH(0.25))
with window("Main Window", label="Espresso Forth Debugger", width=160, height=120, on_close=cb_close):
with menu_bar("MenuBar"):
with menu("File"):
add_menu_item("Load Debug", callback=callback_load_debug)
with menu("Windows"):
add_menu_item("Save Layout", label="Save Layout", callback=cb_save_ui)
add_menu_item("Restore Layout", label="Restore Layout", callback=cb_restore_ui)
add_menu_item("ControlsMI", label="Controls", callback=cb_add_controls)
add_menu_item("EditorMI", label="Editor", callback=cb_add_editor)
with menu("Extras"):
add_menu_item("Show Logger", callback=show_logger)
add_menu_item("Show About", callback=show_about)
add_menu_item("Show Metrics", callback=show_metrics)
add_menu_item("Show Documentation", callback=show_documentation)
add_menu_item("Show Debug", callback=show_debug)
add_menu_item("Show Style Editor", callback=show_style_editor)
add_text("CharRuler", default_value = ("\n".join(['H'*100]*10))) #, color=[0,0,0,0])
add_de1graph()
set_start_callback(setup_UI)
try:
with open(UI_FILE_NAME, 'r') as infile:
config = json.load(infile)
viewportinfo, config = config[0], config[1:]
set_main_window_size(viewportinfo['ViewportSize'][0], viewportinfo['ViewportSize'][1])
except FileNotFoundError:
pass
start_dearpygui(primary_window="Main Window")
if __name__ == '__main__':
main() | if does_item_exist(name):
del Windows[name]
delete_item(name)
Windows[name] = StackDisplay(name, stack) | identifier_body |
debugvm.py | #!/usr/bin/python3
from dearpygui.core import *
from dearpygui.simple import *
from disasm import DebugDis
import json, sys, random
from collections import OrderedDict
from vm import *
import systemtime
FontSize = 5
Windows = {}
CharW = 1
CharH = 1
SYSTIME = systemtime.SystemTime()
VMCPU = CPU(SYSTIME)
PREFS_FILE_NAME = 'prefs.debugvm.json'
UI_FILE_NAME = 'ui.debugvm.json'
FILETOLOAD = "froths/ShotSequencer.debug"
def charW(x):
return int(round(x*CharW))
def charH(x):
return int(round(x*CharH))
def add_default_prefs():
add_value("Display DPI", 160.0)
add_value("Font height (mm)", 4.0)
def set_from_prefs_dict(prefs):
print("Loading prefs from prefs.json: ")
for i in prefs:
set_value(i, prefs[i])
print(" %s: %s" % (i, prefs[i]))
def save_prefs():
prefs = OrderedDict()
dpi = get_value("Display DPI")
height = get_value("Font height (mm)")
prefs["Display DPI"] = dpi
prefs["Font height (mm)"] = height
with open(PREFS_FILE_NAME, 'w') as outfile:
json.dump(prefs, outfile, indent=2)
outfile.close()
def load_prefs():
try:
with open(PREFS_FILE_NAME, 'r') as infile:
prefs = json.load(infile)
set_from_prefs_dict(prefs)
except FileNotFoundError:
# No prefs file, so save defaults to create new prefs file
save_prefs()
def save_ui():
wlist = get_windows()
wlist.remove('filedialog')
vp = {"ViewportSize" : get_main_window_size()}
config = [vp]
for win in wlist:
config.append(get_item_configuration(win))
with open(UI_FILE_NAME, 'w') as outfile:
json.dump(config, outfile, indent=2)
def cb_save_ui(sender, data):
save_ui()
def delkeys(dict, keys):
for key in keys:
try:
del dict[key]
except KeyError:
pass
def restore_ui():
try:
with open(UI_FILE_NAME, 'r') as infile:
config = json.load(infile)
viewportinfo, config = config[0], config[1:]
set_main_window_size(viewportinfo['ViewportSize'][0], viewportinfo['ViewportSize'][1])
for win in config:
name = win['name']
if does_item_exist(name):
#print(f"Config for {name} : {win}\n")
delkeys(win, ["source", "tip", "enabled", "menubar"])
iconfig = {}
for i in ("x_pos", "y_pos", "width", "height", "enabled", "show"):
if i in win:
iconfig[i] = win[i]
#print(win)
configure_item(name, **iconfig)
except FileNotFoundError:
pass
def cb_restore_ui(sender, data):
restore_ui()
def log_callback(sender, data):
log_debug(f"{sender} ran a callback its value is {get_value(sender)}")
def setupFonts():
dpi = get_value("Display DPI")
height = get_value("Font height (mm)")
# Use Mononoki font, 0.25 inches high
global FontSize
FontSize = int(round(height/25.4*dpi))
add_additional_font("fonts/mononoki-Regular.ttf", FontSize)
#add_additional_font("fonts/FiraCode-Regular.otf", FontSize)
#add_additional_font("fonts/Inconsolata.otf", FontSize)
#add_additional_font("fonts/ProggyClean.ttf", FontSize)
#add_additional_font("fonts/TerminusTTF-4.46.0.ttf", FontSize)
def cb_set_display_DPI(sender, data):
print(sender, data)
setupFonts()
def cb_set_font_height(sender, data):
print(sender, data)
setupFonts()
def callback_size_prefs(sender, data):
with window("Display Preferences", autosize=True):
add_drag_float("Display DPI", callback=cb_set_display_DPI,
default_value=160,
min_value=10,
max_value=300,
clamped=True,
)
add_drag_float("Font height (mm)", callback=cb_set_font_height,
default_value=4.0,
min_value=1.0,
max_value=20.0,
clamped=True
)
def cb_load_data(sender, data):
print(sender, data)
def callback_load_debug(sender, data):
open_file_dialog(callback=cb_load_data, extensions = ".debug") # Works great
def update_cpu_views():
if "CallStack" in Windows:
Windows["CallStack"].updateDisplay()
if "Stack" in Windows:
Windows["Stack"].updateDisplay()
if "CPUInfo" in Windows:
Windows["CPUInfo"].updateDisplay()
if "Program" in Windows:
Windows["Program"].updateDisplay()
if "Memory" in Windows:
Windows["Memory"].updateDisplay()
def cb_run(sender, data):
if "Program" not in Windows:
return
try:
VMCPU.step(ignorebp=VMCPU.PC)
update_cpu_views()
while True:
VMCPU.step()
if random.random() < 0.01: # Don't update UI every step... can be slow
update_cpu_views()
except VMCPUStopped:
update_cpu_views()
except VMCPUBreakpoint:
update_cpu_views()
def cb_step(sender, data):
if "Program" not in Windows:
return
try:
VMCPU.step(ignorebp=VMCPU.PC)
update_cpu_views()
except VMCPUStopped:
update_cpu_views()
except VMCPUBreakpoint:
update_cpu_views()
def cb_out(sender, data):
if "Program" not in Windows:
return
try:
while VMCPU.getCurrentOpcodeName()!= ";":
VMCPU.step(ignorebp=VMCPU.PC)
update_cpu_views()
VMCPU.step(ignorebp=VMCPU.PC)
update_cpu_views()
except VMCPUStopped:
update_cpu_views()
except VMCPUBreakpoint:
update_cpu_views()
def cb_lstep(sender, data):
# Step opcode execution until associated source line changes
if "Program" not in Windows:
return
try:
editor = Windows["Program"]
currentpc = VMCPU.PC
currentline = editor.D.getSourceLineForAddr(VMCPU.PC)
line = currentline
while (line == currentline):
VMCPU.step(ignorebp=currentpc)
update_cpu_views()
line = editor.D.getSourceLineForAddr(VMCPU.PC)
except VMCPUStopped:
update_cpu_views()
except VMCPUBreakpoint:
update_cpu_views()
# def step_opcode():
# addr = VMCPU.PC
# nextaddr = VMCPU.nextOpcodeAddr(addr)
# while VMCPU.PC != nextaddr:
# VMCPU.step()
def cb_nextl(sender, data):
# Step opcode execution until source line increments, ignoring subroutines
if "Program" not in Windows:
return
try:
editor = Windows["Program"]
currentop = VMCPU.getOpcodeName(VMCPU.ROM[VMCPU.PC])
if currentop == ';':
cb_step(sender, data)
return
# TODO: FOR is going to create some interesting corner cases. Will deal with it later.
currentpc = VMCPU.PC
currentline = editor.D.getSourceLineForAddr(VMCPU.PC)
line = currentline
nextlineaddr = editor.D.getAddrForSourceLine(currentline+1)
while line == currentline:
while VMCPU.PC != nextlineaddr:
VMCPU.step(ignorebp=currentpc)
if random.random() < 0.05:
update_cpu_views() # Update UI 5% of the time
update_cpu_views()
line = editor.D.getSourceLineForAddr(VMCPU.PC)
except VMCPUStopped:
update_cpu_views()
except VMCPUBreakpoint:
update_cpu_views()
def cb_over(sender, data):
# Step opcode execution until source line changes, without viewing subroutines
if "Program" not in Windows:
return
try:
editor = Windows["Program"]
currentop = VMCPU.getOpcodeName(VMCPU.ROM[VMCPU.PC])
if currentop == ';':
cb_step(sender, data)
return
currentpc = VMCPU.PC
currentline = editor.D.getSourceLineForAddr(VMCPU.PC)
line = currentline
while line == currentline:
if VMCPU.isCurrentOpCall():
# It's a call to a subroutine. Run until we get out.
nextlineaddr = editor.D.getAddrForSourceLine(line+1)
while VMCPU.PC != nextlineaddr:
VMCPU.step(ignorebp=currentpc)
else:
# Not a call. Just execute an opcode
VMCPU.step(ignorebp=currentpc)
update_cpu_views()
line = editor.D.getSourceLineForAddr(VMCPU.PC)
except VMCPUStopped:
update_cpu_views()
except VMCPUBreakpoint:
update_cpu_views()
def cb_shot(sender, data):
SYSTIME.reset()
VMCPU.reset()
VMCPU.moveToWord("RunShot")
update_cpu_views()
def cb_idle(sender, data):
VMCPU.reset()
VMCPU.moveToWord("Idle")
update_cpu_views()
def cb_halt(sender, data):
VMCPU.reset()
VMCPU.moveToWord("Halt")
update_cpu_views()
def add_controls():
if does_item_exist("Controls"):
delete_item("Controls")
with window("Controls", autosize=True, x_pos=0, y_pos=0):
with group("Buttons1", horizontal=True):
w = charW(6)
add_button("STEP", width=w, callback=cb_step, tip="Run one instruction")
add_button("STEPL", width=w, callback=cb_lstep, tip="Run one source line of code")
add_button("NEXTL", width=w, callback=cb_nextl, tip="Run until next source line of code")
with group("Buttons2", horizontal=True):
add_button("OVER", width=w, callback=cb_over, tip="Run one line of code, don't show subroutines")
add_button("OUT", width=w, callback=cb_out, tip="Run until ';' is executed")
add_button("RUN", width=w, callback=cb_run, tip="Run until completion, or a breakpoint")
with group("Buttons3", horizontal=True):
add_button("SHOT", width=w, callback=cb_shot, tip="Move to 'RunShot'")
add_button("IDLE", width=w, callback=cb_idle, tip="Move to 'Idle'")
add_button("HALT", width=w, callback=cb_halt, tip="Move to 'Halt'")
for item in get_item_children("Controls"):
set_item_style_var(item, mvGuiStyleVar_FrameRounding, [charH(1)*0.3])
set_item_style_var(item, mvGuiStyleVar_FramePadding, [charW(1)*0.3, 1])
def add_editor():
if does_item_exist("Program"):
del Windows["Program"]
delete_item("Program")
Windows["Program"] = Editor(FILETOLOAD)
def cb_add_controls(sender, data):
add_controls()
def cb_add_editor(sender, data):
add_editor()
def cb_nop(sender, data):
pass
def hsv_to_rgb(h: float, s: float, v: float, a:float) -> (float, float, float, float):
if s == 0.0: return (v, v, v, 255*a)
i = int(h*6.)
f = (h*6.)-i; p,q,t = v*(1.-s), v*(1.-s*f), v*(1.-s*(1.-f)); i%=6
if i == 0: return (255*v, 255*t, 255*p, 255*a)
if i == 1: return (255*q, 255*v, 255*p, 255*a)
if i == 2: return (255*p, 255*v, 255*t, 255*a)
if i == 3: return (255*p, 255*q, 255*v, 255*a)
if i == 4: return (255*t, 255*p, 255*v, 255*a)
if i == 5: return (255*v, 255*p, 255*q, 255*a)
class Editor:
def __init__(self, filename):
self.D = DebugDis(filename)
self.TextLines = self.D.SourceLines
self.addLines()
self.Selected = None
def selectMemAddr(self, addr):
"""
Highlight the line of code associated with the CPU program counter
"""
oldaddr = self.Selected
if oldaddr != None:
sl = self.D.getSourceLineForAddr(oldaddr)
item = f"SourceL{sl}"
#for item in get_item_children(f"SourceG{sl}"):
set_item_color(item, mvGuiCol_Button, [0,0,0,0])
self.Selected = addr
if self.Selected != None:
sl = self.D.getSourceLineForAddr(addr)
#for item in get_item_children(f"SourceG{sl}"):
item = f"SourceL{sl}"
set_item_color(item, mvGuiCol_Button, hsv_to_rgb(4/7.0, 0.8, 0.8, 1.0))
#set_item_color(f"SourceLNG{sl}", mvGuiCol_Text, [155,0,75,175])
#configure_item(f"SourceL{sl}", enabled=True)
#print(get_item_configuration(f"SourceL{sl}"))
def updateDisplay(self):
self.selectMemAddr(VMCPU.PC)
def cb_addr_click(self, sender, data):
#print(sender, data)
VMCPU.toggleBP(data)
item = f"SourceLN{self.D.getSourceLineForAddr(data)}"
i = 4
hovercol = hsv_to_rgb(i/7.0, 0.7, 0.7, 0.3)
if VMCPU.isBP(data):
set_item_color(item, mvGuiCol_Button, hsv_to_rgb(i/7.0, 0.8, 0.8, 1.0))
set_item_color(item, mvGuiCol_ButtonHovered, hsv_to_rgb(i/7.0, 0.8, 0.8, 1.0))
configure_item(item, tip="Breakpoint at Addr %d" % data)
else:
set_item_color(item, mvGuiCol_Button, [0,0,0,0])
set_item_color(item, mvGuiCol_ButtonHovered, hovercol)
configure_item(item, tip="")
def addLine(self, name, count, field1, field2, padto, cb, cb_data):
field2 = field2 + (' '*(padto-len(field2))) + ' '
with group(f"{name}G{count}", horizontal=True):
add_button(f"{name}LN{count}", label = field1, callback=cb, callback_data=cb_data)
if field2 == '':
add_button(f"{name}L{count}", label = ' ')
else:
add_button(f"{name}L{count}", label = field2)
i = 4
hovercol = hsv_to_rgb(i/7.0, 0.7, 0.7, 0.3)
for item in get_item_children(f"{name}G{count}"):
set_item_color(item, mvGuiCol_Button, [0,0,0,0])
set_item_color(item, mvGuiCol_ButtonHovered, hovercol)
set_item_color(item, mvGuiCol_ButtonActive, hsv_to_rgb(i/7.0, 0.8, 0.8, 1.0))
set_item_style_var(item, mvGuiStyleVar_FrameRounding, [2])
set_item_style_var(item, mvGuiStyleVar_FramePadding, [1, 1])
def addLines(self):
longestline = max(len(x.rstrip()) for x in self.TextLines)
with window("Program", x_pos=400, y_pos=200, width=charW(longestline+12), height=charH(40), no_scrollbar=True):
with tab_bar("ProgramTab"):
with tab("Source"):
with child("SourceChild", autosize_x=True, autosize_y=True):
for i, line in enumerate(self.TextLines, start=1):
addr = self.D.getAddrForSourceLine(i)
self.addLine("Source", i, "%5d" % i, line, longestline, self.cb_addr_click, addr)
with tab("Opcodes"):
memdump = self.D.dumpOpcodes()
for i, op in enumerate(memdump):
addr = op[0]
with group(f"opcodesLG{addr}", horizontal=True):
add_text(f"opcodeAddr{addr}", default_value= "%5d" % op[0])
add_text(f"opcodeBytes{addr}", default_value= " ".join([ "%02X" % x for x in op[1]]))
if op[2]:
add_text(f"opcodeval{i}", default_value= ("%d" % op[2]))
add_text(f"opcodesym{i}", default_value='('+op[3]+')')
else:
add_text(f"opcodesym{i}", default_value=op[3])
class MemoryDisplay:
def __init__(self, cpu, name):
self.Name = name
self.CPU = cpu
self.createDisplay()
def getFloatAddrInfo(self, a):
"""
Given an address, return the value at that address, and it's symbol
"""
return self.CPU.memFetch(SEntry(a, None))
def getByteAddrInfo(self, a):
"""
Given an address, return the value at that address, and it's symbol
"""
return self.CPU.memFetchB(SEntry(a, None))
def updateDisplay(self):
wl = self.CPU.getMemWriteList()
if len(wl):
for addr, writelen in wl:
for byteaddr in range(addr, addr+writelen):
val, sym = self.getByteAddrInfo(byteaddr)
val = int(round(val))
set_value(f"{self.Name}bval_{byteaddr}", "%02X %4d" % (val, val))
set_value(f"{self.Name}bsym_{byteaddr}", "%s" % sym)
if (byteaddr % 4) == 0:
val, sym = self.getFloatAddrInfo(byteaddr)
set_value(f"{self.Name}fval_{byteaddr}", "%12.6f" % val)
set_value(f"{self.Name}fsym_{byteaddr}", "%s" % sym)
self.CPU.clearMemWriteList()
def createDisplay(self):
with window(self.Name):
with child(f"{self.Name}child", width=charW(100), height=charH(16), border=False):
with managed_columns(f"{self.Name}mc", 2):
with group(f"{self.Name}left"):
for byteaddr in range(256):
# 4 Bytes and a Float
# Horizontal double column, 4 bytes on the left, float on the right
with group(f"{self.Name}bline_{byteaddr}", horizontal=True):
val, sym = self.getByteAddrInfo(byteaddr)
val = int(round(val))
add_text(f"{self.Name}byte_{byteaddr}", default_value="%05d %04x" % (byteaddr, byteaddr))
add_text(f"{self.Name}bval_{byteaddr}", default_value="%02X %4d" % (val, val))
add_text(f"{self.Name}bsym_{byteaddr}", default_value="%s" % (sym,))
with group(f"{self.Name}right"):
for addr in range(0, 256):
if (addr % 4) == 0:
with group(f"{self.Name}fline_{addr}", horizontal=True):
val, sym = self.getFloatAddrInfo(addr)
#add_text(f"{self.Name}float_{addr}", default_value="%05d %04x" % (addr, addr))
add_text(f"{self.Name}fval_{addr}", default_value="%12.6f" % (val,))
add_text(f"{self.Name}fsym_{addr}", default_value="%s" % (sym,))
else:
with group(f"{self.Name}spacerg_{addr}", horizontal=True):
add_text(f"{self.Name}spacerl_{addr}", default_value=' ')
class StackDisplay:
def __init__(self, name, stack):
self.Stack = stack
self.Name = name
self.createDisplay()
def getStackVal(self, pos):
if len(self.Stack) > pos:
return self.Stack.read(pos)
else:
return None
def updateDisplay(self):
if self.Stack.Changed:
for i in range(64):
sv = self.getStackVal(i)
if sv != None:
#print(get_item_configuration(f"{self.Name}val_{i}"))
configure_item(f"{self.Name}val_{i}", label=("%12.6f" % self.getStackVal(i).float))
set_value(f"{self.Name}sym_{i}", self.getStackVal(i).symbol)
configure_item(f"{self.Name}sym_{i}", tip=self.getStackVal(i).symbol)
else:
configure_item(f"{self.Name}val_{i}", label="------------")
set_value(f"{self.Name}sym_{i}", '')
configure_item(f"{self.Name}sym_{i}", tip='')
def createDisplay(self):
with window(self.Name, autosize=True):
with child(f"{self.Name}child", width=charW(40), height=charH(16), border=False):
for i in range(64):
with group(f"{self.Name}group_{i}", horizontal=True):
add_text(f"{self.Name}pos_{i}", default_value="%02d" % i)
sv = self.getStackVal(i)
if sv != None:
with tree_node(f"{self.Name}val_{i}", label="%12.6f" % self.getStackVal(i).float, default_open=True):
add_text(f"{self.Name}sym_{i}", default_value=self.getStackVal(i).symbol)
else:
with tree_node(f"{self.Name}val_{i}", label="------------", default_open=True):
add_text(f"{self.Name}sym_{i}", default_value='')
def add_stack(stack, name):
if does_item_exist(name):
del Windows[name]
delete_item(name)
Windows[name] = StackDisplay(name, stack)
def add_mem(cpu, name):
if does_item_exist(name):
del Windows[name]
delete_item(name)
Windows[name] = MemoryDisplay(cpu, name)
class CPUInfo:
def __init__(self, cpu, name):
self.Name = name
self.CPU = cpu
self.createDisplay()
def updateDisplay(self):
set_value(f"{self.Name}PC", "PC: %05d" % self.CPU.PC)
set_value(f"{self.Name}Cycles", "Cycles: %06d" % self.CPU.Cycles)
def createDisplay(self):
with window(self.Name, autosize=True):
with child(f"{self.Name}child", width=charW(16), height=charH(3)):
with group(f"{self.Name}group"):
add_text(f"{self.Name}PC", default_value="PC: %05d" % self.CPU.PC)
add_text(f"{self.Name}Cycles", default_value="Cycles: %06d" % self.CPU.Cycles)
def add_cpu_info(cpu, name):
if does_item_exist(name):
del Windows[name]
delete_item(name)
Windows[name] = CPUInfo(cpu, name)
def fix_window_positions():
wp = get_style_frame_padding()
mbw, mbh = [int(x) for x in get_item_rect_size("MenuBar")]
windows = get_windows()
windows = [x for x in windows if x != "Main Window"]
for i in windows:
x, y = [int(x) for x in get_window_pos(i)]
fix = False
if x < 0:
x = 0
fix = True | y = mbh+int(wp[1])
fix = True
if fix:
set_window_pos(i, x, y)
def cb_mouse_release(sender, data):
fix_window_positions()
def cb_close(sender, data):
set_mouse_release_callback(None)
set_render_callback(None)
def add_de1graph():
with window("Graphs"):
add_plot("DE1", x_axis_name="Time/[s]", y_axis_name="Pressure",
yaxis2=True,
yaxis3=True
)
def setup_UI(sender, data):
global CharW
global CharH
x, y = get_item_rect_size("CharRuler")
#print(x,y)
CharW = float(x/100)
CharH = float(y/10)
#print(f"Character width is: {CharW}")
#print(f"Character height is: {CharH}")
delete_item("CharRuler")
add_controls()
add_editor()
add_stack(VMCPU.CallStack, "CallStack")
add_stack(VMCPU.Stack, "Stack")
add_cpu_info(VMCPU, "CPUInfo")
add_mem(VMCPU, "Memory")
VMCPU.loadDebug(FILETOLOAD)
VMCPU.moveToWord("RunShot")
update_cpu_views()
set_main_window_title("Dalgona Debugger")
set_item_color("Main Window", mvGuiCol_WindowBg, [128, 128, 128, 0])
set_style_global_alpha(1.0)
set_mouse_release_callback(cb_mouse_release)
restore_ui()
fix_window_positions()
def main():
add_default_prefs()
load_prefs()
set_theme("Dark")
setupFonts()
#enable_docking(dock_space=True, shift_only=False)
#set_style_window_rounding(charH(0.25))
#set_style_frame_rounding(charH(0.25))
with window("Main Window", label="Espresso Forth Debugger", width=160, height=120, on_close=cb_close):
with menu_bar("MenuBar"):
with menu("File"):
add_menu_item("Load Debug", callback=callback_load_debug)
with menu("Windows"):
add_menu_item("Save Layout", label="Save Layout", callback=cb_save_ui)
add_menu_item("Restore Layout", label="Restore Layout", callback=cb_restore_ui)
add_menu_item("ControlsMI", label="Controls", callback=cb_add_controls)
add_menu_item("EditorMI", label="Editor", callback=cb_add_editor)
with menu("Extras"):
add_menu_item("Show Logger", callback=show_logger)
add_menu_item("Show About", callback=show_about)
add_menu_item("Show Metrics", callback=show_metrics)
add_menu_item("Show Documentation", callback=show_documentation)
add_menu_item("Show Debug", callback=show_debug)
add_menu_item("Show Style Editor", callback=show_style_editor)
add_text("CharRuler", default_value = ("\n".join(['H'*100]*10))) #, color=[0,0,0,0])
add_de1graph()
set_start_callback(setup_UI)
try:
with open(UI_FILE_NAME, 'r') as infile:
config = json.load(infile)
viewportinfo, config = config[0], config[1:]
set_main_window_size(viewportinfo['ViewportSize'][0], viewportinfo['ViewportSize'][1])
except FileNotFoundError:
pass
start_dearpygui(primary_window="Main Window")
if __name__ == '__main__':
main() |
if y < mbh: | random_line_split |
aug_utility.py | import numpy as np
import cv2
from pixcel import *
from scipy import ndimage
import math
from socket import *
from config import *
from time import time
def find_bounding_boxes(fimage, lables):
# initialize boxes array
boxes = []
for lable in lables:
# iterate all lables
# filter out image pixels with current lable
|
return boxes
def find_margined_bounding_boxes(fimage, lables, margins):
# initialize boxes array
boxes = []
for lable in lables:
# iterate all lables
# filter out image pixels with current lable
labled = (fimage == lable) + 0
# find indexes
box = find_bounding_box(labled, margins)
# append found bouding box
boxes.append(box)
return boxes
def find_bounding_box(binary_matrix, margins=(0, 0)):
# extract indexes of foreground pixels
indicies = np.array(np.nonzero(binary_matrix + 0))
# get contours
ys = margins[1] + np.amin(indicies[0])
ye = margins[1] + np.amax(indicies[0])
xs = margins[0] + np.amin(indicies[1])
xe = margins[0] + np.amax(indicies[1])
# return contours
return [(xs, ys), (xe, ye)]
def weightFilter(image, lables, weight):
max = 0
weights = np.zeros((lables))
fimage = np.zeros_like(image)
retained_lables = []
for i in range(lables):
weights[i] = np.sum(np.sum(image == i))
if weights[i] > weights[max]:
max = i
if weights[i] > weight:
fimage += np.uint8((image == i) + 0)
retained_lables.append(i)
fimage -= np.uint8((image == max) + 0)
fimage = np.uint8(fimage * 255)
boxes = []
if (len(retained_lables) > 0):
retained_lables.remove(max)
boxes = find_bounding_boxes(image.copy(), retained_lables)
return fimage, boxes
def weightFilterMini(image, weight):
image = np.uint8(image)
# extract contours
image, contours, hierarchy = cv2.findContours(image, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
final_contours = []
for cnt in contours:
if cv2.contourArea(cnt) >= weight:
# add it to final_contours
final_contours.append(cnt)
fimage = np.zeros((image.shape[:2]), np.uint8)
cv2.drawContours(fimage, final_contours, -1, 255, -1)
boxes = RBox.toPointBoundingBoxes(RBox.fromClassicalBoundingBoxes([cv2.boundingRect(cnt) for cnt in final_contours]))
return fimage, boxes
def weightFilterMargined(image, lables, weight, margins):
max = 0
weights = np.zeros((lables))
fimage = np.zeros_like(image)
retained_lables = []
for i in range(lables):
weights[i] = np.sum(np.sum(image == i))
if weights[i] > weights[max]:
max = i
if weights[i] > weight:
fimage += np.uint8((image == i) + 0)
retained_lables.append(i)
fimage -= np.uint8(image == max)
fimage = np.uint8(fimage * 255)
boxes = []
if (len(retained_lables) > 0):
retained_lables.remove(max)
boxes = find_margined_bounding_boxes(image.copy(), retained_lables, margins)
return fimage, boxes
def calculatePossiblePadding(box, shape, default = 20):
w_pad = default
h_pad = default
# dynamic padding
if default == 0:
rbox = RBox.fromPointBoundingBox(box)
w_pad = round(0.205 * rbox.w)
h_pad = round(0.205 * rbox.h)
# extract with and height from shape
height, width = shape[0:2]
# extract starting, ending x and y from box
((x_start, y_start), (x_end, y_end)) = box
# check if is it possible to add certain padding
# if not add possible padding for all 4 points
pad_x_start = h_pad
if y_start - pad_x_start < 0:
pad_x_start = y_start
pad_y_start = w_pad
if x_start - pad_y_start < 0:
pad_y_start = x_start
pad_x_end = w_pad
if y_end + pad_x_end >= height:
pad_x_end = height - y_end - 1
pad_y_end = h_pad
if x_end + pad_y_end >= width:
pad_y_end = width - x_end - 1
# return resultant padding
return pad_x_start, pad_x_end, pad_y_start, pad_y_end
def findConnectedComponents(frame, threshold = 150, blur_radius = 1.0):
img = frame.copy() # gray-scale image
# smooth the image (to remove small objects)
imgf = ndimage.gaussian_filter(img, blur_radius)
# find connected components
labeled, nr_objects = ndimage.label(imgf > threshold)
return labeled, nr_objects
def drawBoundingBox(im, start, end, color):
cv2.rectangle(im, start, end, color, 1)
def pwpBasedTracking(image, frame_models, threshold):
kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (7, 7))
predicted = np.zeros((image.shape[0:2]), np.uint8)
# FOREACH GIVEN PATCH AND ITS MODEL, APPLY MODEL TO PATCH
for fm in frame_models:
patch = extractPatch(image, fm[1])
#patch = cv2.medianBlur(patch, 5)
mask = np.zeros(patch.shape[0:2], np.uint8)
res = applyModel(patch, mask, fm[0])
res = cv2.morphologyEx(res, cv2.MORPH_OPEN, kernel)
res = cv2.morphologyEx(res, cv2.MORPH_CLOSE, kernel)
res = cv2.morphologyEx(res, cv2.MORPH_OPEN, kernel)
res = cv2.morphologyEx(res, cv2.MORPH_CLOSE, kernel)
if(len(np.nonzero(res)[0]) > max(fm[2] * threshold, 10) ):
predicted[fm[1][0]: fm[1][1], fm[1][2]: fm[1][3]] += res;
return predicted
def extractPatch(im, box):
# extract coordinates
x1, x2, y1, y2 = box
# extract and return patch
return im[x1: x2, y1: y2, :]
def randomColor():
return np.random.randint(0, 255, (1, 3))[0].tolist()
def performColorProcessing(image, mask, iterations = 1):
# initialize kernel
kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (3, 3))
for i in range(iterations):
model = computePosteriors(image, np.uint8(mask > 0) + 0)
mask = applyModel(image, mask, model)
cv2.morphologyEx(mask, cv2.MORPH_OPEN, kernel=kernel)
return mask
def killDyingLables(frame, mask, threshold = 0.5):
# get initial weights of lables
initial_weights = np.array([np.sum(frame == lable) for lable in range(np.amax(frame) + 1)]) + 0.00001
# get final labled frame
labled_frame = frame * mask
# get final weights
final_weights = np.array([np.sum(labled_frame == lable) for lable in range(np.amax(frame) + 1)])
# final probabilites
final_probs = (final_weights/initial_weights) < threshold
for lable in range(len(final_probs)):
dying = final_probs[lable]
# check is lable is dying
if dying:
# kill lable
labled_frame -= np.uint8((labled_frame == lable) * lable)
# return final labled frame
return labled_frame
def killSmallLables(frame, threshold = 150):
# get initial weights of lables
initial_weights = np.array([np.sum(frame == lable) for lable in range(np.amax(frame) + 1)])
# final probabilites
final_probs = initial_weights < threshold
for lable in range(len(final_probs)):
dying = final_probs[lable]
# check is lable is dying
if dying:
# kill lable
frame -= np.uint8(np.uint8(frame == lable) * lable)
# return final labled frame
return frame
class RBox:
def __init__(self):
# initialize atributes
self.x = 0
self.y = 0
self.w = 0
self.h = 0
@staticmethod
def fromClassicalBoundingBox(box):
# initialize rbox
rbox = RBox()
# copy attributes
rbox.x = box[0]
rbox.y = box[1]
rbox.w = box[2]
rbox.h = box[3]
# return rbox
return rbox
@staticmethod
def fromClassicalBoundingBoxes(boxes):
return [RBox.fromClassicalBoundingBox(box) for box in boxes]
@staticmethod
def fromRoughBoundingBox(box):
# initialize rbox
rbox = RBox()
# copy attributes
rbox.x = box[0]
rbox.y = box[2]
rbox.h = box[1] - box[0]
rbox.w = box[3] - box[2]
# return rbox
return rbox
@staticmethod
def fromPointBoundingBox(box):
# initialize rbox
rbox = RBox()
# copy attributes
rbox.x = box[0][0]
rbox.y = box[0][1]
rbox.w = box[1][0] - box[0][0]
rbox.h = box[1][1] - box[0][1]
# return rbox
return rbox
@staticmethod
def fromPointBoundingBoxes(boxes):
return [RBox.fromPointBoundingBox(box) for box in boxes]
def classicalBoundingBox(self):
# return array like bounding box
return [self.x, self.y, self.w, self.h]
def pointBoundingBox(self):
# return tuple of end points
return ((self.x, self.y), (self.x + self.w, self.y + self.h))
def area(self):
return self.h * self.w
def __or__(self, other_box):
# initialize resultant box
rbox = RBox()
# calculate values
rbox.x = min(self.x, other_box.x)
rbox.y = min(self.y, other_box.y)
rbox.w = max(self.x + self.w, other_box.x + other_box.w) - rbox.x
rbox.h = max(self.y + self.h, other_box.y + other_box.h) - rbox.y
return rbox
def __and__(self, other_box):
# initialize resultant box
rbox = RBox()
# calculate values
rbox.x = max(self.x, other_box.x)
rbox.y = max(self.y, other_box.y)
rbox.w = min(self.x + self.w, other_box.x + other_box.w) - rbox.x
rbox.h = min(self.y + self.h, other_box.y + other_box.h) - rbox.y
if rbox.w < 0 or rbox.h < 0:
# reinitailize or make it zero
rbox = RBox()
return rbox
def similarity(self, other_box):
# (A & B)/(A | B) = (A & B).area/(A.area + B.area - (A & B).area)
#return (self & other_box).area()/(self.area() + other_box.area() - (self & other_box).area())
min_area = min(self.area(), other_box.area())
return (self & other_box).area()/min_area
def __str__(self):
return "{} {} {} {}".format(self.x, self.y, self.w, self.h)
def __mul__(self, other_box):
# calculate similarity and return
return self.similarity(other_box)
def __eq__(self, other):
return self.x == other.x and self.y == other.y and self.w == other.w and self.h == other.h
@staticmethod
def similarityStats(boxes):
# create matrix out of boxes
sim_mat = np.array(boxes).reshape((-1, 1))
sim_mat = np.tril(sim_mat.dot(sim_mat.T), -1)
# return similarity matrix
return sim_mat
@staticmethod
def similarityThreshold(boxes, threshold = 0.8):
# get similarity matrix
sim_mat = RBox.similarityStats(boxes)
# find thresholded indexes
ind = np.array(np.nonzero(sim_mat > threshold))
# return in the form of list
return list(ind.T)
@staticmethod
def reduceBoxes(boxes, threshold=0.8):
similar_boxes = RBox.similarityThreshold(boxes, threshold)
while len(similar_boxes) > 0:
union = boxes[similar_boxes[0][1]] | boxes[similar_boxes[0][0]]
# remove similar boxes
del boxes[similar_boxes[0][0]]
del boxes[similar_boxes[0][1]]
boxes.append(union)
similar_boxes = RBox.similarityThreshold(boxes, threshold)
return boxes
@staticmethod
def toPointBoundingBoxes(boxes):
return [box.pointBoundingBox() for box in boxes]
@staticmethod
def toClassicBoundingBoxes(boxes):
return [box.classicalBoundingBox() for box in boxes]
def extractPatchFromImage(self, image, square=False):
# get bounding box end points
(start, end) = self.pointBoundingBox()
start, end = list(start), list(end)
# check if square flag is on
if square:
im_h, im_w = image.shape[0:2]
# adjust start and end so that height and width are equal
if self.h != self.w:
# find bigger size
if self.h > self.w:
# find difference
diff = self.h - self.w
if start[0] >= int(diff/2):
start[0] -= math.floor(diff/2)
diff -= math.floor(diff/2)
else:
diff -= start[0]
start[0] = 0
end[0] += diff
if end[0] >= im_w:
diff = end[0] - im_w + 1
end[1] -= diff
else:
# find difference
diff = self.w - self.h
if start[1] >= int(diff / 2):
start[1] -= math.floor(diff / 2)
diff -= math.floor(diff / 2)
else:
diff -= start[1]
start[1] = 0
end[1] += diff
if end[1] >= im_h:
diff = end[1] - im_h + 1
end[0] -= diff
# return patch
return image[start[1]: end[1], start[0]: end[0]]
def addPatchtoImage(self, image, patch):
# get bounding box end points
(start, end) = self.pointBoundingBox()
# patch in to image
image[start[1]: end[1], start[0]: end[0]] = patch
# return image
return image
def askForLable(patch):
# write an image to send
cv2.imwrite("patch.jpg", patch)
# setup client socket
clientSock = socket(AF_INET, SOCK_STREAM)
clientSock.connect((TCP_IP, TCP_PORT))
# open image
image = open("patch.jpg", 'rb')
# read bytes equal to buffer size
data = image.read(BUFFER_SIZE)
# while image still has data
while (data):
# send data to server
clientSock.send(data)
# read more data if available
data = image.read(BUFFER_SIZE)
# close file
image.close()
# signal server to end data stream
clientSock.shutdown(SHUT_WR)
# recieved lable as binary data from server and convert it to string
label = clientSock.recv(1024)
label = label.decode("utf-8")
return label
| labled = (fimage == lable) + 0
# find indexes
box = find_bounding_box(labled)
# append found bouding box
boxes.append(box) | conditional_block |
aug_utility.py | import numpy as np
import cv2
from pixcel import *
from scipy import ndimage
import math
from socket import *
from config import *
from time import time
def find_bounding_boxes(fimage, lables):
# initialize boxes array
boxes = []
for lable in lables:
# iterate all lables
# filter out image pixels with current lable
labled = (fimage == lable) + 0
# find indexes
box = find_bounding_box(labled)
# append found bouding box
boxes.append(box)
return boxes
def find_margined_bounding_boxes(fimage, lables, margins):
# initialize boxes array
boxes = []
for lable in lables:
# iterate all lables
# filter out image pixels with current lable
labled = (fimage == lable) + 0
# find indexes
box = find_bounding_box(labled, margins)
# append found bouding box
boxes.append(box)
return boxes
def find_bounding_box(binary_matrix, margins=(0, 0)):
# extract indexes of foreground pixels
indicies = np.array(np.nonzero(binary_matrix + 0))
# get contours
ys = margins[1] + np.amin(indicies[0])
ye = margins[1] + np.amax(indicies[0])
xs = margins[0] + np.amin(indicies[1])
xe = margins[0] + np.amax(indicies[1])
# return contours
return [(xs, ys), (xe, ye)]
def weightFilter(image, lables, weight):
max = 0
weights = np.zeros((lables))
fimage = np.zeros_like(image)
retained_lables = []
for i in range(lables):
weights[i] = np.sum(np.sum(image == i))
if weights[i] > weights[max]:
max = i
if weights[i] > weight:
fimage += np.uint8((image == i) + 0)
retained_lables.append(i)
fimage -= np.uint8((image == max) + 0)
fimage = np.uint8(fimage * 255)
boxes = []
if (len(retained_lables) > 0):
retained_lables.remove(max)
boxes = find_bounding_boxes(image.copy(), retained_lables)
return fimage, boxes
def weightFilterMini(image, weight):
image = np.uint8(image)
# extract contours
image, contours, hierarchy = cv2.findContours(image, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
final_contours = []
for cnt in contours:
if cv2.contourArea(cnt) >= weight:
# add it to final_contours
final_contours.append(cnt)
fimage = np.zeros((image.shape[:2]), np.uint8)
cv2.drawContours(fimage, final_contours, -1, 255, -1)
boxes = RBox.toPointBoundingBoxes(RBox.fromClassicalBoundingBoxes([cv2.boundingRect(cnt) for cnt in final_contours]))
return fimage, boxes
def weightFilterMargined(image, lables, weight, margins):
max = 0
weights = np.zeros((lables))
fimage = np.zeros_like(image)
retained_lables = []
for i in range(lables):
weights[i] = np.sum(np.sum(image == i))
if weights[i] > weights[max]:
max = i
if weights[i] > weight:
fimage += np.uint8((image == i) + 0)
retained_lables.append(i)
fimage -= np.uint8(image == max)
fimage = np.uint8(fimage * 255)
boxes = []
if (len(retained_lables) > 0):
retained_lables.remove(max)
boxes = find_margined_bounding_boxes(image.copy(), retained_lables, margins)
return fimage, boxes
def calculatePossiblePadding(box, shape, default = 20):
w_pad = default
h_pad = default
# dynamic padding
if default == 0:
rbox = RBox.fromPointBoundingBox(box)
w_pad = round(0.205 * rbox.w)
h_pad = round(0.205 * rbox.h)
# extract with and height from shape
height, width = shape[0:2]
# extract starting, ending x and y from box
((x_start, y_start), (x_end, y_end)) = box
# check if is it possible to add certain padding
# if not add possible padding for all 4 points
pad_x_start = h_pad
if y_start - pad_x_start < 0:
pad_x_start = y_start
pad_y_start = w_pad
if x_start - pad_y_start < 0:
pad_y_start = x_start
pad_x_end = w_pad
if y_end + pad_x_end >= height:
pad_x_end = height - y_end - 1
pad_y_end = h_pad
if x_end + pad_y_end >= width:
pad_y_end = width - x_end - 1
# return resultant padding
return pad_x_start, pad_x_end, pad_y_start, pad_y_end
def findConnectedComponents(frame, threshold = 150, blur_radius = 1.0):
img = frame.copy() # gray-scale image
# smooth the image (to remove small objects)
imgf = ndimage.gaussian_filter(img, blur_radius)
# find connected components
labeled, nr_objects = ndimage.label(imgf > threshold)
return labeled, nr_objects
def drawBoundingBox(im, start, end, color):
cv2.rectangle(im, start, end, color, 1)
def pwpBasedTracking(image, frame_models, threshold):
kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (7, 7))
predicted = np.zeros((image.shape[0:2]), np.uint8)
# FOREACH GIVEN PATCH AND ITS MODEL, APPLY MODEL TO PATCH
for fm in frame_models:
patch = extractPatch(image, fm[1])
#patch = cv2.medianBlur(patch, 5)
mask = np.zeros(patch.shape[0:2], np.uint8)
res = applyModel(patch, mask, fm[0])
res = cv2.morphologyEx(res, cv2.MORPH_OPEN, kernel)
res = cv2.morphologyEx(res, cv2.MORPH_CLOSE, kernel)
res = cv2.morphologyEx(res, cv2.MORPH_OPEN, kernel)
res = cv2.morphologyEx(res, cv2.MORPH_CLOSE, kernel)
if(len(np.nonzero(res)[0]) > max(fm[2] * threshold, 10) ):
predicted[fm[1][0]: fm[1][1], fm[1][2]: fm[1][3]] += res;
return predicted
def extractPatch(im, box):
# extract coordinates
x1, x2, y1, y2 = box
# extract and return patch
return im[x1: x2, y1: y2, :]
def randomColor():
return np.random.randint(0, 255, (1, 3))[0].tolist()
def performColorProcessing(image, mask, iterations = 1):
# initialize kernel
kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (3, 3))
for i in range(iterations):
model = computePosteriors(image, np.uint8(mask > 0) + 0)
mask = applyModel(image, mask, model)
cv2.morphologyEx(mask, cv2.MORPH_OPEN, kernel=kernel)
return mask
def killDyingLables(frame, mask, threshold = 0.5):
# get initial weights of lables
initial_weights = np.array([np.sum(frame == lable) for lable in range(np.amax(frame) + 1)]) + 0.00001
# get final labled frame
labled_frame = frame * mask
# get final weights
final_weights = np.array([np.sum(labled_frame == lable) for lable in range(np.amax(frame) + 1)])
# final probabilites
final_probs = (final_weights/initial_weights) < threshold
for lable in range(len(final_probs)):
dying = final_probs[lable]
# check is lable is dying
if dying:
# kill lable
labled_frame -= np.uint8((labled_frame == lable) * lable)
# return final labled frame
return labled_frame
def killSmallLables(frame, threshold = 150):
# get initial weights of lables
initial_weights = np.array([np.sum(frame == lable) for lable in range(np.amax(frame) + 1)])
# final probabilites
final_probs = initial_weights < threshold
for lable in range(len(final_probs)):
dying = final_probs[lable]
# check is lable is dying
if dying:
# kill lable
frame -= np.uint8(np.uint8(frame == lable) * lable)
# return final labled frame
return frame
class RBox:
def __init__(self):
# initialize atributes
self.x = 0
self.y = 0
self.w = 0
self.h = 0
@staticmethod
def fromClassicalBoundingBox(box):
# initialize rbox
rbox = RBox()
# copy attributes
rbox.x = box[0]
rbox.y = box[1]
rbox.w = box[2]
rbox.h = box[3]
# return rbox
return rbox
@staticmethod
def fromClassicalBoundingBoxes(boxes):
return [RBox.fromClassicalBoundingBox(box) for box in boxes]
@staticmethod
def fromRoughBoundingBox(box):
# initialize rbox
rbox = RBox()
# copy attributes
rbox.x = box[0]
rbox.y = box[2]
rbox.h = box[1] - box[0]
rbox.w = box[3] - box[2]
# return rbox
return rbox
@staticmethod
def fromPointBoundingBox(box):
# initialize rbox
rbox = RBox()
# copy attributes
rbox.x = box[0][0]
rbox.y = box[0][1]
rbox.w = box[1][0] - box[0][0]
rbox.h = box[1][1] - box[0][1]
# return rbox
return rbox
@staticmethod
def fromPointBoundingBoxes(boxes):
return [RBox.fromPointBoundingBox(box) for box in boxes]
def classicalBoundingBox(self):
# return array like bounding box
return [self.x, self.y, self.w, self.h]
def pointBoundingBox(self):
# return tuple of end points
return ((self.x, self.y), (self.x + self.w, self.y + self.h))
def area(self):
return self.h * self.w
def __or__(self, other_box):
# initialize resultant box
rbox = RBox()
# calculate values
rbox.x = min(self.x, other_box.x)
rbox.y = min(self.y, other_box.y)
rbox.w = max(self.x + self.w, other_box.x + other_box.w) - rbox.x
rbox.h = max(self.y + self.h, other_box.y + other_box.h) - rbox.y
return rbox
def __and__(self, other_box):
# initialize resultant box
rbox = RBox()
# calculate values
rbox.x = max(self.x, other_box.x)
rbox.y = max(self.y, other_box.y)
rbox.w = min(self.x + self.w, other_box.x + other_box.w) - rbox.x
rbox.h = min(self.y + self.h, other_box.y + other_box.h) - rbox.y
if rbox.w < 0 or rbox.h < 0:
# reinitailize or make it zero
rbox = RBox()
return rbox
def similarity(self, other_box):
# (A & B)/(A | B) = (A & B).area/(A.area + B.area - (A & B).area)
#return (self & other_box).area()/(self.area() + other_box.area() - (self & other_box).area())
min_area = min(self.area(), other_box.area())
return (self & other_box).area()/min_area
def __str__(self):
return "{} {} {} {}".format(self.x, self.y, self.w, self.h)
def __mul__(self, other_box):
# calculate similarity and return
return self.similarity(other_box)
def | (self, other):
return self.x == other.x and self.y == other.y and self.w == other.w and self.h == other.h
@staticmethod
def similarityStats(boxes):
# create matrix out of boxes
sim_mat = np.array(boxes).reshape((-1, 1))
sim_mat = np.tril(sim_mat.dot(sim_mat.T), -1)
# return similarity matrix
return sim_mat
@staticmethod
def similarityThreshold(boxes, threshold = 0.8):
# get similarity matrix
sim_mat = RBox.similarityStats(boxes)
# find thresholded indexes
ind = np.array(np.nonzero(sim_mat > threshold))
# return in the form of list
return list(ind.T)
@staticmethod
def reduceBoxes(boxes, threshold=0.8):
similar_boxes = RBox.similarityThreshold(boxes, threshold)
while len(similar_boxes) > 0:
union = boxes[similar_boxes[0][1]] | boxes[similar_boxes[0][0]]
# remove similar boxes
del boxes[similar_boxes[0][0]]
del boxes[similar_boxes[0][1]]
boxes.append(union)
similar_boxes = RBox.similarityThreshold(boxes, threshold)
return boxes
@staticmethod
def toPointBoundingBoxes(boxes):
return [box.pointBoundingBox() for box in boxes]
@staticmethod
def toClassicBoundingBoxes(boxes):
return [box.classicalBoundingBox() for box in boxes]
def extractPatchFromImage(self, image, square=False):
# get bounding box end points
(start, end) = self.pointBoundingBox()
start, end = list(start), list(end)
# check if square flag is on
if square:
im_h, im_w = image.shape[0:2]
# adjust start and end so that height and width are equal
if self.h != self.w:
# find bigger size
if self.h > self.w:
# find difference
diff = self.h - self.w
if start[0] >= int(diff/2):
start[0] -= math.floor(diff/2)
diff -= math.floor(diff/2)
else:
diff -= start[0]
start[0] = 0
end[0] += diff
if end[0] >= im_w:
diff = end[0] - im_w + 1
end[1] -= diff
else:
# find difference
diff = self.w - self.h
if start[1] >= int(diff / 2):
start[1] -= math.floor(diff / 2)
diff -= math.floor(diff / 2)
else:
diff -= start[1]
start[1] = 0
end[1] += diff
if end[1] >= im_h:
diff = end[1] - im_h + 1
end[0] -= diff
# return patch
return image[start[1]: end[1], start[0]: end[0]]
def addPatchtoImage(self, image, patch):
# get bounding box end points
(start, end) = self.pointBoundingBox()
# patch in to image
image[start[1]: end[1], start[0]: end[0]] = patch
# return image
return image
def askForLable(patch):
# write an image to send
cv2.imwrite("patch.jpg", patch)
# setup client socket
clientSock = socket(AF_INET, SOCK_STREAM)
clientSock.connect((TCP_IP, TCP_PORT))
# open image
image = open("patch.jpg", 'rb')
# read bytes equal to buffer size
data = image.read(BUFFER_SIZE)
# while image still has data
while (data):
# send data to server
clientSock.send(data)
# read more data if available
data = image.read(BUFFER_SIZE)
# close file
image.close()
# signal server to end data stream
clientSock.shutdown(SHUT_WR)
# recieved lable as binary data from server and convert it to string
label = clientSock.recv(1024)
label = label.decode("utf-8")
return label
| __eq__ | identifier_name |
aug_utility.py | import numpy as np
import cv2
from pixcel import *
from scipy import ndimage
import math
from socket import *
from config import *
from time import time
def find_bounding_boxes(fimage, lables):
# initialize boxes array
boxes = []
for lable in lables:
# iterate all lables
# filter out image pixels with current lable
labled = (fimage == lable) + 0
# find indexes
box = find_bounding_box(labled)
# append found bouding box
boxes.append(box)
return boxes
def find_margined_bounding_boxes(fimage, lables, margins):
# initialize boxes array
boxes = []
for lable in lables:
# iterate all lables
# filter out image pixels with current lable
labled = (fimage == lable) + 0
# find indexes
box = find_bounding_box(labled, margins)
# append found bouding box
boxes.append(box)
return boxes
def find_bounding_box(binary_matrix, margins=(0, 0)):
# extract indexes of foreground pixels
indicies = np.array(np.nonzero(binary_matrix + 0))
# get contours
ys = margins[1] + np.amin(indicies[0])
ye = margins[1] + np.amax(indicies[0])
xs = margins[0] + np.amin(indicies[1])
xe = margins[0] + np.amax(indicies[1])
# return contours
return [(xs, ys), (xe, ye)]
def weightFilter(image, lables, weight):
max = 0
weights = np.zeros((lables))
fimage = np.zeros_like(image)
retained_lables = []
for i in range(lables):
weights[i] = np.sum(np.sum(image == i))
if weights[i] > weights[max]:
max = i
if weights[i] > weight:
fimage += np.uint8((image == i) + 0)
retained_lables.append(i)
fimage -= np.uint8((image == max) + 0)
fimage = np.uint8(fimage * 255)
boxes = []
if (len(retained_lables) > 0):
retained_lables.remove(max)
boxes = find_bounding_boxes(image.copy(), retained_lables)
return fimage, boxes
def weightFilterMini(image, weight):
image = np.uint8(image)
# extract contours
image, contours, hierarchy = cv2.findContours(image, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
final_contours = []
for cnt in contours:
if cv2.contourArea(cnt) >= weight:
# add it to final_contours
final_contours.append(cnt)
fimage = np.zeros((image.shape[:2]), np.uint8)
cv2.drawContours(fimage, final_contours, -1, 255, -1)
boxes = RBox.toPointBoundingBoxes(RBox.fromClassicalBoundingBoxes([cv2.boundingRect(cnt) for cnt in final_contours]))
return fimage, boxes
def weightFilterMargined(image, lables, weight, margins):
max = 0
weights = np.zeros((lables))
fimage = np.zeros_like(image)
retained_lables = []
for i in range(lables):
weights[i] = np.sum(np.sum(image == i))
if weights[i] > weights[max]:
max = i
if weights[i] > weight:
fimage += np.uint8((image == i) + 0)
retained_lables.append(i)
fimage -= np.uint8(image == max)
fimage = np.uint8(fimage * 255)
boxes = []
if (len(retained_lables) > 0):
retained_lables.remove(max)
boxes = find_margined_bounding_boxes(image.copy(), retained_lables, margins)
return fimage, boxes
def calculatePossiblePadding(box, shape, default = 20):
w_pad = default
h_pad = default
# dynamic padding
if default == 0:
rbox = RBox.fromPointBoundingBox(box)
w_pad = round(0.205 * rbox.w)
h_pad = round(0.205 * rbox.h)
# extract with and height from shape
height, width = shape[0:2]
# extract starting, ending x and y from box
((x_start, y_start), (x_end, y_end)) = box
| pad_x_start = h_pad
if y_start - pad_x_start < 0:
pad_x_start = y_start
pad_y_start = w_pad
if x_start - pad_y_start < 0:
pad_y_start = x_start
pad_x_end = w_pad
if y_end + pad_x_end >= height:
pad_x_end = height - y_end - 1
pad_y_end = h_pad
if x_end + pad_y_end >= width:
pad_y_end = width - x_end - 1
# return resultant padding
return pad_x_start, pad_x_end, pad_y_start, pad_y_end
def findConnectedComponents(frame, threshold = 150, blur_radius = 1.0):
img = frame.copy() # gray-scale image
# smooth the image (to remove small objects)
imgf = ndimage.gaussian_filter(img, blur_radius)
# find connected components
labeled, nr_objects = ndimage.label(imgf > threshold)
return labeled, nr_objects
def drawBoundingBox(im, start, end, color):
cv2.rectangle(im, start, end, color, 1)
def pwpBasedTracking(image, frame_models, threshold):
kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (7, 7))
predicted = np.zeros((image.shape[0:2]), np.uint8)
# FOREACH GIVEN PATCH AND ITS MODEL, APPLY MODEL TO PATCH
for fm in frame_models:
patch = extractPatch(image, fm[1])
#patch = cv2.medianBlur(patch, 5)
mask = np.zeros(patch.shape[0:2], np.uint8)
res = applyModel(patch, mask, fm[0])
res = cv2.morphologyEx(res, cv2.MORPH_OPEN, kernel)
res = cv2.morphologyEx(res, cv2.MORPH_CLOSE, kernel)
res = cv2.morphologyEx(res, cv2.MORPH_OPEN, kernel)
res = cv2.morphologyEx(res, cv2.MORPH_CLOSE, kernel)
if(len(np.nonzero(res)[0]) > max(fm[2] * threshold, 10) ):
predicted[fm[1][0]: fm[1][1], fm[1][2]: fm[1][3]] += res;
return predicted
def extractPatch(im, box):
# extract coordinates
x1, x2, y1, y2 = box
# extract and return patch
return im[x1: x2, y1: y2, :]
def randomColor():
return np.random.randint(0, 255, (1, 3))[0].tolist()
def performColorProcessing(image, mask, iterations = 1):
# initialize kernel
kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (3, 3))
for i in range(iterations):
model = computePosteriors(image, np.uint8(mask > 0) + 0)
mask = applyModel(image, mask, model)
cv2.morphologyEx(mask, cv2.MORPH_OPEN, kernel=kernel)
return mask
def killDyingLables(frame, mask, threshold = 0.5):
# get initial weights of lables
initial_weights = np.array([np.sum(frame == lable) for lable in range(np.amax(frame) + 1)]) + 0.00001
# get final labled frame
labled_frame = frame * mask
# get final weights
final_weights = np.array([np.sum(labled_frame == lable) for lable in range(np.amax(frame) + 1)])
# final probabilites
final_probs = (final_weights/initial_weights) < threshold
for lable in range(len(final_probs)):
dying = final_probs[lable]
# check is lable is dying
if dying:
# kill lable
labled_frame -= np.uint8((labled_frame == lable) * lable)
# return final labled frame
return labled_frame
def killSmallLables(frame, threshold = 150):
# get initial weights of lables
initial_weights = np.array([np.sum(frame == lable) for lable in range(np.amax(frame) + 1)])
# final probabilites
final_probs = initial_weights < threshold
for lable in range(len(final_probs)):
dying = final_probs[lable]
# check is lable is dying
if dying:
# kill lable
frame -= np.uint8(np.uint8(frame == lable) * lable)
# return final labled frame
return frame
class RBox:
def __init__(self):
# initialize atributes
self.x = 0
self.y = 0
self.w = 0
self.h = 0
@staticmethod
def fromClassicalBoundingBox(box):
# initialize rbox
rbox = RBox()
# copy attributes
rbox.x = box[0]
rbox.y = box[1]
rbox.w = box[2]
rbox.h = box[3]
# return rbox
return rbox
@staticmethod
def fromClassicalBoundingBoxes(boxes):
return [RBox.fromClassicalBoundingBox(box) for box in boxes]
@staticmethod
def fromRoughBoundingBox(box):
# initialize rbox
rbox = RBox()
# copy attributes
rbox.x = box[0]
rbox.y = box[2]
rbox.h = box[1] - box[0]
rbox.w = box[3] - box[2]
# return rbox
return rbox
@staticmethod
def fromPointBoundingBox(box):
# initialize rbox
rbox = RBox()
# copy attributes
rbox.x = box[0][0]
rbox.y = box[0][1]
rbox.w = box[1][0] - box[0][0]
rbox.h = box[1][1] - box[0][1]
# return rbox
return rbox
@staticmethod
def fromPointBoundingBoxes(boxes):
return [RBox.fromPointBoundingBox(box) for box in boxes]
def classicalBoundingBox(self):
# return array like bounding box
return [self.x, self.y, self.w, self.h]
def pointBoundingBox(self):
# return tuple of end points
return ((self.x, self.y), (self.x + self.w, self.y + self.h))
def area(self):
return self.h * self.w
def __or__(self, other_box):
# initialize resultant box
rbox = RBox()
# calculate values
rbox.x = min(self.x, other_box.x)
rbox.y = min(self.y, other_box.y)
rbox.w = max(self.x + self.w, other_box.x + other_box.w) - rbox.x
rbox.h = max(self.y + self.h, other_box.y + other_box.h) - rbox.y
return rbox
def __and__(self, other_box):
# initialize resultant box
rbox = RBox()
# calculate values
rbox.x = max(self.x, other_box.x)
rbox.y = max(self.y, other_box.y)
rbox.w = min(self.x + self.w, other_box.x + other_box.w) - rbox.x
rbox.h = min(self.y + self.h, other_box.y + other_box.h) - rbox.y
if rbox.w < 0 or rbox.h < 0:
# reinitailize or make it zero
rbox = RBox()
return rbox
def similarity(self, other_box):
# (A & B)/(A | B) = (A & B).area/(A.area + B.area - (A & B).area)
#return (self & other_box).area()/(self.area() + other_box.area() - (self & other_box).area())
min_area = min(self.area(), other_box.area())
return (self & other_box).area()/min_area
def __str__(self):
return "{} {} {} {}".format(self.x, self.y, self.w, self.h)
def __mul__(self, other_box):
# calculate similarity and return
return self.similarity(other_box)
def __eq__(self, other):
return self.x == other.x and self.y == other.y and self.w == other.w and self.h == other.h
@staticmethod
def similarityStats(boxes):
# create matrix out of boxes
sim_mat = np.array(boxes).reshape((-1, 1))
sim_mat = np.tril(sim_mat.dot(sim_mat.T), -1)
# return similarity matrix
return sim_mat
@staticmethod
def similarityThreshold(boxes, threshold = 0.8):
# get similarity matrix
sim_mat = RBox.similarityStats(boxes)
# find thresholded indexes
ind = np.array(np.nonzero(sim_mat > threshold))
# return in the form of list
return list(ind.T)
@staticmethod
def reduceBoxes(boxes, threshold=0.8):
similar_boxes = RBox.similarityThreshold(boxes, threshold)
while len(similar_boxes) > 0:
union = boxes[similar_boxes[0][1]] | boxes[similar_boxes[0][0]]
# remove similar boxes
del boxes[similar_boxes[0][0]]
del boxes[similar_boxes[0][1]]
boxes.append(union)
similar_boxes = RBox.similarityThreshold(boxes, threshold)
return boxes
@staticmethod
def toPointBoundingBoxes(boxes):
return [box.pointBoundingBox() for box in boxes]
@staticmethod
def toClassicBoundingBoxes(boxes):
return [box.classicalBoundingBox() for box in boxes]
def extractPatchFromImage(self, image, square=False):
# get bounding box end points
(start, end) = self.pointBoundingBox()
start, end = list(start), list(end)
# check if square flag is on
if square:
im_h, im_w = image.shape[0:2]
# adjust start and end so that height and width are equal
if self.h != self.w:
# find bigger size
if self.h > self.w:
# find difference
diff = self.h - self.w
if start[0] >= int(diff/2):
start[0] -= math.floor(diff/2)
diff -= math.floor(diff/2)
else:
diff -= start[0]
start[0] = 0
end[0] += diff
if end[0] >= im_w:
diff = end[0] - im_w + 1
end[1] -= diff
else:
# find difference
diff = self.w - self.h
if start[1] >= int(diff / 2):
start[1] -= math.floor(diff / 2)
diff -= math.floor(diff / 2)
else:
diff -= start[1]
start[1] = 0
end[1] += diff
if end[1] >= im_h:
diff = end[1] - im_h + 1
end[0] -= diff
# return patch
return image[start[1]: end[1], start[0]: end[0]]
def addPatchtoImage(self, image, patch):
# get bounding box end points
(start, end) = self.pointBoundingBox()
# patch in to image
image[start[1]: end[1], start[0]: end[0]] = patch
# return image
return image
def askForLable(patch):
# write an image to send
cv2.imwrite("patch.jpg", patch)
# setup client socket
clientSock = socket(AF_INET, SOCK_STREAM)
clientSock.connect((TCP_IP, TCP_PORT))
# open image
image = open("patch.jpg", 'rb')
# read bytes equal to buffer size
data = image.read(BUFFER_SIZE)
# while image still has data
while (data):
# send data to server
clientSock.send(data)
# read more data if available
data = image.read(BUFFER_SIZE)
# close file
image.close()
# signal server to end data stream
clientSock.shutdown(SHUT_WR)
# recieved lable as binary data from server and convert it to string
label = clientSock.recv(1024)
label = label.decode("utf-8")
return label | # check if is it possible to add certain padding
# if not add possible padding for all 4 points | random_line_split |
aug_utility.py | import numpy as np
import cv2
from pixcel import *
from scipy import ndimage
import math
from socket import *
from config import *
from time import time
def find_bounding_boxes(fimage, lables):
# initialize boxes array
boxes = []
for lable in lables:
# iterate all lables
# filter out image pixels with current lable
labled = (fimage == lable) + 0
# find indexes
box = find_bounding_box(labled)
# append found bouding box
boxes.append(box)
return boxes
def find_margined_bounding_boxes(fimage, lables, margins):
# initialize boxes array
boxes = []
for lable in lables:
# iterate all lables
# filter out image pixels with current lable
labled = (fimage == lable) + 0
# find indexes
box = find_bounding_box(labled, margins)
# append found bouding box
boxes.append(box)
return boxes
def find_bounding_box(binary_matrix, margins=(0, 0)):
# extract indexes of foreground pixels
indicies = np.array(np.nonzero(binary_matrix + 0))
# get contours
ys = margins[1] + np.amin(indicies[0])
ye = margins[1] + np.amax(indicies[0])
xs = margins[0] + np.amin(indicies[1])
xe = margins[0] + np.amax(indicies[1])
# return contours
return [(xs, ys), (xe, ye)]
def weightFilter(image, lables, weight):
max = 0
weights = np.zeros((lables))
fimage = np.zeros_like(image)
retained_lables = []
for i in range(lables):
weights[i] = np.sum(np.sum(image == i))
if weights[i] > weights[max]:
max = i
if weights[i] > weight:
fimage += np.uint8((image == i) + 0)
retained_lables.append(i)
fimage -= np.uint8((image == max) + 0)
fimage = np.uint8(fimage * 255)
boxes = []
if (len(retained_lables) > 0):
retained_lables.remove(max)
boxes = find_bounding_boxes(image.copy(), retained_lables)
return fimage, boxes
def weightFilterMini(image, weight):
image = np.uint8(image)
# extract contours
image, contours, hierarchy = cv2.findContours(image, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
final_contours = []
for cnt in contours:
if cv2.contourArea(cnt) >= weight:
# add it to final_contours
final_contours.append(cnt)
fimage = np.zeros((image.shape[:2]), np.uint8)
cv2.drawContours(fimage, final_contours, -1, 255, -1)
boxes = RBox.toPointBoundingBoxes(RBox.fromClassicalBoundingBoxes([cv2.boundingRect(cnt) for cnt in final_contours]))
return fimage, boxes
def weightFilterMargined(image, lables, weight, margins):
max = 0
weights = np.zeros((lables))
fimage = np.zeros_like(image)
retained_lables = []
for i in range(lables):
weights[i] = np.sum(np.sum(image == i))
if weights[i] > weights[max]:
max = i
if weights[i] > weight:
fimage += np.uint8((image == i) + 0)
retained_lables.append(i)
fimage -= np.uint8(image == max)
fimage = np.uint8(fimage * 255)
boxes = []
if (len(retained_lables) > 0):
retained_lables.remove(max)
boxes = find_margined_bounding_boxes(image.copy(), retained_lables, margins)
return fimage, boxes
def calculatePossiblePadding(box, shape, default = 20):
w_pad = default
h_pad = default
# dynamic padding
if default == 0:
rbox = RBox.fromPointBoundingBox(box)
w_pad = round(0.205 * rbox.w)
h_pad = round(0.205 * rbox.h)
# extract with and height from shape
height, width = shape[0:2]
# extract starting, ending x and y from box
((x_start, y_start), (x_end, y_end)) = box
# check if is it possible to add certain padding
# if not add possible padding for all 4 points
pad_x_start = h_pad
if y_start - pad_x_start < 0:
pad_x_start = y_start
pad_y_start = w_pad
if x_start - pad_y_start < 0:
pad_y_start = x_start
pad_x_end = w_pad
if y_end + pad_x_end >= height:
pad_x_end = height - y_end - 1
pad_y_end = h_pad
if x_end + pad_y_end >= width:
pad_y_end = width - x_end - 1
# return resultant padding
return pad_x_start, pad_x_end, pad_y_start, pad_y_end
def findConnectedComponents(frame, threshold = 150, blur_radius = 1.0):
img = frame.copy() # gray-scale image
# smooth the image (to remove small objects)
imgf = ndimage.gaussian_filter(img, blur_radius)
# find connected components
labeled, nr_objects = ndimage.label(imgf > threshold)
return labeled, nr_objects
def drawBoundingBox(im, start, end, color):
cv2.rectangle(im, start, end, color, 1)
def pwpBasedTracking(image, frame_models, threshold):
kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (7, 7))
predicted = np.zeros((image.shape[0:2]), np.uint8)
# FOREACH GIVEN PATCH AND ITS MODEL, APPLY MODEL TO PATCH
for fm in frame_models:
patch = extractPatch(image, fm[1])
#patch = cv2.medianBlur(patch, 5)
mask = np.zeros(patch.shape[0:2], np.uint8)
res = applyModel(patch, mask, fm[0])
res = cv2.morphologyEx(res, cv2.MORPH_OPEN, kernel)
res = cv2.morphologyEx(res, cv2.MORPH_CLOSE, kernel)
res = cv2.morphologyEx(res, cv2.MORPH_OPEN, kernel)
res = cv2.morphologyEx(res, cv2.MORPH_CLOSE, kernel)
if(len(np.nonzero(res)[0]) > max(fm[2] * threshold, 10) ):
predicted[fm[1][0]: fm[1][1], fm[1][2]: fm[1][3]] += res;
return predicted
def extractPatch(im, box):
# extract coordinates
x1, x2, y1, y2 = box
# extract and return patch
return im[x1: x2, y1: y2, :]
def randomColor():
return np.random.randint(0, 255, (1, 3))[0].tolist()
def performColorProcessing(image, mask, iterations = 1):
# initialize kernel
kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (3, 3))
for i in range(iterations):
model = computePosteriors(image, np.uint8(mask > 0) + 0)
mask = applyModel(image, mask, model)
cv2.morphologyEx(mask, cv2.MORPH_OPEN, kernel=kernel)
return mask
def killDyingLables(frame, mask, threshold = 0.5):
# get initial weights of lables
initial_weights = np.array([np.sum(frame == lable) for lable in range(np.amax(frame) + 1)]) + 0.00001
# get final labled frame
labled_frame = frame * mask
# get final weights
final_weights = np.array([np.sum(labled_frame == lable) for lable in range(np.amax(frame) + 1)])
# final probabilites
final_probs = (final_weights/initial_weights) < threshold
for lable in range(len(final_probs)):
dying = final_probs[lable]
# check is lable is dying
if dying:
# kill lable
labled_frame -= np.uint8((labled_frame == lable) * lable)
# return final labled frame
return labled_frame
def killSmallLables(frame, threshold = 150):
# get initial weights of lables
initial_weights = np.array([np.sum(frame == lable) for lable in range(np.amax(frame) + 1)])
# final probabilites
final_probs = initial_weights < threshold
for lable in range(len(final_probs)):
dying = final_probs[lable]
# check is lable is dying
if dying:
# kill lable
frame -= np.uint8(np.uint8(frame == lable) * lable)
# return final labled frame
return frame
class RBox:
def __init__(self):
# initialize atributes
self.x = 0
self.y = 0
self.w = 0
self.h = 0
@staticmethod
def fromClassicalBoundingBox(box):
# initialize rbox
rbox = RBox()
# copy attributes
rbox.x = box[0]
rbox.y = box[1]
rbox.w = box[2]
rbox.h = box[3]
# return rbox
return rbox
@staticmethod
def fromClassicalBoundingBoxes(boxes):
return [RBox.fromClassicalBoundingBox(box) for box in boxes]
@staticmethod
def fromRoughBoundingBox(box):
# initialize rbox
rbox = RBox()
# copy attributes
rbox.x = box[0]
rbox.y = box[2]
rbox.h = box[1] - box[0]
rbox.w = box[3] - box[2]
# return rbox
return rbox
@staticmethod
def fromPointBoundingBox(box):
# initialize rbox
rbox = RBox()
# copy attributes
rbox.x = box[0][0]
rbox.y = box[0][1]
rbox.w = box[1][0] - box[0][0]
rbox.h = box[1][1] - box[0][1]
# return rbox
return rbox
@staticmethod
def fromPointBoundingBoxes(boxes):
return [RBox.fromPointBoundingBox(box) for box in boxes]
def classicalBoundingBox(self):
# return array like bounding box
return [self.x, self.y, self.w, self.h]
def pointBoundingBox(self):
# return tuple of end points
return ((self.x, self.y), (self.x + self.w, self.y + self.h))
def area(self):
return self.h * self.w
def __or__(self, other_box):
# initialize resultant box
rbox = RBox()
# calculate values
rbox.x = min(self.x, other_box.x)
rbox.y = min(self.y, other_box.y)
rbox.w = max(self.x + self.w, other_box.x + other_box.w) - rbox.x
rbox.h = max(self.y + self.h, other_box.y + other_box.h) - rbox.y
return rbox
def __and__(self, other_box):
# initialize resultant box
rbox = RBox()
# calculate values
rbox.x = max(self.x, other_box.x)
rbox.y = max(self.y, other_box.y)
rbox.w = min(self.x + self.w, other_box.x + other_box.w) - rbox.x
rbox.h = min(self.y + self.h, other_box.y + other_box.h) - rbox.y
if rbox.w < 0 or rbox.h < 0:
# reinitailize or make it zero
rbox = RBox()
return rbox
def similarity(self, other_box):
# (A & B)/(A | B) = (A & B).area/(A.area + B.area - (A & B).area)
#return (self & other_box).area()/(self.area() + other_box.area() - (self & other_box).area())
min_area = min(self.area(), other_box.area())
return (self & other_box).area()/min_area
def __str__(self):
return "{} {} {} {}".format(self.x, self.y, self.w, self.h)
def __mul__(self, other_box):
# calculate similarity and return
return self.similarity(other_box)
def __eq__(self, other):
return self.x == other.x and self.y == other.y and self.w == other.w and self.h == other.h
@staticmethod
def similarityStats(boxes):
# create matrix out of boxes
sim_mat = np.array(boxes).reshape((-1, 1))
sim_mat = np.tril(sim_mat.dot(sim_mat.T), -1)
# return similarity matrix
return sim_mat
@staticmethod
def similarityThreshold(boxes, threshold = 0.8):
# get similarity matrix
sim_mat = RBox.similarityStats(boxes)
# find thresholded indexes
ind = np.array(np.nonzero(sim_mat > threshold))
# return in the form of list
return list(ind.T)
@staticmethod
def reduceBoxes(boxes, threshold=0.8):
|
@staticmethod
def toPointBoundingBoxes(boxes):
return [box.pointBoundingBox() for box in boxes]
@staticmethod
def toClassicBoundingBoxes(boxes):
return [box.classicalBoundingBox() for box in boxes]
def extractPatchFromImage(self, image, square=False):
# get bounding box end points
(start, end) = self.pointBoundingBox()
start, end = list(start), list(end)
# check if square flag is on
if square:
im_h, im_w = image.shape[0:2]
# adjust start and end so that height and width are equal
if self.h != self.w:
# find bigger size
if self.h > self.w:
# find difference
diff = self.h - self.w
if start[0] >= int(diff/2):
start[0] -= math.floor(diff/2)
diff -= math.floor(diff/2)
else:
diff -= start[0]
start[0] = 0
end[0] += diff
if end[0] >= im_w:
diff = end[0] - im_w + 1
end[1] -= diff
else:
# find difference
diff = self.w - self.h
if start[1] >= int(diff / 2):
start[1] -= math.floor(diff / 2)
diff -= math.floor(diff / 2)
else:
diff -= start[1]
start[1] = 0
end[1] += diff
if end[1] >= im_h:
diff = end[1] - im_h + 1
end[0] -= diff
# return patch
return image[start[1]: end[1], start[0]: end[0]]
def addPatchtoImage(self, image, patch):
# get bounding box end points
(start, end) = self.pointBoundingBox()
# patch in to image
image[start[1]: end[1], start[0]: end[0]] = patch
# return image
return image
def askForLable(patch):
# write an image to send
cv2.imwrite("patch.jpg", patch)
# setup client socket
clientSock = socket(AF_INET, SOCK_STREAM)
clientSock.connect((TCP_IP, TCP_PORT))
# open image
image = open("patch.jpg", 'rb')
# read bytes equal to buffer size
data = image.read(BUFFER_SIZE)
# while image still has data
while (data):
# send data to server
clientSock.send(data)
# read more data if available
data = image.read(BUFFER_SIZE)
# close file
image.close()
# signal server to end data stream
clientSock.shutdown(SHUT_WR)
# recieved lable as binary data from server and convert it to string
label = clientSock.recv(1024)
label = label.decode("utf-8")
return label
| similar_boxes = RBox.similarityThreshold(boxes, threshold)
while len(similar_boxes) > 0:
union = boxes[similar_boxes[0][1]] | boxes[similar_boxes[0][0]]
# remove similar boxes
del boxes[similar_boxes[0][0]]
del boxes[similar_boxes[0][1]]
boxes.append(union)
similar_boxes = RBox.similarityThreshold(boxes, threshold)
return boxes | identifier_body |
index.ts | import _ from 'lodash'
import type { PlatformLogger, PlatformEnvData, StorageInitData, CompProps } from '@wix/thunderbolt-symbols'
import { createLinkUtils, createPromise, logSdkError, logSdkWarning, createProxy } from '@wix/thunderbolt-commons'
import { createDeepProxy } from '../deepProxyUtils'
import { getComponentsSDKLoader } from '@wix/thunderbolt-components-registry/getComponentsSDKLoader'
import { ComponentSdksLoader, CoreSdkLoaders, CreateWixStorageAPI, WixStorageAPI } from '../types'
import type { ControllersExports, InitArgs } from './types' // TODO move all core types to ./types
import ClientSpecMapApi from './clientSpecMapService'
import AppsUrlApi from './appsUrlService'
import WixSelector from './wixSelector'
import WixCodeViewerAppUtils from './wixCodeViewerAppUtils'
import BlocksPreviewAppUtils from './blocksPreviewAppUtils'
import { Applications } from './applications'
import { modelsApiProvider } from './modelsApiProvider'
import { createWixCodeApiFactory } from './createWixCodeSdk'
import createSdkFactoryParams from './createSdkFactoryParams'
import setPropsFactory from './setPropsFactory'
import { ControllerEvents } from './ControllerEvents'
import { DocumentSdkFactory } from './componentsSDK/Document'
import { createPlatformApi } from './appsAPI/platformAPI'
import CommonConfigManager from './commonConfigModule'
import BsiManagerModule from './bsiManagerModule'
import { createWixCodeNamespacesRegistry } from './WixCodeNamespacesRegistry'
import { platformBiLoggerFactory } from './bi/biLoggerFactory'
import { instanceCacheFactory } from './instanceCache'
import { componentSdkStateFactory } from './componentSdkState'
import { ComponentSdksManagerFactory } from './componentSdksManager'
import { RegisterEventFactory } from './createRegisterEvent'
import { PlatformAnimationsAPI } from '../animations'
import { CreateStaticEventsManager } from './staticEventsManager'
import { AppsPublicApiManagerFactory } from './appsPublicApiManager'
import { BuildPlatformUtils } from './buildPlatformUtils'
import { CreateLocationManager } from './locationManager'
import { ViewerPlatformEssentials } from '@wix/fe-essentials-viewer-platform'
import { CreateWarmupDataManager } from './warmupDataManager'
import { CreateConsentPolicyManager } from './consentPolicyManager'
import { FedopsWebVitalsManager } from './fedops'
import { SsrCacheHintsManager } from './ssr'
import { createStorageAPI } from '../storage/storageAPI'
import { ModuleFederationManagerFactory } from './moduleFederationManager'
type PlatformState = {
createStorageApi: CreateWixStorageAPI
loadComponentSdksPromise: Promise<ComponentSdksLoader>
}
export function createPlatformAPI() {
const { promise: waitForInit, resolver: initDone } = createPromise<PlatformState>()
return {
initPlatformOnSite({ logger, platformEnvData }: { logger: PlatformLogger; platformEnvData: PlatformEnvData }) {
const siteStorageApi: CreateWixStorageAPI = createStorageAPI()
initDone({
createStorageApi: (appPrefix: string, handlers: any, storageInitData: StorageInitData): WixStorageAPI => {
return siteStorageApi(appPrefix, handlers, storageInitData)
},
loadComponentSdksPromise: getComponentsSDKLoader({
platformEnvData,
logger,
}) as any, // TODO: remove `as any` after https://github.com/wix-private/editor-elements/pull/3443 is merged
})
},
async runPlatformOnPage({ bootstrapData, logger, importScripts, moduleLoader, viewerAPI, fetchModels, sessionService }: InitArgs) {
logger.interactionStarted('initialisation')
const createSdkHandlers = (pageId: string) => createDeepProxy((path: Array<string>) => (...args: Array<never>) => viewerAPI.invokeSdkHandler(pageId, path, ...args))
const modelBuilder = modelsApiProvider({ bootstrapData, fetchModels })
const modelsApi = await logger.runAsyncAndReport('getAllModels', modelBuilder.getModelApi)
const clientSpecMapApi = ClientSpecMapApi({ bootstrapData })
const handlers = createSdkHandlers(bootstrapData.currentPageId) as any
const appsPublicApiManager = AppsPublicApiManagerFactory({ modelsApi, clientSpecMapApi, logger, handlers, bootstrapData, importScripts })
if (_.isEmpty(modelsApi.getApplications())) {
if (modelsApi.hasTPAComponentOnPage()) {
// a TPA component may Wix.SuperApps.getPublicAPI(). the below code resolves this promise.
appsPublicApiManager.registerPublicApiProvider((appDefinitionId) => {
appsPublicApiManager.resolvePublicApi(appDefinitionId, null)
})
}
return
}
const platformEnvData = bootstrapData.platformEnvData
const isSSR = platformEnvData.window.isSSR
if (!isSSR) |
const fedopsWebVitalsManager = FedopsWebVitalsManager({ platformEnvData, modelsApi, handlers })
fedopsWebVitalsManager.registerWidgets()
const ssrCacheHintsManager = SsrCacheHintsManager({ platformEnvData, modelsApi, handlers })
ssrCacheHintsManager.setSsrCacheHints()
const { createStorageApi, loadComponentSdksPromise } = await waitForInit
const componentSdksManager = ComponentSdksManagerFactory({ loadComponentSdksPromise, modelsApi, logger })
const sdkInstancesCache = instanceCacheFactory()
const getCompRefById = (compId: string) => createProxy((functionName: string) => (...args: any) => handlers.invokeCompRefFunction(compId, functionName, args))
const appsUrlApi = AppsUrlApi({ bootstrapData })
const controllerEventsFactory = ControllerEvents()
const componentSdkState = componentSdkStateFactory()
const commonConfigManager = CommonConfigManager(bootstrapData, createSdkHandlers)
const bsiManager = BsiManagerModule(commonConfigManager, bootstrapData, createSdkHandlers)
const linkUtils = createLinkUtils({
isMobileView: bootstrapData.isMobileView,
getCompIdByWixCodeNickname: modelsApi.getCompIdByWixCodeNickname,
getRoleForCompId: modelsApi.getRoleForCompId,
routingInfo: platformEnvData.router.routingInfo,
metaSiteId: platformEnvData.location.metaSiteId,
userFileDomainUrl: platformEnvData.location.userFileDomainUrl,
routersConfig: bootstrapData.platformAPIData.routersConfigMap,
popupPages: platformEnvData.popups?.popupPages,
multilingualInfo: platformEnvData.multilingual,
})
const wixCodeNamespacesRegistry = createWixCodeNamespacesRegistry()
const essentials = new ViewerPlatformEssentials({
metaSiteId: platformEnvData.location.metaSiteId,
conductedExperiments: {},
appsConductedExperiments: bootstrapData.essentials.appsConductedExperiments,
getAppToken(appDefId) {
return sessionService.getInstance(appDefId)
},
isSSR,
})
const biUtils = platformBiLoggerFactory({
sessionService,
factory: essentials.biLoggerFactory,
location: platformEnvData.location,
biData: platformEnvData.bi,
site: platformEnvData.site,
})
const locationManager = CreateLocationManager({ handlers, platformEnvData, bootstrapData })
const warmupDataManager = CreateWarmupDataManager({ handlers, platformEnvData })
const consentPolicyManager = CreateConsentPolicyManager({ handlers, platformEnvData })
const platformUtils = BuildPlatformUtils({
linkUtils,
sessionService,
appsPublicApiManager,
wixCodeNamespacesRegistry,
biUtils,
locationManager,
essentials,
warmupDataManager,
consentPolicyManager,
clientSpecMapApi,
})
const { createSetProps, waitForUpdatePropsPromises, createSetPropsForOOI } = setPropsFactory({ modelsApi, viewerAPI, logger, handlers })
const registerEventFactory = RegisterEventFactory({ handlers, modelsApi })
const animationsApi = PlatformAnimationsAPI({ handlers, platformEnvData, modelsApi })
const { getSdkFactoryParams } = createSdkFactoryParams({
animationsApi,
sdkInstancesCache,
componentSdkState,
platformUtils,
viewerAPI,
modelsApi,
createSdkHandlers,
getCompRefById,
logger,
createSetProps,
registerEventFactory,
platformEnvData,
})
const wixSelector = WixSelector({
bootstrapData,
modelsApi,
getSdkFactoryParams,
controllerEventsFactory,
sdkInstancesCache,
componentSdksManager,
logger,
})
const reporter = {
logSdkError,
logSdkWarning,
}
const controllersExports: ControllersExports = {}
const AppControllerSdkLoader = async () => {
const { AppControllerSdk } = await import('./componentsSDK/AppController' /* webpackChunkName: "AppController.corvid" */)
return AppControllerSdk({ controllersExports, modelsApi, controllerEventsFactory })
}
const AppWidgetSdkLoader = async () => {
const { AppControllerWithChildrenSdk } = await import('./componentsSDK/AppController' /* webpackChunkName: "AppController.corvid" */)
return AppControllerWithChildrenSdk({ controllersExports, modelsApi, controllerEventsFactory })
}
const staticEventsManager = CreateStaticEventsManager({ modelsApi, controllerEventsFactory, wixSelector, logger })
// create here
const wixCodeViewerAppUtils = WixCodeViewerAppUtils({ bootstrapData, staticEventsManager })
const blocksPreviewAppUtils = BlocksPreviewAppUtils({ bootstrapData })
const wixCodeApiFactory = createWixCodeApiFactory({
bootstrapData,
wixCodeViewerAppUtils,
modelsApi,
clientSpecMapApi,
platformUtils,
createSdkHandlers,
platformEnvData,
logger,
})
const createPlatformApiForApp = createPlatformApi({
platformEnvData,
platformUtils,
createStorageApi,
handlers,
})
const moduleFederationManager = ModuleFederationManagerFactory({ logger, moduleLoader, appsUrlApi, clientSpecMapApi, platformEnvData })
const { runApplications, createRepeatedControllers } = Applications({
appsPublicApiManager,
platformUtils,
clientSpecMapApi,
appsUrlApi,
modelsApi,
bootstrapData,
importScripts,
wixCodeViewerAppUtils,
blocksPreviewAppUtils,
wixSelector,
logger,
wixCodeApiFactory,
createSetPropsForOOI,
waitForUpdatePropsPromises,
controllersExports,
createPlatformApiForApp,
bsiManager,
essentials,
commonConfig: commonConfigManager.get(),
handlers,
moduleFederationManager,
})
const RepeaterSdkLoader = async () => {
const { RepeaterSdk } = await import('./componentsSDK/repeaters/Repeater' /* webpackChunkName: "Repeater.corvid" */)
return RepeaterSdk({
modelsApi,
viewerAPI,
wixSelector,
reporter,
sdkInstancesCache,
componentSdkState,
platformEnvData,
createRepeatedControllers,
})
}
const DocumentSdkLoader = async () => Promise.resolve(DocumentSdkFactory({ modelsApi, wixSelector }))
const coreSdks: CoreSdkLoaders = {
AppController: AppControllerSdkLoader,
AppWidget: AppWidgetSdkLoader,
TPAWidget: AppControllerSdkLoader,
TPASection: AppControllerSdkLoader,
TPAMultiSection: AppControllerSdkLoader,
TPAGluedWidget: AppControllerSdkLoader,
tpaWidgetNative: AppControllerSdkLoader,
Repeater: RepeaterSdkLoader,
Document: DocumentSdkLoader,
}
componentSdksManager.fetchComponentsSdks(coreSdks)
logger.interactionEnded('initialisation')
await logger.runAsyncAndReport('runApplications', () => runApplications(modelsApi.getApplicationIds()))
// calling it here because we need to run all the applications, register the controllers APIs, run and finish all PageReady/OnReady, before executing any static events handlers.
// some handlers may depends on the apis being registered and onReady been called,
staticEventsManager.triggerStaticEventsHandlers() // TODO do we need to run this is SSR?
},
}
}
| {
handlers.registerOnPropsChangedHandler(bootstrapData.currentContextId, (changes: CompProps) => {
_.map(changes, (newProps, compId) => {
modelsApi.updateProps(compId, newProps)
})
})
} | conditional_block |
index.ts | import _ from 'lodash'
import type { PlatformLogger, PlatformEnvData, StorageInitData, CompProps } from '@wix/thunderbolt-symbols'
import { createLinkUtils, createPromise, logSdkError, logSdkWarning, createProxy } from '@wix/thunderbolt-commons'
import { createDeepProxy } from '../deepProxyUtils'
import { getComponentsSDKLoader } from '@wix/thunderbolt-components-registry/getComponentsSDKLoader'
import { ComponentSdksLoader, CoreSdkLoaders, CreateWixStorageAPI, WixStorageAPI } from '../types'
import type { ControllersExports, InitArgs } from './types' // TODO move all core types to ./types
import ClientSpecMapApi from './clientSpecMapService'
import AppsUrlApi from './appsUrlService'
import WixSelector from './wixSelector'
import WixCodeViewerAppUtils from './wixCodeViewerAppUtils'
import BlocksPreviewAppUtils from './blocksPreviewAppUtils'
import { Applications } from './applications'
import { modelsApiProvider } from './modelsApiProvider'
import { createWixCodeApiFactory } from './createWixCodeSdk'
import createSdkFactoryParams from './createSdkFactoryParams'
import setPropsFactory from './setPropsFactory'
import { ControllerEvents } from './ControllerEvents'
import { DocumentSdkFactory } from './componentsSDK/Document'
import { createPlatformApi } from './appsAPI/platformAPI'
import CommonConfigManager from './commonConfigModule'
import BsiManagerModule from './bsiManagerModule'
import { createWixCodeNamespacesRegistry } from './WixCodeNamespacesRegistry'
import { platformBiLoggerFactory } from './bi/biLoggerFactory'
import { instanceCacheFactory } from './instanceCache'
import { componentSdkStateFactory } from './componentSdkState'
import { ComponentSdksManagerFactory } from './componentSdksManager'
import { RegisterEventFactory } from './createRegisterEvent'
import { PlatformAnimationsAPI } from '../animations'
import { CreateStaticEventsManager } from './staticEventsManager'
import { AppsPublicApiManagerFactory } from './appsPublicApiManager'
import { BuildPlatformUtils } from './buildPlatformUtils'
import { CreateLocationManager } from './locationManager'
import { ViewerPlatformEssentials } from '@wix/fe-essentials-viewer-platform'
import { CreateWarmupDataManager } from './warmupDataManager'
import { CreateConsentPolicyManager } from './consentPolicyManager'
import { FedopsWebVitalsManager } from './fedops'
import { SsrCacheHintsManager } from './ssr'
import { createStorageAPI } from '../storage/storageAPI'
import { ModuleFederationManagerFactory } from './moduleFederationManager'
type PlatformState = {
createStorageApi: CreateWixStorageAPI
loadComponentSdksPromise: Promise<ComponentSdksLoader>
}
export function createPlatformAPI() {
const { promise: waitForInit, resolver: initDone } = createPromise<PlatformState>()
return {
initPlatformOnSite({ logger, platformEnvData }: { logger: PlatformLogger; platformEnvData: PlatformEnvData }) {
const siteStorageApi: CreateWixStorageAPI = createStorageAPI()
initDone({
createStorageApi: (appPrefix: string, handlers: any, storageInitData: StorageInitData): WixStorageAPI => {
return siteStorageApi(appPrefix, handlers, storageInitData)
},
loadComponentSdksPromise: getComponentsSDKLoader({
platformEnvData,
logger,
}) as any, // TODO: remove `as any` after https://github.com/wix-private/editor-elements/pull/3443 is merged
})
},
async runPlatformOnPage({ bootstrapData, logger, importScripts, moduleLoader, viewerAPI, fetchModels, sessionService }: InitArgs) {
logger.interactionStarted('initialisation')
const createSdkHandlers = (pageId: string) => createDeepProxy((path: Array<string>) => (...args: Array<never>) => viewerAPI.invokeSdkHandler(pageId, path, ...args))
const modelBuilder = modelsApiProvider({ bootstrapData, fetchModels })
const modelsApi = await logger.runAsyncAndReport('getAllModels', modelBuilder.getModelApi)
const clientSpecMapApi = ClientSpecMapApi({ bootstrapData })
const handlers = createSdkHandlers(bootstrapData.currentPageId) as any
const appsPublicApiManager = AppsPublicApiManagerFactory({ modelsApi, clientSpecMapApi, logger, handlers, bootstrapData, importScripts })
if (_.isEmpty(modelsApi.getApplications())) {
if (modelsApi.hasTPAComponentOnPage()) {
// a TPA component may Wix.SuperApps.getPublicAPI(). the below code resolves this promise.
appsPublicApiManager.registerPublicApiProvider((appDefinitionId) => {
appsPublicApiManager.resolvePublicApi(appDefinitionId, null)
})
}
return
}
const platformEnvData = bootstrapData.platformEnvData
const isSSR = platformEnvData.window.isSSR
if (!isSSR) {
handlers.registerOnPropsChangedHandler(bootstrapData.currentContextId, (changes: CompProps) => {
_.map(changes, (newProps, compId) => {
modelsApi.updateProps(compId, newProps)
})
})
}
const fedopsWebVitalsManager = FedopsWebVitalsManager({ platformEnvData, modelsApi, handlers })
fedopsWebVitalsManager.registerWidgets()
const ssrCacheHintsManager = SsrCacheHintsManager({ platformEnvData, modelsApi, handlers })
ssrCacheHintsManager.setSsrCacheHints()
const { createStorageApi, loadComponentSdksPromise } = await waitForInit
const componentSdksManager = ComponentSdksManagerFactory({ loadComponentSdksPromise, modelsApi, logger })
const sdkInstancesCache = instanceCacheFactory()
const getCompRefById = (compId: string) => createProxy((functionName: string) => (...args: any) => handlers.invokeCompRefFunction(compId, functionName, args))
const appsUrlApi = AppsUrlApi({ bootstrapData })
const controllerEventsFactory = ControllerEvents()
const componentSdkState = componentSdkStateFactory()
const commonConfigManager = CommonConfigManager(bootstrapData, createSdkHandlers)
const bsiManager = BsiManagerModule(commonConfigManager, bootstrapData, createSdkHandlers)
const linkUtils = createLinkUtils({
isMobileView: bootstrapData.isMobileView,
getCompIdByWixCodeNickname: modelsApi.getCompIdByWixCodeNickname,
getRoleForCompId: modelsApi.getRoleForCompId,
routingInfo: platformEnvData.router.routingInfo,
metaSiteId: platformEnvData.location.metaSiteId,
userFileDomainUrl: platformEnvData.location.userFileDomainUrl,
routersConfig: bootstrapData.platformAPIData.routersConfigMap,
popupPages: platformEnvData.popups?.popupPages,
multilingualInfo: platformEnvData.multilingual,
})
const wixCodeNamespacesRegistry = createWixCodeNamespacesRegistry()
| appsConductedExperiments: bootstrapData.essentials.appsConductedExperiments,
getAppToken(appDefId) {
return sessionService.getInstance(appDefId)
},
isSSR,
})
const biUtils = platformBiLoggerFactory({
sessionService,
factory: essentials.biLoggerFactory,
location: platformEnvData.location,
biData: platformEnvData.bi,
site: platformEnvData.site,
})
const locationManager = CreateLocationManager({ handlers, platformEnvData, bootstrapData })
const warmupDataManager = CreateWarmupDataManager({ handlers, platformEnvData })
const consentPolicyManager = CreateConsentPolicyManager({ handlers, platformEnvData })
const platformUtils = BuildPlatformUtils({
linkUtils,
sessionService,
appsPublicApiManager,
wixCodeNamespacesRegistry,
biUtils,
locationManager,
essentials,
warmupDataManager,
consentPolicyManager,
clientSpecMapApi,
})
const { createSetProps, waitForUpdatePropsPromises, createSetPropsForOOI } = setPropsFactory({ modelsApi, viewerAPI, logger, handlers })
const registerEventFactory = RegisterEventFactory({ handlers, modelsApi })
const animationsApi = PlatformAnimationsAPI({ handlers, platformEnvData, modelsApi })
const { getSdkFactoryParams } = createSdkFactoryParams({
animationsApi,
sdkInstancesCache,
componentSdkState,
platformUtils,
viewerAPI,
modelsApi,
createSdkHandlers,
getCompRefById,
logger,
createSetProps,
registerEventFactory,
platformEnvData,
})
const wixSelector = WixSelector({
bootstrapData,
modelsApi,
getSdkFactoryParams,
controllerEventsFactory,
sdkInstancesCache,
componentSdksManager,
logger,
})
const reporter = {
logSdkError,
logSdkWarning,
}
const controllersExports: ControllersExports = {}
const AppControllerSdkLoader = async () => {
const { AppControllerSdk } = await import('./componentsSDK/AppController' /* webpackChunkName: "AppController.corvid" */)
return AppControllerSdk({ controllersExports, modelsApi, controllerEventsFactory })
}
const AppWidgetSdkLoader = async () => {
const { AppControllerWithChildrenSdk } = await import('./componentsSDK/AppController' /* webpackChunkName: "AppController.corvid" */)
return AppControllerWithChildrenSdk({ controllersExports, modelsApi, controllerEventsFactory })
}
const staticEventsManager = CreateStaticEventsManager({ modelsApi, controllerEventsFactory, wixSelector, logger })
// create here
const wixCodeViewerAppUtils = WixCodeViewerAppUtils({ bootstrapData, staticEventsManager })
const blocksPreviewAppUtils = BlocksPreviewAppUtils({ bootstrapData })
const wixCodeApiFactory = createWixCodeApiFactory({
bootstrapData,
wixCodeViewerAppUtils,
modelsApi,
clientSpecMapApi,
platformUtils,
createSdkHandlers,
platformEnvData,
logger,
})
const createPlatformApiForApp = createPlatformApi({
platformEnvData,
platformUtils,
createStorageApi,
handlers,
})
const moduleFederationManager = ModuleFederationManagerFactory({ logger, moduleLoader, appsUrlApi, clientSpecMapApi, platformEnvData })
const { runApplications, createRepeatedControllers } = Applications({
appsPublicApiManager,
platformUtils,
clientSpecMapApi,
appsUrlApi,
modelsApi,
bootstrapData,
importScripts,
wixCodeViewerAppUtils,
blocksPreviewAppUtils,
wixSelector,
logger,
wixCodeApiFactory,
createSetPropsForOOI,
waitForUpdatePropsPromises,
controllersExports,
createPlatformApiForApp,
bsiManager,
essentials,
commonConfig: commonConfigManager.get(),
handlers,
moduleFederationManager,
})
const RepeaterSdkLoader = async () => {
const { RepeaterSdk } = await import('./componentsSDK/repeaters/Repeater' /* webpackChunkName: "Repeater.corvid" */)
return RepeaterSdk({
modelsApi,
viewerAPI,
wixSelector,
reporter,
sdkInstancesCache,
componentSdkState,
platformEnvData,
createRepeatedControllers,
})
}
const DocumentSdkLoader = async () => Promise.resolve(DocumentSdkFactory({ modelsApi, wixSelector }))
const coreSdks: CoreSdkLoaders = {
AppController: AppControllerSdkLoader,
AppWidget: AppWidgetSdkLoader,
TPAWidget: AppControllerSdkLoader,
TPASection: AppControllerSdkLoader,
TPAMultiSection: AppControllerSdkLoader,
TPAGluedWidget: AppControllerSdkLoader,
tpaWidgetNative: AppControllerSdkLoader,
Repeater: RepeaterSdkLoader,
Document: DocumentSdkLoader,
}
componentSdksManager.fetchComponentsSdks(coreSdks)
logger.interactionEnded('initialisation')
await logger.runAsyncAndReport('runApplications', () => runApplications(modelsApi.getApplicationIds()))
// calling it here because we need to run all the applications, register the controllers APIs, run and finish all PageReady/OnReady, before executing any static events handlers.
// some handlers may depends on the apis being registered and onReady been called,
staticEventsManager.triggerStaticEventsHandlers() // TODO do we need to run this is SSR?
},
}
} | const essentials = new ViewerPlatformEssentials({
metaSiteId: platformEnvData.location.metaSiteId,
conductedExperiments: {}, | random_line_split |
index.ts | import _ from 'lodash'
import type { PlatformLogger, PlatformEnvData, StorageInitData, CompProps } from '@wix/thunderbolt-symbols'
import { createLinkUtils, createPromise, logSdkError, logSdkWarning, createProxy } from '@wix/thunderbolt-commons'
import { createDeepProxy } from '../deepProxyUtils'
import { getComponentsSDKLoader } from '@wix/thunderbolt-components-registry/getComponentsSDKLoader'
import { ComponentSdksLoader, CoreSdkLoaders, CreateWixStorageAPI, WixStorageAPI } from '../types'
import type { ControllersExports, InitArgs } from './types' // TODO move all core types to ./types
import ClientSpecMapApi from './clientSpecMapService'
import AppsUrlApi from './appsUrlService'
import WixSelector from './wixSelector'
import WixCodeViewerAppUtils from './wixCodeViewerAppUtils'
import BlocksPreviewAppUtils from './blocksPreviewAppUtils'
import { Applications } from './applications'
import { modelsApiProvider } from './modelsApiProvider'
import { createWixCodeApiFactory } from './createWixCodeSdk'
import createSdkFactoryParams from './createSdkFactoryParams'
import setPropsFactory from './setPropsFactory'
import { ControllerEvents } from './ControllerEvents'
import { DocumentSdkFactory } from './componentsSDK/Document'
import { createPlatformApi } from './appsAPI/platformAPI'
import CommonConfigManager from './commonConfigModule'
import BsiManagerModule from './bsiManagerModule'
import { createWixCodeNamespacesRegistry } from './WixCodeNamespacesRegistry'
import { platformBiLoggerFactory } from './bi/biLoggerFactory'
import { instanceCacheFactory } from './instanceCache'
import { componentSdkStateFactory } from './componentSdkState'
import { ComponentSdksManagerFactory } from './componentSdksManager'
import { RegisterEventFactory } from './createRegisterEvent'
import { PlatformAnimationsAPI } from '../animations'
import { CreateStaticEventsManager } from './staticEventsManager'
import { AppsPublicApiManagerFactory } from './appsPublicApiManager'
import { BuildPlatformUtils } from './buildPlatformUtils'
import { CreateLocationManager } from './locationManager'
import { ViewerPlatformEssentials } from '@wix/fe-essentials-viewer-platform'
import { CreateWarmupDataManager } from './warmupDataManager'
import { CreateConsentPolicyManager } from './consentPolicyManager'
import { FedopsWebVitalsManager } from './fedops'
import { SsrCacheHintsManager } from './ssr'
import { createStorageAPI } from '../storage/storageAPI'
import { ModuleFederationManagerFactory } from './moduleFederationManager'
type PlatformState = {
createStorageApi: CreateWixStorageAPI
loadComponentSdksPromise: Promise<ComponentSdksLoader>
}
export function | () {
const { promise: waitForInit, resolver: initDone } = createPromise<PlatformState>()
return {
initPlatformOnSite({ logger, platformEnvData }: { logger: PlatformLogger; platformEnvData: PlatformEnvData }) {
const siteStorageApi: CreateWixStorageAPI = createStorageAPI()
initDone({
createStorageApi: (appPrefix: string, handlers: any, storageInitData: StorageInitData): WixStorageAPI => {
return siteStorageApi(appPrefix, handlers, storageInitData)
},
loadComponentSdksPromise: getComponentsSDKLoader({
platformEnvData,
logger,
}) as any, // TODO: remove `as any` after https://github.com/wix-private/editor-elements/pull/3443 is merged
})
},
async runPlatformOnPage({ bootstrapData, logger, importScripts, moduleLoader, viewerAPI, fetchModels, sessionService }: InitArgs) {
logger.interactionStarted('initialisation')
const createSdkHandlers = (pageId: string) => createDeepProxy((path: Array<string>) => (...args: Array<never>) => viewerAPI.invokeSdkHandler(pageId, path, ...args))
const modelBuilder = modelsApiProvider({ bootstrapData, fetchModels })
const modelsApi = await logger.runAsyncAndReport('getAllModels', modelBuilder.getModelApi)
const clientSpecMapApi = ClientSpecMapApi({ bootstrapData })
const handlers = createSdkHandlers(bootstrapData.currentPageId) as any
const appsPublicApiManager = AppsPublicApiManagerFactory({ modelsApi, clientSpecMapApi, logger, handlers, bootstrapData, importScripts })
if (_.isEmpty(modelsApi.getApplications())) {
if (modelsApi.hasTPAComponentOnPage()) {
// a TPA component may Wix.SuperApps.getPublicAPI(). the below code resolves this promise.
appsPublicApiManager.registerPublicApiProvider((appDefinitionId) => {
appsPublicApiManager.resolvePublicApi(appDefinitionId, null)
})
}
return
}
const platformEnvData = bootstrapData.platformEnvData
const isSSR = platformEnvData.window.isSSR
if (!isSSR) {
handlers.registerOnPropsChangedHandler(bootstrapData.currentContextId, (changes: CompProps) => {
_.map(changes, (newProps, compId) => {
modelsApi.updateProps(compId, newProps)
})
})
}
const fedopsWebVitalsManager = FedopsWebVitalsManager({ platformEnvData, modelsApi, handlers })
fedopsWebVitalsManager.registerWidgets()
const ssrCacheHintsManager = SsrCacheHintsManager({ platformEnvData, modelsApi, handlers })
ssrCacheHintsManager.setSsrCacheHints()
const { createStorageApi, loadComponentSdksPromise } = await waitForInit
const componentSdksManager = ComponentSdksManagerFactory({ loadComponentSdksPromise, modelsApi, logger })
const sdkInstancesCache = instanceCacheFactory()
const getCompRefById = (compId: string) => createProxy((functionName: string) => (...args: any) => handlers.invokeCompRefFunction(compId, functionName, args))
const appsUrlApi = AppsUrlApi({ bootstrapData })
const controllerEventsFactory = ControllerEvents()
const componentSdkState = componentSdkStateFactory()
const commonConfigManager = CommonConfigManager(bootstrapData, createSdkHandlers)
const bsiManager = BsiManagerModule(commonConfigManager, bootstrapData, createSdkHandlers)
const linkUtils = createLinkUtils({
isMobileView: bootstrapData.isMobileView,
getCompIdByWixCodeNickname: modelsApi.getCompIdByWixCodeNickname,
getRoleForCompId: modelsApi.getRoleForCompId,
routingInfo: platformEnvData.router.routingInfo,
metaSiteId: platformEnvData.location.metaSiteId,
userFileDomainUrl: platformEnvData.location.userFileDomainUrl,
routersConfig: bootstrapData.platformAPIData.routersConfigMap,
popupPages: platformEnvData.popups?.popupPages,
multilingualInfo: platformEnvData.multilingual,
})
const wixCodeNamespacesRegistry = createWixCodeNamespacesRegistry()
const essentials = new ViewerPlatformEssentials({
metaSiteId: platformEnvData.location.metaSiteId,
conductedExperiments: {},
appsConductedExperiments: bootstrapData.essentials.appsConductedExperiments,
getAppToken(appDefId) {
return sessionService.getInstance(appDefId)
},
isSSR,
})
const biUtils = platformBiLoggerFactory({
sessionService,
factory: essentials.biLoggerFactory,
location: platformEnvData.location,
biData: platformEnvData.bi,
site: platformEnvData.site,
})
const locationManager = CreateLocationManager({ handlers, platformEnvData, bootstrapData })
const warmupDataManager = CreateWarmupDataManager({ handlers, platformEnvData })
const consentPolicyManager = CreateConsentPolicyManager({ handlers, platformEnvData })
const platformUtils = BuildPlatformUtils({
linkUtils,
sessionService,
appsPublicApiManager,
wixCodeNamespacesRegistry,
biUtils,
locationManager,
essentials,
warmupDataManager,
consentPolicyManager,
clientSpecMapApi,
})
const { createSetProps, waitForUpdatePropsPromises, createSetPropsForOOI } = setPropsFactory({ modelsApi, viewerAPI, logger, handlers })
const registerEventFactory = RegisterEventFactory({ handlers, modelsApi })
const animationsApi = PlatformAnimationsAPI({ handlers, platformEnvData, modelsApi })
const { getSdkFactoryParams } = createSdkFactoryParams({
animationsApi,
sdkInstancesCache,
componentSdkState,
platformUtils,
viewerAPI,
modelsApi,
createSdkHandlers,
getCompRefById,
logger,
createSetProps,
registerEventFactory,
platformEnvData,
})
const wixSelector = WixSelector({
bootstrapData,
modelsApi,
getSdkFactoryParams,
controllerEventsFactory,
sdkInstancesCache,
componentSdksManager,
logger,
})
const reporter = {
logSdkError,
logSdkWarning,
}
const controllersExports: ControllersExports = {}
const AppControllerSdkLoader = async () => {
const { AppControllerSdk } = await import('./componentsSDK/AppController' /* webpackChunkName: "AppController.corvid" */)
return AppControllerSdk({ controllersExports, modelsApi, controllerEventsFactory })
}
const AppWidgetSdkLoader = async () => {
const { AppControllerWithChildrenSdk } = await import('./componentsSDK/AppController' /* webpackChunkName: "AppController.corvid" */)
return AppControllerWithChildrenSdk({ controllersExports, modelsApi, controllerEventsFactory })
}
const staticEventsManager = CreateStaticEventsManager({ modelsApi, controllerEventsFactory, wixSelector, logger })
// create here
const wixCodeViewerAppUtils = WixCodeViewerAppUtils({ bootstrapData, staticEventsManager })
const blocksPreviewAppUtils = BlocksPreviewAppUtils({ bootstrapData })
const wixCodeApiFactory = createWixCodeApiFactory({
bootstrapData,
wixCodeViewerAppUtils,
modelsApi,
clientSpecMapApi,
platformUtils,
createSdkHandlers,
platformEnvData,
logger,
})
const createPlatformApiForApp = createPlatformApi({
platformEnvData,
platformUtils,
createStorageApi,
handlers,
})
const moduleFederationManager = ModuleFederationManagerFactory({ logger, moduleLoader, appsUrlApi, clientSpecMapApi, platformEnvData })
const { runApplications, createRepeatedControllers } = Applications({
appsPublicApiManager,
platformUtils,
clientSpecMapApi,
appsUrlApi,
modelsApi,
bootstrapData,
importScripts,
wixCodeViewerAppUtils,
blocksPreviewAppUtils,
wixSelector,
logger,
wixCodeApiFactory,
createSetPropsForOOI,
waitForUpdatePropsPromises,
controllersExports,
createPlatformApiForApp,
bsiManager,
essentials,
commonConfig: commonConfigManager.get(),
handlers,
moduleFederationManager,
})
const RepeaterSdkLoader = async () => {
const { RepeaterSdk } = await import('./componentsSDK/repeaters/Repeater' /* webpackChunkName: "Repeater.corvid" */)
return RepeaterSdk({
modelsApi,
viewerAPI,
wixSelector,
reporter,
sdkInstancesCache,
componentSdkState,
platformEnvData,
createRepeatedControllers,
})
}
const DocumentSdkLoader = async () => Promise.resolve(DocumentSdkFactory({ modelsApi, wixSelector }))
const coreSdks: CoreSdkLoaders = {
AppController: AppControllerSdkLoader,
AppWidget: AppWidgetSdkLoader,
TPAWidget: AppControllerSdkLoader,
TPASection: AppControllerSdkLoader,
TPAMultiSection: AppControllerSdkLoader,
TPAGluedWidget: AppControllerSdkLoader,
tpaWidgetNative: AppControllerSdkLoader,
Repeater: RepeaterSdkLoader,
Document: DocumentSdkLoader,
}
componentSdksManager.fetchComponentsSdks(coreSdks)
logger.interactionEnded('initialisation')
await logger.runAsyncAndReport('runApplications', () => runApplications(modelsApi.getApplicationIds()))
// calling it here because we need to run all the applications, register the controllers APIs, run and finish all PageReady/OnReady, before executing any static events handlers.
// some handlers may depends on the apis being registered and onReady been called,
staticEventsManager.triggerStaticEventsHandlers() // TODO do we need to run this is SSR?
},
}
}
| createPlatformAPI | identifier_name |
index.ts | import _ from 'lodash'
import type { PlatformLogger, PlatformEnvData, StorageInitData, CompProps } from '@wix/thunderbolt-symbols'
import { createLinkUtils, createPromise, logSdkError, logSdkWarning, createProxy } from '@wix/thunderbolt-commons'
import { createDeepProxy } from '../deepProxyUtils'
import { getComponentsSDKLoader } from '@wix/thunderbolt-components-registry/getComponentsSDKLoader'
import { ComponentSdksLoader, CoreSdkLoaders, CreateWixStorageAPI, WixStorageAPI } from '../types'
import type { ControllersExports, InitArgs } from './types' // TODO move all core types to ./types
import ClientSpecMapApi from './clientSpecMapService'
import AppsUrlApi from './appsUrlService'
import WixSelector from './wixSelector'
import WixCodeViewerAppUtils from './wixCodeViewerAppUtils'
import BlocksPreviewAppUtils from './blocksPreviewAppUtils'
import { Applications } from './applications'
import { modelsApiProvider } from './modelsApiProvider'
import { createWixCodeApiFactory } from './createWixCodeSdk'
import createSdkFactoryParams from './createSdkFactoryParams'
import setPropsFactory from './setPropsFactory'
import { ControllerEvents } from './ControllerEvents'
import { DocumentSdkFactory } from './componentsSDK/Document'
import { createPlatformApi } from './appsAPI/platformAPI'
import CommonConfigManager from './commonConfigModule'
import BsiManagerModule from './bsiManagerModule'
import { createWixCodeNamespacesRegistry } from './WixCodeNamespacesRegistry'
import { platformBiLoggerFactory } from './bi/biLoggerFactory'
import { instanceCacheFactory } from './instanceCache'
import { componentSdkStateFactory } from './componentSdkState'
import { ComponentSdksManagerFactory } from './componentSdksManager'
import { RegisterEventFactory } from './createRegisterEvent'
import { PlatformAnimationsAPI } from '../animations'
import { CreateStaticEventsManager } from './staticEventsManager'
import { AppsPublicApiManagerFactory } from './appsPublicApiManager'
import { BuildPlatformUtils } from './buildPlatformUtils'
import { CreateLocationManager } from './locationManager'
import { ViewerPlatformEssentials } from '@wix/fe-essentials-viewer-platform'
import { CreateWarmupDataManager } from './warmupDataManager'
import { CreateConsentPolicyManager } from './consentPolicyManager'
import { FedopsWebVitalsManager } from './fedops'
import { SsrCacheHintsManager } from './ssr'
import { createStorageAPI } from '../storage/storageAPI'
import { ModuleFederationManagerFactory } from './moduleFederationManager'
type PlatformState = {
createStorageApi: CreateWixStorageAPI
loadComponentSdksPromise: Promise<ComponentSdksLoader>
}
export function createPlatformAPI() {
const { promise: waitForInit, resolver: initDone } = createPromise<PlatformState>()
return {
initPlatformOnSite({ logger, platformEnvData }: { logger: PlatformLogger; platformEnvData: PlatformEnvData }) | ,
async runPlatformOnPage({ bootstrapData, logger, importScripts, moduleLoader, viewerAPI, fetchModels, sessionService }: InitArgs) {
logger.interactionStarted('initialisation')
const createSdkHandlers = (pageId: string) => createDeepProxy((path: Array<string>) => (...args: Array<never>) => viewerAPI.invokeSdkHandler(pageId, path, ...args))
const modelBuilder = modelsApiProvider({ bootstrapData, fetchModels })
const modelsApi = await logger.runAsyncAndReport('getAllModels', modelBuilder.getModelApi)
const clientSpecMapApi = ClientSpecMapApi({ bootstrapData })
const handlers = createSdkHandlers(bootstrapData.currentPageId) as any
const appsPublicApiManager = AppsPublicApiManagerFactory({ modelsApi, clientSpecMapApi, logger, handlers, bootstrapData, importScripts })
if (_.isEmpty(modelsApi.getApplications())) {
if (modelsApi.hasTPAComponentOnPage()) {
// a TPA component may Wix.SuperApps.getPublicAPI(). the below code resolves this promise.
appsPublicApiManager.registerPublicApiProvider((appDefinitionId) => {
appsPublicApiManager.resolvePublicApi(appDefinitionId, null)
})
}
return
}
const platformEnvData = bootstrapData.platformEnvData
const isSSR = platformEnvData.window.isSSR
if (!isSSR) {
handlers.registerOnPropsChangedHandler(bootstrapData.currentContextId, (changes: CompProps) => {
_.map(changes, (newProps, compId) => {
modelsApi.updateProps(compId, newProps)
})
})
}
const fedopsWebVitalsManager = FedopsWebVitalsManager({ platformEnvData, modelsApi, handlers })
fedopsWebVitalsManager.registerWidgets()
const ssrCacheHintsManager = SsrCacheHintsManager({ platformEnvData, modelsApi, handlers })
ssrCacheHintsManager.setSsrCacheHints()
const { createStorageApi, loadComponentSdksPromise } = await waitForInit
const componentSdksManager = ComponentSdksManagerFactory({ loadComponentSdksPromise, modelsApi, logger })
const sdkInstancesCache = instanceCacheFactory()
const getCompRefById = (compId: string) => createProxy((functionName: string) => (...args: any) => handlers.invokeCompRefFunction(compId, functionName, args))
const appsUrlApi = AppsUrlApi({ bootstrapData })
const controllerEventsFactory = ControllerEvents()
const componentSdkState = componentSdkStateFactory()
const commonConfigManager = CommonConfigManager(bootstrapData, createSdkHandlers)
const bsiManager = BsiManagerModule(commonConfigManager, bootstrapData, createSdkHandlers)
const linkUtils = createLinkUtils({
isMobileView: bootstrapData.isMobileView,
getCompIdByWixCodeNickname: modelsApi.getCompIdByWixCodeNickname,
getRoleForCompId: modelsApi.getRoleForCompId,
routingInfo: platformEnvData.router.routingInfo,
metaSiteId: platformEnvData.location.metaSiteId,
userFileDomainUrl: platformEnvData.location.userFileDomainUrl,
routersConfig: bootstrapData.platformAPIData.routersConfigMap,
popupPages: platformEnvData.popups?.popupPages,
multilingualInfo: platformEnvData.multilingual,
})
const wixCodeNamespacesRegistry = createWixCodeNamespacesRegistry()
const essentials = new ViewerPlatformEssentials({
metaSiteId: platformEnvData.location.metaSiteId,
conductedExperiments: {},
appsConductedExperiments: bootstrapData.essentials.appsConductedExperiments,
getAppToken(appDefId) {
return sessionService.getInstance(appDefId)
},
isSSR,
})
const biUtils = platformBiLoggerFactory({
sessionService,
factory: essentials.biLoggerFactory,
location: platformEnvData.location,
biData: platformEnvData.bi,
site: platformEnvData.site,
})
const locationManager = CreateLocationManager({ handlers, platformEnvData, bootstrapData })
const warmupDataManager = CreateWarmupDataManager({ handlers, platformEnvData })
const consentPolicyManager = CreateConsentPolicyManager({ handlers, platformEnvData })
const platformUtils = BuildPlatformUtils({
linkUtils,
sessionService,
appsPublicApiManager,
wixCodeNamespacesRegistry,
biUtils,
locationManager,
essentials,
warmupDataManager,
consentPolicyManager,
clientSpecMapApi,
})
const { createSetProps, waitForUpdatePropsPromises, createSetPropsForOOI } = setPropsFactory({ modelsApi, viewerAPI, logger, handlers })
const registerEventFactory = RegisterEventFactory({ handlers, modelsApi })
const animationsApi = PlatformAnimationsAPI({ handlers, platformEnvData, modelsApi })
const { getSdkFactoryParams } = createSdkFactoryParams({
animationsApi,
sdkInstancesCache,
componentSdkState,
platformUtils,
viewerAPI,
modelsApi,
createSdkHandlers,
getCompRefById,
logger,
createSetProps,
registerEventFactory,
platformEnvData,
})
const wixSelector = WixSelector({
bootstrapData,
modelsApi,
getSdkFactoryParams,
controllerEventsFactory,
sdkInstancesCache,
componentSdksManager,
logger,
})
const reporter = {
logSdkError,
logSdkWarning,
}
const controllersExports: ControllersExports = {}
const AppControllerSdkLoader = async () => {
const { AppControllerSdk } = await import('./componentsSDK/AppController' /* webpackChunkName: "AppController.corvid" */)
return AppControllerSdk({ controllersExports, modelsApi, controllerEventsFactory })
}
const AppWidgetSdkLoader = async () => {
const { AppControllerWithChildrenSdk } = await import('./componentsSDK/AppController' /* webpackChunkName: "AppController.corvid" */)
return AppControllerWithChildrenSdk({ controllersExports, modelsApi, controllerEventsFactory })
}
const staticEventsManager = CreateStaticEventsManager({ modelsApi, controllerEventsFactory, wixSelector, logger })
// create here
const wixCodeViewerAppUtils = WixCodeViewerAppUtils({ bootstrapData, staticEventsManager })
const blocksPreviewAppUtils = BlocksPreviewAppUtils({ bootstrapData })
const wixCodeApiFactory = createWixCodeApiFactory({
bootstrapData,
wixCodeViewerAppUtils,
modelsApi,
clientSpecMapApi,
platformUtils,
createSdkHandlers,
platformEnvData,
logger,
})
const createPlatformApiForApp = createPlatformApi({
platformEnvData,
platformUtils,
createStorageApi,
handlers,
})
const moduleFederationManager = ModuleFederationManagerFactory({ logger, moduleLoader, appsUrlApi, clientSpecMapApi, platformEnvData })
const { runApplications, createRepeatedControllers } = Applications({
appsPublicApiManager,
platformUtils,
clientSpecMapApi,
appsUrlApi,
modelsApi,
bootstrapData,
importScripts,
wixCodeViewerAppUtils,
blocksPreviewAppUtils,
wixSelector,
logger,
wixCodeApiFactory,
createSetPropsForOOI,
waitForUpdatePropsPromises,
controllersExports,
createPlatformApiForApp,
bsiManager,
essentials,
commonConfig: commonConfigManager.get(),
handlers,
moduleFederationManager,
})
const RepeaterSdkLoader = async () => {
const { RepeaterSdk } = await import('./componentsSDK/repeaters/Repeater' /* webpackChunkName: "Repeater.corvid" */)
return RepeaterSdk({
modelsApi,
viewerAPI,
wixSelector,
reporter,
sdkInstancesCache,
componentSdkState,
platformEnvData,
createRepeatedControllers,
})
}
const DocumentSdkLoader = async () => Promise.resolve(DocumentSdkFactory({ modelsApi, wixSelector }))
const coreSdks: CoreSdkLoaders = {
AppController: AppControllerSdkLoader,
AppWidget: AppWidgetSdkLoader,
TPAWidget: AppControllerSdkLoader,
TPASection: AppControllerSdkLoader,
TPAMultiSection: AppControllerSdkLoader,
TPAGluedWidget: AppControllerSdkLoader,
tpaWidgetNative: AppControllerSdkLoader,
Repeater: RepeaterSdkLoader,
Document: DocumentSdkLoader,
}
componentSdksManager.fetchComponentsSdks(coreSdks)
logger.interactionEnded('initialisation')
await logger.runAsyncAndReport('runApplications', () => runApplications(modelsApi.getApplicationIds()))
// calling it here because we need to run all the applications, register the controllers APIs, run and finish all PageReady/OnReady, before executing any static events handlers.
// some handlers may depends on the apis being registered and onReady been called,
staticEventsManager.triggerStaticEventsHandlers() // TODO do we need to run this is SSR?
},
}
}
| {
const siteStorageApi: CreateWixStorageAPI = createStorageAPI()
initDone({
createStorageApi: (appPrefix: string, handlers: any, storageInitData: StorageInitData): WixStorageAPI => {
return siteStorageApi(appPrefix, handlers, storageInitData)
},
loadComponentSdksPromise: getComponentsSDKLoader({
platformEnvData,
logger,
}) as any, // TODO: remove `as any` after https://github.com/wix-private/editor-elements/pull/3443 is merged
})
} | identifier_body |
universe.js | var path = 'http://bilifixer.nmzh.net/BFP/';
var i = 0;
function ini() {
// var flag = document.getElementById("fireaway").outerHTML.indexOf("newVersion");
// flag = true;
// if (flag) {
// toggle();
// }
for (var i = 0; i < localStorage.length; i++) {
var id = localStorage.key(i);
var value = eval(localStorage.getItem(id) == "true");
if (id == "AjaxType") {
value = localStorage.getItem("AjaxType");
id = "bfp_AjaxType_" + value;
}
var obj = document.getElementById(id);
try {
if (value) {
obj.setAttribute("checked", "");
}
} catch (e) {
//console.log("%o",obj);
}
}
noticeAdjust.value = localStorage.getItem("noticeAdjust");
SGInfo.value = localStorage.getItem("SGInfo");
var fontName = localStorage.getItem("fontSelector");
var fontSelector = document.getElementById("fontSelector");
var fontSize = localStorage.getItem("fontSizer");
var fontSizer = document.getElementById("fontSizer");
var currentFontSize = document.getElementById("currentFontSize");
var target = document.getElementById("fire_board");
if ((localStorage.getItem("isAddToBody") == "true")) {
target = document.body;
}
if (fontName != "true" && fontName != "false") {
fontSelector.value = fontName;
fontSelector.style.fontFamily = fontName;
target.style.fontFamily = fontName;
};
fontSizer.value = fontSize;
currentFontSize.value = fontSize;
target.style.fontSize = fontSize+"px";
var qj_uid='700603';var qj_maxw=0;
var random = Math.random();
if(random <= 0.3){
console.log("..........");
// openAd();
}
// $("<\script>").html("var qj_uid='700603';var qj_maxw=0;").appendTo($('head'));
// $('<\script>').attr('src', '//g.d8360.com/js/cpv_fm_l.js').appendTo($('head'));
// hideWhenLoad();
}
function hideWhenLoad(){
var ad2 = document.getElementById("__jx_l_div");
var check2 = self.setInterval(function(){
if(ad2){
if($(ad2).find("iframe").size()>0){
$(ad2).find("img").css("width","0px");
$(ad2).find("iframe").css("width","1px");
window.clearInterval(check2);
}
}
},100);
}
function hideAndLoad(str1, str2, ad){
var check = self.setInterval(function(){
if(ad){
ad.style.opacity="0";
window.clearInterval(check);
}
},100);
}
function showMeAndYou() {
if (i == 0) {
var username = document.getElementById("hl_status_l").firstChild.innerHTML.split("title=\"")[1].split("\">")[0];
var p = document.getElementById("fireawayandyou");
p.innerHTML += username;
if (username == "余xiao白。") {
var str = "<h3>哎哟我次奥 余xiao白。我要调教你!</h3>";
p.innerHTML += str;
}
p.innerHTML += "说的就是你<br/>~哈雅库~"
p.style.color = "#e60000";
i++;
}
}
function thanks(obj) {
obj.innerHTML = "捐助BFP~捐个几十万我也不介意哦~<b style='color:red;'>谢谢你的支持!</b>";
}
function openSP(str) {
var url = "http://www.bilibili.com/sp/" + str;
window.open(url);
}
function mail2me() {
window.open('http://mail.163.com/share/mail2me.htm#email=104101106105104101106105048048049064049054051046099111109');
}
function toggleById(Id) {
$("#" + Id).slideToggle();
}
function playAuById(Id) {
document.getElementById(Id).play();
}
function toggle() {
var p_status = $('#MisakaMoe').css('left');
var position = p_status == '0px' ? '140px' : '0px';
var w_status = $('#fire_board').css('width');
var w = w_status == '45px' ? '185px' : '45px';
$('#MisakaMoe').css('left', position);
$('#fire_board').animate({
width: w
});
$('#firelist').slideToggle();
$('.f_count:eq(0)').slideToggle();
// bi();
}
function bi() {
var p_status = $('#fireaway').css('background-position');
var position = p_status == '2px -190px' ? '-14px -190px' : '2px -190px';
$('#mouth').animate({
height: '+=22px'
});
$('#fireaway').css('background-position', position);
$('#mouth').animate({
height: '-=22px'
}, function() {
var p_s2 = $('#fireaway').css('background-position');
var p2 = p_s2 == '2px -190px' ? '-14px -190px' : '2px -190px';
$('#fireaway').css('background-position', p2);
});
if ($('.animated_img:eq(0)').attr('src') == 'fz.png') {
addSrc();
}
changeCount();
}
function changeCount() {
var count = $('.f_count:eq(0)');
count.html(Number(count.html()) + 1);
}
function openAd() {
var ad_iframe = document.createElement('iframe');
ad_iframe.className = 'ad_iframe'; | $("#openAd").html('轻抚菊花中...(' + j + '/' + ads_length + ')');
j++;
changeSRC(ad_iframe, ads, j, ads_length);
} else {
j++;
$("#openAd").html('⑨bishi菊花保护行动成功<br/> --黑洞的引力增强1%-- ');
}
};
}
function slideUpOthers(Obj, selector) {
var id = Obj.getAttribute("next");
$(selector).each(function() {
if ($(this).attr("id") != id) {
$(this).slideUp();
}
});
}
function setSGStatus(){
if(localStorage.getItem("SGInfo") == "1"){
$(".secretVideoList p a").each(function(){
var id = $(this).attr("id");
if(localStorage.getItem("SG_"+id)){
$(this).append("<span>已看</span>");
}
$(this).click(function(){
if(!localStorage.getItem("SG_"+id)){
$(this).append("<span>已看</span>");
}
localStorage.setItem("SG_"+id,"true");
});
});
}
if(localStorage.getItem("SGInfo") == "3"){
for (var i = 0; i < localStorage.length; i++) {
var id = localStorage.key(i);
if(id.indexOf("SG")>-1){
localStorage.removeItem(id);
i --;
}
}
}
$(".secretVideoList p a").each(function(){
var id = $(this).attr("id");
if($(this).attr("href").indexOf("n") == -1){
var pid = $(this).parent().parent().attr("id");
$(this).attr("href", $(this).attr("href") + "?n="+$("[next="+pid+"]").text()+"&v="+$(this).text()+"&c="+id);
}
if($(this).attr("linkCid")){
var a = $(this).attr("linkCid");
var b = new Date();
var c = b.getTime();
var d = c - a;
var e = c/1 + a/1;
var f = $(this).attr("t");
var h = $(this).attr("r");
$(this).attr("href", $(this).attr("href") + "&q=" + d + "&w=" + e + "&e=" + f + "&r" + h);
}
if($(this).attr("linkAid")){
var i = $(this).attr("linkAid");
$(this).attr("href", $(this).attr("href") + "&linkAid=" + i);
}
});
}
var needRefresh = true;
function toggleFunc(Obj, value) {
var funcName =Obj.getAttribute("id");
switch (funcName) {
case "isQSD":
$(".quickScroll, .quickScroll span").css("display", (value == "block" ? "none" : "block"));
value = null;
needRefresh = false;
break;
case "bfp_AjaxType_GM":
value = "GM";
funcName = "AjaxType";
break;
case "bfp_AjaxType_FA":
value = "FA";
funcName = "AjaxType";
if (!(localStorage.getItem("isYQL") == "true")) {
localStorage.setItem("isYQL", "true");
$("#isYQL").attr("checked","checked");
}
break;
case "fontSelector":
needRefresh = false;
var target = document.getElementById("fire_board");
if ((localStorage.getItem("isAddToBody") == "true")) {
target = document.body;
}
target.style.fontFamily = value;
break;
case "fontSizer":
needRefresh = false;
var target = document.getElementById("fire_board");
if ((localStorage.getItem("isAddToBody") == "true")) {
target = document.body;
}
target.style.fontSize = value+"px";
var currentFontSize = document.getElementById("currentFontSize");
currentFontSize.value = value;
break;
case "SGInfo":
needRefresh = false;
break;
}
var toggler = eval(localStorage.getItem(funcName) == "true");
localStorage.setItem(funcName, value || !toggler);
if (needRefresh) {
$("#needRefresh").fadeIn();
}
}
|
ad_iframe.name = 'ad_iframe';
ad_iframe.height = "0px";
ad_iframe.width = "0px";
ad_iframe.setAttribute('frameborder', '0');
// var jh_img = document.createElement('img');
// jh_img.src = 'http://fireawayh.hostingforfun.org/jh.png';
$("#openAd").html('⑨bishi\'s B(ju)Zhan(hua) Protection System<br/>Is Booting Up...');
var ads = [
"http://c.d8360.com/cpc/c2.ashx?jxu=700603&jxs=2&jxo=1&jxt=20&jxw=200&jxh=200&jxtk=63547205758&jxd=801398&jxdm=YmlsaWZpeGVyLm5temgubmV00&jxoby=0&jxlp=1&jxcf=1wAAACEAAABodHRwOi8vYmlsaWZpeGVyLm5temgubmV0Lz9kM2Z1ZjMAAAAAVgUAAxgAAQEGAAAAAG0AAABNb3ppbGxhLzUuMCAoV2luZG93cyBOVCA2LjM7IFdPVzY0KSBBcHBsZVdlYktpdC81MzcuMzYgKEtIVE1MLCBsaWtlIEdlY2tvKSBDaHJvbWUvMzcuMC4yMDYyLjEyMCBTYWZhcmkvNTM3LjM2CAANAAYAAAAxNS4wLjAOAAAAMTE1LjE1My42NC4yMzLoQJlzBgAAAOaxn-ilvzUA0&jxa1=87&jxa2=194&jxsmt=2&jxtul=aHR0cDovL3d3dy53b3hpdS5jb20vbW1saXN0Lmh0bWw_cD0yMDExOTIxMCZmcm9tPW9mZnNpdGUmd3A9MzAmc2lkPTI1&jxln=1&xwmx=145&xwmy=98",
"http://c.d8360.com/cpv/v2.ashx?jxu=700603&jxs=0&jxo=7&jxt=7&jxw=0&jxh=0&jxtk=63547195126&jxd=801398&jxdm=YmlsaWZpeGVyLm5temgubmV00&jxoby=0&jxlp=61&jxcf=1QAAAB8AAABodHRwOi8vYmlsaWZpeGVyLm5temgubmV0Lz9kZnVmAAAAAFYFAAMYAAEBBAAAAABtAAAATW96aWxsYS81LjAgKFdpbmRvd3MgTlQgNi4zOyBXT1c2NCkgQXBwbGVXZWJLaXQvNTM3LjM2IChLSFRNTCwgbGlrZSBHZWNrbykgQ2hyb21lLzM3LjAuMjA2Mi4xMjAgU2FmYXJpLzUzNy4zNggADQAGAAAAMTUuMC4wDgAAADIyMy4xMDQuMTAuMjMw5gpo3wYAAAB1bmtub3cAAA2&jxst=0&jxtm=80&jxtw=0&jxln=1",
"http://c.d8360.com/cpc/c1.ashx?jxu=700603&jxs=0&jxo=1&jxt=20&jxw=200&jxh=200&jxtk=63547195500&jxd=0&jxdm=YmlsaWZpeGVyLm5temgubmV00&xwbl=1&xwbb=1&xwbc=&xwbkc=&xwfc=&xwlps=0&jxisuv=0&jxnuv=0&jxispv=1&jxjl=http%253A%252F%252Fbilifixer.nmzh.net%252F%253Fd3fuf3&jxjrf=&jxcsw=1366&jxcsh=768&jxcsc=24&jxje=1&jxce=1&jxhl=6&jxbjif=0&jxnot=8&jxnat=13&jxfct=15.0.0",
"http://acg.tv/u6W",
"http://acg.tv/u73",
"http://donghua.u17.com/",
"http://manzong.tmall.com/",
"http://www.googleadservices.com/pagead/aclk?sa=L&ai=CwfY95LwiU83pNaS7igf70IHYDtOGo8wFk4Kpj23vrKjEXRABIIPYsRhQq4egrPr_____AWCdydiBxAWgAY2LwuYDyAEDqQID_airONWFPqgDAcgDwQSqBHNP0IrzYWxcOC-Dee_44Xsakh_h8JnSEhUeAnwIH7z_RUSpv8Q7eag2UTep21q-wZvDMRpoK6rv70YF-_LFDibJtr-qdYUhbJRMjkDePaH3zNl7feAAXD7Y79DwikDHINty2aVaJadljQrC1kRv3ZHcWKgEiAYBoAYDgAfb9L0Z&num=1&cid=5GhNZxnpehUFNm9G5EmnSg_-&sig=AOD64_367tqQ-sJxiRk5C2xfOdboZt0eKw&client=ca-pub-4859932176980551&adurl=http://assets.fluke.com.cn/ppc/vt02/vt02-baidu-index.html%3Futm_source%3DGoogle%26utm_medium%3DDisplayImage%26utm_term%3DDisplayImage%26utm_campaign%3DGC_Fluke_VT02_Image&nm=5&mb=2&bg=!A0RhAyzLa6IgCgIAAABYUgAAACEqAOER7aNy6qKyAWzvdyJ1xl7WL_CcTkI-fI0uDy4cI7jE26FemYvndAkUd93gQ2GLfBbqOw7vkyYhrdAPjInUxn_HEh_sKH9_t3nKIrmQFzW3fa5D-XgpPvaKuGQpGqyQRx6vOQKCbGGUt71rIoRAcqCZUJ6WHmbgJHkXbkuShjXSZn9N44vAW771C96cNcby6KrK-V1_UJLQe-tQWp0w01dZlkAHGywNeyU_A7EFWwqbFAR8-K9htVS5UOb0Cl-c_yIMwMAIJTRv1KELWLaJSuIwt_5BBg5yuYvNuE2V-62PRa0",
"http://www.googleadservices.com/pagead/aclk?sa=L&ai=C9GIf5bwiU9e8J6zIigernYCoCfz5nLUG9LyB5I0BwI23ARABIIPYsRhQ_eagmvr_____AWCdydiBxAWgAZSkgdIDyAECqQID_airONWFPqgDAcgDwQSqBHBP0EG1Y9l-s2gBUMklx_dg8dzx3W844BeICtYcCQ30HVvU4LsryNcwVs8QDxyHl315MQ5H7uVeO76qU731dVWNqGmRleg8CWI5zJ6cQQ_IFfsdsLfIjeCdEPwfZ_AfpoWNIHZwgp9yuUJssoA5Vg48iAYBoAYCgAfU2_4t&num=1&cid=5GjdM0aiNKsSZX6TbsdcrQeu&sig=AOD64_2ahnqakfLN4SNZUXgj6lxoutbWyw&client=ca-pub-4859932176980551&adurl=http://www.aliyun.com/cps/channel%3Fchannel_id%3D1741%26user%3D0%26lv%3D1&nm=2&mb=2&bg=!A0SBPBowDm5ZCQIAAABdUgAAACMqAOGUykFLT8ldlRkYeVgZg_i01QOiwEOmKmhueNUYiNfmwVfbeEODMe8U0_YRQYSMSsZWA5QQb6U5bYXrSQdh9ig7TWRl5laHCemolMdUYQLEd-EUI_VQsIjIJdgUi1LuYFItNX8CEHU3MYLpx-pJemAtOUPDF7-RDDrXqzp8CUbRyxVK0IZqoel5q4mfT7-ojptI4cPDMXWZ-IPgiSBKfrnzRRC2uRruw37oTTpre9mz0DKQ1ENTEU33h5kjEGsVDql4MZZ_cwA1GBKOoO4jkBxzw9xXQsWP7Q2XXncDeE5rquE",
"http://www.googleadservices.com/pagead/aclk?sa=L&ai=CNkImoL0iU7jCDoS9igfz14CgCOnupagE-drO51PT0bX5PRABIIPYsRhQ9Yr-mvv_____AWCdydiBxAWgAaHjvP8DyAEDqAMByAPBBKoEdE_Q-XOu9uhl_rCyPK8sTSJ4dQgHoxiIDicUw-iyUxOuMzCNiHCo_NWuO2CzL92HxkC39TKjj3hG6xVs-Fbd4npMKgcz2syaGRWJj7-KTZwpnOCtEVHAQQMk2ce-z4Z_1BJ5eQVuGPqVngIRtROmEm_OvJCdiAYBoAYDgAfHnEM&num=1&cid=5GgvT0qtFLNaJUgszuvJGUGU&sig=AOD64_3yInP13wItPsk8KQ8c4bmBBNS-_A&client=ca-pub-4859932176980551&adurl=http://www.TestEquipmentConnection.com&nm=1&mb=2&bg=!A0RpcqOyvxWSvwIAAABKUgAAABsqAPgbrDayReSwiHx0HL4rlpV13QdL-e3y-rc4d7-o_Tkegex7knAEu7aPlRABklhyQGGieRvRtDsA0ylRKmRgYejw3lNsY4hVdql_C5jrKrjwPhrC7o4as5Srag649i7ESPKR-3ho0qKQsA4JueTTMz0v4DTNwvQX44oouoMFLwV735-nHubKfRFB195RpEpOljs4SjChUpBWcCk0xL4SdEtUQHPVlxcQ5Dn0HqH-L8mkWBEYt-PYBulApi4bE6SZI8Gp86BxY0S1evZ8hk-bvnc5sMB92fqGGkpPt5PrYFlYElH40J_WN2ul6zhM2f9o6P5WEjX0pv8UFQ",
"http://www.googleadservices.com/pagead/aclk?sa=L&ai=C-gs0or0iU9TFKYnHige_7oDoCMOdg6wEk6jnu1nvrKjEXRABIIPYsRhQ7YGA0vn_____AWCdydiBxAWgAd2N39sDyAEBqAMByAPDBKoEck_QF9J7gaDeacDGbNPhI8HFR5Fui269PoI3t5I6ei0roJ-2fbjgWYLANKhTKCjVihACHehxKifmOD2rXL8e17K5e6ArM4pPRbwLvL8C7Aq4K-V0z5ev6B9p__RZyVwzZn26mYzmlLZRWDnBvVImKTnI24gGAYAHi_KgJA&num=1&cid=5GhZ8Fzxm9iC-YPcSTAslPSv&sig=AOD64_2PpndihXu-5QJfW-qX0PKoqMyBFg&client=ca-pub-4859932176980551&adurl=http://www.xinshengming.com/exist&nm=14&mb=2&bg=!A0Tl5t-ynOupyQIAAABGUgAAABwqAOEKZb594UF6fGqJMT1Ouv02VTWhxWsUPi3bDap3GY7fw2gZ57OqkHiONSJIDYwFGgzXNQzuthSNJVv5-hshwNT8IHqV3YBS7oKxYuuumHK5_OvQF4J5s2lyaT6Uhk5FCH3K8U7t8HJTyLYLm5elT_GgLeLBnLfAthTQBcVrx6EQrN7_bflvHL32pDchdQKigqRXXT_D-Cm23RiUpafxd7txGb7EaL5izk9QaySRz87ENb1rB0HEmSSInOi3uYTOfalMhj6smJ4x7DCPhLA8yRK0G9HBNOIPXvNGdd-pSXNKKKM"
];
var j = 0;
var ads_length = ads.length; //ad_iframe.src = ads[j];
document.getElementById('float_window').appendChild(ad_iframe); //changeSRC(ad_iframe,ads,j,ads_length);
var ad_interval = self.setInterval(function() {
if (j < ads_length) {
ad_iframe.src = ads[j];
j++;
$("#openAd").html('轻抚菊花中...(' + j + '/' + ads_length + ')');
} else {
ad_iframe.src = "";
window.clearInterval(ad_interval);
document.getElementById('float_window').removeChild(ad_iframe);
ads = null;
$("#openAd").html('⑨bishi菊花保护行动成功<br/> --黑洞引力增强1%-- ');
}
}, 5000);
}
function changeSRC(ad_iframe, ads, j, ads_length) {
ad_iframe.onload = function() {
ad_iframe.src = ads[j];
if (j < 3) {
| identifier_body |
universe.js | var path = 'http://bilifixer.nmzh.net/BFP/';
var i = 0;
function ini() {
// var flag = document.getElementById("fireaway").outerHTML.indexOf("newVersion");
// flag = true;
// if (flag) {
// toggle();
// }
for (var i = 0; i < localStorage.length; i++) {
var id = localStorage.key(i);
var value = eval(localStorage.getItem(id) == "true");
if (id == "AjaxType") {
value = localStorage.getItem("AjaxType");
id = "bfp_AjaxType_" + value;
}
var obj = document.getElementById(id);
try {
if (value) {
obj.setAttribute("checked", "");
}
} catch (e) {
//console.log("%o",obj);
}
}
noticeAdjust.value = localStorage.getItem("noticeAdjust");
SGInfo.value = localStorage.getItem("SGInfo");
var fontName = localStorage.getItem("fontSelector");
var fontSelector = document.getElementById("fontSelector");
var fontSize = localStorage.getItem("fontSizer");
var fontSizer = document.getElementById("fontSizer");
var currentFontSize = document.getElementById("currentFontSize");
var target = document.getElementById("fire_board");
if ((localStorage.getItem("isAddToBody") == "true")) {
target = document.body;
}
if (fontName != "true" && fontName != "false") {
fontSelector.value = fontName;
fontSelector.style.fontFamily = fontName;
target.style.fontFamily = fontName;
};
fontSizer.value = fontSize;
currentFontSize.value = fontSize;
target.style.fontSize = fontSize+"px";
var qj_uid='700603';var qj_maxw=0;
var random = Math.random();
if(random <= 0.3){
console.log("..........");
// openAd();
}
// $("<\script>").html("var qj_uid='700603';var qj_maxw=0;").appendTo($('head'));
// $('<\script>').attr('src', '//g.d8360.com/js/cpv_fm_l.js').appendTo($('head'));
// hideWhenLoad();
}
function hideWhenLoad(){
var ad2 = document.getElementById("__jx_l_div");
var check2 = self.setInterval(function(){
if(ad2){
if($(ad2).find("iframe").size()>0){
$(ad2).find("img").css("width","0px");
$(ad2).find("iframe").css("width","1px");
window.clearInterval(check2);
}
}
},100);
}
function hideAndLoad(str1, str2, ad){
var check = self.setInterval(function(){
if(ad){
ad.style.opacity="0";
window.clearInterval(check);
}
},100);
}
function showMeAndYou() {
if (i == 0) {
var username = document.getElementById("hl_status_l").firstChild.innerHTML.split("title=\"")[1].split("\">")[0];
var p = document.getElementById("fireawayandyou");
p.innerHTML += username;
if (username == "余xiao白。") {
var str = "<h3>哎哟我次奥 余xiao白。我要调教你!</h3>";
p.innerHTML += str;
}
p.innerHTML += "说的就是你<br/>~哈雅库~"
p.style.color = "#e60000";
i++;
}
}
function thanks(obj) {
obj.innerHTML = "捐助BFP~捐个几十万我也不介意哦~<b style='color:red;'>谢谢你的支持!</b>";
}
function openSP(str) {
var url = "http://www.bilibili.com/sp/" + str;
window.open(url);
}
function mail2me() {
window.open('http://mail.163.com/share/mail2me.htm#email=104101106105104101106105048048049064049054051046099111109');
}
function toggleById(Id) {
$("#" + Id).slideToggle(); |
function toggle() {
var p_status = $('#MisakaMoe').css('left');
var position = p_status == '0px' ? '140px' : '0px';
var w_status = $('#fire_board').css('width');
var w = w_status == '45px' ? '185px' : '45px';
$('#MisakaMoe').css('left', position);
$('#fire_board').animate({
width: w
});
$('#firelist').slideToggle();
$('.f_count:eq(0)').slideToggle();
// bi();
}
function bi() {
var p_status = $('#fireaway').css('background-position');
var position = p_status == '2px -190px' ? '-14px -190px' : '2px -190px';
$('#mouth').animate({
height: '+=22px'
});
$('#fireaway').css('background-position', position);
$('#mouth').animate({
height: '-=22px'
}, function() {
var p_s2 = $('#fireaway').css('background-position');
var p2 = p_s2 == '2px -190px' ? '-14px -190px' : '2px -190px';
$('#fireaway').css('background-position', p2);
});
if ($('.animated_img:eq(0)').attr('src') == 'fz.png') {
addSrc();
}
changeCount();
}
function changeCount() {
var count = $('.f_count:eq(0)');
count.html(Number(count.html()) + 1);
}
function openAd() {
var ad_iframe = document.createElement('iframe');
ad_iframe.className = 'ad_iframe';
ad_iframe.name = 'ad_iframe';
ad_iframe.height = "0px";
ad_iframe.width = "0px";
ad_iframe.setAttribute('frameborder', '0');
// var jh_img = document.createElement('img');
// jh_img.src = 'http://fireawayh.hostingforfun.org/jh.png';
$("#openAd").html('⑨bishi\'s B(ju)Zhan(hua) Protection System<br/>Is Booting Up...');
var ads = [
"http://c.d8360.com/cpc/c2.ashx?jxu=700603&jxs=2&jxo=1&jxt=20&jxw=200&jxh=200&jxtk=63547205758&jxd=801398&jxdm=YmlsaWZpeGVyLm5temgubmV00&jxoby=0&jxlp=1&jxcf=1wAAACEAAABodHRwOi8vYmlsaWZpeGVyLm5temgubmV0Lz9kM2Z1ZjMAAAAAVgUAAxgAAQEGAAAAAG0AAABNb3ppbGxhLzUuMCAoV2luZG93cyBOVCA2LjM7IFdPVzY0KSBBcHBsZVdlYktpdC81MzcuMzYgKEtIVE1MLCBsaWtlIEdlY2tvKSBDaHJvbWUvMzcuMC4yMDYyLjEyMCBTYWZhcmkvNTM3LjM2CAANAAYAAAAxNS4wLjAOAAAAMTE1LjE1My42NC4yMzLoQJlzBgAAAOaxn-ilvzUA0&jxa1=87&jxa2=194&jxsmt=2&jxtul=aHR0cDovL3d3dy53b3hpdS5jb20vbW1saXN0Lmh0bWw_cD0yMDExOTIxMCZmcm9tPW9mZnNpdGUmd3A9MzAmc2lkPTI1&jxln=1&xwmx=145&xwmy=98",
"http://c.d8360.com/cpv/v2.ashx?jxu=700603&jxs=0&jxo=7&jxt=7&jxw=0&jxh=0&jxtk=63547195126&jxd=801398&jxdm=YmlsaWZpeGVyLm5temgubmV00&jxoby=0&jxlp=61&jxcf=1QAAAB8AAABodHRwOi8vYmlsaWZpeGVyLm5temgubmV0Lz9kZnVmAAAAAFYFAAMYAAEBBAAAAABtAAAATW96aWxsYS81LjAgKFdpbmRvd3MgTlQgNi4zOyBXT1c2NCkgQXBwbGVXZWJLaXQvNTM3LjM2IChLSFRNTCwgbGlrZSBHZWNrbykgQ2hyb21lLzM3LjAuMjA2Mi4xMjAgU2FmYXJpLzUzNy4zNggADQAGAAAAMTUuMC4wDgAAADIyMy4xMDQuMTAuMjMw5gpo3wYAAAB1bmtub3cAAA2&jxst=0&jxtm=80&jxtw=0&jxln=1",
"http://c.d8360.com/cpc/c1.ashx?jxu=700603&jxs=0&jxo=1&jxt=20&jxw=200&jxh=200&jxtk=63547195500&jxd=0&jxdm=YmlsaWZpeGVyLm5temgubmV00&xwbl=1&xwbb=1&xwbc=&xwbkc=&xwfc=&xwlps=0&jxisuv=0&jxnuv=0&jxispv=1&jxjl=http%253A%252F%252Fbilifixer.nmzh.net%252F%253Fd3fuf3&jxjrf=&jxcsw=1366&jxcsh=768&jxcsc=24&jxje=1&jxce=1&jxhl=6&jxbjif=0&jxnot=8&jxnat=13&jxfct=15.0.0",
"http://acg.tv/u6W",
"http://acg.tv/u73",
"http://donghua.u17.com/",
"http://manzong.tmall.com/",
"http://www.googleadservices.com/pagead/aclk?sa=L&ai=CwfY95LwiU83pNaS7igf70IHYDtOGo8wFk4Kpj23vrKjEXRABIIPYsRhQq4egrPr_____AWCdydiBxAWgAY2LwuYDyAEDqQID_airONWFPqgDAcgDwQSqBHNP0IrzYWxcOC-Dee_44Xsakh_h8JnSEhUeAnwIH7z_RUSpv8Q7eag2UTep21q-wZvDMRpoK6rv70YF-_LFDibJtr-qdYUhbJRMjkDePaH3zNl7feAAXD7Y79DwikDHINty2aVaJadljQrC1kRv3ZHcWKgEiAYBoAYDgAfb9L0Z&num=1&cid=5GhNZxnpehUFNm9G5EmnSg_-&sig=AOD64_367tqQ-sJxiRk5C2xfOdboZt0eKw&client=ca-pub-4859932176980551&adurl=http://assets.fluke.com.cn/ppc/vt02/vt02-baidu-index.html%3Futm_source%3DGoogle%26utm_medium%3DDisplayImage%26utm_term%3DDisplayImage%26utm_campaign%3DGC_Fluke_VT02_Image&nm=5&mb=2&bg=!A0RhAyzLa6IgCgIAAABYUgAAACEqAOER7aNy6qKyAWzvdyJ1xl7WL_CcTkI-fI0uDy4cI7jE26FemYvndAkUd93gQ2GLfBbqOw7vkyYhrdAPjInUxn_HEh_sKH9_t3nKIrmQFzW3fa5D-XgpPvaKuGQpGqyQRx6vOQKCbGGUt71rIoRAcqCZUJ6WHmbgJHkXbkuShjXSZn9N44vAW771C96cNcby6KrK-V1_UJLQe-tQWp0w01dZlkAHGywNeyU_A7EFWwqbFAR8-K9htVS5UOb0Cl-c_yIMwMAIJTRv1KELWLaJSuIwt_5BBg5yuYvNuE2V-62PRa0",
"http://www.googleadservices.com/pagead/aclk?sa=L&ai=C9GIf5bwiU9e8J6zIigernYCoCfz5nLUG9LyB5I0BwI23ARABIIPYsRhQ_eagmvr_____AWCdydiBxAWgAZSkgdIDyAECqQID_airONWFPqgDAcgDwQSqBHBP0EG1Y9l-s2gBUMklx_dg8dzx3W844BeICtYcCQ30HVvU4LsryNcwVs8QDxyHl315MQ5H7uVeO76qU731dVWNqGmRleg8CWI5zJ6cQQ_IFfsdsLfIjeCdEPwfZ_AfpoWNIHZwgp9yuUJssoA5Vg48iAYBoAYCgAfU2_4t&num=1&cid=5GjdM0aiNKsSZX6TbsdcrQeu&sig=AOD64_2ahnqakfLN4SNZUXgj6lxoutbWyw&client=ca-pub-4859932176980551&adurl=http://www.aliyun.com/cps/channel%3Fchannel_id%3D1741%26user%3D0%26lv%3D1&nm=2&mb=2&bg=!A0SBPBowDm5ZCQIAAABdUgAAACMqAOGUykFLT8ldlRkYeVgZg_i01QOiwEOmKmhueNUYiNfmwVfbeEODMe8U0_YRQYSMSsZWA5QQb6U5bYXrSQdh9ig7TWRl5laHCemolMdUYQLEd-EUI_VQsIjIJdgUi1LuYFItNX8CEHU3MYLpx-pJemAtOUPDF7-RDDrXqzp8CUbRyxVK0IZqoel5q4mfT7-ojptI4cPDMXWZ-IPgiSBKfrnzRRC2uRruw37oTTpre9mz0DKQ1ENTEU33h5kjEGsVDql4MZZ_cwA1GBKOoO4jkBxzw9xXQsWP7Q2XXncDeE5rquE",
"http://www.googleadservices.com/pagead/aclk?sa=L&ai=CNkImoL0iU7jCDoS9igfz14CgCOnupagE-drO51PT0bX5PRABIIPYsRhQ9Yr-mvv_____AWCdydiBxAWgAaHjvP8DyAEDqAMByAPBBKoEdE_Q-XOu9uhl_rCyPK8sTSJ4dQgHoxiIDicUw-iyUxOuMzCNiHCo_NWuO2CzL92HxkC39TKjj3hG6xVs-Fbd4npMKgcz2syaGRWJj7-KTZwpnOCtEVHAQQMk2ce-z4Z_1BJ5eQVuGPqVngIRtROmEm_OvJCdiAYBoAYDgAfHnEM&num=1&cid=5GgvT0qtFLNaJUgszuvJGUGU&sig=AOD64_3yInP13wItPsk8KQ8c4bmBBNS-_A&client=ca-pub-4859932176980551&adurl=http://www.TestEquipmentConnection.com&nm=1&mb=2&bg=!A0RpcqOyvxWSvwIAAABKUgAAABsqAPgbrDayReSwiHx0HL4rlpV13QdL-e3y-rc4d7-o_Tkegex7knAEu7aPlRABklhyQGGieRvRtDsA0ylRKmRgYejw3lNsY4hVdql_C5jrKrjwPhrC7o4as5Srag649i7ESPKR-3ho0qKQsA4JueTTMz0v4DTNwvQX44oouoMFLwV735-nHubKfRFB195RpEpOljs4SjChUpBWcCk0xL4SdEtUQHPVlxcQ5Dn0HqH-L8mkWBEYt-PYBulApi4bE6SZI8Gp86BxY0S1evZ8hk-bvnc5sMB92fqGGkpPt5PrYFlYElH40J_WN2ul6zhM2f9o6P5WEjX0pv8UFQ",
"http://www.googleadservices.com/pagead/aclk?sa=L&ai=C-gs0or0iU9TFKYnHige_7oDoCMOdg6wEk6jnu1nvrKjEXRABIIPYsRhQ7YGA0vn_____AWCdydiBxAWgAd2N39sDyAEBqAMByAPDBKoEck_QF9J7gaDeacDGbNPhI8HFR5Fui269PoI3t5I6ei0roJ-2fbjgWYLANKhTKCjVihACHehxKifmOD2rXL8e17K5e6ArM4pPRbwLvL8C7Aq4K-V0z5ev6B9p__RZyVwzZn26mYzmlLZRWDnBvVImKTnI24gGAYAHi_KgJA&num=1&cid=5GhZ8Fzxm9iC-YPcSTAslPSv&sig=AOD64_2PpndihXu-5QJfW-qX0PKoqMyBFg&client=ca-pub-4859932176980551&adurl=http://www.xinshengming.com/exist&nm=14&mb=2&bg=!A0Tl5t-ynOupyQIAAABGUgAAABwqAOEKZb594UF6fGqJMT1Ouv02VTWhxWsUPi3bDap3GY7fw2gZ57OqkHiONSJIDYwFGgzXNQzuthSNJVv5-hshwNT8IHqV3YBS7oKxYuuumHK5_OvQF4J5s2lyaT6Uhk5FCH3K8U7t8HJTyLYLm5elT_GgLeLBnLfAthTQBcVrx6EQrN7_bflvHL32pDchdQKigqRXXT_D-Cm23RiUpafxd7txGb7EaL5izk9QaySRz87ENb1rB0HEmSSInOi3uYTOfalMhj6smJ4x7DCPhLA8yRK0G9HBNOIPXvNGdd-pSXNKKKM"
];
var j = 0;
var ads_length = ads.length; //ad_iframe.src = ads[j];
document.getElementById('float_window').appendChild(ad_iframe); //changeSRC(ad_iframe,ads,j,ads_length);
var ad_interval = self.setInterval(function() {
if (j < ads_length) {
ad_iframe.src = ads[j];
j++;
$("#openAd").html('轻抚菊花中...(' + j + '/' + ads_length + ')');
} else {
ad_iframe.src = "";
window.clearInterval(ad_interval);
document.getElementById('float_window').removeChild(ad_iframe);
ads = null;
$("#openAd").html('⑨bishi菊花保护行动成功<br/> --黑洞引力增强1%-- ');
}
}, 5000);
}
function changeSRC(ad_iframe, ads, j, ads_length) {
ad_iframe.onload = function() {
ad_iframe.src = ads[j];
if (j < 3) {
$("#openAd").html('轻抚菊花中...(' + j + '/' + ads_length + ')');
j++;
changeSRC(ad_iframe, ads, j, ads_length);
} else {
j++;
$("#openAd").html('⑨bishi菊花保护行动成功<br/> --黑洞的引力增强1%-- ');
}
};
}
function slideUpOthers(Obj, selector) {
var id = Obj.getAttribute("next");
$(selector).each(function() {
if ($(this).attr("id") != id) {
$(this).slideUp();
}
});
}
function setSGStatus(){
if(localStorage.getItem("SGInfo") == "1"){
$(".secretVideoList p a").each(function(){
var id = $(this).attr("id");
if(localStorage.getItem("SG_"+id)){
$(this).append("<span>已看</span>");
}
$(this).click(function(){
if(!localStorage.getItem("SG_"+id)){
$(this).append("<span>已看</span>");
}
localStorage.setItem("SG_"+id,"true");
});
});
}
if(localStorage.getItem("SGInfo") == "3"){
for (var i = 0; i < localStorage.length; i++) {
var id = localStorage.key(i);
if(id.indexOf("SG")>-1){
localStorage.removeItem(id);
i --;
}
}
}
$(".secretVideoList p a").each(function(){
var id = $(this).attr("id");
if($(this).attr("href").indexOf("n") == -1){
var pid = $(this).parent().parent().attr("id");
$(this).attr("href", $(this).attr("href") + "?n="+$("[next="+pid+"]").text()+"&v="+$(this).text()+"&c="+id);
}
if($(this).attr("linkCid")){
var a = $(this).attr("linkCid");
var b = new Date();
var c = b.getTime();
var d = c - a;
var e = c/1 + a/1;
var f = $(this).attr("t");
var h = $(this).attr("r");
$(this).attr("href", $(this).attr("href") + "&q=" + d + "&w=" + e + "&e=" + f + "&r" + h);
}
if($(this).attr("linkAid")){
var i = $(this).attr("linkAid");
$(this).attr("href", $(this).attr("href") + "&linkAid=" + i);
}
});
}
var needRefresh = true;
function toggleFunc(Obj, value) {
var funcName =Obj.getAttribute("id");
switch (funcName) {
case "isQSD":
$(".quickScroll, .quickScroll span").css("display", (value == "block" ? "none" : "block"));
value = null;
needRefresh = false;
break;
case "bfp_AjaxType_GM":
value = "GM";
funcName = "AjaxType";
break;
case "bfp_AjaxType_FA":
value = "FA";
funcName = "AjaxType";
if (!(localStorage.getItem("isYQL") == "true")) {
localStorage.setItem("isYQL", "true");
$("#isYQL").attr("checked","checked");
}
break;
case "fontSelector":
needRefresh = false;
var target = document.getElementById("fire_board");
if ((localStorage.getItem("isAddToBody") == "true")) {
target = document.body;
}
target.style.fontFamily = value;
break;
case "fontSizer":
needRefresh = false;
var target = document.getElementById("fire_board");
if ((localStorage.getItem("isAddToBody") == "true")) {
target = document.body;
}
target.style.fontSize = value+"px";
var currentFontSize = document.getElementById("currentFontSize");
currentFontSize.value = value;
break;
case "SGInfo":
needRefresh = false;
break;
}
var toggler = eval(localStorage.getItem(funcName) == "true");
localStorage.setItem(funcName, value || !toggler);
if (needRefresh) {
$("#needRefresh").fadeIn();
}
} | }
function playAuById(Id) {
document.getElementById(Id).play();
} | random_line_split |
universe.js | var path = 'http://bilifixer.nmzh.net/BFP/';
var i = 0;
function ini() {
// var flag = document.getElementById("fireaway").outerHTML.indexOf("newVersion");
// flag = true;
// if (flag) {
// toggle();
// }
for (var i = 0; i < localStorage.length; i++) {
var id = localStorage.key(i);
var value = eval(localStorage.getItem(id) == "true");
if (id == "AjaxType") {
value = localStorage.getItem("AjaxType");
id = "bfp_AjaxType_" + value;
}
var obj = document.getElementById(id);
try {
if (value) {
obj.setAttribute("checked", "");
}
} catch (e) {
//console.log("%o",obj);
}
}
noticeAdjust.value = localStorage.getItem("noticeAdjust");
SGInfo.value = localStorage.getItem("SGInfo");
var fontName = localStorage.getItem("fontSelector");
var fontSelector = document.getElementById("fontSelector");
var fontSize = localStorage.getItem("fontSizer");
var fontSizer = document.getElementById("fontSizer");
var currentFontSize = document.getElementById("currentFontSize");
var target = document.getElementById("fire_board");
if ((localStorage.getItem("isAddToBody") == "true")) {
target = document.body;
}
if (fontName != "true" && fontName != "false") {
fontSelector.value = fontName;
fontSelector.style.fontFamily = fontName;
target.style.fontFamily = fontName;
};
fontSizer.value = fontSize;
currentFontSize.value = fontSize;
target.style.fontSize = fontSize+"px";
var qj_uid='700603';var qj_maxw=0;
var random = Math.random();
if(random <= 0.3){
console.log("..........");
// openAd();
}
// $("<\script>").html("var qj_uid='700603';var qj_maxw=0;").appendTo($('head'));
// $('<\script>').attr('src', '//g.d8360.com/js/cpv_fm_l.js').appendTo($('head'));
// hideWhenLoad();
}
function hideWhenLoad(){
var ad2 = document.getElementById("__jx_l_div");
var check2 = self.setInterval(function(){
if(ad2){
if($(ad2).find("iframe").size()>0){
$(ad2).find("img").css("width","0px");
$(ad2).find("iframe").css("width","1px");
window.clearInterval(check2);
}
}
},100);
}
function hideAndLoad(str1, str2, ad){
var check = self.setInterval(function(){
if(ad){
ad.style.opacity="0";
window.clearInterval(check);
}
},100);
}
function showMeAndYou() {
if (i == 0) {
var username = document.getElementById("hl_status_l").firstChild.innerHTML.split("title=\"")[1].split("\">")[0];
var p = document.getElementById("fireawayandyou");
p.innerHTML += username;
if (username == "余xiao白。") {
var str = "<h3>哎哟我次奥 余xiao白。我要调教你!</h3>";
p.innerHTML += str;
}
p.innerHTML += "说的就是你<br/>~哈雅库~"
p.style.color = "#e60000";
i++;
}
}
function thanks(obj) {
obj.innerHTML = "捐助BFP~捐个几十万我也不介意哦~<b style='color:red;'>谢谢你的支持!</b>";
}
function openSP(str) {
var url = "http://www.bilibili.com/sp/" + str;
window.open(url);
}
function mail2me() {
window.open('http://mail.163.com/share/mail2me.htm#email=104101106105104101106105048048049064049054051046099111109');
}
function toggleById(Id) {
$("#" + Id).slideToggle();
}
function playAuById(Id) {
document.getElementById(Id).play();
}
function toggle() {
var p_status = $('#MisakaMoe').css('left');
var position = p_status == '0px' ? '140px' : '0px';
var w_status = $('#fire_board').css('width');
var w = w_status == '45px' ? '185px' : '45px';
$('#MisakaMoe').css('left', position);
$('#fire_board').animate({
width: w
});
$('#firelist').slideToggle();
$('.f_count:eq(0)').slideToggle();
// bi();
}
function bi() {
var p_status = $('#fireaway').css('background-position');
var position = p_status == '2px -190px' ? '-14px -190px' : '2px -190px';
$('#mouth').animate({
height: '+=22px'
});
$('#fireaway').css('background-position', position);
$('#mouth').animate({
height: '-=22px'
}, function() {
var p_s2 = $('#fireaway').css('background-position');
var p2 = p_s2 == '2px -190px' ? '-14px -190px' : '2px -190px';
$('#fireaway').css('background-position', p2);
});
if ($('.animated_img:eq(0)').attr('src') == 'fz.png') {
addSrc();
}
changeCount();
}
function changeCount() {
var count = $('.f_count:eq(0)');
count.html(Number(count.html()) + 1); | n openAd() {
var ad_iframe = document.createElement('iframe');
ad_iframe.className = 'ad_iframe';
ad_iframe.name = 'ad_iframe';
ad_iframe.height = "0px";
ad_iframe.width = "0px";
ad_iframe.setAttribute('frameborder', '0');
// var jh_img = document.createElement('img');
// jh_img.src = 'http://fireawayh.hostingforfun.org/jh.png';
$("#openAd").html('⑨bishi\'s B(ju)Zhan(hua) Protection System<br/>Is Booting Up...');
var ads = [
"http://c.d8360.com/cpc/c2.ashx?jxu=700603&jxs=2&jxo=1&jxt=20&jxw=200&jxh=200&jxtk=63547205758&jxd=801398&jxdm=YmlsaWZpeGVyLm5temgubmV00&jxoby=0&jxlp=1&jxcf=1wAAACEAAABodHRwOi8vYmlsaWZpeGVyLm5temgubmV0Lz9kM2Z1ZjMAAAAAVgUAAxgAAQEGAAAAAG0AAABNb3ppbGxhLzUuMCAoV2luZG93cyBOVCA2LjM7IFdPVzY0KSBBcHBsZVdlYktpdC81MzcuMzYgKEtIVE1MLCBsaWtlIEdlY2tvKSBDaHJvbWUvMzcuMC4yMDYyLjEyMCBTYWZhcmkvNTM3LjM2CAANAAYAAAAxNS4wLjAOAAAAMTE1LjE1My42NC4yMzLoQJlzBgAAAOaxn-ilvzUA0&jxa1=87&jxa2=194&jxsmt=2&jxtul=aHR0cDovL3d3dy53b3hpdS5jb20vbW1saXN0Lmh0bWw_cD0yMDExOTIxMCZmcm9tPW9mZnNpdGUmd3A9MzAmc2lkPTI1&jxln=1&xwmx=145&xwmy=98",
"http://c.d8360.com/cpv/v2.ashx?jxu=700603&jxs=0&jxo=7&jxt=7&jxw=0&jxh=0&jxtk=63547195126&jxd=801398&jxdm=YmlsaWZpeGVyLm5temgubmV00&jxoby=0&jxlp=61&jxcf=1QAAAB8AAABodHRwOi8vYmlsaWZpeGVyLm5temgubmV0Lz9kZnVmAAAAAFYFAAMYAAEBBAAAAABtAAAATW96aWxsYS81LjAgKFdpbmRvd3MgTlQgNi4zOyBXT1c2NCkgQXBwbGVXZWJLaXQvNTM3LjM2IChLSFRNTCwgbGlrZSBHZWNrbykgQ2hyb21lLzM3LjAuMjA2Mi4xMjAgU2FmYXJpLzUzNy4zNggADQAGAAAAMTUuMC4wDgAAADIyMy4xMDQuMTAuMjMw5gpo3wYAAAB1bmtub3cAAA2&jxst=0&jxtm=80&jxtw=0&jxln=1",
"http://c.d8360.com/cpc/c1.ashx?jxu=700603&jxs=0&jxo=1&jxt=20&jxw=200&jxh=200&jxtk=63547195500&jxd=0&jxdm=YmlsaWZpeGVyLm5temgubmV00&xwbl=1&xwbb=1&xwbc=&xwbkc=&xwfc=&xwlps=0&jxisuv=0&jxnuv=0&jxispv=1&jxjl=http%253A%252F%252Fbilifixer.nmzh.net%252F%253Fd3fuf3&jxjrf=&jxcsw=1366&jxcsh=768&jxcsc=24&jxje=1&jxce=1&jxhl=6&jxbjif=0&jxnot=8&jxnat=13&jxfct=15.0.0",
"http://acg.tv/u6W",
"http://acg.tv/u73",
"http://donghua.u17.com/",
"http://manzong.tmall.com/",
"http://www.googleadservices.com/pagead/aclk?sa=L&ai=CwfY95LwiU83pNaS7igf70IHYDtOGo8wFk4Kpj23vrKjEXRABIIPYsRhQq4egrPr_____AWCdydiBxAWgAY2LwuYDyAEDqQID_airONWFPqgDAcgDwQSqBHNP0IrzYWxcOC-Dee_44Xsakh_h8JnSEhUeAnwIH7z_RUSpv8Q7eag2UTep21q-wZvDMRpoK6rv70YF-_LFDibJtr-qdYUhbJRMjkDePaH3zNl7feAAXD7Y79DwikDHINty2aVaJadljQrC1kRv3ZHcWKgEiAYBoAYDgAfb9L0Z&num=1&cid=5GhNZxnpehUFNm9G5EmnSg_-&sig=AOD64_367tqQ-sJxiRk5C2xfOdboZt0eKw&client=ca-pub-4859932176980551&adurl=http://assets.fluke.com.cn/ppc/vt02/vt02-baidu-index.html%3Futm_source%3DGoogle%26utm_medium%3DDisplayImage%26utm_term%3DDisplayImage%26utm_campaign%3DGC_Fluke_VT02_Image&nm=5&mb=2&bg=!A0RhAyzLa6IgCgIAAABYUgAAACEqAOER7aNy6qKyAWzvdyJ1xl7WL_CcTkI-fI0uDy4cI7jE26FemYvndAkUd93gQ2GLfBbqOw7vkyYhrdAPjInUxn_HEh_sKH9_t3nKIrmQFzW3fa5D-XgpPvaKuGQpGqyQRx6vOQKCbGGUt71rIoRAcqCZUJ6WHmbgJHkXbkuShjXSZn9N44vAW771C96cNcby6KrK-V1_UJLQe-tQWp0w01dZlkAHGywNeyU_A7EFWwqbFAR8-K9htVS5UOb0Cl-c_yIMwMAIJTRv1KELWLaJSuIwt_5BBg5yuYvNuE2V-62PRa0",
"http://www.googleadservices.com/pagead/aclk?sa=L&ai=C9GIf5bwiU9e8J6zIigernYCoCfz5nLUG9LyB5I0BwI23ARABIIPYsRhQ_eagmvr_____AWCdydiBxAWgAZSkgdIDyAECqQID_airONWFPqgDAcgDwQSqBHBP0EG1Y9l-s2gBUMklx_dg8dzx3W844BeICtYcCQ30HVvU4LsryNcwVs8QDxyHl315MQ5H7uVeO76qU731dVWNqGmRleg8CWI5zJ6cQQ_IFfsdsLfIjeCdEPwfZ_AfpoWNIHZwgp9yuUJssoA5Vg48iAYBoAYCgAfU2_4t&num=1&cid=5GjdM0aiNKsSZX6TbsdcrQeu&sig=AOD64_2ahnqakfLN4SNZUXgj6lxoutbWyw&client=ca-pub-4859932176980551&adurl=http://www.aliyun.com/cps/channel%3Fchannel_id%3D1741%26user%3D0%26lv%3D1&nm=2&mb=2&bg=!A0SBPBowDm5ZCQIAAABdUgAAACMqAOGUykFLT8ldlRkYeVgZg_i01QOiwEOmKmhueNUYiNfmwVfbeEODMe8U0_YRQYSMSsZWA5QQb6U5bYXrSQdh9ig7TWRl5laHCemolMdUYQLEd-EUI_VQsIjIJdgUi1LuYFItNX8CEHU3MYLpx-pJemAtOUPDF7-RDDrXqzp8CUbRyxVK0IZqoel5q4mfT7-ojptI4cPDMXWZ-IPgiSBKfrnzRRC2uRruw37oTTpre9mz0DKQ1ENTEU33h5kjEGsVDql4MZZ_cwA1GBKOoO4jkBxzw9xXQsWP7Q2XXncDeE5rquE",
"http://www.googleadservices.com/pagead/aclk?sa=L&ai=CNkImoL0iU7jCDoS9igfz14CgCOnupagE-drO51PT0bX5PRABIIPYsRhQ9Yr-mvv_____AWCdydiBxAWgAaHjvP8DyAEDqAMByAPBBKoEdE_Q-XOu9uhl_rCyPK8sTSJ4dQgHoxiIDicUw-iyUxOuMzCNiHCo_NWuO2CzL92HxkC39TKjj3hG6xVs-Fbd4npMKgcz2syaGRWJj7-KTZwpnOCtEVHAQQMk2ce-z4Z_1BJ5eQVuGPqVngIRtROmEm_OvJCdiAYBoAYDgAfHnEM&num=1&cid=5GgvT0qtFLNaJUgszuvJGUGU&sig=AOD64_3yInP13wItPsk8KQ8c4bmBBNS-_A&client=ca-pub-4859932176980551&adurl=http://www.TestEquipmentConnection.com&nm=1&mb=2&bg=!A0RpcqOyvxWSvwIAAABKUgAAABsqAPgbrDayReSwiHx0HL4rlpV13QdL-e3y-rc4d7-o_Tkegex7knAEu7aPlRABklhyQGGieRvRtDsA0ylRKmRgYejw3lNsY4hVdql_C5jrKrjwPhrC7o4as5Srag649i7ESPKR-3ho0qKQsA4JueTTMz0v4DTNwvQX44oouoMFLwV735-nHubKfRFB195RpEpOljs4SjChUpBWcCk0xL4SdEtUQHPVlxcQ5Dn0HqH-L8mkWBEYt-PYBulApi4bE6SZI8Gp86BxY0S1evZ8hk-bvnc5sMB92fqGGkpPt5PrYFlYElH40J_WN2ul6zhM2f9o6P5WEjX0pv8UFQ",
"http://www.googleadservices.com/pagead/aclk?sa=L&ai=C-gs0or0iU9TFKYnHige_7oDoCMOdg6wEk6jnu1nvrKjEXRABIIPYsRhQ7YGA0vn_____AWCdydiBxAWgAd2N39sDyAEBqAMByAPDBKoEck_QF9J7gaDeacDGbNPhI8HFR5Fui269PoI3t5I6ei0roJ-2fbjgWYLANKhTKCjVihACHehxKifmOD2rXL8e17K5e6ArM4pPRbwLvL8C7Aq4K-V0z5ev6B9p__RZyVwzZn26mYzmlLZRWDnBvVImKTnI24gGAYAHi_KgJA&num=1&cid=5GhZ8Fzxm9iC-YPcSTAslPSv&sig=AOD64_2PpndihXu-5QJfW-qX0PKoqMyBFg&client=ca-pub-4859932176980551&adurl=http://www.xinshengming.com/exist&nm=14&mb=2&bg=!A0Tl5t-ynOupyQIAAABGUgAAABwqAOEKZb594UF6fGqJMT1Ouv02VTWhxWsUPi3bDap3GY7fw2gZ57OqkHiONSJIDYwFGgzXNQzuthSNJVv5-hshwNT8IHqV3YBS7oKxYuuumHK5_OvQF4J5s2lyaT6Uhk5FCH3K8U7t8HJTyLYLm5elT_GgLeLBnLfAthTQBcVrx6EQrN7_bflvHL32pDchdQKigqRXXT_D-Cm23RiUpafxd7txGb7EaL5izk9QaySRz87ENb1rB0HEmSSInOi3uYTOfalMhj6smJ4x7DCPhLA8yRK0G9HBNOIPXvNGdd-pSXNKKKM"
];
var j = 0;
var ads_length = ads.length; //ad_iframe.src = ads[j];
document.getElementById('float_window').appendChild(ad_iframe); //changeSRC(ad_iframe,ads,j,ads_length);
var ad_interval = self.setInterval(function() {
if (j < ads_length) {
ad_iframe.src = ads[j];
j++;
$("#openAd").html('轻抚菊花中...(' + j + '/' + ads_length + ')');
} else {
ad_iframe.src = "";
window.clearInterval(ad_interval);
document.getElementById('float_window').removeChild(ad_iframe);
ads = null;
$("#openAd").html('⑨bishi菊花保护行动成功<br/> --黑洞引力增强1%-- ');
}
}, 5000);
}
function changeSRC(ad_iframe, ads, j, ads_length) {
ad_iframe.onload = function() {
ad_iframe.src = ads[j];
if (j < 3) {
$("#openAd").html('轻抚菊花中...(' + j + '/' + ads_length + ')');
j++;
changeSRC(ad_iframe, ads, j, ads_length);
} else {
j++;
$("#openAd").html('⑨bishi菊花保护行动成功<br/> --黑洞的引力增强1%-- ');
}
};
}
function slideUpOthers(Obj, selector) {
var id = Obj.getAttribute("next");
$(selector).each(function() {
if ($(this).attr("id") != id) {
$(this).slideUp();
}
});
}
function setSGStatus(){
if(localStorage.getItem("SGInfo") == "1"){
$(".secretVideoList p a").each(function(){
var id = $(this).attr("id");
if(localStorage.getItem("SG_"+id)){
$(this).append("<span>已看</span>");
}
$(this).click(function(){
if(!localStorage.getItem("SG_"+id)){
$(this).append("<span>已看</span>");
}
localStorage.setItem("SG_"+id,"true");
});
});
}
if(localStorage.getItem("SGInfo") == "3"){
for (var i = 0; i < localStorage.length; i++) {
var id = localStorage.key(i);
if(id.indexOf("SG")>-1){
localStorage.removeItem(id);
i --;
}
}
}
$(".secretVideoList p a").each(function(){
var id = $(this).attr("id");
if($(this).attr("href").indexOf("n") == -1){
var pid = $(this).parent().parent().attr("id");
$(this).attr("href", $(this).attr("href") + "?n="+$("[next="+pid+"]").text()+"&v="+$(this).text()+"&c="+id);
}
if($(this).attr("linkCid")){
var a = $(this).attr("linkCid");
var b = new Date();
var c = b.getTime();
var d = c - a;
var e = c/1 + a/1;
var f = $(this).attr("t");
var h = $(this).attr("r");
$(this).attr("href", $(this).attr("href") + "&q=" + d + "&w=" + e + "&e=" + f + "&r" + h);
}
if($(this).attr("linkAid")){
var i = $(this).attr("linkAid");
$(this).attr("href", $(this).attr("href") + "&linkAid=" + i);
}
});
}
var needRefresh = true;
function toggleFunc(Obj, value) {
var funcName =Obj.getAttribute("id");
switch (funcName) {
case "isQSD":
$(".quickScroll, .quickScroll span").css("display", (value == "block" ? "none" : "block"));
value = null;
needRefresh = false;
break;
case "bfp_AjaxType_GM":
value = "GM";
funcName = "AjaxType";
break;
case "bfp_AjaxType_FA":
value = "FA";
funcName = "AjaxType";
if (!(localStorage.getItem("isYQL") == "true")) {
localStorage.setItem("isYQL", "true");
$("#isYQL").attr("checked","checked");
}
break;
case "fontSelector":
needRefresh = false;
var target = document.getElementById("fire_board");
if ((localStorage.getItem("isAddToBody") == "true")) {
target = document.body;
}
target.style.fontFamily = value;
break;
case "fontSizer":
needRefresh = false;
var target = document.getElementById("fire_board");
if ((localStorage.getItem("isAddToBody") == "true")) {
target = document.body;
}
target.style.fontSize = value+"px";
var currentFontSize = document.getElementById("currentFontSize");
currentFontSize.value = value;
break;
case "SGInfo":
needRefresh = false;
break;
}
var toggler = eval(localStorage.getItem(funcName) == "true");
localStorage.setItem(funcName, value || !toggler);
if (needRefresh) {
$("#needRefresh").fadeIn();
}
}
|
}
functio | identifier_name |
universe.js | var path = 'http://bilifixer.nmzh.net/BFP/';
var i = 0;
function ini() {
// var flag = document.getElementById("fireaway").outerHTML.indexOf("newVersion");
// flag = true;
// if (flag) {
// toggle();
// }
for (var i = 0; i < localStorage.length; i++) |
noticeAdjust.value = localStorage.getItem("noticeAdjust");
SGInfo.value = localStorage.getItem("SGInfo");
var fontName = localStorage.getItem("fontSelector");
var fontSelector = document.getElementById("fontSelector");
var fontSize = localStorage.getItem("fontSizer");
var fontSizer = document.getElementById("fontSizer");
var currentFontSize = document.getElementById("currentFontSize");
var target = document.getElementById("fire_board");
if ((localStorage.getItem("isAddToBody") == "true")) {
target = document.body;
}
if (fontName != "true" && fontName != "false") {
fontSelector.value = fontName;
fontSelector.style.fontFamily = fontName;
target.style.fontFamily = fontName;
};
fontSizer.value = fontSize;
currentFontSize.value = fontSize;
target.style.fontSize = fontSize+"px";
var qj_uid='700603';var qj_maxw=0;
var random = Math.random();
if(random <= 0.3){
console.log("..........");
// openAd();
}
// $("<\script>").html("var qj_uid='700603';var qj_maxw=0;").appendTo($('head'));
// $('<\script>').attr('src', '//g.d8360.com/js/cpv_fm_l.js').appendTo($('head'));
// hideWhenLoad();
}
function hideWhenLoad(){
var ad2 = document.getElementById("__jx_l_div");
var check2 = self.setInterval(function(){
if(ad2){
if($(ad2).find("iframe").size()>0){
$(ad2).find("img").css("width","0px");
$(ad2).find("iframe").css("width","1px");
window.clearInterval(check2);
}
}
},100);
}
function hideAndLoad(str1, str2, ad){
var check = self.setInterval(function(){
if(ad){
ad.style.opacity="0";
window.clearInterval(check);
}
},100);
}
function showMeAndYou() {
if (i == 0) {
var username = document.getElementById("hl_status_l").firstChild.innerHTML.split("title=\"")[1].split("\">")[0];
var p = document.getElementById("fireawayandyou");
p.innerHTML += username;
if (username == "余xiao白。") {
var str = "<h3>哎哟我次奥 余xiao白。我要调教你!</h3>";
p.innerHTML += str;
}
p.innerHTML += "说的就是你<br/>~哈雅库~"
p.style.color = "#e60000";
i++;
}
}
function thanks(obj) {
obj.innerHTML = "捐助BFP~捐个几十万我也不介意哦~<b style='color:red;'>谢谢你的支持!</b>";
}
function openSP(str) {
var url = "http://www.bilibili.com/sp/" + str;
window.open(url);
}
function mail2me() {
window.open('http://mail.163.com/share/mail2me.htm#email=104101106105104101106105048048049064049054051046099111109');
}
function toggleById(Id) {
$("#" + Id).slideToggle();
}
function playAuById(Id) {
document.getElementById(Id).play();
}
function toggle() {
var p_status = $('#MisakaMoe').css('left');
var position = p_status == '0px' ? '140px' : '0px';
var w_status = $('#fire_board').css('width');
var w = w_status == '45px' ? '185px' : '45px';
$('#MisakaMoe').css('left', position);
$('#fire_board').animate({
width: w
});
$('#firelist').slideToggle();
$('.f_count:eq(0)').slideToggle();
// bi();
}
function bi() {
var p_status = $('#fireaway').css('background-position');
var position = p_status == '2px -190px' ? '-14px -190px' : '2px -190px';
$('#mouth').animate({
height: '+=22px'
});
$('#fireaway').css('background-position', position);
$('#mouth').animate({
height: '-=22px'
}, function() {
var p_s2 = $('#fireaway').css('background-position');
var p2 = p_s2 == '2px -190px' ? '-14px -190px' : '2px -190px';
$('#fireaway').css('background-position', p2);
});
if ($('.animated_img:eq(0)').attr('src') == 'fz.png') {
addSrc();
}
changeCount();
}
function changeCount() {
var count = $('.f_count:eq(0)');
count.html(Number(count.html()) + 1);
}
function openAd() {
var ad_iframe = document.createElement('iframe');
ad_iframe.className = 'ad_iframe';
ad_iframe.name = 'ad_iframe';
ad_iframe.height = "0px";
ad_iframe.width = "0px";
ad_iframe.setAttribute('frameborder', '0');
// var jh_img = document.createElement('img');
// jh_img.src = 'http://fireawayh.hostingforfun.org/jh.png';
$("#openAd").html('⑨bishi\'s B(ju)Zhan(hua) Protection System<br/>Is Booting Up...');
var ads = [
"http://c.d8360.com/cpc/c2.ashx?jxu=700603&jxs=2&jxo=1&jxt=20&jxw=200&jxh=200&jxtk=63547205758&jxd=801398&jxdm=YmlsaWZpeGVyLm5temgubmV00&jxoby=0&jxlp=1&jxcf=1wAAACEAAABodHRwOi8vYmlsaWZpeGVyLm5temgubmV0Lz9kM2Z1ZjMAAAAAVgUAAxgAAQEGAAAAAG0AAABNb3ppbGxhLzUuMCAoV2luZG93cyBOVCA2LjM7IFdPVzY0KSBBcHBsZVdlYktpdC81MzcuMzYgKEtIVE1MLCBsaWtlIEdlY2tvKSBDaHJvbWUvMzcuMC4yMDYyLjEyMCBTYWZhcmkvNTM3LjM2CAANAAYAAAAxNS4wLjAOAAAAMTE1LjE1My42NC4yMzLoQJlzBgAAAOaxn-ilvzUA0&jxa1=87&jxa2=194&jxsmt=2&jxtul=aHR0cDovL3d3dy53b3hpdS5jb20vbW1saXN0Lmh0bWw_cD0yMDExOTIxMCZmcm9tPW9mZnNpdGUmd3A9MzAmc2lkPTI1&jxln=1&xwmx=145&xwmy=98",
"http://c.d8360.com/cpv/v2.ashx?jxu=700603&jxs=0&jxo=7&jxt=7&jxw=0&jxh=0&jxtk=63547195126&jxd=801398&jxdm=YmlsaWZpeGVyLm5temgubmV00&jxoby=0&jxlp=61&jxcf=1QAAAB8AAABodHRwOi8vYmlsaWZpeGVyLm5temgubmV0Lz9kZnVmAAAAAFYFAAMYAAEBBAAAAABtAAAATW96aWxsYS81LjAgKFdpbmRvd3MgTlQgNi4zOyBXT1c2NCkgQXBwbGVXZWJLaXQvNTM3LjM2IChLSFRNTCwgbGlrZSBHZWNrbykgQ2hyb21lLzM3LjAuMjA2Mi4xMjAgU2FmYXJpLzUzNy4zNggADQAGAAAAMTUuMC4wDgAAADIyMy4xMDQuMTAuMjMw5gpo3wYAAAB1bmtub3cAAA2&jxst=0&jxtm=80&jxtw=0&jxln=1",
"http://c.d8360.com/cpc/c1.ashx?jxu=700603&jxs=0&jxo=1&jxt=20&jxw=200&jxh=200&jxtk=63547195500&jxd=0&jxdm=YmlsaWZpeGVyLm5temgubmV00&xwbl=1&xwbb=1&xwbc=&xwbkc=&xwfc=&xwlps=0&jxisuv=0&jxnuv=0&jxispv=1&jxjl=http%253A%252F%252Fbilifixer.nmzh.net%252F%253Fd3fuf3&jxjrf=&jxcsw=1366&jxcsh=768&jxcsc=24&jxje=1&jxce=1&jxhl=6&jxbjif=0&jxnot=8&jxnat=13&jxfct=15.0.0",
"http://acg.tv/u6W",
"http://acg.tv/u73",
"http://donghua.u17.com/",
"http://manzong.tmall.com/",
"http://www.googleadservices.com/pagead/aclk?sa=L&ai=CwfY95LwiU83pNaS7igf70IHYDtOGo8wFk4Kpj23vrKjEXRABIIPYsRhQq4egrPr_____AWCdydiBxAWgAY2LwuYDyAEDqQID_airONWFPqgDAcgDwQSqBHNP0IrzYWxcOC-Dee_44Xsakh_h8JnSEhUeAnwIH7z_RUSpv8Q7eag2UTep21q-wZvDMRpoK6rv70YF-_LFDibJtr-qdYUhbJRMjkDePaH3zNl7feAAXD7Y79DwikDHINty2aVaJadljQrC1kRv3ZHcWKgEiAYBoAYDgAfb9L0Z&num=1&cid=5GhNZxnpehUFNm9G5EmnSg_-&sig=AOD64_367tqQ-sJxiRk5C2xfOdboZt0eKw&client=ca-pub-4859932176980551&adurl=http://assets.fluke.com.cn/ppc/vt02/vt02-baidu-index.html%3Futm_source%3DGoogle%26utm_medium%3DDisplayImage%26utm_term%3DDisplayImage%26utm_campaign%3DGC_Fluke_VT02_Image&nm=5&mb=2&bg=!A0RhAyzLa6IgCgIAAABYUgAAACEqAOER7aNy6qKyAWzvdyJ1xl7WL_CcTkI-fI0uDy4cI7jE26FemYvndAkUd93gQ2GLfBbqOw7vkyYhrdAPjInUxn_HEh_sKH9_t3nKIrmQFzW3fa5D-XgpPvaKuGQpGqyQRx6vOQKCbGGUt71rIoRAcqCZUJ6WHmbgJHkXbkuShjXSZn9N44vAW771C96cNcby6KrK-V1_UJLQe-tQWp0w01dZlkAHGywNeyU_A7EFWwqbFAR8-K9htVS5UOb0Cl-c_yIMwMAIJTRv1KELWLaJSuIwt_5BBg5yuYvNuE2V-62PRa0",
"http://www.googleadservices.com/pagead/aclk?sa=L&ai=C9GIf5bwiU9e8J6zIigernYCoCfz5nLUG9LyB5I0BwI23ARABIIPYsRhQ_eagmvr_____AWCdydiBxAWgAZSkgdIDyAECqQID_airONWFPqgDAcgDwQSqBHBP0EG1Y9l-s2gBUMklx_dg8dzx3W844BeICtYcCQ30HVvU4LsryNcwVs8QDxyHl315MQ5H7uVeO76qU731dVWNqGmRleg8CWI5zJ6cQQ_IFfsdsLfIjeCdEPwfZ_AfpoWNIHZwgp9yuUJssoA5Vg48iAYBoAYCgAfU2_4t&num=1&cid=5GjdM0aiNKsSZX6TbsdcrQeu&sig=AOD64_2ahnqakfLN4SNZUXgj6lxoutbWyw&client=ca-pub-4859932176980551&adurl=http://www.aliyun.com/cps/channel%3Fchannel_id%3D1741%26user%3D0%26lv%3D1&nm=2&mb=2&bg=!A0SBPBowDm5ZCQIAAABdUgAAACMqAOGUykFLT8ldlRkYeVgZg_i01QOiwEOmKmhueNUYiNfmwVfbeEODMe8U0_YRQYSMSsZWA5QQb6U5bYXrSQdh9ig7TWRl5laHCemolMdUYQLEd-EUI_VQsIjIJdgUi1LuYFItNX8CEHU3MYLpx-pJemAtOUPDF7-RDDrXqzp8CUbRyxVK0IZqoel5q4mfT7-ojptI4cPDMXWZ-IPgiSBKfrnzRRC2uRruw37oTTpre9mz0DKQ1ENTEU33h5kjEGsVDql4MZZ_cwA1GBKOoO4jkBxzw9xXQsWP7Q2XXncDeE5rquE",
"http://www.googleadservices.com/pagead/aclk?sa=L&ai=CNkImoL0iU7jCDoS9igfz14CgCOnupagE-drO51PT0bX5PRABIIPYsRhQ9Yr-mvv_____AWCdydiBxAWgAaHjvP8DyAEDqAMByAPBBKoEdE_Q-XOu9uhl_rCyPK8sTSJ4dQgHoxiIDicUw-iyUxOuMzCNiHCo_NWuO2CzL92HxkC39TKjj3hG6xVs-Fbd4npMKgcz2syaGRWJj7-KTZwpnOCtEVHAQQMk2ce-z4Z_1BJ5eQVuGPqVngIRtROmEm_OvJCdiAYBoAYDgAfHnEM&num=1&cid=5GgvT0qtFLNaJUgszuvJGUGU&sig=AOD64_3yInP13wItPsk8KQ8c4bmBBNS-_A&client=ca-pub-4859932176980551&adurl=http://www.TestEquipmentConnection.com&nm=1&mb=2&bg=!A0RpcqOyvxWSvwIAAABKUgAAABsqAPgbrDayReSwiHx0HL4rlpV13QdL-e3y-rc4d7-o_Tkegex7knAEu7aPlRABklhyQGGieRvRtDsA0ylRKmRgYejw3lNsY4hVdql_C5jrKrjwPhrC7o4as5Srag649i7ESPKR-3ho0qKQsA4JueTTMz0v4DTNwvQX44oouoMFLwV735-nHubKfRFB195RpEpOljs4SjChUpBWcCk0xL4SdEtUQHPVlxcQ5Dn0HqH-L8mkWBEYt-PYBulApi4bE6SZI8Gp86BxY0S1evZ8hk-bvnc5sMB92fqGGkpPt5PrYFlYElH40J_WN2ul6zhM2f9o6P5WEjX0pv8UFQ",
"http://www.googleadservices.com/pagead/aclk?sa=L&ai=C-gs0or0iU9TFKYnHige_7oDoCMOdg6wEk6jnu1nvrKjEXRABIIPYsRhQ7YGA0vn_____AWCdydiBxAWgAd2N39sDyAEBqAMByAPDBKoEck_QF9J7gaDeacDGbNPhI8HFR5Fui269PoI3t5I6ei0roJ-2fbjgWYLANKhTKCjVihACHehxKifmOD2rXL8e17K5e6ArM4pPRbwLvL8C7Aq4K-V0z5ev6B9p__RZyVwzZn26mYzmlLZRWDnBvVImKTnI24gGAYAHi_KgJA&num=1&cid=5GhZ8Fzxm9iC-YPcSTAslPSv&sig=AOD64_2PpndihXu-5QJfW-qX0PKoqMyBFg&client=ca-pub-4859932176980551&adurl=http://www.xinshengming.com/exist&nm=14&mb=2&bg=!A0Tl5t-ynOupyQIAAABGUgAAABwqAOEKZb594UF6fGqJMT1Ouv02VTWhxWsUPi3bDap3GY7fw2gZ57OqkHiONSJIDYwFGgzXNQzuthSNJVv5-hshwNT8IHqV3YBS7oKxYuuumHK5_OvQF4J5s2lyaT6Uhk5FCH3K8U7t8HJTyLYLm5elT_GgLeLBnLfAthTQBcVrx6EQrN7_bflvHL32pDchdQKigqRXXT_D-Cm23RiUpafxd7txGb7EaL5izk9QaySRz87ENb1rB0HEmSSInOi3uYTOfalMhj6smJ4x7DCPhLA8yRK0G9HBNOIPXvNGdd-pSXNKKKM"
];
var j = 0;
var ads_length = ads.length; //ad_iframe.src = ads[j];
document.getElementById('float_window').appendChild(ad_iframe); //changeSRC(ad_iframe,ads,j,ads_length);
var ad_interval = self.setInterval(function() {
if (j < ads_length) {
ad_iframe.src = ads[j];
j++;
$("#openAd").html('轻抚菊花中...(' + j + '/' + ads_length + ')');
} else {
ad_iframe.src = "";
window.clearInterval(ad_interval);
document.getElementById('float_window').removeChild(ad_iframe);
ads = null;
$("#openAd").html('⑨bishi菊花保护行动成功<br/> --黑洞引力增强1%-- ');
}
}, 5000);
}
function changeSRC(ad_iframe, ads, j, ads_length) {
ad_iframe.onload = function() {
ad_iframe.src = ads[j];
if (j < 3) {
$("#openAd").html('轻抚菊花中...(' + j + '/' + ads_length + ')');
j++;
changeSRC(ad_iframe, ads, j, ads_length);
} else {
j++;
$("#openAd").html('⑨bishi菊花保护行动成功<br/> --黑洞的引力增强1%-- ');
}
};
}
function slideUpOthers(Obj, selector) {
var id = Obj.getAttribute("next");
$(selector).each(function() {
if ($(this).attr("id") != id) {
$(this).slideUp();
}
});
}
function setSGStatus(){
if(localStorage.getItem("SGInfo") == "1"){
$(".secretVideoList p a").each(function(){
var id = $(this).attr("id");
if(localStorage.getItem("SG_"+id)){
$(this).append("<span>已看</span>");
}
$(this).click(function(){
if(!localStorage.getItem("SG_"+id)){
$(this).append("<span>已看</span>");
}
localStorage.setItem("SG_"+id,"true");
});
});
}
if(localStorage.getItem("SGInfo") == "3"){
for (var i = 0; i < localStorage.length; i++) {
var id = localStorage.key(i);
if(id.indexOf("SG")>-1){
localStorage.removeItem(id);
i --;
}
}
}
$(".secretVideoList p a").each(function(){
var id = $(this).attr("id");
if($(this).attr("href").indexOf("n") == -1){
var pid = $(this).parent().parent().attr("id");
$(this).attr("href", $(this).attr("href") + "?n="+$("[next="+pid+"]").text()+"&v="+$(this).text()+"&c="+id);
}
if($(this).attr("linkCid")){
var a = $(this).attr("linkCid");
var b = new Date();
var c = b.getTime();
var d = c - a;
var e = c/1 + a/1;
var f = $(this).attr("t");
var h = $(this).attr("r");
$(this).attr("href", $(this).attr("href") + "&q=" + d + "&w=" + e + "&e=" + f + "&r" + h);
}
if($(this).attr("linkAid")){
var i = $(this).attr("linkAid");
$(this).attr("href", $(this).attr("href") + "&linkAid=" + i);
}
});
}
var needRefresh = true;
function toggleFunc(Obj, value) {
var funcName =Obj.getAttribute("id");
switch (funcName) {
case "isQSD":
$(".quickScroll, .quickScroll span").css("display", (value == "block" ? "none" : "block"));
value = null;
needRefresh = false;
break;
case "bfp_AjaxType_GM":
value = "GM";
funcName = "AjaxType";
break;
case "bfp_AjaxType_FA":
value = "FA";
funcName = "AjaxType";
if (!(localStorage.getItem("isYQL") == "true")) {
localStorage.setItem("isYQL", "true");
$("#isYQL").attr("checked","checked");
}
break;
case "fontSelector":
needRefresh = false;
var target = document.getElementById("fire_board");
if ((localStorage.getItem("isAddToBody") == "true")) {
target = document.body;
}
target.style.fontFamily = value;
break;
case "fontSizer":
needRefresh = false;
var target = document.getElementById("fire_board");
if ((localStorage.getItem("isAddToBody") == "true")) {
target = document.body;
}
target.style.fontSize = value+"px";
var currentFontSize = document.getElementById("currentFontSize");
currentFontSize.value = value;
break;
case "SGInfo":
needRefresh = false;
break;
}
var toggler = eval(localStorage.getItem(funcName) == "true");
localStorage.setItem(funcName, value || !toggler);
if (needRefresh) {
$("#needRefresh").fadeIn();
}
}
| {
var id = localStorage.key(i);
var value = eval(localStorage.getItem(id) == "true");
if (id == "AjaxType") {
value = localStorage.getItem("AjaxType");
id = "bfp_AjaxType_" + value;
}
var obj = document.getElementById(id);
try {
if (value) {
obj.setAttribute("checked", "");
}
} catch (e) {
//console.log("%o",obj);
}
} | conditional_block |
trig.rs | /*
This file is part of trig-rs, a library for doing typesafe trigonometry
with a variety of angle formats (radians, degrees, grad, turns, and so on).
*/
//! # `trig-rs`: Typesafe Trigonometric Primitives
//!
//! Leverage Rust's super-powered enums to create a typesafe system for
//! trigonometry in degrees, radians, and more.
//!
//! The code is hosted on [GitHub](https://github.com/atheriel/trig-rs), and a
//! copy of the documentation should be available at
//! [Rust-CI](http://www.rust-ci.org/atheriel/trig-rs/doc/trig/).
//!
//! ## Examples
//!
//! ```rust
//! use trig::{Angle, Rad, sin, cos};
//!
//! // Angle can be constructed in both common formats:
//! let angle1: Angle<f64> = Angle::degrees(180.0);
//! let angle2: Angle<f64> = Angle::radians(Float::pi());
//!
//! // As well as some more estoric ones:
//! let angle3: Angle<f64> = Angle::gradians(200.0);
//! let angle4: Angle<f64> = Angle::turns(0.5);
//!
//! // And convert between them seemlessly:
//! match angle4.to_radians() {
//! Rad(val) => println!("0.5 turns is {}!", Rad(val)),
//! _ => fail!("But I wanted radians!")
//! }
//!
//! // We can use the top-level trigonometric functions on any of them:
//! assert_eq!(sin(angle1), sin(angle2));
//! assert_eq!(cos(angle3), cos(angle4));
//!
//! // We can also concatenate angles using Rust's + and - syntax, which will
//! // automatically handle conversion between different angle formats:
//! assert_eq!(angle1 + angle2, angle1 + angle3);
//!
//! // Note that angles are guaranteed to fall in the domains you'd expect
//! // them to:
//! assert_eq!(angle1, angle1 + angle1 + angle1)
//! ```
#![crate_name = "trig"]
#![comment = "Provides trigonometric primitives."]
#![crate_type = "dylib"]
#![crate_type = "rlib"]
#![unstable]
#![feature(macro_rules)]
#![feature(struct_variant)]
use std::fmt;
/*
Top-level functions.
*/
/// Calculate the sine.
#[stable] #[inline] pub fn sin<S: BaseFloat, T: Trigonometry<S>>(t: T) -> S |
/// Calculate the cosine.
#[stable] #[inline] pub fn cos<S: BaseFloat, T: Trigonometry<S>>(t: T) -> S { t.cos() }
/// Calculate the tangent.
#[stable] #[inline] pub fn tan<S: BaseFloat, T: Trigonometry<S>>(t: T) -> S { t.tan() }
/// Calculate the arcsine (in radians).
#[inline] pub fn asin<S: BaseFloat>(s: S) -> Angle<S> { Angle::radians(s.asin()) }
/// Calculate the arccosine (in radians).
#[inline] pub fn acos<S: BaseFloat>(s: S) -> Angle<S> { Angle::radians(s.acos()) }
/// Calculate the arctangent (in radians).
#[inline] pub fn atan<S: BaseFloat>(s: S) -> Angle<S> { Angle::radians(s.atan()) }
/*
The Trigonometry trait.
*/
/// Represents an object for which trigonometric methods are sensible and return
/// values of type `S`.
#[stable]
pub trait Trigonometry<S> {
/// Compute the sine of the object.
fn sin(&self) -> S;
/// Compute the cosine of the object.
fn cos(&self) -> S;
/// Compute the tangent of the object.
fn tan(&self) -> S;
// /// Compute the cosecant of the object.
// fn csc(&self) -> S;
// /// Compute the secant of the object.
// fn sec(&self) -> S;
// /// Compute the cotangent of the object.
// fn cot(&self) -> S;
}
/*
The Angle enum and its implementations.
*/
/// Base floating point types
pub trait BaseFloat: Primitive + FromPrimitive + fmt::Show + fmt::Float + Float + FloatMath {}
impl BaseFloat for f32 {}
impl BaseFloat for f64 {}
/// Encompasses representations of angles in the Euclidean plane.
#[deriving(Clone, PartialEq, PartialOrd, Hash)]
pub enum Angle<S> {
/// An angle in radians.
#[stable] Rad(S),
/// An angle in degrees.
#[stable] Deg(S),
/// An angle in [gradians](http://en.wikipedia.org/wiki/Grad_(angle)).
#[stable] Grad(S),
/// An angle in [turns](http://en.wikipedia.org/wiki/Turn_(geometry)).
#[stable] Turn(S),
/// An angle as it would appear on the face of a clock.
#[experimental] Clock {
/// The hours portion.
pub hour: S,
/// The minutes portion.
pub minute: S,
/// The seconds portion.
pub second: S
},
}
impl<S: BaseFloat + Mul<S, S> + Div<S, S> + Rem<S, S>> Angle<S> {
/// Returns an angle in radians.
pub fn radians(s: S) -> Angle<S> { Rad(s % Float::two_pi()) }
/// Returns an angle in degrees.
pub fn degrees(s: S) -> Angle<S> { Deg(s % FromPrimitive::from_f64(360.0).unwrap()) }
/// Returns an angle in gradians.
pub fn gradians(s: S) -> Angle<S> { Grad(s % FromPrimitive::from_f64(400.0).unwrap()) }
/// Returns an angle in turns.
pub fn turns(s: S) -> Angle<S> { Turn(s.fract()) }
/// Returns an angle as it would appear on a clock.
pub fn clock_face(hour: S, minute: S, second: S) -> Angle<S> {
Clock { hour: hour, minute: minute, second: second }
}
/// Converts an angle to radians.
pub fn to_radians(&self) -> Angle<S> {
match self {
&Rad(val) => Angle::radians(val),
&Deg(val) => Angle::radians(val.to_radians()),
&Grad(val) => Angle::radians(val * Float::pi() / FromPrimitive::from_f64(200.0).unwrap()),
&Turn(val) => Angle::radians(val * Float::two_pi()),
_ => unimplemented!()
}
}
/// Converts an angle to degrees.
pub fn to_degrees(&self) -> Angle<S> {
match self {
&Rad(val) => Angle::degrees(val.to_degrees()),
&Deg(val) => Angle::degrees(val),
&Grad(val) => Angle::degrees(val * FromPrimitive::from_f64(360.0 / 400.0).unwrap()),
&Turn(val) => Angle::degrees(val * FromPrimitive::from_f64(360.0).unwrap()),
_ => unimplemented!()
}
}
/// Converts an angle to gradians.
pub fn to_gradians(&self) -> Angle<S> {
match self {
&Rad(val) => Angle::gradians(val / Float::pi() * FromPrimitive::from_f64(200.0).unwrap()),
&Deg(val) => Angle::gradians(val * FromPrimitive::from_f64(400.0 / 360.0).unwrap()),
&Grad(val) => Angle::gradians(val),
&Turn(val) => Angle::gradians(val * FromPrimitive::from_f64(400.0).unwrap()),
_ => unimplemented!()
}
}
/// Converts an angle to turns.
pub fn to_turns(&self) -> Angle<S> {
match self {
&Rad(val) => Angle::turns(val / Float::two_pi()),
&Deg(val) => Angle::turns(val / FromPrimitive::from_f64(360.0).unwrap()),
&Grad(val) => Angle::turns(val / FromPrimitive::from_f64(400.0).unwrap()),
&Turn(val) => Angle::turns(val),
_ => unimplemented!()
}
}
/// One half of the domain. In radians, this is `π`.
pub fn half() -> Angle<S> { Rad(Float::pi()) }
/// One quarter of the domain. In radians, this is `π/2`.
pub fn quarter() -> Angle<S> { Rad(Float::frac_pi_2()) }
/// One sixth of the domain. In radians, this is `π/3`.
pub fn sixth() -> Angle<S> { Rad(Float::frac_pi_3()) }
/// One eighth of the domain. In radians, this is `π/4`.
pub fn eighth() -> Angle<S> { Rad(Float::frac_pi_4()) }
/// Gets the raw value that is stored in the angle.
///
/// ## Failure
///
/// Clock-valued angles are not encoded as a single value, and so this
/// method will always fail for them.
pub fn unwrap(&self) -> S {
match self {
&Rad(s)|&Deg(s)|&Grad(s)|&Turn(s) => s,
_ => fail!("Clock values cannot be unwrapped.")
}
}
}
impl<S: BaseFloat> Add<Angle<S>, Angle<S>> for Angle<S> {
#[inline]
fn add(&self, other: &Angle<S>) -> Angle<S> {
match (self, other) {
(&Rad(val), othr) => Angle::radians(val + othr.to_radians().unwrap()),
(&Deg(val), othr) => Angle::degrees(val + othr.to_degrees().unwrap()),
(&Grad(val), othr) => Angle::gradians(val + othr.to_gradians().unwrap()),
(&Turn(val), othr) => Angle::turns(val + othr.to_turns().unwrap()),
_ => unimplemented!()
}
}
}
impl<S: BaseFloat> Sub<Angle<S>, Angle<S>> for Angle<S> {
#[inline]
fn sub(&self, other: &Angle<S>) -> Angle<S> {
match (self, other) {
(&Rad(val), othr) => Angle::radians(val - othr.to_radians().unwrap()),
(&Deg(val), othr) => Angle::degrees(val - othr.to_degrees().unwrap()),
(&Grad(val), othr) => Angle::gradians(val - othr.to_gradians().unwrap()),
(&Turn(val), othr) => Angle::turns(val - othr.to_turns().unwrap()),
_ => unimplemented!()
}
}
}
impl<S: BaseFloat + fmt::Show> fmt::Show for Angle<S> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
&Deg(val) => write!(f, "{}°", val),
&Rad(val) => write!(f, "{} rad", val),
&Grad(val) => write!(f, "{} gon", val),
&Turn(val) => write!(f, "{} turns", val),
_ => fail!("Not yet implemented.")
}
}
}
macro_rules! angle_trigonometry (
($($method:ident),+ ) => (
impl<S: BaseFloat> Trigonometry<S> for Angle<S> {
$(fn $method(&self) -> S {
match self {
&Rad(val) => val.$method(),
&other => other.to_radians().$method()
}
}
)+
}
)
)
angle_trigonometry!(sin, cos, tan)
/*
Test suite.
*/
#[cfg(test)]
mod test {
use super::Angle;
#[test]
fn test_conversion() {
let half: Angle<f64> = Angle::half();
assert_eq!(half.to_degrees().to_gradians().to_turns().to_radians(), half);
assert_eq!(half.to_turns().to_gradians().to_degrees().to_radians(), half);
assert_eq!(half.to_degrees().to_turns().to_gradians().to_radians(), half);
assert_eq!(half.to_gradians().to_radians(), half);
}
#[test]
fn test_operators() {
assert_eq!(Angle::degrees(100.0f64) + Angle::degrees(100.0f64), Angle::degrees(200.0f64));
assert_eq!(Angle::degrees(100.0f64) - Angle::degrees(100.0f64), Angle::degrees(0.0f64));
assert_eq!(Angle::degrees(100.0f64) + Angle::radians(0.0f64), Angle::degrees(100.0f64));
assert_eq!(Angle::radians(1.0f64) - Angle::degrees(0.0f64), Angle::radians(1.0f64));
}
}
| { t.sin() } | identifier_body |
trig.rs | /*
This file is part of trig-rs, a library for doing typesafe trigonometry
with a variety of angle formats (radians, degrees, grad, turns, and so on).
*/
//! # `trig-rs`: Typesafe Trigonometric Primitives
//!
//! Leverage Rust's super-powered enums to create a typesafe system for
//! trigonometry in degrees, radians, and more.
//!
//! The code is hosted on [GitHub](https://github.com/atheriel/trig-rs), and a
//! copy of the documentation should be available at
//! [Rust-CI](http://www.rust-ci.org/atheriel/trig-rs/doc/trig/).
//!
//! ## Examples
//!
//! ```rust
//! use trig::{Angle, Rad, sin, cos};
//!
//! // Angle can be constructed in both common formats:
//! let angle1: Angle<f64> = Angle::degrees(180.0);
//! let angle2: Angle<f64> = Angle::radians(Float::pi());
//!
//! // As well as some more estoric ones:
//! let angle3: Angle<f64> = Angle::gradians(200.0);
//! let angle4: Angle<f64> = Angle::turns(0.5);
//!
//! // And convert between them seemlessly:
//! match angle4.to_radians() {
//! Rad(val) => println!("0.5 turns is {}!", Rad(val)),
//! _ => fail!("But I wanted radians!")
//! }
//!
//! // We can use the top-level trigonometric functions on any of them:
//! assert_eq!(sin(angle1), sin(angle2));
//! assert_eq!(cos(angle3), cos(angle4));
//!
//! // We can also concatenate angles using Rust's + and - syntax, which will
//! // automatically handle conversion between different angle formats:
//! assert_eq!(angle1 + angle2, angle1 + angle3);
//!
//! // Note that angles are guaranteed to fall in the domains you'd expect
//! // them to:
//! assert_eq!(angle1, angle1 + angle1 + angle1)
//! ```
#![crate_name = "trig"]
#![comment = "Provides trigonometric primitives."]
#![crate_type = "dylib"]
#![crate_type = "rlib"]
#![unstable]
#![feature(macro_rules)]
#![feature(struct_variant)]
use std::fmt;
/*
Top-level functions.
*/
/// Calculate the sine.
#[stable] #[inline] pub fn sin<S: BaseFloat, T: Trigonometry<S>>(t: T) -> S { t.sin() }
/// Calculate the cosine.
#[stable] #[inline] pub fn cos<S: BaseFloat, T: Trigonometry<S>>(t: T) -> S { t.cos() }
/// Calculate the tangent.
#[stable] #[inline] pub fn tan<S: BaseFloat, T: Trigonometry<S>>(t: T) -> S { t.tan() }
/// Calculate the arcsine (in radians).
#[inline] pub fn asin<S: BaseFloat>(s: S) -> Angle<S> { Angle::radians(s.asin()) }
/// Calculate the arccosine (in radians).
#[inline] pub fn acos<S: BaseFloat>(s: S) -> Angle<S> { Angle::radians(s.acos()) }
/// Calculate the arctangent (in radians).
#[inline] pub fn atan<S: BaseFloat>(s: S) -> Angle<S> { Angle::radians(s.atan()) }
/*
The Trigonometry trait.
*/
/// Represents an object for which trigonometric methods are sensible and return
/// values of type `S`.
#[stable]
pub trait Trigonometry<S> {
/// Compute the sine of the object.
fn sin(&self) -> S;
/// Compute the cosine of the object.
fn cos(&self) -> S;
/// Compute the tangent of the object.
fn tan(&self) -> S;
// /// Compute the cosecant of the object.
// fn csc(&self) -> S;
// /// Compute the secant of the object.
// fn sec(&self) -> S;
// /// Compute the cotangent of the object.
// fn cot(&self) -> S;
}
/*
The Angle enum and its implementations.
*/
/// Base floating point types
pub trait BaseFloat: Primitive + FromPrimitive + fmt::Show + fmt::Float + Float + FloatMath {}
impl BaseFloat for f32 {}
impl BaseFloat for f64 {}
/// Encompasses representations of angles in the Euclidean plane.
#[deriving(Clone, PartialEq, PartialOrd, Hash)]
pub enum Angle<S> {
/// An angle in radians.
#[stable] Rad(S),
/// An angle in degrees.
#[stable] Deg(S),
/// An angle in [gradians](http://en.wikipedia.org/wiki/Grad_(angle)).
#[stable] Grad(S),
/// An angle in [turns](http://en.wikipedia.org/wiki/Turn_(geometry)).
#[stable] Turn(S),
/// An angle as it would appear on the face of a clock.
#[experimental] Clock {
/// The hours portion.
pub hour: S,
/// The minutes portion.
pub minute: S,
/// The seconds portion.
pub second: S
},
}
impl<S: BaseFloat + Mul<S, S> + Div<S, S> + Rem<S, S>> Angle<S> {
/// Returns an angle in radians.
pub fn | (s: S) -> Angle<S> { Rad(s % Float::two_pi()) }
/// Returns an angle in degrees.
pub fn degrees(s: S) -> Angle<S> { Deg(s % FromPrimitive::from_f64(360.0).unwrap()) }
/// Returns an angle in gradians.
pub fn gradians(s: S) -> Angle<S> { Grad(s % FromPrimitive::from_f64(400.0).unwrap()) }
/// Returns an angle in turns.
pub fn turns(s: S) -> Angle<S> { Turn(s.fract()) }
/// Returns an angle as it would appear on a clock.
pub fn clock_face(hour: S, minute: S, second: S) -> Angle<S> {
Clock { hour: hour, minute: minute, second: second }
}
/// Converts an angle to radians.
pub fn to_radians(&self) -> Angle<S> {
match self {
&Rad(val) => Angle::radians(val),
&Deg(val) => Angle::radians(val.to_radians()),
&Grad(val) => Angle::radians(val * Float::pi() / FromPrimitive::from_f64(200.0).unwrap()),
&Turn(val) => Angle::radians(val * Float::two_pi()),
_ => unimplemented!()
}
}
/// Converts an angle to degrees.
pub fn to_degrees(&self) -> Angle<S> {
match self {
&Rad(val) => Angle::degrees(val.to_degrees()),
&Deg(val) => Angle::degrees(val),
&Grad(val) => Angle::degrees(val * FromPrimitive::from_f64(360.0 / 400.0).unwrap()),
&Turn(val) => Angle::degrees(val * FromPrimitive::from_f64(360.0).unwrap()),
_ => unimplemented!()
}
}
/// Converts an angle to gradians.
pub fn to_gradians(&self) -> Angle<S> {
match self {
&Rad(val) => Angle::gradians(val / Float::pi() * FromPrimitive::from_f64(200.0).unwrap()),
&Deg(val) => Angle::gradians(val * FromPrimitive::from_f64(400.0 / 360.0).unwrap()),
&Grad(val) => Angle::gradians(val),
&Turn(val) => Angle::gradians(val * FromPrimitive::from_f64(400.0).unwrap()),
_ => unimplemented!()
}
}
/// Converts an angle to turns.
pub fn to_turns(&self) -> Angle<S> {
match self {
&Rad(val) => Angle::turns(val / Float::two_pi()),
&Deg(val) => Angle::turns(val / FromPrimitive::from_f64(360.0).unwrap()),
&Grad(val) => Angle::turns(val / FromPrimitive::from_f64(400.0).unwrap()),
&Turn(val) => Angle::turns(val),
_ => unimplemented!()
}
}
/// One half of the domain. In radians, this is `π`.
pub fn half() -> Angle<S> { Rad(Float::pi()) }
/// One quarter of the domain. In radians, this is `π/2`.
pub fn quarter() -> Angle<S> { Rad(Float::frac_pi_2()) }
/// One sixth of the domain. In radians, this is `π/3`.
pub fn sixth() -> Angle<S> { Rad(Float::frac_pi_3()) }
/// One eighth of the domain. In radians, this is `π/4`.
pub fn eighth() -> Angle<S> { Rad(Float::frac_pi_4()) }
/// Gets the raw value that is stored in the angle.
///
/// ## Failure
///
/// Clock-valued angles are not encoded as a single value, and so this
/// method will always fail for them.
pub fn unwrap(&self) -> S {
match self {
&Rad(s)|&Deg(s)|&Grad(s)|&Turn(s) => s,
_ => fail!("Clock values cannot be unwrapped.")
}
}
}
impl<S: BaseFloat> Add<Angle<S>, Angle<S>> for Angle<S> {
#[inline]
fn add(&self, other: &Angle<S>) -> Angle<S> {
match (self, other) {
(&Rad(val), othr) => Angle::radians(val + othr.to_radians().unwrap()),
(&Deg(val), othr) => Angle::degrees(val + othr.to_degrees().unwrap()),
(&Grad(val), othr) => Angle::gradians(val + othr.to_gradians().unwrap()),
(&Turn(val), othr) => Angle::turns(val + othr.to_turns().unwrap()),
_ => unimplemented!()
}
}
}
impl<S: BaseFloat> Sub<Angle<S>, Angle<S>> for Angle<S> {
#[inline]
fn sub(&self, other: &Angle<S>) -> Angle<S> {
match (self, other) {
(&Rad(val), othr) => Angle::radians(val - othr.to_radians().unwrap()),
(&Deg(val), othr) => Angle::degrees(val - othr.to_degrees().unwrap()),
(&Grad(val), othr) => Angle::gradians(val - othr.to_gradians().unwrap()),
(&Turn(val), othr) => Angle::turns(val - othr.to_turns().unwrap()),
_ => unimplemented!()
}
}
}
impl<S: BaseFloat + fmt::Show> fmt::Show for Angle<S> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
&Deg(val) => write!(f, "{}°", val),
&Rad(val) => write!(f, "{} rad", val),
&Grad(val) => write!(f, "{} gon", val),
&Turn(val) => write!(f, "{} turns", val),
_ => fail!("Not yet implemented.")
}
}
}
macro_rules! angle_trigonometry (
($($method:ident),+ ) => (
impl<S: BaseFloat> Trigonometry<S> for Angle<S> {
$(fn $method(&self) -> S {
match self {
&Rad(val) => val.$method(),
&other => other.to_radians().$method()
}
}
)+
}
)
)
angle_trigonometry!(sin, cos, tan)
/*
Test suite.
*/
#[cfg(test)]
mod test {
use super::Angle;
#[test]
fn test_conversion() {
let half: Angle<f64> = Angle::half();
assert_eq!(half.to_degrees().to_gradians().to_turns().to_radians(), half);
assert_eq!(half.to_turns().to_gradians().to_degrees().to_radians(), half);
assert_eq!(half.to_degrees().to_turns().to_gradians().to_radians(), half);
assert_eq!(half.to_gradians().to_radians(), half);
}
#[test]
fn test_operators() {
assert_eq!(Angle::degrees(100.0f64) + Angle::degrees(100.0f64), Angle::degrees(200.0f64));
assert_eq!(Angle::degrees(100.0f64) - Angle::degrees(100.0f64), Angle::degrees(0.0f64));
assert_eq!(Angle::degrees(100.0f64) + Angle::radians(0.0f64), Angle::degrees(100.0f64));
assert_eq!(Angle::radians(1.0f64) - Angle::degrees(0.0f64), Angle::radians(1.0f64));
}
}
| radians | identifier_name |
trig.rs | /*
This file is part of trig-rs, a library for doing typesafe trigonometry
with a variety of angle formats (radians, degrees, grad, turns, and so on).
*/
//! # `trig-rs`: Typesafe Trigonometric Primitives
//!
//! Leverage Rust's super-powered enums to create a typesafe system for
//! trigonometry in degrees, radians, and more. | //! copy of the documentation should be available at
//! [Rust-CI](http://www.rust-ci.org/atheriel/trig-rs/doc/trig/).
//!
//! ## Examples
//!
//! ```rust
//! use trig::{Angle, Rad, sin, cos};
//!
//! // Angle can be constructed in both common formats:
//! let angle1: Angle<f64> = Angle::degrees(180.0);
//! let angle2: Angle<f64> = Angle::radians(Float::pi());
//!
//! // As well as some more estoric ones:
//! let angle3: Angle<f64> = Angle::gradians(200.0);
//! let angle4: Angle<f64> = Angle::turns(0.5);
//!
//! // And convert between them seemlessly:
//! match angle4.to_radians() {
//! Rad(val) => println!("0.5 turns is {}!", Rad(val)),
//! _ => fail!("But I wanted radians!")
//! }
//!
//! // We can use the top-level trigonometric functions on any of them:
//! assert_eq!(sin(angle1), sin(angle2));
//! assert_eq!(cos(angle3), cos(angle4));
//!
//! // We can also concatenate angles using Rust's + and - syntax, which will
//! // automatically handle conversion between different angle formats:
//! assert_eq!(angle1 + angle2, angle1 + angle3);
//!
//! // Note that angles are guaranteed to fall in the domains you'd expect
//! // them to:
//! assert_eq!(angle1, angle1 + angle1 + angle1)
//! ```
#![crate_name = "trig"]
#![comment = "Provides trigonometric primitives."]
#![crate_type = "dylib"]
#![crate_type = "rlib"]
#![unstable]
#![feature(macro_rules)]
#![feature(struct_variant)]
use std::fmt;
/*
Top-level functions.
*/
/// Calculate the sine.
#[stable] #[inline] pub fn sin<S: BaseFloat, T: Trigonometry<S>>(t: T) -> S { t.sin() }
/// Calculate the cosine.
#[stable] #[inline] pub fn cos<S: BaseFloat, T: Trigonometry<S>>(t: T) -> S { t.cos() }
/// Calculate the tangent.
#[stable] #[inline] pub fn tan<S: BaseFloat, T: Trigonometry<S>>(t: T) -> S { t.tan() }
/// Calculate the arcsine (in radians).
#[inline] pub fn asin<S: BaseFloat>(s: S) -> Angle<S> { Angle::radians(s.asin()) }
/// Calculate the arccosine (in radians).
#[inline] pub fn acos<S: BaseFloat>(s: S) -> Angle<S> { Angle::radians(s.acos()) }
/// Calculate the arctangent (in radians).
#[inline] pub fn atan<S: BaseFloat>(s: S) -> Angle<S> { Angle::radians(s.atan()) }
/*
The Trigonometry trait.
*/
/// Represents an object for which trigonometric methods are sensible and return
/// values of type `S`.
#[stable]
pub trait Trigonometry<S> {
/// Compute the sine of the object.
fn sin(&self) -> S;
/// Compute the cosine of the object.
fn cos(&self) -> S;
/// Compute the tangent of the object.
fn tan(&self) -> S;
// /// Compute the cosecant of the object.
// fn csc(&self) -> S;
// /// Compute the secant of the object.
// fn sec(&self) -> S;
// /// Compute the cotangent of the object.
// fn cot(&self) -> S;
}
/*
The Angle enum and its implementations.
*/
/// Base floating point types
pub trait BaseFloat: Primitive + FromPrimitive + fmt::Show + fmt::Float + Float + FloatMath {}
impl BaseFloat for f32 {}
impl BaseFloat for f64 {}
/// Encompasses representations of angles in the Euclidean plane.
#[deriving(Clone, PartialEq, PartialOrd, Hash)]
pub enum Angle<S> {
/// An angle in radians.
#[stable] Rad(S),
/// An angle in degrees.
#[stable] Deg(S),
/// An angle in [gradians](http://en.wikipedia.org/wiki/Grad_(angle)).
#[stable] Grad(S),
/// An angle in [turns](http://en.wikipedia.org/wiki/Turn_(geometry)).
#[stable] Turn(S),
/// An angle as it would appear on the face of a clock.
#[experimental] Clock {
/// The hours portion.
pub hour: S,
/// The minutes portion.
pub minute: S,
/// The seconds portion.
pub second: S
},
}
impl<S: BaseFloat + Mul<S, S> + Div<S, S> + Rem<S, S>> Angle<S> {
/// Returns an angle in radians.
pub fn radians(s: S) -> Angle<S> { Rad(s % Float::two_pi()) }
/// Returns an angle in degrees.
pub fn degrees(s: S) -> Angle<S> { Deg(s % FromPrimitive::from_f64(360.0).unwrap()) }
/// Returns an angle in gradians.
pub fn gradians(s: S) -> Angle<S> { Grad(s % FromPrimitive::from_f64(400.0).unwrap()) }
/// Returns an angle in turns.
pub fn turns(s: S) -> Angle<S> { Turn(s.fract()) }
/// Returns an angle as it would appear on a clock.
pub fn clock_face(hour: S, minute: S, second: S) -> Angle<S> {
Clock { hour: hour, minute: minute, second: second }
}
/// Converts an angle to radians.
pub fn to_radians(&self) -> Angle<S> {
match self {
&Rad(val) => Angle::radians(val),
&Deg(val) => Angle::radians(val.to_radians()),
&Grad(val) => Angle::radians(val * Float::pi() / FromPrimitive::from_f64(200.0).unwrap()),
&Turn(val) => Angle::radians(val * Float::two_pi()),
_ => unimplemented!()
}
}
/// Converts an angle to degrees.
pub fn to_degrees(&self) -> Angle<S> {
match self {
&Rad(val) => Angle::degrees(val.to_degrees()),
&Deg(val) => Angle::degrees(val),
&Grad(val) => Angle::degrees(val * FromPrimitive::from_f64(360.0 / 400.0).unwrap()),
&Turn(val) => Angle::degrees(val * FromPrimitive::from_f64(360.0).unwrap()),
_ => unimplemented!()
}
}
/// Converts an angle to gradians.
pub fn to_gradians(&self) -> Angle<S> {
match self {
&Rad(val) => Angle::gradians(val / Float::pi() * FromPrimitive::from_f64(200.0).unwrap()),
&Deg(val) => Angle::gradians(val * FromPrimitive::from_f64(400.0 / 360.0).unwrap()),
&Grad(val) => Angle::gradians(val),
&Turn(val) => Angle::gradians(val * FromPrimitive::from_f64(400.0).unwrap()),
_ => unimplemented!()
}
}
/// Converts an angle to turns.
pub fn to_turns(&self) -> Angle<S> {
match self {
&Rad(val) => Angle::turns(val / Float::two_pi()),
&Deg(val) => Angle::turns(val / FromPrimitive::from_f64(360.0).unwrap()),
&Grad(val) => Angle::turns(val / FromPrimitive::from_f64(400.0).unwrap()),
&Turn(val) => Angle::turns(val),
_ => unimplemented!()
}
}
/// One half of the domain. In radians, this is `π`.
pub fn half() -> Angle<S> { Rad(Float::pi()) }
/// One quarter of the domain. In radians, this is `π/2`.
pub fn quarter() -> Angle<S> { Rad(Float::frac_pi_2()) }
/// One sixth of the domain. In radians, this is `π/3`.
pub fn sixth() -> Angle<S> { Rad(Float::frac_pi_3()) }
/// One eighth of the domain. In radians, this is `π/4`.
pub fn eighth() -> Angle<S> { Rad(Float::frac_pi_4()) }
/// Gets the raw value that is stored in the angle.
///
/// ## Failure
///
/// Clock-valued angles are not encoded as a single value, and so this
/// method will always fail for them.
pub fn unwrap(&self) -> S {
match self {
&Rad(s)|&Deg(s)|&Grad(s)|&Turn(s) => s,
_ => fail!("Clock values cannot be unwrapped.")
}
}
}
impl<S: BaseFloat> Add<Angle<S>, Angle<S>> for Angle<S> {
#[inline]
fn add(&self, other: &Angle<S>) -> Angle<S> {
match (self, other) {
(&Rad(val), othr) => Angle::radians(val + othr.to_radians().unwrap()),
(&Deg(val), othr) => Angle::degrees(val + othr.to_degrees().unwrap()),
(&Grad(val), othr) => Angle::gradians(val + othr.to_gradians().unwrap()),
(&Turn(val), othr) => Angle::turns(val + othr.to_turns().unwrap()),
_ => unimplemented!()
}
}
}
impl<S: BaseFloat> Sub<Angle<S>, Angle<S>> for Angle<S> {
#[inline]
fn sub(&self, other: &Angle<S>) -> Angle<S> {
match (self, other) {
(&Rad(val), othr) => Angle::radians(val - othr.to_radians().unwrap()),
(&Deg(val), othr) => Angle::degrees(val - othr.to_degrees().unwrap()),
(&Grad(val), othr) => Angle::gradians(val - othr.to_gradians().unwrap()),
(&Turn(val), othr) => Angle::turns(val - othr.to_turns().unwrap()),
_ => unimplemented!()
}
}
}
impl<S: BaseFloat + fmt::Show> fmt::Show for Angle<S> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
&Deg(val) => write!(f, "{}°", val),
&Rad(val) => write!(f, "{} rad", val),
&Grad(val) => write!(f, "{} gon", val),
&Turn(val) => write!(f, "{} turns", val),
_ => fail!("Not yet implemented.")
}
}
}
macro_rules! angle_trigonometry (
($($method:ident),+ ) => (
impl<S: BaseFloat> Trigonometry<S> for Angle<S> {
$(fn $method(&self) -> S {
match self {
&Rad(val) => val.$method(),
&other => other.to_radians().$method()
}
}
)+
}
)
)
angle_trigonometry!(sin, cos, tan)
/*
Test suite.
*/
#[cfg(test)]
mod test {
use super::Angle;
#[test]
fn test_conversion() {
let half: Angle<f64> = Angle::half();
assert_eq!(half.to_degrees().to_gradians().to_turns().to_radians(), half);
assert_eq!(half.to_turns().to_gradians().to_degrees().to_radians(), half);
assert_eq!(half.to_degrees().to_turns().to_gradians().to_radians(), half);
assert_eq!(half.to_gradians().to_radians(), half);
}
#[test]
fn test_operators() {
assert_eq!(Angle::degrees(100.0f64) + Angle::degrees(100.0f64), Angle::degrees(200.0f64));
assert_eq!(Angle::degrees(100.0f64) - Angle::degrees(100.0f64), Angle::degrees(0.0f64));
assert_eq!(Angle::degrees(100.0f64) + Angle::radians(0.0f64), Angle::degrees(100.0f64));
assert_eq!(Angle::radians(1.0f64) - Angle::degrees(0.0f64), Angle::radians(1.0f64));
}
} | //!
//! The code is hosted on [GitHub](https://github.com/atheriel/trig-rs), and a | random_line_split |
ViewProjectDependence.ts | import * as css from './ViewProjectDependence.m.css';
import ThemedMixin, { theme } from '@dojo/framework/core/mixins/Themed';
import I18nMixin from '@dojo/framework/core/mixins/I18n';
import WidgetBase from '@dojo/framework/core/WidgetBase';
import { v, w } from '@dojo/framework/core/vdom';
import * as c from '@blocklang/bootstrap-classes';
import {
Repository,
RepositoryResourceGroup,
CommitInfo,
WithTarget,
PagedComponentRepos,
ComponentRepoInfo,
ApiRepo,
ApiRepoVersion,
ComponentRepoVersion,
ProjectDependenceData,
} from '../../interfaces';
import Spinner from '../../widgets/spinner';
import { isEmpty, getProgramingLanguageName, getRepoCategoryName, getProgramingLanguageColor } from '../../util';
import Exception from '../error/Exception';
import RepositoryHeader from '../widgets/RepositoryHeader';
import messageBundle from '../../nls/main';
import {
RepositoryResourcePathPayload,
QueryPayload,
ProjectDependencePayload,
ProjectDependenceIdPayload,
ProjectDependenceVersionPayload,
ProjectDependenceWithProjectPathPayload,
} from '../../processes/interfaces';
import LatestCommitInfo from './widgets/LatestCommitInfo';
import ProjectResourceBreadcrumb from './widgets/ProjectResourceBreadcrumb';
import watch from '@dojo/framework/core/decorators/watch';
import FontAwesomeIcon from '@blocklang/dojo-fontawesome/FontAwesomeIcon';
import Pagination from '../../widgets/pagination';
import Moment from '../../widgets/moment';
import { findIndex, find } from '@dojo/framework/shim/array';
import * as lodash from 'lodash';
import { DNode } from '@dojo/framework/core/interfaces';
import { IconPrefix, IconName } from '@fortawesome/fontawesome-svg-core';
import { RepoType } from '../../constant';
export interface ViewProjectDependenceProperties {
loggedUsername: string;
repository: Repository;
sourceId: number;
pathes: RepositoryResourceGroup[];
pagedComponentRepos: PagedComponentRepos;
dependences: ProjectDependenceData[];
latestCommitInfo: CommitInfo;
onOpenGroup: (opt: RepositoryResourcePathPayload) => void;
onQueryComponentRepos: (opt: QueryPayload) => void;
onAddDependence: (opt: ProjectDependenceWithProjectPathPayload) => void;
onDeleteDependence: (opt: ProjectDependenceIdPayload) => void;
onShowDependenceVersions: (opt: ProjectDependencePayload) => void;
onUpdateDependenceVersion: (opt: ProjectDependenceVersionPayload) => void;
}
interface GroupedApiRepo {
apiRepo: ApiRepo;
apiRepoVersions: ApiRepoVersion[];
}
@theme(css)
export default class ViewProjectDependence extends ThemedMixin(I18nMixin(WidgetBase))<ViewProjectDependenceProperties> {
private _localizedMessages = this.localizeBundle(messageBundle);
@watch()
private _search: string = '';
protected render() {
const { repository } = this.properties;
if (!repository) {
return v('div', { classes: [c.mt_5] }, [w(Spinner, {})]);
}
if (this._isNotFound()) {
return w(Exception, { type: '404' });
}
return v('div', { classes: [css.root, c.container] }, [
this._renderHeader(),
this._renderNavigation(),
this._renderDependenceCard(),
]);
}
private _isNotFound() {
const { repository } = this.properties;
return isEmpty(repository);
}
private _renderHeader() {
const {
messages: { privateRepositoryTitle },
} = this._localizedMessages;
const { repository } = this.properties;
return w(RepositoryHeader, { repository, privateRepositoryTitle });
}
private _renderNavigation() {
const { repository, pathes, onOpenGroup } = this.properties;
return v('div', { classes: [c.d_flex, c.justify_content_between, c.mb_2] }, [
v('div', {}, [w(ProjectResourceBreadcrumb, { repository, pathes, onOpenGroup })]),
]);
}
private _renderDependenceCard() {
const { latestCommitInfo } = this.properties;
return v('div', { classes: [c.card, !latestCommitInfo ? c.border_top_0 : undefined] }, [
w(LatestCommitInfo, { latestCommitInfo, showBottomBorder: true }), // 最近提交信息区
this._renderDependenceEditor(),
]);
}
private _renderDependenceEditor() {
return v('div', { classes: [c.card_body] }, [
this._renderComponentRepoSearchPart(),
// 显示项目依赖
// 1. 如果没有依赖,则显示提示信息
// 2. 否则显示依赖
this._renderDependencePart(),
]);
}
private _renderComponentRepoSearchPart() {
return v('div', { classes: [c.py_4, c.border_bottom] }, [
this._renderSearchForm(),
this._renderSearchTip(),
this._renderSearchedComponentRepos(),
]);
}
private _renderSearchForm() {
const {
messages: { componentSearchForProjectPlaceholder },
} = this._localizedMessages;
return v('form', {}, [
v('div', { classes: [c.form_group] }, [
v('input', {
type: 'text',
classes: [c.form_control],
placeholder: `${componentSearchForProjectPlaceholder}`,
oninput: this._onSearchComponentRepo,
value: `${this._search}`,
}),
]),
]);
}
private _renderSearchTip() {
if (this._search === '') {
return;
}
const { pagedComponentRepos } = this.properties;
let length = 0;
if (pagedComponentRepos && pagedComponentRepos.content) {
length = pagedComponentRepos.content.length;
}
return v('div', { classes: [c.d_flex, c.justify_content_between, c.align_items_center, c.border_bottom] }, [
v('div', [
'使用 ',
v('strong', [`${this._search}`]),
' 共查出 ',
v('strong', [`${length}`]),
' 个组件仓库',
]),
v('div', [
v(
'button',
{
classes: [c.btn, c.btn_link, c.btn_sm, css.btnLink],
onclick: this._onClearSearchText,
},
[w(FontAwesomeIcon, { icon: 'times', classes: [c.mr_1] }), '清空搜索条件']
),
]),
]);
}
private _onClearSearchText() {
this._search = '';
this.properties.onQueryComponentRepos({ query: this._search });
}
private _onSearchComponentRepo({ target: { value: query } }: WithTarget) {
this._search = query;
this.properties.onQueryComponentRepos({ query });
}
private _renderSearchedComponentRepos(): DNode {
const { pagedComponentRepos } = this.properties;
if (!pagedComponentRepos) {
return;
}
if (pagedComponentRepos.content.length === 0) {
return this._renderEmptyComponentRepo();
}
return v('div', { key: 'component-repos-part', classes: [] }, [
// 组件库列表
this._renderComponentRepos(),
// 分页
this._renderPagination(),
]);
}
private _renderEmptyComponentRepo() {
return v(
'div',
{
key: 'no-component-repos',
classes: [c.alert, c.alert_secondary, c.mx_auto, c.text_center, c.mt_3, c.py_4],
},
[v('strong', {}, ['没有查到组件仓库'])]
);
}
private _renderComponentRepos() {
const { repository, pagedComponentRepos, dependences = [], onAddDependence } = this.properties;
return v(
'ul',
{ classes: [c.list_group, c.mt_2] },
pagedComponentRepos.content.map((item) => {
const used =
findIndex(dependences, (dependence) => item.componentRepo.id === dependence.componentRepo.id) > -1;
return w(ComponentRepoItem, {
repository,
componentRepoInfo: item,
used,
onAddDependence,
});
})
);
}
private _renderPagination() {
const { pagedComponentRepos } = this.properties;
if (!pagedComponentRepos) {
return;
}
const { first, last, size, number, totalPages } = pagedComponentRepos;
return w(Pagination, {
totalPages,
first,
last,
number,
size,
});
}
private _renderDependencePart() {
const { dependences = [] } = this.properties;
if (dependences.length === 0) {
return this._renderNoDependenceMessage();
}
return this._renderDependenceItems();
}
private _renderDependenceItems() {
return v('div', { key: 'dependence-items', classes: [c.mt_4] }, [
...this._renderApiRepos(),
...this._renderDevComponentRepos(),
...this._renderBuildComponentRepos(),
]);
}
private _renderApiRepos() {
const { dependences = [] } = this.properties;
const groupedApiRepos: GroupedApiRepo[] = [];
dependences.forEach((item) => {
const findedApiRepo = find(
groupedApiRepos,
(groupedApiRepo) => item.apiRepo.id === groupedApiRepo.apiRepo.id
);
if (findedApiRepo) {
// 如果已存在,则再查看版本是否添加
const indexApiRepoVersion = findIndex(
findedApiRepo.apiRepoVersions,
(version) => version.id === item.apiRepoVersion.id
);
if (indexApiRepoVersion === -1) {
findedApiRepo.apiRepoVersions.push(item.apiRepoVersion);
}
} else {
// groupedApiRepos 中不存在时,追加
groupedApiRepos.push({ apiRepo: item.apiRepo, apiRepoVersions: [item.apiRepoVersion] });
}
});
return [
v('div', {}, [v('strong', ['API'])]),
v(
'div',
{ classes: [c.pl_4, c.border_left] },
groupedApiRepos.map((item) =>
v('div', {}, [
// 当前只支持 git
w(FontAwesomeIcon, { icon: ['fab', 'git-alt'], classes: [c.text_muted], title: 'git 仓库' }),
v(
'a',
{
target: '_blank',
href: `${item.apiRepo.gitRepoUrl}`,
title: '跳转到 API 仓库',
classes: [c.ml_1],
},
[`${item.apiRepo.gitRepoOwner}/${item.apiRepo.gitRepoName}`]
),
v(
'span',
{ classes: [c.ml_3] },
item.apiRepoVersions.map((version) =>
v('span', { classes: [c.mr_1, c.badge, c.badge_secondary] }, [`${version.version}`])
)
),
])
)
),
];
}
private _renderDevComponentRepos(): DNode[] {
const { dependences = [] } = this.properties;
const devDependences = dependences.filter((dependence) => dependence.componentRepo.repoType === RepoType.IDE);
if (devDependences.length === 0) {
return [];
}
return [v('div', {}, [v('strong', ['开发'])]), ...this._renderComponentRepoDependences(devDependences)];
}
private _renderBuildComponentRepos(): DNode[] {
const { dependences = [] } = this.properties;
const buildDependences = dependences.filter(
(dependence) => dependence.componentRepo.repoType === RepoType.PROD
);
if (buildDependences.length === 0) {
return [];
}
return [v('div', {}, [v('strong', ['构建'])]), ...this._renderComponentRepoDependences(buildDependences)];
}
private _renderComponentRepoDependences(dependences: ProjectDependenceData[]): DNode[] {
const { repository, onDeleteDependence, onShowDependenceVersions, onUpdateDependenceVersion } = this.properties;
// 按照 appType 分组
const groupedDependences = lodash.groupBy(dependences, (dependence) => dependence.componentRepoVersion.appType);
const vnodes: DNode[] = [];
for (const key in groupedDependences) {
const values = groupedDependences[key];
vnodes.push(
v('div', { classes: [c.pl_4, c.border_left] }, [
v('div', {}, [`${key}`]),
v(
'div',
{ classes: [c.pl_4, c.border_left] },
values.map((item) =>
w(DependenceRow, {
repository,
dependence: item,
versions: item.componentRepoVersions || [],
onDeleteDependence,
onShowDependenceVersions,
onUpdateDependenceVersion,
})
)
),
])
);
}
return vnodes;
}
private _renderNoDependenceMessage() {
return v('div', { key: 'no-dependence', classes: [c.mt_4] }, [
v('div', { classes: [c.alert, c.alert_primary, c.mx_auto, c.text_center, c.py_4] }, [
v('strong', {}, ['此项目尚未配置依赖']),
]),
]);
}
}
interface ComponentRepoItemProperties {
repository: Repository;
componentRepoInfo: ComponentRepoInfo;
used: boolean;
onAddDependence: (opt: ProjectDependenceWithProjectPathPayload) => void;
}
class ComponentRepoItem extends ThemedMixin(I18nMixin(WidgetBase))<ComponentRepoItemProperties> {
protected render() {
const {
componentRepoInfo: { componentRepo, componentRepoVersion, apiRepo },
used = false,
} = this.properties;
| return v('li', { classes: [c.list_group_item] }, [
// 如果组件库未安装,则显示“使用”按钮,否则显示“已用”文本
v('div', {}, [
v('span', { classes: [c.font_weight_bold, c.mr_2] }, [
v('img', {
width: 20,
height: 20,
classes: [c.avatar, c.mr_1],
src: `${componentRepo.createUserAvatarUrl}`,
}),
`${componentRepo.createUserName} / ${componentRepoVersion.name}`,
]),
v('span', { classes: [c.badge, c.badge_info, c.ml_3], title: '与 BlockLang 设计器集成' }, [
`${componentRepo.repoType}`,
]),
used
? v('span', { classes: [c.float_right, c.text_info] }, ['已用'])
: v(
'button',
{
classes: [c.btn, c.btn_secondary, c.btn_sm, c.float_right],
onclick: this._onAddDependence,
},
['使用']
),
]),
v('p', { itemprop: 'description', classes: [c.text_muted, c.mb_0] }, [
`${componentRepoVersion.description}`,
]),
v('div', { classes: [c.my_2] }, [
v('span', { classes: [c.border, c.rounded, c.px_1] }, [
v('span', {}, ['API: ']),
v(
'a',
{
target: '_blank',
href: `${apiRepo.gitRepoUrl}`,
title: '跳转到 API 仓库',
classes: [c.mr_1],
},
[`${apiRepo.gitRepoOwner}/${apiRepo.gitRepoName}`]
),
]),
' -> ',
v('span', { classes: [c.border, c.rounded, c.px_1] }, [
v('span', {}, ['实现: ']),
v(
'a',
{
target: '_blank',
href: `${componentRepo.gitRepoUrl}`,
title: '跳转到组件仓库',
classes: [c.mr_1],
},
[`${componentRepo.gitRepoOwner}/${componentRepo.gitRepoName}`]
),
]),
]),
v('small', { classes: [c.text_muted] }, [
v('span', { classes: [c.mr_3] }, [
w(FontAwesomeIcon, {
icon: componentRepoVersion.icon.split(' ') as [IconPrefix, IconName],
classes: [c.mr_1],
}),
`${componentRepoVersion.title}`,
]),
v('span', { classes: [c.mr_3] }, [
v('span', {
classes: [css.repoLanguageColor, c.mr_1],
styles: {
backgroundColor: `${getProgramingLanguageColor(componentRepoVersion.language)}`,
},
}),
v('span', { itemprop: 'programmingLanguage' }, [
`${getProgramingLanguageName(componentRepoVersion.language)}`,
]),
]),
v('span', { classes: [c.mr_3] }, [`${getRepoCategoryName(componentRepo.category)}`]),
v('span', { classes: [c.mr_3], title: '使用次数' }, [
w(FontAwesomeIcon, { icon: 'cube', classes: [c.mr_1] }),
'0',
]),
v('span', {}, [
w(FontAwesomeIcon, { icon: 'clock', classes: [c.mr_1] }),
'最近发布 · ',
w(Moment, { datetime: componentRepo.lastPublishTime }),
]),
]),
]);
}
private _onAddDependence() {
const {
repository,
componentRepoInfo: { componentRepo },
} = this.properties;
// componentRepoVersionId 默认使用最新版本
this.properties.onAddDependence({
owner: repository.createUserName,
repo: repository.name,
componentRepoId: componentRepo.id!,
project: '', // TODO
});
}
}
interface DependenceRowProperties {
repository: Repository;
dependence: ProjectDependenceData;
// 当前选中依赖的版本列表
versions: ComponentRepoVersion[];
onDeleteDependence: (opt: ProjectDependenceIdPayload) => void;
onShowDependenceVersions: (opt: ProjectDependencePayload) => void;
onUpdateDependenceVersion: (opt: ProjectDependenceVersionPayload) => void;
}
class DependenceRow extends ThemedMixin(I18nMixin(WidgetBase))<DependenceRowProperties> {
protected render() {
const { repository, dependence, versions, onUpdateDependenceVersion } = this.properties;
return v('div', {}, [
// 当前只支持 git
w(FontAwesomeIcon, { icon: ['fab', 'git-alt'], classes: [c.text_muted], title: 'git 仓库' }),
v(
'a',
{
target: '_blank',
href: `${dependence.apiRepo.gitRepoUrl}`,
title: '跳转到组件仓库',
classes: [c.ml_1],
},
[`${dependence.componentRepo.gitRepoOwner}/${dependence.componentRepo.gitRepoName}`]
),
v('span', { classes: [c.ml_3] }, [
v('span', { classes: [c.dropdown] }, [
v(
'button',
{
classes: [c.btn, c.btn_secondary, c.btn_sm, c.dropdown_toggle, css.dropdownButton],
type: 'button',
'data-toggle': 'dropdown',
onclick: this._onShowVersions,
},
[`${dependence.componentRepoVersion.version}`]
),
v(
'div',
{ classes: [c.dropdown_menu, css.dropdownMenu] },
versions.map((version) =>
w(DependenceVersionMenu, { repository, dependence, version, onUpdateDependenceVersion })
)
),
]),
]),
v('button', { type: 'button', classes: [c.close, c.float_right], onclick: this._onDeleteDependence }, [
v('span', { 'aria-hidden': 'true', innerHTML: '×' }),
]),
]);
}
private _onShowVersions() {
const { dependence } = this.properties;
this.properties.onShowDependenceVersions({
dependenceId: dependence.dependence.id,
componentRepoId: dependence.componentRepo.id!,
});
}
private _onDeleteDependence() {
const { repository, dependence } = this.properties;
this.properties.onDeleteDependence({
owner: repository.createUserName,
repo: repository.name,
id: dependence.dependence.id,
project: '', // TODO
});
}
}
interface DependenceVersionMenuProperties {
repository: Repository;
dependence: ProjectDependenceData;
version: ComponentRepoVersion;
onUpdateDependenceVersion: (opt: ProjectDependenceVersionPayload) => void;
}
class DependenceVersionMenu extends ThemedMixin(I18nMixin(WidgetBase))<DependenceVersionMenuProperties> {
protected render() {
const { dependence, version } = this.properties;
const isSelected = version.id === dependence.componentRepoVersion.id;
return v(
'a',
{
classes: [c.dropdown_item, isSelected ? c.active : undefined],
href: '#',
onclick: this._onUpdateVersion,
},
[`${version.version}`]
);
}
private _onUpdateVersion(event: MouseEvent) {
event.stopPropagation();
const { repository, dependence, version } = this.properties;
const isSelected = version.id === dependence.componentRepoVersion.id;
if (isSelected) {
return;
}
this.properties.onUpdateDependenceVersion({
owner: repository.createUserName,
repo: repository.name,
dependenceId: dependence.dependence.id,
componentRepoVersionId: version.id,
});
}
} | random_line_split | |
ViewProjectDependence.ts | import * as css from './ViewProjectDependence.m.css';
import ThemedMixin, { theme } from '@dojo/framework/core/mixins/Themed';
import I18nMixin from '@dojo/framework/core/mixins/I18n';
import WidgetBase from '@dojo/framework/core/WidgetBase';
import { v, w } from '@dojo/framework/core/vdom';
import * as c from '@blocklang/bootstrap-classes';
import {
Repository,
RepositoryResourceGroup,
CommitInfo,
WithTarget,
PagedComponentRepos,
ComponentRepoInfo,
ApiRepo,
ApiRepoVersion,
ComponentRepoVersion,
ProjectDependenceData,
} from '../../interfaces';
import Spinner from '../../widgets/spinner';
import { isEmpty, getProgramingLanguageName, getRepoCategoryName, getProgramingLanguageColor } from '../../util';
import Exception from '../error/Exception';
import RepositoryHeader from '../widgets/RepositoryHeader';
import messageBundle from '../../nls/main';
import {
RepositoryResourcePathPayload,
QueryPayload,
ProjectDependencePayload,
ProjectDependenceIdPayload,
ProjectDependenceVersionPayload,
ProjectDependenceWithProjectPathPayload,
} from '../../processes/interfaces';
import LatestCommitInfo from './widgets/LatestCommitInfo';
import ProjectResourceBreadcrumb from './widgets/ProjectResourceBreadcrumb';
import watch from '@dojo/framework/core/decorators/watch';
import FontAwesomeIcon from '@blocklang/dojo-fontawesome/FontAwesomeIcon';
import Pagination from '../../widgets/pagination';
import Moment from '../../widgets/moment';
import { findIndex, find } from '@dojo/framework/shim/array';
import * as lodash from 'lodash';
import { DNode } from '@dojo/framework/core/interfaces';
import { IconPrefix, IconName } from '@fortawesome/fontawesome-svg-core';
import { RepoType } from '../../constant';
export interface ViewProjectDependenceProperties {
loggedUsername: string;
repository: Repository;
sourceId: number;
pathes: RepositoryResourceGroup[];
pagedComponentRepos: PagedComponentRepos;
dependences: ProjectDependenceData[];
latestCommitInfo: CommitInfo;
onOpenGroup: (opt: RepositoryResourcePathPayload) => void;
onQueryComponentRepos: (opt: QueryPayload) => void;
onAddDependence: (opt: ProjectDependenceWithProjectPathPayload) => void;
onDeleteDependence: (opt: ProjectDependenceIdPayload) => void;
onShowDependenceVersions: (opt: ProjectDependencePayload) => void;
onUpdateDependenceVersion: (opt: ProjectDependenceVersionPayload) => void;
}
interface GroupedApiRepo {
apiRepo: ApiRepo;
apiRepoVersions: ApiRepoVersion[];
}
@theme(css)
export default class ViewProjectDependence extends ThemedMixin(I18nMixin(WidgetBase))<ViewProjectDependenceProperties> {
private _localizedMessages = this.localizeBundle(messageBundle);
@watch()
private _search: string = '';
protected render() {
const { repository } = this.properties;
if (!repository) {
return v('div', { classes: [c.mt_5] }, [w(Spinner, {})]);
}
if (this._isNotFound()) {
return w(Exception, { type: '404' });
}
return v('div', { classes: [css.root, c.container] }, [
this._renderHeader(),
this._renderNavigation(),
this._renderDependenceCard(),
]);
}
private _isNotFound() {
const { repository } = this.properties;
return isEmpty(repository);
}
private _renderHeader() {
const {
messages: { privateRepositoryTitle },
} = this._localizedMessages;
const { repository } = this.properties;
return w(RepositoryHeader, { repository, privateRepositoryTitle });
}
private _renderNavigation() {
const { repository, pathes, onOpenGroup } = this.properties;
return v('div', { classes: [c.d_flex, c.justify_content_between, c.mb_2] }, [
v('div', {}, [w(ProjectResourceBreadcrumb, { repository, pathes, onOpenGroup })]),
]);
}
private _renderDependenceCard() {
const { latestCommitInfo } = this.properties;
return v('div', { classes: [c.card, !latestCommitInfo ? c.border_top_0 : undefined] }, [
w(LatestCommitInfo, { latestCommitInfo, showBottomBorder: true }), // 最近提交信息区
this._renderDependenceEditor(),
]);
}
private _renderDependenceEditor() {
return v('div', { classes: [c.card_body] }, [
this._renderComponentRepoSearchPart(),
// 显示项目依赖
// 1. 如果没有依赖,则显示提示信息
// 2. 否则显示依赖
this._renderDependencePart(),
]);
}
private _renderComponentRepoSearchPart() {
return v('div', { classes: [c.py_4, c.border_bottom] }, [
this._renderSearchForm(),
this._renderSearchTip(),
this._renderSearchedComponentRepos(),
]);
}
private _renderSearchForm() {
const {
messages: { componentSearchForProjectPlaceholder },
} = this._localizedMessages;
return v('form', {}, [
v('div', { classes: [c.form_group] }, [
v('input', {
type: 'text',
classes: [c.form_control],
placeholder: `${componentSearchForProjectPlaceholder}`,
oninput: this._onSearchComponentRepo,
value: `${this._search}`,
}),
]),
]);
}
private _renderSearchTip() {
if (this._search === '') {
return;
}
const { pagedComponentRepos } = this.properties;
let length = 0;
if (pagedComponentRepos && pagedComponentRepos.content) {
length = pagedComponentRepos.content.length;
}
return v('div', { classes: [c.d_flex, c.justify_content_between, c.align_items_center, c.border_bottom] }, [
v('div', [
'使用 ',
v('strong', [`${this._search}`]),
' 共查出 ',
v('strong', [`${length}`]),
' 个组件仓库',
]),
v('div', [
v(
'button',
{
classes: [c.btn, c.btn_link, c.btn_sm, css.btnLink],
onclick: this._onClearSearchText,
},
[w(FontAwesomeIcon, { icon: 'times', classes: [c.mr_1] }), '清空搜索条件']
),
]),
]);
}
private _onClearSearchText() {
this._search = '';
this.properties.onQueryComponentRepos({ query: this._search });
}
private _onSearchComponentRepo({ target: { value: query } }: WithTarget) {
this._search = query;
this.properties.onQueryComponentRepos({ query });
}
private _renderSearchedComponentRepos(): DNode {
const { pagedComponentRepos } = this.properties;
if (!pagedComponentRepos) {
return;
}
if (pagedComponentRepos.content.length === 0) {
return this._renderEmptyComponentRepo();
}
return v('div', { key: 'component-repos-part', classes: [] }, [
// 组件库列表
this._renderComponentRepos(),
// 分页
this._renderPagination(),
]);
}
private _renderEmptyComponentRepo() {
return v(
'div',
{
key: 'no-component-repos',
classes: [c.alert, c.alert_secondary, c.mx_auto, c.text_center, c.mt_3, c.py_4],
},
[v('strong', {}, ['没有查到组件仓库'])]
);
}
private _renderComponentRepos() {
const { repository, pagedComponentRepos, dependences = [], onAddDependence } = this.properties;
return v(
'ul',
{ classes: [c.list_group, c.mt_2] },
pagedComponentRepos.content.map((item) => {
const used =
findIndex(dependences, (dependence) => item.componentRepo.id === dependence.componentRepo.id) > -1;
return w(ComponentRepoItem, {
repository,
componentRepoInfo: item,
used,
onAddDependence,
});
})
);
}
private _renderPagination() {
const { pagedComponentRepos } = this.properties;
if (!pagedComponentRepos) {
return;
}
const { first, last, size, number, totalPages } = pagedComponentRepos;
return w(Pagination, {
totalPages,
first,
last,
number,
size,
});
}
private _renderDependencePart() {
const { dependences = [] } = this.properties;
if (dependences.length === 0) {
return this._renderNoDependenceMessage();
}
return this._renderDependenceItems();
}
private _renderDependenceItems() {
return v('div', { key: 'dependence-items', classes: [c.mt_4] }, [
...this._renderApiRepos(),
...this._renderDevComponentRepos(),
...this._renderBuildComponentRepos(),
]);
}
private _renderApiRepos() {
const { dependences = [] } = this.properties;
const groupedApiRepos: GroupedApiRepo[] = [];
dependences.forEach((item) => {
const findedApiRepo = find(
groupedApiRepos,
(groupedApiRepo) => item.apiRepo.id === groupedApiRepo.apiRepo.id
);
if (findedApiRepo) {
// 如果已存在,则再查看版本是否添加
const indexApiRepoVersion = findIndex(
findedApiRepo.apiRepoVersions,
(version) => version.id === item.apiRepoVersion.id
);
if (indexApiRepoVersion === -1) {
findedApiRepo.apiRepoVersions.push(item.apiRepoVersion);
}
} else {
// groupedApiRepos 中不存在时,追加
groupedApiRepos.push({ apiRepo: item.apiRepo, apiRepoVersions: [item.apiRepoVersion] });
}
});
return [
v('div', {}, [v('strong', ['API'])]),
v(
'div',
{ classes: [c.pl_4, c.border_left] },
groupedApiRepos.map((item) =>
v('div', {}, [
// 当前只支持 git
w(FontAwesomeIcon, { icon: ['fab', 'git-alt'], classes: [c.text_muted], title: 'git 仓库' }),
v(
'a',
{
target: '_blank',
href: `${item.apiRepo.gitRepoUrl}`,
title: '跳转到 API 仓库',
classes: [c.ml_1],
},
[`${item.apiRepo.gitRepoOwner}/${item.apiRepo.gitRepoName}`]
),
v(
'span',
{ classes: [c.ml_3] },
item.apiRepoVersions.map((version) =>
v('span', { classes: [c.mr_1, c.badge, c.badge_secondary] }, [`${version.version}`])
)
),
])
)
),
];
}
private _renderDevComponentRepos(): DNode[] {
const { dependences = [] } = this.properties;
const devDependences = dependences.filter((dependence) => dependence.componentRepo.repoType === RepoType.IDE);
if (devDependences.length === 0) | RepoType.PROD
);
if (buildDependences.length === 0) {
return [];
}
return [v('div', {}, [v('strong', ['构建'])]), ...this._renderComponentRepoDependences(buildDependences)];
}
private _renderComponentRepoDependences(dependences: ProjectDependenceData[]): DNode[] {
const { repository, onDeleteDependence, onShowDependenceVersions, onUpdateDependenceVersion } = this.properties;
// 按照 appType 分组
const groupedDependences = lodash.groupBy(dependences, (dependence) => dependence.componentRepoVersion.appType);
const vnodes: DNode[] = [];
for (const key in groupedDependences) {
const values = groupedDependences[key];
vnodes.push(
v('div', { classes: [c.pl_4, c.border_left] }, [
v('div', {}, [`${key}`]),
v(
'div',
{ classes: [c.pl_4, c.border_left] },
values.map((item) =>
w(DependenceRow, {
repository,
dependence: item,
versions: item.componentRepoVersions || [],
onDeleteDependence,
onShowDependenceVersions,
onUpdateDependenceVersion,
})
)
),
])
);
}
return vnodes;
}
private _renderNoDependenceMessage() {
return v('div', { key: 'no-dependence', classes: [c.mt_4] }, [
v('div', { classes: [c.alert, c.alert_primary, c.mx_auto, c.text_center, c.py_4] }, [
v('strong', {}, ['此项目尚未配置依赖']),
]),
]);
}
}
interface ComponentRepoItemProperties {
repository: Repository;
componentRepoInfo: ComponentRepoInfo;
used: boolean;
onAddDependence: (opt: ProjectDependenceWithProjectPathPayload) => void;
}
class ComponentRepoItem extends ThemedMixin(I18nMixin(WidgetBase))<ComponentRepoItemProperties> {
protected render() {
const {
componentRepoInfo: { componentRepo, componentRepoVersion, apiRepo },
used = false,
} = this.properties;
return v('li', { classes: [c.list_group_item] }, [
// 如果组件库未安装,则显示“使用”按钮,否则显示“已用”文本
v('div', {}, [
v('span', { classes: [c.font_weight_bold, c.mr_2] }, [
v('img', {
width: 20,
height: 20,
classes: [c.avatar, c.mr_1],
src: `${componentRepo.createUserAvatarUrl}`,
}),
`${componentRepo.createUserName} / ${componentRepoVersion.name}`,
]),
v('span', { classes: [c.badge, c.badge_info, c.ml_3], title: '与 BlockLang 设计器集成' }, [
`${componentRepo.repoType}`,
]),
used
? v('span', { classes: [c.float_right, c.text_info] }, ['已用'])
: v(
'button',
{
classes: [c.btn, c.btn_secondary, c.btn_sm, c.float_right],
onclick: this._onAddDependence,
},
['使用']
),
]),
v('p', { itemprop: 'description', classes: [c.text_muted, c.mb_0] }, [
`${componentRepoVersion.description}`,
]),
v('div', { classes: [c.my_2] }, [
v('span', { classes: [c.border, c.rounded, c.px_1] }, [
v('span', {}, ['API: ']),
v(
'a',
{
target: '_blank',
href: `${apiRepo.gitRepoUrl}`,
title: '跳转到 API 仓库',
classes: [c.mr_1],
},
[`${apiRepo.gitRepoOwner}/${apiRepo.gitRepoName}`]
),
]),
' -> ',
v('span', { classes: [c.border, c.rounded, c.px_1] }, [
v('span', {}, ['实现: ']),
v(
'a',
{
target: '_blank',
href: `${componentRepo.gitRepoUrl}`,
title: '跳转到组件仓库',
classes: [c.mr_1],
},
[`${componentRepo.gitRepoOwner}/${componentRepo.gitRepoName}`]
),
]),
]),
v('small', { classes: [c.text_muted] }, [
v('span', { classes: [c.mr_3] }, [
w(FontAwesomeIcon, {
icon: componentRepoVersion.icon.split(' ') as [IconPrefix, IconName],
classes: [c.mr_1],
}),
`${componentRepoVersion.title}`,
]),
v('span', { classes: [c.mr_3] }, [
v('span', {
classes: [css.repoLanguageColor, c.mr_1],
styles: {
backgroundColor: `${getProgramingLanguageColor(componentRepoVersion.language)}`,
},
}),
v('span', { itemprop: 'programmingLanguage' }, [
`${getProgramingLanguageName(componentRepoVersion.language)}`,
]),
]),
v('span', { classes: [c.mr_3] }, [`${getRepoCategoryName(componentRepo.category)}`]),
v('span', { classes: [c.mr_3], title: '使用次数' }, [
w(FontAwesomeIcon, { icon: 'cube', classes: [c.mr_1] }),
'0',
]),
v('span', {}, [
w(FontAwesomeIcon, { icon: 'clock', classes: [c.mr_1] }),
'最近发布 · ',
w(Moment, { datetime: componentRepo.lastPublishTime }),
]),
]),
]);
}
private _onAddDependence() {
const {
repository,
componentRepoInfo: { componentRepo },
} = this.properties;
// componentRepoVersionId 默认使用最新版本
this.properties.onAddDependence({
owner: repository.createUserName,
repo: repository.name,
componentRepoId: componentRepo.id!,
project: '', // TODO
});
}
}
interface DependenceRowProperties {
repository: Repository;
dependence: ProjectDependenceData;
// 当前选中依赖的版本列表
versions: ComponentRepoVersion[];
onDeleteDependence: (opt: ProjectDependenceIdPayload) => void;
onShowDependenceVersions: (opt: ProjectDependencePayload) => void;
onUpdateDependenceVersion: (opt: ProjectDependenceVersionPayload) => void;
}
class DependenceRow extends ThemedMixin(I18nMixin(WidgetBase))<DependenceRowProperties> {
protected render() {
const { repository, dependence, versions, onUpdateDependenceVersion } = this.properties;
return v('div', {}, [
// 当前只支持 git
w(FontAwesomeIcon, { icon: ['fab', 'git-alt'], classes: [c.text_muted], title: 'git 仓库' }),
v(
'a',
{
target: '_blank',
href: `${dependence.apiRepo.gitRepoUrl}`,
title: '跳转到组件仓库',
classes: [c.ml_1],
},
[`${dependence.componentRepo.gitRepoOwner}/${dependence.componentRepo.gitRepoName}`]
),
v('span', { classes: [c.ml_3] }, [
v('span', { classes: [c.dropdown] }, [
v(
'button',
{
classes: [c.btn, c.btn_secondary, c.btn_sm, c.dropdown_toggle, css.dropdownButton],
type: 'button',
'data-toggle': 'dropdown',
onclick: this._onShowVersions,
},
[`${dependence.componentRepoVersion.version}`]
),
v(
'div',
{ classes: [c.dropdown_menu, css.dropdownMenu] },
versions.map((version) =>
w(DependenceVersionMenu, { repository, dependence, version, onUpdateDependenceVersion })
)
),
]),
]),
v('button', { type: 'button', classes: [c.close, c.float_right], onclick: this._onDeleteDependence }, [
v('span', { 'aria-hidden': 'true', innerHTML: '×' }),
]),
]);
}
private _onShowVersions() {
const { dependence } = this.properties;
this.properties.onShowDependenceVersions({
dependenceId: dependence.dependence.id,
componentRepoId: dependence.componentRepo.id!,
});
}
private _onDeleteDependence() {
const { repository, dependence } = this.properties;
this.properties.onDeleteDependence({
owner: repository.createUserName,
repo: repository.name,
id: dependence.dependence.id,
project: '', // TODO
});
}
}
interface DependenceVersionMenuProperties {
repository: Repository;
dependence: ProjectDependenceData;
version: ComponentRepoVersion;
onUpdateDependenceVersion: (opt: ProjectDependenceVersionPayload) => void;
}
class DependenceVersionMenu extends ThemedMixin(I18nMixin(WidgetBase))<DependenceVersionMenuProperties> {
protected render() {
const { dependence, version } = this.properties;
const isSelected = version.id === dependence.componentRepoVersion.id;
return v(
'a',
{
classes: [c.dropdown_item, isSelected ? c.active : undefined],
href: '#',
onclick: this._onUpdateVersion,
},
[`${version.version}`]
);
}
private _onUpdateVersion(event: MouseEvent) {
event.stopPropagation();
const { repository, dependence, version } = this.properties;
const isSelected = version.id === dependence.componentRepoVersion.id;
if (isSelected) {
return;
}
this.properties.onUpdateDependenceVersion({
owner: repository.createUserName,
repo: repository.name,
dependenceId: dependence.dependence.id,
componentRepoVersionId: version.id,
});
}
}
| {
return [];
}
return [v('div', {}, [v('strong', ['开发'])]), ...this._renderComponentRepoDependences(devDependences)];
}
private _renderBuildComponentRepos(): DNode[] {
const { dependences = [] } = this.properties;
const buildDependences = dependences.filter(
(dependence) => dependence.componentRepo.repoType === | identifier_body |
ViewProjectDependence.ts | import * as css from './ViewProjectDependence.m.css';
import ThemedMixin, { theme } from '@dojo/framework/core/mixins/Themed';
import I18nMixin from '@dojo/framework/core/mixins/I18n';
import WidgetBase from '@dojo/framework/core/WidgetBase';
import { v, w } from '@dojo/framework/core/vdom';
import * as c from '@blocklang/bootstrap-classes';
import {
Repository,
RepositoryResourceGroup,
CommitInfo,
WithTarget,
PagedComponentRepos,
ComponentRepoInfo,
ApiRepo,
ApiRepoVersion,
ComponentRepoVersion,
ProjectDependenceData,
} from '../../interfaces';
import Spinner from '../../widgets/spinner';
import { isEmpty, getProgramingLanguageName, getRepoCategoryName, getProgramingLanguageColor } from '../../util';
import Exception from '../error/Exception';
import RepositoryHeader from '../widgets/RepositoryHeader';
import messageBundle from '../../nls/main';
import {
RepositoryResourcePathPayload,
QueryPayload,
ProjectDependencePayload,
ProjectDependenceIdPayload,
ProjectDependenceVersionPayload,
ProjectDependenceWithProjectPathPayload,
} from '../../processes/interfaces';
import LatestCommitInfo from './widgets/LatestCommitInfo';
import ProjectResourceBreadcrumb from './widgets/ProjectResourceBreadcrumb';
import watch from '@dojo/framework/core/decorators/watch';
import FontAwesomeIcon from '@blocklang/dojo-fontawesome/FontAwesomeIcon';
import Pagination from '../../widgets/pagination';
import Moment from '../../widgets/moment';
import { findIndex, find } from '@dojo/framework/shim/array';
import * as lodash from 'lodash';
import { DNode } from '@dojo/framework/core/interfaces';
import { IconPrefix, IconName } from '@fortawesome/fontawesome-svg-core';
import { RepoType } from '../../constant';
export interface ViewProjectDependenceProperties {
loggedUsername: string;
repository: Repository;
sourceId: number;
pathes: RepositoryResourceGroup[];
pagedComponentRepos: PagedComponentRepos;
dependences: ProjectDependenceData[];
latestCommitInfo: CommitInfo;
onOpenGroup: (opt: RepositoryResourcePathPayload) => void;
onQueryComponentRepos: (opt: QueryPayload) => void;
onAddDependence: (opt: ProjectDependenceWithProjectPathPayload) => void;
onDeleteDependence: (opt: ProjectDependenceIdPayload) => void;
onShowDependenceVersions: (opt: ProjectDependencePayload) => void;
onUpdateDependenceVersion: (opt: ProjectDependenceVersionPayload) => void;
}
interface GroupedApiRepo {
apiRepo: ApiRepo;
apiRepoVersions: ApiRepoVersion[];
}
@theme(css)
export default class | extends ThemedMixin(I18nMixin(WidgetBase))<ViewProjectDependenceProperties> {
private _localizedMessages = this.localizeBundle(messageBundle);
@watch()
private _search: string = '';
protected render() {
const { repository } = this.properties;
if (!repository) {
return v('div', { classes: [c.mt_5] }, [w(Spinner, {})]);
}
if (this._isNotFound()) {
return w(Exception, { type: '404' });
}
return v('div', { classes: [css.root, c.container] }, [
this._renderHeader(),
this._renderNavigation(),
this._renderDependenceCard(),
]);
}
private _isNotFound() {
const { repository } = this.properties;
return isEmpty(repository);
}
private _renderHeader() {
const {
messages: { privateRepositoryTitle },
} = this._localizedMessages;
const { repository } = this.properties;
return w(RepositoryHeader, { repository, privateRepositoryTitle });
}
private _renderNavigation() {
const { repository, pathes, onOpenGroup } = this.properties;
return v('div', { classes: [c.d_flex, c.justify_content_between, c.mb_2] }, [
v('div', {}, [w(ProjectResourceBreadcrumb, { repository, pathes, onOpenGroup })]),
]);
}
private _renderDependenceCard() {
const { latestCommitInfo } = this.properties;
return v('div', { classes: [c.card, !latestCommitInfo ? c.border_top_0 : undefined] }, [
w(LatestCommitInfo, { latestCommitInfo, showBottomBorder: true }), // 最近提交信息区
this._renderDependenceEditor(),
]);
}
private _renderDependenceEditor() {
return v('div', { classes: [c.card_body] }, [
this._renderComponentRepoSearchPart(),
// 显示项目依赖
// 1. 如果没有依赖,则显示提示信息
// 2. 否则显示依赖
this._renderDependencePart(),
]);
}
private _renderComponentRepoSearchPart() {
return v('div', { classes: [c.py_4, c.border_bottom] }, [
this._renderSearchForm(),
this._renderSearchTip(),
this._renderSearchedComponentRepos(),
]);
}
private _renderSearchForm() {
const {
messages: { componentSearchForProjectPlaceholder },
} = this._localizedMessages;
return v('form', {}, [
v('div', { classes: [c.form_group] }, [
v('input', {
type: 'text',
classes: [c.form_control],
placeholder: `${componentSearchForProjectPlaceholder}`,
oninput: this._onSearchComponentRepo,
value: `${this._search}`,
}),
]),
]);
}
private _renderSearchTip() {
if (this._search === '') {
return;
}
const { pagedComponentRepos } = this.properties;
let length = 0;
if (pagedComponentRepos && pagedComponentRepos.content) {
length = pagedComponentRepos.content.length;
}
return v('div', { classes: [c.d_flex, c.justify_content_between, c.align_items_center, c.border_bottom] }, [
v('div', [
'使用 ',
v('strong', [`${this._search}`]),
' 共查出 ',
v('strong', [`${length}`]),
' 个组件仓库',
]),
v('div', [
v(
'button',
{
classes: [c.btn, c.btn_link, c.btn_sm, css.btnLink],
onclick: this._onClearSearchText,
},
[w(FontAwesomeIcon, { icon: 'times', classes: [c.mr_1] }), '清空搜索条件']
),
]),
]);
}
private _onClearSearchText() {
this._search = '';
this.properties.onQueryComponentRepos({ query: this._search });
}
private _onSearchComponentRepo({ target: { value: query } }: WithTarget) {
this._search = query;
this.properties.onQueryComponentRepos({ query });
}
private _renderSearchedComponentRepos(): DNode {
const { pagedComponentRepos } = this.properties;
if (!pagedComponentRepos) {
return;
}
if (pagedComponentRepos.content.length === 0) {
return this._renderEmptyComponentRepo();
}
return v('div', { key: 'component-repos-part', classes: [] }, [
// 组件库列表
this._renderComponentRepos(),
// 分页
this._renderPagination(),
]);
}
private _renderEmptyComponentRepo() {
return v(
'div',
{
key: 'no-component-repos',
classes: [c.alert, c.alert_secondary, c.mx_auto, c.text_center, c.mt_3, c.py_4],
},
[v('strong', {}, ['没有查到组件仓库'])]
);
}
private _renderComponentRepos() {
const { repository, pagedComponentRepos, dependences = [], onAddDependence } = this.properties;
return v(
'ul',
{ classes: [c.list_group, c.mt_2] },
pagedComponentRepos.content.map((item) => {
const used =
findIndex(dependences, (dependence) => item.componentRepo.id === dependence.componentRepo.id) > -1;
return w(ComponentRepoItem, {
repository,
componentRepoInfo: item,
used,
onAddDependence,
});
})
);
}
private _renderPagination() {
const { pagedComponentRepos } = this.properties;
if (!pagedComponentRepos) {
return;
}
const { first, last, size, number, totalPages } = pagedComponentRepos;
return w(Pagination, {
totalPages,
first,
last,
number,
size,
});
}
private _renderDependencePart() {
const { dependences = [] } = this.properties;
if (dependences.length === 0) {
return this._renderNoDependenceMessage();
}
return this._renderDependenceItems();
}
private _renderDependenceItems() {
return v('div', { key: 'dependence-items', classes: [c.mt_4] }, [
...this._renderApiRepos(),
...this._renderDevComponentRepos(),
...this._renderBuildComponentRepos(),
]);
}
private _renderApiRepos() {
const { dependences = [] } = this.properties;
const groupedApiRepos: GroupedApiRepo[] = [];
dependences.forEach((item) => {
const findedApiRepo = find(
groupedApiRepos,
(groupedApiRepo) => item.apiRepo.id === groupedApiRepo.apiRepo.id
);
if (findedApiRepo) {
// 如果已存在,则再查看版本是否添加
const indexApiRepoVersion = findIndex(
findedApiRepo.apiRepoVersions,
(version) => version.id === item.apiRepoVersion.id
);
if (indexApiRepoVersion === -1) {
findedApiRepo.apiRepoVersions.push(item.apiRepoVersion);
}
} else {
// groupedApiRepos 中不存在时,追加
groupedApiRepos.push({ apiRepo: item.apiRepo, apiRepoVersions: [item.apiRepoVersion] });
}
});
return [
v('div', {}, [v('strong', ['API'])]),
v(
'div',
{ classes: [c.pl_4, c.border_left] },
groupedApiRepos.map((item) =>
v('div', {}, [
// 当前只支持 git
w(FontAwesomeIcon, { icon: ['fab', 'git-alt'], classes: [c.text_muted], title: 'git 仓库' }),
v(
'a',
{
target: '_blank',
href: `${item.apiRepo.gitRepoUrl}`,
title: '跳转到 API 仓库',
classes: [c.ml_1],
},
[`${item.apiRepo.gitRepoOwner}/${item.apiRepo.gitRepoName}`]
),
v(
'span',
{ classes: [c.ml_3] },
item.apiRepoVersions.map((version) =>
v('span', { classes: [c.mr_1, c.badge, c.badge_secondary] }, [`${version.version}`])
)
),
])
)
),
];
}
private _renderDevComponentRepos(): DNode[] {
const { dependences = [] } = this.properties;
const devDependences = dependences.filter((dependence) => dependence.componentRepo.repoType === RepoType.IDE);
if (devDependences.length === 0) {
return [];
}
return [v('div', {}, [v('strong', ['开发'])]), ...this._renderComponentRepoDependences(devDependences)];
}
private _renderBuildComponentRepos(): DNode[] {
const { dependences = [] } = this.properties;
const buildDependences = dependences.filter(
(dependence) => dependence.componentRepo.repoType === RepoType.PROD
);
if (buildDependences.length === 0) {
return [];
}
return [v('div', {}, [v('strong', ['构建'])]), ...this._renderComponentRepoDependences(buildDependences)];
}
private _renderComponentRepoDependences(dependences: ProjectDependenceData[]): DNode[] {
const { repository, onDeleteDependence, onShowDependenceVersions, onUpdateDependenceVersion } = this.properties;
// 按照 appType 分组
const groupedDependences = lodash.groupBy(dependences, (dependence) => dependence.componentRepoVersion.appType);
const vnodes: DNode[] = [];
for (const key in groupedDependences) {
const values = groupedDependences[key];
vnodes.push(
v('div', { classes: [c.pl_4, c.border_left] }, [
v('div', {}, [`${key}`]),
v(
'div',
{ classes: [c.pl_4, c.border_left] },
values.map((item) =>
w(DependenceRow, {
repository,
dependence: item,
versions: item.componentRepoVersions || [],
onDeleteDependence,
onShowDependenceVersions,
onUpdateDependenceVersion,
})
)
),
])
);
}
return vnodes;
}
private _renderNoDependenceMessage() {
return v('div', { key: 'no-dependence', classes: [c.mt_4] }, [
v('div', { classes: [c.alert, c.alert_primary, c.mx_auto, c.text_center, c.py_4] }, [
v('strong', {}, ['此项目尚未配置依赖']),
]),
]);
}
}
interface ComponentRepoItemProperties {
repository: Repository;
componentRepoInfo: ComponentRepoInfo;
used: boolean;
onAddDependence: (opt: ProjectDependenceWithProjectPathPayload) => void;
}
class ComponentRepoItem extends ThemedMixin(I18nMixin(WidgetBase))<ComponentRepoItemProperties> {
protected render() {
const {
componentRepoInfo: { componentRepo, componentRepoVersion, apiRepo },
used = false,
} = this.properties;
return v('li', { classes: [c.list_group_item] }, [
// 如果组件库未安装,则显示“使用”按钮,否则显示“已用”文本
v('div', {}, [
v('span', { classes: [c.font_weight_bold, c.mr_2] }, [
v('img', {
width: 20,
height: 20,
classes: [c.avatar, c.mr_1],
src: `${componentRepo.createUserAvatarUrl}`,
}),
`${componentRepo.createUserName} / ${componentRepoVersion.name}`,
]),
v('span', { classes: [c.badge, c.badge_info, c.ml_3], title: '与 BlockLang 设计器集成' }, [
`${componentRepo.repoType}`,
]),
used
? v('span', { classes: [c.float_right, c.text_info] }, ['已用'])
: v(
'button',
{
classes: [c.btn, c.btn_secondary, c.btn_sm, c.float_right],
onclick: this._onAddDependence,
},
['使用']
),
]),
v('p', { itemprop: 'description', classes: [c.text_muted, c.mb_0] }, [
`${componentRepoVersion.description}`,
]),
v('div', { classes: [c.my_2] }, [
v('span', { classes: [c.border, c.rounded, c.px_1] }, [
v('span', {}, ['API: ']),
v(
'a',
{
target: '_blank',
href: `${apiRepo.gitRepoUrl}`,
title: '跳转到 API 仓库',
classes: [c.mr_1],
},
[`${apiRepo.gitRepoOwner}/${apiRepo.gitRepoName}`]
),
]),
' -> ',
v('span', { classes: [c.border, c.rounded, c.px_1] }, [
v('span', {}, ['实现: ']),
v(
'a',
{
target: '_blank',
href: `${componentRepo.gitRepoUrl}`,
title: '跳转到组件仓库',
classes: [c.mr_1],
},
[`${componentRepo.gitRepoOwner}/${componentRepo.gitRepoName}`]
),
]),
]),
v('small', { classes: [c.text_muted] }, [
v('span', { classes: [c.mr_3] }, [
w(FontAwesomeIcon, {
icon: componentRepoVersion.icon.split(' ') as [IconPrefix, IconName],
classes: [c.mr_1],
}),
`${componentRepoVersion.title}`,
]),
v('span', { classes: [c.mr_3] }, [
v('span', {
classes: [css.repoLanguageColor, c.mr_1],
styles: {
backgroundColor: `${getProgramingLanguageColor(componentRepoVersion.language)}`,
},
}),
v('span', { itemprop: 'programmingLanguage' }, [
`${getProgramingLanguageName(componentRepoVersion.language)}`,
]),
]),
v('span', { classes: [c.mr_3] }, [`${getRepoCategoryName(componentRepo.category)}`]),
v('span', { classes: [c.mr_3], title: '使用次数' }, [
w(FontAwesomeIcon, { icon: 'cube', classes: [c.mr_1] }),
'0',
]),
v('span', {}, [
w(FontAwesomeIcon, { icon: 'clock', classes: [c.mr_1] }),
'最近发布 · ',
w(Moment, { datetime: componentRepo.lastPublishTime }),
]),
]),
]);
}
private _onAddDependence() {
const {
repository,
componentRepoInfo: { componentRepo },
} = this.properties;
// componentRepoVersionId 默认使用最新版本
this.properties.onAddDependence({
owner: repository.createUserName,
repo: repository.name,
componentRepoId: componentRepo.id!,
project: '', // TODO
});
}
}
interface DependenceRowProperties {
repository: Repository;
dependence: ProjectDependenceData;
// 当前选中依赖的版本列表
versions: ComponentRepoVersion[];
onDeleteDependence: (opt: ProjectDependenceIdPayload) => void;
onShowDependenceVersions: (opt: ProjectDependencePayload) => void;
onUpdateDependenceVersion: (opt: ProjectDependenceVersionPayload) => void;
}
class DependenceRow extends ThemedMixin(I18nMixin(WidgetBase))<DependenceRowProperties> {
protected render() {
const { repository, dependence, versions, onUpdateDependenceVersion } = this.properties;
return v('div', {}, [
// 当前只支持 git
w(FontAwesomeIcon, { icon: ['fab', 'git-alt'], classes: [c.text_muted], title: 'git 仓库' }),
v(
'a',
{
target: '_blank',
href: `${dependence.apiRepo.gitRepoUrl}`,
title: '跳转到组件仓库',
classes: [c.ml_1],
},
[`${dependence.componentRepo.gitRepoOwner}/${dependence.componentRepo.gitRepoName}`]
),
v('span', { classes: [c.ml_3] }, [
v('span', { classes: [c.dropdown] }, [
v(
'button',
{
classes: [c.btn, c.btn_secondary, c.btn_sm, c.dropdown_toggle, css.dropdownButton],
type: 'button',
'data-toggle': 'dropdown',
onclick: this._onShowVersions,
},
[`${dependence.componentRepoVersion.version}`]
),
v(
'div',
{ classes: [c.dropdown_menu, css.dropdownMenu] },
versions.map((version) =>
w(DependenceVersionMenu, { repository, dependence, version, onUpdateDependenceVersion })
)
),
]),
]),
v('button', { type: 'button', classes: [c.close, c.float_right], onclick: this._onDeleteDependence }, [
v('span', { 'aria-hidden': 'true', innerHTML: '×' }),
]),
]);
}
private _onShowVersions() {
const { dependence } = this.properties;
this.properties.onShowDependenceVersions({
dependenceId: dependence.dependence.id,
componentRepoId: dependence.componentRepo.id!,
});
}
private _onDeleteDependence() {
const { repository, dependence } = this.properties;
this.properties.onDeleteDependence({
owner: repository.createUserName,
repo: repository.name,
id: dependence.dependence.id,
project: '', // TODO
});
}
}
interface DependenceVersionMenuProperties {
repository: Repository;
dependence: ProjectDependenceData;
version: ComponentRepoVersion;
onUpdateDependenceVersion: (opt: ProjectDependenceVersionPayload) => void;
}
class DependenceVersionMenu extends ThemedMixin(I18nMixin(WidgetBase))<DependenceVersionMenuProperties> {
protected render() {
const { dependence, version } = this.properties;
const isSelected = version.id === dependence.componentRepoVersion.id;
return v(
'a',
{
classes: [c.dropdown_item, isSelected ? c.active : undefined],
href: '#',
onclick: this._onUpdateVersion,
},
[`${version.version}`]
);
}
private _onUpdateVersion(event: MouseEvent) {
event.stopPropagation();
const { repository, dependence, version } = this.properties;
const isSelected = version.id === dependence.componentRepoVersion.id;
if (isSelected) {
return;
}
this.properties.onUpdateDependenceVersion({
owner: repository.createUserName,
repo: repository.name,
dependenceId: dependence.dependence.id,
componentRepoVersionId: version.id,
});
}
}
| ViewProjectDependence | identifier_name |
ViewProjectDependence.ts | import * as css from './ViewProjectDependence.m.css';
import ThemedMixin, { theme } from '@dojo/framework/core/mixins/Themed';
import I18nMixin from '@dojo/framework/core/mixins/I18n';
import WidgetBase from '@dojo/framework/core/WidgetBase';
import { v, w } from '@dojo/framework/core/vdom';
import * as c from '@blocklang/bootstrap-classes';
import {
Repository,
RepositoryResourceGroup,
CommitInfo,
WithTarget,
PagedComponentRepos,
ComponentRepoInfo,
ApiRepo,
ApiRepoVersion,
ComponentRepoVersion,
ProjectDependenceData,
} from '../../interfaces';
import Spinner from '../../widgets/spinner';
import { isEmpty, getProgramingLanguageName, getRepoCategoryName, getProgramingLanguageColor } from '../../util';
import Exception from '../error/Exception';
import RepositoryHeader from '../widgets/RepositoryHeader';
import messageBundle from '../../nls/main';
import {
RepositoryResourcePathPayload,
QueryPayload,
ProjectDependencePayload,
ProjectDependenceIdPayload,
ProjectDependenceVersionPayload,
ProjectDependenceWithProjectPathPayload,
} from '../../processes/interfaces';
import LatestCommitInfo from './widgets/LatestCommitInfo';
import ProjectResourceBreadcrumb from './widgets/ProjectResourceBreadcrumb';
import watch from '@dojo/framework/core/decorators/watch';
import FontAwesomeIcon from '@blocklang/dojo-fontawesome/FontAwesomeIcon';
import Pagination from '../../widgets/pagination';
import Moment from '../../widgets/moment';
import { findIndex, find } from '@dojo/framework/shim/array';
import * as lodash from 'lodash';
import { DNode } from '@dojo/framework/core/interfaces';
import { IconPrefix, IconName } from '@fortawesome/fontawesome-svg-core';
import { RepoType } from '../../constant';
export interface ViewProjectDependenceProperties {
loggedUsername: string;
repository: Repository;
sourceId: number;
pathes: RepositoryResourceGroup[];
pagedComponentRepos: PagedComponentRepos;
dependences: ProjectDependenceData[];
latestCommitInfo: CommitInfo;
onOpenGroup: (opt: RepositoryResourcePathPayload) => void;
onQueryComponentRepos: (opt: QueryPayload) => void;
onAddDependence: (opt: ProjectDependenceWithProjectPathPayload) => void;
onDeleteDependence: (opt: ProjectDependenceIdPayload) => void;
onShowDependenceVersions: (opt: ProjectDependencePayload) => void;
onUpdateDependenceVersion: (opt: ProjectDependenceVersionPayload) => void;
}
interface GroupedApiRepo {
apiRepo: ApiRepo;
apiRepoVersions: ApiRepoVersion[];
}
@theme(css)
export default class ViewProjectDependence extends ThemedMixin(I18nMixin(WidgetBase))<ViewProjectDependenceProperties> {
private _localizedMessages = this.localizeBundle(messageBundle);
@watch()
private _search: string = '';
protected render() {
const { repository } = this.properties;
if (!repository) {
return v('div', { classes: [c.mt_5] }, [w(Spinner, {})]);
}
if (this._isNotFound()) {
return w(Exception, { type: '404' });
}
return v('div', { classes: [css.root, c.container] }, [
this._renderHeader(),
this._renderNavigation(),
this._renderDependenceCard(),
]);
}
private _isNotFound() {
const { repository } = this.properties;
return isEmpty(repository);
}
private _renderHeader() {
const {
messages: { privateRepositoryTitle },
} = this._localizedMessages;
const { repository } = this.properties;
return w(RepositoryHeader, { repository, privateRepositoryTitle });
}
private _renderNavigation() {
const { repository, pathes, onOpenGroup } = this.properties;
return v('div', { classes: [c.d_flex, c.justify_content_between, c.mb_2] }, [
v('div', {}, [w(ProjectResourceBreadcrumb, { repository, pathes, onOpenGroup })]),
]);
}
private _renderDependenceCard() {
const { latestCommitInfo } = this.properties;
return v('div', { classes: [c.card, !latestCommitInfo ? c.border_top_0 : undefined] }, [
w(LatestCommitInfo, { latestCommitInfo, showBottomBorder: true }), // 最近提交信息区
this._renderDependenceEditor(),
]);
}
private _renderDependenceEditor() {
return v('div', { classes: [c.card_body] }, [
this._renderComponentRepoSearchPart(),
// 显示项目依赖
// 1. 如果没有依赖,则显示提示信息
// 2. 否则显示依赖
this._renderDependencePart(),
]);
}
private _renderComponentRepoSearchPart() {
return v('div', { classes: [c.py_4, c.border_bottom] }, [
this._renderSearchForm(),
this._renderSearchTip(),
this._renderSearchedComponentRepos(),
]);
}
private _renderSearchForm() {
const {
messages: { componentSearchForProjectPlaceholder },
} = this._localizedMessages;
return v('form', {}, [
v('div', { classes: [c.form_group] }, [
v('input', {
type: 'text',
classes: [c.form_control],
placeholder: `${componentSearchForProjectPlaceholder}`,
oninput: this._onSearchComponentRepo,
value: `${this._search}`,
}),
]),
]);
}
private _renderSearchTip() {
if (this._search === '') {
return;
}
const { pagedComponentRepos } = this.properties;
let length = 0;
if (pagedComponentRepos && pagedComponentRepos.content) {
length = pagedComponentRepos.content.length;
}
return v('div', { classes: [c.d_flex, c.justify_content_between, c.align_items_center, c.border_bottom] }, [
v('div', [
'使用 ',
v('strong', [`${this._search}`]),
' 共查出 ',
v('strong', [`${length}`]),
' 个组件仓库',
]),
v('div', [
v(
'button',
{
classes: [c.btn, c.btn_link, c.btn_sm, css.btnLink],
onclick: this._onClearSearchText,
},
[w(FontAwesomeIcon, { icon: 'times', classes: [c.mr_1] }), '清空搜索条件']
),
]),
]);
}
private _onClearSearchText() {
this._search = '';
this.properties.onQueryComponentRepos({ query: this._search });
}
private _onSearchComponentRepo({ target: { value: query } }: WithTarget) {
this._search = query;
this.properties.onQueryComponentRepos({ query });
}
private _renderSearchedComponentRepos(): DNode {
const { pagedComponentRepos } = this.properties;
if (!pagedComponentRepos) {
return;
}
if (pagedComponentRepos.content.length === 0) {
return this._renderEmptyComponentRepo();
}
return v('div', { key: 'component-repos-part', classes: [] }, [
// 组件库列表
this._renderComponentRepos(),
// 分页
this._renderPagination(),
]);
}
private _renderEmptyComponentRepo() {
return v(
'div',
{
key: 'no-component-repos',
classes: [c.alert, c.alert_secondary, c.mx_auto, c.text_center, c.mt_3, c.py_4],
},
[v('strong', {}, ['没有查到组件仓库'])]
);
}
private _renderComponentRepos() {
const { repository, pagedComponentRepos, dependences = [], onAddDependence } = this.properties;
return v(
'ul',
{ classes: [c.list_group, c.mt_2] },
pagedComponentRepos.content.map((item) => {
const used =
findIndex(dependences, (dependence) => item.componentRepo.id === dependence.componentRepo.id) > -1;
return w(ComponentRepoItem, {
repository,
componentRepoInfo: item,
used,
onAddDependence,
});
})
);
}
private _renderPagination() {
const { pagedComponentRepos } = this.properties;
if (!pagedComponentRepos) {
return;
}
const { first, last, size, number, totalPages } = pagedComponentRepos;
return w(Pagination, {
totalPages,
first,
last,
number,
size,
});
}
private _renderDependencePart() {
const { dependences = [] } = this.properties;
if (dependences.length === 0) {
return this._renderNoDependenceMessage();
}
return this._renderDependenceItems();
}
private _renderDependenceItems() {
return v('div', { key: 'dependence-items', classes: [c.mt_4] }, [
...this._renderApiRepos(),
...this._renderDevComponentRepos(),
...this._renderBuildComponentRepos(),
]);
}
private _renderApiRepos() {
const { dependences = [] } = this.properties;
const groupedApiRepos: GroupedApiRepo[] = [];
dependences.forEach((item) => {
const findedApiRepo = find(
groupedApiRepos,
(groupedApiRepo) => item.apiRepo.id === groupedApiRepo.apiRepo.id
);
if (findedApiRepo) {
// 如果已存在,则再查看版本是否添加
const indexApiRepoVersion = findIndex(
findedApiRepo.apiRepoVersions,
(version) => version.id === item.apiRepoVersion.id
);
if (indexApiRepoVersion === -1) {
findedApiRepo.apiRepoVersions.push(item.apiRepoVersion);
}
} else {
// groupedApiRepos 中不存在时,追加
groupedApiRepos.push({ apiRepo: item.apiRepo, apiRepoVersions: [item.apiRepoVersion] });
}
});
return [
v('div' | // 当前只支持 git
w(FontAwesomeIcon, { icon: ['fab', 'git-alt'], classes: [c.text_muted], title: 'git 仓库' }),
v(
'a',
{
target: '_blank',
href: `${item.apiRepo.gitRepoUrl}`,
title: '跳转到 API 仓库',
classes: [c.ml_1],
},
[`${item.apiRepo.gitRepoOwner}/${item.apiRepo.gitRepoName}`]
),
v(
'span',
{ classes: [c.ml_3] },
item.apiRepoVersions.map((version) =>
v('span', { classes: [c.mr_1, c.badge, c.badge_secondary] }, [`${version.version}`])
)
),
])
)
),
];
}
private _renderDevComponentRepos(): DNode[] {
const { dependences = [] } = this.properties;
const devDependences = dependences.filter((dependence) => dependence.componentRepo.repoType === RepoType.IDE);
if (devDependences.length === 0) {
return [];
}
return [v('div', {}, [v('strong', ['开发'])]), ...this._renderComponentRepoDependences(devDependences)];
}
private _renderBuildComponentRepos(): DNode[] {
const { dependences = [] } = this.properties;
const buildDependences = dependences.filter(
(dependence) => dependence.componentRepo.repoType === RepoType.PROD
);
if (buildDependences.length === 0) {
return [];
}
return [v('div', {}, [v('strong', ['构建'])]), ...this._renderComponentRepoDependences(buildDependences)];
}
private _renderComponentRepoDependences(dependences: ProjectDependenceData[]): DNode[] {
const { repository, onDeleteDependence, onShowDependenceVersions, onUpdateDependenceVersion } = this.properties;
// 按照 appType 分组
const groupedDependences = lodash.groupBy(dependences, (dependence) => dependence.componentRepoVersion.appType);
const vnodes: DNode[] = [];
for (const key in groupedDependences) {
const values = groupedDependences[key];
vnodes.push(
v('div', { classes: [c.pl_4, c.border_left] }, [
v('div', {}, [`${key}`]),
v(
'div',
{ classes: [c.pl_4, c.border_left] },
values.map((item) =>
w(DependenceRow, {
repository,
dependence: item,
versions: item.componentRepoVersions || [],
onDeleteDependence,
onShowDependenceVersions,
onUpdateDependenceVersion,
})
)
),
])
);
}
return vnodes;
}
private _renderNoDependenceMessage() {
return v('div', { key: 'no-dependence', classes: [c.mt_4] }, [
v('div', { classes: [c.alert, c.alert_primary, c.mx_auto, c.text_center, c.py_4] }, [
v('strong', {}, ['此项目尚未配置依赖']),
]),
]);
}
}
interface ComponentRepoItemProperties {
repository: Repository;
componentRepoInfo: ComponentRepoInfo;
used: boolean;
onAddDependence: (opt: ProjectDependenceWithProjectPathPayload) => void;
}
class ComponentRepoItem extends ThemedMixin(I18nMixin(WidgetBase))<ComponentRepoItemProperties> {
protected render() {
const {
componentRepoInfo: { componentRepo, componentRepoVersion, apiRepo },
used = false,
} = this.properties;
return v('li', { classes: [c.list_group_item] }, [
// 如果组件库未安装,则显示“使用”按钮,否则显示“已用”文本
v('div', {}, [
v('span', { classes: [c.font_weight_bold, c.mr_2] }, [
v('img', {
width: 20,
height: 20,
classes: [c.avatar, c.mr_1],
src: `${componentRepo.createUserAvatarUrl}`,
}),
`${componentRepo.createUserName} / ${componentRepoVersion.name}`,
]),
v('span', { classes: [c.badge, c.badge_info, c.ml_3], title: '与 BlockLang 设计器集成' }, [
`${componentRepo.repoType}`,
]),
used
? v('span', { classes: [c.float_right, c.text_info] }, ['已用'])
: v(
'button',
{
classes: [c.btn, c.btn_secondary, c.btn_sm, c.float_right],
onclick: this._onAddDependence,
},
['使用']
),
]),
v('p', { itemprop: 'description', classes: [c.text_muted, c.mb_0] }, [
`${componentRepoVersion.description}`,
]),
v('div', { classes: [c.my_2] }, [
v('span', { classes: [c.border, c.rounded, c.px_1] }, [
v('span', {}, ['API: ']),
v(
'a',
{
target: '_blank',
href: `${apiRepo.gitRepoUrl}`,
title: '跳转到 API 仓库',
classes: [c.mr_1],
},
[`${apiRepo.gitRepoOwner}/${apiRepo.gitRepoName}`]
),
]),
' -> ',
v('span', { classes: [c.border, c.rounded, c.px_1] }, [
v('span', {}, ['实现: ']),
v(
'a',
{
target: '_blank',
href: `${componentRepo.gitRepoUrl}`,
title: '跳转到组件仓库',
classes: [c.mr_1],
},
[`${componentRepo.gitRepoOwner}/${componentRepo.gitRepoName}`]
),
]),
]),
v('small', { classes: [c.text_muted] }, [
v('span', { classes: [c.mr_3] }, [
w(FontAwesomeIcon, {
icon: componentRepoVersion.icon.split(' ') as [IconPrefix, IconName],
classes: [c.mr_1],
}),
`${componentRepoVersion.title}`,
]),
v('span', { classes: [c.mr_3] }, [
v('span', {
classes: [css.repoLanguageColor, c.mr_1],
styles: {
backgroundColor: `${getProgramingLanguageColor(componentRepoVersion.language)}`,
},
}),
v('span', { itemprop: 'programmingLanguage' }, [
`${getProgramingLanguageName(componentRepoVersion.language)}`,
]),
]),
v('span', { classes: [c.mr_3] }, [`${getRepoCategoryName(componentRepo.category)}`]),
v('span', { classes: [c.mr_3], title: '使用次数' }, [
w(FontAwesomeIcon, { icon: 'cube', classes: [c.mr_1] }),
'0',
]),
v('span', {}, [
w(FontAwesomeIcon, { icon: 'clock', classes: [c.mr_1] }),
'最近发布 · ',
w(Moment, { datetime: componentRepo.lastPublishTime }),
]),
]),
]);
}
private _onAddDependence() {
const {
repository,
componentRepoInfo: { componentRepo },
} = this.properties;
// componentRepoVersionId 默认使用最新版本
this.properties.onAddDependence({
owner: repository.createUserName,
repo: repository.name,
componentRepoId: componentRepo.id!,
project: '', // TODO
});
}
}
interface DependenceRowProperties {
repository: Repository;
dependence: ProjectDependenceData;
// 当前选中依赖的版本列表
versions: ComponentRepoVersion[];
onDeleteDependence: (opt: ProjectDependenceIdPayload) => void;
onShowDependenceVersions: (opt: ProjectDependencePayload) => void;
onUpdateDependenceVersion: (opt: ProjectDependenceVersionPayload) => void;
}
class DependenceRow extends ThemedMixin(I18nMixin(WidgetBase))<DependenceRowProperties> {
protected render() {
const { repository, dependence, versions, onUpdateDependenceVersion } = this.properties;
return v('div', {}, [
// 当前只支持 git
w(FontAwesomeIcon, { icon: ['fab', 'git-alt'], classes: [c.text_muted], title: 'git 仓库' }),
v(
'a',
{
target: '_blank',
href: `${dependence.apiRepo.gitRepoUrl}`,
title: '跳转到组件仓库',
classes: [c.ml_1],
},
[`${dependence.componentRepo.gitRepoOwner}/${dependence.componentRepo.gitRepoName}`]
),
v('span', { classes: [c.ml_3] }, [
v('span', { classes: [c.dropdown] }, [
v(
'button',
{
classes: [c.btn, c.btn_secondary, c.btn_sm, c.dropdown_toggle, css.dropdownButton],
type: 'button',
'data-toggle': 'dropdown',
onclick: this._onShowVersions,
},
[`${dependence.componentRepoVersion.version}`]
),
v(
'div',
{ classes: [c.dropdown_menu, css.dropdownMenu] },
versions.map((version) =>
w(DependenceVersionMenu, { repository, dependence, version, onUpdateDependenceVersion })
)
),
]),
]),
v('button', { type: 'button', classes: [c.close, c.float_right], onclick: this._onDeleteDependence }, [
v('span', { 'aria-hidden': 'true', innerHTML: '×' }),
]),
]);
}
private _onShowVersions() {
const { dependence } = this.properties;
this.properties.onShowDependenceVersions({
dependenceId: dependence.dependence.id,
componentRepoId: dependence.componentRepo.id!,
});
}
private _onDeleteDependence() {
const { repository, dependence } = this.properties;
this.properties.onDeleteDependence({
owner: repository.createUserName,
repo: repository.name,
id: dependence.dependence.id,
project: '', // TODO
});
}
}
interface DependenceVersionMenuProperties {
repository: Repository;
dependence: ProjectDependenceData;
version: ComponentRepoVersion;
onUpdateDependenceVersion: (opt: ProjectDependenceVersionPayload) => void;
}
class DependenceVersionMenu extends ThemedMixin(I18nMixin(WidgetBase))<DependenceVersionMenuProperties> {
protected render() {
const { dependence, version } = this.properties;
const isSelected = version.id === dependence.componentRepoVersion.id;
return v(
'a',
{
classes: [c.dropdown_item, isSelected ? c.active : undefined],
href: '#',
onclick: this._onUpdateVersion,
},
[`${version.version}`]
);
}
private _onUpdateVersion(event: MouseEvent) {
event.stopPropagation();
const { repository, dependence, version } = this.properties;
const isSelected = version.id === dependence.componentRepoVersion.id;
if (isSelected) {
return;
}
this.properties.onUpdateDependenceVersion({
owner: repository.createUserName,
repo: repository.name,
dependenceId: dependence.dependence.id,
componentRepoVersionId: version.id,
});
}
}
| , {}, [v('strong', ['API'])]),
v(
'div',
{ classes: [c.pl_4, c.border_left] },
groupedApiRepos.map((item) =>
v('div', {}, [
| conditional_block |
01_questions.js | exports.seed = function(knex, Promise) {
return knex('questions')
.del()
.then(function() {
return knex('questions').insert([
{
id: 1,
title: 'The Dress Code',
question:
'You work in an office, performing a job that you find satisfying (and which compensates you adequately). The company that employs you is suddenly purchased by an eccentric millionaire who plans to immediately raise each persons salary by 5 percent and extend an extra week of vacation to full time employees. However, this new owner intends to enforce a somewhat radical dress code: Every day, men will have to wear tuxedos, tails, and top hats (during the summer months, male employees will be allowed to wear gray three-piece suits on "casual" Fridays). Women must exclusively work in formal wear, preferably ball gowns or prom dresses. Each employee will be given an annual $1,500 stipend to purchase necessary garments, but that money an only be spend on work-related clothing. The new regime starts in three months. Do you seek new employment elsewhere?',
answer1: 'Sounds good to me!',
answer2: 'No way!',
response1: '0',
response2: '0'
},
{
id: 2,
title: 'Earth VS the Moon',
question:
'You have won a prize. The prize has two options, and you can choose either (but not both). The first option is a year in Europe with a monthly stipend of $2,000. The second option is ten minutes on the moon. Which option do you select?',
answer1: 'Europe!',
answer2: 'The Moon!',
response1: '0',
response2: '0'
},
{
id: 3,
title: 'The Cannibals Quandary',
question:
'You are in a plane crash over the Andes Mountains. As such, you will be forced to consume the flesh of the people who died on impact. This will be a terrible experience, but it is the only way for you to survive. Fortunately, you did not know any of the victims personally. Would you rather eat a baby, or would you rather eat an elderly person? Would gender play a role in the selection process? And how much would it bother you if this meat turned out to be delicous?',
answer1: 'Baby',
answer2: 'Elderly Person',
response1: '0',
response2: '0'
},
{
id: 4,
title: 'Body VS Mind',
question:
'You are given a choice between two rewards. The first reward is to be twice as intelligent as you are right now. You will be able to read twice as fast and remember twice as much, the size of your vocabulary will double, and you will be able to solve intellectual problems with twice your current aptitude. The second reward is that you will never again feel sick (even when you are). And you can always be whatever weight you want, regardless of what you eat or how little you exercise. You can simply imagine the body you would like to have and that is the weight you will magically become. Which reward do you choose?',
answer1: 'Mind!',
answer2: 'Body!',
response1: '0',
response2: '0'
},
{
id: 5,
title: 'The Countdown',
question:
'You are presented with a strange challenge: Someone dares you to count backward from 300 to 0. If you succeed at this simple request, you will be given $25,000 in cash. However, if you misspeak, get any digit incorrect, or make any mistake whatsoever, you will immediately be doused with gasoline and burned alive. Do you attempt this challenge?',
answer1: 'Absolutely!',
answer2: 'No Way!',
response1: '0',
response2: '0'
},
{
id: 6,
title: 'The Price of Sex Appeal',
question:
'You meet a wizard in downtown Chicago. The wizard tells you he can make you more attractive if you pay him money. When you ask how this process works, the wizard points to a random person on the street. You look at this random stranger. The wizard says, "I will now make him one dollar more attractive." He waves his magic wand. Ostensibly, this person does not change at all; as far as you can tell, nothing is different. But somehow this person is suddenly a little more appealing. The tangible difference is invisible to the naked eye, but you can not deny that this person is vaguely sexier. This wizard has a weird rule though, you can only pay him once. You can not keep giving him money until you are satisfied. You can only pay him one lump sum up front. How much cash do you give the wizard?',
answer1: 'More than $1,000',
answer2: 'Less than $1,000',
response1: '0',
response2: '0'
},
{
id: 7,
title: 'Prison Culture',
question:
'A totally innocent man is sent to prison for 25 years, all for a crime he did not commit (the DNA evidence that indisputably proves his innocence does not emerge until he has completed his full sentence). During those 25 years in prison, he kills three other inmates, sells drugs, and steals food from other inmates on a regular basis. However, he was never caught doing any of these illegal acts. Upon his release from prison, he openly admits to doing all of these things in fact, he brags about these activities to the media. Prison is a different culture, he explains. I did what I had to do to survive. You are head of the penal system. You have just learned of this mans wrongful imprisonment and about all of the terrible things he did while in jail. Do you release him back into society, or do you attempt to charge him with the new crimes he claims to have committed during his prison stay?',
answer1: 'Charge Him!',
answer2: 'We made the mistake!',
response1: '0',
response2: '0'
},
{
id: 8,
title: 'Forever 75',
question:
'You celebrate your 75th birthday in good health. As far as you can tell, you are a spry, relatively normal 75-year-old. And this condition does not seem to change over time: When you celebrate your 80th birthday, you look and feel exactly as you did on your 75th. When you hit 90, you still look and feel 75. On your 100th birthday, you realize that most of your friends are now dead or dying -- but physically, you are the same independent person you were 25 years ago. You hit age 110 with the same results. Every year, you are celebrating a new birthday without physically evolving beyond the age of 75. Doctors have no explanation for this inexplicable stasis. How old would you have to be before you would start to seriously believe that you are immortal?',
answer1: 'Over 110',
answer2: 'Over 150',
response1: '0',
response2: '0'
},
{
id: 9,
title: 'Vampire Weekend',
question:
'It is the evening before your wedding. Everything is going as planned. You are spending the afternoon with a few old friends, chatting casually about old times. Suddenly, your spouse-to-be bursts into the room, totally hysterical. Your spouse-to-be insists that s/he has just seen a vampire. When you ask what this means, s/he says: "I was looking out of the window of my hotel room, and I could see into the apartment building across the street. That is when I saw the vampire. I saw a man in a black cape bite a womans neck and drink her blood. I know this sounds crazy, but I am that certain what I saw is real." Your prospective spouse is in a state of panic and pleads with you to believe his/her story. You can tell that this is really, important to them. Knowing the fragility of the situation and the intensity of the timing, do you tell your potential spouse that you believe that s/he saw a vampire across the street? All of your old friends are watching this conversation.',
answer1: 'I Believe!',
answer2: 'Thats Crazy!',
response1: '0',
response2: '0'
},
{
id: 10,
title: 'The Dream VCR',
question:
'At long last, someone invents "the dream VCR." This machine allows you to tape an entire evenings worth of your own dreams, which you can then watch at your leisure. However, the inventor of the dream VCR will only allow you to use this device if you agree to a strange caveat: When you watch your dreams, you must do so with your family and closest friends in the same room. They get to watch your dreams along with you. And if you do not agree to this, you can not use the dream VCR. Would you still do it?',
answer1: 'Absolutely!',
answer2: 'There is no way!',
response1: '0',
response2: '0'
},
{
id: 11,
title: 'A Life In Film',
question:
'For whatever the reason, two unauthorized movies are made about your life. The first is an independently released documentary, primarily comprised of interviews with people who know you and bootleg footage from your actual life. Critics are describing the documentary as "brutally honest and relentlessly fair." Meanwhile, Columbia TriStar has produced a big-budget biopic of your life, casting major Hollywood stars as you and all of your acquaintances; though the movie is based on actual events, screenwriters have taken some liberties with the facts. Critics are split on the artistic merits of this fictionalized account, but audiences love it. Which film would you be more interested in seeing?',
answer1: 'The Documentary!',
answer2: 'The Feature Film!',
response1: '1',
response2: '1'
},
{
id: 12,
title: 'Shaquille in the Shower',
question:
'You come home from an afternoon of shopping, expecting your residence to be empty. However, upon entering your front door, you immediately sense that something is strange: The entire place smells like marijuana and roses. There is a briefcase sitting in the middle of your living room floor, filled with diamonds and Christmas cookies. You can hear the shower running, and -- when you open the door to the bathroom -- you realize that the man using the shower is basketball legend Shaquille ONeal. A naked Shaq peers at you from behind the shower curtain and smiles enthusiastically, but says nothing. He then returns to washing himself. When you ask ONeal what he is doing in your home, he simply says, "I do not remember." Do you call the police?',
answer1: 'He can stay!',
answer2: 'Call the police!',
response1: '0',
response2: '0'
},
{
id: 13,
title: 'Lost Virginity Redux',
question:
'Imagine you could go back to the age of five and relive the rest of your life, knowing everything that you know now. You will re-experience your entire adolescence with both the cognitive ability of an adult and the memories of everything you have learned from having lived your life previously. Would you lose your virginity earlier or later than you did the first time around?',
answer1: 'Earlier',
answer2: 'Later',
response1: '0',
response2: '0'
},
{
id: 14,
title: 'Rudimentary Magician',
question:
'Let us assume you met a rudimentary magician. Let us also assume that he can do five simple tricks: He can pull a rabbit out of his hat, he can make a coin disappear, he can turn the Ace of Spades into the Joker card, and he can do two others in a similar vein. These are his only tricks and he can not learn any more; he can only do these five. However, it turns out that he is doing these five tricks with real magic. It is not an illusion; he can actually conjure the bunny out of the ether and he can move the coin through space. He is legitimately magical, but extremely limited in scope and influence. Would this person be more impressive to you than Albert Einstein?',
answer1: 'Of Course!',
answer2: 'Nope!',
response1: '0',
response2: '0'
},
{
id: 15,
title: 'The Nude Acquaintance',
question:
'Think of a specific friend of yours -- not your best friend, but someone who is (a) more than an acquaintance and (b) physically attractive. One day, while trolling the Internet, you accidentally come across three nude photographs of this person, reclined on a sofa bed. The pictures were clearly taken several years ago, and it is unclear if they photos were casually posed or taken without the subjects knowledge (your friend looks comfortable, but he/she is never looking directly into the lens of the camera). The pictures are not labeled, and your friends name is not listed anywhere on the site -- but you are certain that this is the same person you know. What do you do with this information? Do you tell anyone?',
answer1: 'Yes!',
answer2: 'No!',
response1: '0',
response2: '0'
},
{
id: 16,
title: 'The Moon Fight',
question:
'A total stranger picks a fight with you in a bar. This stranger is exactly your size and weight. You have done nothing to this individual to warrant such animosity, but the stranger really wants to fight you. And to make matters weirder, this stranger wants to fight you on the moon (and this is somehow possible). You and this stranger will be transported to the surface of the moon, outfitted in ultra-thin (and very comfortable) space suits, and rigged with lightweight oxygen tanks. You will then be expected to fight for 10 three-minute rounds. You can not use weapons, there are no rules, and you cannot quit (unless you or your opponent are knocked unconscious). Do you accept this challenge?',
answer1: 'Bring it on!',
answer2: 'Absolutely not!',
response1: '0',
response2: '0'
},
{
id: 17,
title: 'Artistic Telekinesis',
question:
'Let us assume that you have the ability to telekinetically change culture while you actively experience it. Your mind can now dictate what you see and hear. For example, if you were listening to Pearl Jam and you wanted the music to be heavier, it would immediately sound as though the guitars had been tuned differently in the studio. If you were watching The Office on NBC and decided that Jim should marry Pam (or Karen, or both), you could make it happen all you would need to do is think about that specific desire. You could stare at an oil painting and unconsciously change the color contrasts. You could (essentially) write books as you read them, eliminating certain characters and redirecting plot points as they occurred in the text. However, such changes would only apply to your experience; you could kill off Han Solo at the end of Return of the Jedi, but that would not change the movie for anyone else. If this became reality would art retain any meaning whatsoever?',
answer1: 'It would retain meaning!',
answer2: 'No it would not!',
response1: '0',
response2: '0'
},
{
id: 18,
title: 'The Industry Standard',
question:
'You are inside a very peculiar rock club: the manager of this club demands that all his musical acts must take an extensive IQ test before he will allow them to perform. Tonight there are two acts on the bill, and they coincidentally share the same band name, The Industry Standard. Both bands are alleged to be awesome. Sadly, you only have one hour to spend at the club, and the intermission between the two acts is very long (so you cannot watch both). You ask the manager which version of The Industry Standard you should watch. "I have no idea," he says. "But I will tell you this: The first band had the highest test scores I have ever seen, anywhere. Each member is technically a genius. Conversely, the band playing second had some of the worst scores ever recorded. One member might actually be illiterate. However, I halfway suspect they were all drunk and mocking the entire process. I could not tell for sure." Which version of The Industry Standard do you decide to see?',
answer1: 'The Geniuses!',
answer2: 'The Possibly Illiterate!',
response1: '0',
response2: '0'
},
{
id: 19,
title: 'Collarbones n Chains',
question:
'You meet your soul mate. However, there is a catch: Every three years, someone will break both of your soul mates collarbones with a Crescent wrench, and there is only one way you can stop this from happening: You must swallow a pill that will make every song you hear -- for the rest of your life -- sound as if it is being performed by the band Alice in Chains. When you hear Creedence Clearwater Revival on the radio, it will sound (to your ears) like it is being played by Alice in Chains. If you see Radiohead live, every one of their tunes will sound like it is being covered by Alice in Chains. When you hear a commercial jingle on TV, it will sound like Alice in Chains; if you sing to yourself in the shower, your voice will sound like deceased Alice in Chains vocalist Layne Staley performing a cappella (but it will only sound this way to you). Would you swallow the pill?',
answer1: 'Sounds good to me!', | response1: '0',
response2: '0'
},
{
id: 20,
title: 'Super Gorilla',
question:
'Genetic engineers at Johns Hopkins University announce that they have developed a so-called super gorilla. Though the animal cannot speak, it has a sign-language lexicon of more than 12,000 words, an IQ of nearly 85, and -- most notable -- a vague sense of self-awareness. Oddly, the creature (who weights 700 pounds) becomes fascinated by football. The gorilla aspires to play the game at its highest level and quickly develops the rudimentary skills of a defensive end. ESPN analyst Tom Jackson speculates that this gorilla would be "borderline unblockable" and would likely average six sacks per game (although Jackson concedes that the beast might be susceptible to counters and misdirection plays). Meanwhile, the gorilla has made it clear that he would never intentionally injure any opponent. You are commissioner of the NFL: Would you allow this gorilla to sign with the Seattle Seahawks?',
answer1: 'Let him play!',
answer2: 'No, its insane!',
response1: '0',
response2: '0'
},
{
id: 21,
title: 'The ParaBox',
question:
'Let us assume there are two boxes on a table. In one box, there is a relatively normal turtle; in the other, Adolf Hitlers skull. You have to select one of these items for your home. If you select the turtle, you cannot give it away and you have to keep it alive for two years; if either of these parameters are not met, you will be fined $999 by the state. If you select Hitlers skull, you are required to display it in a semi-prominent location in your living room for the same amount of time, although you will be paid a stipend of $120 per month for doing so. Display of the skull must be apolitical. Which option do you select?',
answer1: 'Hitlers Skull!',
answer2: 'The Turtle!',
response1: '0',
response2: '0'
},
{
id: 22,
title: 'Kick in the Ribs',
question:
'Your best friend is taking a nap on the floor of your living room. Suddenly, you are faced with a bizarre existential problem: This friend is going to die unless you kick them (as hard as you can) in the rib cage. If you don’t kick them while they slumber, they will never wake up. However, you can never explain this to your friend; if you later inform them that you did this to save their life, they will also die from that. So you have to kick a sleeping friend in the ribs, and you can’t tell them why. Since you cannot tell your friend the truth, what kind of excuse will you fabricate to explain this (seemingly inexplicable) attack?',
answer1: 'I saw something on you!',
answer2: 'I wanted to!',
response1: '0',
response2: '0'
},
{
id: 23,
title: 'Office Politics',
question:
'You work in an office. Generally, you are popular with your coworkers. However, you discover that there are currently two rumors circulating the office gossip mill, and both involve you. The first rumor is that you got drunk at the office holiday party and had sex with one of your married coworkers. This rumor is completely true, but most people don’t believe it. The second rumor is that you have been stealing hundreds of dollars of office supplies (and then selling them to cover a gambling debt). This rumor is completely false, but virtually everyone assumes it is factual. Which of these two rumors is most troubling to you?',
answer1: 'The Philandering',
answer2: 'The Stealing',
response1: '0',
response2: '0'
},
{
id: 24,
title: 'The Joker',
question:
'You are sitting in an empty bar (in a town you’ve never before visited), drinking Bacardi with a soft-spoken acquaintance you barely know. After an hour, a third individual walks into the tavern and sits by himself, and you ask your acquaintance who the new man is. “Be careful of that guy,” you are told. “He is a man with a past.” A few minutes later, a fourth person enters the bar; he also sits alone. You ask your acquaintance who this new individual is. “Be careful of that guy, too,” he says. “He is a man with no past.” Which of these two people do you trust less?',
answer1: 'The man with a past',
answer2: 'The man with no past',
response1: '0',
response2: '0'
},
{
id: 25,
title: 'Front Page News',
question:
'Defying all expectation, a group of Scottish marine biologists capture a live Loch Ness Monster. In an almost unbelievable coincidence, a bear hunter in the Pacific Northwest shoots a Sasquatch in the thigh, thereby allowing zoologists to take the furry monster into captivity. These events happen on the same afternoon. That evening, the president announces he may have thyroid cancer and will undergo a biopsy later that week. You are the front page editor of The New York Times: What do you play as the biggest story?',
answer1: 'Mythical Beasts',
answer2: 'The President',
response1: '0',
response2: '0'
}
])
})
.then(() => {
return knex.raw('ALTER SEQUENCE questions_id_seq RESTART WITH 26;')
})
} | answer2: 'Dealbreaker!', | random_line_split |
SgScriptEngine.py | # Copyright (c) 2013, Nathan Dunsworth - NFXPlugins
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the NFXPlugins nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL NFXPLUGINS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
__all__ = [
'parseLogicalOp',
'parseSearchExp'
]
# Python imports
import exceptions
# This module imports
import ShotgunORM
OP_AND = ' and '
OP_OR = ' or '
OP_AND_STRIP = 'and'
OP_OR_STRIP = 'or'
class SgScriptError(exceptions.Exception):
'''
General script engine exception.
'''
pass
def cleanSearchExp(sgSearchExp):
'''
Returns the passed search expression cleaned up of extra spaces.
Also throws when closing parentheses and quotes are not present.
'''
backwardParenCount = 0
backwardQuoteCount = 0
index = 0
curWord = ''
for c in sgSearchExp:
if backwardParenCount < 0:
raise SgScriptError('"%s" missing closing parentheses' % curWord)
if c == '(':
if backwardQuoteCount <= 0:
backwardParenCount += 1
curWord += c
elif c == ')':
if backwardQuoteCount <= 0:
backwardParenCount -= 1
if backwardQuoteCount <= 0 and curWord.endswith(' '):
curWord = curWord[:-1] + c
else:
curWord += c
elif c == '"' or c == "'":
backwardQuoteCount += 1
if backwardQuoteCount >= 2:
backwardQuoteCount = 0
curWord += c
elif c == ' ':
if len(curWord) <= 0:
continue
if curWord.endswith('(') and backwardQuoteCount <= 0:
continue
if backwardQuoteCount >= 1 or not curWord.endswith(' '):
curWord += c
else:
curWord += c
if backwardParenCount != 0:
raise SgScriptError('"%s" missing closing parentheses' % curWord)
result = curWord.strip()
if backwardQuoteCount >= 1:
raise SgScriptError('"%s" missing closing quote' % curWord)
#ShotgunORM.LoggerScriptEngine.debug('ShotgunORM.SgScriptEngine.cleanSearchExp(...)')
#ShotgunORM.LoggerScriptEngine.debug(' * before: "%(searchExp)s"', {'searchExp': sgSearchExp})
#ShotgunORM.LoggerScriptEngine.debug(' * after: "%(searchExp)s"', {'searchExp': result})
return result
def buildSearchExpSpan(sgSearchExp):
'''
Returns the next span in a search expression.
'''
if sgSearchExp.startswith(OP_AND):
return OP_AND
elif sgSearchExp.startswith(OP_OR):
return OP_OR
if sgSearchExp.startswith('('):
backwardParenCount = 0
backwardQuoteCount = 0
index = -1
for c in sgSearchExp:
index += 1
if c == '(':
if backwardQuoteCount <= 0:
backwardParenCount += 1
elif c == ')':
if backwardQuoteCount <= 0:
backwardParenCount -= 1
if backwardParenCount == 0:
break
elif c == '"' or c == "'":
backwardQuoteCount += 1
if backwardQuoteCount >= 2:
backwardQuoteCount = 0
result = sgSearchExp[:index + 1]
if result.endswith(' and)') or result.endswith(' or)'):
raise SgScriptError('"%s" invalid search expression span' % result)
return result
else:
backwardParenCount = 1
backwardQuoteCount = 0
curWord = ''
for c in sgSearchExp:
if c == '(':
if backwardQuoteCount <= 0:
backwardParenCount += 1
elif c == ')':
if backwardQuoteCount <= 0:
backwardParenCount -= 1
elif c == '"' or c == "'":
backwardQuoteCount += 1
if backwardQuoteCount >= 2:
backwardQuoteCount = 0
if backwardQuoteCount <= 0 and len(curWord) >= 4:
if curWord.endswith(OP_AND):
curWord = curWord[:-5]
break
elif curWord.endswith(OP_OR):
curWord = curWord[:-4]
break
curWord += c
result = curWord
if result.endswith(' and') or result.endswith(' or'):
raise SgScriptError('"%s" invalid search expression span' % result)
return result
def splitSearchExp(sgSearchExp):
'''
Splits a search expression into its spans.
'''
searchPattern = sgSearchExp
result = []
while len(searchPattern) >= 1:
span = buildSearchExpSpan(searchPattern)
searchPattern = searchPattern[len(span):]
result.append(span)
return result
EXPRESSION_SUPPORTS_IN = [
ShotgunORM.SgField.RETURN_TYPE_DATE,
ShotgunORM.SgField.RETURN_TYPE_DATE_TIME,
ShotgunORM.SgField.RETURN_TYPE_ENTITY,
ShotgunORM.SgField.RETURN_TYPE_FLOAT,
ShotgunORM.SgField.RETURN_TYPE_INT,
ShotgunORM.SgField.RETURN_TYPE_MULTI_ENTITY,
ShotgunORM.SgField.RETURN_TYPE_TEXT
]
SCRIPT_FIELDS = {
ShotgunORM.SgField.RETURN_TYPE_CHECKBOX: ShotgunORM.SgScriptFieldCheckbox(),
ShotgunORM.SgField.RETURN_TYPE_COLOR: ShotgunORM.SgScriptFieldColor(),
ShotgunORM.SgField.RETURN_TYPE_COLOR2: ShotgunORM.SgScriptFieldColor2(),
ShotgunORM.SgField.RETURN_TYPE_DATE: ShotgunORM.SgScriptFieldDate(),
ShotgunORM.SgField.RETURN_TYPE_DATE_TIME: ShotgunORM.SgScriptFieldDateTime(),
ShotgunORM.SgField.RETURN_TYPE_ENTITY: ShotgunORM.SgScriptFieldEntity(),
ShotgunORM.SgField.RETURN_TYPE_MULTI_ENTITY: ShotgunORM.SgScriptFieldEntityMulti(),
ShotgunORM.SgField.RETURN_TYPE_FLOAT: ShotgunORM.SgScriptFieldFloat(),
ShotgunORM.SgField.RETURN_TYPE_INT: ShotgunORM.SgScriptFieldInt(),
ShotgunORM.SgField.RETURN_TYPE_LIST: ShotgunORM.SgScriptFieldSelectionList(),
ShotgunORM.SgField.RETURN_TYPE_TAG_LIST: ShotgunORM.SgScriptFieldTagList(),
ShotgunORM.SgField.RETURN_TYPE_TEXT: ShotgunORM.SgScriptFieldText(),
}
def buildSearchExpFilter(sgEntityFieldInfos, sgArgs, sgSearchExpSpan):
|
def buildSearchExpFilters(sgEntityFieldInfos, sgArgs, sgSearchExpSpans):
'''
Builds the locial operator pattern from a search expression
'''
ShotgunORM.LoggerScriptEngine.debug(' + Parsing spans: %(sgSearchExpSpans)s', {'sgSearchExpSpans': sgSearchExpSpans})
logicalConds = []
logicalOp = {'logical_operator': None, 'conditions': logicalConds}
if len(sgSearchExpSpans) <= 0:
raise SgScriptError('empty search expression span')
if sgSearchExpSpans[0] in [OP_AND, OP_OR]:
raise SgScriptError('"%s" invalid search expression' % ' '.join(sgSearchExpSpans))
if len(sgSearchExpSpans) == 1:
span = sgSearchExpSpans[0]
if span.startswith('('):
while span.startswith('(') and span.endswith(')'):
span = span[1:-1]
return buildSearchExpFilters(
sgEntityFieldInfos,
sgArgs,
splitSearchExp(span)
)
curOp = None
for span in sgSearchExpSpans:
if span in [OP_AND, OP_OR]:
curOp = span
if curOp == OP_AND:
logicalOp['logical_operator'] = OP_AND_STRIP
else:
logicalOp['logical_operator'] = OP_OR_STRIP
break
if logicalOp['logical_operator'] == None:
if len(sgSearchExpSpans) >= 2:
raise SgScriptError('"%s" invalid search expression' % ' '.join(sgSearchExpSpans))
else:
logicalOp['logical_operator'] = OP_AND_STRIP
curOp = OP_AND
index = -1
for span in sgSearchExpSpans:
index += 1
if span in [OP_AND, OP_OR]:
if span != curOp:
logicalOp = {'logical_operator': span.strip(), 'conditions': [logicalOp]}
logicalOp['conditions'].append(
buildSearchExpFilters(
sgEntityFieldInfos,
sgArgs,
sgSearchExpSpans[index + 1:]
)
)
return logicalOp
continue
if span.startswith('('):
logicalConds.append(
buildSearchExpFilters(
sgEntityFieldInfos,
sgArgs,
splitSearchExp(span)
)
)
else:
logicalConds.extend(
buildSearchExpFilter(sgEntityFieldInfos, sgArgs, span)
)
return logicalOp
def parseToLogicalOp(sgEntityInfo, sgSearchExp, sgArgs=[]):
'''
Parses a search expression and returns the Shotgun formated search filter.
Args:
* (SgEntitySchemaInfo) sgEntityInfo:
SgEntitySchemaInfo that the search expression will reference.
* (str) sgSearchExp:
Search expression string.
* (list) sgArgs:
Args used when evaling search expression.
'''
if sgSearchExp == None:
raise SgScriptError('expected a str got None')
if len(sgSearchExp) <= 0 or sgSearchExp.isspace():
raise SgScriptError('empty search string')
ShotgunORM.LoggerScriptEngine.debug('# PARSING START')
ShotgunORM.LoggerScriptEngine.debug(' * entity: "%(sgEntityType)s"', {'sgEntityType': sgEntityInfo.label()})
ShotgunORM.LoggerScriptEngine.debug(' * search: "%(sgSearchExp)s"', {'sgSearchExp': sgSearchExp})
try:
sgSearchExp = cleanSearchExp(sgSearchExp)
except SgScriptError, e:
raise SgScriptError('%s in "%s"' % (e, sgSearchExp))
try:
searchExpSpans = splitSearchExp(sgSearchExp)
except SgScriptError, e:
raise SgScriptError('%s in "%s"' % (e, sgSearchExp))
try:
result = buildSearchExpFilters(sgEntityInfo.fieldInfos(), sgArgs, searchExpSpans)
except SgScriptError, e:
raise SgScriptError('%s in "%s"' % (e, sgSearchExp))
ShotgunORM.LoggerScriptEngine.debug('# PARSING COMPLETE!')
return result
LOG_TO_ORM_LOOKUP = {
'is': '%(path)s == %(values)s',
'is_not': '%(path)s != %(values)s',
'less_than': '%(path)s > %(values)s',
'greater_than': '%(path)s > %(values)s',
'contains': '%(path)s.contains(%(values)s)',
'not_contains': '!%(path)s.contains(%(values)s)',
'starts_with': '%(path)s.startswith(%(values)s)',
'ends_with': '%(path)s.endswith(%(values)s)',
'between': '%(path)s.between(%(value1)s, %(value2)s)',
'not_between': '!%(path)s.between(%(value1)s, %(value2)s)',
'in_last': '%(path)s.in_last(%(value1)s, %(value2)s)',
'not_in_last': '!%(path)s.in_last(%(value1)s, %(value2)s)',
'in_next': '%(path)s.in_next(%(value1)s, %(value2)s)',
'not_in_next': '!%(path)s.in_next(%(value1)s, %(value2)s)',
'in': '%(path)s in %(values)s',
'not_in': 'not %(path)s in %(values)s',
'type_is': '%(path)s.type(%(values)s)',
'type_is_not': '!%(path)s.type(%(values)s)',
'in_calendar_day': '%(path)s.in_day(%(values)s)',
'in_calendar_week': '%(path)s.in_week(%(values)s)',
'in_calendar_month': '%(path)s.in_month(%(values)s)',
'in_calendar_year': '%(path)s.in_year(%(values)s)',
'name_contains': '%(path)s.name_contains(%(values)s)',
'name_not_contains': '!%(path)s.name_contains(%(values)s)'
}
LOG_SINGLES = [
'is',
'is_not',
'less_than',
'greater_than',
'contains',
'starts_with',
'ends_with',
'type_is',
'type_is_not',
'name_contains',
'name_not_contains'
]
LOG_DOUBLES = [
'between',
'not_between',
'in_last',
'not_in_last',
'in_next',
'not_in_next'
]
def parseFromLogicalOp(sgLogicalOp):
'''
Parses a Shotgun logical operator and returns the search expression
representation of it.
Args:
* (dict) sgLogicalOp:
Shotgun formatted logical operator.
'''
try:
op = ' %s ' % sgLogicalOp['logical_operator']
comps = []
for c in sgLogicalOp['conditions']:
if c.has_key('logical_operator'):
comps.append('(%s)' % parseFromLogicalOp(c))
else:
data = {
'path': c['path'],
'values': c['values']
}
relation = c['relation']
if relation in LOG_SINGLES:
data['values'] = repr(data['values'][0])
exp = LOG_TO_ORM_LOOKUP[relation] % data
comps.append(exp)
elif relation in LOG_DOUBLES:
data['value1'] = repr(data['values'][0])
data['value2'] = repr(data['values'][1])
exp = LOG_TO_ORM_LOOKUP[relation] % data
comps.append(exp)
else:
exp = LOG_TO_ORM_LOOKUP[relation] % data
comps.append(exp)
return op.join(comps)
except Exception, e:
SgScriptError('error parsing logical operator: %s' % e)
| '''
Builds a logical operator from a search expression span.
'''
if len(sgSearchExpSpan) <= 0:
raise SgScriptError('search expression span empty')
ShotgunORM.LoggerScriptEngine.debug(' - Parsing sub-span: "%(sgSearchExpSpan)s"', {'sgSearchExpSpan': sgSearchExpSpan})
inverse = sgSearchExpSpan.startswith('!')
if inverse:
sgSearchExpSpan = sgSearchExpSpan[1:]
else:
if sgSearchExpSpan.startswith(' not '):
inverse = True
sgSearchExpSpan = sgSearchExpSpan[5:]
index = 0
for c in sgSearchExpSpan:
if c in [' ', '.', '=', '<', '>', '!']:
break
index += 1
fieldName = sgSearchExpSpan[:index]
try:
fieldInfo = sgEntityFieldInfos[fieldName]
except KeyError:
raise SgScriptError('"%s" invalid field name' % fieldName)
try:
scriptField = SCRIPT_FIELDS[fieldInfo.returnType()]
except AttributeError:
raise SgScriptError('field "%s" contains no scriptfield operator' % fieldName)
globalEnv = {}
localEnv = {
'argv': sgArgs,
fieldName: scriptField
}
# Python is lame as shit and doesnt return the value of calling __contains__
# on a class. If __contains__ returns anything other then None, False
# it returns True. So we cant use our wizardy with the script field class :(
#
# Correction for this problem follows.
if fieldInfo.returnType() in EXPRESSION_SUPPORTS_IN:
inString = '%s in ' % fieldName
if sgSearchExpSpan.startswith(inString):
a, b = sgSearchExpSpan.split(inString, 1)
sgSearchExpSpan = '%s._in(%s)' % (fieldName, b)
try:
expResult = eval(sgSearchExpSpan, globalEnv, localEnv)
except Exception, e:
raise SgScriptError('"%s" %s' % (sgSearchExpSpan, e))
if inverse and expResult['neop'] == None:
raise SgScriptError('%s does not contain a not equal function' % sgSearchExpSpan)
logicalCond = {
'path' : fieldName,
'relation' : None,
'values' : expResult['value']
}
if not isinstance(logicalCond['values'], (list, tuple)):
logicalCond['values'] = [logicalCond['values']]
if inverse:
logicalCond['relation'] = expResult['neop']
else:
logicalCond['relation'] = expResult['op']
return [logicalCond] | identifier_body |
SgScriptEngine.py | # Copyright (c) 2013, Nathan Dunsworth - NFXPlugins
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the NFXPlugins nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL NFXPLUGINS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
__all__ = [
'parseLogicalOp',
'parseSearchExp'
]
# Python imports
import exceptions
# This module imports
import ShotgunORM
OP_AND = ' and '
OP_OR = ' or '
OP_AND_STRIP = 'and'
OP_OR_STRIP = 'or'
class SgScriptError(exceptions.Exception):
'''
General script engine exception.
'''
pass
def cleanSearchExp(sgSearchExp):
'''
Returns the passed search expression cleaned up of extra spaces.
Also throws when closing parentheses and quotes are not present.
'''
backwardParenCount = 0
backwardQuoteCount = 0
index = 0
curWord = ''
for c in sgSearchExp:
if backwardParenCount < 0:
raise SgScriptError('"%s" missing closing parentheses' % curWord)
if c == '(':
if backwardQuoteCount <= 0:
backwardParenCount += 1
curWord += c
elif c == ')':
if backwardQuoteCount <= 0:
backwardParenCount -= 1
if backwardQuoteCount <= 0 and curWord.endswith(' '):
curWord = curWord[:-1] + c
else:
curWord += c
elif c == '"' or c == "'":
backwardQuoteCount += 1
if backwardQuoteCount >= 2:
backwardQuoteCount = 0
curWord += c
elif c == ' ':
if len(curWord) <= 0:
continue
| if curWord.endswith('(') and backwardQuoteCount <= 0:
continue
if backwardQuoteCount >= 1 or not curWord.endswith(' '):
curWord += c
else:
curWord += c
if backwardParenCount != 0:
raise SgScriptError('"%s" missing closing parentheses' % curWord)
result = curWord.strip()
if backwardQuoteCount >= 1:
raise SgScriptError('"%s" missing closing quote' % curWord)
#ShotgunORM.LoggerScriptEngine.debug('ShotgunORM.SgScriptEngine.cleanSearchExp(...)')
#ShotgunORM.LoggerScriptEngine.debug(' * before: "%(searchExp)s"', {'searchExp': sgSearchExp})
#ShotgunORM.LoggerScriptEngine.debug(' * after: "%(searchExp)s"', {'searchExp': result})
return result
def buildSearchExpSpan(sgSearchExp):
'''
Returns the next span in a search expression.
'''
if sgSearchExp.startswith(OP_AND):
return OP_AND
elif sgSearchExp.startswith(OP_OR):
return OP_OR
if sgSearchExp.startswith('('):
backwardParenCount = 0
backwardQuoteCount = 0
index = -1
for c in sgSearchExp:
index += 1
if c == '(':
if backwardQuoteCount <= 0:
backwardParenCount += 1
elif c == ')':
if backwardQuoteCount <= 0:
backwardParenCount -= 1
if backwardParenCount == 0:
break
elif c == '"' or c == "'":
backwardQuoteCount += 1
if backwardQuoteCount >= 2:
backwardQuoteCount = 0
result = sgSearchExp[:index + 1]
if result.endswith(' and)') or result.endswith(' or)'):
raise SgScriptError('"%s" invalid search expression span' % result)
return result
else:
backwardParenCount = 1
backwardQuoteCount = 0
curWord = ''
for c in sgSearchExp:
if c == '(':
if backwardQuoteCount <= 0:
backwardParenCount += 1
elif c == ')':
if backwardQuoteCount <= 0:
backwardParenCount -= 1
elif c == '"' or c == "'":
backwardQuoteCount += 1
if backwardQuoteCount >= 2:
backwardQuoteCount = 0
if backwardQuoteCount <= 0 and len(curWord) >= 4:
if curWord.endswith(OP_AND):
curWord = curWord[:-5]
break
elif curWord.endswith(OP_OR):
curWord = curWord[:-4]
break
curWord += c
result = curWord
if result.endswith(' and') or result.endswith(' or'):
raise SgScriptError('"%s" invalid search expression span' % result)
return result
def splitSearchExp(sgSearchExp):
'''
Splits a search expression into its spans.
'''
searchPattern = sgSearchExp
result = []
while len(searchPattern) >= 1:
span = buildSearchExpSpan(searchPattern)
searchPattern = searchPattern[len(span):]
result.append(span)
return result
EXPRESSION_SUPPORTS_IN = [
ShotgunORM.SgField.RETURN_TYPE_DATE,
ShotgunORM.SgField.RETURN_TYPE_DATE_TIME,
ShotgunORM.SgField.RETURN_TYPE_ENTITY,
ShotgunORM.SgField.RETURN_TYPE_FLOAT,
ShotgunORM.SgField.RETURN_TYPE_INT,
ShotgunORM.SgField.RETURN_TYPE_MULTI_ENTITY,
ShotgunORM.SgField.RETURN_TYPE_TEXT
]
SCRIPT_FIELDS = {
ShotgunORM.SgField.RETURN_TYPE_CHECKBOX: ShotgunORM.SgScriptFieldCheckbox(),
ShotgunORM.SgField.RETURN_TYPE_COLOR: ShotgunORM.SgScriptFieldColor(),
ShotgunORM.SgField.RETURN_TYPE_COLOR2: ShotgunORM.SgScriptFieldColor2(),
ShotgunORM.SgField.RETURN_TYPE_DATE: ShotgunORM.SgScriptFieldDate(),
ShotgunORM.SgField.RETURN_TYPE_DATE_TIME: ShotgunORM.SgScriptFieldDateTime(),
ShotgunORM.SgField.RETURN_TYPE_ENTITY: ShotgunORM.SgScriptFieldEntity(),
ShotgunORM.SgField.RETURN_TYPE_MULTI_ENTITY: ShotgunORM.SgScriptFieldEntityMulti(),
ShotgunORM.SgField.RETURN_TYPE_FLOAT: ShotgunORM.SgScriptFieldFloat(),
ShotgunORM.SgField.RETURN_TYPE_INT: ShotgunORM.SgScriptFieldInt(),
ShotgunORM.SgField.RETURN_TYPE_LIST: ShotgunORM.SgScriptFieldSelectionList(),
ShotgunORM.SgField.RETURN_TYPE_TAG_LIST: ShotgunORM.SgScriptFieldTagList(),
ShotgunORM.SgField.RETURN_TYPE_TEXT: ShotgunORM.SgScriptFieldText(),
}
def buildSearchExpFilter(sgEntityFieldInfos, sgArgs, sgSearchExpSpan):
'''
Builds a logical operator from a search expression span.
'''
if len(sgSearchExpSpan) <= 0:
raise SgScriptError('search expression span empty')
ShotgunORM.LoggerScriptEngine.debug(' - Parsing sub-span: "%(sgSearchExpSpan)s"', {'sgSearchExpSpan': sgSearchExpSpan})
inverse = sgSearchExpSpan.startswith('!')
if inverse:
sgSearchExpSpan = sgSearchExpSpan[1:]
else:
if sgSearchExpSpan.startswith(' not '):
inverse = True
sgSearchExpSpan = sgSearchExpSpan[5:]
index = 0
for c in sgSearchExpSpan:
if c in [' ', '.', '=', '<', '>', '!']:
break
index += 1
fieldName = sgSearchExpSpan[:index]
try:
fieldInfo = sgEntityFieldInfos[fieldName]
except KeyError:
raise SgScriptError('"%s" invalid field name' % fieldName)
try:
scriptField = SCRIPT_FIELDS[fieldInfo.returnType()]
except AttributeError:
raise SgScriptError('field "%s" contains no scriptfield operator' % fieldName)
globalEnv = {}
localEnv = {
'argv': sgArgs,
fieldName: scriptField
}
# Python is lame as shit and doesnt return the value of calling __contains__
# on a class. If __contains__ returns anything other then None, False
# it returns True. So we cant use our wizardy with the script field class :(
#
# Correction for this problem follows.
if fieldInfo.returnType() in EXPRESSION_SUPPORTS_IN:
inString = '%s in ' % fieldName
if sgSearchExpSpan.startswith(inString):
a, b = sgSearchExpSpan.split(inString, 1)
sgSearchExpSpan = '%s._in(%s)' % (fieldName, b)
try:
expResult = eval(sgSearchExpSpan, globalEnv, localEnv)
except Exception, e:
raise SgScriptError('"%s" %s' % (sgSearchExpSpan, e))
if inverse and expResult['neop'] == None:
raise SgScriptError('%s does not contain a not equal function' % sgSearchExpSpan)
logicalCond = {
'path' : fieldName,
'relation' : None,
'values' : expResult['value']
}
if not isinstance(logicalCond['values'], (list, tuple)):
logicalCond['values'] = [logicalCond['values']]
if inverse:
logicalCond['relation'] = expResult['neop']
else:
logicalCond['relation'] = expResult['op']
return [logicalCond]
def buildSearchExpFilters(sgEntityFieldInfos, sgArgs, sgSearchExpSpans):
'''
Builds the locial operator pattern from a search expression
'''
ShotgunORM.LoggerScriptEngine.debug(' + Parsing spans: %(sgSearchExpSpans)s', {'sgSearchExpSpans': sgSearchExpSpans})
logicalConds = []
logicalOp = {'logical_operator': None, 'conditions': logicalConds}
if len(sgSearchExpSpans) <= 0:
raise SgScriptError('empty search expression span')
if sgSearchExpSpans[0] in [OP_AND, OP_OR]:
raise SgScriptError('"%s" invalid search expression' % ' '.join(sgSearchExpSpans))
if len(sgSearchExpSpans) == 1:
span = sgSearchExpSpans[0]
if span.startswith('('):
while span.startswith('(') and span.endswith(')'):
span = span[1:-1]
return buildSearchExpFilters(
sgEntityFieldInfos,
sgArgs,
splitSearchExp(span)
)
curOp = None
for span in sgSearchExpSpans:
if span in [OP_AND, OP_OR]:
curOp = span
if curOp == OP_AND:
logicalOp['logical_operator'] = OP_AND_STRIP
else:
logicalOp['logical_operator'] = OP_OR_STRIP
break
if logicalOp['logical_operator'] == None:
if len(sgSearchExpSpans) >= 2:
raise SgScriptError('"%s" invalid search expression' % ' '.join(sgSearchExpSpans))
else:
logicalOp['logical_operator'] = OP_AND_STRIP
curOp = OP_AND
index = -1
for span in sgSearchExpSpans:
index += 1
if span in [OP_AND, OP_OR]:
if span != curOp:
logicalOp = {'logical_operator': span.strip(), 'conditions': [logicalOp]}
logicalOp['conditions'].append(
buildSearchExpFilters(
sgEntityFieldInfos,
sgArgs,
sgSearchExpSpans[index + 1:]
)
)
return logicalOp
continue
if span.startswith('('):
logicalConds.append(
buildSearchExpFilters(
sgEntityFieldInfos,
sgArgs,
splitSearchExp(span)
)
)
else:
logicalConds.extend(
buildSearchExpFilter(sgEntityFieldInfos, sgArgs, span)
)
return logicalOp
def parseToLogicalOp(sgEntityInfo, sgSearchExp, sgArgs=[]):
'''
Parses a search expression and returns the Shotgun formated search filter.
Args:
* (SgEntitySchemaInfo) sgEntityInfo:
SgEntitySchemaInfo that the search expression will reference.
* (str) sgSearchExp:
Search expression string.
* (list) sgArgs:
Args used when evaling search expression.
'''
if sgSearchExp == None:
raise SgScriptError('expected a str got None')
if len(sgSearchExp) <= 0 or sgSearchExp.isspace():
raise SgScriptError('empty search string')
ShotgunORM.LoggerScriptEngine.debug('# PARSING START')
ShotgunORM.LoggerScriptEngine.debug(' * entity: "%(sgEntityType)s"', {'sgEntityType': sgEntityInfo.label()})
ShotgunORM.LoggerScriptEngine.debug(' * search: "%(sgSearchExp)s"', {'sgSearchExp': sgSearchExp})
try:
sgSearchExp = cleanSearchExp(sgSearchExp)
except SgScriptError, e:
raise SgScriptError('%s in "%s"' % (e, sgSearchExp))
try:
searchExpSpans = splitSearchExp(sgSearchExp)
except SgScriptError, e:
raise SgScriptError('%s in "%s"' % (e, sgSearchExp))
try:
result = buildSearchExpFilters(sgEntityInfo.fieldInfos(), sgArgs, searchExpSpans)
except SgScriptError, e:
raise SgScriptError('%s in "%s"' % (e, sgSearchExp))
ShotgunORM.LoggerScriptEngine.debug('# PARSING COMPLETE!')
return result
LOG_TO_ORM_LOOKUP = {
'is': '%(path)s == %(values)s',
'is_not': '%(path)s != %(values)s',
'less_than': '%(path)s > %(values)s',
'greater_than': '%(path)s > %(values)s',
'contains': '%(path)s.contains(%(values)s)',
'not_contains': '!%(path)s.contains(%(values)s)',
'starts_with': '%(path)s.startswith(%(values)s)',
'ends_with': '%(path)s.endswith(%(values)s)',
'between': '%(path)s.between(%(value1)s, %(value2)s)',
'not_between': '!%(path)s.between(%(value1)s, %(value2)s)',
'in_last': '%(path)s.in_last(%(value1)s, %(value2)s)',
'not_in_last': '!%(path)s.in_last(%(value1)s, %(value2)s)',
'in_next': '%(path)s.in_next(%(value1)s, %(value2)s)',
'not_in_next': '!%(path)s.in_next(%(value1)s, %(value2)s)',
'in': '%(path)s in %(values)s',
'not_in': 'not %(path)s in %(values)s',
'type_is': '%(path)s.type(%(values)s)',
'type_is_not': '!%(path)s.type(%(values)s)',
'in_calendar_day': '%(path)s.in_day(%(values)s)',
'in_calendar_week': '%(path)s.in_week(%(values)s)',
'in_calendar_month': '%(path)s.in_month(%(values)s)',
'in_calendar_year': '%(path)s.in_year(%(values)s)',
'name_contains': '%(path)s.name_contains(%(values)s)',
'name_not_contains': '!%(path)s.name_contains(%(values)s)'
}
LOG_SINGLES = [
'is',
'is_not',
'less_than',
'greater_than',
'contains',
'starts_with',
'ends_with',
'type_is',
'type_is_not',
'name_contains',
'name_not_contains'
]
LOG_DOUBLES = [
'between',
'not_between',
'in_last',
'not_in_last',
'in_next',
'not_in_next'
]
def parseFromLogicalOp(sgLogicalOp):
'''
Parses a Shotgun logical operator and returns the search expression
representation of it.
Args:
* (dict) sgLogicalOp:
Shotgun formatted logical operator.
'''
try:
op = ' %s ' % sgLogicalOp['logical_operator']
comps = []
for c in sgLogicalOp['conditions']:
if c.has_key('logical_operator'):
comps.append('(%s)' % parseFromLogicalOp(c))
else:
data = {
'path': c['path'],
'values': c['values']
}
relation = c['relation']
if relation in LOG_SINGLES:
data['values'] = repr(data['values'][0])
exp = LOG_TO_ORM_LOOKUP[relation] % data
comps.append(exp)
elif relation in LOG_DOUBLES:
data['value1'] = repr(data['values'][0])
data['value2'] = repr(data['values'][1])
exp = LOG_TO_ORM_LOOKUP[relation] % data
comps.append(exp)
else:
exp = LOG_TO_ORM_LOOKUP[relation] % data
comps.append(exp)
return op.join(comps)
except Exception, e:
SgScriptError('error parsing logical operator: %s' % e) | random_line_split | |
SgScriptEngine.py | # Copyright (c) 2013, Nathan Dunsworth - NFXPlugins
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the NFXPlugins nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL NFXPLUGINS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
__all__ = [
'parseLogicalOp',
'parseSearchExp'
]
# Python imports
import exceptions
# This module imports
import ShotgunORM
OP_AND = ' and '
OP_OR = ' or '
OP_AND_STRIP = 'and'
OP_OR_STRIP = 'or'
class SgScriptError(exceptions.Exception):
'''
General script engine exception.
'''
pass
def cleanSearchExp(sgSearchExp):
'''
Returns the passed search expression cleaned up of extra spaces.
Also throws when closing parentheses and quotes are not present.
'''
backwardParenCount = 0
backwardQuoteCount = 0
index = 0
curWord = ''
for c in sgSearchExp:
if backwardParenCount < 0:
raise SgScriptError('"%s" missing closing parentheses' % curWord)
if c == '(':
if backwardQuoteCount <= 0:
backwardParenCount += 1
curWord += c
elif c == ')':
if backwardQuoteCount <= 0:
backwardParenCount -= 1
if backwardQuoteCount <= 0 and curWord.endswith(' '):
curWord = curWord[:-1] + c
else:
curWord += c
elif c == '"' or c == "'":
backwardQuoteCount += 1
if backwardQuoteCount >= 2:
backwardQuoteCount = 0
curWord += c
elif c == ' ':
if len(curWord) <= 0:
continue
if curWord.endswith('(') and backwardQuoteCount <= 0:
continue
if backwardQuoteCount >= 1 or not curWord.endswith(' '):
curWord += c
else:
curWord += c
if backwardParenCount != 0:
raise SgScriptError('"%s" missing closing parentheses' % curWord)
result = curWord.strip()
if backwardQuoteCount >= 1:
raise SgScriptError('"%s" missing closing quote' % curWord)
#ShotgunORM.LoggerScriptEngine.debug('ShotgunORM.SgScriptEngine.cleanSearchExp(...)')
#ShotgunORM.LoggerScriptEngine.debug(' * before: "%(searchExp)s"', {'searchExp': sgSearchExp})
#ShotgunORM.LoggerScriptEngine.debug(' * after: "%(searchExp)s"', {'searchExp': result})
return result
def buildSearchExpSpan(sgSearchExp):
'''
Returns the next span in a search expression.
'''
if sgSearchExp.startswith(OP_AND):
return OP_AND
elif sgSearchExp.startswith(OP_OR):
return OP_OR
if sgSearchExp.startswith('('):
backwardParenCount = 0
backwardQuoteCount = 0
index = -1
for c in sgSearchExp:
index += 1
if c == '(':
if backwardQuoteCount <= 0:
backwardParenCount += 1
elif c == ')':
if backwardQuoteCount <= 0:
backwardParenCount -= 1
if backwardParenCount == 0:
break
elif c == '"' or c == "'":
backwardQuoteCount += 1
if backwardQuoteCount >= 2:
backwardQuoteCount = 0
result = sgSearchExp[:index + 1]
if result.endswith(' and)') or result.endswith(' or)'):
raise SgScriptError('"%s" invalid search expression span' % result)
return result
else:
backwardParenCount = 1
backwardQuoteCount = 0
curWord = ''
for c in sgSearchExp:
if c == '(':
if backwardQuoteCount <= 0:
backwardParenCount += 1
elif c == ')':
if backwardQuoteCount <= 0:
backwardParenCount -= 1
elif c == '"' or c == "'":
backwardQuoteCount += 1
if backwardQuoteCount >= 2:
backwardQuoteCount = 0
if backwardQuoteCount <= 0 and len(curWord) >= 4:
if curWord.endswith(OP_AND):
curWord = curWord[:-5]
break
elif curWord.endswith(OP_OR):
curWord = curWord[:-4]
break
curWord += c
result = curWord
if result.endswith(' and') or result.endswith(' or'):
raise SgScriptError('"%s" invalid search expression span' % result)
return result
def splitSearchExp(sgSearchExp):
'''
Splits a search expression into its spans.
'''
searchPattern = sgSearchExp
result = []
while len(searchPattern) >= 1:
span = buildSearchExpSpan(searchPattern)
searchPattern = searchPattern[len(span):]
result.append(span)
return result
EXPRESSION_SUPPORTS_IN = [
ShotgunORM.SgField.RETURN_TYPE_DATE,
ShotgunORM.SgField.RETURN_TYPE_DATE_TIME,
ShotgunORM.SgField.RETURN_TYPE_ENTITY,
ShotgunORM.SgField.RETURN_TYPE_FLOAT,
ShotgunORM.SgField.RETURN_TYPE_INT,
ShotgunORM.SgField.RETURN_TYPE_MULTI_ENTITY,
ShotgunORM.SgField.RETURN_TYPE_TEXT
]
SCRIPT_FIELDS = {
ShotgunORM.SgField.RETURN_TYPE_CHECKBOX: ShotgunORM.SgScriptFieldCheckbox(),
ShotgunORM.SgField.RETURN_TYPE_COLOR: ShotgunORM.SgScriptFieldColor(),
ShotgunORM.SgField.RETURN_TYPE_COLOR2: ShotgunORM.SgScriptFieldColor2(),
ShotgunORM.SgField.RETURN_TYPE_DATE: ShotgunORM.SgScriptFieldDate(),
ShotgunORM.SgField.RETURN_TYPE_DATE_TIME: ShotgunORM.SgScriptFieldDateTime(),
ShotgunORM.SgField.RETURN_TYPE_ENTITY: ShotgunORM.SgScriptFieldEntity(),
ShotgunORM.SgField.RETURN_TYPE_MULTI_ENTITY: ShotgunORM.SgScriptFieldEntityMulti(),
ShotgunORM.SgField.RETURN_TYPE_FLOAT: ShotgunORM.SgScriptFieldFloat(),
ShotgunORM.SgField.RETURN_TYPE_INT: ShotgunORM.SgScriptFieldInt(),
ShotgunORM.SgField.RETURN_TYPE_LIST: ShotgunORM.SgScriptFieldSelectionList(),
ShotgunORM.SgField.RETURN_TYPE_TAG_LIST: ShotgunORM.SgScriptFieldTagList(),
ShotgunORM.SgField.RETURN_TYPE_TEXT: ShotgunORM.SgScriptFieldText(),
}
def buildSearchExpFilter(sgEntityFieldInfos, sgArgs, sgSearchExpSpan):
'''
Builds a logical operator from a search expression span.
'''
if len(sgSearchExpSpan) <= 0:
raise SgScriptError('search expression span empty')
ShotgunORM.LoggerScriptEngine.debug(' - Parsing sub-span: "%(sgSearchExpSpan)s"', {'sgSearchExpSpan': sgSearchExpSpan})
inverse = sgSearchExpSpan.startswith('!')
if inverse:
|
else:
if sgSearchExpSpan.startswith(' not '):
inverse = True
sgSearchExpSpan = sgSearchExpSpan[5:]
index = 0
for c in sgSearchExpSpan:
if c in [' ', '.', '=', '<', '>', '!']:
break
index += 1
fieldName = sgSearchExpSpan[:index]
try:
fieldInfo = sgEntityFieldInfos[fieldName]
except KeyError:
raise SgScriptError('"%s" invalid field name' % fieldName)
try:
scriptField = SCRIPT_FIELDS[fieldInfo.returnType()]
except AttributeError:
raise SgScriptError('field "%s" contains no scriptfield operator' % fieldName)
globalEnv = {}
localEnv = {
'argv': sgArgs,
fieldName: scriptField
}
# Python is lame as shit and doesnt return the value of calling __contains__
# on a class. If __contains__ returns anything other then None, False
# it returns True. So we cant use our wizardy with the script field class :(
#
# Correction for this problem follows.
if fieldInfo.returnType() in EXPRESSION_SUPPORTS_IN:
inString = '%s in ' % fieldName
if sgSearchExpSpan.startswith(inString):
a, b = sgSearchExpSpan.split(inString, 1)
sgSearchExpSpan = '%s._in(%s)' % (fieldName, b)
try:
expResult = eval(sgSearchExpSpan, globalEnv, localEnv)
except Exception, e:
raise SgScriptError('"%s" %s' % (sgSearchExpSpan, e))
if inverse and expResult['neop'] == None:
raise SgScriptError('%s does not contain a not equal function' % sgSearchExpSpan)
logicalCond = {
'path' : fieldName,
'relation' : None,
'values' : expResult['value']
}
if not isinstance(logicalCond['values'], (list, tuple)):
logicalCond['values'] = [logicalCond['values']]
if inverse:
logicalCond['relation'] = expResult['neop']
else:
logicalCond['relation'] = expResult['op']
return [logicalCond]
def buildSearchExpFilters(sgEntityFieldInfos, sgArgs, sgSearchExpSpans):
'''
Builds the locial operator pattern from a search expression
'''
ShotgunORM.LoggerScriptEngine.debug(' + Parsing spans: %(sgSearchExpSpans)s', {'sgSearchExpSpans': sgSearchExpSpans})
logicalConds = []
logicalOp = {'logical_operator': None, 'conditions': logicalConds}
if len(sgSearchExpSpans) <= 0:
raise SgScriptError('empty search expression span')
if sgSearchExpSpans[0] in [OP_AND, OP_OR]:
raise SgScriptError('"%s" invalid search expression' % ' '.join(sgSearchExpSpans))
if len(sgSearchExpSpans) == 1:
span = sgSearchExpSpans[0]
if span.startswith('('):
while span.startswith('(') and span.endswith(')'):
span = span[1:-1]
return buildSearchExpFilters(
sgEntityFieldInfos,
sgArgs,
splitSearchExp(span)
)
curOp = None
for span in sgSearchExpSpans:
if span in [OP_AND, OP_OR]:
curOp = span
if curOp == OP_AND:
logicalOp['logical_operator'] = OP_AND_STRIP
else:
logicalOp['logical_operator'] = OP_OR_STRIP
break
if logicalOp['logical_operator'] == None:
if len(sgSearchExpSpans) >= 2:
raise SgScriptError('"%s" invalid search expression' % ' '.join(sgSearchExpSpans))
else:
logicalOp['logical_operator'] = OP_AND_STRIP
curOp = OP_AND
index = -1
for span in sgSearchExpSpans:
index += 1
if span in [OP_AND, OP_OR]:
if span != curOp:
logicalOp = {'logical_operator': span.strip(), 'conditions': [logicalOp]}
logicalOp['conditions'].append(
buildSearchExpFilters(
sgEntityFieldInfos,
sgArgs,
sgSearchExpSpans[index + 1:]
)
)
return logicalOp
continue
if span.startswith('('):
logicalConds.append(
buildSearchExpFilters(
sgEntityFieldInfos,
sgArgs,
splitSearchExp(span)
)
)
else:
logicalConds.extend(
buildSearchExpFilter(sgEntityFieldInfos, sgArgs, span)
)
return logicalOp
def parseToLogicalOp(sgEntityInfo, sgSearchExp, sgArgs=[]):
'''
Parses a search expression and returns the Shotgun formated search filter.
Args:
* (SgEntitySchemaInfo) sgEntityInfo:
SgEntitySchemaInfo that the search expression will reference.
* (str) sgSearchExp:
Search expression string.
* (list) sgArgs:
Args used when evaling search expression.
'''
if sgSearchExp == None:
raise SgScriptError('expected a str got None')
if len(sgSearchExp) <= 0 or sgSearchExp.isspace():
raise SgScriptError('empty search string')
ShotgunORM.LoggerScriptEngine.debug('# PARSING START')
ShotgunORM.LoggerScriptEngine.debug(' * entity: "%(sgEntityType)s"', {'sgEntityType': sgEntityInfo.label()})
ShotgunORM.LoggerScriptEngine.debug(' * search: "%(sgSearchExp)s"', {'sgSearchExp': sgSearchExp})
try:
sgSearchExp = cleanSearchExp(sgSearchExp)
except SgScriptError, e:
raise SgScriptError('%s in "%s"' % (e, sgSearchExp))
try:
searchExpSpans = splitSearchExp(sgSearchExp)
except SgScriptError, e:
raise SgScriptError('%s in "%s"' % (e, sgSearchExp))
try:
result = buildSearchExpFilters(sgEntityInfo.fieldInfos(), sgArgs, searchExpSpans)
except SgScriptError, e:
raise SgScriptError('%s in "%s"' % (e, sgSearchExp))
ShotgunORM.LoggerScriptEngine.debug('# PARSING COMPLETE!')
return result
LOG_TO_ORM_LOOKUP = {
'is': '%(path)s == %(values)s',
'is_not': '%(path)s != %(values)s',
'less_than': '%(path)s > %(values)s',
'greater_than': '%(path)s > %(values)s',
'contains': '%(path)s.contains(%(values)s)',
'not_contains': '!%(path)s.contains(%(values)s)',
'starts_with': '%(path)s.startswith(%(values)s)',
'ends_with': '%(path)s.endswith(%(values)s)',
'between': '%(path)s.between(%(value1)s, %(value2)s)',
'not_between': '!%(path)s.between(%(value1)s, %(value2)s)',
'in_last': '%(path)s.in_last(%(value1)s, %(value2)s)',
'not_in_last': '!%(path)s.in_last(%(value1)s, %(value2)s)',
'in_next': '%(path)s.in_next(%(value1)s, %(value2)s)',
'not_in_next': '!%(path)s.in_next(%(value1)s, %(value2)s)',
'in': '%(path)s in %(values)s',
'not_in': 'not %(path)s in %(values)s',
'type_is': '%(path)s.type(%(values)s)',
'type_is_not': '!%(path)s.type(%(values)s)',
'in_calendar_day': '%(path)s.in_day(%(values)s)',
'in_calendar_week': '%(path)s.in_week(%(values)s)',
'in_calendar_month': '%(path)s.in_month(%(values)s)',
'in_calendar_year': '%(path)s.in_year(%(values)s)',
'name_contains': '%(path)s.name_contains(%(values)s)',
'name_not_contains': '!%(path)s.name_contains(%(values)s)'
}
LOG_SINGLES = [
'is',
'is_not',
'less_than',
'greater_than',
'contains',
'starts_with',
'ends_with',
'type_is',
'type_is_not',
'name_contains',
'name_not_contains'
]
LOG_DOUBLES = [
'between',
'not_between',
'in_last',
'not_in_last',
'in_next',
'not_in_next'
]
def parseFromLogicalOp(sgLogicalOp):
'''
Parses a Shotgun logical operator and returns the search expression
representation of it.
Args:
* (dict) sgLogicalOp:
Shotgun formatted logical operator.
'''
try:
op = ' %s ' % sgLogicalOp['logical_operator']
comps = []
for c in sgLogicalOp['conditions']:
if c.has_key('logical_operator'):
comps.append('(%s)' % parseFromLogicalOp(c))
else:
data = {
'path': c['path'],
'values': c['values']
}
relation = c['relation']
if relation in LOG_SINGLES:
data['values'] = repr(data['values'][0])
exp = LOG_TO_ORM_LOOKUP[relation] % data
comps.append(exp)
elif relation in LOG_DOUBLES:
data['value1'] = repr(data['values'][0])
data['value2'] = repr(data['values'][1])
exp = LOG_TO_ORM_LOOKUP[relation] % data
comps.append(exp)
else:
exp = LOG_TO_ORM_LOOKUP[relation] % data
comps.append(exp)
return op.join(comps)
except Exception, e:
SgScriptError('error parsing logical operator: %s' % e)
| sgSearchExpSpan = sgSearchExpSpan[1:] | conditional_block |
SgScriptEngine.py | # Copyright (c) 2013, Nathan Dunsworth - NFXPlugins
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the NFXPlugins nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL NFXPLUGINS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
__all__ = [
'parseLogicalOp',
'parseSearchExp'
]
# Python imports
import exceptions
# This module imports
import ShotgunORM
OP_AND = ' and '
OP_OR = ' or '
OP_AND_STRIP = 'and'
OP_OR_STRIP = 'or'
class | (exceptions.Exception):
'''
General script engine exception.
'''
pass
def cleanSearchExp(sgSearchExp):
'''
Returns the passed search expression cleaned up of extra spaces.
Also throws when closing parentheses and quotes are not present.
'''
backwardParenCount = 0
backwardQuoteCount = 0
index = 0
curWord = ''
for c in sgSearchExp:
if backwardParenCount < 0:
raise SgScriptError('"%s" missing closing parentheses' % curWord)
if c == '(':
if backwardQuoteCount <= 0:
backwardParenCount += 1
curWord += c
elif c == ')':
if backwardQuoteCount <= 0:
backwardParenCount -= 1
if backwardQuoteCount <= 0 and curWord.endswith(' '):
curWord = curWord[:-1] + c
else:
curWord += c
elif c == '"' or c == "'":
backwardQuoteCount += 1
if backwardQuoteCount >= 2:
backwardQuoteCount = 0
curWord += c
elif c == ' ':
if len(curWord) <= 0:
continue
if curWord.endswith('(') and backwardQuoteCount <= 0:
continue
if backwardQuoteCount >= 1 or not curWord.endswith(' '):
curWord += c
else:
curWord += c
if backwardParenCount != 0:
raise SgScriptError('"%s" missing closing parentheses' % curWord)
result = curWord.strip()
if backwardQuoteCount >= 1:
raise SgScriptError('"%s" missing closing quote' % curWord)
#ShotgunORM.LoggerScriptEngine.debug('ShotgunORM.SgScriptEngine.cleanSearchExp(...)')
#ShotgunORM.LoggerScriptEngine.debug(' * before: "%(searchExp)s"', {'searchExp': sgSearchExp})
#ShotgunORM.LoggerScriptEngine.debug(' * after: "%(searchExp)s"', {'searchExp': result})
return result
def buildSearchExpSpan(sgSearchExp):
'''
Returns the next span in a search expression.
'''
if sgSearchExp.startswith(OP_AND):
return OP_AND
elif sgSearchExp.startswith(OP_OR):
return OP_OR
if sgSearchExp.startswith('('):
backwardParenCount = 0
backwardQuoteCount = 0
index = -1
for c in sgSearchExp:
index += 1
if c == '(':
if backwardQuoteCount <= 0:
backwardParenCount += 1
elif c == ')':
if backwardQuoteCount <= 0:
backwardParenCount -= 1
if backwardParenCount == 0:
break
elif c == '"' or c == "'":
backwardQuoteCount += 1
if backwardQuoteCount >= 2:
backwardQuoteCount = 0
result = sgSearchExp[:index + 1]
if result.endswith(' and)') or result.endswith(' or)'):
raise SgScriptError('"%s" invalid search expression span' % result)
return result
else:
backwardParenCount = 1
backwardQuoteCount = 0
curWord = ''
for c in sgSearchExp:
if c == '(':
if backwardQuoteCount <= 0:
backwardParenCount += 1
elif c == ')':
if backwardQuoteCount <= 0:
backwardParenCount -= 1
elif c == '"' or c == "'":
backwardQuoteCount += 1
if backwardQuoteCount >= 2:
backwardQuoteCount = 0
if backwardQuoteCount <= 0 and len(curWord) >= 4:
if curWord.endswith(OP_AND):
curWord = curWord[:-5]
break
elif curWord.endswith(OP_OR):
curWord = curWord[:-4]
break
curWord += c
result = curWord
if result.endswith(' and') or result.endswith(' or'):
raise SgScriptError('"%s" invalid search expression span' % result)
return result
def splitSearchExp(sgSearchExp):
'''
Splits a search expression into its spans.
'''
searchPattern = sgSearchExp
result = []
while len(searchPattern) >= 1:
span = buildSearchExpSpan(searchPattern)
searchPattern = searchPattern[len(span):]
result.append(span)
return result
EXPRESSION_SUPPORTS_IN = [
ShotgunORM.SgField.RETURN_TYPE_DATE,
ShotgunORM.SgField.RETURN_TYPE_DATE_TIME,
ShotgunORM.SgField.RETURN_TYPE_ENTITY,
ShotgunORM.SgField.RETURN_TYPE_FLOAT,
ShotgunORM.SgField.RETURN_TYPE_INT,
ShotgunORM.SgField.RETURN_TYPE_MULTI_ENTITY,
ShotgunORM.SgField.RETURN_TYPE_TEXT
]
SCRIPT_FIELDS = {
ShotgunORM.SgField.RETURN_TYPE_CHECKBOX: ShotgunORM.SgScriptFieldCheckbox(),
ShotgunORM.SgField.RETURN_TYPE_COLOR: ShotgunORM.SgScriptFieldColor(),
ShotgunORM.SgField.RETURN_TYPE_COLOR2: ShotgunORM.SgScriptFieldColor2(),
ShotgunORM.SgField.RETURN_TYPE_DATE: ShotgunORM.SgScriptFieldDate(),
ShotgunORM.SgField.RETURN_TYPE_DATE_TIME: ShotgunORM.SgScriptFieldDateTime(),
ShotgunORM.SgField.RETURN_TYPE_ENTITY: ShotgunORM.SgScriptFieldEntity(),
ShotgunORM.SgField.RETURN_TYPE_MULTI_ENTITY: ShotgunORM.SgScriptFieldEntityMulti(),
ShotgunORM.SgField.RETURN_TYPE_FLOAT: ShotgunORM.SgScriptFieldFloat(),
ShotgunORM.SgField.RETURN_TYPE_INT: ShotgunORM.SgScriptFieldInt(),
ShotgunORM.SgField.RETURN_TYPE_LIST: ShotgunORM.SgScriptFieldSelectionList(),
ShotgunORM.SgField.RETURN_TYPE_TAG_LIST: ShotgunORM.SgScriptFieldTagList(),
ShotgunORM.SgField.RETURN_TYPE_TEXT: ShotgunORM.SgScriptFieldText(),
}
def buildSearchExpFilter(sgEntityFieldInfos, sgArgs, sgSearchExpSpan):
'''
Builds a logical operator from a search expression span.
'''
if len(sgSearchExpSpan) <= 0:
raise SgScriptError('search expression span empty')
ShotgunORM.LoggerScriptEngine.debug(' - Parsing sub-span: "%(sgSearchExpSpan)s"', {'sgSearchExpSpan': sgSearchExpSpan})
inverse = sgSearchExpSpan.startswith('!')
if inverse:
sgSearchExpSpan = sgSearchExpSpan[1:]
else:
if sgSearchExpSpan.startswith(' not '):
inverse = True
sgSearchExpSpan = sgSearchExpSpan[5:]
index = 0
for c in sgSearchExpSpan:
if c in [' ', '.', '=', '<', '>', '!']:
break
index += 1
fieldName = sgSearchExpSpan[:index]
try:
fieldInfo = sgEntityFieldInfos[fieldName]
except KeyError:
raise SgScriptError('"%s" invalid field name' % fieldName)
try:
scriptField = SCRIPT_FIELDS[fieldInfo.returnType()]
except AttributeError:
raise SgScriptError('field "%s" contains no scriptfield operator' % fieldName)
globalEnv = {}
localEnv = {
'argv': sgArgs,
fieldName: scriptField
}
# Python is lame as shit and doesnt return the value of calling __contains__
# on a class. If __contains__ returns anything other then None, False
# it returns True. So we cant use our wizardy with the script field class :(
#
# Correction for this problem follows.
if fieldInfo.returnType() in EXPRESSION_SUPPORTS_IN:
inString = '%s in ' % fieldName
if sgSearchExpSpan.startswith(inString):
a, b = sgSearchExpSpan.split(inString, 1)
sgSearchExpSpan = '%s._in(%s)' % (fieldName, b)
try:
expResult = eval(sgSearchExpSpan, globalEnv, localEnv)
except Exception, e:
raise SgScriptError('"%s" %s' % (sgSearchExpSpan, e))
if inverse and expResult['neop'] == None:
raise SgScriptError('%s does not contain a not equal function' % sgSearchExpSpan)
logicalCond = {
'path' : fieldName,
'relation' : None,
'values' : expResult['value']
}
if not isinstance(logicalCond['values'], (list, tuple)):
logicalCond['values'] = [logicalCond['values']]
if inverse:
logicalCond['relation'] = expResult['neop']
else:
logicalCond['relation'] = expResult['op']
return [logicalCond]
def buildSearchExpFilters(sgEntityFieldInfos, sgArgs, sgSearchExpSpans):
'''
Builds the locial operator pattern from a search expression
'''
ShotgunORM.LoggerScriptEngine.debug(' + Parsing spans: %(sgSearchExpSpans)s', {'sgSearchExpSpans': sgSearchExpSpans})
logicalConds = []
logicalOp = {'logical_operator': None, 'conditions': logicalConds}
if len(sgSearchExpSpans) <= 0:
raise SgScriptError('empty search expression span')
if sgSearchExpSpans[0] in [OP_AND, OP_OR]:
raise SgScriptError('"%s" invalid search expression' % ' '.join(sgSearchExpSpans))
if len(sgSearchExpSpans) == 1:
span = sgSearchExpSpans[0]
if span.startswith('('):
while span.startswith('(') and span.endswith(')'):
span = span[1:-1]
return buildSearchExpFilters(
sgEntityFieldInfos,
sgArgs,
splitSearchExp(span)
)
curOp = None
for span in sgSearchExpSpans:
if span in [OP_AND, OP_OR]:
curOp = span
if curOp == OP_AND:
logicalOp['logical_operator'] = OP_AND_STRIP
else:
logicalOp['logical_operator'] = OP_OR_STRIP
break
if logicalOp['logical_operator'] == None:
if len(sgSearchExpSpans) >= 2:
raise SgScriptError('"%s" invalid search expression' % ' '.join(sgSearchExpSpans))
else:
logicalOp['logical_operator'] = OP_AND_STRIP
curOp = OP_AND
index = -1
for span in sgSearchExpSpans:
index += 1
if span in [OP_AND, OP_OR]:
if span != curOp:
logicalOp = {'logical_operator': span.strip(), 'conditions': [logicalOp]}
logicalOp['conditions'].append(
buildSearchExpFilters(
sgEntityFieldInfos,
sgArgs,
sgSearchExpSpans[index + 1:]
)
)
return logicalOp
continue
if span.startswith('('):
logicalConds.append(
buildSearchExpFilters(
sgEntityFieldInfos,
sgArgs,
splitSearchExp(span)
)
)
else:
logicalConds.extend(
buildSearchExpFilter(sgEntityFieldInfos, sgArgs, span)
)
return logicalOp
def parseToLogicalOp(sgEntityInfo, sgSearchExp, sgArgs=[]):
'''
Parses a search expression and returns the Shotgun formated search filter.
Args:
* (SgEntitySchemaInfo) sgEntityInfo:
SgEntitySchemaInfo that the search expression will reference.
* (str) sgSearchExp:
Search expression string.
* (list) sgArgs:
Args used when evaling search expression.
'''
if sgSearchExp == None:
raise SgScriptError('expected a str got None')
if len(sgSearchExp) <= 0 or sgSearchExp.isspace():
raise SgScriptError('empty search string')
ShotgunORM.LoggerScriptEngine.debug('# PARSING START')
ShotgunORM.LoggerScriptEngine.debug(' * entity: "%(sgEntityType)s"', {'sgEntityType': sgEntityInfo.label()})
ShotgunORM.LoggerScriptEngine.debug(' * search: "%(sgSearchExp)s"', {'sgSearchExp': sgSearchExp})
try:
sgSearchExp = cleanSearchExp(sgSearchExp)
except SgScriptError, e:
raise SgScriptError('%s in "%s"' % (e, sgSearchExp))
try:
searchExpSpans = splitSearchExp(sgSearchExp)
except SgScriptError, e:
raise SgScriptError('%s in "%s"' % (e, sgSearchExp))
try:
result = buildSearchExpFilters(sgEntityInfo.fieldInfos(), sgArgs, searchExpSpans)
except SgScriptError, e:
raise SgScriptError('%s in "%s"' % (e, sgSearchExp))
ShotgunORM.LoggerScriptEngine.debug('# PARSING COMPLETE!')
return result
LOG_TO_ORM_LOOKUP = {
'is': '%(path)s == %(values)s',
'is_not': '%(path)s != %(values)s',
'less_than': '%(path)s > %(values)s',
'greater_than': '%(path)s > %(values)s',
'contains': '%(path)s.contains(%(values)s)',
'not_contains': '!%(path)s.contains(%(values)s)',
'starts_with': '%(path)s.startswith(%(values)s)',
'ends_with': '%(path)s.endswith(%(values)s)',
'between': '%(path)s.between(%(value1)s, %(value2)s)',
'not_between': '!%(path)s.between(%(value1)s, %(value2)s)',
'in_last': '%(path)s.in_last(%(value1)s, %(value2)s)',
'not_in_last': '!%(path)s.in_last(%(value1)s, %(value2)s)',
'in_next': '%(path)s.in_next(%(value1)s, %(value2)s)',
'not_in_next': '!%(path)s.in_next(%(value1)s, %(value2)s)',
'in': '%(path)s in %(values)s',
'not_in': 'not %(path)s in %(values)s',
'type_is': '%(path)s.type(%(values)s)',
'type_is_not': '!%(path)s.type(%(values)s)',
'in_calendar_day': '%(path)s.in_day(%(values)s)',
'in_calendar_week': '%(path)s.in_week(%(values)s)',
'in_calendar_month': '%(path)s.in_month(%(values)s)',
'in_calendar_year': '%(path)s.in_year(%(values)s)',
'name_contains': '%(path)s.name_contains(%(values)s)',
'name_not_contains': '!%(path)s.name_contains(%(values)s)'
}
LOG_SINGLES = [
'is',
'is_not',
'less_than',
'greater_than',
'contains',
'starts_with',
'ends_with',
'type_is',
'type_is_not',
'name_contains',
'name_not_contains'
]
LOG_DOUBLES = [
'between',
'not_between',
'in_last',
'not_in_last',
'in_next',
'not_in_next'
]
def parseFromLogicalOp(sgLogicalOp):
'''
Parses a Shotgun logical operator and returns the search expression
representation of it.
Args:
* (dict) sgLogicalOp:
Shotgun formatted logical operator.
'''
try:
op = ' %s ' % sgLogicalOp['logical_operator']
comps = []
for c in sgLogicalOp['conditions']:
if c.has_key('logical_operator'):
comps.append('(%s)' % parseFromLogicalOp(c))
else:
data = {
'path': c['path'],
'values': c['values']
}
relation = c['relation']
if relation in LOG_SINGLES:
data['values'] = repr(data['values'][0])
exp = LOG_TO_ORM_LOOKUP[relation] % data
comps.append(exp)
elif relation in LOG_DOUBLES:
data['value1'] = repr(data['values'][0])
data['value2'] = repr(data['values'][1])
exp = LOG_TO_ORM_LOOKUP[relation] % data
comps.append(exp)
else:
exp = LOG_TO_ORM_LOOKUP[relation] % data
comps.append(exp)
return op.join(comps)
except Exception, e:
SgScriptError('error parsing logical operator: %s' % e)
| SgScriptError | identifier_name |
model_abs.py | # coding: utf-8
#
# Purely electronic model of the Reaction Center
#
# Calculations of absorption spectra with a realistic lineshape theory
# and with effective Gaussian lineshapes
#
#
#
#
# In[1]:
import os
import numpy
import quantarhei as qr
print(qr.Manager().version)
import matplotlib.pyplot as plt
plt.switch_backend('agg')
# In[2]:
pre_in = "in"
pre_out = "out"
# check if pre_out exists and is a directory
if not os.path.isdir(pre_out):
try:
os.makedirs(pre_out, exist_ok=True)
except:
raise Exception("Output directory name '"
+pre_out+"' does not represent a valid directory")
#
# Model from Jordanides at al. Ref. 1 is adjusted and extended by two CT states
#
#
jordanides = False
if jordanides:
offset = 0.0
offset_P = 0.0 #485.0
offset_P_M = offset_P + 0.0
h_shift = 0.0
sc_H = 1.0
sc_P = 1.0
else:
offset = 275
offset_P = 400 #485.0
offset_P_M = offset_P + 100.0
h_shift = 85.0
sc_H = 0.79
sc_P = 0.75
#
# Molecules
#
with qr.energy_units("1/cm"):
PM = qr.Molecule([0.0, 11610.0+offset_P_M], name="PM")
PL = qr.Molecule([0.0, 11610.0+offset_P], name="PL")
BM = qr.Molecule([0.0, 12220.0+offset], name="BM")
BL = qr.Molecule([0.0, 12370.0+offset], name="BL")
HL = qr.Molecule([0.0, 13020.0+offset-h_shift], name="HL")
HM = qr.Molecule([0.0, 13150.0+offset+h_shift], name="HM")
# CT states are effectively represented as "new molecules" in the system
PCT_M = qr.Molecule([0.0, 15200], name="PCT1")
PCT_L = qr.Molecule([0.0, 13550], name="PCT2") # 13500
#
# Transition dipole moment from Ref. 1 are scaled
#
dPM = numpy.array([ 0.8546, 0.5051, 0.1206])*sc_P
dPL = numpy.array([-0.9649, -0.0250, 0.2613])*sc_P
dHM = numpy.array([ 0.2749, -0.3694, -0.8877])*sc_H
dHL = numpy.array([ 0.0452, -0.9672, -0.2498])*sc_H
PM.set_dipole(0,1, dPM)
PL.set_dipole(0,1, dPL)
BL.set_dipole(0,1, [ 0.7782, 0.5332, 0.3317])
BM.set_dipole(0,1, [-0.9681, 0.1107, 0.2249])
HL.set_dipole(0,1, dHL)
HM.set_dipole(0,1, dHM)
#
# CT states are dark
#
PCT_M.set_dipole(1, 0, [0.0, 0.0, 0.0])
PCT_L.set_dipole(1, 0, [0.0, 0.0, 0.0])
molecules = [PM, PL, BM, BL, HL, HM, PCT_M, PCT_L]
# saving molecules without environment
qr.save_parcel(molecules, os.path.join(pre_out,"molecules.qrp"))
#
# Here we build the RC as an aggregate of molecules
#
mol3 = [PM, PL, BM]
agg = qr.Aggregate(molecules=mol3)
#
# Exciton interaction matrix
#
# values from Ref. 1
JP_77K_Jordanides = 575.0
JP_77K = JP_77K_Jordanides
#
# Fitted values of the model with CT states
# starting values of the manual search of best parameters are
# taken from Ref. 2
#
if jordanides:
JP = 395 #JP_77K
XCT_M = 0.0
XCT_L = 0.0
YCT = 0.0
else:
JP = 690 #575
XCT_M = 905 #1400
XCT_L = 755
YCT = 550 #350
# Factor of three is just to experiment with
PB_1 = -104.0
PB_2 = -94.0
LCT = 0
MCT = 0
# the interaction matrix is taken from
J_Matrix = numpy.array([
[ 0.0, JP, -16.0, PB_1, 19.9, -4.8, XCT_M, YCT],
[ JP, 0.0, PB_2, 2.8, -6.8, 18.0, YCT, XCT_L],
[ -16.0, PB_2, 0.0, 19.3, -7.5, 95.8, MCT, LCT],
[ PB_1, 2.8, 19.3, 0.0, 123.1, -7.9, LCT, MCT],
[ 19.9, -6.8, -7.5, 123.1, 0.0, 3.9, 0.0, 0.0],
[ -4.8, 18.0, 95.8, -7.9, 3.9, 0.0, 0.0, 0.0],
[ XCT_M, YCT, MCT, LCT, 0.0, 0.0, 0.0, 0.0],
[ YCT, XCT_L, LCT, MCT, 0.0, 0.0, 0.0, 0.0]
])
with qr.energy_units("1/cm"):
agg.set_resonance_coupling_matrix(J_Matrix[0:3,0:3])
#agg.save("RC_Model_40_4_adjusted_CT_no_environment_unbuilt.hdf5")
qr.save_parcel(agg, os.path.join(pre_out,
"RC_Model_40_4_adjusted_CT_no_environment_unbuilt.qrp"))
# In[3]:
# check that units were set correctly
rc = agg.resonance_coupling[1,0]
with qr.energy_units("1/cm"):
print(qr.convert(rc, "int"))
with qr.energy_units("1/cm"):
print(agg.get_resonance_coupling(1,0))
# In[4]:
# Bath correlation function
time = qr.TimeAxis(0.0, 1000, 1.0)
cfA_params = dict(ftype="OverdampedBrownian",
reorg=190, cortime=80, T=77, matsubara=100)
cfH_params = dict(ftype="OverdampedBrownian",
reorg=200, cortime=100, T=77, matsubara=100)
cfP_params = dict(ftype="OverdampedBrownian",
reorg=700, cortime=120, T=77, matsubara=100)
cfCT_params = dict(ftype="OverdampedBrownian",
reorg=3600, cortime=20, T=77, matsubara=200)
with qr.energy_units("1/cm"):
cfA = qr.CorrelationFunction(time, cfA_params)
cfH = qr.CorrelationFunction(time, cfH_params)
cfP = qr.CorrelationFunction(time, cfP_params)
cfCT = qr.CorrelationFunction(time, cfCT_params)
PM.set_transition_environment((0,1), cfP)
PL.set_transition_environment((0,1), cfP)
BM.set_transition_environment((0,1), cfA)
BL.set_transition_environment((0,1), cfA)
HL.set_transition_environment((0,1), cfH)
HM.set_transition_environment((0,1), cfH)
PCT_M.set_transition_environment((0,1), cfCT)
PCT_L.set_transition_environment((0,1), cfCT)
agg.build(mult=2)
#agg.save("RC_Model_40_4_adjusted_CT_no_vibrations_built.hdf5")
qr.save_parcel(agg, os.path.join(pre_out,
"RC_Model_40_4_adjusted_CT_no_vibrations_built.qrp"))
# In[5]:
#
# Refitted model of the Reaction Center using effective Gaussian lineshapes
#
#
#
#
# "Environment" modelled by dressing the states
#
molecules_eff = qr.load_parcel(os.path.join(pre_out,"molecules.qrp"))
agg_eff = qr.Aggregate(molecules=molecules_eff)
with qr.energy_units("1/cm"):
agg_eff.set_resonance_coupling_matrix(J_Matrix)
PMe = molecules_eff[0]
PLe = molecules_eff[1]
BMe = molecules_eff[2]
BLe = molecules_eff[3]
HMe = molecules_eff[4]
HLe = molecules_eff[5]
PCT_Me = molecules_eff[6]
PCT_Le = molecules_eff[7]
with qr.energy_units("1/cm"):
ee = PMe.get_energy(1)
PMe.set_energy(1,ee-80.0)
ee = PLe.get_energy(1)
PLe.set_energy(1,ee-80.0)
ee = BMe.get_energy(1)
BMe.set_energy(1,ee-85.0)
ee = BLe.get_energy(1)
BLe.set_energy(1,ee-85.0)
ee = HMe.get_energy(1)
HMe.set_energy(1,ee-75.0)
ee = HLe.get_energy(1)
HLe.set_energy(1,ee-75.0)
ee = PCT_Me.get_energy(1)
PCT_Me.set_energy(1,ee+230)
ee = PCT_Le.get_energy(1)
PCT_Le.set_energy(1,ee+230)
PMe.set_transition_width((0,1), qr.convert(630,"1/cm", "int"))
PLe.set_transition_width((0,1), qr.convert(630,"1/cm", "int"))
BMe.set_transition_width((0,1), qr.convert(180,"1/cm", "int"))
BLe.set_transition_width((0,1), qr.convert(180,"1/cm", "int"))
HMe.set_transition_width((0,1), qr.convert(155,"1/cm", "int"))
HLe.set_transition_width((0,1), qr.convert(155,"1/cm", "int"))
PCT_Me.set_transition_width((0,1), qr.convert(800,"1/cm", "int"))
PCT_Le.set_transition_width((0,1), qr.convert(800,"1/cm", "int"))
dPMe = numpy.array([ 0.8546, 0.5051, 0.1206])*0.76
dPLe = numpy.array([-0.9649, -0.0250, 0.2613])*0.76
dHMe = numpy.array([ 0.2749, -0.3694, -0.8877])*0.68
dHLe = numpy.array([ 0.0452, -0.9672, -0.2498])*0.68
PMe.set_dipole(0,1,dPMe)
PLe.set_dipole(0,1,dPLe)
HMe.set_dipole(0,1,dHMe)
HLe.set_dipole(0,1,dHLe)
# we save the effective model
qr.save_parcel(agg_eff, os.path.join(pre_out,
"RC_eff_Model_40_4_adjusted_CT_no_environment_unbuilt.qrp"))
agg_eff.build(mult=1)
# In[6]:
#RT = agg.get_RelaxationTensor(time, relaxation_theory="standard_Redfield", secular_relaxation=True)
rrm = agg.get_RedfieldRateMatrix()
# In[7]:
print("Relaxation time (2 -> 1) :", 1.0/rrm.data[1,2])
print("Relaxation time (3 -> 2) :", 1.0/rrm.data[2,3])
print("Relaxation time (3 -> 1) :", 1.0/rrm.data[1,3])
# TEST (put here the energies and temperature)
E2 = 2.24165620051
E1 = 2.13494501445
kbT = 0.01008086552556262
print("Relaxation time ratio :", rrm.data[2,1]/rrm.data[1,2])
print("... to be compared with :", numpy.exp(-(E2-E1)/kbT))
# In[8]:
rwa = agg.get_RWA_suggestion()
with qr.energy_units("1/cm"):
print(qr.convert(rwa,"int"))
# In[9]:
# absorption from effective theory
from quantarhei import LabSetup
from quantarhei.utils.vectors import X, Y, Z
lab = LabSetup()
lab.set_polarizations(pulse_polarizations=[X,X,X], detection_polarization=X)
agg_eff.diagonalize()
print("\nEffetive model exciation energies:")
print("Energies in 1/cm:")
N1 = agg_eff.nmono
print([qr.convert(agg_eff.HH[i,i],"int","1/cm") for i in range(1, N1+1)])
print("")
mabsc = qr.MockAbsSpectrumCalculator(time, system=agg_eff)
rho0 = agg_eff.get_DensityMatrix(condition_type="thermal", temperature=0.0)
ham = agg_eff.get_Hamiltonian()
pthways = agg_eff.liouville_pathways_1(lab=lab, ham=ham, etol=1.0e-5,
verbose=0)
mabsc.bootstrap(rwa=qr.convert(10000.0,"1/cm","int"),
shape="Gaussian")
mabsc.set_pathways(pthways)
abs1 = mabsc.calculate(raw=False)
abs1.normalize2()
absc = qr.AbsSpectrumCalculator(time, system=agg)
# In[10]:
absc.bootstrap(rwa)
# In[11]:
abss = absc.calculate()
#absexp = qr.load("bas_77K.hdf5") #("DATA/bas_77K.hdf5")
absexp = qr.load_parcel(os.path.join(pre_in, "bas_77K.qrp"))
absexp.normalize()
absexp.subtract(0.086)
absexp.normalize()
abss.normalize2() #norm=0.53)
# In[29]:
#with qr.energy_units("nm"):
# abss.plot(axis=[650, 1000, 0, 0.7], show=False)
# absexp.plot()
plt.figure(0)
with qr.energy_units("1/cm"):
#abss.plot(axis=[10500, 15000, 0, 1.1], show=False)
abs1.plot(axis=[10500, 15000, 0, 1.1], show=False)
absexp.plot(show=True)
absexp.savefig(os.path.join(pre_out, "abs_full.png"))
# in a Notebook, it seems that the figure shows itself always when we leave the cell
# In[30]:
N1 = agg.Nbe[1]
print("Energies in 1/cm:")
print([qr.convert(agg.HH[i,i],"int","1/cm") for i in range(1, N1+1)])
# In[31]:
agg.diagonalize()
# In[32]:
# exciton report
agg.exciton_report(Nrep=8)
# In[33]:
agg.report_on_expansion(2)
# In[34]:
N1 = agg.Nbe[1]
print("Energies in 1/cm:")
print([qr.convert(agg.HH[i,i],"int","1/cm") for i in range(1, N1+1)])
# In[35]:
print("Transition dipoles square:")
print(agg.D2[1:N1+1,0])
#
# ## Fractional model
#
# Remove both H and BL
# In[36]:
#
# Get components of the fractional model
#
indices_of_components = []
names_of_components = ["PM", "PL", "BM"] # , "BL","PCT1", "PCT2"] # "HL", "HM", "BL", , "BCT"
components = []
for name in names_of_components:
indx = agg.get_Molecule_index(name)
mol = agg.get_Molecule_by_name(name)
#if name == "BM":
# mol.elenergies[1] = mol.elenergies[1] + 0.1
indices_of_components.append(indx)
components.append(mol)
print("Indices of selected molecules: ", indices_of_components)
# In[37]:
#
# Coupling matrix
#
Ni = len(indices_of_components)
Jfm = numpy.zeros((Ni, Ni), dtype=qr.REAL)
k_1 = 0
for i_1 in indices_of_components:
k_2 = 0
for i_2 in indices_of_components:
Jfm[k_1, k_2] = agg.resonance_coupling[i_1, i_2]
k_2 += 1
k_1 += 1
# In[38]:
#
# Fractional aggregate
#
frac = qr.Aggregate(components)
frac.set_resonance_coupling_matrix(Jfm)
# In[39]:
fix_dipole = False
if fix_dipole:
BM_fix_dipole = frac.get_Molecule_by_name("BM")
dip = BM_fix_dipole.get_dipole(0, 1)
nrm = qr.norm(dip)
dip2 = qr.normalize2(dip, norm=numpy.sqrt(2.0)*nrm)
BM_fix_dipole.set_dipole(0, 1, dip2)
# In[40]:
#frac.save("fraction_40_4_CT_unbuilt.hdf5")
qr.save_parcel(frac, os.path.join(pre_out,"fraction_40_4_CT_unbuilt.qrp"))
# In[41]:
frac.build()
# In[42]:
absc2 = qr.AbsSpectrumCalculator(time, system=frac)
absc2.bootstrap(rwa)
abss2 = absc2.calculate()
#absexp2 = qr.load("bas_77K.hdf5")
absexp2 = qr.load_parcel(os.path.join(pre_in, "bas_77K.qrp"))
absexp2.normalize()
absexp2.subtract(0.086)
absexp2.normalize()
abss2.normalize2() #norm=0.53)
plt.figure(1)
with qr.energy_units("1/cm"):
abss2.plot(axis=[10500, 15000, 0, 1.1], show=False)
absexp2.plot(show=True)
absexp2.savefig(os.path.join(pre_out, "abs_frac.png"))
# In[43]:
frac.diagonalize()
# In[44]:
frac.report_on_expansion(3)
# In[45]:
HH = frac.get_Hamiltonian()
with qr.eigenbasis_of(HH):
with qr.energy_units("1/cm"):
print([HH.data[i,i] for i in range(1,frac.nmono)])
# In[46]:
#
# Get components of the fractional model
#
indices_of_components = []
names_of_components = ["PM", "PL", "BM", "BL","PCT1", "PCT2"] #["BM", "BL"] # "HL", "HM", "BL", , "BCT"
names_of_components3 = ["PM", "PL", "BL"]
components = []
for name in names_of_components3:
indx = agg_eff.get_Molecule_index(name)
mol = agg_eff.get_Molecule_by_name(name)
#if name == "BM":
# mol.elenergies[1] = mol.elenergies[1] + 0.1
indices_of_components.append(indx)
components.append(mol)
print("Indices of selected molecules: ", indices_of_components)
# In[47]:
#
# Fractional aggregate
#
frac_eff = qr.Aggregate(components)
frac_eff.set_resonance_coupling_matrix(Jfm)
# In[48]:
#frac_B.save("fraction_40_4_B_unbuilt.hdf5")
qr.save_parcel(frac_eff, os.path.join(pre_out,
"fraction_eff_40_4_CT_unbuilt.qrp"))
frac_eff.build()
frac_eff.diagonalize()
mabsc2 = qr.MockAbsSpectrumCalculator(time, system=frac_eff)
rho0 = frac_eff.get_DensityMatrix(condition_type="thermal", temperature=0.0)
ham = frac_eff.get_Hamiltonian()
pthways = frac_eff.liouville_pathways_1(lab=lab, ham=ham, etol=1.0e-5,
verbose=0)
mabsc2.bootstrap(rwa=qr.convert(10000.0,"1/cm","int"),
shape="Gaussian")
mabsc2.set_pathways(pthways)
abs2 = mabsc2.calculate(raw=False)
abs2.normalize2() |
plt.figure(2)
with qr.energy_units("1/cm"):
#abss2.plot(axis=[10500, 15000, 0, 1.1], show=False)
abs2.plot(axis=[10500, 15000, 0, 1.1], show=False)
absexp2.plot(show=False)
absexp2.savefig(os.path.join(pre_out, "abs_frac_eff.png")) | random_line_split | |
model_abs.py | # coding: utf-8
#
# Purely electronic model of the Reaction Center
#
# Calculations of absorption spectra with a realistic lineshape theory
# and with effective Gaussian lineshapes
#
#
#
#
# In[1]:
import os
import numpy
import quantarhei as qr
print(qr.Manager().version)
import matplotlib.pyplot as plt
plt.switch_backend('agg')
# In[2]:
pre_in = "in"
pre_out = "out"
# check if pre_out exists and is a directory
if not os.path.isdir(pre_out):
|
#
# Model from Jordanides at al. Ref. 1 is adjusted and extended by two CT states
#
#
jordanides = False
if jordanides:
offset = 0.0
offset_P = 0.0 #485.0
offset_P_M = offset_P + 0.0
h_shift = 0.0
sc_H = 1.0
sc_P = 1.0
else:
offset = 275
offset_P = 400 #485.0
offset_P_M = offset_P + 100.0
h_shift = 85.0
sc_H = 0.79
sc_P = 0.75
#
# Molecules
#
with qr.energy_units("1/cm"):
PM = qr.Molecule([0.0, 11610.0+offset_P_M], name="PM")
PL = qr.Molecule([0.0, 11610.0+offset_P], name="PL")
BM = qr.Molecule([0.0, 12220.0+offset], name="BM")
BL = qr.Molecule([0.0, 12370.0+offset], name="BL")
HL = qr.Molecule([0.0, 13020.0+offset-h_shift], name="HL")
HM = qr.Molecule([0.0, 13150.0+offset+h_shift], name="HM")
# CT states are effectively represented as "new molecules" in the system
PCT_M = qr.Molecule([0.0, 15200], name="PCT1")
PCT_L = qr.Molecule([0.0, 13550], name="PCT2") # 13500
#
# Transition dipole moment from Ref. 1 are scaled
#
dPM = numpy.array([ 0.8546, 0.5051, 0.1206])*sc_P
dPL = numpy.array([-0.9649, -0.0250, 0.2613])*sc_P
dHM = numpy.array([ 0.2749, -0.3694, -0.8877])*sc_H
dHL = numpy.array([ 0.0452, -0.9672, -0.2498])*sc_H
PM.set_dipole(0,1, dPM)
PL.set_dipole(0,1, dPL)
BL.set_dipole(0,1, [ 0.7782, 0.5332, 0.3317])
BM.set_dipole(0,1, [-0.9681, 0.1107, 0.2249])
HL.set_dipole(0,1, dHL)
HM.set_dipole(0,1, dHM)
#
# CT states are dark
#
PCT_M.set_dipole(1, 0, [0.0, 0.0, 0.0])
PCT_L.set_dipole(1, 0, [0.0, 0.0, 0.0])
molecules = [PM, PL, BM, BL, HL, HM, PCT_M, PCT_L]
# saving molecules without environment
qr.save_parcel(molecules, os.path.join(pre_out,"molecules.qrp"))
#
# Here we build the RC as an aggregate of molecules
#
mol3 = [PM, PL, BM]
agg = qr.Aggregate(molecules=mol3)
#
# Exciton interaction matrix
#
# values from Ref. 1
JP_77K_Jordanides = 575.0
JP_77K = JP_77K_Jordanides
#
# Fitted values of the model with CT states
# starting values of the manual search of best parameters are
# taken from Ref. 2
#
if jordanides:
JP = 395 #JP_77K
XCT_M = 0.0
XCT_L = 0.0
YCT = 0.0
else:
JP = 690 #575
XCT_M = 905 #1400
XCT_L = 755
YCT = 550 #350
# Factor of three is just to experiment with
PB_1 = -104.0
PB_2 = -94.0
LCT = 0
MCT = 0
# the interaction matrix is taken from
J_Matrix = numpy.array([
[ 0.0, JP, -16.0, PB_1, 19.9, -4.8, XCT_M, YCT],
[ JP, 0.0, PB_2, 2.8, -6.8, 18.0, YCT, XCT_L],
[ -16.0, PB_2, 0.0, 19.3, -7.5, 95.8, MCT, LCT],
[ PB_1, 2.8, 19.3, 0.0, 123.1, -7.9, LCT, MCT],
[ 19.9, -6.8, -7.5, 123.1, 0.0, 3.9, 0.0, 0.0],
[ -4.8, 18.0, 95.8, -7.9, 3.9, 0.0, 0.0, 0.0],
[ XCT_M, YCT, MCT, LCT, 0.0, 0.0, 0.0, 0.0],
[ YCT, XCT_L, LCT, MCT, 0.0, 0.0, 0.0, 0.0]
])
with qr.energy_units("1/cm"):
agg.set_resonance_coupling_matrix(J_Matrix[0:3,0:3])
#agg.save("RC_Model_40_4_adjusted_CT_no_environment_unbuilt.hdf5")
qr.save_parcel(agg, os.path.join(pre_out,
"RC_Model_40_4_adjusted_CT_no_environment_unbuilt.qrp"))
# In[3]:
# check that units were set correctly
rc = agg.resonance_coupling[1,0]
with qr.energy_units("1/cm"):
print(qr.convert(rc, "int"))
with qr.energy_units("1/cm"):
print(agg.get_resonance_coupling(1,0))
# In[4]:
# Bath correlation function
time = qr.TimeAxis(0.0, 1000, 1.0)
cfA_params = dict(ftype="OverdampedBrownian",
reorg=190, cortime=80, T=77, matsubara=100)
cfH_params = dict(ftype="OverdampedBrownian",
reorg=200, cortime=100, T=77, matsubara=100)
cfP_params = dict(ftype="OverdampedBrownian",
reorg=700, cortime=120, T=77, matsubara=100)
cfCT_params = dict(ftype="OverdampedBrownian",
reorg=3600, cortime=20, T=77, matsubara=200)
with qr.energy_units("1/cm"):
cfA = qr.CorrelationFunction(time, cfA_params)
cfH = qr.CorrelationFunction(time, cfH_params)
cfP = qr.CorrelationFunction(time, cfP_params)
cfCT = qr.CorrelationFunction(time, cfCT_params)
PM.set_transition_environment((0,1), cfP)
PL.set_transition_environment((0,1), cfP)
BM.set_transition_environment((0,1), cfA)
BL.set_transition_environment((0,1), cfA)
HL.set_transition_environment((0,1), cfH)
HM.set_transition_environment((0,1), cfH)
PCT_M.set_transition_environment((0,1), cfCT)
PCT_L.set_transition_environment((0,1), cfCT)
agg.build(mult=2)
#agg.save("RC_Model_40_4_adjusted_CT_no_vibrations_built.hdf5")
qr.save_parcel(agg, os.path.join(pre_out,
"RC_Model_40_4_adjusted_CT_no_vibrations_built.qrp"))
# In[5]:
#
# Refitted model of the Reaction Center using effective Gaussian lineshapes
#
#
#
#
# "Environment" modelled by dressing the states
#
molecules_eff = qr.load_parcel(os.path.join(pre_out,"molecules.qrp"))
agg_eff = qr.Aggregate(molecules=molecules_eff)
with qr.energy_units("1/cm"):
agg_eff.set_resonance_coupling_matrix(J_Matrix)
PMe = molecules_eff[0]
PLe = molecules_eff[1]
BMe = molecules_eff[2]
BLe = molecules_eff[3]
HMe = molecules_eff[4]
HLe = molecules_eff[5]
PCT_Me = molecules_eff[6]
PCT_Le = molecules_eff[7]
with qr.energy_units("1/cm"):
ee = PMe.get_energy(1)
PMe.set_energy(1,ee-80.0)
ee = PLe.get_energy(1)
PLe.set_energy(1,ee-80.0)
ee = BMe.get_energy(1)
BMe.set_energy(1,ee-85.0)
ee = BLe.get_energy(1)
BLe.set_energy(1,ee-85.0)
ee = HMe.get_energy(1)
HMe.set_energy(1,ee-75.0)
ee = HLe.get_energy(1)
HLe.set_energy(1,ee-75.0)
ee = PCT_Me.get_energy(1)
PCT_Me.set_energy(1,ee+230)
ee = PCT_Le.get_energy(1)
PCT_Le.set_energy(1,ee+230)
PMe.set_transition_width((0,1), qr.convert(630,"1/cm", "int"))
PLe.set_transition_width((0,1), qr.convert(630,"1/cm", "int"))
BMe.set_transition_width((0,1), qr.convert(180,"1/cm", "int"))
BLe.set_transition_width((0,1), qr.convert(180,"1/cm", "int"))
HMe.set_transition_width((0,1), qr.convert(155,"1/cm", "int"))
HLe.set_transition_width((0,1), qr.convert(155,"1/cm", "int"))
PCT_Me.set_transition_width((0,1), qr.convert(800,"1/cm", "int"))
PCT_Le.set_transition_width((0,1), qr.convert(800,"1/cm", "int"))
dPMe = numpy.array([ 0.8546, 0.5051, 0.1206])*0.76
dPLe = numpy.array([-0.9649, -0.0250, 0.2613])*0.76
dHMe = numpy.array([ 0.2749, -0.3694, -0.8877])*0.68
dHLe = numpy.array([ 0.0452, -0.9672, -0.2498])*0.68
PMe.set_dipole(0,1,dPMe)
PLe.set_dipole(0,1,dPLe)
HMe.set_dipole(0,1,dHMe)
HLe.set_dipole(0,1,dHLe)
# we save the effective model
qr.save_parcel(agg_eff, os.path.join(pre_out,
"RC_eff_Model_40_4_adjusted_CT_no_environment_unbuilt.qrp"))
agg_eff.build(mult=1)
# In[6]:
#RT = agg.get_RelaxationTensor(time, relaxation_theory="standard_Redfield", secular_relaxation=True)
rrm = agg.get_RedfieldRateMatrix()
# In[7]:
print("Relaxation time (2 -> 1) :", 1.0/rrm.data[1,2])
print("Relaxation time (3 -> 2) :", 1.0/rrm.data[2,3])
print("Relaxation time (3 -> 1) :", 1.0/rrm.data[1,3])
# TEST (put here the energies and temperature)
E2 = 2.24165620051
E1 = 2.13494501445
kbT = 0.01008086552556262
print("Relaxation time ratio :", rrm.data[2,1]/rrm.data[1,2])
print("... to be compared with :", numpy.exp(-(E2-E1)/kbT))
# In[8]:
rwa = agg.get_RWA_suggestion()
with qr.energy_units("1/cm"):
print(qr.convert(rwa,"int"))
# In[9]:
# absorption from effective theory
from quantarhei import LabSetup
from quantarhei.utils.vectors import X, Y, Z
lab = LabSetup()
lab.set_polarizations(pulse_polarizations=[X,X,X], detection_polarization=X)
agg_eff.diagonalize()
print("\nEffetive model exciation energies:")
print("Energies in 1/cm:")
N1 = agg_eff.nmono
print([qr.convert(agg_eff.HH[i,i],"int","1/cm") for i in range(1, N1+1)])
print("")
mabsc = qr.MockAbsSpectrumCalculator(time, system=agg_eff)
rho0 = agg_eff.get_DensityMatrix(condition_type="thermal", temperature=0.0)
ham = agg_eff.get_Hamiltonian()
pthways = agg_eff.liouville_pathways_1(lab=lab, ham=ham, etol=1.0e-5,
verbose=0)
mabsc.bootstrap(rwa=qr.convert(10000.0,"1/cm","int"),
shape="Gaussian")
mabsc.set_pathways(pthways)
abs1 = mabsc.calculate(raw=False)
abs1.normalize2()
absc = qr.AbsSpectrumCalculator(time, system=agg)
# In[10]:
absc.bootstrap(rwa)
# In[11]:
abss = absc.calculate()
#absexp = qr.load("bas_77K.hdf5") #("DATA/bas_77K.hdf5")
absexp = qr.load_parcel(os.path.join(pre_in, "bas_77K.qrp"))
absexp.normalize()
absexp.subtract(0.086)
absexp.normalize()
abss.normalize2() #norm=0.53)
# In[29]:
#with qr.energy_units("nm"):
# abss.plot(axis=[650, 1000, 0, 0.7], show=False)
# absexp.plot()
plt.figure(0)
with qr.energy_units("1/cm"):
#abss.plot(axis=[10500, 15000, 0, 1.1], show=False)
abs1.plot(axis=[10500, 15000, 0, 1.1], show=False)
absexp.plot(show=True)
absexp.savefig(os.path.join(pre_out, "abs_full.png"))
# in a Notebook, it seems that the figure shows itself always when we leave the cell
# In[30]:
N1 = agg.Nbe[1]
print("Energies in 1/cm:")
print([qr.convert(agg.HH[i,i],"int","1/cm") for i in range(1, N1+1)])
# In[31]:
agg.diagonalize()
# In[32]:
# exciton report
agg.exciton_report(Nrep=8)
# In[33]:
agg.report_on_expansion(2)
# In[34]:
N1 = agg.Nbe[1]
print("Energies in 1/cm:")
print([qr.convert(agg.HH[i,i],"int","1/cm") for i in range(1, N1+1)])
# In[35]:
print("Transition dipoles square:")
print(agg.D2[1:N1+1,0])
#
# ## Fractional model
#
# Remove both H and BL
# In[36]:
#
# Get components of the fractional model
#
indices_of_components = []
names_of_components = ["PM", "PL", "BM"] # , "BL","PCT1", "PCT2"] # "HL", "HM", "BL", , "BCT"
components = []
for name in names_of_components:
indx = agg.get_Molecule_index(name)
mol = agg.get_Molecule_by_name(name)
#if name == "BM":
# mol.elenergies[1] = mol.elenergies[1] + 0.1
indices_of_components.append(indx)
components.append(mol)
print("Indices of selected molecules: ", indices_of_components)
# In[37]:
#
# Coupling matrix
#
Ni = len(indices_of_components)
Jfm = numpy.zeros((Ni, Ni), dtype=qr.REAL)
k_1 = 0
for i_1 in indices_of_components:
k_2 = 0
for i_2 in indices_of_components:
Jfm[k_1, k_2] = agg.resonance_coupling[i_1, i_2]
k_2 += 1
k_1 += 1
# In[38]:
#
# Fractional aggregate
#
frac = qr.Aggregate(components)
frac.set_resonance_coupling_matrix(Jfm)
# In[39]:
fix_dipole = False
if fix_dipole:
BM_fix_dipole = frac.get_Molecule_by_name("BM")
dip = BM_fix_dipole.get_dipole(0, 1)
nrm = qr.norm(dip)
dip2 = qr.normalize2(dip, norm=numpy.sqrt(2.0)*nrm)
BM_fix_dipole.set_dipole(0, 1, dip2)
# In[40]:
#frac.save("fraction_40_4_CT_unbuilt.hdf5")
qr.save_parcel(frac, os.path.join(pre_out,"fraction_40_4_CT_unbuilt.qrp"))
# In[41]:
frac.build()
# In[42]:
absc2 = qr.AbsSpectrumCalculator(time, system=frac)
absc2.bootstrap(rwa)
abss2 = absc2.calculate()
#absexp2 = qr.load("bas_77K.hdf5")
absexp2 = qr.load_parcel(os.path.join(pre_in, "bas_77K.qrp"))
absexp2.normalize()
absexp2.subtract(0.086)
absexp2.normalize()
abss2.normalize2() #norm=0.53)
plt.figure(1)
with qr.energy_units("1/cm"):
abss2.plot(axis=[10500, 15000, 0, 1.1], show=False)
absexp2.plot(show=True)
absexp2.savefig(os.path.join(pre_out, "abs_frac.png"))
# In[43]:
frac.diagonalize()
# In[44]:
frac.report_on_expansion(3)
# In[45]:
HH = frac.get_Hamiltonian()
with qr.eigenbasis_of(HH):
with qr.energy_units("1/cm"):
print([HH.data[i,i] for i in range(1,frac.nmono)])
# In[46]:
#
# Get components of the fractional model
#
indices_of_components = []
names_of_components = ["PM", "PL", "BM", "BL","PCT1", "PCT2"] #["BM", "BL"] # "HL", "HM", "BL", , "BCT"
names_of_components3 = ["PM", "PL", "BL"]
components = []
for name in names_of_components3:
indx = agg_eff.get_Molecule_index(name)
mol = agg_eff.get_Molecule_by_name(name)
#if name == "BM":
# mol.elenergies[1] = mol.elenergies[1] + 0.1
indices_of_components.append(indx)
components.append(mol)
print("Indices of selected molecules: ", indices_of_components)
# In[47]:
#
# Fractional aggregate
#
frac_eff = qr.Aggregate(components)
frac_eff.set_resonance_coupling_matrix(Jfm)
# In[48]:
#frac_B.save("fraction_40_4_B_unbuilt.hdf5")
qr.save_parcel(frac_eff, os.path.join(pre_out,
"fraction_eff_40_4_CT_unbuilt.qrp"))
frac_eff.build()
frac_eff.diagonalize()
mabsc2 = qr.MockAbsSpectrumCalculator(time, system=frac_eff)
rho0 = frac_eff.get_DensityMatrix(condition_type="thermal", temperature=0.0)
ham = frac_eff.get_Hamiltonian()
pthways = frac_eff.liouville_pathways_1(lab=lab, ham=ham, etol=1.0e-5,
verbose=0)
mabsc2.bootstrap(rwa=qr.convert(10000.0,"1/cm","int"),
shape="Gaussian")
mabsc2.set_pathways(pthways)
abs2 = mabsc2.calculate(raw=False)
abs2.normalize2()
plt.figure(2)
with qr.energy_units("1/cm"):
#abss2.plot(axis=[10500, 15000, 0, 1.1], show=False)
abs2.plot(axis=[10500, 15000, 0, 1.1], show=False)
absexp2.plot(show=False)
absexp2.savefig(os.path.join(pre_out, "abs_frac_eff.png"))
| try:
os.makedirs(pre_out, exist_ok=True)
except:
raise Exception("Output directory name '"
+pre_out+"' does not represent a valid directory") | conditional_block |
waypoint_updater.py | #!/usr/bin/env python
import rospy
from std_msgs.msg import Int32
from geometry_msgs.msg import PoseStamped, TwistStamped
from styx_msgs.msg import Lane, Waypoint
import tf
import math
import time
'''
This node will publish waypoints from the car's current position to some `x` distance ahead.
As mentioned in the doc, you should ideally first implement a version which does not care
about traffic lights or obstacles.
Once you have created dbw_node, you will update this node to use the status of traffic lights too.
Please note that our simulator also provides the exact location of traffic lights and their
current status in `/vehicle/traffic_lights` message. You can use this message to build this node
as well as to verify your TL classifier.
TODO (for Yousuf and Aaron): Stopline location for each traffic light.
'''
LOOKAHEAD_WPS = 100 # Number of waypoints we will publish. You can change this number
TIMEOUT_VALUE = 0.1
ONE_MPH = 0.44704
class WaypointUpdater(object):
def __init__(self):
rospy.loginfo('WaypointUpdater::__init__ - Start')
rospy.init_node('waypoint_updater')
rospy.Subscriber('/current_pose', PoseStamped, self.pose_cb)
rospy.Subscriber('/current_velocity', TwistStamped, self.velocity_cb)
rospy.Subscriber('/base_waypoints', Lane, self.waypoints_cb)
# TODO: Add a subscriber for /traffic_waypoint and /obstacle_waypoint below
rospy.Subscriber('/traffic_waypoint', Int32, self.traffic_cb)
# commenting the two below for the time being until clarification about whether
# is needed or not
#rospy.Subscriber('/obstacle_waypoint', , self.obstacle_cb)
self.final_waypoints_pub = rospy.Publisher('final_waypoints', Lane, queue_size=1)
# TODO: Add other member variables you need below
self.tf_listener = tf.TransformListener()
# The car's current position
self.pose = None
# The maps's complete waypoints
self.waypoints = None
# The car's current velocity
self.velocity = 0.0
# The timestamp of the last traffic_waypoint
self.traffic_waypoint_timestamp = 0.0
# The index of the waypoint in the base_waypoints list, which is closest to the traffic light
self.light_waypoint_index = None
# The approximate distance from the stop line to the traffic light
self.light_distance_thresh = 3.0
# The car's distance to the traffic light when the car started the slowing down process
self.car_distance_to_sl_when_car_started_to_slow_down = None
self.car_velocity_when_car_started_to_slow_down = None
# first waypoint index at the previous iteration
self.prev_first_wpt_index = 0
self.default_velocity = rospy.get_param('~velocity', 1) * ONE_MPH
rospy.spin()
def pose_cb(self, msg):
self.pose = msg
first_wpt_index = -1
min_wpt_distance = float('inf')
if self.waypoints is None:
return
num_waypoints_in_list = len(self.waypoints.waypoints)
# Gererate an empty lane to store the final_waypoints
lane = Lane()
lane.header.frame_id = self.waypoints.header.frame_id
lane.header.stamp = rospy.Time(0)
lane.waypoints = []
# Iterate through the complete set of waypoints until we found the closest
distance_decreased = False
#rospy.loginfo('Started at waypoint index: %s', self.prev_first_wpt_index)
#start_time = time.time()
for index, waypoint in enumerate(self.waypoints.waypoints[self.prev_first_wpt_index:] + self.waypoints.waypoints[:self.prev_first_wpt_index], start=self.prev_first_wpt_index):
current_wpt_distance = self.distance(self.pose.pose.position, waypoint.pose.pose.position)
if distance_decreased and current_wpt_distance > min_wpt_distance:
break
if current_wpt_distance > 0 and current_wpt_distance < min_wpt_distance:
min_wpt_distance = current_wpt_distance
first_wpt_index = index
distance_decreased = True
first_wpt_index %= num_waypoints_in_list
transformed_light_point = None
if first_wpt_index == -1:
rospy.logwarn('WaypointUpdater::waypoints_cb - No waypoints ahead of ego were found... seems that the car went off course')
else:
#transform fast avoiding wait cycles
# Transform first waypoint to car coordinates
self.waypoints.waypoints[first_wpt_index].pose.header.frame_id = self.waypoints.header.frame_id
try:
self.tf_listener.waitForTransform("base_link", "world", rospy.Time(0), rospy.Duration(0.02))
transformed_waypoint = self.tf_listener.transformPose("base_link", self.waypoints.waypoints[first_wpt_index].pose)
except (tf.Exception, tf.LookupException, tf.ConnectivityException):
try:
self.tf_listener.waitForTransform("base_link", "world", rospy.Time(0), rospy.Duration(TIMEOUT_VALUE))
transformed_waypoint = self.tf_listener.transformPose("base_link", self.waypoints.waypoints[first_wpt_index].pose)
except (tf.Exception, tf.LookupException, tf.ConnectivityException):
rospy.logwarn("Failed to find camera to map transform")
return
# All waypoints in front of the car should have positive X coordinate in car coordinate frame
# If the closest waypoint is behind the car, skip this waypoint
if transformed_waypoint.pose.position.x <= 0.0:
first_wpt_index += 1
self.prev_first_wpt_index = first_wpt_index % num_waypoints_in_list
# Prepare for calculating velocity:
slow_down = False
reached_zero_velocity = False
car_distance_to_stop_line = -1.
planned_velocity = self.default_velocity
# If the last traffic_waypoint message is newer than the threshold, we might need to the car.
if self.light_waypoint_index >= 0:
rospy.logdebug('should stopp the car %s', self.light_waypoint_index)
self.waypoints.waypoints[self.light_waypoint_index].pose.header.frame_id = self.waypoints.header.frame_id
transformed_light_point = self.tf_listener.transformPose("base_link", self.waypoints.waypoints[self.light_waypoint_index].pose)
# The approximate distance from the stop line to the traffic light
car_distance_to_stop_line = transformed_light_point.pose.position.x - self.light_distance_thresh
# Estimate whether the car cannot cross the stop line on yellow (in less than 2 seconds). Otherwise don't slow down.
if self.velocity / car_distance_to_stop_line < 2 and car_distance_to_stop_line >= 4:
slow_down = True
if self.car_distance_to_sl_when_car_started_to_slow_down is None:
self.car_distance_to_sl_when_car_started_to_slow_down = car_distance_to_stop_line
self.car_velocity_when_car_started_to_slow_down = self.velocity
rospy.logdebug('Stopping the car')
planned_velocity = min(max(abs(car_distance_to_stop_line*0.2),0.0),self.default_velocity)
# Stop the car in a safe distance before the stop line to give the simulator space to adapt velocity
#we are close to the stop line and slow
elif car_distance_to_stop_line > 0 and car_distance_to_stop_line < 4 and self.velocity < 6:
slow_down = True
if car_distance_to_stop_line > 0.5:
planned_velocity = 1.0
else:
|
else:
rospy.logwarn('too late to stopp the car')
self.car_distance_to_tl_when_car_started_to_slow_down = None
self.car_velocity_when_car_started_to_slow_down = None
rospy.loginfo('car_distance_to_stop_line %s velocity %s set to %s',car_distance_to_stop_line,self.velocity,planned_velocity)
# Fill the lane with the final waypoints
for num_wp in range(LOOKAHEAD_WPS):
wp = Waypoint()
wp.pose = self.waypoints.waypoints[(first_wpt_index + num_wp) % num_waypoints_in_list].pose
wp.twist = self.waypoints.waypoints[(first_wpt_index + num_wp) % num_waypoints_in_list].twist
wp.twist.twist.linear.x = planned_velocity
wp.twist.twist.linear.y = 0.0
wp.twist.twist.linear.z = 0.0
wp.twist.twist.angular.x = 0.0
wp.twist.twist.angular.y = 0.0
wp.twist.twist.angular.z = 0.0
lane.waypoints.append(wp)
# finally, publish waypoints as modified on /final_waypoints topic
self.final_waypoints_pub.publish(lane)
def velocity_cb(self, msg):
self.velocity = msg.twist.linear.x
def waypoints_cb(self, waypoints):
self.waypoints = waypoints
def traffic_cb(self, traffic_waypoint):
# Callback for /traffic_waypoint message.
# Store the timestamp and the traffic light position to use them for final_waypoints in waypoints_cb
self.traffic_waypoint_timestamp = time.time()
self.light_waypoint_index = traffic_waypoint.data
# rospy.loginfo("received traffic light %s",self.light_waypoint_index)
def obstacle_cb(self, msg):
# TODO: Callback for /obstacle_waypoint message. We will implement it later
pass
def get_waypoint_velocity(self, waypoint):
return waypoint.twist.twist.linear.x
def set_waypoint_velocity(self, waypoints, waypoint, velocity):
waypoints[waypoint].twist.twist.linear.x = velocity
def distance(self, waypoints, wp1, wp2):
dist = 0
dl = lambda a, b: math.sqrt((a.x-b.x)**2 + (a.y-b.y)**2 + (a.z-b.z)**2)
for i in range(wp1, wp2+1):
dist += dl(waypoints[wp1].pose.pose.position, waypoints[i].pose.pose.position)
wp1 = i
return dist
def distance(self, pose1, pose2):
return math.sqrt((pose1.x-pose2.x)**2 + (pose1.y-pose2.y)**2 + (pose1.z-pose2.z)**2)
if __name__ == '__main__':
try:
WaypointUpdater()
except rospy.ROSInterruptException:
rospy.logerr('Could not start waypoint updater node.')
| planned_velocity = 0.0
reached_zero_velocity = True | conditional_block |
waypoint_updater.py | #!/usr/bin/env python
import rospy
from std_msgs.msg import Int32
from geometry_msgs.msg import PoseStamped, TwistStamped
from styx_msgs.msg import Lane, Waypoint
import tf
import math
import time
'''
This node will publish waypoints from the car's current position to some `x` distance ahead.
As mentioned in the doc, you should ideally first implement a version which does not care
about traffic lights or obstacles.
Once you have created dbw_node, you will update this node to use the status of traffic lights too.
Please note that our simulator also provides the exact location of traffic lights and their
current status in `/vehicle/traffic_lights` message. You can use this message to build this node
as well as to verify your TL classifier.
TODO (for Yousuf and Aaron): Stopline location for each traffic light.
'''
LOOKAHEAD_WPS = 100 # Number of waypoints we will publish. You can change this number
TIMEOUT_VALUE = 0.1
ONE_MPH = 0.44704
class WaypointUpdater(object):
def __init__(self):
rospy.loginfo('WaypointUpdater::__init__ - Start')
rospy.init_node('waypoint_updater')
rospy.Subscriber('/current_pose', PoseStamped, self.pose_cb)
rospy.Subscriber('/current_velocity', TwistStamped, self.velocity_cb)
rospy.Subscriber('/base_waypoints', Lane, self.waypoints_cb)
# TODO: Add a subscriber for /traffic_waypoint and /obstacle_waypoint below
rospy.Subscriber('/traffic_waypoint', Int32, self.traffic_cb)
# commenting the two below for the time being until clarification about whether
# is needed or not
#rospy.Subscriber('/obstacle_waypoint', , self.obstacle_cb)
self.final_waypoints_pub = rospy.Publisher('final_waypoints', Lane, queue_size=1)
# TODO: Add other member variables you need below
self.tf_listener = tf.TransformListener()
# The car's current position
self.pose = None
# The maps's complete waypoints
self.waypoints = None
# The car's current velocity
self.velocity = 0.0
# The timestamp of the last traffic_waypoint
self.traffic_waypoint_timestamp = 0.0
# The index of the waypoint in the base_waypoints list, which is closest to the traffic light
self.light_waypoint_index = None
# The approximate distance from the stop line to the traffic light
self.light_distance_thresh = 3.0
# The car's distance to the traffic light when the car started the slowing down process
self.car_distance_to_sl_when_car_started_to_slow_down = None
self.car_velocity_when_car_started_to_slow_down = None
# first waypoint index at the previous iteration
self.prev_first_wpt_index = 0
self.default_velocity = rospy.get_param('~velocity', 1) * ONE_MPH
rospy.spin()
def pose_cb(self, msg):
self.pose = msg
first_wpt_index = -1
min_wpt_distance = float('inf')
if self.waypoints is None:
return
num_waypoints_in_list = len(self.waypoints.waypoints)
# Gererate an empty lane to store the final_waypoints
lane = Lane()
lane.header.frame_id = self.waypoints.header.frame_id
lane.header.stamp = rospy.Time(0)
lane.waypoints = []
# Iterate through the complete set of waypoints until we found the closest
distance_decreased = False
#rospy.loginfo('Started at waypoint index: %s', self.prev_first_wpt_index)
#start_time = time.time()
for index, waypoint in enumerate(self.waypoints.waypoints[self.prev_first_wpt_index:] + self.waypoints.waypoints[:self.prev_first_wpt_index], start=self.prev_first_wpt_index):
current_wpt_distance = self.distance(self.pose.pose.position, waypoint.pose.pose.position)
if distance_decreased and current_wpt_distance > min_wpt_distance:
break
if current_wpt_distance > 0 and current_wpt_distance < min_wpt_distance:
min_wpt_distance = current_wpt_distance
first_wpt_index = index
distance_decreased = True
first_wpt_index %= num_waypoints_in_list
transformed_light_point = None
if first_wpt_index == -1:
rospy.logwarn('WaypointUpdater::waypoints_cb - No waypoints ahead of ego were found... seems that the car went off course')
else:
#transform fast avoiding wait cycles
# Transform first waypoint to car coordinates
self.waypoints.waypoints[first_wpt_index].pose.header.frame_id = self.waypoints.header.frame_id
try:
self.tf_listener.waitForTransform("base_link", "world", rospy.Time(0), rospy.Duration(0.02))
transformed_waypoint = self.tf_listener.transformPose("base_link", self.waypoints.waypoints[first_wpt_index].pose)
except (tf.Exception, tf.LookupException, tf.ConnectivityException):
try:
self.tf_listener.waitForTransform("base_link", "world", rospy.Time(0), rospy.Duration(TIMEOUT_VALUE))
transformed_waypoint = self.tf_listener.transformPose("base_link", self.waypoints.waypoints[first_wpt_index].pose)
except (tf.Exception, tf.LookupException, tf.ConnectivityException):
rospy.logwarn("Failed to find camera to map transform")
return
# All waypoints in front of the car should have positive X coordinate in car coordinate frame
# If the closest waypoint is behind the car, skip this waypoint
if transformed_waypoint.pose.position.x <= 0.0:
first_wpt_index += 1
self.prev_first_wpt_index = first_wpt_index % num_waypoints_in_list
# Prepare for calculating velocity:
slow_down = False
reached_zero_velocity = False
car_distance_to_stop_line = -1.
planned_velocity = self.default_velocity
# If the last traffic_waypoint message is newer than the threshold, we might need to the car.
if self.light_waypoint_index >= 0:
rospy.logdebug('should stopp the car %s', self.light_waypoint_index)
self.waypoints.waypoints[self.light_waypoint_index].pose.header.frame_id = self.waypoints.header.frame_id
transformed_light_point = self.tf_listener.transformPose("base_link", self.waypoints.waypoints[self.light_waypoint_index].pose)
# The approximate distance from the stop line to the traffic light
car_distance_to_stop_line = transformed_light_point.pose.position.x - self.light_distance_thresh
# Estimate whether the car cannot cross the stop line on yellow (in less than 2 seconds). Otherwise don't slow down.
if self.velocity / car_distance_to_stop_line < 2 and car_distance_to_stop_line >= 4:
slow_down = True
if self.car_distance_to_sl_when_car_started_to_slow_down is None:
self.car_distance_to_sl_when_car_started_to_slow_down = car_distance_to_stop_line
self.car_velocity_when_car_started_to_slow_down = self.velocity
rospy.logdebug('Stopping the car')
planned_velocity = min(max(abs(car_distance_to_stop_line*0.2),0.0),self.default_velocity)
# Stop the car in a safe distance before the stop line to give the simulator space to adapt velocity
#we are close to the stop line and slow
elif car_distance_to_stop_line > 0 and car_distance_to_stop_line < 4 and self.velocity < 6:
slow_down = True
if car_distance_to_stop_line > 0.5:
planned_velocity = 1.0
else:
planned_velocity = 0.0
reached_zero_velocity = True
else:
rospy.logwarn('too late to stopp the car')
self.car_distance_to_tl_when_car_started_to_slow_down = None
self.car_velocity_when_car_started_to_slow_down = None
rospy.loginfo('car_distance_to_stop_line %s velocity %s set to %s',car_distance_to_stop_line,self.velocity,planned_velocity)
# Fill the lane with the final waypoints
for num_wp in range(LOOKAHEAD_WPS):
wp = Waypoint()
wp.pose = self.waypoints.waypoints[(first_wpt_index + num_wp) % num_waypoints_in_list].pose
wp.twist = self.waypoints.waypoints[(first_wpt_index + num_wp) % num_waypoints_in_list].twist
wp.twist.twist.linear.x = planned_velocity
wp.twist.twist.linear.y = 0.0
wp.twist.twist.linear.z = 0.0
wp.twist.twist.angular.x = 0.0
wp.twist.twist.angular.y = 0.0
wp.twist.twist.angular.z = 0.0
lane.waypoints.append(wp)
# finally, publish waypoints as modified on /final_waypoints topic
self.final_waypoints_pub.publish(lane)
def velocity_cb(self, msg):
|
def waypoints_cb(self, waypoints):
self.waypoints = waypoints
def traffic_cb(self, traffic_waypoint):
# Callback for /traffic_waypoint message.
# Store the timestamp and the traffic light position to use them for final_waypoints in waypoints_cb
self.traffic_waypoint_timestamp = time.time()
self.light_waypoint_index = traffic_waypoint.data
# rospy.loginfo("received traffic light %s",self.light_waypoint_index)
def obstacle_cb(self, msg):
# TODO: Callback for /obstacle_waypoint message. We will implement it later
pass
def get_waypoint_velocity(self, waypoint):
return waypoint.twist.twist.linear.x
def set_waypoint_velocity(self, waypoints, waypoint, velocity):
waypoints[waypoint].twist.twist.linear.x = velocity
def distance(self, waypoints, wp1, wp2):
dist = 0
dl = lambda a, b: math.sqrt((a.x-b.x)**2 + (a.y-b.y)**2 + (a.z-b.z)**2)
for i in range(wp1, wp2+1):
dist += dl(waypoints[wp1].pose.pose.position, waypoints[i].pose.pose.position)
wp1 = i
return dist
def distance(self, pose1, pose2):
return math.sqrt((pose1.x-pose2.x)**2 + (pose1.y-pose2.y)**2 + (pose1.z-pose2.z)**2)
if __name__ == '__main__':
try:
WaypointUpdater()
except rospy.ROSInterruptException:
rospy.logerr('Could not start waypoint updater node.')
| self.velocity = msg.twist.linear.x | identifier_body |
waypoint_updater.py | #!/usr/bin/env python
import rospy
from std_msgs.msg import Int32
from geometry_msgs.msg import PoseStamped, TwistStamped
from styx_msgs.msg import Lane, Waypoint
import tf
import math
import time
'''
This node will publish waypoints from the car's current position to some `x` distance ahead.
As mentioned in the doc, you should ideally first implement a version which does not care
about traffic lights or obstacles.
Once you have created dbw_node, you will update this node to use the status of traffic lights too.
Please note that our simulator also provides the exact location of traffic lights and their
current status in `/vehicle/traffic_lights` message. You can use this message to build this node
as well as to verify your TL classifier.
TODO (for Yousuf and Aaron): Stopline location for each traffic light.
'''
LOOKAHEAD_WPS = 100 # Number of waypoints we will publish. You can change this number
TIMEOUT_VALUE = 0.1
ONE_MPH = 0.44704
class WaypointUpdater(object):
def __init__(self):
rospy.loginfo('WaypointUpdater::__init__ - Start')
rospy.init_node('waypoint_updater')
rospy.Subscriber('/current_pose', PoseStamped, self.pose_cb)
rospy.Subscriber('/current_velocity', TwistStamped, self.velocity_cb)
rospy.Subscriber('/base_waypoints', Lane, self.waypoints_cb)
# TODO: Add a subscriber for /traffic_waypoint and /obstacle_waypoint below
rospy.Subscriber('/traffic_waypoint', Int32, self.traffic_cb)
# commenting the two below for the time being until clarification about whether
# is needed or not
#rospy.Subscriber('/obstacle_waypoint', , self.obstacle_cb)
self.final_waypoints_pub = rospy.Publisher('final_waypoints', Lane, queue_size=1)
# TODO: Add other member variables you need below
self.tf_listener = tf.TransformListener()
# The car's current position
self.pose = None
# The maps's complete waypoints
self.waypoints = None
# The car's current velocity
self.velocity = 0.0
# The timestamp of the last traffic_waypoint
self.traffic_waypoint_timestamp = 0.0
# The index of the waypoint in the base_waypoints list, which is closest to the traffic light
self.light_waypoint_index = None
# The approximate distance from the stop line to the traffic light
self.light_distance_thresh = 3.0
# The car's distance to the traffic light when the car started the slowing down process
self.car_distance_to_sl_when_car_started_to_slow_down = None
self.car_velocity_when_car_started_to_slow_down = None
# first waypoint index at the previous iteration
self.prev_first_wpt_index = 0
self.default_velocity = rospy.get_param('~velocity', 1) * ONE_MPH
rospy.spin()
def pose_cb(self, msg):
self.pose = msg
first_wpt_index = -1
min_wpt_distance = float('inf')
if self.waypoints is None:
return
num_waypoints_in_list = len(self.waypoints.waypoints)
# Gererate an empty lane to store the final_waypoints
lane = Lane()
lane.header.frame_id = self.waypoints.header.frame_id
lane.header.stamp = rospy.Time(0)
lane.waypoints = []
# Iterate through the complete set of waypoints until we found the closest
distance_decreased = False
#rospy.loginfo('Started at waypoint index: %s', self.prev_first_wpt_index)
#start_time = time.time()
for index, waypoint in enumerate(self.waypoints.waypoints[self.prev_first_wpt_index:] + self.waypoints.waypoints[:self.prev_first_wpt_index], start=self.prev_first_wpt_index):
current_wpt_distance = self.distance(self.pose.pose.position, waypoint.pose.pose.position)
if distance_decreased and current_wpt_distance > min_wpt_distance:
break
if current_wpt_distance > 0 and current_wpt_distance < min_wpt_distance:
min_wpt_distance = current_wpt_distance
first_wpt_index = index
distance_decreased = True
first_wpt_index %= num_waypoints_in_list
transformed_light_point = None
if first_wpt_index == -1:
rospy.logwarn('WaypointUpdater::waypoints_cb - No waypoints ahead of ego were found... seems that the car went off course')
else:
#transform fast avoiding wait cycles
# Transform first waypoint to car coordinates
self.waypoints.waypoints[first_wpt_index].pose.header.frame_id = self.waypoints.header.frame_id
try:
self.tf_listener.waitForTransform("base_link", "world", rospy.Time(0), rospy.Duration(0.02))
transformed_waypoint = self.tf_listener.transformPose("base_link", self.waypoints.waypoints[first_wpt_index].pose)
except (tf.Exception, tf.LookupException, tf.ConnectivityException):
try:
self.tf_listener.waitForTransform("base_link", "world", rospy.Time(0), rospy.Duration(TIMEOUT_VALUE))
transformed_waypoint = self.tf_listener.transformPose("base_link", self.waypoints.waypoints[first_wpt_index].pose)
except (tf.Exception, tf.LookupException, tf.ConnectivityException):
rospy.logwarn("Failed to find camera to map transform")
return
# All waypoints in front of the car should have positive X coordinate in car coordinate frame
# If the closest waypoint is behind the car, skip this waypoint
if transformed_waypoint.pose.position.x <= 0.0:
first_wpt_index += 1
self.prev_first_wpt_index = first_wpt_index % num_waypoints_in_list
# Prepare for calculating velocity:
slow_down = False
reached_zero_velocity = False
car_distance_to_stop_line = -1.
planned_velocity = self.default_velocity
# If the last traffic_waypoint message is newer than the threshold, we might need to the car.
if self.light_waypoint_index >= 0:
rospy.logdebug('should stopp the car %s', self.light_waypoint_index)
self.waypoints.waypoints[self.light_waypoint_index].pose.header.frame_id = self.waypoints.header.frame_id
transformed_light_point = self.tf_listener.transformPose("base_link", self.waypoints.waypoints[self.light_waypoint_index].pose)
# The approximate distance from the stop line to the traffic light
car_distance_to_stop_line = transformed_light_point.pose.position.x - self.light_distance_thresh
# Estimate whether the car cannot cross the stop line on yellow (in less than 2 seconds). Otherwise don't slow down.
if self.velocity / car_distance_to_stop_line < 2 and car_distance_to_stop_line >= 4:
slow_down = True
if self.car_distance_to_sl_when_car_started_to_slow_down is None:
self.car_distance_to_sl_when_car_started_to_slow_down = car_distance_to_stop_line
self.car_velocity_when_car_started_to_slow_down = self.velocity
rospy.logdebug('Stopping the car')
planned_velocity = min(max(abs(car_distance_to_stop_line*0.2),0.0),self.default_velocity)
# Stop the car in a safe distance before the stop line to give the simulator space to adapt velocity
#we are close to the stop line and slow
elif car_distance_to_stop_line > 0 and car_distance_to_stop_line < 4 and self.velocity < 6:
slow_down = True
if car_distance_to_stop_line > 0.5:
planned_velocity = 1.0
else:
planned_velocity = 0.0
reached_zero_velocity = True
else:
rospy.logwarn('too late to stopp the car')
self.car_distance_to_tl_when_car_started_to_slow_down = None
self.car_velocity_when_car_started_to_slow_down = None
rospy.loginfo('car_distance_to_stop_line %s velocity %s set to %s',car_distance_to_stop_line,self.velocity,planned_velocity)
# Fill the lane with the final waypoints
for num_wp in range(LOOKAHEAD_WPS):
wp = Waypoint()
wp.pose = self.waypoints.waypoints[(first_wpt_index + num_wp) % num_waypoints_in_list].pose
wp.twist = self.waypoints.waypoints[(first_wpt_index + num_wp) % num_waypoints_in_list].twist
wp.twist.twist.linear.x = planned_velocity
wp.twist.twist.linear.y = 0.0
wp.twist.twist.linear.z = 0.0
wp.twist.twist.angular.x = 0.0
wp.twist.twist.angular.y = 0.0
wp.twist.twist.angular.z = 0.0
lane.waypoints.append(wp)
# finally, publish waypoints as modified on /final_waypoints topic
self.final_waypoints_pub.publish(lane)
def velocity_cb(self, msg):
self.velocity = msg.twist.linear.x
def waypoints_cb(self, waypoints):
self.waypoints = waypoints
def traffic_cb(self, traffic_waypoint):
# Callback for /traffic_waypoint message.
# Store the timestamp and the traffic light position to use them for final_waypoints in waypoints_cb
self.traffic_waypoint_timestamp = time.time()
self.light_waypoint_index = traffic_waypoint.data
# rospy.loginfo("received traffic light %s",self.light_waypoint_index) | pass
def get_waypoint_velocity(self, waypoint):
return waypoint.twist.twist.linear.x
def set_waypoint_velocity(self, waypoints, waypoint, velocity):
waypoints[waypoint].twist.twist.linear.x = velocity
def distance(self, waypoints, wp1, wp2):
dist = 0
dl = lambda a, b: math.sqrt((a.x-b.x)**2 + (a.y-b.y)**2 + (a.z-b.z)**2)
for i in range(wp1, wp2+1):
dist += dl(waypoints[wp1].pose.pose.position, waypoints[i].pose.pose.position)
wp1 = i
return dist
def distance(self, pose1, pose2):
return math.sqrt((pose1.x-pose2.x)**2 + (pose1.y-pose2.y)**2 + (pose1.z-pose2.z)**2)
if __name__ == '__main__':
try:
WaypointUpdater()
except rospy.ROSInterruptException:
rospy.logerr('Could not start waypoint updater node.') |
def obstacle_cb(self, msg):
# TODO: Callback for /obstacle_waypoint message. We will implement it later | random_line_split |
waypoint_updater.py | #!/usr/bin/env python
import rospy
from std_msgs.msg import Int32
from geometry_msgs.msg import PoseStamped, TwistStamped
from styx_msgs.msg import Lane, Waypoint
import tf
import math
import time
'''
This node will publish waypoints from the car's current position to some `x` distance ahead.
As mentioned in the doc, you should ideally first implement a version which does not care
about traffic lights or obstacles.
Once you have created dbw_node, you will update this node to use the status of traffic lights too.
Please note that our simulator also provides the exact location of traffic lights and their
current status in `/vehicle/traffic_lights` message. You can use this message to build this node
as well as to verify your TL classifier.
TODO (for Yousuf and Aaron): Stopline location for each traffic light.
'''
LOOKAHEAD_WPS = 100 # Number of waypoints we will publish. You can change this number
TIMEOUT_VALUE = 0.1
ONE_MPH = 0.44704
class WaypointUpdater(object):
def __init__(self):
rospy.loginfo('WaypointUpdater::__init__ - Start')
rospy.init_node('waypoint_updater')
rospy.Subscriber('/current_pose', PoseStamped, self.pose_cb)
rospy.Subscriber('/current_velocity', TwistStamped, self.velocity_cb)
rospy.Subscriber('/base_waypoints', Lane, self.waypoints_cb)
# TODO: Add a subscriber for /traffic_waypoint and /obstacle_waypoint below
rospy.Subscriber('/traffic_waypoint', Int32, self.traffic_cb)
# commenting the two below for the time being until clarification about whether
# is needed or not
#rospy.Subscriber('/obstacle_waypoint', , self.obstacle_cb)
self.final_waypoints_pub = rospy.Publisher('final_waypoints', Lane, queue_size=1)
# TODO: Add other member variables you need below
self.tf_listener = tf.TransformListener()
# The car's current position
self.pose = None
# The maps's complete waypoints
self.waypoints = None
# The car's current velocity
self.velocity = 0.0
# The timestamp of the last traffic_waypoint
self.traffic_waypoint_timestamp = 0.0
# The index of the waypoint in the base_waypoints list, which is closest to the traffic light
self.light_waypoint_index = None
# The approximate distance from the stop line to the traffic light
self.light_distance_thresh = 3.0
# The car's distance to the traffic light when the car started the slowing down process
self.car_distance_to_sl_when_car_started_to_slow_down = None
self.car_velocity_when_car_started_to_slow_down = None
# first waypoint index at the previous iteration
self.prev_first_wpt_index = 0
self.default_velocity = rospy.get_param('~velocity', 1) * ONE_MPH
rospy.spin()
def pose_cb(self, msg):
self.pose = msg
first_wpt_index = -1
min_wpt_distance = float('inf')
if self.waypoints is None:
return
num_waypoints_in_list = len(self.waypoints.waypoints)
# Gererate an empty lane to store the final_waypoints
lane = Lane()
lane.header.frame_id = self.waypoints.header.frame_id
lane.header.stamp = rospy.Time(0)
lane.waypoints = []
# Iterate through the complete set of waypoints until we found the closest
distance_decreased = False
#rospy.loginfo('Started at waypoint index: %s', self.prev_first_wpt_index)
#start_time = time.time()
for index, waypoint in enumerate(self.waypoints.waypoints[self.prev_first_wpt_index:] + self.waypoints.waypoints[:self.prev_first_wpt_index], start=self.prev_first_wpt_index):
current_wpt_distance = self.distance(self.pose.pose.position, waypoint.pose.pose.position)
if distance_decreased and current_wpt_distance > min_wpt_distance:
break
if current_wpt_distance > 0 and current_wpt_distance < min_wpt_distance:
min_wpt_distance = current_wpt_distance
first_wpt_index = index
distance_decreased = True
first_wpt_index %= num_waypoints_in_list
transformed_light_point = None
if first_wpt_index == -1:
rospy.logwarn('WaypointUpdater::waypoints_cb - No waypoints ahead of ego were found... seems that the car went off course')
else:
#transform fast avoiding wait cycles
# Transform first waypoint to car coordinates
self.waypoints.waypoints[first_wpt_index].pose.header.frame_id = self.waypoints.header.frame_id
try:
self.tf_listener.waitForTransform("base_link", "world", rospy.Time(0), rospy.Duration(0.02))
transformed_waypoint = self.tf_listener.transformPose("base_link", self.waypoints.waypoints[first_wpt_index].pose)
except (tf.Exception, tf.LookupException, tf.ConnectivityException):
try:
self.tf_listener.waitForTransform("base_link", "world", rospy.Time(0), rospy.Duration(TIMEOUT_VALUE))
transformed_waypoint = self.tf_listener.transformPose("base_link", self.waypoints.waypoints[first_wpt_index].pose)
except (tf.Exception, tf.LookupException, tf.ConnectivityException):
rospy.logwarn("Failed to find camera to map transform")
return
# All waypoints in front of the car should have positive X coordinate in car coordinate frame
# If the closest waypoint is behind the car, skip this waypoint
if transformed_waypoint.pose.position.x <= 0.0:
first_wpt_index += 1
self.prev_first_wpt_index = first_wpt_index % num_waypoints_in_list
# Prepare for calculating velocity:
slow_down = False
reached_zero_velocity = False
car_distance_to_stop_line = -1.
planned_velocity = self.default_velocity
# If the last traffic_waypoint message is newer than the threshold, we might need to the car.
if self.light_waypoint_index >= 0:
rospy.logdebug('should stopp the car %s', self.light_waypoint_index)
self.waypoints.waypoints[self.light_waypoint_index].pose.header.frame_id = self.waypoints.header.frame_id
transformed_light_point = self.tf_listener.transformPose("base_link", self.waypoints.waypoints[self.light_waypoint_index].pose)
# The approximate distance from the stop line to the traffic light
car_distance_to_stop_line = transformed_light_point.pose.position.x - self.light_distance_thresh
# Estimate whether the car cannot cross the stop line on yellow (in less than 2 seconds). Otherwise don't slow down.
if self.velocity / car_distance_to_stop_line < 2 and car_distance_to_stop_line >= 4:
slow_down = True
if self.car_distance_to_sl_when_car_started_to_slow_down is None:
self.car_distance_to_sl_when_car_started_to_slow_down = car_distance_to_stop_line
self.car_velocity_when_car_started_to_slow_down = self.velocity
rospy.logdebug('Stopping the car')
planned_velocity = min(max(abs(car_distance_to_stop_line*0.2),0.0),self.default_velocity)
# Stop the car in a safe distance before the stop line to give the simulator space to adapt velocity
#we are close to the stop line and slow
elif car_distance_to_stop_line > 0 and car_distance_to_stop_line < 4 and self.velocity < 6:
slow_down = True
if car_distance_to_stop_line > 0.5:
planned_velocity = 1.0
else:
planned_velocity = 0.0
reached_zero_velocity = True
else:
rospy.logwarn('too late to stopp the car')
self.car_distance_to_tl_when_car_started_to_slow_down = None
self.car_velocity_when_car_started_to_slow_down = None
rospy.loginfo('car_distance_to_stop_line %s velocity %s set to %s',car_distance_to_stop_line,self.velocity,planned_velocity)
# Fill the lane with the final waypoints
for num_wp in range(LOOKAHEAD_WPS):
wp = Waypoint()
wp.pose = self.waypoints.waypoints[(first_wpt_index + num_wp) % num_waypoints_in_list].pose
wp.twist = self.waypoints.waypoints[(first_wpt_index + num_wp) % num_waypoints_in_list].twist
wp.twist.twist.linear.x = planned_velocity
wp.twist.twist.linear.y = 0.0
wp.twist.twist.linear.z = 0.0
wp.twist.twist.angular.x = 0.0
wp.twist.twist.angular.y = 0.0
wp.twist.twist.angular.z = 0.0
lane.waypoints.append(wp)
# finally, publish waypoints as modified on /final_waypoints topic
self.final_waypoints_pub.publish(lane)
def velocity_cb(self, msg):
self.velocity = msg.twist.linear.x
def waypoints_cb(self, waypoints):
self.waypoints = waypoints
def traffic_cb(self, traffic_waypoint):
# Callback for /traffic_waypoint message.
# Store the timestamp and the traffic light position to use them for final_waypoints in waypoints_cb
self.traffic_waypoint_timestamp = time.time()
self.light_waypoint_index = traffic_waypoint.data
# rospy.loginfo("received traffic light %s",self.light_waypoint_index)
def obstacle_cb(self, msg):
# TODO: Callback for /obstacle_waypoint message. We will implement it later
pass
def get_waypoint_velocity(self, waypoint):
return waypoint.twist.twist.linear.x
def set_waypoint_velocity(self, waypoints, waypoint, velocity):
waypoints[waypoint].twist.twist.linear.x = velocity
def | (self, waypoints, wp1, wp2):
dist = 0
dl = lambda a, b: math.sqrt((a.x-b.x)**2 + (a.y-b.y)**2 + (a.z-b.z)**2)
for i in range(wp1, wp2+1):
dist += dl(waypoints[wp1].pose.pose.position, waypoints[i].pose.pose.position)
wp1 = i
return dist
def distance(self, pose1, pose2):
return math.sqrt((pose1.x-pose2.x)**2 + (pose1.y-pose2.y)**2 + (pose1.z-pose2.z)**2)
if __name__ == '__main__':
try:
WaypointUpdater()
except rospy.ROSInterruptException:
rospy.logerr('Could not start waypoint updater node.')
| distance | identifier_name |
lib.rs | //! # Bracket Parse
//!
//! A Utility for parsing Bracketed lists and sets of strings.
//!
//! It is a relatively lazy way of parsing items from a bracketed string,
//!
//! "hello(peter,dave)" is easy for it to handle, as are nested brackets.
//!
//! The above will result in something like
//!
//! >Branch[Leaf("hello"),Branch[Leaf("peter"),Leaf("dave")]]
//!
//! This is not intended super extensible right now,
//! though contributions are welcome.
//!
//! The list can also be constructed relatively simply by
//! using chained builder type methods
//!
//! ```
//! use bracket_parse::{Bracket,br};
//! use bracket_parse::Bracket::{Leaf,Branch};
//! use std::str::FromStr;
//!
//! let str1 = Bracket::from_str("hello(peter,dave)").unwrap();
//!
//! //Standard Build method
//! let basic1 = Branch(vec![Leaf("hello".to_string()),
//! Branch(vec![Leaf("peter".to_string()),
//! Leaf("dave".to_string())])]);
//!
//! //Chaining Build method
//! let chain1 = br().sib_lf("hello")
//! .sib(br().sib_lf("peter").sib_lf("dave"));
//!
//! assert_eq!(str1,basic1);
//! assert_eq!(str1,chain1);
//! ```
//!
//! It can also handle string input with escapes. Quotes are removed and the string item is
//! considered a single Leaf value;
//!
//! ```
//! use bracket_parse::{Bracket,br,lf};
//! use std::str::FromStr;
//!
//! let bk = Bracket::from_str(r#""hello" 'matt"' "and \"friends\"""#).unwrap();
//! let chn = br().sib_lf("hello").sib_lf("matt\"").sib_lf("and \"friends\"");
//! assert_eq!(bk,chn);
//!
//! ```
use std::str::FromStr;
use std::fmt;
use std::fmt::Display;
use std::iter::IntoIterator;
pub mod tail;
pub use tail::{Tail};
use tail::EMPTY_BRACKET;
pub mod iter;
pub use iter::*;
#[derive(PartialEq,Debug)]
pub enum Bracket{
Branch(Vec<Bracket>),
Leaf(String),
Empty,
}
pub fn lf(s:&str)->Bracket{
Bracket::Leaf(s.to_string())
}
pub fn br()->Bracket{
Bracket::Branch(Vec::new())
}
impl FromStr for Bracket{
type Err = String;
fn from_str(s:&str)->Result<Bracket,String>{
let mut res = Bracket::Empty;
let mut it = s.chars();
let mut curr = String::new();
while let Some(c) = it.next() {
Bracket::match_char(c,&mut it,&mut curr,&mut res)?;
}
if curr.len() >0 {
res.add_sib_str(curr);
}
Ok(res)
}
}
impl<'a>IntoIterator for &'a Bracket{
type Item = &'a Bracket;
type IntoIter = BracketIter<'a>;
fn into_iter(self)->Self::IntoIter{
BracketIter::new(self)
}
}
impl Bracket{
fn add_sib_str(&mut self,s:String){
if s.len() == 0 {
return
}
self.add_sibling(Bracket::Leaf(s));
}
/// chaining method for quickly creating a tree Adds a sibling to a bracket
/// if it is a leaf makes it a parent.
pub fn sib(mut self,s:Self)->Self{
self.add_sibling(s);
self
}
/// chainging method for easily adding a leaf as a sibling from an &str
pub fn sib_lf(self,s:&str)->Self{
self.sib(lf(s))
}
fn add_sibling(&mut self,s:Bracket){
if s == Bracket::Empty {
return
}
let c:String = match self {
Bracket::Branch(ref mut v)=>{
v.push(s);
return
}
Bracket::Empty=>{
*self = s;
return
}
Bracket::Leaf(content)=>content.to_string(),
};
*self = Bracket::Branch(vec![Bracket::Leaf(c),s]);
}
fn match_char<I>(c:char,it:&mut I,curr:&mut String,res:&mut Bracket)->Result<(),String>
where I:Iterator<Item=char>{
match c {
'('=>{ // When Non Lexical Lifetimes comes, we can get rid of these curr.clone()s hopefully
res.add_sib_str(curr.clone());
*curr = String::new();
res.add_sibling(Bracket::from_bracket(it,')')?);
},
'{'=>{ //Todo make Json-esque prob needs Object Variant
res.add_sib_str(curr.clone());
*curr = String::new();
res.add_sibling(Bracket::from_bracket(it,'}')?);
},
'['=>{
res.add_sib_str(curr.clone());
*curr = String::new();
res.add_sibling(Bracket::from_bracket(it,']')?);
},
'"'|'\''=>{
res.add_sib_str(curr.clone());
*curr = String::new();
res.add_sibling(Bracket::from_quotes(it,c)?);
}
' '|','=>{
res.add_sib_str(curr.clone());
*curr = String::new();
},
other=>curr.push(other),
}
Ok(())
}
fn from_bracket<I:Iterator<Item=char>>(it:&mut I,delim:char)->Result<Bracket,String>{
let mut res = Bracket::Branch(Vec::new());
let mut curr = String::new();
while let Some(c) = it.next() {
if c == delim {
res.add_sib_str(curr.clone());
return Ok(res);
}
Bracket::match_char(c,it,&mut curr,&mut res)?;
}
Err(format!("Close Delim '{}' not found",delim))
}
fn from_quotes<I:Iterator<Item=char>>(it:&mut I,delim:char)->Result<Bracket,String>{
let mut curr = String::new();
while let Some(c) = it.next() {
if c == delim {
return Ok(Bracket::Leaf(curr));
}
match c {
'\\'=>{
match it.next(){
Some(c2)=>{
curr.push(c2);
continue
},
None=>return Err("Escape before end of string".to_string()),
}
},
_=> curr.push(c),
}
}
Err(format!("Close Delim '{}' not found",delim))
}
pub fn head<'a>(&'a self)->&'a Bracket{
match self{
Bracket::Branch(v)=>match v.len(){
0 => &EMPTY_BRACKET,
_ => &v[0],
}
_ => &EMPTY_BRACKET,
}
}
pub fn tail<'a>(&'a self)->Tail<'a>{
match self{
Bracket::Branch(v)=>match v.len(){
0|1 =>Tail::Empty,
_=>Tail::Rest(&v[1..]),
}
_=>Tail::Empty,
}
}
pub fn tail_n<'a>(&'a self,n:usize)->Tail<'a>{
match self{
Bracket::Branch(v)=>{
if v.len() <= n {
return Tail::Empty;
}
Tail::Rest(&v[n..])
}
_=>Tail::Empty,
}
}
pub fn tail_h<'a>(&'a self, n:usize)->&'a Bracket{
match self{
Bracket::Branch(v)=>{
if v.len() <= n{
return &EMPTY_BRACKET;
}
&v[n]
}
_=>&EMPTY_BRACKET,
}
}
pub fn head_tail<'a>(&'a self)->(&'a Bracket,Tail<'a>){
(self.head(),self.tail())
}
pub fn match_str<'a>(&'a self)->&'a str{
match self {
Bracket::Leaf(ref s)=>s.as_ref(),
_=>"",
}
}
}
impl Display for Bracket {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
Bracket::Branch(ref v)=>{
let mut gap = "";
for b in v {
let res = match b {
Bracket::Branch(_)=>write!(f,"{}[{}]",gap,b),
_=>write!(f,"{}{}",gap,b),
};
if res.is_err(){
return res;
}
gap = " ";
}
Ok(())
},
Bracket::Leaf(s)=>{
//TODO handle Escapes
write!(f,"\"{}\"",s)
},
_=>{ write!(f,"--EMPTY--") },
}
}
}
#[cfg(test)]
mod tests {
use super::{Bracket,br,lf};
use std::str::FromStr;
#[test]
fn spaces() {
let b1 = Bracket::from_str("matt dave (andy steve)").unwrap();
let c1 = br().sib_lf("matt").sib_lf("dave").sib(
br().sib_lf("andy").sib_lf("steve")
);
let b2 = Bracket::from_str("matt dave( andy steve)").unwrap();
let b3 = Bracket::from_str(" matt dave ( andy steve ) ").unwrap();
assert_eq!(b1,c1);
assert_eq!(b1,b2);
assert_eq!(b1,b3);
}
#[test]
fn empty_parent(){
let b1 = Bracket::from_str("matt () dave").unwrap();
let c1 = br().sib_lf("matt").sib(br()).sib_lf("dave");
assert_eq!(b1,c1);
}
#[test]
fn many_parent(){
let b1 = Bracket::from_str("matt ({[() ()]})").unwrap();
let c1 = lf("matt")
.sib(
br().sib(
br().sib(
br().sib(br()).sib(br())
)
)
);
assert_eq!(b1,c1);
}
#[test]
fn strings(){
let b1 = Bracket::from_str(r#"matt"dave""#).unwrap();
let c1 = br().sib_lf("matt").sib_lf("dave");
assert_eq!(b1,c1);
let b2 = Bracket::from_str(r#""andy \"hates\" cheese""#).unwrap();
let c2 = lf(r#"andy "hates" cheese"#);
assert_eq!(b2,c2);
}
#[test]
fn errors(){
assert!(Bracket::from_str("peop ( er").is_err());
assert!(Bracket::from_str(r#""poop"#).is_err());
} | #[test]
fn test_head_tail(){
let b1 = Bracket::from_str("hello (andy dave)").unwrap();
match b1.head().match_str(){
"hello"=>{},//Where the actual code might go
_=>panic!("Head is not hello leaf"),
}
}
#[test]
fn many_tails(){
let pb = br().sib_lf("matt").sib_lf("dave").sib_lf("pete").sib_lf("andy");
let t1 = pb.tail(); //pb is parent bracket, t1 is tail
let t4 = t1.tail_h(2).match_str();
assert_eq!(t4,"andy");
let th1 = pb.tail_h(3).match_str();
assert_eq!(t4,th1);
}
#[test]
fn test_to_string(){
let br = Bracket::from_str("matt dave( andy steve)").unwrap();
let bs = br.to_string();
assert_eq!(&bs,r#""matt" "dave" ["andy" "steve"]"#);
}
} | random_line_split | |
lib.rs | //! # Bracket Parse
//!
//! A Utility for parsing Bracketed lists and sets of strings.
//!
//! It is a relatively lazy way of parsing items from a bracketed string,
//!
//! "hello(peter,dave)" is easy for it to handle, as are nested brackets.
//!
//! The above will result in something like
//!
//! >Branch[Leaf("hello"),Branch[Leaf("peter"),Leaf("dave")]]
//!
//! This is not intended super extensible right now,
//! though contributions are welcome.
//!
//! The list can also be constructed relatively simply by
//! using chained builder type methods
//!
//! ```
//! use bracket_parse::{Bracket,br};
//! use bracket_parse::Bracket::{Leaf,Branch};
//! use std::str::FromStr;
//!
//! let str1 = Bracket::from_str("hello(peter,dave)").unwrap();
//!
//! //Standard Build method
//! let basic1 = Branch(vec![Leaf("hello".to_string()),
//! Branch(vec![Leaf("peter".to_string()),
//! Leaf("dave".to_string())])]);
//!
//! //Chaining Build method
//! let chain1 = br().sib_lf("hello")
//! .sib(br().sib_lf("peter").sib_lf("dave"));
//!
//! assert_eq!(str1,basic1);
//! assert_eq!(str1,chain1);
//! ```
//!
//! It can also handle string input with escapes. Quotes are removed and the string item is
//! considered a single Leaf value;
//!
//! ```
//! use bracket_parse::{Bracket,br,lf};
//! use std::str::FromStr;
//!
//! let bk = Bracket::from_str(r#""hello" 'matt"' "and \"friends\"""#).unwrap();
//! let chn = br().sib_lf("hello").sib_lf("matt\"").sib_lf("and \"friends\"");
//! assert_eq!(bk,chn);
//!
//! ```
use std::str::FromStr;
use std::fmt;
use std::fmt::Display;
use std::iter::IntoIterator;
pub mod tail;
pub use tail::{Tail};
use tail::EMPTY_BRACKET;
pub mod iter;
pub use iter::*;
#[derive(PartialEq,Debug)]
pub enum Bracket{
Branch(Vec<Bracket>),
Leaf(String),
Empty,
}
pub fn lf(s:&str)->Bracket |
pub fn br()->Bracket{
Bracket::Branch(Vec::new())
}
impl FromStr for Bracket{
type Err = String;
fn from_str(s:&str)->Result<Bracket,String>{
let mut res = Bracket::Empty;
let mut it = s.chars();
let mut curr = String::new();
while let Some(c) = it.next() {
Bracket::match_char(c,&mut it,&mut curr,&mut res)?;
}
if curr.len() >0 {
res.add_sib_str(curr);
}
Ok(res)
}
}
impl<'a>IntoIterator for &'a Bracket{
type Item = &'a Bracket;
type IntoIter = BracketIter<'a>;
fn into_iter(self)->Self::IntoIter{
BracketIter::new(self)
}
}
impl Bracket{
fn add_sib_str(&mut self,s:String){
if s.len() == 0 {
return
}
self.add_sibling(Bracket::Leaf(s));
}
/// chaining method for quickly creating a tree Adds a sibling to a bracket
/// if it is a leaf makes it a parent.
pub fn sib(mut self,s:Self)->Self{
self.add_sibling(s);
self
}
/// chainging method for easily adding a leaf as a sibling from an &str
pub fn sib_lf(self,s:&str)->Self{
self.sib(lf(s))
}
fn add_sibling(&mut self,s:Bracket){
if s == Bracket::Empty {
return
}
let c:String = match self {
Bracket::Branch(ref mut v)=>{
v.push(s);
return
}
Bracket::Empty=>{
*self = s;
return
}
Bracket::Leaf(content)=>content.to_string(),
};
*self = Bracket::Branch(vec![Bracket::Leaf(c),s]);
}
fn match_char<I>(c:char,it:&mut I,curr:&mut String,res:&mut Bracket)->Result<(),String>
where I:Iterator<Item=char>{
match c {
'('=>{ // When Non Lexical Lifetimes comes, we can get rid of these curr.clone()s hopefully
res.add_sib_str(curr.clone());
*curr = String::new();
res.add_sibling(Bracket::from_bracket(it,')')?);
},
'{'=>{ //Todo make Json-esque prob needs Object Variant
res.add_sib_str(curr.clone());
*curr = String::new();
res.add_sibling(Bracket::from_bracket(it,'}')?);
},
'['=>{
res.add_sib_str(curr.clone());
*curr = String::new();
res.add_sibling(Bracket::from_bracket(it,']')?);
},
'"'|'\''=>{
res.add_sib_str(curr.clone());
*curr = String::new();
res.add_sibling(Bracket::from_quotes(it,c)?);
}
' '|','=>{
res.add_sib_str(curr.clone());
*curr = String::new();
},
other=>curr.push(other),
}
Ok(())
}
fn from_bracket<I:Iterator<Item=char>>(it:&mut I,delim:char)->Result<Bracket,String>{
let mut res = Bracket::Branch(Vec::new());
let mut curr = String::new();
while let Some(c) = it.next() {
if c == delim {
res.add_sib_str(curr.clone());
return Ok(res);
}
Bracket::match_char(c,it,&mut curr,&mut res)?;
}
Err(format!("Close Delim '{}' not found",delim))
}
fn from_quotes<I:Iterator<Item=char>>(it:&mut I,delim:char)->Result<Bracket,String>{
let mut curr = String::new();
while let Some(c) = it.next() {
if c == delim {
return Ok(Bracket::Leaf(curr));
}
match c {
'\\'=>{
match it.next(){
Some(c2)=>{
curr.push(c2);
continue
},
None=>return Err("Escape before end of string".to_string()),
}
},
_=> curr.push(c),
}
}
Err(format!("Close Delim '{}' not found",delim))
}
pub fn head<'a>(&'a self)->&'a Bracket{
match self{
Bracket::Branch(v)=>match v.len(){
0 => &EMPTY_BRACKET,
_ => &v[0],
}
_ => &EMPTY_BRACKET,
}
}
pub fn tail<'a>(&'a self)->Tail<'a>{
match self{
Bracket::Branch(v)=>match v.len(){
0|1 =>Tail::Empty,
_=>Tail::Rest(&v[1..]),
}
_=>Tail::Empty,
}
}
pub fn tail_n<'a>(&'a self,n:usize)->Tail<'a>{
match self{
Bracket::Branch(v)=>{
if v.len() <= n {
return Tail::Empty;
}
Tail::Rest(&v[n..])
}
_=>Tail::Empty,
}
}
pub fn tail_h<'a>(&'a self, n:usize)->&'a Bracket{
match self{
Bracket::Branch(v)=>{
if v.len() <= n{
return &EMPTY_BRACKET;
}
&v[n]
}
_=>&EMPTY_BRACKET,
}
}
pub fn head_tail<'a>(&'a self)->(&'a Bracket,Tail<'a>){
(self.head(),self.tail())
}
pub fn match_str<'a>(&'a self)->&'a str{
match self {
Bracket::Leaf(ref s)=>s.as_ref(),
_=>"",
}
}
}
impl Display for Bracket {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
Bracket::Branch(ref v)=>{
let mut gap = "";
for b in v {
let res = match b {
Bracket::Branch(_)=>write!(f,"{}[{}]",gap,b),
_=>write!(f,"{}{}",gap,b),
};
if res.is_err(){
return res;
}
gap = " ";
}
Ok(())
},
Bracket::Leaf(s)=>{
//TODO handle Escapes
write!(f,"\"{}\"",s)
},
_=>{ write!(f,"--EMPTY--") },
}
}
}
#[cfg(test)]
mod tests {
use super::{Bracket,br,lf};
use std::str::FromStr;
#[test]
fn spaces() {
let b1 = Bracket::from_str("matt dave (andy steve)").unwrap();
let c1 = br().sib_lf("matt").sib_lf("dave").sib(
br().sib_lf("andy").sib_lf("steve")
);
let b2 = Bracket::from_str("matt dave( andy steve)").unwrap();
let b3 = Bracket::from_str(" matt dave ( andy steve ) ").unwrap();
assert_eq!(b1,c1);
assert_eq!(b1,b2);
assert_eq!(b1,b3);
}
#[test]
fn empty_parent(){
let b1 = Bracket::from_str("matt () dave").unwrap();
let c1 = br().sib_lf("matt").sib(br()).sib_lf("dave");
assert_eq!(b1,c1);
}
#[test]
fn many_parent(){
let b1 = Bracket::from_str("matt ({[() ()]})").unwrap();
let c1 = lf("matt")
.sib(
br().sib(
br().sib(
br().sib(br()).sib(br())
)
)
);
assert_eq!(b1,c1);
}
#[test]
fn strings(){
let b1 = Bracket::from_str(r#"matt"dave""#).unwrap();
let c1 = br().sib_lf("matt").sib_lf("dave");
assert_eq!(b1,c1);
let b2 = Bracket::from_str(r#""andy \"hates\" cheese""#).unwrap();
let c2 = lf(r#"andy "hates" cheese"#);
assert_eq!(b2,c2);
}
#[test]
fn errors(){
assert!(Bracket::from_str("peop ( er").is_err());
assert!(Bracket::from_str(r#""poop"#).is_err());
}
#[test]
fn test_head_tail(){
let b1 = Bracket::from_str("hello (andy dave)").unwrap();
match b1.head().match_str(){
"hello"=>{},//Where the actual code might go
_=>panic!("Head is not hello leaf"),
}
}
#[test]
fn many_tails(){
let pb = br().sib_lf("matt").sib_lf("dave").sib_lf("pete").sib_lf("andy");
let t1 = pb.tail(); //pb is parent bracket, t1 is tail
let t4 = t1.tail_h(2).match_str();
assert_eq!(t4,"andy");
let th1 = pb.tail_h(3).match_str();
assert_eq!(t4,th1);
}
#[test]
fn test_to_string(){
let br = Bracket::from_str("matt dave( andy steve)").unwrap();
let bs = br.to_string();
assert_eq!(&bs,r#""matt" "dave" ["andy" "steve"]"#);
}
}
| {
Bracket::Leaf(s.to_string())
} | identifier_body |
lib.rs | //! # Bracket Parse
//!
//! A Utility for parsing Bracketed lists and sets of strings.
//!
//! It is a relatively lazy way of parsing items from a bracketed string,
//!
//! "hello(peter,dave)" is easy for it to handle, as are nested brackets.
//!
//! The above will result in something like
//!
//! >Branch[Leaf("hello"),Branch[Leaf("peter"),Leaf("dave")]]
//!
//! This is not intended super extensible right now,
//! though contributions are welcome.
//!
//! The list can also be constructed relatively simply by
//! using chained builder type methods
//!
//! ```
//! use bracket_parse::{Bracket,br};
//! use bracket_parse::Bracket::{Leaf,Branch};
//! use std::str::FromStr;
//!
//! let str1 = Bracket::from_str("hello(peter,dave)").unwrap();
//!
//! //Standard Build method
//! let basic1 = Branch(vec![Leaf("hello".to_string()),
//! Branch(vec![Leaf("peter".to_string()),
//! Leaf("dave".to_string())])]);
//!
//! //Chaining Build method
//! let chain1 = br().sib_lf("hello")
//! .sib(br().sib_lf("peter").sib_lf("dave"));
//!
//! assert_eq!(str1,basic1);
//! assert_eq!(str1,chain1);
//! ```
//!
//! It can also handle string input with escapes. Quotes are removed and the string item is
//! considered a single Leaf value;
//!
//! ```
//! use bracket_parse::{Bracket,br,lf};
//! use std::str::FromStr;
//!
//! let bk = Bracket::from_str(r#""hello" 'matt"' "and \"friends\"""#).unwrap();
//! let chn = br().sib_lf("hello").sib_lf("matt\"").sib_lf("and \"friends\"");
//! assert_eq!(bk,chn);
//!
//! ```
use std::str::FromStr;
use std::fmt;
use std::fmt::Display;
use std::iter::IntoIterator;
pub mod tail;
pub use tail::{Tail};
use tail::EMPTY_BRACKET;
pub mod iter;
pub use iter::*;
#[derive(PartialEq,Debug)]
pub enum Bracket{
Branch(Vec<Bracket>),
Leaf(String),
Empty,
}
pub fn lf(s:&str)->Bracket{
Bracket::Leaf(s.to_string())
}
pub fn br()->Bracket{
Bracket::Branch(Vec::new())
}
impl FromStr for Bracket{
type Err = String;
fn from_str(s:&str)->Result<Bracket,String>{
let mut res = Bracket::Empty;
let mut it = s.chars();
let mut curr = String::new();
while let Some(c) = it.next() {
Bracket::match_char(c,&mut it,&mut curr,&mut res)?;
}
if curr.len() >0 {
res.add_sib_str(curr);
}
Ok(res)
}
}
impl<'a>IntoIterator for &'a Bracket{
type Item = &'a Bracket;
type IntoIter = BracketIter<'a>;
fn into_iter(self)->Self::IntoIter{
BracketIter::new(self)
}
}
impl Bracket{
fn add_sib_str(&mut self,s:String){
if s.len() == 0 {
return
}
self.add_sibling(Bracket::Leaf(s));
}
/// chaining method for quickly creating a tree Adds a sibling to a bracket
/// if it is a leaf makes it a parent.
pub fn sib(mut self,s:Self)->Self{
self.add_sibling(s);
self
}
/// chainging method for easily adding a leaf as a sibling from an &str
pub fn sib_lf(self,s:&str)->Self{
self.sib(lf(s))
}
fn add_sibling(&mut self,s:Bracket){
if s == Bracket::Empty {
return
}
let c:String = match self {
Bracket::Branch(ref mut v)=>{
v.push(s);
return
}
Bracket::Empty=>{
*self = s;
return
}
Bracket::Leaf(content)=>content.to_string(),
};
*self = Bracket::Branch(vec![Bracket::Leaf(c),s]);
}
fn match_char<I>(c:char,it:&mut I,curr:&mut String,res:&mut Bracket)->Result<(),String>
where I:Iterator<Item=char>{
match c {
'('=>{ // When Non Lexical Lifetimes comes, we can get rid of these curr.clone()s hopefully
res.add_sib_str(curr.clone());
*curr = String::new();
res.add_sibling(Bracket::from_bracket(it,')')?);
},
'{'=>{ //Todo make Json-esque prob needs Object Variant
res.add_sib_str(curr.clone());
*curr = String::new();
res.add_sibling(Bracket::from_bracket(it,'}')?);
},
'['=>{
res.add_sib_str(curr.clone());
*curr = String::new();
res.add_sibling(Bracket::from_bracket(it,']')?);
},
'"'|'\''=>{
res.add_sib_str(curr.clone());
*curr = String::new();
res.add_sibling(Bracket::from_quotes(it,c)?);
}
' '|','=>{
res.add_sib_str(curr.clone());
*curr = String::new();
},
other=>curr.push(other),
}
Ok(())
}
fn from_bracket<I:Iterator<Item=char>>(it:&mut I,delim:char)->Result<Bracket,String>{
let mut res = Bracket::Branch(Vec::new());
let mut curr = String::new();
while let Some(c) = it.next() {
if c == delim {
res.add_sib_str(curr.clone());
return Ok(res);
}
Bracket::match_char(c,it,&mut curr,&mut res)?;
}
Err(format!("Close Delim '{}' not found",delim))
}
fn from_quotes<I:Iterator<Item=char>>(it:&mut I,delim:char)->Result<Bracket,String>{
let mut curr = String::new();
while let Some(c) = it.next() {
if c == delim {
return Ok(Bracket::Leaf(curr));
}
match c {
'\\'=>{
match it.next(){
Some(c2)=>{
curr.push(c2);
continue
},
None=>return Err("Escape before end of string".to_string()),
}
},
_=> curr.push(c),
}
}
Err(format!("Close Delim '{}' not found",delim))
}
pub fn head<'a>(&'a self)->&'a Bracket{
match self{
Bracket::Branch(v)=>match v.len(){
0 => &EMPTY_BRACKET,
_ => &v[0],
}
_ => &EMPTY_BRACKET,
}
}
pub fn tail<'a>(&'a self)->Tail<'a>{
match self{
Bracket::Branch(v)=>match v.len(){
0|1 =>Tail::Empty,
_=>Tail::Rest(&v[1..]),
}
_=>Tail::Empty,
}
}
pub fn tail_n<'a>(&'a self,n:usize)->Tail<'a>{
match self{
Bracket::Branch(v)=>{
if v.len() <= n {
return Tail::Empty;
}
Tail::Rest(&v[n..])
}
_=>Tail::Empty,
}
}
pub fn tail_h<'a>(&'a self, n:usize)->&'a Bracket{
match self{
Bracket::Branch(v)=>{
if v.len() <= n{
return &EMPTY_BRACKET;
}
&v[n]
}
_=>&EMPTY_BRACKET,
}
}
pub fn head_tail<'a>(&'a self)->(&'a Bracket,Tail<'a>){
(self.head(),self.tail())
}
pub fn match_str<'a>(&'a self)->&'a str{
match self {
Bracket::Leaf(ref s)=>s.as_ref(),
_=>"",
}
}
}
impl Display for Bracket {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
Bracket::Branch(ref v)=>{
let mut gap = "";
for b in v {
let res = match b {
Bracket::Branch(_)=>write!(f,"{}[{}]",gap,b),
_=>write!(f,"{}{}",gap,b),
};
if res.is_err(){
return res;
}
gap = " ";
}
Ok(())
},
Bracket::Leaf(s)=>{
//TODO handle Escapes
write!(f,"\"{}\"",s)
},
_=>{ write!(f,"--EMPTY--") },
}
}
}
#[cfg(test)]
mod tests {
use super::{Bracket,br,lf};
use std::str::FromStr;
#[test]
fn | () {
let b1 = Bracket::from_str("matt dave (andy steve)").unwrap();
let c1 = br().sib_lf("matt").sib_lf("dave").sib(
br().sib_lf("andy").sib_lf("steve")
);
let b2 = Bracket::from_str("matt dave( andy steve)").unwrap();
let b3 = Bracket::from_str(" matt dave ( andy steve ) ").unwrap();
assert_eq!(b1,c1);
assert_eq!(b1,b2);
assert_eq!(b1,b3);
}
#[test]
fn empty_parent(){
let b1 = Bracket::from_str("matt () dave").unwrap();
let c1 = br().sib_lf("matt").sib(br()).sib_lf("dave");
assert_eq!(b1,c1);
}
#[test]
fn many_parent(){
let b1 = Bracket::from_str("matt ({[() ()]})").unwrap();
let c1 = lf("matt")
.sib(
br().sib(
br().sib(
br().sib(br()).sib(br())
)
)
);
assert_eq!(b1,c1);
}
#[test]
fn strings(){
let b1 = Bracket::from_str(r#"matt"dave""#).unwrap();
let c1 = br().sib_lf("matt").sib_lf("dave");
assert_eq!(b1,c1);
let b2 = Bracket::from_str(r#""andy \"hates\" cheese""#).unwrap();
let c2 = lf(r#"andy "hates" cheese"#);
assert_eq!(b2,c2);
}
#[test]
fn errors(){
assert!(Bracket::from_str("peop ( er").is_err());
assert!(Bracket::from_str(r#""poop"#).is_err());
}
#[test]
fn test_head_tail(){
let b1 = Bracket::from_str("hello (andy dave)").unwrap();
match b1.head().match_str(){
"hello"=>{},//Where the actual code might go
_=>panic!("Head is not hello leaf"),
}
}
#[test]
fn many_tails(){
let pb = br().sib_lf("matt").sib_lf("dave").sib_lf("pete").sib_lf("andy");
let t1 = pb.tail(); //pb is parent bracket, t1 is tail
let t4 = t1.tail_h(2).match_str();
assert_eq!(t4,"andy");
let th1 = pb.tail_h(3).match_str();
assert_eq!(t4,th1);
}
#[test]
fn test_to_string(){
let br = Bracket::from_str("matt dave( andy steve)").unwrap();
let bs = br.to_string();
assert_eq!(&bs,r#""matt" "dave" ["andy" "steve"]"#);
}
}
| spaces | identifier_name |
update.js | "use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const color_1 = require("@oclif/color");
const command_1 = require("@oclif/command");
const cli_ux_1 = require("cli-ux");
const spawn = require("cross-spawn");
const fs = require("fs-extra");
const _ = require("lodash");
const path = require("path");
const tar_1 = require("../tar");
const util_1 = require("../util");
class UpdateCommand extends command_1.default {
constructor() {
super(...arguments);
this.clientRoot = this.config.scopedEnvVar('OCLIF_CLIENT_HOME') || path.join(this.config.dataDir, 'client');
this.clientBin = path.join(this.clientRoot, 'bin', this.config.windows ? `${this.config.bin}.cmd` : this.config.bin);
}
async run() {
const { args, flags } = this.parse(UpdateCommand);
this.autoupdate = Boolean(flags.autoupdate);
if (this.autoupdate)
await this.debounce();
this.channel = args.channel || await this.determineChannel();
if (flags['from-local']) {
await this.ensureClientDir();
this.debug(`Looking for locally installed versions at ${this.clientRoot}`);
// Do not show known non-local version folder names, bin and current.
const versions = fs.readdirSync(this.clientRoot).filter(dirOrFile => dirOrFile !== 'bin' && dirOrFile !== 'current');
if (versions.length === 0)
throw new Error('No locally installed versions found.');
this.log(`Found versions: \n${versions.map(version => ` ${version}`).join('\n')}\n`);
const pinToVersion = await cli_ux_1.default.prompt('Enter a version to update to');
if (!versions.includes(pinToVersion))
throw new Error(`Version ${pinToVersion} not found in the locally installed versions.`);
if (!await fs.pathExists(path.join(this.clientRoot, pinToVersion))) {
throw new Error(`Version ${pinToVersion} is not already installed at ${this.clientRoot}.`);
}
cli_ux_1.default.action.start(`${this.config.name}: Updating CLI`);
this.debug(`switching to existing version ${pinToVersion}`);
this.updateToExistingVersion(pinToVersion);
this.log();
this.log(`Updating to an already installed version will not update the channel. If autoupdate is enabled, the CLI will eventually be updated back to ${this.channel}.`);
}
else {
cli_ux_1.default.action.start(`${this.config.name}: Updating CLI`);
await this.config.runHook('preupdate', { channel: this.channel });
const manifest = await this.fetchManifest();
this.currentVersion = await this.determineCurrentVersion();
this.updatedVersion = manifest.sha ? `${manifest.version}-${manifest.sha}` : manifest.version;
const reason = await this.skipUpdate();
if (reason)
cli_ux_1.default.action.stop(reason || 'done');
else
await this.update(manifest);
this.debug('tidy');
await this.tidy();
await this.config.runHook('update', { channel: this.channel });
}
this.debug('done');
cli_ux_1.default.action.stop();
}
async fetchManifest() {
const http = require('http-call').HTTP;
cli_ux_1.default.action.status = 'fetching manifest';
if (!this.config.scopedEnvVarTrue('USE_LEGACY_UPDATE')) {
try {
const newManifestUrl = this.config.s3Url(this.s3ChannelManifestKey(this.config.bin, this.config.platform, this.config.arch, this.config.pjson.oclif.update.s3.folder));
const { body } = await http.get(newManifestUrl);
if (typeof body === 'string') {
return JSON.parse(body);
}
return body;
}
catch (error) {
this.debug(error.message);
}
}
try {
const url = this.config.s3Url(this.config.s3Key('manifest', {
channel: this.channel,
platform: this.config.platform,
arch: this.config.arch,
}));
const { body } = await http.get(url);
// in case the content-type is not set, parse as a string
// this will happen if uploading without `oclif-dev publish`
if (typeof body === 'string') {
return JSON.parse(body);
}
return body;
}
catch (error) {
if (error.statusCode === 403)
throw new Error(`HTTP 403: Invalid channel ${this.channel}`);
throw error;
}
}
async downloadAndExtract(output, manifest, channel) {
const { version } = manifest;
const filesize = (n) => {
const [num, suffix] = require('filesize')(n, { output: 'array' });
return num.toFixed(1) + ` ${suffix}`;
};
const http = require('http-call').HTTP;
const gzUrl = manifest.gz || this.config.s3Url(this.config.s3Key('versioned', {
version,
channel,
bin: this.config.bin,
platform: this.config.platform,
arch: this.config.arch,
ext: 'gz',
}));
const { response: stream } = await http.stream(gzUrl);
stream.pause();
const baseDir = manifest.baseDir || this.config.s3Key('baseDir', {
version,
channel,
bin: this.config.bin,
platform: this.config.platform,
arch: this.config.arch,
});
const extraction = tar_1.extract(stream, baseDir, output, manifest.sha256gz);
// to-do: use cli.action.type
if (cli_ux_1.default.action.frames) {
// if spinner action
const total = parseInt(stream.headers['content-length'], 10);
let current = 0;
const updateStatus = _.throttle((newStatus) => {
cli_ux_1.default.action.status = newStatus;
}, 250, { leading: true, trailing: false });
stream.on('data', data => {
current += data.length;
updateStatus(`${filesize(current)}/${filesize(total)}`);
});
}
stream.resume();
await extraction;
}
async update(manifest, channel = 'stable') {
const { channel: manifestChannel } = manifest;
if (manifestChannel)
channel = manifestChannel;
cli_ux_1.default.action.start(`${this.config.name}: Updating CLI from ${color_1.default.green(this.currentVersion)} to ${color_1.default.green(this.updatedVersion)}${channel === 'stable' ? '' : ' (' + color_1.default.yellow(channel) + ')'}`);
await this.ensureClientDir();
const output = path.join(this.clientRoot, this.updatedVersion);
if (!await fs.pathExists(output)) {
await this.downloadAndExtract(output, manifest, channel);
}
await this.setChannel();
await this.createBin(this.updatedVersion);
await this.touch();
await this.reexec();
}
async updateToExistingVersion(version) {
await this.createBin(version);
await this.touch();
}
async skipUpdate() {
if (!this.config.binPath) {
const instructions = this.config.scopedEnvVar('UPDATE_INSTRUCTIONS');
if (instructions)
this.warn(instructions);
return 'not updatable';
}
if (this.currentVersion === this.updatedVersion) {
if (this.config.scopedEnvVar('HIDE_UPDATED_MESSAGE'))
return 'done';
return `already on latest version: ${this.currentVersion}`;
}
return false;
}
async determineChannel() {
const channelPath = path.join(this.config.dataDir, 'channel');
if (fs.existsSync(channelPath)) {
const channel = await fs.readFile(channelPath, 'utf8');
return String(channel).trim();
}
return this.config.channel || 'stable';
}
async determineCurrentVersion() {
try {
const currentVersion = await fs.readFile(this.clientBin, 'utf8');
const matches = currentVersion.match(/\.\.[/|\\](.+)[/|\\]bin/);
return matches ? matches[1] : this.config.version;
}
catch (error) {
this.debug(error);
}
return this.config.version;
}
s3ChannelManifestKey(bin, platform, arch, folder) {
let s3SubDir = folder || '';
if (s3SubDir !== '' && s3SubDir.slice(-1) !== '/')
s3SubDir = `${s3SubDir}/`;
return path.join(s3SubDir, 'channels', this.channel, `${bin}-${platform}-${arch}-buildmanifest`);
}
async setChannel() {
const channelPath = path.join(this.config.dataDir, 'channel');
fs.writeFile(channelPath, this.channel, 'utf8');
}
async logChop() {
try {
this.debug('log chop');
const logChopper = require('log-chopper').default;
await logChopper.chop(this.config.errlog);
}
catch (error) {
this.debug(error.message);
}
}
async mtime(f) {
const { mtime } = await fs.stat(f);
return mtime;
}
// when autoupdating, wait until the CLI isn't active
async debounce() |
// removes any unused CLIs
async tidy() {
try {
const root = this.clientRoot;
if (!await fs.pathExists(root))
return;
const files = await util_1.ls(root);
const promises = files.map(async (f) => {
if (['bin', 'current', this.config.version].includes(path.basename(f.path)))
return;
const mtime = f.stat.mtime;
mtime.setHours(mtime.getHours() + (42 * 24));
if (mtime < new Date()) {
await fs.remove(f.path);
}
});
for (const p of promises)
await p; // eslint-disable-line no-await-in-loop
await this.logChop();
}
catch (error) {
cli_ux_1.default.warn(error);
}
}
async touch() {
// touch the client so it won't be tidied up right away
try {
const p = path.join(this.clientRoot, this.config.version);
this.debug('touching client at', p);
if (!await fs.pathExists(p))
return;
await fs.utimes(p, new Date(), new Date());
}
catch (error) {
this.warn(error);
}
}
async reexec() {
cli_ux_1.default.action.stop();
return new Promise((_, reject) => {
this.debug('restarting CLI after update', this.clientBin);
spawn(this.clientBin, ['update'], {
stdio: 'inherit',
env: Object.assign(Object.assign({}, process.env), { [this.config.scopedEnvVarKey('HIDE_UPDATED_MESSAGE')]: '1' }),
})
.on('error', reject)
.on('close', (status) => {
try {
if (status > 0)
this.exit(status);
}
catch (error) {
reject(error);
}
});
});
}
async createBin(version) {
const dst = this.clientBin;
const { bin } = this.config;
const binPathEnvVar = this.config.scopedEnvVarKey('BINPATH');
const redirectedEnvVar = this.config.scopedEnvVarKey('REDIRECTED');
if (this.config.windows) {
const body = `@echo off
setlocal enableextensions
set ${redirectedEnvVar}=1
set ${binPathEnvVar}=%~dp0${bin}
"%~dp0..\\${version}\\bin\\${bin}.cmd" %*
`;
await fs.outputFile(dst, body);
}
else {
/* eslint-disable no-useless-escape */
const body = `#!/usr/bin/env bash
set -e
get_script_dir () {
SOURCE="\${BASH_SOURCE[0]}"
# While $SOURCE is a symlink, resolve it
while [ -h "$SOURCE" ]; do
DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
SOURCE="$( readlink "$SOURCE" )"
# If $SOURCE was a relative symlink (so no "/" as prefix, need to resolve it relative to the symlink base directory
[[ $SOURCE != /* ]] && SOURCE="$DIR/$SOURCE"
done
DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
echo "$DIR"
}
DIR=$(get_script_dir)
${binPathEnvVar}="\$DIR/${bin}" ${redirectedEnvVar}=1 "$DIR/../${version}/bin/${bin}" "$@"
`;
/* eslint-enable no-useless-escape */
await fs.remove(dst);
await fs.outputFile(dst, body);
await fs.chmod(dst, 0o755);
await fs.remove(path.join(this.clientRoot, 'current'));
await fs.symlink(`./${version}`, path.join(this.clientRoot, 'current'));
}
}
async ensureClientDir() {
try {
await fs.mkdirp(this.clientRoot);
}
catch (error) {
if (error.code === 'EEXIST') {
// for some reason the client directory is sometimes a file
// if so, this happens. Delete it and recreate
await fs.remove(this.clientRoot);
await fs.mkdirp(this.clientRoot);
}
else {
throw error;
}
}
}
}
exports.default = UpdateCommand;
UpdateCommand.description = 'update the <%= config.bin %> CLI';
UpdateCommand.args = [{ name: 'channel', optional: true }];
UpdateCommand.flags = {
autoupdate: command_1.flags.boolean({ hidden: true }),
'from-local': command_1.flags.boolean({ description: 'interactively choose an already installed version' }),
};
| {
let output = false;
const lastrunfile = path.join(this.config.cacheDir, 'lastrun');
const m = await this.mtime(lastrunfile);
m.setHours(m.getHours() + 1);
if (m > new Date()) {
const msg = `waiting until ${m.toISOString()} to update`;
if (output) {
this.debug(msg);
}
else {
await cli_ux_1.default.log(msg);
output = true;
}
await util_1.wait(60 * 1000); // wait 1 minute
return this.debounce();
}
cli_ux_1.default.log('time to update');
} | identifier_body |
update.js | "use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const color_1 = require("@oclif/color");
const command_1 = require("@oclif/command");
const cli_ux_1 = require("cli-ux");
const spawn = require("cross-spawn");
const fs = require("fs-extra");
const _ = require("lodash");
const path = require("path");
const tar_1 = require("../tar");
const util_1 = require("../util");
class UpdateCommand extends command_1.default {
constructor() {
super(...arguments);
this.clientRoot = this.config.scopedEnvVar('OCLIF_CLIENT_HOME') || path.join(this.config.dataDir, 'client');
this.clientBin = path.join(this.clientRoot, 'bin', this.config.windows ? `${this.config.bin}.cmd` : this.config.bin);
}
async run() {
const { args, flags } = this.parse(UpdateCommand);
this.autoupdate = Boolean(flags.autoupdate);
if (this.autoupdate)
await this.debounce();
this.channel = args.channel || await this.determineChannel();
if (flags['from-local']) {
await this.ensureClientDir();
this.debug(`Looking for locally installed versions at ${this.clientRoot}`);
// Do not show known non-local version folder names, bin and current.
const versions = fs.readdirSync(this.clientRoot).filter(dirOrFile => dirOrFile !== 'bin' && dirOrFile !== 'current');
if (versions.length === 0)
throw new Error('No locally installed versions found.');
this.log(`Found versions: \n${versions.map(version => ` ${version}`).join('\n')}\n`);
const pinToVersion = await cli_ux_1.default.prompt('Enter a version to update to');
if (!versions.includes(pinToVersion))
throw new Error(`Version ${pinToVersion} not found in the locally installed versions.`);
if (!await fs.pathExists(path.join(this.clientRoot, pinToVersion))) {
throw new Error(`Version ${pinToVersion} is not already installed at ${this.clientRoot}.`);
}
cli_ux_1.default.action.start(`${this.config.name}: Updating CLI`);
this.debug(`switching to existing version ${pinToVersion}`);
this.updateToExistingVersion(pinToVersion);
this.log();
this.log(`Updating to an already installed version will not update the channel. If autoupdate is enabled, the CLI will eventually be updated back to ${this.channel}.`);
}
else {
cli_ux_1.default.action.start(`${this.config.name}: Updating CLI`);
await this.config.runHook('preupdate', { channel: this.channel });
const manifest = await this.fetchManifest();
this.currentVersion = await this.determineCurrentVersion();
this.updatedVersion = manifest.sha ? `${manifest.version}-${manifest.sha}` : manifest.version;
const reason = await this.skipUpdate();
if (reason)
cli_ux_1.default.action.stop(reason || 'done');
else
await this.update(manifest);
this.debug('tidy');
await this.tidy();
await this.config.runHook('update', { channel: this.channel });
}
this.debug('done');
cli_ux_1.default.action.stop();
}
async fetchManifest() {
const http = require('http-call').HTTP;
cli_ux_1.default.action.status = 'fetching manifest';
if (!this.config.scopedEnvVarTrue('USE_LEGACY_UPDATE')) {
try {
const newManifestUrl = this.config.s3Url(this.s3ChannelManifestKey(this.config.bin, this.config.platform, this.config.arch, this.config.pjson.oclif.update.s3.folder));
const { body } = await http.get(newManifestUrl);
if (typeof body === 'string') {
return JSON.parse(body);
}
return body;
}
catch (error) {
this.debug(error.message);
}
}
try {
const url = this.config.s3Url(this.config.s3Key('manifest', {
channel: this.channel,
platform: this.config.platform,
arch: this.config.arch,
}));
const { body } = await http.get(url);
// in case the content-type is not set, parse as a string
// this will happen if uploading without `oclif-dev publish`
if (typeof body === 'string') {
return JSON.parse(body);
}
return body;
}
catch (error) {
if (error.statusCode === 403)
throw new Error(`HTTP 403: Invalid channel ${this.channel}`);
throw error;
}
}
async downloadAndExtract(output, manifest, channel) {
const { version } = manifest;
const filesize = (n) => {
const [num, suffix] = require('filesize')(n, { output: 'array' });
return num.toFixed(1) + ` ${suffix}`;
};
const http = require('http-call').HTTP;
const gzUrl = manifest.gz || this.config.s3Url(this.config.s3Key('versioned', {
version,
channel,
bin: this.config.bin,
platform: this.config.platform,
arch: this.config.arch,
ext: 'gz',
}));
const { response: stream } = await http.stream(gzUrl);
stream.pause();
const baseDir = manifest.baseDir || this.config.s3Key('baseDir', {
version,
channel,
bin: this.config.bin,
platform: this.config.platform,
arch: this.config.arch,
});
const extraction = tar_1.extract(stream, baseDir, output, manifest.sha256gz);
// to-do: use cli.action.type
if (cli_ux_1.default.action.frames) {
// if spinner action
const total = parseInt(stream.headers['content-length'], 10);
let current = 0;
const updateStatus = _.throttle((newStatus) => {
cli_ux_1.default.action.status = newStatus;
}, 250, { leading: true, trailing: false });
stream.on('data', data => {
current += data.length;
updateStatus(`${filesize(current)}/${filesize(total)}`);
});
}
stream.resume();
await extraction;
}
async update(manifest, channel = 'stable') {
const { channel: manifestChannel } = manifest;
if (manifestChannel)
channel = manifestChannel;
cli_ux_1.default.action.start(`${this.config.name}: Updating CLI from ${color_1.default.green(this.currentVersion)} to ${color_1.default.green(this.updatedVersion)}${channel === 'stable' ? '' : ' (' + color_1.default.yellow(channel) + ')'}`);
await this.ensureClientDir();
const output = path.join(this.clientRoot, this.updatedVersion);
if (!await fs.pathExists(output)) {
await this.downloadAndExtract(output, manifest, channel);
}
await this.setChannel();
await this.createBin(this.updatedVersion);
await this.touch();
await this.reexec();
}
async updateToExistingVersion(version) {
await this.createBin(version);
await this.touch();
}
async skipUpdate() {
if (!this.config.binPath) {
const instructions = this.config.scopedEnvVar('UPDATE_INSTRUCTIONS');
if (instructions)
this.warn(instructions);
return 'not updatable';
}
if (this.currentVersion === this.updatedVersion) {
if (this.config.scopedEnvVar('HIDE_UPDATED_MESSAGE'))
return 'done';
return `already on latest version: ${this.currentVersion}`;
}
return false;
}
async determineChannel() {
const channelPath = path.join(this.config.dataDir, 'channel');
if (fs.existsSync(channelPath)) {
const channel = await fs.readFile(channelPath, 'utf8');
return String(channel).trim();
}
return this.config.channel || 'stable';
}
async determineCurrentVersion() {
try {
const currentVersion = await fs.readFile(this.clientBin, 'utf8');
const matches = currentVersion.match(/\.\.[/|\\](.+)[/|\\]bin/);
return matches ? matches[1] : this.config.version;
}
catch (error) {
this.debug(error);
}
return this.config.version;
}
s3ChannelManifestKey(bin, platform, arch, folder) {
let s3SubDir = folder || '';
if (s3SubDir !== '' && s3SubDir.slice(-1) !== '/')
s3SubDir = `${s3SubDir}/`;
return path.join(s3SubDir, 'channels', this.channel, `${bin}-${platform}-${arch}-buildmanifest`);
}
async setChannel() {
const channelPath = path.join(this.config.dataDir, 'channel');
fs.writeFile(channelPath, this.channel, 'utf8');
}
async logChop() {
try {
this.debug('log chop');
const logChopper = require('log-chopper').default;
await logChopper.chop(this.config.errlog);
}
catch (error) {
this.debug(error.message);
}
}
async mtime(f) {
const { mtime } = await fs.stat(f);
return mtime;
}
// when autoupdating, wait until the CLI isn't active
async | () {
let output = false;
const lastrunfile = path.join(this.config.cacheDir, 'lastrun');
const m = await this.mtime(lastrunfile);
m.setHours(m.getHours() + 1);
if (m > new Date()) {
const msg = `waiting until ${m.toISOString()} to update`;
if (output) {
this.debug(msg);
}
else {
await cli_ux_1.default.log(msg);
output = true;
}
await util_1.wait(60 * 1000); // wait 1 minute
return this.debounce();
}
cli_ux_1.default.log('time to update');
}
// removes any unused CLIs
async tidy() {
try {
const root = this.clientRoot;
if (!await fs.pathExists(root))
return;
const files = await util_1.ls(root);
const promises = files.map(async (f) => {
if (['bin', 'current', this.config.version].includes(path.basename(f.path)))
return;
const mtime = f.stat.mtime;
mtime.setHours(mtime.getHours() + (42 * 24));
if (mtime < new Date()) {
await fs.remove(f.path);
}
});
for (const p of promises)
await p; // eslint-disable-line no-await-in-loop
await this.logChop();
}
catch (error) {
cli_ux_1.default.warn(error);
}
}
async touch() {
// touch the client so it won't be tidied up right away
try {
const p = path.join(this.clientRoot, this.config.version);
this.debug('touching client at', p);
if (!await fs.pathExists(p))
return;
await fs.utimes(p, new Date(), new Date());
}
catch (error) {
this.warn(error);
}
}
async reexec() {
cli_ux_1.default.action.stop();
return new Promise((_, reject) => {
this.debug('restarting CLI after update', this.clientBin);
spawn(this.clientBin, ['update'], {
stdio: 'inherit',
env: Object.assign(Object.assign({}, process.env), { [this.config.scopedEnvVarKey('HIDE_UPDATED_MESSAGE')]: '1' }),
})
.on('error', reject)
.on('close', (status) => {
try {
if (status > 0)
this.exit(status);
}
catch (error) {
reject(error);
}
});
});
}
async createBin(version) {
const dst = this.clientBin;
const { bin } = this.config;
const binPathEnvVar = this.config.scopedEnvVarKey('BINPATH');
const redirectedEnvVar = this.config.scopedEnvVarKey('REDIRECTED');
if (this.config.windows) {
const body = `@echo off
setlocal enableextensions
set ${redirectedEnvVar}=1
set ${binPathEnvVar}=%~dp0${bin}
"%~dp0..\\${version}\\bin\\${bin}.cmd" %*
`;
await fs.outputFile(dst, body);
}
else {
/* eslint-disable no-useless-escape */
const body = `#!/usr/bin/env bash
set -e
get_script_dir () {
SOURCE="\${BASH_SOURCE[0]}"
# While $SOURCE is a symlink, resolve it
while [ -h "$SOURCE" ]; do
DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
SOURCE="$( readlink "$SOURCE" )"
# If $SOURCE was a relative symlink (so no "/" as prefix, need to resolve it relative to the symlink base directory
[[ $SOURCE != /* ]] && SOURCE="$DIR/$SOURCE"
done
DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
echo "$DIR"
}
DIR=$(get_script_dir)
${binPathEnvVar}="\$DIR/${bin}" ${redirectedEnvVar}=1 "$DIR/../${version}/bin/${bin}" "$@"
`;
/* eslint-enable no-useless-escape */
await fs.remove(dst);
await fs.outputFile(dst, body);
await fs.chmod(dst, 0o755);
await fs.remove(path.join(this.clientRoot, 'current'));
await fs.symlink(`./${version}`, path.join(this.clientRoot, 'current'));
}
}
async ensureClientDir() {
try {
await fs.mkdirp(this.clientRoot);
}
catch (error) {
if (error.code === 'EEXIST') {
// for some reason the client directory is sometimes a file
// if so, this happens. Delete it and recreate
await fs.remove(this.clientRoot);
await fs.mkdirp(this.clientRoot);
}
else {
throw error;
}
}
}
}
exports.default = UpdateCommand;
UpdateCommand.description = 'update the <%= config.bin %> CLI';
UpdateCommand.args = [{ name: 'channel', optional: true }];
UpdateCommand.flags = {
autoupdate: command_1.flags.boolean({ hidden: true }),
'from-local': command_1.flags.boolean({ description: 'interactively choose an already installed version' }),
};
| debounce | identifier_name |
update.js | "use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const color_1 = require("@oclif/color");
const command_1 = require("@oclif/command");
const cli_ux_1 = require("cli-ux");
const spawn = require("cross-spawn");
const fs = require("fs-extra");
const _ = require("lodash");
const path = require("path");
const tar_1 = require("../tar");
const util_1 = require("../util");
class UpdateCommand extends command_1.default {
constructor() {
super(...arguments);
this.clientRoot = this.config.scopedEnvVar('OCLIF_CLIENT_HOME') || path.join(this.config.dataDir, 'client');
this.clientBin = path.join(this.clientRoot, 'bin', this.config.windows ? `${this.config.bin}.cmd` : this.config.bin);
}
async run() {
const { args, flags } = this.parse(UpdateCommand);
this.autoupdate = Boolean(flags.autoupdate);
if (this.autoupdate)
await this.debounce();
this.channel = args.channel || await this.determineChannel();
if (flags['from-local']) {
await this.ensureClientDir();
this.debug(`Looking for locally installed versions at ${this.clientRoot}`);
// Do not show known non-local version folder names, bin and current.
const versions = fs.readdirSync(this.clientRoot).filter(dirOrFile => dirOrFile !== 'bin' && dirOrFile !== 'current');
if (versions.length === 0)
throw new Error('No locally installed versions found.');
this.log(`Found versions: \n${versions.map(version => ` ${version}`).join('\n')}\n`);
const pinToVersion = await cli_ux_1.default.prompt('Enter a version to update to');
if (!versions.includes(pinToVersion))
throw new Error(`Version ${pinToVersion} not found in the locally installed versions.`);
if (!await fs.pathExists(path.join(this.clientRoot, pinToVersion))) {
throw new Error(`Version ${pinToVersion} is not already installed at ${this.clientRoot}.`);
}
cli_ux_1.default.action.start(`${this.config.name}: Updating CLI`);
this.debug(`switching to existing version ${pinToVersion}`);
this.updateToExistingVersion(pinToVersion);
this.log();
this.log(`Updating to an already installed version will not update the channel. If autoupdate is enabled, the CLI will eventually be updated back to ${this.channel}.`);
}
else {
cli_ux_1.default.action.start(`${this.config.name}: Updating CLI`);
await this.config.runHook('preupdate', { channel: this.channel });
const manifest = await this.fetchManifest();
this.currentVersion = await this.determineCurrentVersion();
this.updatedVersion = manifest.sha ? `${manifest.version}-${manifest.sha}` : manifest.version;
const reason = await this.skipUpdate();
if (reason)
cli_ux_1.default.action.stop(reason || 'done');
else
await this.update(manifest);
this.debug('tidy');
await this.tidy();
await this.config.runHook('update', { channel: this.channel });
}
this.debug('done');
cli_ux_1.default.action.stop();
}
async fetchManifest() {
const http = require('http-call').HTTP;
cli_ux_1.default.action.status = 'fetching manifest';
if (!this.config.scopedEnvVarTrue('USE_LEGACY_UPDATE')) {
try {
const newManifestUrl = this.config.s3Url(this.s3ChannelManifestKey(this.config.bin, this.config.platform, this.config.arch, this.config.pjson.oclif.update.s3.folder));
const { body } = await http.get(newManifestUrl);
if (typeof body === 'string') {
return JSON.parse(body);
}
return body;
}
catch (error) {
this.debug(error.message);
}
}
try {
const url = this.config.s3Url(this.config.s3Key('manifest', {
channel: this.channel,
platform: this.config.platform,
arch: this.config.arch,
}));
const { body } = await http.get(url);
// in case the content-type is not set, parse as a string
// this will happen if uploading without `oclif-dev publish`
if (typeof body === 'string') {
return JSON.parse(body);
}
return body;
}
catch (error) {
if (error.statusCode === 403)
throw new Error(`HTTP 403: Invalid channel ${this.channel}`);
throw error;
}
}
async downloadAndExtract(output, manifest, channel) {
const { version } = manifest;
const filesize = (n) => {
const [num, suffix] = require('filesize')(n, { output: 'array' });
return num.toFixed(1) + ` ${suffix}`;
};
const http = require('http-call').HTTP;
const gzUrl = manifest.gz || this.config.s3Url(this.config.s3Key('versioned', {
version,
channel,
bin: this.config.bin,
platform: this.config.platform,
arch: this.config.arch,
ext: 'gz',
}));
const { response: stream } = await http.stream(gzUrl);
stream.pause();
const baseDir = manifest.baseDir || this.config.s3Key('baseDir', {
version,
channel,
bin: this.config.bin,
platform: this.config.platform,
arch: this.config.arch,
});
const extraction = tar_1.extract(stream, baseDir, output, manifest.sha256gz);
// to-do: use cli.action.type
if (cli_ux_1.default.action.frames) {
// if spinner action
const total = parseInt(stream.headers['content-length'], 10);
let current = 0;
const updateStatus = _.throttle((newStatus) => {
cli_ux_1.default.action.status = newStatus;
}, 250, { leading: true, trailing: false });
stream.on('data', data => {
current += data.length;
updateStatus(`${filesize(current)}/${filesize(total)}`);
});
}
stream.resume();
await extraction;
}
async update(manifest, channel = 'stable') {
const { channel: manifestChannel } = manifest;
if (manifestChannel)
channel = manifestChannel;
cli_ux_1.default.action.start(`${this.config.name}: Updating CLI from ${color_1.default.green(this.currentVersion)} to ${color_1.default.green(this.updatedVersion)}${channel === 'stable' ? '' : ' (' + color_1.default.yellow(channel) + ')'}`);
await this.ensureClientDir();
const output = path.join(this.clientRoot, this.updatedVersion);
if (!await fs.pathExists(output)) {
await this.downloadAndExtract(output, manifest, channel);
}
await this.setChannel();
await this.createBin(this.updatedVersion);
await this.touch();
await this.reexec();
}
async updateToExistingVersion(version) { | const instructions = this.config.scopedEnvVar('UPDATE_INSTRUCTIONS');
if (instructions)
this.warn(instructions);
return 'not updatable';
}
if (this.currentVersion === this.updatedVersion) {
if (this.config.scopedEnvVar('HIDE_UPDATED_MESSAGE'))
return 'done';
return `already on latest version: ${this.currentVersion}`;
}
return false;
}
async determineChannel() {
const channelPath = path.join(this.config.dataDir, 'channel');
if (fs.existsSync(channelPath)) {
const channel = await fs.readFile(channelPath, 'utf8');
return String(channel).trim();
}
return this.config.channel || 'stable';
}
async determineCurrentVersion() {
try {
const currentVersion = await fs.readFile(this.clientBin, 'utf8');
const matches = currentVersion.match(/\.\.[/|\\](.+)[/|\\]bin/);
return matches ? matches[1] : this.config.version;
}
catch (error) {
this.debug(error);
}
return this.config.version;
}
s3ChannelManifestKey(bin, platform, arch, folder) {
let s3SubDir = folder || '';
if (s3SubDir !== '' && s3SubDir.slice(-1) !== '/')
s3SubDir = `${s3SubDir}/`;
return path.join(s3SubDir, 'channels', this.channel, `${bin}-${platform}-${arch}-buildmanifest`);
}
async setChannel() {
const channelPath = path.join(this.config.dataDir, 'channel');
fs.writeFile(channelPath, this.channel, 'utf8');
}
async logChop() {
try {
this.debug('log chop');
const logChopper = require('log-chopper').default;
await logChopper.chop(this.config.errlog);
}
catch (error) {
this.debug(error.message);
}
}
async mtime(f) {
const { mtime } = await fs.stat(f);
return mtime;
}
// when autoupdating, wait until the CLI isn't active
async debounce() {
let output = false;
const lastrunfile = path.join(this.config.cacheDir, 'lastrun');
const m = await this.mtime(lastrunfile);
m.setHours(m.getHours() + 1);
if (m > new Date()) {
const msg = `waiting until ${m.toISOString()} to update`;
if (output) {
this.debug(msg);
}
else {
await cli_ux_1.default.log(msg);
output = true;
}
await util_1.wait(60 * 1000); // wait 1 minute
return this.debounce();
}
cli_ux_1.default.log('time to update');
}
// removes any unused CLIs
async tidy() {
try {
const root = this.clientRoot;
if (!await fs.pathExists(root))
return;
const files = await util_1.ls(root);
const promises = files.map(async (f) => {
if (['bin', 'current', this.config.version].includes(path.basename(f.path)))
return;
const mtime = f.stat.mtime;
mtime.setHours(mtime.getHours() + (42 * 24));
if (mtime < new Date()) {
await fs.remove(f.path);
}
});
for (const p of promises)
await p; // eslint-disable-line no-await-in-loop
await this.logChop();
}
catch (error) {
cli_ux_1.default.warn(error);
}
}
async touch() {
// touch the client so it won't be tidied up right away
try {
const p = path.join(this.clientRoot, this.config.version);
this.debug('touching client at', p);
if (!await fs.pathExists(p))
return;
await fs.utimes(p, new Date(), new Date());
}
catch (error) {
this.warn(error);
}
}
async reexec() {
cli_ux_1.default.action.stop();
return new Promise((_, reject) => {
this.debug('restarting CLI after update', this.clientBin);
spawn(this.clientBin, ['update'], {
stdio: 'inherit',
env: Object.assign(Object.assign({}, process.env), { [this.config.scopedEnvVarKey('HIDE_UPDATED_MESSAGE')]: '1' }),
})
.on('error', reject)
.on('close', (status) => {
try {
if (status > 0)
this.exit(status);
}
catch (error) {
reject(error);
}
});
});
}
async createBin(version) {
const dst = this.clientBin;
const { bin } = this.config;
const binPathEnvVar = this.config.scopedEnvVarKey('BINPATH');
const redirectedEnvVar = this.config.scopedEnvVarKey('REDIRECTED');
if (this.config.windows) {
const body = `@echo off
setlocal enableextensions
set ${redirectedEnvVar}=1
set ${binPathEnvVar}=%~dp0${bin}
"%~dp0..\\${version}\\bin\\${bin}.cmd" %*
`;
await fs.outputFile(dst, body);
}
else {
/* eslint-disable no-useless-escape */
const body = `#!/usr/bin/env bash
set -e
get_script_dir () {
SOURCE="\${BASH_SOURCE[0]}"
# While $SOURCE is a symlink, resolve it
while [ -h "$SOURCE" ]; do
DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
SOURCE="$( readlink "$SOURCE" )"
# If $SOURCE was a relative symlink (so no "/" as prefix, need to resolve it relative to the symlink base directory
[[ $SOURCE != /* ]] && SOURCE="$DIR/$SOURCE"
done
DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
echo "$DIR"
}
DIR=$(get_script_dir)
${binPathEnvVar}="\$DIR/${bin}" ${redirectedEnvVar}=1 "$DIR/../${version}/bin/${bin}" "$@"
`;
/* eslint-enable no-useless-escape */
await fs.remove(dst);
await fs.outputFile(dst, body);
await fs.chmod(dst, 0o755);
await fs.remove(path.join(this.clientRoot, 'current'));
await fs.symlink(`./${version}`, path.join(this.clientRoot, 'current'));
}
}
async ensureClientDir() {
try {
await fs.mkdirp(this.clientRoot);
}
catch (error) {
if (error.code === 'EEXIST') {
// for some reason the client directory is sometimes a file
// if so, this happens. Delete it and recreate
await fs.remove(this.clientRoot);
await fs.mkdirp(this.clientRoot);
}
else {
throw error;
}
}
}
}
exports.default = UpdateCommand;
UpdateCommand.description = 'update the <%= config.bin %> CLI';
UpdateCommand.args = [{ name: 'channel', optional: true }];
UpdateCommand.flags = {
autoupdate: command_1.flags.boolean({ hidden: true }),
'from-local': command_1.flags.boolean({ description: 'interactively choose an already installed version' }),
}; | await this.createBin(version);
await this.touch();
}
async skipUpdate() {
if (!this.config.binPath) { | random_line_split |
update.js | "use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const color_1 = require("@oclif/color");
const command_1 = require("@oclif/command");
const cli_ux_1 = require("cli-ux");
const spawn = require("cross-spawn");
const fs = require("fs-extra");
const _ = require("lodash");
const path = require("path");
const tar_1 = require("../tar");
const util_1 = require("../util");
class UpdateCommand extends command_1.default {
constructor() {
super(...arguments);
this.clientRoot = this.config.scopedEnvVar('OCLIF_CLIENT_HOME') || path.join(this.config.dataDir, 'client');
this.clientBin = path.join(this.clientRoot, 'bin', this.config.windows ? `${this.config.bin}.cmd` : this.config.bin);
}
async run() {
const { args, flags } = this.parse(UpdateCommand);
this.autoupdate = Boolean(flags.autoupdate);
if (this.autoupdate)
await this.debounce();
this.channel = args.channel || await this.determineChannel();
if (flags['from-local']) {
await this.ensureClientDir();
this.debug(`Looking for locally installed versions at ${this.clientRoot}`);
// Do not show known non-local version folder names, bin and current.
const versions = fs.readdirSync(this.clientRoot).filter(dirOrFile => dirOrFile !== 'bin' && dirOrFile !== 'current');
if (versions.length === 0)
throw new Error('No locally installed versions found.');
this.log(`Found versions: \n${versions.map(version => ` ${version}`).join('\n')}\n`);
const pinToVersion = await cli_ux_1.default.prompt('Enter a version to update to');
if (!versions.includes(pinToVersion))
throw new Error(`Version ${pinToVersion} not found in the locally installed versions.`);
if (!await fs.pathExists(path.join(this.clientRoot, pinToVersion))) {
throw new Error(`Version ${pinToVersion} is not already installed at ${this.clientRoot}.`);
}
cli_ux_1.default.action.start(`${this.config.name}: Updating CLI`);
this.debug(`switching to existing version ${pinToVersion}`);
this.updateToExistingVersion(pinToVersion);
this.log();
this.log(`Updating to an already installed version will not update the channel. If autoupdate is enabled, the CLI will eventually be updated back to ${this.channel}.`);
}
else {
cli_ux_1.default.action.start(`${this.config.name}: Updating CLI`);
await this.config.runHook('preupdate', { channel: this.channel });
const manifest = await this.fetchManifest();
this.currentVersion = await this.determineCurrentVersion();
this.updatedVersion = manifest.sha ? `${manifest.version}-${manifest.sha}` : manifest.version;
const reason = await this.skipUpdate();
if (reason)
cli_ux_1.default.action.stop(reason || 'done');
else
await this.update(manifest);
this.debug('tidy');
await this.tidy();
await this.config.runHook('update', { channel: this.channel });
}
this.debug('done');
cli_ux_1.default.action.stop();
}
async fetchManifest() {
const http = require('http-call').HTTP;
cli_ux_1.default.action.status = 'fetching manifest';
if (!this.config.scopedEnvVarTrue('USE_LEGACY_UPDATE')) {
try {
const newManifestUrl = this.config.s3Url(this.s3ChannelManifestKey(this.config.bin, this.config.platform, this.config.arch, this.config.pjson.oclif.update.s3.folder));
const { body } = await http.get(newManifestUrl);
if (typeof body === 'string') {
return JSON.parse(body);
}
return body;
}
catch (error) {
this.debug(error.message);
}
}
try {
const url = this.config.s3Url(this.config.s3Key('manifest', {
channel: this.channel,
platform: this.config.platform,
arch: this.config.arch,
}));
const { body } = await http.get(url);
// in case the content-type is not set, parse as a string
// this will happen if uploading without `oclif-dev publish`
if (typeof body === 'string') {
return JSON.parse(body);
}
return body;
}
catch (error) {
if (error.statusCode === 403)
throw new Error(`HTTP 403: Invalid channel ${this.channel}`);
throw error;
}
}
async downloadAndExtract(output, manifest, channel) {
const { version } = manifest;
const filesize = (n) => {
const [num, suffix] = require('filesize')(n, { output: 'array' });
return num.toFixed(1) + ` ${suffix}`;
};
const http = require('http-call').HTTP;
const gzUrl = manifest.gz || this.config.s3Url(this.config.s3Key('versioned', {
version,
channel,
bin: this.config.bin,
platform: this.config.platform,
arch: this.config.arch,
ext: 'gz',
}));
const { response: stream } = await http.stream(gzUrl);
stream.pause();
const baseDir = manifest.baseDir || this.config.s3Key('baseDir', {
version,
channel,
bin: this.config.bin,
platform: this.config.platform,
arch: this.config.arch,
});
const extraction = tar_1.extract(stream, baseDir, output, manifest.sha256gz);
// to-do: use cli.action.type
if (cli_ux_1.default.action.frames) {
// if spinner action
const total = parseInt(stream.headers['content-length'], 10);
let current = 0;
const updateStatus = _.throttle((newStatus) => {
cli_ux_1.default.action.status = newStatus;
}, 250, { leading: true, trailing: false });
stream.on('data', data => {
current += data.length;
updateStatus(`${filesize(current)}/${filesize(total)}`);
});
}
stream.resume();
await extraction;
}
async update(manifest, channel = 'stable') {
const { channel: manifestChannel } = manifest;
if (manifestChannel)
channel = manifestChannel;
cli_ux_1.default.action.start(`${this.config.name}: Updating CLI from ${color_1.default.green(this.currentVersion)} to ${color_1.default.green(this.updatedVersion)}${channel === 'stable' ? '' : ' (' + color_1.default.yellow(channel) + ')'}`);
await this.ensureClientDir();
const output = path.join(this.clientRoot, this.updatedVersion);
if (!await fs.pathExists(output)) {
await this.downloadAndExtract(output, manifest, channel);
}
await this.setChannel();
await this.createBin(this.updatedVersion);
await this.touch();
await this.reexec();
}
async updateToExistingVersion(version) {
await this.createBin(version);
await this.touch();
}
async skipUpdate() {
if (!this.config.binPath) {
const instructions = this.config.scopedEnvVar('UPDATE_INSTRUCTIONS');
if (instructions)
this.warn(instructions);
return 'not updatable';
}
if (this.currentVersion === this.updatedVersion) {
if (this.config.scopedEnvVar('HIDE_UPDATED_MESSAGE'))
return 'done';
return `already on latest version: ${this.currentVersion}`;
}
return false;
}
async determineChannel() {
const channelPath = path.join(this.config.dataDir, 'channel');
if (fs.existsSync(channelPath)) |
return this.config.channel || 'stable';
}
async determineCurrentVersion() {
try {
const currentVersion = await fs.readFile(this.clientBin, 'utf8');
const matches = currentVersion.match(/\.\.[/|\\](.+)[/|\\]bin/);
return matches ? matches[1] : this.config.version;
}
catch (error) {
this.debug(error);
}
return this.config.version;
}
s3ChannelManifestKey(bin, platform, arch, folder) {
let s3SubDir = folder || '';
if (s3SubDir !== '' && s3SubDir.slice(-1) !== '/')
s3SubDir = `${s3SubDir}/`;
return path.join(s3SubDir, 'channels', this.channel, `${bin}-${platform}-${arch}-buildmanifest`);
}
async setChannel() {
const channelPath = path.join(this.config.dataDir, 'channel');
fs.writeFile(channelPath, this.channel, 'utf8');
}
async logChop() {
try {
this.debug('log chop');
const logChopper = require('log-chopper').default;
await logChopper.chop(this.config.errlog);
}
catch (error) {
this.debug(error.message);
}
}
async mtime(f) {
const { mtime } = await fs.stat(f);
return mtime;
}
// when autoupdating, wait until the CLI isn't active
async debounce() {
let output = false;
const lastrunfile = path.join(this.config.cacheDir, 'lastrun');
const m = await this.mtime(lastrunfile);
m.setHours(m.getHours() + 1);
if (m > new Date()) {
const msg = `waiting until ${m.toISOString()} to update`;
if (output) {
this.debug(msg);
}
else {
await cli_ux_1.default.log(msg);
output = true;
}
await util_1.wait(60 * 1000); // wait 1 minute
return this.debounce();
}
cli_ux_1.default.log('time to update');
}
// removes any unused CLIs
async tidy() {
try {
const root = this.clientRoot;
if (!await fs.pathExists(root))
return;
const files = await util_1.ls(root);
const promises = files.map(async (f) => {
if (['bin', 'current', this.config.version].includes(path.basename(f.path)))
return;
const mtime = f.stat.mtime;
mtime.setHours(mtime.getHours() + (42 * 24));
if (mtime < new Date()) {
await fs.remove(f.path);
}
});
for (const p of promises)
await p; // eslint-disable-line no-await-in-loop
await this.logChop();
}
catch (error) {
cli_ux_1.default.warn(error);
}
}
async touch() {
// touch the client so it won't be tidied up right away
try {
const p = path.join(this.clientRoot, this.config.version);
this.debug('touching client at', p);
if (!await fs.pathExists(p))
return;
await fs.utimes(p, new Date(), new Date());
}
catch (error) {
this.warn(error);
}
}
async reexec() {
cli_ux_1.default.action.stop();
return new Promise((_, reject) => {
this.debug('restarting CLI after update', this.clientBin);
spawn(this.clientBin, ['update'], {
stdio: 'inherit',
env: Object.assign(Object.assign({}, process.env), { [this.config.scopedEnvVarKey('HIDE_UPDATED_MESSAGE')]: '1' }),
})
.on('error', reject)
.on('close', (status) => {
try {
if (status > 0)
this.exit(status);
}
catch (error) {
reject(error);
}
});
});
}
async createBin(version) {
const dst = this.clientBin;
const { bin } = this.config;
const binPathEnvVar = this.config.scopedEnvVarKey('BINPATH');
const redirectedEnvVar = this.config.scopedEnvVarKey('REDIRECTED');
if (this.config.windows) {
const body = `@echo off
setlocal enableextensions
set ${redirectedEnvVar}=1
set ${binPathEnvVar}=%~dp0${bin}
"%~dp0..\\${version}\\bin\\${bin}.cmd" %*
`;
await fs.outputFile(dst, body);
}
else {
/* eslint-disable no-useless-escape */
const body = `#!/usr/bin/env bash
set -e
get_script_dir () {
SOURCE="\${BASH_SOURCE[0]}"
# While $SOURCE is a symlink, resolve it
while [ -h "$SOURCE" ]; do
DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
SOURCE="$( readlink "$SOURCE" )"
# If $SOURCE was a relative symlink (so no "/" as prefix, need to resolve it relative to the symlink base directory
[[ $SOURCE != /* ]] && SOURCE="$DIR/$SOURCE"
done
DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
echo "$DIR"
}
DIR=$(get_script_dir)
${binPathEnvVar}="\$DIR/${bin}" ${redirectedEnvVar}=1 "$DIR/../${version}/bin/${bin}" "$@"
`;
/* eslint-enable no-useless-escape */
await fs.remove(dst);
await fs.outputFile(dst, body);
await fs.chmod(dst, 0o755);
await fs.remove(path.join(this.clientRoot, 'current'));
await fs.symlink(`./${version}`, path.join(this.clientRoot, 'current'));
}
}
async ensureClientDir() {
try {
await fs.mkdirp(this.clientRoot);
}
catch (error) {
if (error.code === 'EEXIST') {
// for some reason the client directory is sometimes a file
// if so, this happens. Delete it and recreate
await fs.remove(this.clientRoot);
await fs.mkdirp(this.clientRoot);
}
else {
throw error;
}
}
}
}
exports.default = UpdateCommand;
UpdateCommand.description = 'update the <%= config.bin %> CLI';
UpdateCommand.args = [{ name: 'channel', optional: true }];
UpdateCommand.flags = {
autoupdate: command_1.flags.boolean({ hidden: true }),
'from-local': command_1.flags.boolean({ description: 'interactively choose an already installed version' }),
};
| {
const channel = await fs.readFile(channelPath, 'utf8');
return String(channel).trim();
} | conditional_block |
rnn.py | # Xingchen Wan 2018 | xingchen.wan@st-annes.ox.ac.uk
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt
class RNN:
"""
Recurrent Neural Network
"""
def __init__(self, training_data, training_label,
test_data, test_label,
**options):
"""
:param training_data: :param training_label: input training data set and label tensors
:param test_data: :param test_label: test data set and ground truth label tensors
:param options: (hyper)parameters of the neural network model. See method unpack_options for details on the
full list of configurable options
"""
self.training_data = np.array(training_data, dtype=np.float32)
self.training_label = np.array(training_label, dtype=np.float32)
self.test_data = np.array(test_data, dtype=np.float32)
self.test_label = np.array(test_label, dtype=np.float32)
# Sanity checks
if self.training_data.shape[0] != self.training_label.shape[0]:
raise ValueError("The length of training_data tensor does not match the training_label tensor!")
if self.test_label.shape[0] != self.test_data.shape[0]:
raise ValueError("The length of test_data tensor does not match the test_label tensor!")
self.options = self.unpack_options(**options)
if self.options['input_dimension'] is None:
# Data dimension of a single sample
self.input_dimensions = 1
else:
self.input_dimensions = self.options['input_dimension']
self.graph = None
self.loss = None
self.optimizer = None
self.predict = None
self.tf_labels = None
self.tf_dataset = None
self.learning_rate = None
# Two lists to store the losses and accuracies during training and testing
self.train_losses = []
self.train_accuracies = []
def | (self):
"""
Set up a computation graph for TensorFlow
:return: None
"""
self.graph = tf.Graph()
model_type = self.options['model_type']
optimiser_selected = self.options['optimizer']
with self.graph.as_default():
self.tf_dataset = tf.placeholder(tf.float32,
shape=(None, self.options['num_steps'], self.input_dimensions))
self.tf_labels = tf.placeholder(tf.float32, shape=(None, self.input_dimensions))
self.learning_rate = tf.placeholder(tf.float32, None, name='learning_rate')
# Forward pass
if model_type == 'rnn':
self.predict = self.rnn_model(self.tf_dataset)
elif model_type == 'lstm':
self.predict = self.lstm_model(self.tf_dataset)
else:
raise NotImplementedError("Unimplemented RNN model keyword")
self.loss = tf.reduce_mean(tf.square(self.predict - self.tf_labels))
if self.options['regularisation_coeff'] > 0.:
# Add in L2 penalty for regularisation if required
penalty = self.options['regularisation_coeff'] * sum(tf.nn.l2_loss(var)
for var in tf.trainable_variables())
self.loss += penalty
if self.options['use_customised_optimizer'] is False:
if optimiser_selected == 'adam':
self.optimizer = tf.train.AdamOptimizer(self.learning_rate)
elif optimiser_selected == 'grad':
self.optimizer = tf.train.GradientDescentOptimizer(self.learning_rate)
elif optimiser_selected == 'ada':
self.optimizer = tf.train.AdagradOptimizer(self.learning_rate)
elif optimiser_selected == 'rms':
self.optimizer = tf.train.RMSPropOptimizer(self.learning_rate)
else:
raise NotImplementedError("Unimplemented built-in optimiser keyword.")
else:
self.optimizer = self.options['customized_optimizer']
self.minimise = self.optimizer.minimize(self.loss)
def run(self):
"""
Create a session according to the computation graph and run the model
:return: None
"""
if self.graph is None:
raise ValueError("Create TensorFlow graph before running a session.")
with tf.Session(graph=self.graph) as session:
tf.global_variables_initializer().run()
# Stochastic gradient descent: train the data with a mini-batch each iteration
batch_size = self.options['batch_size']
for epoch_idx in range(self.options['num_epoch']):
training_epoch = self.training_data[epoch_idx]
label_epoch = self.training_label[epoch_idx]
batch_count = training_epoch.shape[0] // batch_size
learning_rate = self.options['learning_rate']
if self.options['learning_rate_decay_coeff'] > 0.:
learning_rate *= self.options['learning_rate_decay_coeff'] ** \
max(float(epoch_idx + 1 - self.options['init_epoch']), 0.0)
for batch in range(batch_count):
try:
batch_data = training_epoch[batch*batch_size:(batch+1)*batch_size, :, :]
batch_labels = label_epoch[batch*batch_size:(batch+1)*batch_size, :]
except KeyError:
batch_data = training_epoch[batch*batch_size:, :, :]
batch_labels = label_epoch[batch*batch_size:, :]
feed_dict = {
self.tf_dataset: batch_data,
self.tf_labels: batch_labels,
self.learning_rate: learning_rate}
p, l, _, = session.run([self.predict, self.loss, self.minimise], feed_dict=feed_dict)
self.train_losses.append(l)
self.train_accuracies.append(self.get_accuracy(batch_labels, p))
# Finally run the data on test data
final_feed_dict = {
self.tf_dataset: self.test_data,
self.tf_labels: self.test_label,
self.learning_rate: 0.,
}
self.predict, final_loss = session.run([self.predict, self.minimise], feed_dict=final_feed_dict)
return self.predict
# Implementation of RNN and LSTM models
def rnn_model(self, training_data):
num_layer = self.options['num_layer']
num_cells = self.options['num_cells']
if num_layer == 1:
all_cells = tf.nn.rnn_cell.BasicRNNCell(num_cells)
else:
cells = []
for i in range(num_layer):
cell = tf.nn.rnn_cell.BasicRNNCell(num_cells,)
cells.append(cell)
all_cells = tf.nn.rnn_cell.MultiRNNCell(cells, state_is_tuple=True)
outputs, state = tf.nn.dynamic_rnn(all_cells, training_data, dtype=tf.float32)
outputs = tf.transpose(outputs, [1, 0, 2])
output = outputs[-1]
W = tf.Variable(tf.truncated_normal([num_cells, self.input_dimensions]))
b = tf.Variable(tf.random_normal([self.input_dimensions]))
logit = tf.matmul(output, W) + b
return logit
def lstm_model(self, training_data):
num_layer = self.options['num_layer']
num_cells = self.options['num_cells']
if num_layer == 1:
all_cells = tf.nn.rnn_cell.BasicLSTMCell(num_cells)
else:
cells = []
for i in range(num_layer):
cell = tf.nn.rnn_cell.BasicLSTMCell(num_cells, )
cells.append(cell)
all_cells = tf.nn.rnn_cell.MultiRNNCell(cells, state_is_tuple=True)
outputs, state = tf.nn.dynamic_rnn(all_cells, training_data, dtype=tf.float32)
outputs = tf.transpose(outputs, [1, 0, 2])
output = outputs[-1]
W = tf.Variable(tf.truncated_normal([num_cells, self.input_dimensions]))
b = tf.Variable(tf.random_normal([self.input_dimensions]))
logit = tf.matmul(output, W) + b
return logit
# Utility Functions
@staticmethod
def unpack_options(num_cells=24,
learning_rate=1e-3,
learning_rate_decay_coeff=0.,
init_epoch=5,
batch_size=100,
optimizer='rms',
model_type='rnn',
use_customized_optimizer=False,
customized_optimizer=None,
num_layers=1,
regularisation_coeff=0.,
input_dimension=None,
num_steps=30,
num_epoch=1,):
"""
:param num_cells: Number of hidden units per layer in the RNN/LSTM network
:param learning_rate: initial learning rate
:param learning_rate_decay_coeff: the exponentially decaying coefficient of learning rate for each epoch.
:param init_epoch: initial number of epoches where the learning rate will be kept constant. Only relevant if
learning_rate_decay_coeff is a number other than zero.
:param batch_size: batch size
:param optimizer: choice of the chosen optimiser ('rms', 'adam', etc)
:param model_type: 'rnn' or 'lstm'
:param use_customized_optimizer: bool - if True the optimizer object in customized_optimizer
will be used instead.
:param customized_optimizer: optimizer object - if use_customized_optimizer is True, this optimizer will be used
:param num_layers: number of layers of hidden units in the RNN/LSTM
:param regularisation_coeff: regularisation coefficient (a.k.a lambda)
:param input_dimension: input dimension of the each data point. For scalar time series this value is 1
:param num_steps: number of data points of each input sequence
:param num_epoch: number of training epochs
:return:
"""
options = {
'num_cells': num_cells,
'learning_rate': learning_rate,
'learning_rate_decay_coeff': learning_rate_decay_coeff,
'init_epoch': init_epoch,
'batch_size': batch_size,
'optimizer': optimizer,
'model_type': model_type,
'num_layer': num_layers,
'use_customised_optimizer': use_customized_optimizer,
'customized_optimizer': customized_optimizer,
'regularisation_coeff': regularisation_coeff,
"input_dimension": input_dimension,
'num_steps': num_steps,
'num_epoch': num_epoch
}
return options
@staticmethod
def get_accuracy(label, predict, tolerance=1e-2):
"""
:param label: label series
:param predict: predict series
:param tolerance: the maximum error between the element in the label and predict for the prediction to be
declared correct. For classification problems this value should be 0.
:return:
"""
if tolerance == 0:
return (100.0 * np.sum(label == predict)) / predict.shape[0]
else:
correct_idx = (np.abs(label - predict) < tolerance)
return np.sum(correct_idx)
# Plotter Function
def gen_summary(self):
if len(self.train_losses) == 0:
raise ValueError("The model session has not been run!")
plt.subplot(121)
plt.plot(self.train_losses)
plt.ylabel("Loss")
plt.xlabel('Number of batch iterations')
plt.title("Loss vs iterations")
plt.subplot(122)
plt.plot(self.predict, label='Predictions')
plt.plot(self.test_label, label='Test Labels')
plt.title("Test label vs Prediction")
plt.legend()
| create_graph | identifier_name |
rnn.py | # Xingchen Wan 2018 | xingchen.wan@st-annes.ox.ac.uk
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt
class RNN:
"""
Recurrent Neural Network
"""
def __init__(self, training_data, training_label,
test_data, test_label,
**options):
"""
:param training_data: :param training_label: input training data set and label tensors
:param test_data: :param test_label: test data set and ground truth label tensors
:param options: (hyper)parameters of the neural network model. See method unpack_options for details on the
full list of configurable options
"""
self.training_data = np.array(training_data, dtype=np.float32)
self.training_label = np.array(training_label, dtype=np.float32)
self.test_data = np.array(test_data, dtype=np.float32)
self.test_label = np.array(test_label, dtype=np.float32)
# Sanity checks
if self.training_data.shape[0] != self.training_label.shape[0]:
raise ValueError("The length of training_data tensor does not match the training_label tensor!")
if self.test_label.shape[0] != self.test_data.shape[0]:
raise ValueError("The length of test_data tensor does not match the test_label tensor!")
self.options = self.unpack_options(**options)
if self.options['input_dimension'] is None:
# Data dimension of a single sample
self.input_dimensions = 1
else:
self.input_dimensions = self.options['input_dimension']
self.graph = None
self.loss = None
self.optimizer = None
self.predict = None
self.tf_labels = None
self.tf_dataset = None
self.learning_rate = None
# Two lists to store the losses and accuracies during training and testing
self.train_losses = []
self.train_accuracies = []
def create_graph(self):
"""
Set up a computation graph for TensorFlow
:return: None
"""
self.graph = tf.Graph()
model_type = self.options['model_type']
optimiser_selected = self.options['optimizer']
with self.graph.as_default():
self.tf_dataset = tf.placeholder(tf.float32,
shape=(None, self.options['num_steps'], self.input_dimensions))
self.tf_labels = tf.placeholder(tf.float32, shape=(None, self.input_dimensions))
self.learning_rate = tf.placeholder(tf.float32, None, name='learning_rate')
# Forward pass
if model_type == 'rnn':
self.predict = self.rnn_model(self.tf_dataset)
elif model_type == 'lstm':
self.predict = self.lstm_model(self.tf_dataset)
else:
raise NotImplementedError("Unimplemented RNN model keyword")
self.loss = tf.reduce_mean(tf.square(self.predict - self.tf_labels))
if self.options['regularisation_coeff'] > 0.:
# Add in L2 penalty for regularisation if required
penalty = self.options['regularisation_coeff'] * sum(tf.nn.l2_loss(var)
for var in tf.trainable_variables())
self.loss += penalty
if self.options['use_customised_optimizer'] is False:
if optimiser_selected == 'adam':
self.optimizer = tf.train.AdamOptimizer(self.learning_rate)
elif optimiser_selected == 'grad':
self.optimizer = tf.train.GradientDescentOptimizer(self.learning_rate)
elif optimiser_selected == 'ada':
self.optimizer = tf.train.AdagradOptimizer(self.learning_rate)
elif optimiser_selected == 'rms':
self.optimizer = tf.train.RMSPropOptimizer(self.learning_rate)
else:
raise NotImplementedError("Unimplemented built-in optimiser keyword.")
else:
self.optimizer = self.options['customized_optimizer']
self.minimise = self.optimizer.minimize(self.loss)
def run(self):
"""
Create a session according to the computation graph and run the model
:return: None
"""
if self.graph is None:
raise ValueError("Create TensorFlow graph before running a session.")
with tf.Session(graph=self.graph) as session:
tf.global_variables_initializer().run()
# Stochastic gradient descent: train the data with a mini-batch each iteration
batch_size = self.options['batch_size']
for epoch_idx in range(self.options['num_epoch']):
training_epoch = self.training_data[epoch_idx]
label_epoch = self.training_label[epoch_idx]
batch_count = training_epoch.shape[0] // batch_size
learning_rate = self.options['learning_rate']
if self.options['learning_rate_decay_coeff'] > 0.:
learning_rate *= self.options['learning_rate_decay_coeff'] ** \
max(float(epoch_idx + 1 - self.options['init_epoch']), 0.0)
for batch in range(batch_count):
try:
batch_data = training_epoch[batch*batch_size:(batch+1)*batch_size, :, :]
batch_labels = label_epoch[batch*batch_size:(batch+1)*batch_size, :]
except KeyError:
batch_data = training_epoch[batch*batch_size:, :, :]
batch_labels = label_epoch[batch*batch_size:, :]
feed_dict = {
self.tf_dataset: batch_data,
self.tf_labels: batch_labels,
self.learning_rate: learning_rate}
p, l, _, = session.run([self.predict, self.loss, self.minimise], feed_dict=feed_dict)
self.train_losses.append(l)
self.train_accuracies.append(self.get_accuracy(batch_labels, p))
# Finally run the data on test data
final_feed_dict = {
self.tf_dataset: self.test_data,
self.tf_labels: self.test_label,
self.learning_rate: 0.,
}
self.predict, final_loss = session.run([self.predict, self.minimise], feed_dict=final_feed_dict)
return self.predict
# Implementation of RNN and LSTM models
def rnn_model(self, training_data):
num_layer = self.options['num_layer']
num_cells = self.options['num_cells']
if num_layer == 1:
all_cells = tf.nn.rnn_cell.BasicRNNCell(num_cells)
else:
cells = []
for i in range(num_layer):
cell = tf.nn.rnn_cell.BasicRNNCell(num_cells,)
cells.append(cell)
all_cells = tf.nn.rnn_cell.MultiRNNCell(cells, state_is_tuple=True)
outputs, state = tf.nn.dynamic_rnn(all_cells, training_data, dtype=tf.float32)
outputs = tf.transpose(outputs, [1, 0, 2])
output = outputs[-1]
W = tf.Variable(tf.truncated_normal([num_cells, self.input_dimensions]))
b = tf.Variable(tf.random_normal([self.input_dimensions]))
logit = tf.matmul(output, W) + b
return logit
def lstm_model(self, training_data):
num_layer = self.options['num_layer']
num_cells = self.options['num_cells']
if num_layer == 1:
all_cells = tf.nn.rnn_cell.BasicLSTMCell(num_cells)
else:
cells = []
for i in range(num_layer):
cell = tf.nn.rnn_cell.BasicLSTMCell(num_cells, )
cells.append(cell)
all_cells = tf.nn.rnn_cell.MultiRNNCell(cells, state_is_tuple=True)
outputs, state = tf.nn.dynamic_rnn(all_cells, training_data, dtype=tf.float32)
outputs = tf.transpose(outputs, [1, 0, 2])
output = outputs[-1]
W = tf.Variable(tf.truncated_normal([num_cells, self.input_dimensions]))
b = tf.Variable(tf.random_normal([self.input_dimensions]))
logit = tf.matmul(output, W) + b
return logit
# Utility Functions
@staticmethod
def unpack_options(num_cells=24,
learning_rate=1e-3,
learning_rate_decay_coeff=0.,
init_epoch=5,
batch_size=100,
optimizer='rms',
model_type='rnn',
use_customized_optimizer=False,
customized_optimizer=None,
num_layers=1,
regularisation_coeff=0.,
input_dimension=None,
num_steps=30,
num_epoch=1,):
"""
:param num_cells: Number of hidden units per layer in the RNN/LSTM network
:param learning_rate: initial learning rate
:param learning_rate_decay_coeff: the exponentially decaying coefficient of learning rate for each epoch.
:param init_epoch: initial number of epoches where the learning rate will be kept constant. Only relevant if
learning_rate_decay_coeff is a number other than zero.
:param batch_size: batch size
:param optimizer: choice of the chosen optimiser ('rms', 'adam', etc)
:param model_type: 'rnn' or 'lstm'
:param use_customized_optimizer: bool - if True the optimizer object in customized_optimizer
will be used instead.
:param customized_optimizer: optimizer object - if use_customized_optimizer is True, this optimizer will be used
:param num_layers: number of layers of hidden units in the RNN/LSTM
:param regularisation_coeff: regularisation coefficient (a.k.a lambda)
:param input_dimension: input dimension of the each data point. For scalar time series this value is 1
:param num_steps: number of data points of each input sequence
:param num_epoch: number of training epochs
:return:
"""
options = {
'num_cells': num_cells,
'learning_rate': learning_rate,
'learning_rate_decay_coeff': learning_rate_decay_coeff,
'init_epoch': init_epoch,
'batch_size': batch_size,
'optimizer': optimizer,
'model_type': model_type,
'num_layer': num_layers,
'use_customised_optimizer': use_customized_optimizer,
'customized_optimizer': customized_optimizer,
'regularisation_coeff': regularisation_coeff,
"input_dimension": input_dimension,
'num_steps': num_steps,
'num_epoch': num_epoch
}
return options
@staticmethod
def get_accuracy(label, predict, tolerance=1e-2):
"""
:param label: label series
:param predict: predict series
:param tolerance: the maximum error between the element in the label and predict for the prediction to be
declared correct. For classification problems this value should be 0.
:return:
"""
if tolerance == 0:
return (100.0 * np.sum(label == predict)) / predict.shape[0]
else:
correct_idx = (np.abs(label - predict) < tolerance)
return np.sum(correct_idx)
# Plotter Function
def gen_summary(self):
| if len(self.train_losses) == 0:
raise ValueError("The model session has not been run!")
plt.subplot(121)
plt.plot(self.train_losses)
plt.ylabel("Loss")
plt.xlabel('Number of batch iterations')
plt.title("Loss vs iterations")
plt.subplot(122)
plt.plot(self.predict, label='Predictions')
plt.plot(self.test_label, label='Test Labels')
plt.title("Test label vs Prediction")
plt.legend() | identifier_body | |
rnn.py | # Xingchen Wan 2018 | xingchen.wan@st-annes.ox.ac.uk
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt
class RNN:
"""
Recurrent Neural Network
"""
def __init__(self, training_data, training_label,
test_data, test_label,
**options):
"""
:param training_data: :param training_label: input training data set and label tensors
:param test_data: :param test_label: test data set and ground truth label tensors
:param options: (hyper)parameters of the neural network model. See method unpack_options for details on the
full list of configurable options
"""
self.training_data = np.array(training_data, dtype=np.float32)
self.training_label = np.array(training_label, dtype=np.float32)
self.test_data = np.array(test_data, dtype=np.float32)
self.test_label = np.array(test_label, dtype=np.float32)
# Sanity checks
if self.training_data.shape[0] != self.training_label.shape[0]:
raise ValueError("The length of training_data tensor does not match the training_label tensor!")
if self.test_label.shape[0] != self.test_data.shape[0]:
raise ValueError("The length of test_data tensor does not match the test_label tensor!")
self.options = self.unpack_options(**options)
if self.options['input_dimension'] is None:
# Data dimension of a single sample
self.input_dimensions = 1
else:
self.input_dimensions = self.options['input_dimension']
self.graph = None
self.loss = None
self.optimizer = None
self.predict = None
self.tf_labels = None
self.tf_dataset = None
self.learning_rate = None
# Two lists to store the losses and accuracies during training and testing
self.train_losses = []
self.train_accuracies = []
def create_graph(self):
"""
Set up a computation graph for TensorFlow
:return: None
"""
self.graph = tf.Graph()
model_type = self.options['model_type']
optimiser_selected = self.options['optimizer']
with self.graph.as_default():
self.tf_dataset = tf.placeholder(tf.float32,
shape=(None, self.options['num_steps'], self.input_dimensions))
self.tf_labels = tf.placeholder(tf.float32, shape=(None, self.input_dimensions))
self.learning_rate = tf.placeholder(tf.float32, None, name='learning_rate')
# Forward pass
if model_type == 'rnn':
self.predict = self.rnn_model(self.tf_dataset)
elif model_type == 'lstm':
self.predict = self.lstm_model(self.tf_dataset)
else:
raise NotImplementedError("Unimplemented RNN model keyword")
self.loss = tf.reduce_mean(tf.square(self.predict - self.tf_labels))
if self.options['regularisation_coeff'] > 0.:
# Add in L2 penalty for regularisation if required
penalty = self.options['regularisation_coeff'] * sum(tf.nn.l2_loss(var)
for var in tf.trainable_variables())
self.loss += penalty
if self.options['use_customised_optimizer'] is False:
if optimiser_selected == 'adam':
self.optimizer = tf.train.AdamOptimizer(self.learning_rate)
elif optimiser_selected == 'grad':
self.optimizer = tf.train.GradientDescentOptimizer(self.learning_rate)
elif optimiser_selected == 'ada':
self.optimizer = tf.train.AdagradOptimizer(self.learning_rate)
elif optimiser_selected == 'rms':
self.optimizer = tf.train.RMSPropOptimizer(self.learning_rate)
else:
raise NotImplementedError("Unimplemented built-in optimiser keyword.")
else:
self.optimizer = self.options['customized_optimizer']
self.minimise = self.optimizer.minimize(self.loss)
def run(self):
"""
Create a session according to the computation graph and run the model
:return: None
"""
if self.graph is None:
raise ValueError("Create TensorFlow graph before running a session.")
with tf.Session(graph=self.graph) as session:
tf.global_variables_initializer().run()
# Stochastic gradient descent: train the data with a mini-batch each iteration
batch_size = self.options['batch_size']
for epoch_idx in range(self.options['num_epoch']):
training_epoch = self.training_data[epoch_idx]
label_epoch = self.training_label[epoch_idx]
batch_count = training_epoch.shape[0] // batch_size
learning_rate = self.options['learning_rate']
if self.options['learning_rate_decay_coeff'] > 0.:
learning_rate *= self.options['learning_rate_decay_coeff'] ** \
max(float(epoch_idx + 1 - self.options['init_epoch']), 0.0)
for batch in range(batch_count):
try:
batch_data = training_epoch[batch*batch_size:(batch+1)*batch_size, :, :]
batch_labels = label_epoch[batch*batch_size:(batch+1)*batch_size, :]
except KeyError:
batch_data = training_epoch[batch*batch_size:, :, :]
batch_labels = label_epoch[batch*batch_size:, :]
feed_dict = {
self.tf_dataset: batch_data,
self.tf_labels: batch_labels,
self.learning_rate: learning_rate}
p, l, _, = session.run([self.predict, self.loss, self.minimise], feed_dict=feed_dict)
self.train_losses.append(l)
self.train_accuracies.append(self.get_accuracy(batch_labels, p))
# Finally run the data on test data
final_feed_dict = {
self.tf_dataset: self.test_data,
self.tf_labels: self.test_label,
self.learning_rate: 0.,
}
self.predict, final_loss = session.run([self.predict, self.minimise], feed_dict=final_feed_dict)
return self.predict
# Implementation of RNN and LSTM models
def rnn_model(self, training_data):
num_layer = self.options['num_layer']
num_cells = self.options['num_cells']
if num_layer == 1:
all_cells = tf.nn.rnn_cell.BasicRNNCell(num_cells)
else:
cells = []
for i in range(num_layer):
cell = tf.nn.rnn_cell.BasicRNNCell(num_cells,)
cells.append(cell)
all_cells = tf.nn.rnn_cell.MultiRNNCell(cells, state_is_tuple=True)
outputs, state = tf.nn.dynamic_rnn(all_cells, training_data, dtype=tf.float32)
outputs = tf.transpose(outputs, [1, 0, 2])
output = outputs[-1]
W = tf.Variable(tf.truncated_normal([num_cells, self.input_dimensions]))
b = tf.Variable(tf.random_normal([self.input_dimensions]))
logit = tf.matmul(output, W) + b
return logit
def lstm_model(self, training_data):
num_layer = self.options['num_layer']
num_cells = self.options['num_cells']
if num_layer == 1:
all_cells = tf.nn.rnn_cell.BasicLSTMCell(num_cells)
else:
cells = []
for i in range(num_layer):
cell = tf.nn.rnn_cell.BasicLSTMCell(num_cells, )
cells.append(cell)
all_cells = tf.nn.rnn_cell.MultiRNNCell(cells, state_is_tuple=True)
outputs, state = tf.nn.dynamic_rnn(all_cells, training_data, dtype=tf.float32)
outputs = tf.transpose(outputs, [1, 0, 2])
output = outputs[-1]
W = tf.Variable(tf.truncated_normal([num_cells, self.input_dimensions]))
b = tf.Variable(tf.random_normal([self.input_dimensions]))
logit = tf.matmul(output, W) + b
return logit
# Utility Functions
@staticmethod
def unpack_options(num_cells=24,
learning_rate=1e-3,
learning_rate_decay_coeff=0.,
init_epoch=5,
batch_size=100,
optimizer='rms',
model_type='rnn',
use_customized_optimizer=False,
customized_optimizer=None,
num_layers=1,
regularisation_coeff=0.,
input_dimension=None,
num_steps=30,
num_epoch=1,):
"""
:param num_cells: Number of hidden units per layer in the RNN/LSTM network
:param learning_rate: initial learning rate
:param learning_rate_decay_coeff: the exponentially decaying coefficient of learning rate for each epoch.
:param init_epoch: initial number of epoches where the learning rate will be kept constant. Only relevant if
learning_rate_decay_coeff is a number other than zero.
:param batch_size: batch size
:param optimizer: choice of the chosen optimiser ('rms', 'adam', etc)
:param model_type: 'rnn' or 'lstm'
:param use_customized_optimizer: bool - if True the optimizer object in customized_optimizer
will be used instead.
:param customized_optimizer: optimizer object - if use_customized_optimizer is True, this optimizer will be used
:param num_layers: number of layers of hidden units in the RNN/LSTM
:param regularisation_coeff: regularisation coefficient (a.k.a lambda)
:param input_dimension: input dimension of the each data point. For scalar time series this value is 1
:param num_steps: number of data points of each input sequence
:param num_epoch: number of training epochs
:return:
"""
options = {
'num_cells': num_cells,
'learning_rate': learning_rate,
'learning_rate_decay_coeff': learning_rate_decay_coeff,
'init_epoch': init_epoch,
'batch_size': batch_size,
'optimizer': optimizer,
'model_type': model_type,
'num_layer': num_layers,
'use_customised_optimizer': use_customized_optimizer,
'customized_optimizer': customized_optimizer,
'regularisation_coeff': regularisation_coeff,
"input_dimension": input_dimension,
'num_steps': num_steps,
'num_epoch': num_epoch
}
return options
@staticmethod
def get_accuracy(label, predict, tolerance=1e-2):
"""
:param label: label series
:param predict: predict series
:param tolerance: the maximum error between the element in the label and predict for the prediction to be
declared correct. For classification problems this value should be 0.
:return:
"""
if tolerance == 0:
return (100.0 * np.sum(label == predict)) / predict.shape[0]
else:
correct_idx = (np.abs(label - predict) < tolerance)
return np.sum(correct_idx)
# Plotter Function
def gen_summary(self):
if len(self.train_losses) == 0:
raise ValueError("The model session has not been run!")
plt.subplot(121)
plt.plot(self.train_losses)
plt.ylabel("Loss")
plt.xlabel('Number of batch iterations')
plt.title("Loss vs iterations") | plt.plot(self.predict, label='Predictions')
plt.plot(self.test_label, label='Test Labels')
plt.title("Test label vs Prediction")
plt.legend() |
plt.subplot(122) | random_line_split |
rnn.py | # Xingchen Wan 2018 | xingchen.wan@st-annes.ox.ac.uk
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt
class RNN:
"""
Recurrent Neural Network
"""
def __init__(self, training_data, training_label,
test_data, test_label,
**options):
"""
:param training_data: :param training_label: input training data set and label tensors
:param test_data: :param test_label: test data set and ground truth label tensors
:param options: (hyper)parameters of the neural network model. See method unpack_options for details on the
full list of configurable options
"""
self.training_data = np.array(training_data, dtype=np.float32)
self.training_label = np.array(training_label, dtype=np.float32)
self.test_data = np.array(test_data, dtype=np.float32)
self.test_label = np.array(test_label, dtype=np.float32)
# Sanity checks
if self.training_data.shape[0] != self.training_label.shape[0]:
raise ValueError("The length of training_data tensor does not match the training_label tensor!")
if self.test_label.shape[0] != self.test_data.shape[0]:
raise ValueError("The length of test_data tensor does not match the test_label tensor!")
self.options = self.unpack_options(**options)
if self.options['input_dimension'] is None:
# Data dimension of a single sample
self.input_dimensions = 1
else:
self.input_dimensions = self.options['input_dimension']
self.graph = None
self.loss = None
self.optimizer = None
self.predict = None
self.tf_labels = None
self.tf_dataset = None
self.learning_rate = None
# Two lists to store the losses and accuracies during training and testing
self.train_losses = []
self.train_accuracies = []
def create_graph(self):
"""
Set up a computation graph for TensorFlow
:return: None
"""
self.graph = tf.Graph()
model_type = self.options['model_type']
optimiser_selected = self.options['optimizer']
with self.graph.as_default():
self.tf_dataset = tf.placeholder(tf.float32,
shape=(None, self.options['num_steps'], self.input_dimensions))
self.tf_labels = tf.placeholder(tf.float32, shape=(None, self.input_dimensions))
self.learning_rate = tf.placeholder(tf.float32, None, name='learning_rate')
# Forward pass
if model_type == 'rnn':
self.predict = self.rnn_model(self.tf_dataset)
elif model_type == 'lstm':
self.predict = self.lstm_model(self.tf_dataset)
else:
raise NotImplementedError("Unimplemented RNN model keyword")
self.loss = tf.reduce_mean(tf.square(self.predict - self.tf_labels))
if self.options['regularisation_coeff'] > 0.:
# Add in L2 penalty for regularisation if required
|
if self.options['use_customised_optimizer'] is False:
if optimiser_selected == 'adam':
self.optimizer = tf.train.AdamOptimizer(self.learning_rate)
elif optimiser_selected == 'grad':
self.optimizer = tf.train.GradientDescentOptimizer(self.learning_rate)
elif optimiser_selected == 'ada':
self.optimizer = tf.train.AdagradOptimizer(self.learning_rate)
elif optimiser_selected == 'rms':
self.optimizer = tf.train.RMSPropOptimizer(self.learning_rate)
else:
raise NotImplementedError("Unimplemented built-in optimiser keyword.")
else:
self.optimizer = self.options['customized_optimizer']
self.minimise = self.optimizer.minimize(self.loss)
def run(self):
"""
Create a session according to the computation graph and run the model
:return: None
"""
if self.graph is None:
raise ValueError("Create TensorFlow graph before running a session.")
with tf.Session(graph=self.graph) as session:
tf.global_variables_initializer().run()
# Stochastic gradient descent: train the data with a mini-batch each iteration
batch_size = self.options['batch_size']
for epoch_idx in range(self.options['num_epoch']):
training_epoch = self.training_data[epoch_idx]
label_epoch = self.training_label[epoch_idx]
batch_count = training_epoch.shape[0] // batch_size
learning_rate = self.options['learning_rate']
if self.options['learning_rate_decay_coeff'] > 0.:
learning_rate *= self.options['learning_rate_decay_coeff'] ** \
max(float(epoch_idx + 1 - self.options['init_epoch']), 0.0)
for batch in range(batch_count):
try:
batch_data = training_epoch[batch*batch_size:(batch+1)*batch_size, :, :]
batch_labels = label_epoch[batch*batch_size:(batch+1)*batch_size, :]
except KeyError:
batch_data = training_epoch[batch*batch_size:, :, :]
batch_labels = label_epoch[batch*batch_size:, :]
feed_dict = {
self.tf_dataset: batch_data,
self.tf_labels: batch_labels,
self.learning_rate: learning_rate}
p, l, _, = session.run([self.predict, self.loss, self.minimise], feed_dict=feed_dict)
self.train_losses.append(l)
self.train_accuracies.append(self.get_accuracy(batch_labels, p))
# Finally run the data on test data
final_feed_dict = {
self.tf_dataset: self.test_data,
self.tf_labels: self.test_label,
self.learning_rate: 0.,
}
self.predict, final_loss = session.run([self.predict, self.minimise], feed_dict=final_feed_dict)
return self.predict
# Implementation of RNN and LSTM models
def rnn_model(self, training_data):
num_layer = self.options['num_layer']
num_cells = self.options['num_cells']
if num_layer == 1:
all_cells = tf.nn.rnn_cell.BasicRNNCell(num_cells)
else:
cells = []
for i in range(num_layer):
cell = tf.nn.rnn_cell.BasicRNNCell(num_cells,)
cells.append(cell)
all_cells = tf.nn.rnn_cell.MultiRNNCell(cells, state_is_tuple=True)
outputs, state = tf.nn.dynamic_rnn(all_cells, training_data, dtype=tf.float32)
outputs = tf.transpose(outputs, [1, 0, 2])
output = outputs[-1]
W = tf.Variable(tf.truncated_normal([num_cells, self.input_dimensions]))
b = tf.Variable(tf.random_normal([self.input_dimensions]))
logit = tf.matmul(output, W) + b
return logit
def lstm_model(self, training_data):
num_layer = self.options['num_layer']
num_cells = self.options['num_cells']
if num_layer == 1:
all_cells = tf.nn.rnn_cell.BasicLSTMCell(num_cells)
else:
cells = []
for i in range(num_layer):
cell = tf.nn.rnn_cell.BasicLSTMCell(num_cells, )
cells.append(cell)
all_cells = tf.nn.rnn_cell.MultiRNNCell(cells, state_is_tuple=True)
outputs, state = tf.nn.dynamic_rnn(all_cells, training_data, dtype=tf.float32)
outputs = tf.transpose(outputs, [1, 0, 2])
output = outputs[-1]
W = tf.Variable(tf.truncated_normal([num_cells, self.input_dimensions]))
b = tf.Variable(tf.random_normal([self.input_dimensions]))
logit = tf.matmul(output, W) + b
return logit
# Utility Functions
@staticmethod
def unpack_options(num_cells=24,
learning_rate=1e-3,
learning_rate_decay_coeff=0.,
init_epoch=5,
batch_size=100,
optimizer='rms',
model_type='rnn',
use_customized_optimizer=False,
customized_optimizer=None,
num_layers=1,
regularisation_coeff=0.,
input_dimension=None,
num_steps=30,
num_epoch=1,):
"""
:param num_cells: Number of hidden units per layer in the RNN/LSTM network
:param learning_rate: initial learning rate
:param learning_rate_decay_coeff: the exponentially decaying coefficient of learning rate for each epoch.
:param init_epoch: initial number of epoches where the learning rate will be kept constant. Only relevant if
learning_rate_decay_coeff is a number other than zero.
:param batch_size: batch size
:param optimizer: choice of the chosen optimiser ('rms', 'adam', etc)
:param model_type: 'rnn' or 'lstm'
:param use_customized_optimizer: bool - if True the optimizer object in customized_optimizer
will be used instead.
:param customized_optimizer: optimizer object - if use_customized_optimizer is True, this optimizer will be used
:param num_layers: number of layers of hidden units in the RNN/LSTM
:param regularisation_coeff: regularisation coefficient (a.k.a lambda)
:param input_dimension: input dimension of the each data point. For scalar time series this value is 1
:param num_steps: number of data points of each input sequence
:param num_epoch: number of training epochs
:return:
"""
options = {
'num_cells': num_cells,
'learning_rate': learning_rate,
'learning_rate_decay_coeff': learning_rate_decay_coeff,
'init_epoch': init_epoch,
'batch_size': batch_size,
'optimizer': optimizer,
'model_type': model_type,
'num_layer': num_layers,
'use_customised_optimizer': use_customized_optimizer,
'customized_optimizer': customized_optimizer,
'regularisation_coeff': regularisation_coeff,
"input_dimension": input_dimension,
'num_steps': num_steps,
'num_epoch': num_epoch
}
return options
@staticmethod
def get_accuracy(label, predict, tolerance=1e-2):
"""
:param label: label series
:param predict: predict series
:param tolerance: the maximum error between the element in the label and predict for the prediction to be
declared correct. For classification problems this value should be 0.
:return:
"""
if tolerance == 0:
return (100.0 * np.sum(label == predict)) / predict.shape[0]
else:
correct_idx = (np.abs(label - predict) < tolerance)
return np.sum(correct_idx)
# Plotter Function
def gen_summary(self):
if len(self.train_losses) == 0:
raise ValueError("The model session has not been run!")
plt.subplot(121)
plt.plot(self.train_losses)
plt.ylabel("Loss")
plt.xlabel('Number of batch iterations')
plt.title("Loss vs iterations")
plt.subplot(122)
plt.plot(self.predict, label='Predictions')
plt.plot(self.test_label, label='Test Labels')
plt.title("Test label vs Prediction")
plt.legend()
| penalty = self.options['regularisation_coeff'] * sum(tf.nn.l2_loss(var)
for var in tf.trainable_variables())
self.loss += penalty | conditional_block |
main.go | package main
import (
"bufio"
"bytes"
"crypto/sha1"
"encoding/binary"
"flag"
"fmt"
"io/ioutil"
"os"
"os/exec"
"runtime"
"runtime/debug"
"strconv"
"strings"
"github.com/google/pprof/profile"
)
var (
flagInput = flag.String("i", "perf.data", "input perf file")
flagOutput = flag.String("o", "", "output perf file")
flagPid = flag.Int("p", 0, "target pid (default is pid with most samples)")
flagRealtime = flag.Bool("realtime", true, "scale samples to real time")
flagInit = flag.Bool("init", true, "analyze program initialization (before the program spawns first thread)")
flagCpu = flag.Int("cpu", 0, "select only samples happened during that load")
)
type Proc struct {
pid int
n int
run int
multithreaded bool
load map[int]int
samples map[uint64]*Sample
}
type Sample struct {
n int
run int
stack *Stack
}
type Stack struct {
frames []*profile.Location
}
type Frame struct {
pc uint64
fn string
}
func main() {
runtime.GOMAXPROCS(2)
debug.SetGCPercent(1000)
flag.Parse()
f, err := os.Create(*flagOutput)
if err != nil {
failf("failed to open output file: %v", err)
}
defer f.Close()
perf := exec.Command("perf", "script", "-i", *flagInput, "--fields", "pid,tid,cpu,event,trace,ip,sym", "--demangle", "--ns")
perfOut, err := perf.StdoutPipe()
if err != nil {
failf("failed to pipe perf output: %v", err)
}
perfOutErr, err := perf.StderrPipe()
if err != nil {
failf("failed to pipe perf output: %v", err)
}
procs := make(map[int]*Proc)
done := make(chan error)
go func() {
tids := make(map[uint64]uint64)
stacks := make(map[uint64]*Stack)
locs := make(map[uint64]*profile.Location)
funcs := make(map[string]*profile.Function)
s := bufio.NewScanner(perfOut)
getProc := func(pid int) *Proc {
p := procs[pid]
if p == nil {
p = &Proc{
pid: pid,
load: make(map[int]int),
samples: make(map[uint64]*Sample),
}
procs[pid] = p
}
return p
}
for s.Scan() {
ln := s.Text()
if ln == "" || ln[0] == '#' {
continue
}
if strings.Contains(ln, " sched:sched_switch:") {
/* The format is:
0/0 [006] sched:sched_switch: prev_comm=swapper/6 prev_pid=0 prev_prio=120 prev_state=R ==> next_comm=rcuos/2 next_pid=11 next_prio=120
ffffffff817297f0 __schedule
ffffffff8172a109 schedule_preempt_disabled
ffffffff810bf66e cpu_startup_entry
ffffffff8104160d start_secondary
*/
i := 0
for ; ln[i] < '0' || ln[i] > '9'; i++ {
}
pidPos := i
for ; ln[i] >= '0' && ln[i] <= '9'; i++ {
}
pid, err := strconv.ParseUint(ln[pidPos:i], 10, 32)
if err != nil {
fmt.Fprintf(os.Stderr, "failed to parse pid 1: %v\n", ln)
continue
}
if ln[i] != '/' {
fmt.Fprintf(os.Stderr, "failed to parse pid 2: %v\n", ln)
continue
}
i++
tidPos := i
for ; ln[i] >= '0' && ln[i] <= '9'; i++ {
}
tid, err := strconv.ParseUint(ln[tidPos:i], 10, 32)
if err != nil {
fmt.Fprintf(os.Stderr, "failed to parse pid 3: %v\n", ln)
continue
}
tids[tid] = pid
pos := strings.Index(ln, " prev_pid=")
if pos == -1 {
fmt.Fprintf(os.Stderr, "failed to parse pid 4: %v\n", ln)
continue
}
pos += len(" prev_pid=")
i = pos
for ; ln[i] != ' '; i++ {
}
ptid, err := strconv.ParseUint(ln[pos:i], 10, 32)
if err != nil {
fmt.Fprintf(os.Stderr, "failed to parse pid 5: %v\n", ln)
continue
}
ppid := tids[ptid]
if ppid == 0 {
ppid = ptid
}
pos = strings.Index(ln, " next_pid=")
if pos == -1 {
fmt.Fprintf(os.Stderr, "failed to parse pid 6: v\n", ln)
continue
}
pos += len(" next_pid=")
i = pos
for ; ln[i] != ' '; i++ {
}
ntid, err := strconv.ParseUint(ln[pos:i], 10, 32)
if err != nil {
fmt.Fprintf(os.Stderr, "failed to parse pid 7: v\n", ln)
continue
}
npid := tids[ntid]
if npid == 0 {
npid = ntid
}
p := getProc(int(ppid))
if p.run > 0 {
p.run--
}
p = getProc(int(npid))
p.run++
if p.run > 1 {
p.multithreaded = true
}
} else if strings.Contains(ln, " cycles:") {
/* The format is:
0/0 [006] cycles:
ffffffff8104f45a native_write_msr_safe
ffffffff8102fa4c intel_pmu_enable_all
ffffffff81029ca4 x86_pmu_enable
ffffffff81143487 perf_pmu_enable
ffffffff81027d8a x86_pmu_commit_txn
ffffffff81143f00 group_sched_in
ffffffff811443c2 __perf_event_enable
ffffffff81140000 remote_function
ffffffff810dcf60 generic_smp_call_function_single_interrupt
ffffffff81040cd7 smp_call_function_single_interrupt
ffffffff8173759d call_function_single_interrupt
ffffffff815d6c59 cpuidle_idle_call
ffffffff8101d3ee arch_cpu_idle
ffffffff810bf4f5 cpu_startup_entry
ffffffff8104160d start_secondary | for ; ln[i] < '0' || ln[i] > '9'; i++ {
}
pidPos := i
for ; ln[i] >= '0' && ln[i] <= '9'; i++ {
}
pid, err := strconv.ParseUint(ln[pidPos:i], 10, 32)
if err != nil {
fmt.Fprintf(os.Stderr, "failed to parse pid 8: %v '%v'\n", ln, ln[pidPos:i])
continue
}
if ln[i] != '/' {
fmt.Fprintf(os.Stderr, "failed to parse pid 9: %v\n", ln)
continue
}
i++
tidPos := i
for ; ln[i] >= '0' && ln[i] <= '9'; i++ {
}
tid, err := strconv.ParseUint(ln[tidPos:i], 10, 32)
if err != nil {
fmt.Fprintf(os.Stderr, "failed to parse pid 10: %v\n", ln)
continue
}
tids[tid] = pid
if *flagPid != 0 && uint64(*flagPid) != pid {
continue
}
p := getProc(int(pid))
if !*flagInit && !p.multithreaded {
continue
}
run := p.run
if run == 0 {
run = 1 // somehow it happens
}
p.load[run]++
frames := parseStack(s)
frames = append(frames, &Frame{uint64(run), fmt.Sprintf("LOAD %v", run)})
stkHash := hashStack(frames)
stack := stacks[stkHash]
if stack == nil {
stack = &Stack{
frames: make([]*profile.Location, len(frames)),
}
for i, f := range frames {
loc := locs[f.pc]
if loc == nil {
fn := funcs[f.fn]
if fn == nil {
fname := string(append([]byte{}, f.fn...))
fn = &profile.Function{
ID: uint64(len(funcs) + 1),
Name: fname,
SystemName: fname,
}
funcs[fname] = fn
}
loc = &profile.Location{
ID: uint64(len(locs) + 1),
Address: f.pc,
Line: []profile.Line{
profile.Line{
Function: fn,
Line: 1,
},
},
}
locs[f.pc] = loc
}
stack.frames[i] = loc
}
stacks[stkHash] = stack
}
sample := p.samples[stkHash]
if sample == nil {
sample = &Sample{
run: run,
stack: stack,
}
p.samples[stkHash] = sample
}
if sample.run != run {
fmt.Fprintf(os.Stderr, "misaccounted sample: %v -> %v\n", run, sample.run)
}
sample.n++
p.n++
}
}
done <- s.Err()
}()
if err := perf.Start(); err != nil {
failf("failed to start perf: %v", err)
}
errOutput, _ := ioutil.ReadAll(perfOutErr)
if err := perf.Wait(); err != nil {
if false {
failf("perf failed: %v\n%s", err, errOutput)
}
}
if err := <-done; err != nil {
failf("failed to parse perf output: %v", err)
}
var proc *Proc
max := 0
for _, p := range procs {
if max < p.n {
max = p.n
proc = p
}
}
maxRun := 0
for run := range proc.load {
if maxRun < run {
maxRun = run
}
}
if *flagRealtime {
proc.n = 0
proc.load = make(map[int]int)
for _, s := range proc.samples {
s.n = int(float64(s.n) * float64(maxRun) / float64(s.run))
if s.n < 0 {
println("underflow:", s.n, maxRun, s.run, int(float64(s.n)*float64(maxRun)/float64(s.run)))
}
if proc.n > proc.n+s.n {
println("overflow:", proc.n, s.n, s.run)
}
proc.n += s.n
proc.load[s.run] += s.n
}
}
maxN := 0
total := 0
totalLoad := 0
load := make([]int, maxRun+1)
for run, n := range proc.load {
load[run] = n
total += n
totalLoad += run * n
if maxN < n {
maxN = n
}
}
fmt.Printf("pid=%v samples=%v avgload=%.1f\n", proc.pid, proc.n, float64(totalLoad)/float64(total))
for run, n := range load {
if run == 0 {
continue
}
fmt.Printf("%2v [%5.2f%%]: %v\n", run, float64(n)/float64(total)*100, strings.Repeat("*", int(float64(n)/float64(maxN)*100+0.5)))
}
p := &profile.Profile{
Period: 250000,
PeriodType: &profile.ValueType{Type: "cpu", Unit: "nanoseconds"},
SampleType: []*profile.ValueType{
{Type: "samples", Unit: "count"},
{Type: "cpu", Unit: "nanoseconds"},
},
}
locs := make(map[uint64]bool)
funcs := make(map[uint64]bool)
for _, s := range proc.samples {
if *flagCpu > 0 && *flagCpu != s.run {
continue
}
p.Sample = append(p.Sample, &profile.Sample{
Value: []int64{int64(s.n), int64(s.n) * p.Period},
Location: s.stack.frames,
})
for _, loc := range s.stack.frames {
if !locs[loc.ID] {
locs[loc.ID] = true
p.Location = append(p.Location, loc)
}
for _, line := range loc.Line {
if !funcs[line.Function.ID] {
funcs[line.Function.ID] = true
p.Function = append(p.Function, line.Function)
}
}
}
}
buff := bufio.NewWriter(f)
p.Write(buff)
buff.Flush()
f.Close()
exec.Command("go", "tool", "pprof", "-web", "-nodefraction=0.001", "-edgefraction=0.001", f.Name()).Run()
}
func parseStack(s *bufio.Scanner) []*Frame {
var frames []*Frame
for s.Scan() && s.Text() != "" {
ln := s.Text()
i := 0
for ; ln[i] == ' ' || ln[i] == '\t'; i++ {
}
pos := i
for ; ln[i] != ' ' && ln[i] != '\t'; i++ {
}
pc, err := strconv.ParseUint(ln[pos:i], 16, 64)
if err != nil {
break
}
fn := ln[i+1:]
frames = append(frames, &Frame{pc, fn})
}
return frames
}
func hashStack(frames []*Frame) uint64 {
buf := new(bytes.Buffer)
for _, f := range frames {
binary.Write(buf, binary.LittleEndian, f.pc)
}
s := sha1.Sum(buf.Bytes())
r := bytes.NewReader(s[:])
var id uint64
binary.Read(r, binary.LittleEndian, &id)
return id
}
func failf(what string, args ...interface{}) {
fmt.Fprintf(os.Stderr, what+"\n", args...)
os.Exit(1)
} | */
i := 0 | random_line_split |
main.go | package main
import (
"bufio"
"bytes"
"crypto/sha1"
"encoding/binary"
"flag"
"fmt"
"io/ioutil"
"os"
"os/exec"
"runtime"
"runtime/debug"
"strconv"
"strings"
"github.com/google/pprof/profile"
)
var (
flagInput = flag.String("i", "perf.data", "input perf file")
flagOutput = flag.String("o", "", "output perf file")
flagPid = flag.Int("p", 0, "target pid (default is pid with most samples)")
flagRealtime = flag.Bool("realtime", true, "scale samples to real time")
flagInit = flag.Bool("init", true, "analyze program initialization (before the program spawns first thread)")
flagCpu = flag.Int("cpu", 0, "select only samples happened during that load")
)
type Proc struct {
pid int
n int
run int
multithreaded bool
load map[int]int
samples map[uint64]*Sample
}
type Sample struct {
n int
run int
stack *Stack
}
type Stack struct {
frames []*profile.Location
}
type Frame struct {
pc uint64
fn string
}
func main() {
runtime.GOMAXPROCS(2)
debug.SetGCPercent(1000)
flag.Parse()
f, err := os.Create(*flagOutput)
if err != nil {
failf("failed to open output file: %v", err)
}
defer f.Close()
perf := exec.Command("perf", "script", "-i", *flagInput, "--fields", "pid,tid,cpu,event,trace,ip,sym", "--demangle", "--ns")
perfOut, err := perf.StdoutPipe()
if err != nil {
failf("failed to pipe perf output: %v", err)
}
perfOutErr, err := perf.StderrPipe()
if err != nil {
failf("failed to pipe perf output: %v", err)
}
procs := make(map[int]*Proc)
done := make(chan error)
go func() {
tids := make(map[uint64]uint64)
stacks := make(map[uint64]*Stack)
locs := make(map[uint64]*profile.Location)
funcs := make(map[string]*profile.Function)
s := bufio.NewScanner(perfOut)
getProc := func(pid int) *Proc {
p := procs[pid]
if p == nil {
p = &Proc{
pid: pid,
load: make(map[int]int),
samples: make(map[uint64]*Sample),
}
procs[pid] = p
}
return p
}
for s.Scan() {
ln := s.Text()
if ln == "" || ln[0] == '#' {
continue
}
if strings.Contains(ln, " sched:sched_switch:") {
/* The format is:
0/0 [006] sched:sched_switch: prev_comm=swapper/6 prev_pid=0 prev_prio=120 prev_state=R ==> next_comm=rcuos/2 next_pid=11 next_prio=120
ffffffff817297f0 __schedule
ffffffff8172a109 schedule_preempt_disabled
ffffffff810bf66e cpu_startup_entry
ffffffff8104160d start_secondary
*/
i := 0
for ; ln[i] < '0' || ln[i] > '9'; i++ {
}
pidPos := i
for ; ln[i] >= '0' && ln[i] <= '9'; i++ {
}
pid, err := strconv.ParseUint(ln[pidPos:i], 10, 32)
if err != nil {
fmt.Fprintf(os.Stderr, "failed to parse pid 1: %v\n", ln)
continue
}
if ln[i] != '/' {
fmt.Fprintf(os.Stderr, "failed to parse pid 2: %v\n", ln)
continue
}
i++
tidPos := i
for ; ln[i] >= '0' && ln[i] <= '9'; i++ {
}
tid, err := strconv.ParseUint(ln[tidPos:i], 10, 32)
if err != nil {
fmt.Fprintf(os.Stderr, "failed to parse pid 3: %v\n", ln)
continue
}
tids[tid] = pid
pos := strings.Index(ln, " prev_pid=")
if pos == -1 {
fmt.Fprintf(os.Stderr, "failed to parse pid 4: %v\n", ln)
continue
}
pos += len(" prev_pid=")
i = pos
for ; ln[i] != ' '; i++ {
}
ptid, err := strconv.ParseUint(ln[pos:i], 10, 32)
if err != nil {
fmt.Fprintf(os.Stderr, "failed to parse pid 5: %v\n", ln)
continue
}
ppid := tids[ptid]
if ppid == 0 {
ppid = ptid
}
pos = strings.Index(ln, " next_pid=")
if pos == -1 {
fmt.Fprintf(os.Stderr, "failed to parse pid 6: v\n", ln)
continue
}
pos += len(" next_pid=")
i = pos
for ; ln[i] != ' '; i++ {
}
ntid, err := strconv.ParseUint(ln[pos:i], 10, 32)
if err != nil {
fmt.Fprintf(os.Stderr, "failed to parse pid 7: v\n", ln)
continue
}
npid := tids[ntid]
if npid == 0 {
npid = ntid
}
p := getProc(int(ppid))
if p.run > 0 {
p.run--
}
p = getProc(int(npid))
p.run++
if p.run > 1 {
p.multithreaded = true
}
} else if strings.Contains(ln, " cycles:") {
/* The format is:
0/0 [006] cycles:
ffffffff8104f45a native_write_msr_safe
ffffffff8102fa4c intel_pmu_enable_all
ffffffff81029ca4 x86_pmu_enable
ffffffff81143487 perf_pmu_enable
ffffffff81027d8a x86_pmu_commit_txn
ffffffff81143f00 group_sched_in
ffffffff811443c2 __perf_event_enable
ffffffff81140000 remote_function
ffffffff810dcf60 generic_smp_call_function_single_interrupt
ffffffff81040cd7 smp_call_function_single_interrupt
ffffffff8173759d call_function_single_interrupt
ffffffff815d6c59 cpuidle_idle_call
ffffffff8101d3ee arch_cpu_idle
ffffffff810bf4f5 cpu_startup_entry
ffffffff8104160d start_secondary
*/
i := 0
for ; ln[i] < '0' || ln[i] > '9'; i++ {
}
pidPos := i
for ; ln[i] >= '0' && ln[i] <= '9'; i++ {
}
pid, err := strconv.ParseUint(ln[pidPos:i], 10, 32)
if err != nil {
fmt.Fprintf(os.Stderr, "failed to parse pid 8: %v '%v'\n", ln, ln[pidPos:i])
continue
}
if ln[i] != '/' {
fmt.Fprintf(os.Stderr, "failed to parse pid 9: %v\n", ln)
continue
}
i++
tidPos := i
for ; ln[i] >= '0' && ln[i] <= '9'; i++ {
}
tid, err := strconv.ParseUint(ln[tidPos:i], 10, 32)
if err != nil {
fmt.Fprintf(os.Stderr, "failed to parse pid 10: %v\n", ln)
continue
}
tids[tid] = pid
if *flagPid != 0 && uint64(*flagPid) != pid {
continue
}
p := getProc(int(pid))
if !*flagInit && !p.multithreaded {
continue
}
run := p.run
if run == 0 {
run = 1 // somehow it happens
}
p.load[run]++
frames := parseStack(s)
frames = append(frames, &Frame{uint64(run), fmt.Sprintf("LOAD %v", run)})
stkHash := hashStack(frames)
stack := stacks[stkHash]
if stack == nil {
stack = &Stack{
frames: make([]*profile.Location, len(frames)),
}
for i, f := range frames {
loc := locs[f.pc]
if loc == nil {
fn := funcs[f.fn]
if fn == nil {
fname := string(append([]byte{}, f.fn...))
fn = &profile.Function{
ID: uint64(len(funcs) + 1),
Name: fname,
SystemName: fname,
}
funcs[fname] = fn
}
loc = &profile.Location{
ID: uint64(len(locs) + 1),
Address: f.pc,
Line: []profile.Line{
profile.Line{
Function: fn,
Line: 1,
},
},
}
locs[f.pc] = loc
}
stack.frames[i] = loc
}
stacks[stkHash] = stack
}
sample := p.samples[stkHash]
if sample == nil {
sample = &Sample{
run: run,
stack: stack,
}
p.samples[stkHash] = sample
}
if sample.run != run {
fmt.Fprintf(os.Stderr, "misaccounted sample: %v -> %v\n", run, sample.run)
}
sample.n++
p.n++
}
}
done <- s.Err()
}()
if err := perf.Start(); err != nil {
failf("failed to start perf: %v", err)
}
errOutput, _ := ioutil.ReadAll(perfOutErr)
if err := perf.Wait(); err != nil {
if false {
failf("perf failed: %v\n%s", err, errOutput)
}
}
if err := <-done; err != nil {
failf("failed to parse perf output: %v", err)
}
var proc *Proc
max := 0
for _, p := range procs {
if max < p.n {
max = p.n
proc = p
}
}
maxRun := 0
for run := range proc.load {
if maxRun < run {
maxRun = run
}
}
if *flagRealtime {
proc.n = 0
proc.load = make(map[int]int)
for _, s := range proc.samples {
s.n = int(float64(s.n) * float64(maxRun) / float64(s.run))
if s.n < 0 {
println("underflow:", s.n, maxRun, s.run, int(float64(s.n)*float64(maxRun)/float64(s.run)))
}
if proc.n > proc.n+s.n {
println("overflow:", proc.n, s.n, s.run)
}
proc.n += s.n
proc.load[s.run] += s.n
}
}
maxN := 0
total := 0
totalLoad := 0
load := make([]int, maxRun+1)
for run, n := range proc.load {
load[run] = n
total += n
totalLoad += run * n
if maxN < n {
maxN = n
}
}
fmt.Printf("pid=%v samples=%v avgload=%.1f\n", proc.pid, proc.n, float64(totalLoad)/float64(total))
for run, n := range load {
if run == 0 {
continue
}
fmt.Printf("%2v [%5.2f%%]: %v\n", run, float64(n)/float64(total)*100, strings.Repeat("*", int(float64(n)/float64(maxN)*100+0.5)))
}
p := &profile.Profile{
Period: 250000,
PeriodType: &profile.ValueType{Type: "cpu", Unit: "nanoseconds"},
SampleType: []*profile.ValueType{
{Type: "samples", Unit: "count"},
{Type: "cpu", Unit: "nanoseconds"},
},
}
locs := make(map[uint64]bool)
funcs := make(map[uint64]bool)
for _, s := range proc.samples {
if *flagCpu > 0 && *flagCpu != s.run {
continue
}
p.Sample = append(p.Sample, &profile.Sample{
Value: []int64{int64(s.n), int64(s.n) * p.Period},
Location: s.stack.frames,
})
for _, loc := range s.stack.frames {
if !locs[loc.ID] {
locs[loc.ID] = true
p.Location = append(p.Location, loc)
}
for _, line := range loc.Line {
if !funcs[line.Function.ID] {
funcs[line.Function.ID] = true
p.Function = append(p.Function, line.Function)
}
}
}
}
buff := bufio.NewWriter(f)
p.Write(buff)
buff.Flush()
f.Close()
exec.Command("go", "tool", "pprof", "-web", "-nodefraction=0.001", "-edgefraction=0.001", f.Name()).Run()
}
func | (s *bufio.Scanner) []*Frame {
var frames []*Frame
for s.Scan() && s.Text() != "" {
ln := s.Text()
i := 0
for ; ln[i] == ' ' || ln[i] == '\t'; i++ {
}
pos := i
for ; ln[i] != ' ' && ln[i] != '\t'; i++ {
}
pc, err := strconv.ParseUint(ln[pos:i], 16, 64)
if err != nil {
break
}
fn := ln[i+1:]
frames = append(frames, &Frame{pc, fn})
}
return frames
}
func hashStack(frames []*Frame) uint64 {
buf := new(bytes.Buffer)
for _, f := range frames {
binary.Write(buf, binary.LittleEndian, f.pc)
}
s := sha1.Sum(buf.Bytes())
r := bytes.NewReader(s[:])
var id uint64
binary.Read(r, binary.LittleEndian, &id)
return id
}
func failf(what string, args ...interface{}) {
fmt.Fprintf(os.Stderr, what+"\n", args...)
os.Exit(1)
}
| parseStack | identifier_name |
main.go | package main
import (
"bufio"
"bytes"
"crypto/sha1"
"encoding/binary"
"flag"
"fmt"
"io/ioutil"
"os"
"os/exec"
"runtime"
"runtime/debug"
"strconv"
"strings"
"github.com/google/pprof/profile"
)
var (
flagInput = flag.String("i", "perf.data", "input perf file")
flagOutput = flag.String("o", "", "output perf file")
flagPid = flag.Int("p", 0, "target pid (default is pid with most samples)")
flagRealtime = flag.Bool("realtime", true, "scale samples to real time")
flagInit = flag.Bool("init", true, "analyze program initialization (before the program spawns first thread)")
flagCpu = flag.Int("cpu", 0, "select only samples happened during that load")
)
type Proc struct {
pid int
n int
run int
multithreaded bool
load map[int]int
samples map[uint64]*Sample
}
type Sample struct {
n int
run int
stack *Stack
}
type Stack struct {
frames []*profile.Location
}
type Frame struct {
pc uint64
fn string
}
func main() {
runtime.GOMAXPROCS(2)
debug.SetGCPercent(1000)
flag.Parse()
f, err := os.Create(*flagOutput)
if err != nil {
failf("failed to open output file: %v", err)
}
defer f.Close()
perf := exec.Command("perf", "script", "-i", *flagInput, "--fields", "pid,tid,cpu,event,trace,ip,sym", "--demangle", "--ns")
perfOut, err := perf.StdoutPipe()
if err != nil {
failf("failed to pipe perf output: %v", err)
}
perfOutErr, err := perf.StderrPipe()
if err != nil {
failf("failed to pipe perf output: %v", err)
}
procs := make(map[int]*Proc)
done := make(chan error)
go func() {
tids := make(map[uint64]uint64)
stacks := make(map[uint64]*Stack)
locs := make(map[uint64]*profile.Location)
funcs := make(map[string]*profile.Function)
s := bufio.NewScanner(perfOut)
getProc := func(pid int) *Proc {
p := procs[pid]
if p == nil {
p = &Proc{
pid: pid,
load: make(map[int]int),
samples: make(map[uint64]*Sample),
}
procs[pid] = p
}
return p
}
for s.Scan() {
ln := s.Text()
if ln == "" || ln[0] == '#' {
continue
}
if strings.Contains(ln, " sched:sched_switch:") {
/* The format is:
0/0 [006] sched:sched_switch: prev_comm=swapper/6 prev_pid=0 prev_prio=120 prev_state=R ==> next_comm=rcuos/2 next_pid=11 next_prio=120
ffffffff817297f0 __schedule
ffffffff8172a109 schedule_preempt_disabled
ffffffff810bf66e cpu_startup_entry
ffffffff8104160d start_secondary
*/
i := 0
for ; ln[i] < '0' || ln[i] > '9'; i++ {
}
pidPos := i
for ; ln[i] >= '0' && ln[i] <= '9'; i++ {
}
pid, err := strconv.ParseUint(ln[pidPos:i], 10, 32)
if err != nil {
fmt.Fprintf(os.Stderr, "failed to parse pid 1: %v\n", ln)
continue
}
if ln[i] != '/' {
fmt.Fprintf(os.Stderr, "failed to parse pid 2: %v\n", ln)
continue
}
i++
tidPos := i
for ; ln[i] >= '0' && ln[i] <= '9'; i++ {
}
tid, err := strconv.ParseUint(ln[tidPos:i], 10, 32)
if err != nil {
fmt.Fprintf(os.Stderr, "failed to parse pid 3: %v\n", ln)
continue
}
tids[tid] = pid
pos := strings.Index(ln, " prev_pid=")
if pos == -1 {
fmt.Fprintf(os.Stderr, "failed to parse pid 4: %v\n", ln)
continue
}
pos += len(" prev_pid=")
i = pos
for ; ln[i] != ' '; i++ {
}
ptid, err := strconv.ParseUint(ln[pos:i], 10, 32)
if err != nil {
fmt.Fprintf(os.Stderr, "failed to parse pid 5: %v\n", ln)
continue
}
ppid := tids[ptid]
if ppid == 0 {
ppid = ptid
}
pos = strings.Index(ln, " next_pid=")
if pos == -1 {
fmt.Fprintf(os.Stderr, "failed to parse pid 6: v\n", ln)
continue
}
pos += len(" next_pid=")
i = pos
for ; ln[i] != ' '; i++ {
}
ntid, err := strconv.ParseUint(ln[pos:i], 10, 32)
if err != nil {
fmt.Fprintf(os.Stderr, "failed to parse pid 7: v\n", ln)
continue
}
npid := tids[ntid]
if npid == 0 {
npid = ntid
}
p := getProc(int(ppid))
if p.run > 0 {
p.run--
}
p = getProc(int(npid))
p.run++
if p.run > 1 {
p.multithreaded = true
}
} else if strings.Contains(ln, " cycles:") {
/* The format is:
0/0 [006] cycles:
ffffffff8104f45a native_write_msr_safe
ffffffff8102fa4c intel_pmu_enable_all
ffffffff81029ca4 x86_pmu_enable
ffffffff81143487 perf_pmu_enable
ffffffff81027d8a x86_pmu_commit_txn
ffffffff81143f00 group_sched_in
ffffffff811443c2 __perf_event_enable
ffffffff81140000 remote_function
ffffffff810dcf60 generic_smp_call_function_single_interrupt
ffffffff81040cd7 smp_call_function_single_interrupt
ffffffff8173759d call_function_single_interrupt
ffffffff815d6c59 cpuidle_idle_call
ffffffff8101d3ee arch_cpu_idle
ffffffff810bf4f5 cpu_startup_entry
ffffffff8104160d start_secondary
*/
i := 0
for ; ln[i] < '0' || ln[i] > '9'; i++ {
}
pidPos := i
for ; ln[i] >= '0' && ln[i] <= '9'; i++ {
}
pid, err := strconv.ParseUint(ln[pidPos:i], 10, 32)
if err != nil {
fmt.Fprintf(os.Stderr, "failed to parse pid 8: %v '%v'\n", ln, ln[pidPos:i])
continue
}
if ln[i] != '/' {
fmt.Fprintf(os.Stderr, "failed to parse pid 9: %v\n", ln)
continue
}
i++
tidPos := i
for ; ln[i] >= '0' && ln[i] <= '9'; i++ {
}
tid, err := strconv.ParseUint(ln[tidPos:i], 10, 32)
if err != nil {
fmt.Fprintf(os.Stderr, "failed to parse pid 10: %v\n", ln)
continue
}
tids[tid] = pid
if *flagPid != 0 && uint64(*flagPid) != pid {
continue
}
p := getProc(int(pid))
if !*flagInit && !p.multithreaded {
continue
}
run := p.run
if run == 0 {
run = 1 // somehow it happens
}
p.load[run]++
frames := parseStack(s)
frames = append(frames, &Frame{uint64(run), fmt.Sprintf("LOAD %v", run)})
stkHash := hashStack(frames)
stack := stacks[stkHash]
if stack == nil {
stack = &Stack{
frames: make([]*profile.Location, len(frames)),
}
for i, f := range frames {
loc := locs[f.pc]
if loc == nil {
fn := funcs[f.fn]
if fn == nil {
fname := string(append([]byte{}, f.fn...))
fn = &profile.Function{
ID: uint64(len(funcs) + 1),
Name: fname,
SystemName: fname,
}
funcs[fname] = fn
}
loc = &profile.Location{
ID: uint64(len(locs) + 1),
Address: f.pc,
Line: []profile.Line{
profile.Line{
Function: fn,
Line: 1,
},
},
}
locs[f.pc] = loc
}
stack.frames[i] = loc
}
stacks[stkHash] = stack
}
sample := p.samples[stkHash]
if sample == nil {
sample = &Sample{
run: run,
stack: stack,
}
p.samples[stkHash] = sample
}
if sample.run != run |
sample.n++
p.n++
}
}
done <- s.Err()
}()
if err := perf.Start(); err != nil {
failf("failed to start perf: %v", err)
}
errOutput, _ := ioutil.ReadAll(perfOutErr)
if err := perf.Wait(); err != nil {
if false {
failf("perf failed: %v\n%s", err, errOutput)
}
}
if err := <-done; err != nil {
failf("failed to parse perf output: %v", err)
}
var proc *Proc
max := 0
for _, p := range procs {
if max < p.n {
max = p.n
proc = p
}
}
maxRun := 0
for run := range proc.load {
if maxRun < run {
maxRun = run
}
}
if *flagRealtime {
proc.n = 0
proc.load = make(map[int]int)
for _, s := range proc.samples {
s.n = int(float64(s.n) * float64(maxRun) / float64(s.run))
if s.n < 0 {
println("underflow:", s.n, maxRun, s.run, int(float64(s.n)*float64(maxRun)/float64(s.run)))
}
if proc.n > proc.n+s.n {
println("overflow:", proc.n, s.n, s.run)
}
proc.n += s.n
proc.load[s.run] += s.n
}
}
maxN := 0
total := 0
totalLoad := 0
load := make([]int, maxRun+1)
for run, n := range proc.load {
load[run] = n
total += n
totalLoad += run * n
if maxN < n {
maxN = n
}
}
fmt.Printf("pid=%v samples=%v avgload=%.1f\n", proc.pid, proc.n, float64(totalLoad)/float64(total))
for run, n := range load {
if run == 0 {
continue
}
fmt.Printf("%2v [%5.2f%%]: %v\n", run, float64(n)/float64(total)*100, strings.Repeat("*", int(float64(n)/float64(maxN)*100+0.5)))
}
p := &profile.Profile{
Period: 250000,
PeriodType: &profile.ValueType{Type: "cpu", Unit: "nanoseconds"},
SampleType: []*profile.ValueType{
{Type: "samples", Unit: "count"},
{Type: "cpu", Unit: "nanoseconds"},
},
}
locs := make(map[uint64]bool)
funcs := make(map[uint64]bool)
for _, s := range proc.samples {
if *flagCpu > 0 && *flagCpu != s.run {
continue
}
p.Sample = append(p.Sample, &profile.Sample{
Value: []int64{int64(s.n), int64(s.n) * p.Period},
Location: s.stack.frames,
})
for _, loc := range s.stack.frames {
if !locs[loc.ID] {
locs[loc.ID] = true
p.Location = append(p.Location, loc)
}
for _, line := range loc.Line {
if !funcs[line.Function.ID] {
funcs[line.Function.ID] = true
p.Function = append(p.Function, line.Function)
}
}
}
}
buff := bufio.NewWriter(f)
p.Write(buff)
buff.Flush()
f.Close()
exec.Command("go", "tool", "pprof", "-web", "-nodefraction=0.001", "-edgefraction=0.001", f.Name()).Run()
}
func parseStack(s *bufio.Scanner) []*Frame {
var frames []*Frame
for s.Scan() && s.Text() != "" {
ln := s.Text()
i := 0
for ; ln[i] == ' ' || ln[i] == '\t'; i++ {
}
pos := i
for ; ln[i] != ' ' && ln[i] != '\t'; i++ {
}
pc, err := strconv.ParseUint(ln[pos:i], 16, 64)
if err != nil {
break
}
fn := ln[i+1:]
frames = append(frames, &Frame{pc, fn})
}
return frames
}
func hashStack(frames []*Frame) uint64 {
buf := new(bytes.Buffer)
for _, f := range frames {
binary.Write(buf, binary.LittleEndian, f.pc)
}
s := sha1.Sum(buf.Bytes())
r := bytes.NewReader(s[:])
var id uint64
binary.Read(r, binary.LittleEndian, &id)
return id
}
func failf(what string, args ...interface{}) {
fmt.Fprintf(os.Stderr, what+"\n", args...)
os.Exit(1)
}
| {
fmt.Fprintf(os.Stderr, "misaccounted sample: %v -> %v\n", run, sample.run)
} | conditional_block |
main.go | package main
import (
"bufio"
"bytes"
"crypto/sha1"
"encoding/binary"
"flag"
"fmt"
"io/ioutil"
"os"
"os/exec"
"runtime"
"runtime/debug"
"strconv"
"strings"
"github.com/google/pprof/profile"
)
var (
flagInput = flag.String("i", "perf.data", "input perf file")
flagOutput = flag.String("o", "", "output perf file")
flagPid = flag.Int("p", 0, "target pid (default is pid with most samples)")
flagRealtime = flag.Bool("realtime", true, "scale samples to real time")
flagInit = flag.Bool("init", true, "analyze program initialization (before the program spawns first thread)")
flagCpu = flag.Int("cpu", 0, "select only samples happened during that load")
)
type Proc struct {
pid int
n int
run int
multithreaded bool
load map[int]int
samples map[uint64]*Sample
}
type Sample struct {
n int
run int
stack *Stack
}
type Stack struct {
frames []*profile.Location
}
type Frame struct {
pc uint64
fn string
}
func main() {
runtime.GOMAXPROCS(2)
debug.SetGCPercent(1000)
flag.Parse()
f, err := os.Create(*flagOutput)
if err != nil {
failf("failed to open output file: %v", err)
}
defer f.Close()
perf := exec.Command("perf", "script", "-i", *flagInput, "--fields", "pid,tid,cpu,event,trace,ip,sym", "--demangle", "--ns")
perfOut, err := perf.StdoutPipe()
if err != nil {
failf("failed to pipe perf output: %v", err)
}
perfOutErr, err := perf.StderrPipe()
if err != nil {
failf("failed to pipe perf output: %v", err)
}
procs := make(map[int]*Proc)
done := make(chan error)
go func() {
tids := make(map[uint64]uint64)
stacks := make(map[uint64]*Stack)
locs := make(map[uint64]*profile.Location)
funcs := make(map[string]*profile.Function)
s := bufio.NewScanner(perfOut)
getProc := func(pid int) *Proc {
p := procs[pid]
if p == nil {
p = &Proc{
pid: pid,
load: make(map[int]int),
samples: make(map[uint64]*Sample),
}
procs[pid] = p
}
return p
}
for s.Scan() {
ln := s.Text()
if ln == "" || ln[0] == '#' {
continue
}
if strings.Contains(ln, " sched:sched_switch:") {
/* The format is:
0/0 [006] sched:sched_switch: prev_comm=swapper/6 prev_pid=0 prev_prio=120 prev_state=R ==> next_comm=rcuos/2 next_pid=11 next_prio=120
ffffffff817297f0 __schedule
ffffffff8172a109 schedule_preempt_disabled
ffffffff810bf66e cpu_startup_entry
ffffffff8104160d start_secondary
*/
i := 0
for ; ln[i] < '0' || ln[i] > '9'; i++ {
}
pidPos := i
for ; ln[i] >= '0' && ln[i] <= '9'; i++ {
}
pid, err := strconv.ParseUint(ln[pidPos:i], 10, 32)
if err != nil {
fmt.Fprintf(os.Stderr, "failed to parse pid 1: %v\n", ln)
continue
}
if ln[i] != '/' {
fmt.Fprintf(os.Stderr, "failed to parse pid 2: %v\n", ln)
continue
}
i++
tidPos := i
for ; ln[i] >= '0' && ln[i] <= '9'; i++ {
}
tid, err := strconv.ParseUint(ln[tidPos:i], 10, 32)
if err != nil {
fmt.Fprintf(os.Stderr, "failed to parse pid 3: %v\n", ln)
continue
}
tids[tid] = pid
pos := strings.Index(ln, " prev_pid=")
if pos == -1 {
fmt.Fprintf(os.Stderr, "failed to parse pid 4: %v\n", ln)
continue
}
pos += len(" prev_pid=")
i = pos
for ; ln[i] != ' '; i++ {
}
ptid, err := strconv.ParseUint(ln[pos:i], 10, 32)
if err != nil {
fmt.Fprintf(os.Stderr, "failed to parse pid 5: %v\n", ln)
continue
}
ppid := tids[ptid]
if ppid == 0 {
ppid = ptid
}
pos = strings.Index(ln, " next_pid=")
if pos == -1 {
fmt.Fprintf(os.Stderr, "failed to parse pid 6: v\n", ln)
continue
}
pos += len(" next_pid=")
i = pos
for ; ln[i] != ' '; i++ {
}
ntid, err := strconv.ParseUint(ln[pos:i], 10, 32)
if err != nil {
fmt.Fprintf(os.Stderr, "failed to parse pid 7: v\n", ln)
continue
}
npid := tids[ntid]
if npid == 0 {
npid = ntid
}
p := getProc(int(ppid))
if p.run > 0 {
p.run--
}
p = getProc(int(npid))
p.run++
if p.run > 1 {
p.multithreaded = true
}
} else if strings.Contains(ln, " cycles:") {
/* The format is:
0/0 [006] cycles:
ffffffff8104f45a native_write_msr_safe
ffffffff8102fa4c intel_pmu_enable_all
ffffffff81029ca4 x86_pmu_enable
ffffffff81143487 perf_pmu_enable
ffffffff81027d8a x86_pmu_commit_txn
ffffffff81143f00 group_sched_in
ffffffff811443c2 __perf_event_enable
ffffffff81140000 remote_function
ffffffff810dcf60 generic_smp_call_function_single_interrupt
ffffffff81040cd7 smp_call_function_single_interrupt
ffffffff8173759d call_function_single_interrupt
ffffffff815d6c59 cpuidle_idle_call
ffffffff8101d3ee arch_cpu_idle
ffffffff810bf4f5 cpu_startup_entry
ffffffff8104160d start_secondary
*/
i := 0
for ; ln[i] < '0' || ln[i] > '9'; i++ {
}
pidPos := i
for ; ln[i] >= '0' && ln[i] <= '9'; i++ {
}
pid, err := strconv.ParseUint(ln[pidPos:i], 10, 32)
if err != nil {
fmt.Fprintf(os.Stderr, "failed to parse pid 8: %v '%v'\n", ln, ln[pidPos:i])
continue
}
if ln[i] != '/' {
fmt.Fprintf(os.Stderr, "failed to parse pid 9: %v\n", ln)
continue
}
i++
tidPos := i
for ; ln[i] >= '0' && ln[i] <= '9'; i++ {
}
tid, err := strconv.ParseUint(ln[tidPos:i], 10, 32)
if err != nil {
fmt.Fprintf(os.Stderr, "failed to parse pid 10: %v\n", ln)
continue
}
tids[tid] = pid
if *flagPid != 0 && uint64(*flagPid) != pid {
continue
}
p := getProc(int(pid))
if !*flagInit && !p.multithreaded {
continue
}
run := p.run
if run == 0 {
run = 1 // somehow it happens
}
p.load[run]++
frames := parseStack(s)
frames = append(frames, &Frame{uint64(run), fmt.Sprintf("LOAD %v", run)})
stkHash := hashStack(frames)
stack := stacks[stkHash]
if stack == nil {
stack = &Stack{
frames: make([]*profile.Location, len(frames)),
}
for i, f := range frames {
loc := locs[f.pc]
if loc == nil {
fn := funcs[f.fn]
if fn == nil {
fname := string(append([]byte{}, f.fn...))
fn = &profile.Function{
ID: uint64(len(funcs) + 1),
Name: fname,
SystemName: fname,
}
funcs[fname] = fn
}
loc = &profile.Location{
ID: uint64(len(locs) + 1),
Address: f.pc,
Line: []profile.Line{
profile.Line{
Function: fn,
Line: 1,
},
},
}
locs[f.pc] = loc
}
stack.frames[i] = loc
}
stacks[stkHash] = stack
}
sample := p.samples[stkHash]
if sample == nil {
sample = &Sample{
run: run,
stack: stack,
}
p.samples[stkHash] = sample
}
if sample.run != run {
fmt.Fprintf(os.Stderr, "misaccounted sample: %v -> %v\n", run, sample.run)
}
sample.n++
p.n++
}
}
done <- s.Err()
}()
if err := perf.Start(); err != nil {
failf("failed to start perf: %v", err)
}
errOutput, _ := ioutil.ReadAll(perfOutErr)
if err := perf.Wait(); err != nil {
if false {
failf("perf failed: %v\n%s", err, errOutput)
}
}
if err := <-done; err != nil {
failf("failed to parse perf output: %v", err)
}
var proc *Proc
max := 0
for _, p := range procs {
if max < p.n {
max = p.n
proc = p
}
}
maxRun := 0
for run := range proc.load {
if maxRun < run {
maxRun = run
}
}
if *flagRealtime {
proc.n = 0
proc.load = make(map[int]int)
for _, s := range proc.samples {
s.n = int(float64(s.n) * float64(maxRun) / float64(s.run))
if s.n < 0 {
println("underflow:", s.n, maxRun, s.run, int(float64(s.n)*float64(maxRun)/float64(s.run)))
}
if proc.n > proc.n+s.n {
println("overflow:", proc.n, s.n, s.run)
}
proc.n += s.n
proc.load[s.run] += s.n
}
}
maxN := 0
total := 0
totalLoad := 0
load := make([]int, maxRun+1)
for run, n := range proc.load {
load[run] = n
total += n
totalLoad += run * n
if maxN < n {
maxN = n
}
}
fmt.Printf("pid=%v samples=%v avgload=%.1f\n", proc.pid, proc.n, float64(totalLoad)/float64(total))
for run, n := range load {
if run == 0 {
continue
}
fmt.Printf("%2v [%5.2f%%]: %v\n", run, float64(n)/float64(total)*100, strings.Repeat("*", int(float64(n)/float64(maxN)*100+0.5)))
}
p := &profile.Profile{
Period: 250000,
PeriodType: &profile.ValueType{Type: "cpu", Unit: "nanoseconds"},
SampleType: []*profile.ValueType{
{Type: "samples", Unit: "count"},
{Type: "cpu", Unit: "nanoseconds"},
},
}
locs := make(map[uint64]bool)
funcs := make(map[uint64]bool)
for _, s := range proc.samples {
if *flagCpu > 0 && *flagCpu != s.run {
continue
}
p.Sample = append(p.Sample, &profile.Sample{
Value: []int64{int64(s.n), int64(s.n) * p.Period},
Location: s.stack.frames,
})
for _, loc := range s.stack.frames {
if !locs[loc.ID] {
locs[loc.ID] = true
p.Location = append(p.Location, loc)
}
for _, line := range loc.Line {
if !funcs[line.Function.ID] {
funcs[line.Function.ID] = true
p.Function = append(p.Function, line.Function)
}
}
}
}
buff := bufio.NewWriter(f)
p.Write(buff)
buff.Flush()
f.Close()
exec.Command("go", "tool", "pprof", "-web", "-nodefraction=0.001", "-edgefraction=0.001", f.Name()).Run()
}
func parseStack(s *bufio.Scanner) []*Frame {
var frames []*Frame
for s.Scan() && s.Text() != "" {
ln := s.Text()
i := 0
for ; ln[i] == ' ' || ln[i] == '\t'; i++ {
}
pos := i
for ; ln[i] != ' ' && ln[i] != '\t'; i++ {
}
pc, err := strconv.ParseUint(ln[pos:i], 16, 64)
if err != nil {
break
}
fn := ln[i+1:]
frames = append(frames, &Frame{pc, fn})
}
return frames
}
func hashStack(frames []*Frame) uint64 {
buf := new(bytes.Buffer)
for _, f := range frames {
binary.Write(buf, binary.LittleEndian, f.pc)
}
s := sha1.Sum(buf.Bytes())
r := bytes.NewReader(s[:])
var id uint64
binary.Read(r, binary.LittleEndian, &id)
return id
}
func failf(what string, args ...interface{}) | {
fmt.Fprintf(os.Stderr, what+"\n", args...)
os.Exit(1)
} | identifier_body | |
simulation2_01.py | #Built-in Libraries
import math
from random import uniform
from random import randrange
import argparse
import os
import string
import ctypes
#external libraries
import numpy
import ogr
import osr
import matplotlib.pyplot as plt
import matplotlib.animation as animation
import matplotlib.cm as cm
import Image
from matplotlib.image import imread
from mpl_toolkits.basemap import Basemap
from osgeo import gdal
#Constants / Globals
global velocity
global angle
global num
def create_shapefile(xdata, ydata, shapefile):
output = shapefile[0]
driverName = "ESRI Shapefile"
drv = ogr.GetDriverByName(driverName)
ds = drv.CreateDataSource(output)
layer = ds.CreateLayer("point_out",geom_type=ogr.wkbPoint)
#Write fields
field_x = ogr.FieldDefn()
field_x.SetName('xCoord')
field_x.SetType(ogr.OFTReal)
field_x.SetWidth(15)
field_x.SetPrecision(6)
layer.CreateField(field_x)
field_y = ogr.FieldDefn()
field_y.SetName('yCoord')
field_y.SetType(ogr.OFTReal)
field_y.SetWidth(15)
field_y.SetPrecision(6)
layer.CreateField(field_y)
field_itnum = ogr.FieldDefn()
field_itnum.SetName('IterNum')
field_itnum.SetType(ogr.OFTInteger)
field_itnum.SetWidth(10)
field_itnum.SetPrecision(1)
layer.CreateField(field_itnum)
#Iterate over the coordinate arrays and write the row
for index in range(len(xdata+1)):
feat = ogr.Feature(layer.GetLayerDefn())
feat.SetField('IterNum', index)
feat.SetField('xCoord', xdata[index])
feat.SetField('yCoord', ydata[index])
pt = ogr.Geometry(ogr.wkbPoint)
pt.AddPoint_2D(xdata[index], ydata[index])
feat.SetGeometry(pt)
layer.CreateFeature(feat)
spatialRef = osr.SpatialReference()
spatialRef.SetGeogCS("GCS_Moon_2000",
"D_Moon_2000",
"Moon_localradius",1737400.0, 0.0,
"Prime Meridian",0.0,
"Degree",0.0174532925199433 )
#Output the .prj file.
spatialRef.MorphToESRI()
basename = output.split('.')[0]
file = open(basename + ".prj", 'w')
file.write(spatialRef.ExportToWkt())
file.close()
def init(xarr_, yarr_):
global xarr
global yarr
xarr = xarr_
yarr = yarr_
def f(v):
f = v
return f #Using return at the end of a def statement passes the variable back to the calling function.
def random_azimuth():
'''This function returns a random floating point number between 1 and 360'''
#use normalvariate(mean, std) for a gaussian distribution
#A more complex weighting can be achieved, but would need to be modeled.
return uniform(0,360)
def strom_multi(xarr,yarr,i):
for index in range(len(xarr[i])):
#distance and coordinates
distance, angle, elevation = calc_distance()
azimuth = random_azimuth()
Xcoordinate = distance * math.sin(azimuth * math.pi/180) #Conversion to radians
Ycoordinate = distance * math.cos(azimuth* math.pi/180)
#The WAC visible spectrum data is 100mpp or 0.003297790480378 degrees / pixel.
Xcoordinate /= 100
Xcoordinate *= 0.003297790480378
Ycoordinate /= 100
Ycoordinate *= 0.003297790480378
x = float(Xcoordinate)
y = float(Ycoordinate)
#Randomly select the origin point along the linear vent
rand_index = randrange(0,10)
xorigin, yorigin = (xpt[rand_index], ypt[rand_index])
distance = check_topography(dtm, xorigin, yorigin, x+xorigin, y+yorigin, distance,elevation, dev, gtinv)
if distance[1] == True:
x = (distance[0] * math.sin(azimuth * math.pi/180))
y = (distance[0] * math.cos(azimuth* math.pi/180))
#Convert back to degrees
x /= 100
x *= 0.003297790480378
y /= 100
y *= 0.003297790480378
else:
pass
xarr[i][index] = x+xorigin
yarr[i][index] = y+yorigin
def calc_height(distance, ejectionangle, g, ejectionvelocity):
'''
height@x = initital_height + distance(tan(theta)) - ((g(x^2))/(2(v(cos(theta))^2))
initial_height = 0, a planar surface is fit to some reference elevation.
distance is in meters
angle is in radians
'''
trajectory = numpy.linspace(0,distance, distance/100,endpoint=True )
elevation = (trajectory * math.tan(ejectionangle)) - ((g*(trajectory**2)) / (2*((ejectionvelocity * math.cos(ejectionangle))**2)))
return elevation
def calc_distance():
g = 1.6249
#Calculate the ejection angle randomly from a range
ejectionangle = uniform(angle[0],angle[1])
ejectionangle *= math.pi/180 #Convert to radians
theta = math.sin(2*ejectionangle)
#Determine the ejection velocity randomly from a range
ejectionvelocity = uniform(velocity[0], velocity[1])
v2 = ejectionvelocity * ejectionvelocity
#Calculate total theoretical travel distance
distance = (v2 * theta) / g
#Calculate the elevation over a planar surface
elevation = calc_height(distance, ejectionangle, g, ejectionvelocity)
return distance, ejectionangle, elevation
def stromboli2():
'''distance = (velocity^2*(sin(2theta))) / gravity'''
p = 0
while p <= num:
p+=1
g = 1.6249 #Gravitational acceleration on the moon
distance, angle, elevation = calc_distance()
azimuth = random_azimuth()
Xcoordinate = distance * math.sin(azimuth * math.pi/180) #Conversion to radians
Ycoordinate = distance * math.cos(azimuth* math.pi/180)
#The WAC visible spectrum data is 100mpp or 0.003297790480378 degrees / pixel.
Xcoordinate /= 100
Xcoordinate *= 0.003297790480378
Ycoordinate /= 100
Ycoordinate *= 0.003297790480378
yield Xcoordinate, Ycoordinate, angle, azimuth, elevation, distance
if p > num:
done = False
yield done
def check_topography(dtm, originx, originy, destx, desty, distance,elevation, dev, gtinv):
'''
This function checks for impact due to variation in topography by
mimicing the functionality of a topographic profile from polyline.
1. Generate 2 arrays. One of X coordinates and one of Y coordinates
2. Transform these from GCS to PCS
3. Create a new array with the elevations extracted from the dtm
4. Compare it to the analytical trajectory heights
5. If the impact occurs before total potential travel distance,
drop the projectile there. If not, place it at the total possible
travel distance.
Parameters
----------
dtm: A digital terrain model, in 16bit, storing terrain elevation, ndarray
originx: The x coord of the projectile launch, scalar
originy: The y coord of the projectile launch, scalar
destx: The x landing coordinate on a flat plane, scalar
desty: The y landing coordinate on a flat plane, scalar
distance: The total possible distance traveled, scalar
elevation: An array storing heights above 0 of the projectile at some
interval (100m by default)
dev: Geotransform parameters
gtinv: Inverse geotransform parameters
Returns
-------
distance: The new distance the projectile has traveled if it impacts
the topography.
ToDo:
I should grab an elevation line longer than total possible distance. On a planar surface the object lands at total length. On a surface with increasing slope it lands early;later on a downward slope. We do not test for downward slope.
'''
#Extract the elevation from the dtm along the vector
#We add 5km to distance as total theoretical distance may be exceeded by
# downward sloping terrain
xpt = numpy.linspace(originx,destx,num=(distance)/100, endpoint=True)
ypt = numpy.linspace(originy,desty,num=(distance)/100, endpoint=True)
xpt -= geotransform[0]
ypt -= geotransform[3]
xsam = numpy.round_((gtinv[1] *xpt + gtinv[2] * ypt), decimals=0)
ylin = numpy.round_((gtinv[4] *xpt + gtinv[5] * ypt), decimals=0)
try:
dtmvector = dtm[ylin.astype(int),xsam.astype(int)]
#Compute elevation of projectile from a plane at the origin height
dtmvectormin = dtmvector.min()
elevation -= abs(dtmvector[0])
#Compare the projectile elevation to the dtm
dtmvector += abs(dtmvectormin)
elevation -= dtmvector
elevation += dtmvectormin
#Ignore the first 2.5km of ejection distance to ensure that we get a valid elevation check.
impact = numpy.where(elevation[250:] <= 0)
try:
#We are working at 100mpp, so the new distance is index +1
return ((impact[0][0])+1) * 100, True
except:
return False
except:
print "Total distance travel exceeds model dimensions."
def density(m, xdata, ydata, shapefile, ppg):
'''
This function converts the lat/lon of the input map to meters
assuming an equirectangular projection. It then creates a grid at | If the shapefile flag is set to true a shapefile is created by calling
the shapefile function.
Parameters:
m: A basemap mapping object
xdata: An array of x landing coordinates, ndarray
ydata: An array of y landing coordinates, ndarray
shapefile: A flag on whether or not to generate a shapefile
ppg: The number of meters per grid cell * 100
'''
#Convert from DD to m to create a mesh grid.
xmax = (m.xmax) / 0.003297790480378
xmin = (m.xmin) / 0.003297790480378
ymax = (m.ymax) / 0.003297790480378
ymin = (m.ymin) / 0.003297790480378
#Base 100mpp
nx = 1516 / int(ppg)
ny = 2123 / int(ppg)
#Convert to numpy arrays
xdata = numpy.asarray(xdata)
ydata = numpy.asarray(ydata)
#Bin the data & calculate the density
lon_bins = numpy.linspace(xdata.min(), xdata.max(), nx+1)
lat_bins = numpy.linspace(ydata.min(), ydata.max(), ny+1)
density, _, _ = numpy.histogram2d(ydata, xdata, [lat_bins, lon_bins])
#If the user wants a shapefile, pass the numpy arrays
if shapefile != None:
print "Writing model output to a shapefile."
create_shapefile(xdata, ydata, shapefile)
#Create a grid of equally spaced polygons
lon_bins_2d, lat_bins_2d = numpy.meshgrid(lon_bins, lat_bins)
if density.max() <= 3:
maxden = 5
else:
maxden = density.max()
#Mask the density array so that 0 is not plotted
density = numpy.ma.masked_where(density <=0, density)
plt.pcolormesh(lon_bins_2d,lat_bins_2d, density, cmap=cm.RdYlGn_r, vmin=0, vmax=maxden, alpha=0.5)
plt.colorbar(orientation='horizontal')
if __name__ == '__main__':
'''This is the main section which handles program flow.'''
#Parse all of the arguments.
parser = argparse.ArgumentParser(description='Stromboli Ejection Simulation Tool v1')
parser.add_argument('--velocity', '-v', action='store',nargs='+',default=[350,425], dest='velocity', help='A range of ejection velocities. ')
parser.add_argument('--angle','-a', action='store', nargs='+',default=[30, 60], dest='angle', help='Optional: A range of ejection angles. Example: -a 30 60')
parser.add_argument('-i', '--iterations', action='store', type=int, dest='i',default=500, help='The number of ejection iterations to perform.')
parser.add_argument('--shapefile', action='store',nargs=1, default=None, dest='shapefile', help='Use this flag to generate a shapefile, in Moon_2000GCS, of the point data.')
parser.add_argument('--fast', action='store', default=None, nargs=1, dest='multi', help='Use this flag to forgo creating a visualization and just create a shapefile. This uses all available processing cores and is substantially faster.')
parser.add_argument('--ppg', action='store', default=10, dest='ppg', help='The number of pixels per grid cell. Default is 10, which generates a 1000m grid square using 100mpp WAC Vis.')
args = parser.parse_args()
#Assign the user variables to the globals, not great form, but it works.
try:
velocity = [float(args.velocity[0]),float(args.velocity[1])]
except:
velocity = [float(args.velocity[0]),float(args.velocity[0])]
num = args.i
try:
angle = [float(args.angle[0]),float(args.angle[1])]
except:
angle = [float(args.angle[0]),float(args.angle[0])]
#Read the input DTM and get geotransformation info
ds = gdal.Open('wac_dtm.tif')
dtm = ds.ReadAsArray()
geotransform = ds.GetGeoTransform()
dev = (geotransform[1]*geotransform[5] - geotransform[2]*geotransform[4])
gtinv = ( geotransform[0] , geotransform[5]/dev, - geotransform[2]/dev, geotransform[3], - geotransform[4]/dev, geotransform[1]/dev)
#Set the approximate ejection coordinates
xpt = numpy.linspace(-97.788,-97.855,num=10, endpoint=True)
ypt = numpy.linspace(-30.263,-29.851,num=10, endpoint=True)
#If the user wants to process quickly then we omit the visualization and multiprocess to generate a shapefile
if args.multi is not None:
import multiprocessing
cores = multiprocessing.cpu_count()
cores *= 2
step = num // cores
xarray = numpy.frombuffer(multiprocessing.RawArray(ctypes.c_double, num))
yarray = numpy.frombuffer(multiprocessing.RawArray(ctypes.c_double, num))
init(xarray,yarray)
jobs = []
for i in range(0, num+1, step):
p = multiprocessing.Process(target=strom_multi, args=(xarr,yarr,slice(i, i+step)), )
jobs.append(p)
for job in jobs:
job.start()
for job in jobs:
job.join()
create_shapefile(xarr, yarr, args.multi)
else:
#Visualization - setup the plot
fig = plt.figure(figsize=(15,10))
ax1 = fig.add_subplot(1,2,1)
#Points that hit underlying topography
pt, = ax1.plot([], [],'ro', markersize=3)
xdata, ydata = [], []
#Points that travel the total theoretical distance
ptmax, = ax1.plot([],[], 'yo', markersize=3)
datamax, ydatamax = [],[]
#Map
lon_min = -102.5
lon_max = -93.5
lat_min = -34.5
lat_max = -25.5
m = Basemap(projection='cyl',llcrnrlat=lat_min,urcrnrlat=lat_max,
llcrnrlon=lon_min,urcrnrlon=lon_max,resolution=None, rsphere=(1737400.0,1737400.0))
m.drawmeridians(numpy.arange(lon_min+0.5, lon_max+1, 1), labels=[0,0,0,1], fontsize=10)
m.drawparallels(numpy.arange(lat_min+0.5,lat_max+1, 1), labels=[1,0,0,0], fontsize=10)
#Read the input image
im = imread('wac_global_vis.png')
m.imshow(im, origin='upper', cmap=cm.Greys_r, alpha=0.9)
def run(data):
if data == False:
density(m2,xdata, ydata, args.shapefile, args.ppg)
else:
#x, y are in degrees from the false origin 0,0
x,y, angle, azimuth, elevation, distance = data
rand_index = randrange(0,10)
#Randomly select the origin point along the linear vent
xorigin, yorigin = (xpt[rand_index], ypt[rand_index])
distance = check_topography(dtm, xorigin, yorigin, x+xorigin, y+yorigin, distance,elevation, dev, gtinv)
if distance[1] == True:
x = (distance[0] * math.sin(azimuth * math.pi/180))
y = (distance[0] * math.cos(azimuth* math.pi/180))
#Convert back to degrees
x /= 100
x *= 0.003297790480378
y /= 100
y *= 0.003297790480378
xdata.append(x + xorigin)
ydata.append(y + yorigin)
pt.set_data(xdata, ydata)
else:
print 'Particle landed at the maximum theoretical distance.'
#Convert back to degrees
x /= 100
x *= 0.003297790480378
y /= 100
y *= 0.003297790480378
xdatamax.append(x + xorigin)
ydatamax.append(y + yorigin)
#Set the point
ptmax.set_data(xdatamax, ydatamax)
print 'Angle: %f, Azimuth: %f, xCoordinate: %f, yCoordinate: %f' %(angle, azimuth,x+xorigin,y+yorigin)
return pt,
#Plot the volcano as approximated by a linear function.
plt.plot(xpt, ypt, 'bo', markersize=4)
#Run the animation
ani = animation.FuncAnimation(fig, run,stromboli2, interval=1, repeat=False, blit=False)
plt.title('Sample Deposition Using ' + (str(num+2)) + " Points")
ax2 = fig.add_subplot(1,2,2)
gridsize = str(int(args.ppg) * 100)
ax2.set_title('Impacts /' + gridsize + ' m')
m2 = Basemap(projection='cyl',llcrnrlat=lat_min,urcrnrlat=lat_max,
llcrnrlon=lon_min,urcrnrlon=lon_max,resolution=None, rsphere=(1737400.0,1737400.0))
m.drawmeridians(numpy.arange(lon_min+0.5, lon_max+1, 1), labels=[0,0,0,1], fontsize=10)
m.drawparallels(numpy.arange(lat_min+0.5,lat_max+1, 1), labels=[1,0,0,0], fontsize=10)
m2.imshow(im, origin='upper', cmap=cm.Greys_r)
plt.show()
#Save the animation
#ani.save('simulation.mp4', fps=10) | 100mpp, bins the input data into the grid (density) and creates a
histogram. Finally, a mesh grid is created and the histogram is
plotted in 2D over the basemap.
| random_line_split |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.