file_name large_stringlengths 4 140 | prefix large_stringlengths 0 39k | suffix large_stringlengths 0 36.1k | middle large_stringlengths 0 29.4k | fim_type large_stringclasses 4
values |
|---|---|---|---|---|
dx.aspnet.mvc.js | /*!
* DevExtreme (dx.aspnet.mvc.js)
* Version: 20.1.8 (build 20303-1716)
* Build date: Thu Oct 29 2020
*
* Copyright (c) 2012 - 2020 Developer Express Inc. ALL RIGHTS RESERVED
* Read about DevExtreme licensing here: https://js.devexpress.com/Licensing/
*/
! function(factory) {
if ("function" === typeof define && define.amd) {
define(function(require, exports, module) {
module.exports = factory(require("jquery"), require("./core/templates/template_engine_registry").setTemplateEngine, require("./core/templates/template_base").renderedCallbacks, require("./core/guid"), require("./ui/validation_engine"), require("./core/utils/iterator"), require("./core/utils/dom").extractTemplateMarkup, require("./core/utils/string").encodeHtml, require("./core/utils/ajax"), require("./core/utils/console"))
})
} else {
DevExpress.aspnet = factory(window.jQuery, DevExpress.setTemplateEngine, DevExpress.templateRendered, DevExpress.data.Guid, DevExpress.validationEngine, DevExpress.utils.iterator, DevExpress.utils.dom.extractTemplateMarkup, DevExpress.utils.string.encodeHtml, DevExpress.utils.ajax, DevExpress.utils.console)
}
}(function($, setTemplateEngine, templateRendered, Guid, validationEngine, iteratorUtils, extractTemplateMarkup, encodeHtml, ajax, console) {
var templateCompiler = createTemplateCompiler();
var pendingCreateComponentRoutines = [];
var enableAlternativeTemplateTags = true;
var warnBug17028 = false;
function createTemplateCompiler() {
var OPEN_TAG = "<%",
CLOSE_TAG = "%>",
ENCODE_QUALIFIER = "-",
INTERPOLATE_QUALIFIER = "=";
var EXTENDED_OPEN_TAG = /[<[]%/g,
EXTENDED_CLOSE_TAG = /%[>\]]/g;
function acceptText(bag, text) {
if (text) {
bag.push("_.push(", JSON.stringify(text), ");")
}
}
function acceptCode(bag, code) {
var encode = code.charAt(0) === ENCODE_QUALIFIER,
value = code.substr(1),
interpolate = code.charAt(0) === INTERPOLATE_QUALIFIER;
if (encode || interpolate) {
bag.push("_.push(");
bag.push(encode ? "arguments[1](" + value + ")" : value);
bag.push(");")
} else {
bag.push(code + "\n")
}
}
return function(text) {
var bag = ["var _ = [];", "with(obj||{}) {"],
chunks = text.split(enableAlternativeTemplateTags ? EXTENDED_OPEN_TAG : OPEN_TAG);
if (warnBug17028 && chunks.length > 1) {
if (text.indexOf(OPEN_TAG) > -1) {
console.logger.warn("Please use an alternative template syntax: https://community.devexpress.com/blogs/aspnet/archive/2020/01/29/asp-net-core-new-syntax-to-fix-razor-issue.aspx");
warnBug17028 = false
}
}
acceptText(bag, chunks.shift());
for (var i = 0; i < chunks.length; i++) {
var tmp = chunks[i].split(enableAlternativeTemplateTags ? EXTENDED_CLOSE_TAG : CLOSE_TAG);
if (2 !== tmp.length) {
throw "Template syntax error"
}
acceptCode(bag, tmp[0]);
acceptText(bag, tmp[1])
}
bag.push("}", "return _.join('')");
return new Function("obj", bag.join(""))
}
}
function createTemplateEngine() {
return {
compile: function(element) {
return templateCompiler(extractTemplateMarkup(element))
},
render: function(template, data) {
var html = template(data, encodeHtml);
var dxMvcExtensionsObj = window.MVCx;
if (dxMvcExtensionsObj && !dxMvcExtensionsObj.isDXScriptInitializedOnLoad) {
html = html.replace(/(<script[^>]+)id="dxss_.+?"/g, "$1")
}
return html
}
}
}
function getValidationSummary(validationGroup) {
var result;
$(".dx-validationsummary").each(function(_, element) {
var summary = $(element).data("dxValidationSummary");
if (summary && summary.option("validationGroup") === validationGroup) {
result = summary;
return false
}
});
return result
}
function createValidationSummaryItemsFromValidators(validators, editorNames) |
function createComponent(name, options, id, validatorOptions) {
var selector = "#" + String(id).replace(/[^\w-]/g, "\\$&");
pendingCreateComponentRoutines.push(function() {
var $element = $(selector);
if ($element.length) {
var $component = $(selector)[name](options);
if ($.isPlainObject(validatorOptions)) {
$component.dxValidator(validatorOptions)
}
return true
}
return false
})
}
templateRendered.add(function() {
var snapshot = pendingCreateComponentRoutines.slice();
var leftover = [];
pendingCreateComponentRoutines = [];
snapshot.forEach(function(func) {
if (!func()) {
leftover.push(func)
}
});
pendingCreateComponentRoutines = pendingCreateComponentRoutines.concat(leftover)
});
return {
createComponent: createComponent,
renderComponent: function(name, options, id, validatorOptions) {
id = id || "dx-" + new Guid;
createComponent(name, options, id, validatorOptions);
return '<div id="' + id + '"></div>'
},
getEditorValue: function(inputName) {
var $widget = $("input[name='" + inputName + "']").closest(".dx-widget");
if ($widget.length) {
var dxComponents = $widget.data("dxComponents"),
widget = $widget.data(dxComponents[0]);
if (widget) {
return widget.option("value")
}
}
},
setTemplateEngine: function() {
if (setTemplateEngine) {
setTemplateEngine(createTemplateEngine())
}
},
enableAlternativeTemplateTags: function(value) {
enableAlternativeTemplateTags = value
},
warnBug17028: function() {
warnBug17028 = true
},
createValidationSummaryItems: function(validationGroup, editorNames) {
var groupConfig, items, summary = getValidationSummary(validationGroup);
if (summary) {
groupConfig = validationEngine.getGroupConfig(validationGroup);
if (groupConfig) {
items = createValidationSummaryItemsFromValidators(groupConfig.validators, editorNames);
items.length && summary.option("items", items)
}
}
},
sendValidationRequest: function(propertyName, propertyValue, url, method) {
var d = $.Deferred();
var data = {};
data[propertyName] = propertyValue;
ajax.sendRequest({
url: url,
dataType: "json",
method: method || "GET",
data: data
}).then(function(response) {
if ("string" === typeof response) {
d.resolve({
isValid: false,
message: response
})
} else {
d.resolve(response)
}
}, function(xhr) {
d.reject({
isValid: false,
message: xhr.responseText
})
});
return d.promise()
}
}
});
| {
var items = [];
iteratorUtils.each(validators, function(_, validator) {
var widget = validator.$element().data("dx-validation-target");
if (widget && $.inArray(widget.option("name"), editorNames) > -1) {
items.push({
text: widget.option("validationError.message"),
validator: validator
})
}
});
return items
} | identifier_body |
dx.aspnet.mvc.js | /*!
* DevExtreme (dx.aspnet.mvc.js)
* Version: 20.1.8 (build 20303-1716)
* Build date: Thu Oct 29 2020
*
* Copyright (c) 2012 - 2020 Developer Express Inc. ALL RIGHTS RESERVED
* Read about DevExtreme licensing here: https://js.devexpress.com/Licensing/
*/
! function(factory) {
if ("function" === typeof define && define.amd) {
define(function(require, exports, module) {
module.exports = factory(require("jquery"), require("./core/templates/template_engine_registry").setTemplateEngine, require("./core/templates/template_base").renderedCallbacks, require("./core/guid"), require("./ui/validation_engine"), require("./core/utils/iterator"), require("./core/utils/dom").extractTemplateMarkup, require("./core/utils/string").encodeHtml, require("./core/utils/ajax"), require("./core/utils/console"))
})
} else {
DevExpress.aspnet = factory(window.jQuery, DevExpress.setTemplateEngine, DevExpress.templateRendered, DevExpress.data.Guid, DevExpress.validationEngine, DevExpress.utils.iterator, DevExpress.utils.dom.extractTemplateMarkup, DevExpress.utils.string.encodeHtml, DevExpress.utils.ajax, DevExpress.utils.console)
}
}(function($, setTemplateEngine, templateRendered, Guid, validationEngine, iteratorUtils, extractTemplateMarkup, encodeHtml, ajax, console) {
var templateCompiler = createTemplateCompiler();
var pendingCreateComponentRoutines = [];
var enableAlternativeTemplateTags = true;
var warnBug17028 = false;
function createTemplateCompiler() {
var OPEN_TAG = "<%",
CLOSE_TAG = "%>",
ENCODE_QUALIFIER = "-",
INTERPOLATE_QUALIFIER = "=";
var EXTENDED_OPEN_TAG = /[<[]%/g,
EXTENDED_CLOSE_TAG = /%[>\]]/g;
function acceptText(bag, text) {
if (text) {
bag.push("_.push(", JSON.stringify(text), ");")
}
}
function acceptCode(bag, code) {
var encode = code.charAt(0) === ENCODE_QUALIFIER,
value = code.substr(1),
interpolate = code.charAt(0) === INTERPOLATE_QUALIFIER;
if (encode || interpolate) {
bag.push("_.push(");
bag.push(encode ? "arguments[1](" + value + ")" : value);
bag.push(");")
} else {
bag.push(code + "\n")
}
}
return function(text) {
var bag = ["var _ = [];", "with(obj||{}) {"],
chunks = text.split(enableAlternativeTemplateTags ? EXTENDED_OPEN_TAG : OPEN_TAG);
if (warnBug17028 && chunks.length > 1) {
if (text.indexOf(OPEN_TAG) > -1) |
}
acceptText(bag, chunks.shift());
for (var i = 0; i < chunks.length; i++) {
var tmp = chunks[i].split(enableAlternativeTemplateTags ? EXTENDED_CLOSE_TAG : CLOSE_TAG);
if (2 !== tmp.length) {
throw "Template syntax error"
}
acceptCode(bag, tmp[0]);
acceptText(bag, tmp[1])
}
bag.push("}", "return _.join('')");
return new Function("obj", bag.join(""))
}
}
function createTemplateEngine() {
return {
compile: function(element) {
return templateCompiler(extractTemplateMarkup(element))
},
render: function(template, data) {
var html = template(data, encodeHtml);
var dxMvcExtensionsObj = window.MVCx;
if (dxMvcExtensionsObj && !dxMvcExtensionsObj.isDXScriptInitializedOnLoad) {
html = html.replace(/(<script[^>]+)id="dxss_.+?"/g, "$1")
}
return html
}
}
}
function getValidationSummary(validationGroup) {
var result;
$(".dx-validationsummary").each(function(_, element) {
var summary = $(element).data("dxValidationSummary");
if (summary && summary.option("validationGroup") === validationGroup) {
result = summary;
return false
}
});
return result
}
function createValidationSummaryItemsFromValidators(validators, editorNames) {
var items = [];
iteratorUtils.each(validators, function(_, validator) {
var widget = validator.$element().data("dx-validation-target");
if (widget && $.inArray(widget.option("name"), editorNames) > -1) {
items.push({
text: widget.option("validationError.message"),
validator: validator
})
}
});
return items
}
function createComponent(name, options, id, validatorOptions) {
var selector = "#" + String(id).replace(/[^\w-]/g, "\\$&");
pendingCreateComponentRoutines.push(function() {
var $element = $(selector);
if ($element.length) {
var $component = $(selector)[name](options);
if ($.isPlainObject(validatorOptions)) {
$component.dxValidator(validatorOptions)
}
return true
}
return false
})
}
templateRendered.add(function() {
var snapshot = pendingCreateComponentRoutines.slice();
var leftover = [];
pendingCreateComponentRoutines = [];
snapshot.forEach(function(func) {
if (!func()) {
leftover.push(func)
}
});
pendingCreateComponentRoutines = pendingCreateComponentRoutines.concat(leftover)
});
return {
createComponent: createComponent,
renderComponent: function(name, options, id, validatorOptions) {
id = id || "dx-" + new Guid;
createComponent(name, options, id, validatorOptions);
return '<div id="' + id + '"></div>'
},
getEditorValue: function(inputName) {
var $widget = $("input[name='" + inputName + "']").closest(".dx-widget");
if ($widget.length) {
var dxComponents = $widget.data("dxComponents"),
widget = $widget.data(dxComponents[0]);
if (widget) {
return widget.option("value")
}
}
},
setTemplateEngine: function() {
if (setTemplateEngine) {
setTemplateEngine(createTemplateEngine())
}
},
enableAlternativeTemplateTags: function(value) {
enableAlternativeTemplateTags = value
},
warnBug17028: function() {
warnBug17028 = true
},
createValidationSummaryItems: function(validationGroup, editorNames) {
var groupConfig, items, summary = getValidationSummary(validationGroup);
if (summary) {
groupConfig = validationEngine.getGroupConfig(validationGroup);
if (groupConfig) {
items = createValidationSummaryItemsFromValidators(groupConfig.validators, editorNames);
items.length && summary.option("items", items)
}
}
},
sendValidationRequest: function(propertyName, propertyValue, url, method) {
var d = $.Deferred();
var data = {};
data[propertyName] = propertyValue;
ajax.sendRequest({
url: url,
dataType: "json",
method: method || "GET",
data: data
}).then(function(response) {
if ("string" === typeof response) {
d.resolve({
isValid: false,
message: response
})
} else {
d.resolve(response)
}
}, function(xhr) {
d.reject({
isValid: false,
message: xhr.responseText
})
});
return d.promise()
}
}
});
| {
console.logger.warn("Please use an alternative template syntax: https://community.devexpress.com/blogs/aspnet/archive/2020/01/29/asp-net-core-new-syntax-to-fix-razor-issue.aspx");
warnBug17028 = false
} | conditional_block |
dx.aspnet.mvc.js | /*!
* DevExtreme (dx.aspnet.mvc.js)
* Version: 20.1.8 (build 20303-1716)
* Build date: Thu Oct 29 2020
*
* Copyright (c) 2012 - 2020 Developer Express Inc. ALL RIGHTS RESERVED
* Read about DevExtreme licensing here: https://js.devexpress.com/Licensing/
*/
! function(factory) {
if ("function" === typeof define && define.amd) {
define(function(require, exports, module) {
module.exports = factory(require("jquery"), require("./core/templates/template_engine_registry").setTemplateEngine, require("./core/templates/template_base").renderedCallbacks, require("./core/guid"), require("./ui/validation_engine"), require("./core/utils/iterator"), require("./core/utils/dom").extractTemplateMarkup, require("./core/utils/string").encodeHtml, require("./core/utils/ajax"), require("./core/utils/console"))
})
} else {
DevExpress.aspnet = factory(window.jQuery, DevExpress.setTemplateEngine, DevExpress.templateRendered, DevExpress.data.Guid, DevExpress.validationEngine, DevExpress.utils.iterator, DevExpress.utils.dom.extractTemplateMarkup, DevExpress.utils.string.encodeHtml, DevExpress.utils.ajax, DevExpress.utils.console)
}
}(function($, setTemplateEngine, templateRendered, Guid, validationEngine, iteratorUtils, extractTemplateMarkup, encodeHtml, ajax, console) {
var templateCompiler = createTemplateCompiler();
var pendingCreateComponentRoutines = [];
var enableAlternativeTemplateTags = true;
var warnBug17028 = false;
function createTemplateCompiler() {
var OPEN_TAG = "<%",
CLOSE_TAG = "%>",
ENCODE_QUALIFIER = "-",
INTERPOLATE_QUALIFIER = "=";
var EXTENDED_OPEN_TAG = /[<[]%/g,
EXTENDED_CLOSE_TAG = /%[>\]]/g;
function acceptText(bag, text) {
if (text) {
bag.push("_.push(", JSON.stringify(text), ");")
}
}
function | (bag, code) {
var encode = code.charAt(0) === ENCODE_QUALIFIER,
value = code.substr(1),
interpolate = code.charAt(0) === INTERPOLATE_QUALIFIER;
if (encode || interpolate) {
bag.push("_.push(");
bag.push(encode ? "arguments[1](" + value + ")" : value);
bag.push(");")
} else {
bag.push(code + "\n")
}
}
return function(text) {
var bag = ["var _ = [];", "with(obj||{}) {"],
chunks = text.split(enableAlternativeTemplateTags ? EXTENDED_OPEN_TAG : OPEN_TAG);
if (warnBug17028 && chunks.length > 1) {
if (text.indexOf(OPEN_TAG) > -1) {
console.logger.warn("Please use an alternative template syntax: https://community.devexpress.com/blogs/aspnet/archive/2020/01/29/asp-net-core-new-syntax-to-fix-razor-issue.aspx");
warnBug17028 = false
}
}
acceptText(bag, chunks.shift());
for (var i = 0; i < chunks.length; i++) {
var tmp = chunks[i].split(enableAlternativeTemplateTags ? EXTENDED_CLOSE_TAG : CLOSE_TAG);
if (2 !== tmp.length) {
throw "Template syntax error"
}
acceptCode(bag, tmp[0]);
acceptText(bag, tmp[1])
}
bag.push("}", "return _.join('')");
return new Function("obj", bag.join(""))
}
}
function createTemplateEngine() {
return {
compile: function(element) {
return templateCompiler(extractTemplateMarkup(element))
},
render: function(template, data) {
var html = template(data, encodeHtml);
var dxMvcExtensionsObj = window.MVCx;
if (dxMvcExtensionsObj && !dxMvcExtensionsObj.isDXScriptInitializedOnLoad) {
html = html.replace(/(<script[^>]+)id="dxss_.+?"/g, "$1")
}
return html
}
}
}
function getValidationSummary(validationGroup) {
var result;
$(".dx-validationsummary").each(function(_, element) {
var summary = $(element).data("dxValidationSummary");
if (summary && summary.option("validationGroup") === validationGroup) {
result = summary;
return false
}
});
return result
}
function createValidationSummaryItemsFromValidators(validators, editorNames) {
var items = [];
iteratorUtils.each(validators, function(_, validator) {
var widget = validator.$element().data("dx-validation-target");
if (widget && $.inArray(widget.option("name"), editorNames) > -1) {
items.push({
text: widget.option("validationError.message"),
validator: validator
})
}
});
return items
}
function createComponent(name, options, id, validatorOptions) {
var selector = "#" + String(id).replace(/[^\w-]/g, "\\$&");
pendingCreateComponentRoutines.push(function() {
var $element = $(selector);
if ($element.length) {
var $component = $(selector)[name](options);
if ($.isPlainObject(validatorOptions)) {
$component.dxValidator(validatorOptions)
}
return true
}
return false
})
}
templateRendered.add(function() {
var snapshot = pendingCreateComponentRoutines.slice();
var leftover = [];
pendingCreateComponentRoutines = [];
snapshot.forEach(function(func) {
if (!func()) {
leftover.push(func)
}
});
pendingCreateComponentRoutines = pendingCreateComponentRoutines.concat(leftover)
});
return {
createComponent: createComponent,
renderComponent: function(name, options, id, validatorOptions) {
id = id || "dx-" + new Guid;
createComponent(name, options, id, validatorOptions);
return '<div id="' + id + '"></div>'
},
getEditorValue: function(inputName) {
var $widget = $("input[name='" + inputName + "']").closest(".dx-widget");
if ($widget.length) {
var dxComponents = $widget.data("dxComponents"),
widget = $widget.data(dxComponents[0]);
if (widget) {
return widget.option("value")
}
}
},
setTemplateEngine: function() {
if (setTemplateEngine) {
setTemplateEngine(createTemplateEngine())
}
},
enableAlternativeTemplateTags: function(value) {
enableAlternativeTemplateTags = value
},
warnBug17028: function() {
warnBug17028 = true
},
createValidationSummaryItems: function(validationGroup, editorNames) {
var groupConfig, items, summary = getValidationSummary(validationGroup);
if (summary) {
groupConfig = validationEngine.getGroupConfig(validationGroup);
if (groupConfig) {
items = createValidationSummaryItemsFromValidators(groupConfig.validators, editorNames);
items.length && summary.option("items", items)
}
}
},
sendValidationRequest: function(propertyName, propertyValue, url, method) {
var d = $.Deferred();
var data = {};
data[propertyName] = propertyValue;
ajax.sendRequest({
url: url,
dataType: "json",
method: method || "GET",
data: data
}).then(function(response) {
if ("string" === typeof response) {
d.resolve({
isValid: false,
message: response
})
} else {
d.resolve(response)
}
}, function(xhr) {
d.reject({
isValid: false,
message: xhr.responseText
})
});
return d.promise()
}
}
});
| acceptCode | identifier_name |
index.tsx | import { GetStaticProps } from 'next'
import { useTranslation } from 'next-i18next'
import { Box, Divider } from '@chakra-ui/react'
import { translationProps } from '@utils/i18n'
import { getGithubStars } from '@utils/getGithubStars'
import { getDiscordMembers } from '@utils/getDiscordMembers'
import { getTestCoverage } from '@utils/getTestCoverage'
import { getCodeQuality } from '@utils/getCodeQuality'
import { getUsedBy } from '@utils/getUsedBy'
import { getSponsors } from '@utils/getSponsors'
import {
getTotalNpmDownloads,
getMonthlyNpmDownloads | import { SEO } from '@components/SEO'
import { Announcer } from '@components/Announcer'
import { Header } from '@components/Header'
import { Hero } from '@components/Hero'
import { Features } from '@components/Features'
import { Stats } from '@components/Stats'
import { Demo } from '@components/Demo'
import { DiscordBar } from '@components/DiscordBar'
import { ISponsor, Sponsors } from '@components/Sponsors'
import { UsedBy } from '@components/UsedBy'
import { Footer } from '@components/Footer'
interface IHomeProps {
githubStars: string
totalDownloads: string
monthlyDownloads: string
discordMembers: string
testCoverage: string
codeQuality: string
usedBy: {
organizations: ISponsor[]
}
sponsors: {
individuals: ISponsor[]
organizations: ISponsor[]
}
}
export const getStaticProps: GetStaticProps = async ({ locale }) => {
const [
{ prettyCount: githubStars },
{ prettyCount: totalDownloads },
{ prettyCount: monthlyDownloads },
{ prettyCount: discordMembers },
{ prettyCount: testCoverage },
{ score: codeQuality }
] = await Promise.all([
getGithubStars(),
getTotalNpmDownloads(),
getMonthlyNpmDownloads(),
getDiscordMembers(),
getTestCoverage(),
getCodeQuality()
])
const usedBy = await getUsedBy()
const sponsors = await getSponsors()
return {
props: {
...(await translationProps(locale, ['common'])),
githubStars,
totalDownloads,
monthlyDownloads,
discordMembers,
testCoverage,
codeQuality,
usedBy,
sponsors
},
revalidate: 60
}
}
export default function IndexPage (props: IHomeProps) {
const { t } = useTranslation('common')
return (
<>
<SEO
title={t('homepage.seo.title')}
description={t('homepage.seo.description')}
/>
<Announcer
message={t('announcement.message')}
link={t('announcement.link.href')}
title={t('announcement.link.title')}
/>
<Header />
<Box mb={20}>
<Hero />
<Divider />
<Features />
<Divider />
<Stats
githubStars={props.githubStars}
totalDownloads={props.totalDownloads}
monthlyDownloads={props.monthlyDownloads}
discordMembers={props.discordMembers}
testCoverage={props.testCoverage}
codeQuality={props.codeQuality}
/>
<Divider />
<Demo />
<Divider />
<Sponsors
organizations={props.sponsors.organizations}
individuals={props.sponsors.individuals}
/>
<Divider />
<UsedBy usedBy={props.usedBy.organizations} />
<Divider />
<DiscordBar />
<Footer />
</Box>
</>
)
} | } from '@utils/getNpmDownloads'
| random_line_split |
index.tsx | import { GetStaticProps } from 'next'
import { useTranslation } from 'next-i18next'
import { Box, Divider } from '@chakra-ui/react'
import { translationProps } from '@utils/i18n'
import { getGithubStars } from '@utils/getGithubStars'
import { getDiscordMembers } from '@utils/getDiscordMembers'
import { getTestCoverage } from '@utils/getTestCoverage'
import { getCodeQuality } from '@utils/getCodeQuality'
import { getUsedBy } from '@utils/getUsedBy'
import { getSponsors } from '@utils/getSponsors'
import {
getTotalNpmDownloads,
getMonthlyNpmDownloads
} from '@utils/getNpmDownloads'
import { SEO } from '@components/SEO'
import { Announcer } from '@components/Announcer'
import { Header } from '@components/Header'
import { Hero } from '@components/Hero'
import { Features } from '@components/Features'
import { Stats } from '@components/Stats'
import { Demo } from '@components/Demo'
import { DiscordBar } from '@components/DiscordBar'
import { ISponsor, Sponsors } from '@components/Sponsors'
import { UsedBy } from '@components/UsedBy'
import { Footer } from '@components/Footer'
interface IHomeProps {
githubStars: string
totalDownloads: string
monthlyDownloads: string
discordMembers: string
testCoverage: string
codeQuality: string
usedBy: {
organizations: ISponsor[]
}
sponsors: {
individuals: ISponsor[]
organizations: ISponsor[]
}
}
export const getStaticProps: GetStaticProps = async ({ locale }) => {
const [
{ prettyCount: githubStars },
{ prettyCount: totalDownloads },
{ prettyCount: monthlyDownloads },
{ prettyCount: discordMembers },
{ prettyCount: testCoverage },
{ score: codeQuality }
] = await Promise.all([
getGithubStars(),
getTotalNpmDownloads(),
getMonthlyNpmDownloads(),
getDiscordMembers(),
getTestCoverage(),
getCodeQuality()
])
const usedBy = await getUsedBy()
const sponsors = await getSponsors()
return {
props: {
...(await translationProps(locale, ['common'])),
githubStars,
totalDownloads,
monthlyDownloads,
discordMembers,
testCoverage,
codeQuality,
usedBy,
sponsors
},
revalidate: 60
}
}
export default function | (props: IHomeProps) {
const { t } = useTranslation('common')
return (
<>
<SEO
title={t('homepage.seo.title')}
description={t('homepage.seo.description')}
/>
<Announcer
message={t('announcement.message')}
link={t('announcement.link.href')}
title={t('announcement.link.title')}
/>
<Header />
<Box mb={20}>
<Hero />
<Divider />
<Features />
<Divider />
<Stats
githubStars={props.githubStars}
totalDownloads={props.totalDownloads}
monthlyDownloads={props.monthlyDownloads}
discordMembers={props.discordMembers}
testCoverage={props.testCoverage}
codeQuality={props.codeQuality}
/>
<Divider />
<Demo />
<Divider />
<Sponsors
organizations={props.sponsors.organizations}
individuals={props.sponsors.individuals}
/>
<Divider />
<UsedBy usedBy={props.usedBy.organizations} />
<Divider />
<DiscordBar />
<Footer />
</Box>
</>
)
}
| IndexPage | identifier_name |
index.tsx | import { GetStaticProps } from 'next'
import { useTranslation } from 'next-i18next'
import { Box, Divider } from '@chakra-ui/react'
import { translationProps } from '@utils/i18n'
import { getGithubStars } from '@utils/getGithubStars'
import { getDiscordMembers } from '@utils/getDiscordMembers'
import { getTestCoverage } from '@utils/getTestCoverage'
import { getCodeQuality } from '@utils/getCodeQuality'
import { getUsedBy } from '@utils/getUsedBy'
import { getSponsors } from '@utils/getSponsors'
import {
getTotalNpmDownloads,
getMonthlyNpmDownloads
} from '@utils/getNpmDownloads'
import { SEO } from '@components/SEO'
import { Announcer } from '@components/Announcer'
import { Header } from '@components/Header'
import { Hero } from '@components/Hero'
import { Features } from '@components/Features'
import { Stats } from '@components/Stats'
import { Demo } from '@components/Demo'
import { DiscordBar } from '@components/DiscordBar'
import { ISponsor, Sponsors } from '@components/Sponsors'
import { UsedBy } from '@components/UsedBy'
import { Footer } from '@components/Footer'
interface IHomeProps {
githubStars: string
totalDownloads: string
monthlyDownloads: string
discordMembers: string
testCoverage: string
codeQuality: string
usedBy: {
organizations: ISponsor[]
}
sponsors: {
individuals: ISponsor[]
organizations: ISponsor[]
}
}
export const getStaticProps: GetStaticProps = async ({ locale }) => {
const [
{ prettyCount: githubStars },
{ prettyCount: totalDownloads },
{ prettyCount: monthlyDownloads },
{ prettyCount: discordMembers },
{ prettyCount: testCoverage },
{ score: codeQuality }
] = await Promise.all([
getGithubStars(),
getTotalNpmDownloads(),
getMonthlyNpmDownloads(),
getDiscordMembers(),
getTestCoverage(),
getCodeQuality()
])
const usedBy = await getUsedBy()
const sponsors = await getSponsors()
return {
props: {
...(await translationProps(locale, ['common'])),
githubStars,
totalDownloads,
monthlyDownloads,
discordMembers,
testCoverage,
codeQuality,
usedBy,
sponsors
},
revalidate: 60
}
}
export default function IndexPage (props: IHomeProps) | {
const { t } = useTranslation('common')
return (
<>
<SEO
title={t('homepage.seo.title')}
description={t('homepage.seo.description')}
/>
<Announcer
message={t('announcement.message')}
link={t('announcement.link.href')}
title={t('announcement.link.title')}
/>
<Header />
<Box mb={20}>
<Hero />
<Divider />
<Features />
<Divider />
<Stats
githubStars={props.githubStars}
totalDownloads={props.totalDownloads}
monthlyDownloads={props.monthlyDownloads}
discordMembers={props.discordMembers}
testCoverage={props.testCoverage}
codeQuality={props.codeQuality}
/>
<Divider />
<Demo />
<Divider />
<Sponsors
organizations={props.sponsors.organizations}
individuals={props.sponsors.individuals}
/>
<Divider />
<UsedBy usedBy={props.usedBy.organizations} />
<Divider />
<DiscordBar />
<Footer />
</Box>
</>
)
} | identifier_body | |
command.rs | // Copyright 2016 The Gfx-rs Developers.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#![allow(missing_docs)]
use std::ptr;
use winapi::{FLOAT, INT, UINT, UINT8, DXGI_FORMAT,
DXGI_FORMAT_R16_UINT, DXGI_FORMAT_R32_UINT,
D3D11_CLEAR_FLAG, D3D11_PRIMITIVE_TOPOLOGY, D3D11_VIEWPORT, D3D11_RECT,
ID3D11RasterizerState, ID3D11DepthStencilState, ID3D11BlendState};
use core::{command, pso, shade, state, target, texture as tex};
use core::{IndexType, VertexCount};
use core::{MAX_VERTEX_ATTRIBUTES, MAX_CONSTANT_BUFFERS,
MAX_RESOURCE_VIEWS, MAX_UNORDERED_VIEWS,
MAX_SAMPLERS, MAX_COLOR_TARGETS};
use {native, Resources, InputLayout, Buffer, Texture, Pipeline, Program};
| #[derive(Clone, Copy, PartialEq, Debug)]
pub struct DataPointer {
offset: u32,
size: u32,
}
pub struct DataBuffer(Vec<u8>);
impl DataBuffer {
/// Create a new empty data buffer.
pub fn new() -> DataBuffer {
DataBuffer(Vec::new())
}
/// Reset the contents.
pub fn reset(&mut self) {
self.0.clear();
}
/// Copy a given vector slice into the buffer.
pub fn add(&mut self, data: &[u8]) -> DataPointer {
self.0.extend_from_slice(data);
DataPointer {
offset: (self.0.len() - data.len()) as u32,
size: data.len() as u32,
}
}
/// Return a reference to a stored data object.
pub fn get(&self, ptr: DataPointer) -> &[u8] {
&self.0[ptr.offset as usize .. (ptr.offset + ptr.size) as usize]
}
}
///Serialized device command.
#[derive(Clone, Copy, Debug)]
pub enum Command {
// states
BindProgram(Program),
BindInputLayout(InputLayout),
BindIndex(Buffer, DXGI_FORMAT),
BindVertexBuffers([native::Buffer; MAX_VERTEX_ATTRIBUTES], [UINT; MAX_VERTEX_ATTRIBUTES], [UINT; MAX_VERTEX_ATTRIBUTES]),
BindConstantBuffers(shade::Stage, [native::Buffer; MAX_CONSTANT_BUFFERS]),
BindShaderResources(shade::Stage, [native::Srv; MAX_RESOURCE_VIEWS]),
BindSamplers(shade::Stage, [native::Sampler; MAX_SAMPLERS]),
BindPixelTargets([native::Rtv; MAX_COLOR_TARGETS], native::Dsv),
SetPrimitive(D3D11_PRIMITIVE_TOPOLOGY),
SetViewport(D3D11_VIEWPORT),
SetScissor(D3D11_RECT),
SetRasterizer(*const ID3D11RasterizerState),
SetDepthStencil(*const ID3D11DepthStencilState, UINT),
SetBlend(*const ID3D11BlendState, [FLOAT; 4], UINT),
// resource updates
UpdateBuffer(Buffer, DataPointer, usize),
UpdateTexture(Texture, tex::Kind, Option<tex::CubeFace>, DataPointer, tex::RawImageInfo),
GenerateMips(native::Srv),
// drawing
ClearColor(native::Rtv, [f32; 4]),
ClearDepthStencil(native::Dsv, D3D11_CLEAR_FLAG, FLOAT, UINT8),
Draw(UINT, UINT),
DrawInstanced(UINT, UINT, UINT, UINT),
DrawIndexed(UINT, UINT, INT),
DrawIndexedInstanced(UINT, UINT, UINT, INT, UINT),
}
unsafe impl Send for Command {}
struct Cache {
attrib_strides: [Option<pso::ElemStride>; MAX_VERTEX_ATTRIBUTES],
rasterizer: *const ID3D11RasterizerState,
depth_stencil: *const ID3D11DepthStencilState,
stencil_ref: UINT,
blend: *const ID3D11BlendState,
blend_ref: [FLOAT; 4],
}
unsafe impl Send for Cache {}
impl Cache {
fn new() -> Cache {
Cache {
attrib_strides: [None; MAX_VERTEX_ATTRIBUTES],
rasterizer: ptr::null(),
depth_stencil: ptr::null(),
stencil_ref: 0,
blend: ptr::null(),
blend_ref: [0.0; 4],
}
}
}
pub struct CommandBuffer<P> {
pub parser: P,
cache: Cache,
}
pub trait Parser: Sized + Send {
fn reset(&mut self);
fn parse(&mut self, Command);
fn update_buffer(&mut self, Buffer, &[u8], usize);
fn update_texture(&mut self, Texture, tex::Kind, Option<tex::CubeFace>, &[u8], tex::RawImageInfo);
}
impl<P: Parser> From<P> for CommandBuffer<P> {
fn from(parser: P) -> CommandBuffer<P> {
CommandBuffer {
parser: parser,
cache: Cache::new(),
}
}
}
impl<P: Parser> CommandBuffer<P> {
fn flush(&mut self) {
let sample_mask = !0; //TODO
self.parser.parse(Command::SetDepthStencil(self.cache.depth_stencil, self.cache.stencil_ref));
self.parser.parse(Command::SetBlend(self.cache.blend, self.cache.blend_ref, sample_mask));
}
}
impl<P: Parser> command::Buffer<Resources> for CommandBuffer<P> {
fn reset(&mut self) {
self.parser.reset();
self.cache = Cache::new();
}
fn bind_pipeline_state(&mut self, pso: Pipeline) {
self.parser.parse(Command::SetPrimitive(pso.topology));
for (stride, ad_option) in self.cache.attrib_strides.iter_mut().zip(pso.attributes.iter()) {
*stride = ad_option.map(|(buf_id, _)| match pso.vertex_buffers[buf_id as usize] {
Some(ref bdesc) => bdesc.stride,
None => {
error!("Unexpected use of buffer id {}", buf_id);
0
},
});
}
if self.cache.rasterizer != pso.rasterizer {
self.cache.rasterizer = pso.rasterizer;
self.parser.parse(Command::SetRasterizer(pso.rasterizer));
}
self.cache.depth_stencil = pso.depth_stencil;
self.cache.blend = pso.blend;
self.parser.parse(Command::BindInputLayout(pso.layout));
self.parser.parse(Command::BindProgram(pso.program));
}
fn bind_vertex_buffers(&mut self, vbs: pso::VertexBufferSet<Resources>) {
//Note: assumes `bind_pipeline_state` is called prior
let mut buffers = [native::Buffer(ptr::null_mut()); MAX_VERTEX_ATTRIBUTES];
let mut strides = [0; MAX_VERTEX_ATTRIBUTES];
let mut offsets = [0; MAX_VERTEX_ATTRIBUTES];
for i in 0 .. MAX_VERTEX_ATTRIBUTES {
match (vbs.0[i], self.cache.attrib_strides[i]) {
(None, Some(stride)) => {
error!("No vertex input provided for slot {} with stride {}", i, stride)
},
(Some((buffer, offset)), Some(stride)) => {
buffers[i] = buffer.0;
strides[i] = stride as UINT;
offsets[i] = offset as UINT;
},
(_, None) => (),
}
}
self.parser.parse(Command::BindVertexBuffers(buffers, strides, offsets));
}
fn bind_constant_buffers(&mut self, cbs: &[pso::ConstantBufferParam<Resources>]) {
for &stage in shade::STAGES.iter() {
let mut buffers = [native::Buffer(ptr::null_mut()); MAX_CONSTANT_BUFFERS];
let mask = stage.into();
let mut count = 0;
for cbuf in cbs.iter() {
if cbuf.1.contains(mask) {
buffers[cbuf.2 as usize] = (cbuf.0).0;
count += 1;
}
}
if count != 0 {
self.parser.parse(Command::BindConstantBuffers(stage, buffers));
}
}
}
fn bind_global_constant(&mut self, _: shade::Location, _: shade::UniformValue) {
error!("Global constants are not supported");
}
fn bind_resource_views(&mut self, rvs: &[pso::ResourceViewParam<Resources>]) {
for &stage in shade::STAGES.iter() {
let mut views = [native::Srv(ptr::null_mut()); MAX_RESOURCE_VIEWS];
let mask = stage.into();
let mut count = 0;
for view in rvs.iter() {
if view.1.contains(mask) {
views[view.2 as usize] = view.0;
count += 1;
}
}
if count != 0 {
self.parser.parse(Command::BindShaderResources(stage, views));
}
}
}
fn bind_unordered_views(&mut self, uvs: &[pso::UnorderedViewParam<Resources>]) {
let mut views = [(); MAX_UNORDERED_VIEWS];
let mut count = 0;
for view in uvs.iter() {
views[view.2 as usize] = view.0;
count += 1;
}
if count != 0 {
unimplemented!()
//self.parser.parse(Command::BindUnorderedAccess(stage, views));
}
}
fn bind_samplers(&mut self, ss: &[pso::SamplerParam<Resources>]) {
for &stage in shade::STAGES.iter() {
let mut samplers = [native::Sampler(ptr::null_mut()); MAX_SAMPLERS];
let mask = stage.into();
let mut count = 0;
for sm in ss.iter() {
if sm.1.contains(mask) {
samplers[sm.2 as usize] = sm.0;
count += 1;
}
}
if count != 0 {
self.parser.parse(Command::BindSamplers(stage, samplers));
}
}
}
fn bind_pixel_targets(&mut self, pts: pso::PixelTargetSet<Resources>) {
if let (Some(ref d), Some(ref s)) = (pts.depth, pts.stencil) {
if d != s {
error!("Depth and stencil views have to be the same");
}
}
let view = pts.get_view();
let viewport = D3D11_VIEWPORT {
TopLeftX: 0.0,
TopLeftY: 0.0,
Width: view.0 as f32,
Height: view.1 as f32,
MinDepth: 0.0,
MaxDepth: 1.0,
};
let mut colors = [native::Rtv(ptr::null_mut()); MAX_COLOR_TARGETS];
for i in 0 .. MAX_COLOR_TARGETS {
if let Some(c) = pts.colors[i] {
colors[i] = c;
}
}
let ds = pts.depth.unwrap_or(native::Dsv(ptr::null_mut()));
self.parser.parse(Command::BindPixelTargets(colors, ds));
self.parser.parse(Command::SetViewport(viewport));
}
fn bind_index(&mut self, buf: Buffer, itype: IndexType) {
let format = match itype {
IndexType::U16 => DXGI_FORMAT_R16_UINT,
IndexType::U32 => DXGI_FORMAT_R32_UINT,
};
self.parser.parse(Command::BindIndex(buf, format));
}
fn set_scissor(&mut self, rect: target::Rect) {
self.parser.parse(Command::SetScissor(D3D11_RECT {
left: rect.x as INT,
top: rect.y as INT,
right: (rect.x + rect.w) as INT,
bottom: (rect.y + rect.h) as INT,
}));
}
fn set_ref_values(&mut self, rv: state::RefValues) {
if rv.stencil.0 != rv.stencil.1 {
error!("Unable to set different stencil ref values for front ({}) and back ({})",
rv.stencil.0, rv.stencil.1);
}
self.cache.stencil_ref = rv.stencil.0 as UINT;
self.cache.blend_ref = rv.blend;
}
fn update_buffer(&mut self, buf: Buffer, data: &[u8], offset: usize) {
self.parser.update_buffer(buf, data, offset);
}
fn update_texture(&mut self, tex: Texture, kind: tex::Kind, face: Option<tex::CubeFace>,
data: &[u8], image: tex::RawImageInfo) {
self.parser.update_texture(tex, kind, face, data, image);
}
fn generate_mipmap(&mut self, srv: native::Srv) {
self.parser.parse(Command::GenerateMips(srv));
}
fn clear_color(&mut self, target: native::Rtv, value: command::ClearColor) {
match value {
command::ClearColor::Float(data) => {
self.parser.parse(Command::ClearColor(target, data));
},
_ => {
error!("Unable to clear int/uint target");
},
}
}
fn clear_depth_stencil(&mut self, target: native::Dsv, depth: Option<target::Depth>,
stencil: Option<target::Stencil>) {
let flags = //warning: magic constants ahead
D3D11_CLEAR_FLAG(if depth.is_some() {1} else {0}) |
D3D11_CLEAR_FLAG(if stencil.is_some() {2} else {0});
self.parser.parse(Command::ClearDepthStencil(target, flags,
depth.unwrap_or_default() as FLOAT,
stencil.unwrap_or_default() as UINT8
));
}
fn call_draw(&mut self, start: VertexCount, count: VertexCount, instances: Option<command::InstanceParams>) {
self.flush();
self.parser.parse(match instances {
Some((ninst, offset)) => Command::DrawInstanced(
count as UINT, ninst as UINT, start as UINT, offset as UINT),
None => Command::Draw(count as UINT, start as UINT),
});
}
fn call_draw_indexed(&mut self, start: VertexCount, count: VertexCount,
base: VertexCount, instances: Option<command::InstanceParams>) {
self.flush();
self.parser.parse(match instances {
Some((ninst, offset)) => Command::DrawIndexedInstanced(
count as UINT, ninst as UINT, start as UINT, base as INT, offset as UINT),
None => Command::DrawIndexed(count as UINT, start as UINT, base as INT),
});
}
} | /// The place of some data in the data buffer. | random_line_split |
command.rs | // Copyright 2016 The Gfx-rs Developers.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#![allow(missing_docs)]
use std::ptr;
use winapi::{FLOAT, INT, UINT, UINT8, DXGI_FORMAT,
DXGI_FORMAT_R16_UINT, DXGI_FORMAT_R32_UINT,
D3D11_CLEAR_FLAG, D3D11_PRIMITIVE_TOPOLOGY, D3D11_VIEWPORT, D3D11_RECT,
ID3D11RasterizerState, ID3D11DepthStencilState, ID3D11BlendState};
use core::{command, pso, shade, state, target, texture as tex};
use core::{IndexType, VertexCount};
use core::{MAX_VERTEX_ATTRIBUTES, MAX_CONSTANT_BUFFERS,
MAX_RESOURCE_VIEWS, MAX_UNORDERED_VIEWS,
MAX_SAMPLERS, MAX_COLOR_TARGETS};
use {native, Resources, InputLayout, Buffer, Texture, Pipeline, Program};
/// The place of some data in the data buffer.
#[derive(Clone, Copy, PartialEq, Debug)]
pub struct DataPointer {
offset: u32,
size: u32,
}
pub struct DataBuffer(Vec<u8>);
impl DataBuffer {
/// Create a new empty data buffer.
pub fn new() -> DataBuffer {
DataBuffer(Vec::new())
}
/// Reset the contents.
pub fn reset(&mut self) {
self.0.clear();
}
/// Copy a given vector slice into the buffer.
pub fn add(&mut self, data: &[u8]) -> DataPointer {
self.0.extend_from_slice(data);
DataPointer {
offset: (self.0.len() - data.len()) as u32,
size: data.len() as u32,
}
}
/// Return a reference to a stored data object.
pub fn get(&self, ptr: DataPointer) -> &[u8] {
&self.0[ptr.offset as usize .. (ptr.offset + ptr.size) as usize]
}
}
///Serialized device command.
#[derive(Clone, Copy, Debug)]
pub enum Command {
// states
BindProgram(Program),
BindInputLayout(InputLayout),
BindIndex(Buffer, DXGI_FORMAT),
BindVertexBuffers([native::Buffer; MAX_VERTEX_ATTRIBUTES], [UINT; MAX_VERTEX_ATTRIBUTES], [UINT; MAX_VERTEX_ATTRIBUTES]),
BindConstantBuffers(shade::Stage, [native::Buffer; MAX_CONSTANT_BUFFERS]),
BindShaderResources(shade::Stage, [native::Srv; MAX_RESOURCE_VIEWS]),
BindSamplers(shade::Stage, [native::Sampler; MAX_SAMPLERS]),
BindPixelTargets([native::Rtv; MAX_COLOR_TARGETS], native::Dsv),
SetPrimitive(D3D11_PRIMITIVE_TOPOLOGY),
SetViewport(D3D11_VIEWPORT),
SetScissor(D3D11_RECT),
SetRasterizer(*const ID3D11RasterizerState),
SetDepthStencil(*const ID3D11DepthStencilState, UINT),
SetBlend(*const ID3D11BlendState, [FLOAT; 4], UINT),
// resource updates
UpdateBuffer(Buffer, DataPointer, usize),
UpdateTexture(Texture, tex::Kind, Option<tex::CubeFace>, DataPointer, tex::RawImageInfo),
GenerateMips(native::Srv),
// drawing
ClearColor(native::Rtv, [f32; 4]),
ClearDepthStencil(native::Dsv, D3D11_CLEAR_FLAG, FLOAT, UINT8),
Draw(UINT, UINT),
DrawInstanced(UINT, UINT, UINT, UINT),
DrawIndexed(UINT, UINT, INT),
DrawIndexedInstanced(UINT, UINT, UINT, INT, UINT),
}
unsafe impl Send for Command {}
struct Cache {
attrib_strides: [Option<pso::ElemStride>; MAX_VERTEX_ATTRIBUTES],
rasterizer: *const ID3D11RasterizerState,
depth_stencil: *const ID3D11DepthStencilState,
stencil_ref: UINT,
blend: *const ID3D11BlendState,
blend_ref: [FLOAT; 4],
}
unsafe impl Send for Cache {}
impl Cache {
fn new() -> Cache {
Cache {
attrib_strides: [None; MAX_VERTEX_ATTRIBUTES],
rasterizer: ptr::null(),
depth_stencil: ptr::null(),
stencil_ref: 0,
blend: ptr::null(),
blend_ref: [0.0; 4],
}
}
}
pub struct CommandBuffer<P> {
pub parser: P,
cache: Cache,
}
pub trait Parser: Sized + Send {
fn reset(&mut self);
fn parse(&mut self, Command);
fn update_buffer(&mut self, Buffer, &[u8], usize);
fn update_texture(&mut self, Texture, tex::Kind, Option<tex::CubeFace>, &[u8], tex::RawImageInfo);
}
impl<P: Parser> From<P> for CommandBuffer<P> {
fn from(parser: P) -> CommandBuffer<P> {
CommandBuffer {
parser: parser,
cache: Cache::new(),
}
}
}
impl<P: Parser> CommandBuffer<P> {
fn flush(&mut self) {
let sample_mask = !0; //TODO
self.parser.parse(Command::SetDepthStencil(self.cache.depth_stencil, self.cache.stencil_ref));
self.parser.parse(Command::SetBlend(self.cache.blend, self.cache.blend_ref, sample_mask));
}
}
impl<P: Parser> command::Buffer<Resources> for CommandBuffer<P> {
fn reset(&mut self) {
self.parser.reset();
self.cache = Cache::new();
}
fn bind_pipeline_state(&mut self, pso: Pipeline) {
self.parser.parse(Command::SetPrimitive(pso.topology));
for (stride, ad_option) in self.cache.attrib_strides.iter_mut().zip(pso.attributes.iter()) {
*stride = ad_option.map(|(buf_id, _)| match pso.vertex_buffers[buf_id as usize] {
Some(ref bdesc) => bdesc.stride,
None => {
error!("Unexpected use of buffer id {}", buf_id);
0
},
});
}
if self.cache.rasterizer != pso.rasterizer {
self.cache.rasterizer = pso.rasterizer;
self.parser.parse(Command::SetRasterizer(pso.rasterizer));
}
self.cache.depth_stencil = pso.depth_stencil;
self.cache.blend = pso.blend;
self.parser.parse(Command::BindInputLayout(pso.layout));
self.parser.parse(Command::BindProgram(pso.program));
}
fn bind_vertex_buffers(&mut self, vbs: pso::VertexBufferSet<Resources>) {
//Note: assumes `bind_pipeline_state` is called prior
let mut buffers = [native::Buffer(ptr::null_mut()); MAX_VERTEX_ATTRIBUTES];
let mut strides = [0; MAX_VERTEX_ATTRIBUTES];
let mut offsets = [0; MAX_VERTEX_ATTRIBUTES];
for i in 0 .. MAX_VERTEX_ATTRIBUTES {
match (vbs.0[i], self.cache.attrib_strides[i]) {
(None, Some(stride)) => {
error!("No vertex input provided for slot {} with stride {}", i, stride)
},
(Some((buffer, offset)), Some(stride)) => {
buffers[i] = buffer.0;
strides[i] = stride as UINT;
offsets[i] = offset as UINT;
},
(_, None) => (),
}
}
self.parser.parse(Command::BindVertexBuffers(buffers, strides, offsets));
}
fn bind_constant_buffers(&mut self, cbs: &[pso::ConstantBufferParam<Resources>]) {
for &stage in shade::STAGES.iter() {
let mut buffers = [native::Buffer(ptr::null_mut()); MAX_CONSTANT_BUFFERS];
let mask = stage.into();
let mut count = 0;
for cbuf in cbs.iter() {
if cbuf.1.contains(mask) {
buffers[cbuf.2 as usize] = (cbuf.0).0;
count += 1;
}
}
if count != 0 {
self.parser.parse(Command::BindConstantBuffers(stage, buffers));
}
}
}
fn bind_global_constant(&mut self, _: shade::Location, _: shade::UniformValue) {
error!("Global constants are not supported");
}
fn bind_resource_views(&mut self, rvs: &[pso::ResourceViewParam<Resources>]) {
for &stage in shade::STAGES.iter() {
let mut views = [native::Srv(ptr::null_mut()); MAX_RESOURCE_VIEWS];
let mask = stage.into();
let mut count = 0;
for view in rvs.iter() {
if view.1.contains(mask) {
views[view.2 as usize] = view.0;
count += 1;
}
}
if count != 0 {
self.parser.parse(Command::BindShaderResources(stage, views));
}
}
}
fn bind_unordered_views(&mut self, uvs: &[pso::UnorderedViewParam<Resources>]) {
let mut views = [(); MAX_UNORDERED_VIEWS];
let mut count = 0;
for view in uvs.iter() {
views[view.2 as usize] = view.0;
count += 1;
}
if count != 0 {
unimplemented!()
//self.parser.parse(Command::BindUnorderedAccess(stage, views));
}
}
fn bind_samplers(&mut self, ss: &[pso::SamplerParam<Resources>]) {
for &stage in shade::STAGES.iter() {
let mut samplers = [native::Sampler(ptr::null_mut()); MAX_SAMPLERS];
let mask = stage.into();
let mut count = 0;
for sm in ss.iter() {
if sm.1.contains(mask) {
samplers[sm.2 as usize] = sm.0;
count += 1;
}
}
if count != 0 {
self.parser.parse(Command::BindSamplers(stage, samplers));
}
}
}
fn bind_pixel_targets(&mut self, pts: pso::PixelTargetSet<Resources>) {
if let (Some(ref d), Some(ref s)) = (pts.depth, pts.stencil) {
if d != s {
error!("Depth and stencil views have to be the same");
}
}
let view = pts.get_view();
let viewport = D3D11_VIEWPORT {
TopLeftX: 0.0,
TopLeftY: 0.0,
Width: view.0 as f32,
Height: view.1 as f32,
MinDepth: 0.0,
MaxDepth: 1.0,
};
let mut colors = [native::Rtv(ptr::null_mut()); MAX_COLOR_TARGETS];
for i in 0 .. MAX_COLOR_TARGETS {
if let Some(c) = pts.colors[i] {
colors[i] = c;
}
}
let ds = pts.depth.unwrap_or(native::Dsv(ptr::null_mut()));
self.parser.parse(Command::BindPixelTargets(colors, ds));
self.parser.parse(Command::SetViewport(viewport));
}
fn bind_index(&mut self, buf: Buffer, itype: IndexType) {
let format = match itype {
IndexType::U16 => DXGI_FORMAT_R16_UINT,
IndexType::U32 => DXGI_FORMAT_R32_UINT,
};
self.parser.parse(Command::BindIndex(buf, format));
}
fn set_scissor(&mut self, rect: target::Rect) {
self.parser.parse(Command::SetScissor(D3D11_RECT {
left: rect.x as INT,
top: rect.y as INT,
right: (rect.x + rect.w) as INT,
bottom: (rect.y + rect.h) as INT,
}));
}
fn set_ref_values(&mut self, rv: state::RefValues) {
if rv.stencil.0 != rv.stencil.1 {
error!("Unable to set different stencil ref values for front ({}) and back ({})",
rv.stencil.0, rv.stencil.1);
}
self.cache.stencil_ref = rv.stencil.0 as UINT;
self.cache.blend_ref = rv.blend;
}
fn update_buffer(&mut self, buf: Buffer, data: &[u8], offset: usize) {
self.parser.update_buffer(buf, data, offset);
}
fn update_texture(&mut self, tex: Texture, kind: tex::Kind, face: Option<tex::CubeFace>,
data: &[u8], image: tex::RawImageInfo) {
self.parser.update_texture(tex, kind, face, data, image);
}
fn generate_mipmap(&mut self, srv: native::Srv) {
self.parser.parse(Command::GenerateMips(srv));
}
fn clear_color(&mut self, target: native::Rtv, value: command::ClearColor) |
fn clear_depth_stencil(&mut self, target: native::Dsv, depth: Option<target::Depth>,
stencil: Option<target::Stencil>) {
let flags = //warning: magic constants ahead
D3D11_CLEAR_FLAG(if depth.is_some() {1} else {0}) |
D3D11_CLEAR_FLAG(if stencil.is_some() {2} else {0});
self.parser.parse(Command::ClearDepthStencil(target, flags,
depth.unwrap_or_default() as FLOAT,
stencil.unwrap_or_default() as UINT8
));
}
fn call_draw(&mut self, start: VertexCount, count: VertexCount, instances: Option<command::InstanceParams>) {
self.flush();
self.parser.parse(match instances {
Some((ninst, offset)) => Command::DrawInstanced(
count as UINT, ninst as UINT, start as UINT, offset as UINT),
None => Command::Draw(count as UINT, start as UINT),
});
}
fn call_draw_indexed(&mut self, start: VertexCount, count: VertexCount,
base: VertexCount, instances: Option<command::InstanceParams>) {
self.flush();
self.parser.parse(match instances {
Some((ninst, offset)) => Command::DrawIndexedInstanced(
count as UINT, ninst as UINT, start as UINT, base as INT, offset as UINT),
None => Command::DrawIndexed(count as UINT, start as UINT, base as INT),
});
}
}
| {
match value {
command::ClearColor::Float(data) => {
self.parser.parse(Command::ClearColor(target, data));
},
_ => {
error!("Unable to clear int/uint target");
},
}
} | identifier_body |
command.rs | // Copyright 2016 The Gfx-rs Developers.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#![allow(missing_docs)]
use std::ptr;
use winapi::{FLOAT, INT, UINT, UINT8, DXGI_FORMAT,
DXGI_FORMAT_R16_UINT, DXGI_FORMAT_R32_UINT,
D3D11_CLEAR_FLAG, D3D11_PRIMITIVE_TOPOLOGY, D3D11_VIEWPORT, D3D11_RECT,
ID3D11RasterizerState, ID3D11DepthStencilState, ID3D11BlendState};
use core::{command, pso, shade, state, target, texture as tex};
use core::{IndexType, VertexCount};
use core::{MAX_VERTEX_ATTRIBUTES, MAX_CONSTANT_BUFFERS,
MAX_RESOURCE_VIEWS, MAX_UNORDERED_VIEWS,
MAX_SAMPLERS, MAX_COLOR_TARGETS};
use {native, Resources, InputLayout, Buffer, Texture, Pipeline, Program};
/// The place of some data in the data buffer.
#[derive(Clone, Copy, PartialEq, Debug)]
pub struct DataPointer {
offset: u32,
size: u32,
}
pub struct DataBuffer(Vec<u8>);
impl DataBuffer {
/// Create a new empty data buffer.
pub fn new() -> DataBuffer {
DataBuffer(Vec::new())
}
/// Reset the contents.
pub fn reset(&mut self) {
self.0.clear();
}
/// Copy a given vector slice into the buffer.
pub fn add(&mut self, data: &[u8]) -> DataPointer {
self.0.extend_from_slice(data);
DataPointer {
offset: (self.0.len() - data.len()) as u32,
size: data.len() as u32,
}
}
/// Return a reference to a stored data object.
pub fn get(&self, ptr: DataPointer) -> &[u8] {
&self.0[ptr.offset as usize .. (ptr.offset + ptr.size) as usize]
}
}
///Serialized device command.
#[derive(Clone, Copy, Debug)]
pub enum Command {
// states
BindProgram(Program),
BindInputLayout(InputLayout),
BindIndex(Buffer, DXGI_FORMAT),
BindVertexBuffers([native::Buffer; MAX_VERTEX_ATTRIBUTES], [UINT; MAX_VERTEX_ATTRIBUTES], [UINT; MAX_VERTEX_ATTRIBUTES]),
BindConstantBuffers(shade::Stage, [native::Buffer; MAX_CONSTANT_BUFFERS]),
BindShaderResources(shade::Stage, [native::Srv; MAX_RESOURCE_VIEWS]),
BindSamplers(shade::Stage, [native::Sampler; MAX_SAMPLERS]),
BindPixelTargets([native::Rtv; MAX_COLOR_TARGETS], native::Dsv),
SetPrimitive(D3D11_PRIMITIVE_TOPOLOGY),
SetViewport(D3D11_VIEWPORT),
SetScissor(D3D11_RECT),
SetRasterizer(*const ID3D11RasterizerState),
SetDepthStencil(*const ID3D11DepthStencilState, UINT),
SetBlend(*const ID3D11BlendState, [FLOAT; 4], UINT),
// resource updates
UpdateBuffer(Buffer, DataPointer, usize),
UpdateTexture(Texture, tex::Kind, Option<tex::CubeFace>, DataPointer, tex::RawImageInfo),
GenerateMips(native::Srv),
// drawing
ClearColor(native::Rtv, [f32; 4]),
ClearDepthStencil(native::Dsv, D3D11_CLEAR_FLAG, FLOAT, UINT8),
Draw(UINT, UINT),
DrawInstanced(UINT, UINT, UINT, UINT),
DrawIndexed(UINT, UINT, INT),
DrawIndexedInstanced(UINT, UINT, UINT, INT, UINT),
}
unsafe impl Send for Command {}
struct Cache {
attrib_strides: [Option<pso::ElemStride>; MAX_VERTEX_ATTRIBUTES],
rasterizer: *const ID3D11RasterizerState,
depth_stencil: *const ID3D11DepthStencilState,
stencil_ref: UINT,
blend: *const ID3D11BlendState,
blend_ref: [FLOAT; 4],
}
unsafe impl Send for Cache {}
impl Cache {
fn new() -> Cache {
Cache {
attrib_strides: [None; MAX_VERTEX_ATTRIBUTES],
rasterizer: ptr::null(),
depth_stencil: ptr::null(),
stencil_ref: 0,
blend: ptr::null(),
blend_ref: [0.0; 4],
}
}
}
pub struct CommandBuffer<P> {
pub parser: P,
cache: Cache,
}
pub trait Parser: Sized + Send {
fn reset(&mut self);
fn parse(&mut self, Command);
fn update_buffer(&mut self, Buffer, &[u8], usize);
fn update_texture(&mut self, Texture, tex::Kind, Option<tex::CubeFace>, &[u8], tex::RawImageInfo);
}
impl<P: Parser> From<P> for CommandBuffer<P> {
fn from(parser: P) -> CommandBuffer<P> {
CommandBuffer {
parser: parser,
cache: Cache::new(),
}
}
}
impl<P: Parser> CommandBuffer<P> {
fn flush(&mut self) {
let sample_mask = !0; //TODO
self.parser.parse(Command::SetDepthStencil(self.cache.depth_stencil, self.cache.stencil_ref));
self.parser.parse(Command::SetBlend(self.cache.blend, self.cache.blend_ref, sample_mask));
}
}
impl<P: Parser> command::Buffer<Resources> for CommandBuffer<P> {
fn reset(&mut self) {
self.parser.reset();
self.cache = Cache::new();
}
fn bind_pipeline_state(&mut self, pso: Pipeline) {
self.parser.parse(Command::SetPrimitive(pso.topology));
for (stride, ad_option) in self.cache.attrib_strides.iter_mut().zip(pso.attributes.iter()) {
*stride = ad_option.map(|(buf_id, _)| match pso.vertex_buffers[buf_id as usize] {
Some(ref bdesc) => bdesc.stride,
None => {
error!("Unexpected use of buffer id {}", buf_id);
0
},
});
}
if self.cache.rasterizer != pso.rasterizer {
self.cache.rasterizer = pso.rasterizer;
self.parser.parse(Command::SetRasterizer(pso.rasterizer));
}
self.cache.depth_stencil = pso.depth_stencil;
self.cache.blend = pso.blend;
self.parser.parse(Command::BindInputLayout(pso.layout));
self.parser.parse(Command::BindProgram(pso.program));
}
fn bind_vertex_buffers(&mut self, vbs: pso::VertexBufferSet<Resources>) {
//Note: assumes `bind_pipeline_state` is called prior
let mut buffers = [native::Buffer(ptr::null_mut()); MAX_VERTEX_ATTRIBUTES];
let mut strides = [0; MAX_VERTEX_ATTRIBUTES];
let mut offsets = [0; MAX_VERTEX_ATTRIBUTES];
for i in 0 .. MAX_VERTEX_ATTRIBUTES {
match (vbs.0[i], self.cache.attrib_strides[i]) {
(None, Some(stride)) => {
error!("No vertex input provided for slot {} with stride {}", i, stride)
},
(Some((buffer, offset)), Some(stride)) => {
buffers[i] = buffer.0;
strides[i] = stride as UINT;
offsets[i] = offset as UINT;
},
(_, None) => (),
}
}
self.parser.parse(Command::BindVertexBuffers(buffers, strides, offsets));
}
fn bind_constant_buffers(&mut self, cbs: &[pso::ConstantBufferParam<Resources>]) {
for &stage in shade::STAGES.iter() {
let mut buffers = [native::Buffer(ptr::null_mut()); MAX_CONSTANT_BUFFERS];
let mask = stage.into();
let mut count = 0;
for cbuf in cbs.iter() {
if cbuf.1.contains(mask) {
buffers[cbuf.2 as usize] = (cbuf.0).0;
count += 1;
}
}
if count != 0 {
self.parser.parse(Command::BindConstantBuffers(stage, buffers));
}
}
}
fn bind_global_constant(&mut self, _: shade::Location, _: shade::UniformValue) {
error!("Global constants are not supported");
}
fn bind_resource_views(&mut self, rvs: &[pso::ResourceViewParam<Resources>]) {
for &stage in shade::STAGES.iter() {
let mut views = [native::Srv(ptr::null_mut()); MAX_RESOURCE_VIEWS];
let mask = stage.into();
let mut count = 0;
for view in rvs.iter() {
if view.1.contains(mask) {
views[view.2 as usize] = view.0;
count += 1;
}
}
if count != 0 {
self.parser.parse(Command::BindShaderResources(stage, views));
}
}
}
fn bind_unordered_views(&mut self, uvs: &[pso::UnorderedViewParam<Resources>]) {
let mut views = [(); MAX_UNORDERED_VIEWS];
let mut count = 0;
for view in uvs.iter() {
views[view.2 as usize] = view.0;
count += 1;
}
if count != 0 {
unimplemented!()
//self.parser.parse(Command::BindUnorderedAccess(stage, views));
}
}
fn bind_samplers(&mut self, ss: &[pso::SamplerParam<Resources>]) {
for &stage in shade::STAGES.iter() {
let mut samplers = [native::Sampler(ptr::null_mut()); MAX_SAMPLERS];
let mask = stage.into();
let mut count = 0;
for sm in ss.iter() {
if sm.1.contains(mask) {
samplers[sm.2 as usize] = sm.0;
count += 1;
}
}
if count != 0 {
self.parser.parse(Command::BindSamplers(stage, samplers));
}
}
}
fn bind_pixel_targets(&mut self, pts: pso::PixelTargetSet<Resources>) {
if let (Some(ref d), Some(ref s)) = (pts.depth, pts.stencil) {
if d != s {
error!("Depth and stencil views have to be the same");
}
}
let view = pts.get_view();
let viewport = D3D11_VIEWPORT {
TopLeftX: 0.0,
TopLeftY: 0.0,
Width: view.0 as f32,
Height: view.1 as f32,
MinDepth: 0.0,
MaxDepth: 1.0,
};
let mut colors = [native::Rtv(ptr::null_mut()); MAX_COLOR_TARGETS];
for i in 0 .. MAX_COLOR_TARGETS {
if let Some(c) = pts.colors[i] {
colors[i] = c;
}
}
let ds = pts.depth.unwrap_or(native::Dsv(ptr::null_mut()));
self.parser.parse(Command::BindPixelTargets(colors, ds));
self.parser.parse(Command::SetViewport(viewport));
}
fn bind_index(&mut self, buf: Buffer, itype: IndexType) {
let format = match itype {
IndexType::U16 => DXGI_FORMAT_R16_UINT,
IndexType::U32 => DXGI_FORMAT_R32_UINT,
};
self.parser.parse(Command::BindIndex(buf, format));
}
fn set_scissor(&mut self, rect: target::Rect) {
self.parser.parse(Command::SetScissor(D3D11_RECT {
left: rect.x as INT,
top: rect.y as INT,
right: (rect.x + rect.w) as INT,
bottom: (rect.y + rect.h) as INT,
}));
}
fn set_ref_values(&mut self, rv: state::RefValues) {
if rv.stencil.0 != rv.stencil.1 {
error!("Unable to set different stencil ref values for front ({}) and back ({})",
rv.stencil.0, rv.stencil.1);
}
self.cache.stencil_ref = rv.stencil.0 as UINT;
self.cache.blend_ref = rv.blend;
}
fn update_buffer(&mut self, buf: Buffer, data: &[u8], offset: usize) {
self.parser.update_buffer(buf, data, offset);
}
fn update_texture(&mut self, tex: Texture, kind: tex::Kind, face: Option<tex::CubeFace>,
data: &[u8], image: tex::RawImageInfo) {
self.parser.update_texture(tex, kind, face, data, image);
}
fn generate_mipmap(&mut self, srv: native::Srv) {
self.parser.parse(Command::GenerateMips(srv));
}
fn | (&mut self, target: native::Rtv, value: command::ClearColor) {
match value {
command::ClearColor::Float(data) => {
self.parser.parse(Command::ClearColor(target, data));
},
_ => {
error!("Unable to clear int/uint target");
},
}
}
fn clear_depth_stencil(&mut self, target: native::Dsv, depth: Option<target::Depth>,
stencil: Option<target::Stencil>) {
let flags = //warning: magic constants ahead
D3D11_CLEAR_FLAG(if depth.is_some() {1} else {0}) |
D3D11_CLEAR_FLAG(if stencil.is_some() {2} else {0});
self.parser.parse(Command::ClearDepthStencil(target, flags,
depth.unwrap_or_default() as FLOAT,
stencil.unwrap_or_default() as UINT8
));
}
fn call_draw(&mut self, start: VertexCount, count: VertexCount, instances: Option<command::InstanceParams>) {
self.flush();
self.parser.parse(match instances {
Some((ninst, offset)) => Command::DrawInstanced(
count as UINT, ninst as UINT, start as UINT, offset as UINT),
None => Command::Draw(count as UINT, start as UINT),
});
}
fn call_draw_indexed(&mut self, start: VertexCount, count: VertexCount,
base: VertexCount, instances: Option<command::InstanceParams>) {
self.flush();
self.parser.parse(match instances {
Some((ninst, offset)) => Command::DrawIndexedInstanced(
count as UINT, ninst as UINT, start as UINT, base as INT, offset as UINT),
None => Command::DrawIndexed(count as UINT, start as UINT, base as INT),
});
}
}
| clear_color | identifier_name |
command.rs | // Copyright 2016 The Gfx-rs Developers.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#![allow(missing_docs)]
use std::ptr;
use winapi::{FLOAT, INT, UINT, UINT8, DXGI_FORMAT,
DXGI_FORMAT_R16_UINT, DXGI_FORMAT_R32_UINT,
D3D11_CLEAR_FLAG, D3D11_PRIMITIVE_TOPOLOGY, D3D11_VIEWPORT, D3D11_RECT,
ID3D11RasterizerState, ID3D11DepthStencilState, ID3D11BlendState};
use core::{command, pso, shade, state, target, texture as tex};
use core::{IndexType, VertexCount};
use core::{MAX_VERTEX_ATTRIBUTES, MAX_CONSTANT_BUFFERS,
MAX_RESOURCE_VIEWS, MAX_UNORDERED_VIEWS,
MAX_SAMPLERS, MAX_COLOR_TARGETS};
use {native, Resources, InputLayout, Buffer, Texture, Pipeline, Program};
/// The place of some data in the data buffer.
#[derive(Clone, Copy, PartialEq, Debug)]
pub struct DataPointer {
offset: u32,
size: u32,
}
pub struct DataBuffer(Vec<u8>);
impl DataBuffer {
/// Create a new empty data buffer.
pub fn new() -> DataBuffer {
DataBuffer(Vec::new())
}
/// Reset the contents.
pub fn reset(&mut self) {
self.0.clear();
}
/// Copy a given vector slice into the buffer.
pub fn add(&mut self, data: &[u8]) -> DataPointer {
self.0.extend_from_slice(data);
DataPointer {
offset: (self.0.len() - data.len()) as u32,
size: data.len() as u32,
}
}
/// Return a reference to a stored data object.
pub fn get(&self, ptr: DataPointer) -> &[u8] {
&self.0[ptr.offset as usize .. (ptr.offset + ptr.size) as usize]
}
}
///Serialized device command.
#[derive(Clone, Copy, Debug)]
pub enum Command {
// states
BindProgram(Program),
BindInputLayout(InputLayout),
BindIndex(Buffer, DXGI_FORMAT),
BindVertexBuffers([native::Buffer; MAX_VERTEX_ATTRIBUTES], [UINT; MAX_VERTEX_ATTRIBUTES], [UINT; MAX_VERTEX_ATTRIBUTES]),
BindConstantBuffers(shade::Stage, [native::Buffer; MAX_CONSTANT_BUFFERS]),
BindShaderResources(shade::Stage, [native::Srv; MAX_RESOURCE_VIEWS]),
BindSamplers(shade::Stage, [native::Sampler; MAX_SAMPLERS]),
BindPixelTargets([native::Rtv; MAX_COLOR_TARGETS], native::Dsv),
SetPrimitive(D3D11_PRIMITIVE_TOPOLOGY),
SetViewport(D3D11_VIEWPORT),
SetScissor(D3D11_RECT),
SetRasterizer(*const ID3D11RasterizerState),
SetDepthStencil(*const ID3D11DepthStencilState, UINT),
SetBlend(*const ID3D11BlendState, [FLOAT; 4], UINT),
// resource updates
UpdateBuffer(Buffer, DataPointer, usize),
UpdateTexture(Texture, tex::Kind, Option<tex::CubeFace>, DataPointer, tex::RawImageInfo),
GenerateMips(native::Srv),
// drawing
ClearColor(native::Rtv, [f32; 4]),
ClearDepthStencil(native::Dsv, D3D11_CLEAR_FLAG, FLOAT, UINT8),
Draw(UINT, UINT),
DrawInstanced(UINT, UINT, UINT, UINT),
DrawIndexed(UINT, UINT, INT),
DrawIndexedInstanced(UINT, UINT, UINT, INT, UINT),
}
unsafe impl Send for Command {}
struct Cache {
attrib_strides: [Option<pso::ElemStride>; MAX_VERTEX_ATTRIBUTES],
rasterizer: *const ID3D11RasterizerState,
depth_stencil: *const ID3D11DepthStencilState,
stencil_ref: UINT,
blend: *const ID3D11BlendState,
blend_ref: [FLOAT; 4],
}
unsafe impl Send for Cache {}
impl Cache {
fn new() -> Cache {
Cache {
attrib_strides: [None; MAX_VERTEX_ATTRIBUTES],
rasterizer: ptr::null(),
depth_stencil: ptr::null(),
stencil_ref: 0,
blend: ptr::null(),
blend_ref: [0.0; 4],
}
}
}
pub struct CommandBuffer<P> {
pub parser: P,
cache: Cache,
}
pub trait Parser: Sized + Send {
fn reset(&mut self);
fn parse(&mut self, Command);
fn update_buffer(&mut self, Buffer, &[u8], usize);
fn update_texture(&mut self, Texture, tex::Kind, Option<tex::CubeFace>, &[u8], tex::RawImageInfo);
}
impl<P: Parser> From<P> for CommandBuffer<P> {
fn from(parser: P) -> CommandBuffer<P> {
CommandBuffer {
parser: parser,
cache: Cache::new(),
}
}
}
impl<P: Parser> CommandBuffer<P> {
fn flush(&mut self) {
let sample_mask = !0; //TODO
self.parser.parse(Command::SetDepthStencil(self.cache.depth_stencil, self.cache.stencil_ref));
self.parser.parse(Command::SetBlend(self.cache.blend, self.cache.blend_ref, sample_mask));
}
}
impl<P: Parser> command::Buffer<Resources> for CommandBuffer<P> {
fn reset(&mut self) {
self.parser.reset();
self.cache = Cache::new();
}
fn bind_pipeline_state(&mut self, pso: Pipeline) {
self.parser.parse(Command::SetPrimitive(pso.topology));
for (stride, ad_option) in self.cache.attrib_strides.iter_mut().zip(pso.attributes.iter()) {
*stride = ad_option.map(|(buf_id, _)| match pso.vertex_buffers[buf_id as usize] {
Some(ref bdesc) => bdesc.stride,
None => {
error!("Unexpected use of buffer id {}", buf_id);
0
},
});
}
if self.cache.rasterizer != pso.rasterizer {
self.cache.rasterizer = pso.rasterizer;
self.parser.parse(Command::SetRasterizer(pso.rasterizer));
}
self.cache.depth_stencil = pso.depth_stencil;
self.cache.blend = pso.blend;
self.parser.parse(Command::BindInputLayout(pso.layout));
self.parser.parse(Command::BindProgram(pso.program));
}
fn bind_vertex_buffers(&mut self, vbs: pso::VertexBufferSet<Resources>) {
//Note: assumes `bind_pipeline_state` is called prior
let mut buffers = [native::Buffer(ptr::null_mut()); MAX_VERTEX_ATTRIBUTES];
let mut strides = [0; MAX_VERTEX_ATTRIBUTES];
let mut offsets = [0; MAX_VERTEX_ATTRIBUTES];
for i in 0 .. MAX_VERTEX_ATTRIBUTES {
match (vbs.0[i], self.cache.attrib_strides[i]) {
(None, Some(stride)) => {
error!("No vertex input provided for slot {} with stride {}", i, stride)
},
(Some((buffer, offset)), Some(stride)) => {
buffers[i] = buffer.0;
strides[i] = stride as UINT;
offsets[i] = offset as UINT;
},
(_, None) => (),
}
}
self.parser.parse(Command::BindVertexBuffers(buffers, strides, offsets));
}
fn bind_constant_buffers(&mut self, cbs: &[pso::ConstantBufferParam<Resources>]) {
for &stage in shade::STAGES.iter() {
let mut buffers = [native::Buffer(ptr::null_mut()); MAX_CONSTANT_BUFFERS];
let mask = stage.into();
let mut count = 0;
for cbuf in cbs.iter() {
if cbuf.1.contains(mask) {
buffers[cbuf.2 as usize] = (cbuf.0).0;
count += 1;
}
}
if count != 0 {
self.parser.parse(Command::BindConstantBuffers(stage, buffers));
}
}
}
fn bind_global_constant(&mut self, _: shade::Location, _: shade::UniformValue) {
error!("Global constants are not supported");
}
fn bind_resource_views(&mut self, rvs: &[pso::ResourceViewParam<Resources>]) {
for &stage in shade::STAGES.iter() {
let mut views = [native::Srv(ptr::null_mut()); MAX_RESOURCE_VIEWS];
let mask = stage.into();
let mut count = 0;
for view in rvs.iter() {
if view.1.contains(mask) {
views[view.2 as usize] = view.0;
count += 1;
}
}
if count != 0 |
}
}
fn bind_unordered_views(&mut self, uvs: &[pso::UnorderedViewParam<Resources>]) {
let mut views = [(); MAX_UNORDERED_VIEWS];
let mut count = 0;
for view in uvs.iter() {
views[view.2 as usize] = view.0;
count += 1;
}
if count != 0 {
unimplemented!()
//self.parser.parse(Command::BindUnorderedAccess(stage, views));
}
}
fn bind_samplers(&mut self, ss: &[pso::SamplerParam<Resources>]) {
for &stage in shade::STAGES.iter() {
let mut samplers = [native::Sampler(ptr::null_mut()); MAX_SAMPLERS];
let mask = stage.into();
let mut count = 0;
for sm in ss.iter() {
if sm.1.contains(mask) {
samplers[sm.2 as usize] = sm.0;
count += 1;
}
}
if count != 0 {
self.parser.parse(Command::BindSamplers(stage, samplers));
}
}
}
fn bind_pixel_targets(&mut self, pts: pso::PixelTargetSet<Resources>) {
if let (Some(ref d), Some(ref s)) = (pts.depth, pts.stencil) {
if d != s {
error!("Depth and stencil views have to be the same");
}
}
let view = pts.get_view();
let viewport = D3D11_VIEWPORT {
TopLeftX: 0.0,
TopLeftY: 0.0,
Width: view.0 as f32,
Height: view.1 as f32,
MinDepth: 0.0,
MaxDepth: 1.0,
};
let mut colors = [native::Rtv(ptr::null_mut()); MAX_COLOR_TARGETS];
for i in 0 .. MAX_COLOR_TARGETS {
if let Some(c) = pts.colors[i] {
colors[i] = c;
}
}
let ds = pts.depth.unwrap_or(native::Dsv(ptr::null_mut()));
self.parser.parse(Command::BindPixelTargets(colors, ds));
self.parser.parse(Command::SetViewport(viewport));
}
fn bind_index(&mut self, buf: Buffer, itype: IndexType) {
let format = match itype {
IndexType::U16 => DXGI_FORMAT_R16_UINT,
IndexType::U32 => DXGI_FORMAT_R32_UINT,
};
self.parser.parse(Command::BindIndex(buf, format));
}
fn set_scissor(&mut self, rect: target::Rect) {
self.parser.parse(Command::SetScissor(D3D11_RECT {
left: rect.x as INT,
top: rect.y as INT,
right: (rect.x + rect.w) as INT,
bottom: (rect.y + rect.h) as INT,
}));
}
fn set_ref_values(&mut self, rv: state::RefValues) {
if rv.stencil.0 != rv.stencil.1 {
error!("Unable to set different stencil ref values for front ({}) and back ({})",
rv.stencil.0, rv.stencil.1);
}
self.cache.stencil_ref = rv.stencil.0 as UINT;
self.cache.blend_ref = rv.blend;
}
fn update_buffer(&mut self, buf: Buffer, data: &[u8], offset: usize) {
self.parser.update_buffer(buf, data, offset);
}
fn update_texture(&mut self, tex: Texture, kind: tex::Kind, face: Option<tex::CubeFace>,
data: &[u8], image: tex::RawImageInfo) {
self.parser.update_texture(tex, kind, face, data, image);
}
fn generate_mipmap(&mut self, srv: native::Srv) {
self.parser.parse(Command::GenerateMips(srv));
}
fn clear_color(&mut self, target: native::Rtv, value: command::ClearColor) {
match value {
command::ClearColor::Float(data) => {
self.parser.parse(Command::ClearColor(target, data));
},
_ => {
error!("Unable to clear int/uint target");
},
}
}
fn clear_depth_stencil(&mut self, target: native::Dsv, depth: Option<target::Depth>,
stencil: Option<target::Stencil>) {
let flags = //warning: magic constants ahead
D3D11_CLEAR_FLAG(if depth.is_some() {1} else {0}) |
D3D11_CLEAR_FLAG(if stencil.is_some() {2} else {0});
self.parser.parse(Command::ClearDepthStencil(target, flags,
depth.unwrap_or_default() as FLOAT,
stencil.unwrap_or_default() as UINT8
));
}
fn call_draw(&mut self, start: VertexCount, count: VertexCount, instances: Option<command::InstanceParams>) {
self.flush();
self.parser.parse(match instances {
Some((ninst, offset)) => Command::DrawInstanced(
count as UINT, ninst as UINT, start as UINT, offset as UINT),
None => Command::Draw(count as UINT, start as UINT),
});
}
fn call_draw_indexed(&mut self, start: VertexCount, count: VertexCount,
base: VertexCount, instances: Option<command::InstanceParams>) {
self.flush();
self.parser.parse(match instances {
Some((ninst, offset)) => Command::DrawIndexedInstanced(
count as UINT, ninst as UINT, start as UINT, base as INT, offset as UINT),
None => Command::DrawIndexed(count as UINT, start as UINT, base as INT),
});
}
}
| {
self.parser.parse(Command::BindShaderResources(stage, views));
} | conditional_block |
startup.legacy.js | define(['jquery', 'knockout', './router', 'bootstrap', 'knockout-projections', 'knockout-select-on-focus'], function($, ko, router) {
// Components can be packaged as AMD modules, such as the following: | ko.components.register('home-page', { require: 'components/home-page/legacy/home' });
ko.components.register('ko-gmap', { require: 'components/ko-gmap/legacy/ko-gmap' });
ko.components.register('ko-gmap-search', { require: 'components/ko-gmap-search/legacy/ko-gmap-search' });
ko.components.register('gll-map', { require: 'components/gll-map/legacy/gll-map' });
ko.components.register('gll-search', { require: 'components/gll-search/legacy/gll-search' });
ko.components.register('gll-locations', { require: 'components/gll-locations/legacy/gll-locations' });
// [Scaffolded component registrations will be inserted here. To retain this feature, don't remove this comment.]
ko.components.register('gll-card', { template: { require: 'text!components/gll-card/legacy/gll-card.html' } });
// Start the application
ko.applyBindings({ route: router.currentRoute });
}); | random_line_split | |
node.rs | // Copyright 2018 Pants project contributors (see CONTRIBUTORS.md).
// Licensed under the Apache License, Version 2.0 (see LICENSE).
use std::fmt::{Debug, Display};
use std::hash::Hash;
use boxfuture::BoxFuture;
use hashing::Digest;
use futures01::future::Future;
use petgraph::stable_graph;
use crate::entry::Entry;
use crate::Graph;
// 2^32 Nodes ought to be more than enough for anyone!
pub type EntryId = stable_graph::NodeIndex<u32>;
///
/// Defines executing a cacheable/memoizable step within the given NodeContext.
///
/// Note that it is assumed that Nodes are very cheap to clone.
///
pub trait Node: Clone + Debug + Display + Eq + Hash + Send + 'static {
type Context: NodeContext<Node = Self>;
type Item: Clone + Debug + Eq + Send + 'static;
type Error: NodeError;
fn run(self, context: Self::Context) -> BoxFuture<Self::Item, Self::Error>;
///
/// If the given Node output represents an FS operation, returns its Digest.
///
fn digest(result: Self::Item) -> Option<Digest>;
///
/// If the node result is cacheable, return true.
///
fn cacheable(&self, context: &Self::Context) -> bool;
/// Nodes optionally have a user-facing name (distinct from their Debug and Display
/// implementations). This user-facing name is intended to provide high-level information
/// to end users of pants about what computation pants is currently doing. Not all
/// `Node`s need a user-facing name. For `Node`s derived from Python `@rule`s, the
/// user-facing name should be the same as the `name` annotation on the rule decorator.
fn user_facing_name(&self) -> Option<String> {
None
}
}
pub trait NodeError: Clone + Debug + Eq + Send {
///
/// Creates an instance that represents that a Node was invalidated out of the | /// Creates an instance that represents that a Node dependency was cyclic along the given path.
///
fn cyclic(path: Vec<String>) -> Self;
}
///
/// A trait used to visualize Nodes in either DOT/GraphViz format.
///
pub trait NodeVisualizer<N: Node> {
///
/// Returns a GraphViz color scheme name for this visualizer.
///
fn color_scheme(&self) -> &str;
///
/// Returns a GraphViz color name/id within Self::color_scheme for the given Entry.
///
fn color(&mut self, entry: &Entry<N>, context: &N::Context) -> String;
}
///
/// A trait used to visualize Nodes for the purposes of CLI-output tracing.
///
pub trait NodeTracer<N: Node> {
///
/// Returns true if the given Node Result represents the "bottom" of a trace.
///
/// A trace represents a sub-dag of the entire Graph, and a "bottom" Node result represents
/// a boundary that the trace stops before (ie, a bottom Node will not be rendered in the trace,
/// but anything that depends on a bottom Node will be).
///
fn is_bottom(result: Option<Result<N::Item, N::Error>>) -> bool;
///
/// Renders the given result for a trace. The trace will already be indented by `indent`, but
/// an implementer creating a multi-line output would need to indent them as well.
///
fn state_str(indent: &str, result: Option<Result<N::Item, N::Error>>) -> String;
}
///
/// A context passed between Nodes that also stores an EntryId to uniquely identify them.
///
pub trait NodeContext: Clone + Send + 'static {
///
/// The type generated when this Context is cloned for another Node.
///
type Node: Node;
///
/// The Session ID type for this Context. Some Node behaviours (in particular: Node::cacheable)
/// have Session-specific semantics. More than one context object might be associated with a
/// single caller "session".
///
type SessionId: Clone + Debug + Eq;
///
/// Creates a clone of this NodeContext to be used for a different Node.
///
/// To clone a Context for use for the same Node, `Clone` is used directly.
///
fn clone_for(&self, entry_id: EntryId) -> <Self::Node as Node>::Context;
///
/// Returns the SessionId for this Context, which should uniquely identify a caller's run for the
/// purposes of "once per Session" behaviour.
///
fn session_id(&self) -> &Self::SessionId;
///
/// Returns a reference to the Graph for this Context.
///
fn graph(&self) -> &Graph<Self::Node>;
///
/// Spawns a Future on an Executor provided by the context.
///
/// NB: Unlike the futures `Executor` trait itself, this implementation _must_ spawn the work
/// on another thread, as it is called from within the Graph lock.
///
fn spawn<F>(&self, future: F)
where
F: Future<Item = (), Error = ()> + Send + 'static;
} | /// Graph (generally while running).
///
fn invalidated() -> Self;
/// | random_line_split |
node.rs | // Copyright 2018 Pants project contributors (see CONTRIBUTORS.md).
// Licensed under the Apache License, Version 2.0 (see LICENSE).
use std::fmt::{Debug, Display};
use std::hash::Hash;
use boxfuture::BoxFuture;
use hashing::Digest;
use futures01::future::Future;
use petgraph::stable_graph;
use crate::entry::Entry;
use crate::Graph;
// 2^32 Nodes ought to be more than enough for anyone!
pub type EntryId = stable_graph::NodeIndex<u32>;
///
/// Defines executing a cacheable/memoizable step within the given NodeContext.
///
/// Note that it is assumed that Nodes are very cheap to clone.
///
pub trait Node: Clone + Debug + Display + Eq + Hash + Send + 'static {
type Context: NodeContext<Node = Self>;
type Item: Clone + Debug + Eq + Send + 'static;
type Error: NodeError;
fn run(self, context: Self::Context) -> BoxFuture<Self::Item, Self::Error>;
///
/// If the given Node output represents an FS operation, returns its Digest.
///
fn digest(result: Self::Item) -> Option<Digest>;
///
/// If the node result is cacheable, return true.
///
fn cacheable(&self, context: &Self::Context) -> bool;
/// Nodes optionally have a user-facing name (distinct from their Debug and Display
/// implementations). This user-facing name is intended to provide high-level information
/// to end users of pants about what computation pants is currently doing. Not all
/// `Node`s need a user-facing name. For `Node`s derived from Python `@rule`s, the
/// user-facing name should be the same as the `name` annotation on the rule decorator.
fn | (&self) -> Option<String> {
None
}
}
pub trait NodeError: Clone + Debug + Eq + Send {
///
/// Creates an instance that represents that a Node was invalidated out of the
/// Graph (generally while running).
///
fn invalidated() -> Self;
///
/// Creates an instance that represents that a Node dependency was cyclic along the given path.
///
fn cyclic(path: Vec<String>) -> Self;
}
///
/// A trait used to visualize Nodes in either DOT/GraphViz format.
///
pub trait NodeVisualizer<N: Node> {
///
/// Returns a GraphViz color scheme name for this visualizer.
///
fn color_scheme(&self) -> &str;
///
/// Returns a GraphViz color name/id within Self::color_scheme for the given Entry.
///
fn color(&mut self, entry: &Entry<N>, context: &N::Context) -> String;
}
///
/// A trait used to visualize Nodes for the purposes of CLI-output tracing.
///
pub trait NodeTracer<N: Node> {
///
/// Returns true if the given Node Result represents the "bottom" of a trace.
///
/// A trace represents a sub-dag of the entire Graph, and a "bottom" Node result represents
/// a boundary that the trace stops before (ie, a bottom Node will not be rendered in the trace,
/// but anything that depends on a bottom Node will be).
///
fn is_bottom(result: Option<Result<N::Item, N::Error>>) -> bool;
///
/// Renders the given result for a trace. The trace will already be indented by `indent`, but
/// an implementer creating a multi-line output would need to indent them as well.
///
fn state_str(indent: &str, result: Option<Result<N::Item, N::Error>>) -> String;
}
///
/// A context passed between Nodes that also stores an EntryId to uniquely identify them.
///
pub trait NodeContext: Clone + Send + 'static {
///
/// The type generated when this Context is cloned for another Node.
///
type Node: Node;
///
/// The Session ID type for this Context. Some Node behaviours (in particular: Node::cacheable)
/// have Session-specific semantics. More than one context object might be associated with a
/// single caller "session".
///
type SessionId: Clone + Debug + Eq;
///
/// Creates a clone of this NodeContext to be used for a different Node.
///
/// To clone a Context for use for the same Node, `Clone` is used directly.
///
fn clone_for(&self, entry_id: EntryId) -> <Self::Node as Node>::Context;
///
/// Returns the SessionId for this Context, which should uniquely identify a caller's run for the
/// purposes of "once per Session" behaviour.
///
fn session_id(&self) -> &Self::SessionId;
///
/// Returns a reference to the Graph for this Context.
///
fn graph(&self) -> &Graph<Self::Node>;
///
/// Spawns a Future on an Executor provided by the context.
///
/// NB: Unlike the futures `Executor` trait itself, this implementation _must_ spawn the work
/// on another thread, as it is called from within the Graph lock.
///
fn spawn<F>(&self, future: F)
where
F: Future<Item = (), Error = ()> + Send + 'static;
}
| user_facing_name | identifier_name |
node.rs | // Copyright 2018 Pants project contributors (see CONTRIBUTORS.md).
// Licensed under the Apache License, Version 2.0 (see LICENSE).
use std::fmt::{Debug, Display};
use std::hash::Hash;
use boxfuture::BoxFuture;
use hashing::Digest;
use futures01::future::Future;
use petgraph::stable_graph;
use crate::entry::Entry;
use crate::Graph;
// 2^32 Nodes ought to be more than enough for anyone!
pub type EntryId = stable_graph::NodeIndex<u32>;
///
/// Defines executing a cacheable/memoizable step within the given NodeContext.
///
/// Note that it is assumed that Nodes are very cheap to clone.
///
pub trait Node: Clone + Debug + Display + Eq + Hash + Send + 'static {
type Context: NodeContext<Node = Self>;
type Item: Clone + Debug + Eq + Send + 'static;
type Error: NodeError;
fn run(self, context: Self::Context) -> BoxFuture<Self::Item, Self::Error>;
///
/// If the given Node output represents an FS operation, returns its Digest.
///
fn digest(result: Self::Item) -> Option<Digest>;
///
/// If the node result is cacheable, return true.
///
fn cacheable(&self, context: &Self::Context) -> bool;
/// Nodes optionally have a user-facing name (distinct from their Debug and Display
/// implementations). This user-facing name is intended to provide high-level information
/// to end users of pants about what computation pants is currently doing. Not all
/// `Node`s need a user-facing name. For `Node`s derived from Python `@rule`s, the
/// user-facing name should be the same as the `name` annotation on the rule decorator.
fn user_facing_name(&self) -> Option<String> |
}
pub trait NodeError: Clone + Debug + Eq + Send {
///
/// Creates an instance that represents that a Node was invalidated out of the
/// Graph (generally while running).
///
fn invalidated() -> Self;
///
/// Creates an instance that represents that a Node dependency was cyclic along the given path.
///
fn cyclic(path: Vec<String>) -> Self;
}
///
/// A trait used to visualize Nodes in either DOT/GraphViz format.
///
pub trait NodeVisualizer<N: Node> {
///
/// Returns a GraphViz color scheme name for this visualizer.
///
fn color_scheme(&self) -> &str;
///
/// Returns a GraphViz color name/id within Self::color_scheme for the given Entry.
///
fn color(&mut self, entry: &Entry<N>, context: &N::Context) -> String;
}
///
/// A trait used to visualize Nodes for the purposes of CLI-output tracing.
///
pub trait NodeTracer<N: Node> {
///
/// Returns true if the given Node Result represents the "bottom" of a trace.
///
/// A trace represents a sub-dag of the entire Graph, and a "bottom" Node result represents
/// a boundary that the trace stops before (ie, a bottom Node will not be rendered in the trace,
/// but anything that depends on a bottom Node will be).
///
fn is_bottom(result: Option<Result<N::Item, N::Error>>) -> bool;
///
/// Renders the given result for a trace. The trace will already be indented by `indent`, but
/// an implementer creating a multi-line output would need to indent them as well.
///
fn state_str(indent: &str, result: Option<Result<N::Item, N::Error>>) -> String;
}
///
/// A context passed between Nodes that also stores an EntryId to uniquely identify them.
///
pub trait NodeContext: Clone + Send + 'static {
///
/// The type generated when this Context is cloned for another Node.
///
type Node: Node;
///
/// The Session ID type for this Context. Some Node behaviours (in particular: Node::cacheable)
/// have Session-specific semantics. More than one context object might be associated with a
/// single caller "session".
///
type SessionId: Clone + Debug + Eq;
///
/// Creates a clone of this NodeContext to be used for a different Node.
///
/// To clone a Context for use for the same Node, `Clone` is used directly.
///
fn clone_for(&self, entry_id: EntryId) -> <Self::Node as Node>::Context;
///
/// Returns the SessionId for this Context, which should uniquely identify a caller's run for the
/// purposes of "once per Session" behaviour.
///
fn session_id(&self) -> &Self::SessionId;
///
/// Returns a reference to the Graph for this Context.
///
fn graph(&self) -> &Graph<Self::Node>;
///
/// Spawns a Future on an Executor provided by the context.
///
/// NB: Unlike the futures `Executor` trait itself, this implementation _must_ spawn the work
/// on another thread, as it is called from within the Graph lock.
///
fn spawn<F>(&self, future: F)
where
F: Future<Item = (), Error = ()> + Send + 'static;
}
| {
None
} | identifier_body |
default.py | # -*- coding: utf-8 -*-
'''
Template Add-on
Copyright (C) 2016 Demo
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import urllib2, urllib, xbmcgui, xbmcplugin, xbmcaddon, xbmc, re, sys, os
try:
import json
except:
import simplejson as json
import yt
ADDON_NAME = 'Carrera'
addon_id = 'plugin.video.Carrera'
Base_Url = 'http://herovision.x10host.com/carrera/'
Main_Menu_File_Name = 'main.php'
search_filenames = ['sv1','teh','s5']
########################################################################################
### FAVOURITES SECTION IS NOT THIS AUTHORS CODE, I COULD NOT GET IT TO REMOVE FAVOURITES SO ALL CREDIT DUE TO THEM, SORRY IM NOT SURE WHERE IT CAME FROM BUT GOOD WORK :) ###
ADDON = xbmcaddon.Addon(id=addon_id)
ADDON_PATH = xbmc.translatePath('special://home/addons/'+addon_id)
ICON = ADDON_PATH + 'icon.png'
FANART = ADDON_PATH + 'fanart.jpg'
PATH = 'Carrera'
VERSION = '0.0.1'
Dialog = xbmcgui.Dialog()
addon_data = xbmc.translatePath('special://home/userdata/addon_data/'+addon_id+'/')
favorites = os.path.join(addon_data, 'favorites.txt')
watched = addon_data + 'watched.txt'
source_file = Base_Url + 'source_file.php'
debug = ADDON.getSetting('debug')
if os.path.exists(addon_data)==False:
os.makedirs(addon_data)
if not os.path.exists(watched):
open(watched,'w+')
if os.path.exists(favorites)==True:
FAV = open(favorites).read()
else: FAV = []
watched_read = open(watched).read()
def Main_Menu():
OPEN = Open_Url(Base_Url+Main_Menu_File_Name)
Regex = re.compile('<a href="(.+?)" target="_blank"><img src="(.+?)" style="max-width:200px;" /><description = "(.+?)" /><background = "(.+?)" </background></a><br><b>(.+?)</b>').findall(OPEN)
for url,icon,desc,fanart,name in Regex:
if name == '[COLORskyblue]F[COLORblue]avourites[/COLOR]':
Menu(name,url,6,icon,fanart,desc)
elif 'php' in url:
Menu(name,url,1,icon,fanart,desc)
elif name == '[COLORskyblue]S[COLORblue]earch[/COLOR]':
Menu('[COLORskyblue]S[COLORblue]earch[/COLOR]',url,3,icon,fanart,desc)
elif name == '[COLORskyblue]i[COLORblue]dex[/COLOR]':
Menu('[COLORskyblue]O[COLORblue]nline Lists[/COLOR]',url,10,icon,fanart,desc)
else:
Play(name,url,2,icon,fanart,desc)
setView('tvshows', 'Media Info 3')
def Second_Menu(url):
|
def index_Menu():
#Menu('Favourites','',5,'','','','','')
Menu('List of Index\'s','',10,'','','')
# Menu('Search','',6,ICON,FANART,'','','')
# Menu('[COLORred]Press here to add a source url[/COLOR] ','',2,'','','','','')
def Index_List():
OPEN = Open_Url(source_file)
Regex = re.compile('url="(.+?)">name="(.+?)"').findall(OPEN)
for url,name in Regex:
Menu(name,url,8,'','','')
#####################################MAIN REGEX LOOP ###############################
def Main_Loop(url):
HTML = Open_Url(url)
match = re.compile('<a href="(.+?)">(.+?)</a>').findall(HTML)
for url2,name in match:
url3 = url + url2
if '..' in url3:
pass
elif 'rar' in url3:
pass
elif 'jpg' in url3:
pass
elif 'vtx' in url3:
pass
elif 'srt' in url3:
pass
elif 'C=' in url2:
pass
elif '/' in url2:
Menu((name).replace('/',''),url3,8,ICON,FANART,'','','')
else:
Clean_name(name,url3)
################################### TIDY UP NAME #############################
def Clean_name(name,url3):
name1 = (name).replace('S01E','S01 E').replace('(MovIran).mkv','').replace('The.Walking.Dead','').replace('.mkv','').replace('Tehmovies.com.mkv','').replace('Nightsdl','').replace('Ganool','')
name2=(name1).replace('.',' ').replace(' (ParsFilm).mkv','').replace('_TehMovies.Com.mkv','').replace(' (SaberFun.IR).mkv','').replace('[UpFilm].mkv','').replace('(Bia2Movies)','')
name3=(name2).replace('.mkv','').replace('.Film2Movie_INFO.mkv','').replace('.HEVC.Film2Movie_INFO.mkv','').replace('.ParsFilm.mkv ','').replace('(SaberFunIR)','')
name4=(name3).replace('.INTERNAL.','').replace('.Film2Movie_INFO.mkv','').replace('.web-dl.Tehmovies.net.mkv','').replace('S01E06','S01 E06').replace('S01E07','S01 E07')
name5=(name4).replace('S01E08','S01 E08').replace('S01E09','S01 E09').replace('S01E10','S01 E10').replace('.Tehmovies.net','').replace('.WEBRip.Tehmovies.com.mkv','')
name6=(name5).replace('.mp4','').replace('.mkv','').replace('.Tehmovies.ir','').replace('x265HEVC','').replace('Film2Movie_INFO','').replace('Tehmovies.com.mkv','')
name7=(name6).replace(' (ParsFilm)','').replace('Tehmovies.ir.mkv','').replace('.480p',' 480p').replace('.WEBrip','').replace('.web-dl','').replace('.WEB-DL','')
name8=(name7).replace('.','').replace('.Tehmovies.com','').replace('480p.Tehmovies.net</',' 480p').replace('720p.Tehmovies.net','720p').replace('.480p',' 480p')
name9=(name8).replace('.480p.WEB-DL',' 480p').replace('.mkv','').replace('.INTERNAL.','').replace('720p',' 720p').replace('.Tehmovi..>','').replace('.Tehmovies.net.mkv','')
name10=(name9).replace('..720p',' 720p').replace('.REPACK.Tehmovies..>','').replace('.Tehmovies.com.mkv','').replace('.Tehmovies..>','').replace('Tehmovies.ir..>','')
name11=(name10).replace('Tehmovies.ne..>','').replace('.HDTV.x264-mRs','').replace('...>','').replace('.Tehmovies...>','').replace('.Tehmovies.com.mp4','')
name12=(name11).replace('.Tehmovies.com.mp4','').replace('_MovieFarsi','').replace('_MovieFar','').replace('_com','').replace('>','').replace('avi','').replace('(1)','')
name13=(name12).replace('(2)','').replace('cd 2','').replace('cd 1','').replace('-dos-xvid','').replace('divx','').replace('Xvid','').replace('DVD','').replace('DVDrip','')
name14=(name13).replace('DvDrip-aXXo','').replace('[','').replace(']','').replace('(','').replace(')','').replace('XviD-TLF-','').replace('CD1','').replace('CD2','')
name15=(name14).replace('CD3','').replace('mp4','').replace('&','&').replace('HDRip','').replace('-','').replace(' ',' ').replace('xvid','').replace('1080p','')
name16=(name15).replace('1970','').replace('1971','').replace('1972','').replace('1973','').replace('1974','').replace('1975','').replace('1976','').replace('1977','')
name17=(name16).replace('1978','').replace('1979','').replace('1980','').replace('1981','').replace('1982','').replace('1983','').replace('1984','').replace('1985','')
name18=(name17).replace('1986','').replace('1987','').replace('1988','').replace('1989','').replace('1990','').replace('1991','').replace('1992','').replace('1993','')
name19=(name18).replace('1994','').replace('1995','').replace('1996','').replace('1997','').replace('1998','').replace('1999','').replace('2000','').replace('2001','')
name20=(name19).replace('2002','').replace('2003','').replace('2004','').replace('2005','').replace('2006','').replace('2007','').replace('2008','').replace('2009','')
name21=(name20).replace('2010','').replace('2011','').replace('2012','').replace('2013','').replace('2014','').replace('2015','').replace('2016','').replace('720p','')
name22=(name21).replace('360p','').replace(' ',' ').replace('BluRay','').replace('rip','').replace('WEBDL','').replace('s01','').replace('s02','').replace('S02','')
name23=(name22).replace('s03','').replace('s04','').replace('s05','').replace('s06','').replace('s07','').replace('s08','').replace('s09','').replace('S01','')
name24=(name23).replace('S03','').replace('S04',' ').replace('S05','').replace('S06','').replace('S07','').replace('S08','').replace('S09','').replace('E01','')
name25=(name24).replace('E02','').replace('E03','').replace('E04','').replace('E05','').replace('E06','').replace('E07','').replace('E08','').replace('E09','').replace('e01','')
name25=(name24).replace('e02','').replace('e03','').replace('e04','').replace('e05','').replace('e06','').replace('e07','').replace('e08','').replace('e09','').replace('e01','')
clean_name = name15
search_name = name25
#if ADDON.getSetting('Data')=='true':
# Imdb_Scrape(url3,clean_name,search_name)
#if ADDON.getSetting('Data')=='false':
Play(clean_name,url3,2,ICON,FANART,'','','')
def Search():
Search_Name = Dialog.input('Search', type=xbmcgui.INPUT_ALPHANUM)
Search_Title = Search_Name.lower()
if Search_Title == '':
pass
else:
for file_Name in search_filenames:
search_URL = Base_Url + file_Name + '.php'
OPEN = Open_Url(search_URL)
if OPEN != 'Opened':
Regex = re.compile('<a href="(.+?)" target="_blank"><img src="(.+?)" style="max-width:200px;" /><description = "(.+?)" /><background = "(.+?)" </background></a><br><b>(.+?)</b>').findall(OPEN)
for url,icon,desc,fanart,name in Regex:
if Search_Title in name.lower():
Watched = re.compile('item="(.+?)"\n').findall(str(watched_read))
for item in Watched:
if item == url:
name = '[COLORred]* [/COLOR]'+(name).replace('[COLORred]* [/COLOR][COLORred]* [/COLOR]','[COLORred]* [/COLOR]')
print_text_file = open(watched,"a")
print_text_file.write('item="'+name+'"\n')
print_text_file.close
if 'php' in url:
Menu(name,url,1,icon,fanart,desc)
else:
Play(name,url,2,icon,fanart,desc)
setView('tvshows', 'Media Info 3')
####################################################################PROCESSES###################################################
def Open_Url(url):
req = urllib2.Request(url)
req.add_header('User-Agent', 'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.9.0.3) Gecko/2008092417 Firefox/3.0.3')
response = ''
link = ''
try:
response = urllib2.urlopen(req)
link=response.read()
response.close()
except: pass
if link != '':
return link
else:
link = 'Opened'
return link
def setView(content, viewType):
if content:
xbmcplugin.setContent(int(sys.argv[1]), content)
def Menu(name,url,mode,iconimage,fanart,description,showcontext=True,allinfo={}):
u=sys.argv[0]+"?url="+urllib.quote_plus(url)+"&mode="+str(mode)+"&name="+urllib.quote_plus(name)
ok=True
liz=xbmcgui.ListItem(name, iconImage="DefaultFolder.png", thumbnailImage=iconimage)
liz.setInfo( type="Video", infoLabels={ "Title": name } )
liz.setProperty( "Fanart_Image", fanart )
if showcontext:
contextMenu = []
if showcontext == 'fav':
contextMenu.append(('Remove from '+ADDON_NAME+' Favorites','XBMC.RunPlugin(%s?mode=5&name=%s)'
%(sys.argv[0], urllib.quote_plus(name))))
if not name in FAV:
contextMenu.append(('Add to '+ADDON_NAME+' Favorites','XBMC.RunPlugin(%s?mode=4&name=%s&url=%s&iconimage=%s&fav_mode=%s)'
%(sys.argv[0], urllib.quote_plus(name), urllib.quote_plus(url), urllib.quote_plus(iconimage), mode)))
liz.addContextMenuItems(contextMenu)
ok=xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]),url=u,listitem=liz,isFolder=True)
return ok
xbmcplugin.endOfDirectory(int(sys.argv[1]))
def Play(name,url,mode,iconimage,fanart,description,showcontext=True,allinfo={}):
u=sys.argv[0]+"?url="+urllib.quote_plus(url)+"&mode="+str(mode)+"&name="+urllib.quote_plus(name)
ok=True
liz=xbmcgui.ListItem(name, iconImage="DefaultFolder.png", thumbnailImage=iconimage)
liz.setInfo( type="Video", infoLabels={ "Title": name } )
liz.setProperty( "Fanart_Image", fanart )
if showcontext:
contextMenu = []
if showcontext == 'fav':
contextMenu.append(('Remove from '+ADDON_NAME+' Favorites','XBMC.RunPlugin(%s?mode=5&name=%s)'
%(sys.argv[0], urllib.quote_plus(name))))
if not name in FAV:
contextMenu.append(('Add to '+ADDON_NAME+' Favorites','XBMC.RunPlugin(%s?mode=4&name=%s&url=%s&iconimage=%s&fav_mode=%s)'
%(sys.argv[0], urllib.quote_plus(name), urllib.quote_plus(url), urllib.quote_plus(iconimage), mode)))
liz.addContextMenuItems(contextMenu)
ok=xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]),url=u,listitem=liz,isFolder=False)
return ok
xbmcplugin.endOfDirectory(int(sys.argv[1]))
def GetPlayerCore():
try:
PlayerMethod=getSet("core-player")
if (PlayerMethod=='DVDPLAYER'): PlayerMeth=xbmc.PLAYER_CORE_DVDPLAYER
elif (PlayerMethod=='MPLAYER'): PlayerMeth=xbmc.PLAYER_CORE_MPLAYER
elif (PlayerMethod=='PAPLAYER'): PlayerMeth=xbmc.PLAYER_CORE_PAPLAYER
else: PlayerMeth=xbmc.PLAYER_CORE_AUTO
except: PlayerMeth=xbmc.PLAYER_CORE_AUTO
return PlayerMeth
return True
xbmcplugin.endOfDirectory(int(sys.argv[1]))
def resolve(url):
print_text_file = open(watched,"a")
print_text_file.write('item="'+url+'"\n')
print_text_file.close
play=xbmc.Player(GetPlayerCore())
import urlresolver
try: play.play(url)
except: pass
xbmcplugin.endOfDirectory(int(sys.argv[1]))
def addon_log(string):
if debug == 'true':
xbmc.log("["+ADDON_NAME+"]: %s" %(addon_version, string))
def addFavorite(name,url,iconimage,fanart,mode,playlist=None,regexs=None):
favList = []
try:
# seems that after
name = name.encode('utf-8', 'ignore')
except:
pass
if os.path.exists(favorites)==False:
addon_log('Making Favorites File')
favList.append((name,url,iconimage,fanart,mode,playlist,regexs))
a = open(favorites, "w")
a.write(json.dumps(favList))
a.close()
else:
addon_log('Appending Favorites')
a = open(favorites).read()
data = json.loads(a)
data.append((name,url,iconimage,fanart,mode))
b = open(favorites, "w")
b.write(json.dumps(data))
b.close()
def getFavorites():
if os.path.exists(favorites)==False:
favList = []
addon_log('Making Favorites File')
favList.append(('[COLORskyblue]C[COLORblue]arrera Favourites Section[/COLOR]','','','','','',''))
a = open(favorites, "w")
a.write(json.dumps(favList))
a.close()
else:
items = json.loads(open(favorites).read())
total = len(items)
for i in items:
name = i[0]
url = i[1]
iconimage = i[2]
try:
fanArt = i[3]
if fanArt == None:
raise
except:
if ADDON.getSetting('use_thumb') == "true":
fanArt = iconimage
else:
fanArt = fanart
try: playlist = i[5]
except: playlist = None
try: regexs = i[6]
except: regexs = None
if i[4] == 0:
Menu(name,url,'',iconimage,fanart,'','fav')
else:
Menu(name,url,i[4],iconimage,fanart,'','fav')
def rmFavorite(name):
data = json.loads(open(favorites).read())
for index in range(len(data)):
if data[index][0]==name:
del data[index]
b = open(favorites, "w")
b.write(json.dumps(data))
b.close()
break
xbmc.executebuiltin("XBMC.Container.Refresh")
def get_params():
param=[]
paramstring=sys.argv[2]
if len(paramstring)>=2:
params=sys.argv[2]
cleanedparams=params.replace('?','')
if (params[len(params)-1]=='/'):
params=params[0:len(params)-2]
pairsofparams=cleanedparams.split('&')
param={}
for i in range(len(pairsofparams)):
splitparams={}
splitparams=pairsofparams[i].split('=')
if (len(splitparams))==2:
param[splitparams[0]]=splitparams[1]
return param
params=get_params()
url=None
name=None
iconimage=None
mode=None
fanart=None
description=None
fav_mode=None
try:
fav_mode=int(params["fav_mode"])
except:
pass
try:
url=urllib.unquote_plus(params["url"])
except:
pass
try:
name=urllib.unquote_plus(params["name"])
except:
pass
try:
iconimage=urllib.unquote_plus(params["iconimage"])
except:
pass
try:
mode=int(params["mode"])
except:
pass
try:
fanart=urllib.unquote_plus(params["fanart"])
except:
pass
try:
description=urllib.unquote_plus(params["description"])
except:
pass
print str(PATH)+': '+str(VERSION)
print "Mode: "+str(mode)
print "URL: "+str(url)
print "Name: "+str(name)
print "IconImage: "+str(iconimage)
#####################################################END PROCESSES##############################################################
if mode == None: Main_Menu()
elif mode == 1 : Second_Menu(url)
elif mode == 2 :
if 'youtube' in url:
url = (url).replace('https://www.youtube.com/watch?v=','').replace('http://www.youtube.com/watch?v=','')
yt.PlayVideo(url)
else:
resolve(url)
elif mode == 3 : Search()
elif mode==4:
addon_log("addFavorite")
try:
name = name.split('\\ ')[1]
except:
pass
try:
name = name.split(' - ')[0]
except:
pass
addFavorite(name,url,iconimage,fanart,fav_mode)
elif mode==5:
addon_log("rmFavorite")
try:
name = name.split('\\ ')[1]
except:
pass
try:
name = name.split(' - ')[0]
except:
pass
rmFavorite(name)
elif mode==6:
addon_log("getFavorites")
getFavorites()
elif mode == 7 : index_Menu()
elif mode == 8 : Main_Loop(url)
elif mode == 9 : Source_File()
elif mode ==10 : Index_List()
xbmcplugin.addSortMethod(int(sys.argv[1]), 1)
xbmcplugin.endOfDirectory(int(sys.argv[1]))
| OPEN = Open_Url(url)
Regex = re.compile('<a href="(.+?)" target="_blank"><img src="(.+?)" style="max-width:200px;" /><description = "(.+?)" /><background = "(.+?)" </background></a><br><b>(.+?)</b>').findall(OPEN)
for url,icon,desc,fanart,name in Regex:
Watched = re.compile('item="(.+?)"\n').findall(str(watched_read))
for item in Watched:
if item == url:
name = '[COLORred]* [/COLOR]'+(name).replace('[COLORred]* [/COLOR][COLORred]* [/COLOR]','[COLORred]* [/COLOR]')
print_text_file = open(watched,"a")
print_text_file.write('item="'+name+'"\n')
print_text_file.close
if 'php' in url:
Menu(name,url,1,icon,fanart,desc)
else:
Play(name,url,2,icon,fanart,desc)
setView('tvshows', 'Media Info 3') | identifier_body |
default.py | # -*- coding: utf-8 -*-
'''
Template Add-on
Copyright (C) 2016 Demo
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import urllib2, urllib, xbmcgui, xbmcplugin, xbmcaddon, xbmc, re, sys, os
try:
import json
except:
import simplejson as json
import yt
ADDON_NAME = 'Carrera'
addon_id = 'plugin.video.Carrera'
Base_Url = 'http://herovision.x10host.com/carrera/'
Main_Menu_File_Name = 'main.php'
search_filenames = ['sv1','teh','s5']
########################################################################################
### FAVOURITES SECTION IS NOT THIS AUTHORS CODE, I COULD NOT GET IT TO REMOVE FAVOURITES SO ALL CREDIT DUE TO THEM, SORRY IM NOT SURE WHERE IT CAME FROM BUT GOOD WORK :) ###
ADDON = xbmcaddon.Addon(id=addon_id)
ADDON_PATH = xbmc.translatePath('special://home/addons/'+addon_id)
ICON = ADDON_PATH + 'icon.png'
FANART = ADDON_PATH + 'fanart.jpg'
PATH = 'Carrera'
VERSION = '0.0.1'
Dialog = xbmcgui.Dialog()
addon_data = xbmc.translatePath('special://home/userdata/addon_data/'+addon_id+'/')
favorites = os.path.join(addon_data, 'favorites.txt')
watched = addon_data + 'watched.txt'
source_file = Base_Url + 'source_file.php'
debug = ADDON.getSetting('debug')
if os.path.exists(addon_data)==False:
os.makedirs(addon_data)
if not os.path.exists(watched):
open(watched,'w+')
if os.path.exists(favorites)==True:
FAV = open(favorites).read()
else: FAV = []
watched_read = open(watched).read()
def Main_Menu():
OPEN = Open_Url(Base_Url+Main_Menu_File_Name)
Regex = re.compile('<a href="(.+?)" target="_blank"><img src="(.+?)" style="max-width:200px;" /><description = "(.+?)" /><background = "(.+?)" </background></a><br><b>(.+?)</b>').findall(OPEN)
for url,icon,desc,fanart,name in Regex:
if name == '[COLORskyblue]F[COLORblue]avourites[/COLOR]':
Menu(name,url,6,icon,fanart,desc)
elif 'php' in url:
Menu(name,url,1,icon,fanart,desc)
elif name == '[COLORskyblue]S[COLORblue]earch[/COLOR]':
Menu('[COLORskyblue]S[COLORblue]earch[/COLOR]',url,3,icon,fanart,desc)
elif name == '[COLORskyblue]i[COLORblue]dex[/COLOR]':
Menu('[COLORskyblue]O[COLORblue]nline Lists[/COLOR]',url,10,icon,fanart,desc)
else:
Play(name,url,2,icon,fanart,desc)
setView('tvshows', 'Media Info 3')
def Second_Menu(url):
OPEN = Open_Url(url)
Regex = re.compile('<a href="(.+?)" target="_blank"><img src="(.+?)" style="max-width:200px;" /><description = "(.+?)" /><background = "(.+?)" </background></a><br><b>(.+?)</b>').findall(OPEN)
for url,icon,desc,fanart,name in Regex:
Watched = re.compile('item="(.+?)"\n').findall(str(watched_read))
for item in Watched:
if item == url:
name = '[COLORred]* [/COLOR]'+(name).replace('[COLORred]* [/COLOR][COLORred]* [/COLOR]','[COLORred]* [/COLOR]')
print_text_file = open(watched,"a")
print_text_file.write('item="'+name+'"\n')
print_text_file.close
if 'php' in url:
Menu(name,url,1,icon,fanart,desc)
else:
Play(name,url,2,icon,fanart,desc)
setView('tvshows', 'Media Info 3')
def index_Menu():
#Menu('Favourites','',5,'','','','','')
Menu('List of Index\'s','',10,'','','')
# Menu('Search','',6,ICON,FANART,'','','')
# Menu('[COLORred]Press here to add a source url[/COLOR] ','',2,'','','','','')
def Index_List():
OPEN = Open_Url(source_file)
Regex = re.compile('url="(.+?)">name="(.+?)"').findall(OPEN)
for url,name in Regex:
Menu(name,url,8,'','','')
#####################################MAIN REGEX LOOP ###############################
def Main_Loop(url):
HTML = Open_Url(url)
match = re.compile('<a href="(.+?)">(.+?)</a>').findall(HTML)
for url2,name in match:
url3 = url + url2
if '..' in url3:
pass
elif 'rar' in url3:
pass
elif 'jpg' in url3:
pass
elif 'vtx' in url3:
pass
elif 'srt' in url3:
pass
elif 'C=' in url2:
pass
elif '/' in url2:
Menu((name).replace('/',''),url3,8,ICON,FANART,'','','')
else:
Clean_name(name,url3)
################################### TIDY UP NAME #############################
def Clean_name(name,url3):
name1 = (name).replace('S01E','S01 E').replace('(MovIran).mkv','').replace('The.Walking.Dead','').replace('.mkv','').replace('Tehmovies.com.mkv','').replace('Nightsdl','').replace('Ganool','')
name2=(name1).replace('.',' ').replace(' (ParsFilm).mkv','').replace('_TehMovies.Com.mkv','').replace(' (SaberFun.IR).mkv','').replace('[UpFilm].mkv','').replace('(Bia2Movies)','')
name3=(name2).replace('.mkv','').replace('.Film2Movie_INFO.mkv','').replace('.HEVC.Film2Movie_INFO.mkv','').replace('.ParsFilm.mkv ','').replace('(SaberFunIR)','')
name4=(name3).replace('.INTERNAL.','').replace('.Film2Movie_INFO.mkv','').replace('.web-dl.Tehmovies.net.mkv','').replace('S01E06','S01 E06').replace('S01E07','S01 E07')
name5=(name4).replace('S01E08','S01 E08').replace('S01E09','S01 E09').replace('S01E10','S01 E10').replace('.Tehmovies.net','').replace('.WEBRip.Tehmovies.com.mkv','')
name6=(name5).replace('.mp4','').replace('.mkv','').replace('.Tehmovies.ir','').replace('x265HEVC','').replace('Film2Movie_INFO','').replace('Tehmovies.com.mkv','')
name7=(name6).replace(' (ParsFilm)','').replace('Tehmovies.ir.mkv','').replace('.480p',' 480p').replace('.WEBrip','').replace('.web-dl','').replace('.WEB-DL','')
name8=(name7).replace('.','').replace('.Tehmovies.com','').replace('480p.Tehmovies.net</',' 480p').replace('720p.Tehmovies.net','720p').replace('.480p',' 480p')
name9=(name8).replace('.480p.WEB-DL',' 480p').replace('.mkv','').replace('.INTERNAL.','').replace('720p',' 720p').replace('.Tehmovi..>','').replace('.Tehmovies.net.mkv','')
name10=(name9).replace('..720p',' 720p').replace('.REPACK.Tehmovies..>','').replace('.Tehmovies.com.mkv','').replace('.Tehmovies..>','').replace('Tehmovies.ir..>','')
name11=(name10).replace('Tehmovies.ne..>','').replace('.HDTV.x264-mRs','').replace('...>','').replace('.Tehmovies...>','').replace('.Tehmovies.com.mp4','')
name12=(name11).replace('.Tehmovies.com.mp4','').replace('_MovieFarsi','').replace('_MovieFar','').replace('_com','').replace('>','').replace('avi','').replace('(1)','')
name13=(name12).replace('(2)','').replace('cd 2','').replace('cd 1','').replace('-dos-xvid','').replace('divx','').replace('Xvid','').replace('DVD','').replace('DVDrip','')
name14=(name13).replace('DvDrip-aXXo','').replace('[','').replace(']','').replace('(','').replace(')','').replace('XviD-TLF-','').replace('CD1','').replace('CD2','')
name15=(name14).replace('CD3','').replace('mp4','').replace('&','&').replace('HDRip','').replace('-','').replace(' ',' ').replace('xvid','').replace('1080p','')
name16=(name15).replace('1970','').replace('1971','').replace('1972','').replace('1973','').replace('1974','').replace('1975','').replace('1976','').replace('1977','')
name17=(name16).replace('1978','').replace('1979','').replace('1980','').replace('1981','').replace('1982','').replace('1983','').replace('1984','').replace('1985','')
name18=(name17).replace('1986','').replace('1987','').replace('1988','').replace('1989','').replace('1990','').replace('1991','').replace('1992','').replace('1993','')
name19=(name18).replace('1994','').replace('1995','').replace('1996','').replace('1997','').replace('1998','').replace('1999','').replace('2000','').replace('2001','')
name20=(name19).replace('2002','').replace('2003','').replace('2004','').replace('2005','').replace('2006','').replace('2007','').replace('2008','').replace('2009','')
name21=(name20).replace('2010','').replace('2011','').replace('2012','').replace('2013','').replace('2014','').replace('2015','').replace('2016','').replace('720p','')
name22=(name21).replace('360p','').replace(' ',' ').replace('BluRay','').replace('rip','').replace('WEBDL','').replace('s01','').replace('s02','').replace('S02','')
name23=(name22).replace('s03','').replace('s04','').replace('s05','').replace('s06','').replace('s07','').replace('s08','').replace('s09','').replace('S01','')
name24=(name23).replace('S03','').replace('S04',' ').replace('S05','').replace('S06','').replace('S07','').replace('S08','').replace('S09','').replace('E01','')
name25=(name24).replace('E02','').replace('E03','').replace('E04','').replace('E05','').replace('E06','').replace('E07','').replace('E08','').replace('E09','').replace('e01','')
name25=(name24).replace('e02','').replace('e03','').replace('e04','').replace('e05','').replace('e06','').replace('e07','').replace('e08','').replace('e09','').replace('e01','')
clean_name = name15
search_name = name25
#if ADDON.getSetting('Data')=='true':
# Imdb_Scrape(url3,clean_name,search_name)
#if ADDON.getSetting('Data')=='false':
Play(clean_name,url3,2,ICON,FANART,'','','')
def Search():
Search_Name = Dialog.input('Search', type=xbmcgui.INPUT_ALPHANUM)
Search_Title = Search_Name.lower()
if Search_Title == '':
pass
else:
for file_Name in search_filenames:
search_URL = Base_Url + file_Name + '.php'
OPEN = Open_Url(search_URL)
if OPEN != 'Opened':
Regex = re.compile('<a href="(.+?)" target="_blank"><img src="(.+?)" style="max-width:200px;" /><description = "(.+?)" /><background = "(.+?)" </background></a><br><b>(.+?)</b>').findall(OPEN)
for url,icon,desc,fanart,name in Regex:
if Search_Title in name.lower():
Watched = re.compile('item="(.+?)"\n').findall(str(watched_read))
for item in Watched:
if item == url:
name = '[COLORred]* [/COLOR]'+(name).replace('[COLORred]* [/COLOR][COLORred]* [/COLOR]','[COLORred]* [/COLOR]')
print_text_file = open(watched,"a")
print_text_file.write('item="'+name+'"\n')
print_text_file.close
if 'php' in url:
Menu(name,url,1,icon,fanart,desc)
else:
Play(name,url,2,icon,fanart,desc)
setView('tvshows', 'Media Info 3')
####################################################################PROCESSES###################################################
def Open_Url(url):
req = urllib2.Request(url)
req.add_header('User-Agent', 'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.9.0.3) Gecko/2008092417 Firefox/3.0.3')
response = ''
link = ''
try:
response = urllib2.urlopen(req)
link=response.read()
response.close()
except: pass
if link != '':
return link
else:
link = 'Opened'
return link
def setView(content, viewType):
if content:
xbmcplugin.setContent(int(sys.argv[1]), content)
def Menu(name,url,mode,iconimage,fanart,description,showcontext=True,allinfo={}):
u=sys.argv[0]+"?url="+urllib.quote_plus(url)+"&mode="+str(mode)+"&name="+urllib.quote_plus(name)
ok=True
liz=xbmcgui.ListItem(name, iconImage="DefaultFolder.png", thumbnailImage=iconimage)
liz.setInfo( type="Video", infoLabels={ "Title": name } )
liz.setProperty( "Fanart_Image", fanart )
if showcontext:
contextMenu = []
if showcontext == 'fav':
contextMenu.append(('Remove from '+ADDON_NAME+' Favorites','XBMC.RunPlugin(%s?mode=5&name=%s)'
%(sys.argv[0], urllib.quote_plus(name))))
if not name in FAV:
contextMenu.append(('Add to '+ADDON_NAME+' Favorites','XBMC.RunPlugin(%s?mode=4&name=%s&url=%s&iconimage=%s&fav_mode=%s)'
%(sys.argv[0], urllib.quote_plus(name), urllib.quote_plus(url), urllib.quote_plus(iconimage), mode)))
liz.addContextMenuItems(contextMenu)
ok=xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]),url=u,listitem=liz,isFolder=True)
return ok
xbmcplugin.endOfDirectory(int(sys.argv[1]))
def Play(name,url,mode,iconimage,fanart,description,showcontext=True,allinfo={}):
u=sys.argv[0]+"?url="+urllib.quote_plus(url)+"&mode="+str(mode)+"&name="+urllib.quote_plus(name)
ok=True
liz=xbmcgui.ListItem(name, iconImage="DefaultFolder.png", thumbnailImage=iconimage)
liz.setInfo( type="Video", infoLabels={ "Title": name } )
liz.setProperty( "Fanart_Image", fanart )
if showcontext:
contextMenu = []
if showcontext == 'fav':
contextMenu.append(('Remove from '+ADDON_NAME+' Favorites','XBMC.RunPlugin(%s?mode=5&name=%s)'
%(sys.argv[0], urllib.quote_plus(name))))
if not name in FAV:
contextMenu.append(('Add to '+ADDON_NAME+' Favorites','XBMC.RunPlugin(%s?mode=4&name=%s&url=%s&iconimage=%s&fav_mode=%s)'
%(sys.argv[0], urllib.quote_plus(name), urllib.quote_plus(url), urllib.quote_plus(iconimage), mode)))
liz.addContextMenuItems(contextMenu)
ok=xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]),url=u,listitem=liz,isFolder=False)
return ok
xbmcplugin.endOfDirectory(int(sys.argv[1]))
def GetPlayerCore():
try:
PlayerMethod=getSet("core-player")
if (PlayerMethod=='DVDPLAYER'): PlayerMeth=xbmc.PLAYER_CORE_DVDPLAYER
elif (PlayerMethod=='MPLAYER'): PlayerMeth=xbmc.PLAYER_CORE_MPLAYER
elif (PlayerMethod=='PAPLAYER'): PlayerMeth=xbmc.PLAYER_CORE_PAPLAYER
else: PlayerMeth=xbmc.PLAYER_CORE_AUTO
except: PlayerMeth=xbmc.PLAYER_CORE_AUTO
return PlayerMeth
return True
xbmcplugin.endOfDirectory(int(sys.argv[1]))
def resolve(url):
print_text_file = open(watched,"a")
print_text_file.write('item="'+url+'"\n')
print_text_file.close
play=xbmc.Player(GetPlayerCore())
import urlresolver
try: play.play(url)
except: pass
xbmcplugin.endOfDirectory(int(sys.argv[1]))
def addon_log(string):
if debug == 'true':
xbmc.log("["+ADDON_NAME+"]: %s" %(addon_version, string))
def addFavorite(name,url,iconimage,fanart,mode,playlist=None,regexs=None):
favList = []
try:
# seems that after
name = name.encode('utf-8', 'ignore')
except:
pass
if os.path.exists(favorites)==False:
addon_log('Making Favorites File')
favList.append((name,url,iconimage,fanart,mode,playlist,regexs))
a = open(favorites, "w")
a.write(json.dumps(favList))
a.close()
else:
addon_log('Appending Favorites')
a = open(favorites).read()
data = json.loads(a)
data.append((name,url,iconimage,fanart,mode))
b = open(favorites, "w")
b.write(json.dumps(data))
b.close()
def getFavorites():
if os.path.exists(favorites)==False:
favList = []
addon_log('Making Favorites File')
favList.append(('[COLORskyblue]C[COLORblue]arrera Favourites Section[/COLOR]','','','','','',''))
a = open(favorites, "w")
a.write(json.dumps(favList))
a.close()
else:
items = json.loads(open(favorites).read())
total = len(items)
for i in items:
name = i[0]
url = i[1]
iconimage = i[2]
try:
fanArt = i[3]
if fanArt == None:
raise
except:
if ADDON.getSetting('use_thumb') == "true":
fanArt = iconimage
else:
fanArt = fanart
try: playlist = i[5]
except: playlist = None
try: regexs = i[6]
except: regexs = None
if i[4] == 0:
Menu(name,url,'',iconimage,fanart,'','fav')
else:
|
def rmFavorite(name):
data = json.loads(open(favorites).read())
for index in range(len(data)):
if data[index][0]==name:
del data[index]
b = open(favorites, "w")
b.write(json.dumps(data))
b.close()
break
xbmc.executebuiltin("XBMC.Container.Refresh")
def get_params():
param=[]
paramstring=sys.argv[2]
if len(paramstring)>=2:
params=sys.argv[2]
cleanedparams=params.replace('?','')
if (params[len(params)-1]=='/'):
params=params[0:len(params)-2]
pairsofparams=cleanedparams.split('&')
param={}
for i in range(len(pairsofparams)):
splitparams={}
splitparams=pairsofparams[i].split('=')
if (len(splitparams))==2:
param[splitparams[0]]=splitparams[1]
return param
params=get_params()
url=None
name=None
iconimage=None
mode=None
fanart=None
description=None
fav_mode=None
try:
fav_mode=int(params["fav_mode"])
except:
pass
try:
url=urllib.unquote_plus(params["url"])
except:
pass
try:
name=urllib.unquote_plus(params["name"])
except:
pass
try:
iconimage=urllib.unquote_plus(params["iconimage"])
except:
pass
try:
mode=int(params["mode"])
except:
pass
try:
fanart=urllib.unquote_plus(params["fanart"])
except:
pass
try:
description=urllib.unquote_plus(params["description"])
except:
pass
print str(PATH)+': '+str(VERSION)
print "Mode: "+str(mode)
print "URL: "+str(url)
print "Name: "+str(name)
print "IconImage: "+str(iconimage)
#####################################################END PROCESSES##############################################################
if mode == None: Main_Menu()
elif mode == 1 : Second_Menu(url)
elif mode == 2 :
if 'youtube' in url:
url = (url).replace('https://www.youtube.com/watch?v=','').replace('http://www.youtube.com/watch?v=','')
yt.PlayVideo(url)
else:
resolve(url)
elif mode == 3 : Search()
elif mode==4:
addon_log("addFavorite")
try:
name = name.split('\\ ')[1]
except:
pass
try:
name = name.split(' - ')[0]
except:
pass
addFavorite(name,url,iconimage,fanart,fav_mode)
elif mode==5:
addon_log("rmFavorite")
try:
name = name.split('\\ ')[1]
except:
pass
try:
name = name.split(' - ')[0]
except:
pass
rmFavorite(name)
elif mode==6:
addon_log("getFavorites")
getFavorites()
elif mode == 7 : index_Menu()
elif mode == 8 : Main_Loop(url)
elif mode == 9 : Source_File()
elif mode ==10 : Index_List()
xbmcplugin.addSortMethod(int(sys.argv[1]), 1)
xbmcplugin.endOfDirectory(int(sys.argv[1]))
| Menu(name,url,i[4],iconimage,fanart,'','fav') | conditional_block |
default.py | # -*- coding: utf-8 -*-
'''
Template Add-on
Copyright (C) 2016 Demo
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import urllib2, urllib, xbmcgui, xbmcplugin, xbmcaddon, xbmc, re, sys, os
try:
import json
except:
import simplejson as json
import yt
ADDON_NAME = 'Carrera'
addon_id = 'plugin.video.Carrera'
Base_Url = 'http://herovision.x10host.com/carrera/'
Main_Menu_File_Name = 'main.php'
search_filenames = ['sv1','teh','s5']
########################################################################################
### FAVOURITES SECTION IS NOT THIS AUTHORS CODE, I COULD NOT GET IT TO REMOVE FAVOURITES SO ALL CREDIT DUE TO THEM, SORRY IM NOT SURE WHERE IT CAME FROM BUT GOOD WORK :) ###
ADDON = xbmcaddon.Addon(id=addon_id)
ADDON_PATH = xbmc.translatePath('special://home/addons/'+addon_id)
ICON = ADDON_PATH + 'icon.png'
FANART = ADDON_PATH + 'fanart.jpg'
PATH = 'Carrera'
VERSION = '0.0.1'
Dialog = xbmcgui.Dialog()
addon_data = xbmc.translatePath('special://home/userdata/addon_data/'+addon_id+'/')
favorites = os.path.join(addon_data, 'favorites.txt')
watched = addon_data + 'watched.txt'
source_file = Base_Url + 'source_file.php'
debug = ADDON.getSetting('debug')
if os.path.exists(addon_data)==False:
os.makedirs(addon_data)
if not os.path.exists(watched):
open(watched,'w+')
if os.path.exists(favorites)==True:
FAV = open(favorites).read()
else: FAV = []
watched_read = open(watched).read()
def Main_Menu():
OPEN = Open_Url(Base_Url+Main_Menu_File_Name)
Regex = re.compile('<a href="(.+?)" target="_blank"><img src="(.+?)" style="max-width:200px;" /><description = "(.+?)" /><background = "(.+?)" </background></a><br><b>(.+?)</b>').findall(OPEN)
for url,icon,desc,fanart,name in Regex:
if name == '[COLORskyblue]F[COLORblue]avourites[/COLOR]':
Menu(name,url,6,icon,fanart,desc)
elif 'php' in url:
Menu(name,url,1,icon,fanart,desc)
elif name == '[COLORskyblue]S[COLORblue]earch[/COLOR]':
Menu('[COLORskyblue]S[COLORblue]earch[/COLOR]',url,3,icon,fanart,desc)
elif name == '[COLORskyblue]i[COLORblue]dex[/COLOR]':
Menu('[COLORskyblue]O[COLORblue]nline Lists[/COLOR]',url,10,icon,fanart,desc)
else:
Play(name,url,2,icon,fanart,desc)
setView('tvshows', 'Media Info 3')
def Second_Menu(url):
OPEN = Open_Url(url)
Regex = re.compile('<a href="(.+?)" target="_blank"><img src="(.+?)" style="max-width:200px;" /><description = "(.+?)" /><background = "(.+?)" </background></a><br><b>(.+?)</b>').findall(OPEN)
for url,icon,desc,fanart,name in Regex:
Watched = re.compile('item="(.+?)"\n').findall(str(watched_read))
for item in Watched:
if item == url:
name = '[COLORred]* [/COLOR]'+(name).replace('[COLORred]* [/COLOR][COLORred]* [/COLOR]','[COLORred]* [/COLOR]')
print_text_file = open(watched,"a")
print_text_file.write('item="'+name+'"\n')
print_text_file.close
if 'php' in url:
Menu(name,url,1,icon,fanart,desc)
else:
Play(name,url,2,icon,fanart,desc)
setView('tvshows', 'Media Info 3')
def index_Menu():
#Menu('Favourites','',5,'','','','','')
Menu('List of Index\'s','',10,'','','')
# Menu('Search','',6,ICON,FANART,'','','')
# Menu('[COLORred]Press here to add a source url[/COLOR] ','',2,'','','','','')
def Index_List():
OPEN = Open_Url(source_file)
Regex = re.compile('url="(.+?)">name="(.+?)"').findall(OPEN)
for url,name in Regex:
Menu(name,url,8,'','','')
#####################################MAIN REGEX LOOP ###############################
def Main_Loop(url):
HTML = Open_Url(url)
match = re.compile('<a href="(.+?)">(.+?)</a>').findall(HTML)
for url2,name in match:
url3 = url + url2
if '..' in url3:
pass
elif 'rar' in url3:
pass
elif 'jpg' in url3:
pass
elif 'vtx' in url3:
pass
elif 'srt' in url3:
pass
elif 'C=' in url2:
pass
elif '/' in url2:
Menu((name).replace('/',''),url3,8,ICON,FANART,'','','')
else:
Clean_name(name,url3)
################################### TIDY UP NAME #############################
def Clean_name(name,url3):
name1 = (name).replace('S01E','S01 E').replace('(MovIran).mkv','').replace('The.Walking.Dead','').replace('.mkv','').replace('Tehmovies.com.mkv','').replace('Nightsdl','').replace('Ganool','')
name2=(name1).replace('.',' ').replace(' (ParsFilm).mkv','').replace('_TehMovies.Com.mkv','').replace(' (SaberFun.IR).mkv','').replace('[UpFilm].mkv','').replace('(Bia2Movies)','')
name3=(name2).replace('.mkv','').replace('.Film2Movie_INFO.mkv','').replace('.HEVC.Film2Movie_INFO.mkv','').replace('.ParsFilm.mkv ','').replace('(SaberFunIR)','')
name4=(name3).replace('.INTERNAL.','').replace('.Film2Movie_INFO.mkv','').replace('.web-dl.Tehmovies.net.mkv','').replace('S01E06','S01 E06').replace('S01E07','S01 E07')
name5=(name4).replace('S01E08','S01 E08').replace('S01E09','S01 E09').replace('S01E10','S01 E10').replace('.Tehmovies.net','').replace('.WEBRip.Tehmovies.com.mkv','')
name6=(name5).replace('.mp4','').replace('.mkv','').replace('.Tehmovies.ir','').replace('x265HEVC','').replace('Film2Movie_INFO','').replace('Tehmovies.com.mkv','')
name7=(name6).replace(' (ParsFilm)','').replace('Tehmovies.ir.mkv','').replace('.480p',' 480p').replace('.WEBrip','').replace('.web-dl','').replace('.WEB-DL','')
name8=(name7).replace('.','').replace('.Tehmovies.com','').replace('480p.Tehmovies.net</',' 480p').replace('720p.Tehmovies.net','720p').replace('.480p',' 480p')
name9=(name8).replace('.480p.WEB-DL',' 480p').replace('.mkv','').replace('.INTERNAL.','').replace('720p',' 720p').replace('.Tehmovi..>','').replace('.Tehmovies.net.mkv','')
name10=(name9).replace('..720p',' 720p').replace('.REPACK.Tehmovies..>','').replace('.Tehmovies.com.mkv','').replace('.Tehmovies..>','').replace('Tehmovies.ir..>','')
name11=(name10).replace('Tehmovies.ne..>','').replace('.HDTV.x264-mRs','').replace('...>','').replace('.Tehmovies...>','').replace('.Tehmovies.com.mp4','')
name12=(name11).replace('.Tehmovies.com.mp4','').replace('_MovieFarsi','').replace('_MovieFar','').replace('_com','').replace('>','').replace('avi','').replace('(1)','')
name13=(name12).replace('(2)','').replace('cd 2','').replace('cd 1','').replace('-dos-xvid','').replace('divx','').replace('Xvid','').replace('DVD','').replace('DVDrip','')
name14=(name13).replace('DvDrip-aXXo','').replace('[','').replace(']','').replace('(','').replace(')','').replace('XviD-TLF-','').replace('CD1','').replace('CD2','')
name15=(name14).replace('CD3','').replace('mp4','').replace('&','&').replace('HDRip','').replace('-','').replace(' ',' ').replace('xvid','').replace('1080p','')
name16=(name15).replace('1970','').replace('1971','').replace('1972','').replace('1973','').replace('1974','').replace('1975','').replace('1976','').replace('1977','')
name17=(name16).replace('1978','').replace('1979','').replace('1980','').replace('1981','').replace('1982','').replace('1983','').replace('1984','').replace('1985','')
name18=(name17).replace('1986','').replace('1987','').replace('1988','').replace('1989','').replace('1990','').replace('1991','').replace('1992','').replace('1993','')
name19=(name18).replace('1994','').replace('1995','').replace('1996','').replace('1997','').replace('1998','').replace('1999','').replace('2000','').replace('2001','')
name20=(name19).replace('2002','').replace('2003','').replace('2004','').replace('2005','').replace('2006','').replace('2007','').replace('2008','').replace('2009','')
name21=(name20).replace('2010','').replace('2011','').replace('2012','').replace('2013','').replace('2014','').replace('2015','').replace('2016','').replace('720p','')
name22=(name21).replace('360p','').replace(' ',' ').replace('BluRay','').replace('rip','').replace('WEBDL','').replace('s01','').replace('s02','').replace('S02','')
name23=(name22).replace('s03','').replace('s04','').replace('s05','').replace('s06','').replace('s07','').replace('s08','').replace('s09','').replace('S01','')
name24=(name23).replace('S03','').replace('S04',' ').replace('S05','').replace('S06','').replace('S07','').replace('S08','').replace('S09','').replace('E01','')
name25=(name24).replace('E02','').replace('E03','').replace('E04','').replace('E05','').replace('E06','').replace('E07','').replace('E08','').replace('E09','').replace('e01','')
name25=(name24).replace('e02','').replace('e03','').replace('e04','').replace('e05','').replace('e06','').replace('e07','').replace('e08','').replace('e09','').replace('e01','')
clean_name = name15
search_name = name25
#if ADDON.getSetting('Data')=='true':
# Imdb_Scrape(url3,clean_name,search_name)
#if ADDON.getSetting('Data')=='false':
Play(clean_name,url3,2,ICON,FANART,'','','')
def Search():
Search_Name = Dialog.input('Search', type=xbmcgui.INPUT_ALPHANUM)
Search_Title = Search_Name.lower()
if Search_Title == '':
pass
else:
for file_Name in search_filenames:
search_URL = Base_Url + file_Name + '.php'
OPEN = Open_Url(search_URL)
if OPEN != 'Opened':
Regex = re.compile('<a href="(.+?)" target="_blank"><img src="(.+?)" style="max-width:200px;" /><description = "(.+?)" /><background = "(.+?)" </background></a><br><b>(.+?)</b>').findall(OPEN)
for url,icon,desc,fanart,name in Regex:
if Search_Title in name.lower():
Watched = re.compile('item="(.+?)"\n').findall(str(watched_read))
for item in Watched:
if item == url:
name = '[COLORred]* [/COLOR]'+(name).replace('[COLORred]* [/COLOR][COLORred]* [/COLOR]','[COLORred]* [/COLOR]')
print_text_file = open(watched,"a")
print_text_file.write('item="'+name+'"\n')
print_text_file.close
if 'php' in url:
Menu(name,url,1,icon,fanart,desc)
else:
Play(name,url,2,icon,fanart,desc)
setView('tvshows', 'Media Info 3')
####################################################################PROCESSES###################################################
def Open_Url(url):
req = urllib2.Request(url)
req.add_header('User-Agent', 'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.9.0.3) Gecko/2008092417 Firefox/3.0.3')
response = ''
link = ''
try:
response = urllib2.urlopen(req)
link=response.read()
response.close()
except: pass
if link != '':
return link
else:
link = 'Opened'
return link
def setView(content, viewType):
if content:
xbmcplugin.setContent(int(sys.argv[1]), content)
def Menu(name,url,mode,iconimage,fanart,description,showcontext=True,allinfo={}):
u=sys.argv[0]+"?url="+urllib.quote_plus(url)+"&mode="+str(mode)+"&name="+urllib.quote_plus(name)
ok=True
liz=xbmcgui.ListItem(name, iconImage="DefaultFolder.png", thumbnailImage=iconimage)
liz.setInfo( type="Video", infoLabels={ "Title": name } )
liz.setProperty( "Fanart_Image", fanart )
if showcontext:
contextMenu = []
if showcontext == 'fav':
contextMenu.append(('Remove from '+ADDON_NAME+' Favorites','XBMC.RunPlugin(%s?mode=5&name=%s)'
%(sys.argv[0], urllib.quote_plus(name))))
if not name in FAV:
contextMenu.append(('Add to '+ADDON_NAME+' Favorites','XBMC.RunPlugin(%s?mode=4&name=%s&url=%s&iconimage=%s&fav_mode=%s)'
%(sys.argv[0], urllib.quote_plus(name), urllib.quote_plus(url), urllib.quote_plus(iconimage), mode)))
liz.addContextMenuItems(contextMenu)
ok=xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]),url=u,listitem=liz,isFolder=True)
return ok
xbmcplugin.endOfDirectory(int(sys.argv[1]))
def Play(name,url,mode,iconimage,fanart,description,showcontext=True,allinfo={}):
u=sys.argv[0]+"?url="+urllib.quote_plus(url)+"&mode="+str(mode)+"&name="+urllib.quote_plus(name)
ok=True
liz=xbmcgui.ListItem(name, iconImage="DefaultFolder.png", thumbnailImage=iconimage)
liz.setInfo( type="Video", infoLabels={ "Title": name } )
liz.setProperty( "Fanart_Image", fanart )
if showcontext:
contextMenu = []
if showcontext == 'fav':
contextMenu.append(('Remove from '+ADDON_NAME+' Favorites','XBMC.RunPlugin(%s?mode=5&name=%s)'
%(sys.argv[0], urllib.quote_plus(name))))
if not name in FAV:
contextMenu.append(('Add to '+ADDON_NAME+' Favorites','XBMC.RunPlugin(%s?mode=4&name=%s&url=%s&iconimage=%s&fav_mode=%s)'
%(sys.argv[0], urllib.quote_plus(name), urllib.quote_plus(url), urllib.quote_plus(iconimage), mode)))
liz.addContextMenuItems(contextMenu)
ok=xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]),url=u,listitem=liz,isFolder=False)
return ok
xbmcplugin.endOfDirectory(int(sys.argv[1]))
def GetPlayerCore():
try:
PlayerMethod=getSet("core-player")
if (PlayerMethod=='DVDPLAYER'): PlayerMeth=xbmc.PLAYER_CORE_DVDPLAYER
elif (PlayerMethod=='MPLAYER'): PlayerMeth=xbmc.PLAYER_CORE_MPLAYER
elif (PlayerMethod=='PAPLAYER'): PlayerMeth=xbmc.PLAYER_CORE_PAPLAYER
else: PlayerMeth=xbmc.PLAYER_CORE_AUTO
except: PlayerMeth=xbmc.PLAYER_CORE_AUTO
return PlayerMeth
return True
xbmcplugin.endOfDirectory(int(sys.argv[1]))
def resolve(url):
print_text_file = open(watched,"a")
print_text_file.write('item="'+url+'"\n')
print_text_file.close
play=xbmc.Player(GetPlayerCore())
import urlresolver
try: play.play(url)
except: pass
xbmcplugin.endOfDirectory(int(sys.argv[1]))
def addon_log(string):
if debug == 'true':
xbmc.log("["+ADDON_NAME+"]: %s" %(addon_version, string))
def addFavorite(name,url,iconimage,fanart,mode,playlist=None,regexs=None):
favList = []
try:
# seems that after
name = name.encode('utf-8', 'ignore')
except:
pass
if os.path.exists(favorites)==False:
addon_log('Making Favorites File')
favList.append((name,url,iconimage,fanart,mode,playlist,regexs))
a = open(favorites, "w")
a.write(json.dumps(favList))
a.close()
else:
addon_log('Appending Favorites')
a = open(favorites).read()
data = json.loads(a)
data.append((name,url,iconimage,fanart,mode))
b = open(favorites, "w")
b.write(json.dumps(data))
b.close()
def getFavorites():
if os.path.exists(favorites)==False:
favList = []
addon_log('Making Favorites File')
favList.append(('[COLORskyblue]C[COLORblue]arrera Favourites Section[/COLOR]','','','','','',''))
a = open(favorites, "w")
a.write(json.dumps(favList))
a.close()
else:
items = json.loads(open(favorites).read())
total = len(items)
for i in items:
name = i[0]
url = i[1]
iconimage = i[2]
try:
fanArt = i[3]
if fanArt == None:
raise
except:
if ADDON.getSetting('use_thumb') == "true":
fanArt = iconimage
else:
fanArt = fanart
try: playlist = i[5]
except: playlist = None
try: regexs = i[6]
except: regexs = None
if i[4] == 0:
Menu(name,url,'',iconimage,fanart,'','fav')
else:
Menu(name,url,i[4],iconimage,fanart,'','fav')
def rmFavorite(name):
data = json.loads(open(favorites).read())
for index in range(len(data)):
if data[index][0]==name:
del data[index]
b = open(favorites, "w")
b.write(json.dumps(data))
b.close()
break
xbmc.executebuiltin("XBMC.Container.Refresh")
def get_params():
param=[]
paramstring=sys.argv[2]
if len(paramstring)>=2:
params=sys.argv[2]
cleanedparams=params.replace('?','')
if (params[len(params)-1]=='/'):
params=params[0:len(params)-2]
pairsofparams=cleanedparams.split('&')
param={}
for i in range(len(pairsofparams)):
splitparams={}
splitparams=pairsofparams[i].split('=')
if (len(splitparams))==2:
param[splitparams[0]]=splitparams[1]
return param
params=get_params()
url=None
name=None
iconimage=None
mode=None
fanart=None
description=None
fav_mode=None
try:
fav_mode=int(params["fav_mode"])
except:
pass
try:
url=urllib.unquote_plus(params["url"])
except:
pass
try:
name=urllib.unquote_plus(params["name"])
except:
pass
try:
iconimage=urllib.unquote_plus(params["iconimage"])
except:
pass
try:
mode=int(params["mode"])
except:
pass
try:
fanart=urllib.unquote_plus(params["fanart"])
except:
pass
try:
description=urllib.unquote_plus(params["description"])
except:
pass
print str(PATH)+': '+str(VERSION)
print "Mode: "+str(mode)
print "URL: "+str(url)
print "Name: "+str(name)
print "IconImage: "+str(iconimage)
#####################################################END PROCESSES##############################################################
if mode == None: Main_Menu()
elif mode == 1 : Second_Menu(url)
elif mode == 2 :
if 'youtube' in url:
url = (url).replace('https://www.youtube.com/watch?v=','').replace('http://www.youtube.com/watch?v=','')
yt.PlayVideo(url)
else:
resolve(url)
elif mode == 3 : Search()
elif mode==4:
addon_log("addFavorite")
try:
name = name.split('\\ ')[1]
except:
pass
try:
name = name.split(' - ')[0]
except:
pass
addFavorite(name,url,iconimage,fanart,fav_mode)
elif mode==5:
addon_log("rmFavorite") | try:
name = name.split('\\ ')[1]
except:
pass
try:
name = name.split(' - ')[0]
except:
pass
rmFavorite(name)
elif mode==6:
addon_log("getFavorites")
getFavorites()
elif mode == 7 : index_Menu()
elif mode == 8 : Main_Loop(url)
elif mode == 9 : Source_File()
elif mode ==10 : Index_List()
xbmcplugin.addSortMethod(int(sys.argv[1]), 1)
xbmcplugin.endOfDirectory(int(sys.argv[1])) | random_line_split | |
default.py | # -*- coding: utf-8 -*-
'''
Template Add-on
Copyright (C) 2016 Demo
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import urllib2, urllib, xbmcgui, xbmcplugin, xbmcaddon, xbmc, re, sys, os
try:
import json
except:
import simplejson as json
import yt
ADDON_NAME = 'Carrera'
addon_id = 'plugin.video.Carrera'
Base_Url = 'http://herovision.x10host.com/carrera/'
Main_Menu_File_Name = 'main.php'
search_filenames = ['sv1','teh','s5']
########################################################################################
### FAVOURITES SECTION IS NOT THIS AUTHORS CODE, I COULD NOT GET IT TO REMOVE FAVOURITES SO ALL CREDIT DUE TO THEM, SORRY IM NOT SURE WHERE IT CAME FROM BUT GOOD WORK :) ###
ADDON = xbmcaddon.Addon(id=addon_id)
ADDON_PATH = xbmc.translatePath('special://home/addons/'+addon_id)
ICON = ADDON_PATH + 'icon.png'
FANART = ADDON_PATH + 'fanart.jpg'
PATH = 'Carrera'
VERSION = '0.0.1'
Dialog = xbmcgui.Dialog()
addon_data = xbmc.translatePath('special://home/userdata/addon_data/'+addon_id+'/')
favorites = os.path.join(addon_data, 'favorites.txt')
watched = addon_data + 'watched.txt'
source_file = Base_Url + 'source_file.php'
debug = ADDON.getSetting('debug')
if os.path.exists(addon_data)==False:
os.makedirs(addon_data)
if not os.path.exists(watched):
open(watched,'w+')
if os.path.exists(favorites)==True:
FAV = open(favorites).read()
else: FAV = []
watched_read = open(watched).read()
def Main_Menu():
OPEN = Open_Url(Base_Url+Main_Menu_File_Name)
Regex = re.compile('<a href="(.+?)" target="_blank"><img src="(.+?)" style="max-width:200px;" /><description = "(.+?)" /><background = "(.+?)" </background></a><br><b>(.+?)</b>').findall(OPEN)
for url,icon,desc,fanart,name in Regex:
if name == '[COLORskyblue]F[COLORblue]avourites[/COLOR]':
Menu(name,url,6,icon,fanart,desc)
elif 'php' in url:
Menu(name,url,1,icon,fanart,desc)
elif name == '[COLORskyblue]S[COLORblue]earch[/COLOR]':
Menu('[COLORskyblue]S[COLORblue]earch[/COLOR]',url,3,icon,fanart,desc)
elif name == '[COLORskyblue]i[COLORblue]dex[/COLOR]':
Menu('[COLORskyblue]O[COLORblue]nline Lists[/COLOR]',url,10,icon,fanart,desc)
else:
Play(name,url,2,icon,fanart,desc)
setView('tvshows', 'Media Info 3')
def Second_Menu(url):
OPEN = Open_Url(url)
Regex = re.compile('<a href="(.+?)" target="_blank"><img src="(.+?)" style="max-width:200px;" /><description = "(.+?)" /><background = "(.+?)" </background></a><br><b>(.+?)</b>').findall(OPEN)
for url,icon,desc,fanart,name in Regex:
Watched = re.compile('item="(.+?)"\n').findall(str(watched_read))
for item in Watched:
if item == url:
name = '[COLORred]* [/COLOR]'+(name).replace('[COLORred]* [/COLOR][COLORred]* [/COLOR]','[COLORred]* [/COLOR]')
print_text_file = open(watched,"a")
print_text_file.write('item="'+name+'"\n')
print_text_file.close
if 'php' in url:
Menu(name,url,1,icon,fanart,desc)
else:
Play(name,url,2,icon,fanart,desc)
setView('tvshows', 'Media Info 3')
def index_Menu():
#Menu('Favourites','',5,'','','','','')
Menu('List of Index\'s','',10,'','','')
# Menu('Search','',6,ICON,FANART,'','','')
# Menu('[COLORred]Press here to add a source url[/COLOR] ','',2,'','','','','')
def Index_List():
OPEN = Open_Url(source_file)
Regex = re.compile('url="(.+?)">name="(.+?)"').findall(OPEN)
for url,name in Regex:
Menu(name,url,8,'','','')
#####################################MAIN REGEX LOOP ###############################
def | (url):
HTML = Open_Url(url)
match = re.compile('<a href="(.+?)">(.+?)</a>').findall(HTML)
for url2,name in match:
url3 = url + url2
if '..' in url3:
pass
elif 'rar' in url3:
pass
elif 'jpg' in url3:
pass
elif 'vtx' in url3:
pass
elif 'srt' in url3:
pass
elif 'C=' in url2:
pass
elif '/' in url2:
Menu((name).replace('/',''),url3,8,ICON,FANART,'','','')
else:
Clean_name(name,url3)
################################### TIDY UP NAME #############################
def Clean_name(name,url3):
name1 = (name).replace('S01E','S01 E').replace('(MovIran).mkv','').replace('The.Walking.Dead','').replace('.mkv','').replace('Tehmovies.com.mkv','').replace('Nightsdl','').replace('Ganool','')
name2=(name1).replace('.',' ').replace(' (ParsFilm).mkv','').replace('_TehMovies.Com.mkv','').replace(' (SaberFun.IR).mkv','').replace('[UpFilm].mkv','').replace('(Bia2Movies)','')
name3=(name2).replace('.mkv','').replace('.Film2Movie_INFO.mkv','').replace('.HEVC.Film2Movie_INFO.mkv','').replace('.ParsFilm.mkv ','').replace('(SaberFunIR)','')
name4=(name3).replace('.INTERNAL.','').replace('.Film2Movie_INFO.mkv','').replace('.web-dl.Tehmovies.net.mkv','').replace('S01E06','S01 E06').replace('S01E07','S01 E07')
name5=(name4).replace('S01E08','S01 E08').replace('S01E09','S01 E09').replace('S01E10','S01 E10').replace('.Tehmovies.net','').replace('.WEBRip.Tehmovies.com.mkv','')
name6=(name5).replace('.mp4','').replace('.mkv','').replace('.Tehmovies.ir','').replace('x265HEVC','').replace('Film2Movie_INFO','').replace('Tehmovies.com.mkv','')
name7=(name6).replace(' (ParsFilm)','').replace('Tehmovies.ir.mkv','').replace('.480p',' 480p').replace('.WEBrip','').replace('.web-dl','').replace('.WEB-DL','')
name8=(name7).replace('.','').replace('.Tehmovies.com','').replace('480p.Tehmovies.net</',' 480p').replace('720p.Tehmovies.net','720p').replace('.480p',' 480p')
name9=(name8).replace('.480p.WEB-DL',' 480p').replace('.mkv','').replace('.INTERNAL.','').replace('720p',' 720p').replace('.Tehmovi..>','').replace('.Tehmovies.net.mkv','')
name10=(name9).replace('..720p',' 720p').replace('.REPACK.Tehmovies..>','').replace('.Tehmovies.com.mkv','').replace('.Tehmovies..>','').replace('Tehmovies.ir..>','')
name11=(name10).replace('Tehmovies.ne..>','').replace('.HDTV.x264-mRs','').replace('...>','').replace('.Tehmovies...>','').replace('.Tehmovies.com.mp4','')
name12=(name11).replace('.Tehmovies.com.mp4','').replace('_MovieFarsi','').replace('_MovieFar','').replace('_com','').replace('>','').replace('avi','').replace('(1)','')
name13=(name12).replace('(2)','').replace('cd 2','').replace('cd 1','').replace('-dos-xvid','').replace('divx','').replace('Xvid','').replace('DVD','').replace('DVDrip','')
name14=(name13).replace('DvDrip-aXXo','').replace('[','').replace(']','').replace('(','').replace(')','').replace('XviD-TLF-','').replace('CD1','').replace('CD2','')
name15=(name14).replace('CD3','').replace('mp4','').replace('&','&').replace('HDRip','').replace('-','').replace(' ',' ').replace('xvid','').replace('1080p','')
name16=(name15).replace('1970','').replace('1971','').replace('1972','').replace('1973','').replace('1974','').replace('1975','').replace('1976','').replace('1977','')
name17=(name16).replace('1978','').replace('1979','').replace('1980','').replace('1981','').replace('1982','').replace('1983','').replace('1984','').replace('1985','')
name18=(name17).replace('1986','').replace('1987','').replace('1988','').replace('1989','').replace('1990','').replace('1991','').replace('1992','').replace('1993','')
name19=(name18).replace('1994','').replace('1995','').replace('1996','').replace('1997','').replace('1998','').replace('1999','').replace('2000','').replace('2001','')
name20=(name19).replace('2002','').replace('2003','').replace('2004','').replace('2005','').replace('2006','').replace('2007','').replace('2008','').replace('2009','')
name21=(name20).replace('2010','').replace('2011','').replace('2012','').replace('2013','').replace('2014','').replace('2015','').replace('2016','').replace('720p','')
name22=(name21).replace('360p','').replace(' ',' ').replace('BluRay','').replace('rip','').replace('WEBDL','').replace('s01','').replace('s02','').replace('S02','')
name23=(name22).replace('s03','').replace('s04','').replace('s05','').replace('s06','').replace('s07','').replace('s08','').replace('s09','').replace('S01','')
name24=(name23).replace('S03','').replace('S04',' ').replace('S05','').replace('S06','').replace('S07','').replace('S08','').replace('S09','').replace('E01','')
name25=(name24).replace('E02','').replace('E03','').replace('E04','').replace('E05','').replace('E06','').replace('E07','').replace('E08','').replace('E09','').replace('e01','')
name25=(name24).replace('e02','').replace('e03','').replace('e04','').replace('e05','').replace('e06','').replace('e07','').replace('e08','').replace('e09','').replace('e01','')
clean_name = name15
search_name = name25
#if ADDON.getSetting('Data')=='true':
# Imdb_Scrape(url3,clean_name,search_name)
#if ADDON.getSetting('Data')=='false':
Play(clean_name,url3,2,ICON,FANART,'','','')
def Search():
Search_Name = Dialog.input('Search', type=xbmcgui.INPUT_ALPHANUM)
Search_Title = Search_Name.lower()
if Search_Title == '':
pass
else:
for file_Name in search_filenames:
search_URL = Base_Url + file_Name + '.php'
OPEN = Open_Url(search_URL)
if OPEN != 'Opened':
Regex = re.compile('<a href="(.+?)" target="_blank"><img src="(.+?)" style="max-width:200px;" /><description = "(.+?)" /><background = "(.+?)" </background></a><br><b>(.+?)</b>').findall(OPEN)
for url,icon,desc,fanart,name in Regex:
if Search_Title in name.lower():
Watched = re.compile('item="(.+?)"\n').findall(str(watched_read))
for item in Watched:
if item == url:
name = '[COLORred]* [/COLOR]'+(name).replace('[COLORred]* [/COLOR][COLORred]* [/COLOR]','[COLORred]* [/COLOR]')
print_text_file = open(watched,"a")
print_text_file.write('item="'+name+'"\n')
print_text_file.close
if 'php' in url:
Menu(name,url,1,icon,fanart,desc)
else:
Play(name,url,2,icon,fanart,desc)
setView('tvshows', 'Media Info 3')
####################################################################PROCESSES###################################################
def Open_Url(url):
req = urllib2.Request(url)
req.add_header('User-Agent', 'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.9.0.3) Gecko/2008092417 Firefox/3.0.3')
response = ''
link = ''
try:
response = urllib2.urlopen(req)
link=response.read()
response.close()
except: pass
if link != '':
return link
else:
link = 'Opened'
return link
def setView(content, viewType):
if content:
xbmcplugin.setContent(int(sys.argv[1]), content)
def Menu(name,url,mode,iconimage,fanart,description,showcontext=True,allinfo={}):
u=sys.argv[0]+"?url="+urllib.quote_plus(url)+"&mode="+str(mode)+"&name="+urllib.quote_plus(name)
ok=True
liz=xbmcgui.ListItem(name, iconImage="DefaultFolder.png", thumbnailImage=iconimage)
liz.setInfo( type="Video", infoLabels={ "Title": name } )
liz.setProperty( "Fanart_Image", fanart )
if showcontext:
contextMenu = []
if showcontext == 'fav':
contextMenu.append(('Remove from '+ADDON_NAME+' Favorites','XBMC.RunPlugin(%s?mode=5&name=%s)'
%(sys.argv[0], urllib.quote_plus(name))))
if not name in FAV:
contextMenu.append(('Add to '+ADDON_NAME+' Favorites','XBMC.RunPlugin(%s?mode=4&name=%s&url=%s&iconimage=%s&fav_mode=%s)'
%(sys.argv[0], urllib.quote_plus(name), urllib.quote_plus(url), urllib.quote_plus(iconimage), mode)))
liz.addContextMenuItems(contextMenu)
ok=xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]),url=u,listitem=liz,isFolder=True)
return ok
xbmcplugin.endOfDirectory(int(sys.argv[1]))
def Play(name,url,mode,iconimage,fanart,description,showcontext=True,allinfo={}):
u=sys.argv[0]+"?url="+urllib.quote_plus(url)+"&mode="+str(mode)+"&name="+urllib.quote_plus(name)
ok=True
liz=xbmcgui.ListItem(name, iconImage="DefaultFolder.png", thumbnailImage=iconimage)
liz.setInfo( type="Video", infoLabels={ "Title": name } )
liz.setProperty( "Fanart_Image", fanart )
if showcontext:
contextMenu = []
if showcontext == 'fav':
contextMenu.append(('Remove from '+ADDON_NAME+' Favorites','XBMC.RunPlugin(%s?mode=5&name=%s)'
%(sys.argv[0], urllib.quote_plus(name))))
if not name in FAV:
contextMenu.append(('Add to '+ADDON_NAME+' Favorites','XBMC.RunPlugin(%s?mode=4&name=%s&url=%s&iconimage=%s&fav_mode=%s)'
%(sys.argv[0], urllib.quote_plus(name), urllib.quote_plus(url), urllib.quote_plus(iconimage), mode)))
liz.addContextMenuItems(contextMenu)
ok=xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]),url=u,listitem=liz,isFolder=False)
return ok
xbmcplugin.endOfDirectory(int(sys.argv[1]))
def GetPlayerCore():
try:
PlayerMethod=getSet("core-player")
if (PlayerMethod=='DVDPLAYER'): PlayerMeth=xbmc.PLAYER_CORE_DVDPLAYER
elif (PlayerMethod=='MPLAYER'): PlayerMeth=xbmc.PLAYER_CORE_MPLAYER
elif (PlayerMethod=='PAPLAYER'): PlayerMeth=xbmc.PLAYER_CORE_PAPLAYER
else: PlayerMeth=xbmc.PLAYER_CORE_AUTO
except: PlayerMeth=xbmc.PLAYER_CORE_AUTO
return PlayerMeth
return True
xbmcplugin.endOfDirectory(int(sys.argv[1]))
def resolve(url):
print_text_file = open(watched,"a")
print_text_file.write('item="'+url+'"\n')
print_text_file.close
play=xbmc.Player(GetPlayerCore())
import urlresolver
try: play.play(url)
except: pass
xbmcplugin.endOfDirectory(int(sys.argv[1]))
def addon_log(string):
if debug == 'true':
xbmc.log("["+ADDON_NAME+"]: %s" %(addon_version, string))
def addFavorite(name,url,iconimage,fanart,mode,playlist=None,regexs=None):
favList = []
try:
# seems that after
name = name.encode('utf-8', 'ignore')
except:
pass
if os.path.exists(favorites)==False:
addon_log('Making Favorites File')
favList.append((name,url,iconimage,fanart,mode,playlist,regexs))
a = open(favorites, "w")
a.write(json.dumps(favList))
a.close()
else:
addon_log('Appending Favorites')
a = open(favorites).read()
data = json.loads(a)
data.append((name,url,iconimage,fanart,mode))
b = open(favorites, "w")
b.write(json.dumps(data))
b.close()
def getFavorites():
if os.path.exists(favorites)==False:
favList = []
addon_log('Making Favorites File')
favList.append(('[COLORskyblue]C[COLORblue]arrera Favourites Section[/COLOR]','','','','','',''))
a = open(favorites, "w")
a.write(json.dumps(favList))
a.close()
else:
items = json.loads(open(favorites).read())
total = len(items)
for i in items:
name = i[0]
url = i[1]
iconimage = i[2]
try:
fanArt = i[3]
if fanArt == None:
raise
except:
if ADDON.getSetting('use_thumb') == "true":
fanArt = iconimage
else:
fanArt = fanart
try: playlist = i[5]
except: playlist = None
try: regexs = i[6]
except: regexs = None
if i[4] == 0:
Menu(name,url,'',iconimage,fanart,'','fav')
else:
Menu(name,url,i[4],iconimage,fanart,'','fav')
def rmFavorite(name):
data = json.loads(open(favorites).read())
for index in range(len(data)):
if data[index][0]==name:
del data[index]
b = open(favorites, "w")
b.write(json.dumps(data))
b.close()
break
xbmc.executebuiltin("XBMC.Container.Refresh")
def get_params():
param=[]
paramstring=sys.argv[2]
if len(paramstring)>=2:
params=sys.argv[2]
cleanedparams=params.replace('?','')
if (params[len(params)-1]=='/'):
params=params[0:len(params)-2]
pairsofparams=cleanedparams.split('&')
param={}
for i in range(len(pairsofparams)):
splitparams={}
splitparams=pairsofparams[i].split('=')
if (len(splitparams))==2:
param[splitparams[0]]=splitparams[1]
return param
params=get_params()
url=None
name=None
iconimage=None
mode=None
fanart=None
description=None
fav_mode=None
try:
fav_mode=int(params["fav_mode"])
except:
pass
try:
url=urllib.unquote_plus(params["url"])
except:
pass
try:
name=urllib.unquote_plus(params["name"])
except:
pass
try:
iconimage=urllib.unquote_plus(params["iconimage"])
except:
pass
try:
mode=int(params["mode"])
except:
pass
try:
fanart=urllib.unquote_plus(params["fanart"])
except:
pass
try:
description=urllib.unquote_plus(params["description"])
except:
pass
print str(PATH)+': '+str(VERSION)
print "Mode: "+str(mode)
print "URL: "+str(url)
print "Name: "+str(name)
print "IconImage: "+str(iconimage)
#####################################################END PROCESSES##############################################################
if mode == None: Main_Menu()
elif mode == 1 : Second_Menu(url)
elif mode == 2 :
if 'youtube' in url:
url = (url).replace('https://www.youtube.com/watch?v=','').replace('http://www.youtube.com/watch?v=','')
yt.PlayVideo(url)
else:
resolve(url)
elif mode == 3 : Search()
elif mode==4:
addon_log("addFavorite")
try:
name = name.split('\\ ')[1]
except:
pass
try:
name = name.split(' - ')[0]
except:
pass
addFavorite(name,url,iconimage,fanart,fav_mode)
elif mode==5:
addon_log("rmFavorite")
try:
name = name.split('\\ ')[1]
except:
pass
try:
name = name.split(' - ')[0]
except:
pass
rmFavorite(name)
elif mode==6:
addon_log("getFavorites")
getFavorites()
elif mode == 7 : index_Menu()
elif mode == 8 : Main_Loop(url)
elif mode == 9 : Source_File()
elif mode ==10 : Index_List()
xbmcplugin.addSortMethod(int(sys.argv[1]), 1)
xbmcplugin.endOfDirectory(int(sys.argv[1]))
| Main_Loop | identifier_name |
CardHeader.d.ts | import * as React from 'react';
import { TypographyProps } from '../Typography';
import { OverridableComponent, OverrideProps } from '../OverridableComponent';
export interface CardHeaderTypeMap<P = {}, D extends React.ElementType = 'div'> {
props: P & {
action?: React.ReactNode;
avatar?: React.ReactNode;
disableTypography?: boolean;
subheader?: React.ReactNode; | defaultComponent: D;
classKey: CardHeaderClassKey;
}
/**
*
* Demos:
*
* - [Cards](https://material-ui.com/components/cards/)
*
* API:
*
* - [CardHeader API](https://material-ui.com/api/card-header/)
*/
declare const CardHeader: OverridableComponent<CardHeaderTypeMap>;
export type CardHeaderClassKey = 'root' | 'avatar' | 'action' | 'content' | 'title' | 'subheader';
export type CardHeaderProps<
D extends React.ElementType = CardHeaderTypeMap['defaultComponent'],
P = {}
> = OverrideProps<CardHeaderTypeMap<P, D>, D>;
export default CardHeader; | subheaderTypographyProps?: Partial<TypographyProps>;
title?: React.ReactNode;
titleTypographyProps?: Partial<TypographyProps>;
}; | random_line_split |
CensusDataSourceEditor.tsx | import * as React from "react";
import * as weavejs from "weavejs";
import * as _ from "lodash";
import Weave = weavejs.Weave;
import StatefulTextField = weavejs.ui.StatefulTextField;
import ComboBox = weavejs.ui.ComboBox;
import WeaveReactUtils = weavejs.util.WeaveReactUtils
import HBox = weavejs.ui.flexbox.HBox;
import VBox = weavejs.ui.flexbox.VBox;
import HelpIcon = weavejs.ui.HelpIcon;
import EntityNode = weavejs.data.hierarchy.EntityNode;
import EntityType = weavejs.api.data.EntityType;
import IWeaveTreeNode = weavejs.api.data.IWeaveTreeNode;
import ColumnUtils = weavejs.data.ColumnUtils;
import IQualifiedKey = weavejs.api.data.IQualifiedKey;
import ColumnTreeNode = weavejs.data.hierarchy.ColumnTreeNode;
import StandardLib = weavejs.util.StandardLib;
import CensusDataSource = weavejs.data.source.CensusDataSource;
import CensusApi = weavejs.data.source.CensusApi;
import DataSourceEditor, {IDataSourceEditorState, IDataSourceEditorProps} from "weaveapp/editor/DataSourceEditor";
import {CensusGeographyFilter} from "weaveapp/editor/CensusGeographyFilter";
import KeyTypeInput from "weaveapp/ui/KeyTypeInput";
import CensusApiDataSet = weavejs.data.source.CensusApiDataSet;
import CensusApiGeography = weavejs.data.source.CensusApiGeography;
export interface ICensusDataSourceEditorState extends IDataSourceEditorState
{
dataFamily?: string;
dataVintage?: string;
geographies?: {value: string, label: string }[];
datasets?: CensusApiDataSet[];
optional?: string; /* Optional geography filter name */
requires?: string[]; /* Required geography filter names */
}
export default class CensusDataSourceEditor extends DataSourceEditor<ICensusDataSourceEditorState>
{
constructor(props:IDataSourceEditorProps)
{
super(props);
let ds = (this.props.dataSource as CensusDataSource);
this.api = ds.getAPI();
this.api.getDatasets().then(
(result: { dataset: CensusApiDataSet[] }) => { this.state = { datasets: result.dataset } }
);
ds.dataSet.addGroupedCallback(this, this.getGeographies, true);
ds.geographicScope.addGroupedCallback(this, this.updateRequiresAndOptional, true);
}
updateRequiresAndOptional=()=>
{
let ds = (this.props.dataSource as CensusDataSource);
this.api.getGeographies(ds.dataSet.value).then(
(geographies:({[id:string]: CensusApiGeography}))=>
{
let geography = geographies[ds.geographicScope.value];
if (geography)
{
this.setState({ optional: geography.optionalWithWCFor, requires: geography.requires });
}
}
)
}
static isUsableFamily(family: string): boolean |
static isInFamily(family: string, dataset: CensusApiDataSet): boolean {
return family && dataset && ((family == "All") || (dataset.c_dataset.indexOf(family) != -1));
}
static isOfVintage(vintage: string, dataset: CensusApiDataSet): boolean {
return vintage && dataset && ((vintage == "All") || (dataset.c_vintage !== undefined && dataset.c_vintage.toString() == vintage));
}
private api: CensusApi;
state: ICensusDataSourceEditorState = {};
private getDataFamilies():string[]
{
let raw_datasets = this.state.datasets;
if (!raw_datasets)
return ["All"];
let families_set = new Set<string>(_.flatten(_.map(raw_datasets, (d) => d.c_dataset)).filter(CensusDataSourceEditor.isUsableFamily));
let families_list = _.sortBy(Array.from(families_set));
families_list.unshift("All");
return families_list;
}
private getDataVintages(family:string):string[]
{
let raw_datasets = this.state.datasets;
if (!raw_datasets || !family)
return ["All"];
let datasetsInFamily = raw_datasets.filter(CensusDataSourceEditor.isInFamily.bind(null, family));
let vintages_set = new Set<string>(datasetsInFamily.map((d) => d.c_vintage !== undefined && d.c_vintage.toString()));
let vintages_list = _.sortBy(Array.from(vintages_set));
vintages_list.unshift("All");
return vintages_list;
}
private getDatasets(family:string, vintage:string)
{
let raw_datasets = this.state.datasets;
let ds = this.props.dataSource as CensusDataSource;
if (!raw_datasets || !family || !vintage)
return [{ value: ds.dataSet.value, label: ds.dataSet.value}];
let filterFunc = (dataset: CensusApiDataSet) => CensusDataSourceEditor.isInFamily(family, dataset) && CensusDataSourceEditor.isOfVintage(vintage, dataset);
let makeEntry = (dataset: CensusApiDataSet) => { return { value: dataset.identifier, label: dataset.title }; };
return _.sortBy(raw_datasets.filter(filterFunc).map(makeEntry), "label");
}
private getDataset(datasetName:string)
{
let raw_datasets = this.state.datasets;
if (!raw_datasets)
return null;
return raw_datasets.filter((dataset) => dataset.identifier === datasetName);
}
private getGeographies(dataSet: string)
{
let ds = (this.props.dataSource as CensusDataSource);
this.api.getGeographies(ds.dataSet.value).then(
(geographies: { [id: string]: { name: string } }) => {
let tempGeographies = new Array<{ value: string, label: string }>();
for (let id in geographies) {
tempGeographies.push({ value: id, label: geographies[id].name });
}
tempGeographies = _.sortBy(tempGeographies, "value");
if (!_.isEqual(this.state.geographies, tempGeographies))
this.setState({ geographies: tempGeographies });
});
}
dataFamilyChanged=(selectedItem:any)=>
{
this.setState({ dataFamily: (selectedItem as string) });
}
dataVintageChanged=(selectedItem:any)=>
{
this.setState({ dataVintage: (selectedItem as string)});
}
get editorFields(): [React.ReactChild, React.ReactChild][] {
let ds = (this.props.dataSource as CensusDataSource);
this.api = ds.getAPI();
let datasets = this.getDatasets("All", "All");
let dataset = _.first(this.getDataset(ds.dataSet.value));
let datasetLabel: string = dataset ? dataset.title : "";
return [
this.getLabelEditor(ds.label),
[
Weave.lang("API key"),
<StatefulTextField style={{width: "100%"}}
ref={WeaveReactUtils.linkReactStateRef(this, { content: ds.apiKey }) }/>
],
[
<HBox className="weave-padded-hbox" style={{alignItems: "center", justifyContent: "flex-end"}}>
{Weave.lang("Key namespace")}
<HelpIcon>{Weave.lang("Key namespaces are used to link tables using matching key columns.")}</HelpIcon>
</HBox>,
<KeyTypeInput style={{width: "100%"}}
keyTypeProperty={ds.keyType}/>
],
[
Weave.lang("Dataset"),
<ComboBox className="search" style={{width: "100%"}}
ref={WeaveReactUtils.linkReactStateRef(this, { value: ds.dataSet }) }
selectFirstOnInvalid
options={datasets || [{value: dataset, label: datasetLabel}]}/>
],
[
Weave.lang("Geographic scope"),
<ComboBox style={{width: "100%"}}
ref={WeaveReactUtils.linkReactStateRef(this, {value: ds.geographicScope })}
selectFirstOnInvalid
options={this.state.geographies || [{value: ds.geographicScope.value, label: ds.geographicScope.value}]}/>
],
];
}
renderFields(): JSX.Element {
return (
<VBox>
{
super.renderFields()
}
<CensusGeographyFilter filterLinkableVariable={(this.props.dataSource as CensusDataSource).geographicFilters}
optional={this.state.optional}
requires={this.state.requires || null}/>
</VBox>
)
}
}
| {
return family && (family.indexOf("acs") == 0 || family.indexOf("sf") == 0);
} | identifier_body |
CensusDataSourceEditor.tsx | import * as React from "react";
import * as weavejs from "weavejs";
import * as _ from "lodash";
import Weave = weavejs.Weave;
import StatefulTextField = weavejs.ui.StatefulTextField;
import ComboBox = weavejs.ui.ComboBox;
import WeaveReactUtils = weavejs.util.WeaveReactUtils
import HBox = weavejs.ui.flexbox.HBox;
import VBox = weavejs.ui.flexbox.VBox;
import HelpIcon = weavejs.ui.HelpIcon;
import EntityNode = weavejs.data.hierarchy.EntityNode;
import EntityType = weavejs.api.data.EntityType;
import IWeaveTreeNode = weavejs.api.data.IWeaveTreeNode;
import ColumnUtils = weavejs.data.ColumnUtils;
import IQualifiedKey = weavejs.api.data.IQualifiedKey;
import ColumnTreeNode = weavejs.data.hierarchy.ColumnTreeNode;
import StandardLib = weavejs.util.StandardLib;
import CensusDataSource = weavejs.data.source.CensusDataSource;
import CensusApi = weavejs.data.source.CensusApi;
import DataSourceEditor, {IDataSourceEditorState, IDataSourceEditorProps} from "weaveapp/editor/DataSourceEditor";
import {CensusGeographyFilter} from "weaveapp/editor/CensusGeographyFilter";
import KeyTypeInput from "weaveapp/ui/KeyTypeInput";
import CensusApiDataSet = weavejs.data.source.CensusApiDataSet;
import CensusApiGeography = weavejs.data.source.CensusApiGeography;
export interface ICensusDataSourceEditorState extends IDataSourceEditorState
{
dataFamily?: string;
dataVintage?: string;
geographies?: {value: string, label: string }[];
datasets?: CensusApiDataSet[];
optional?: string; /* Optional geography filter name */
requires?: string[]; /* Required geography filter names */
}
export default class CensusDataSourceEditor extends DataSourceEditor<ICensusDataSourceEditorState>
{
constructor(props:IDataSourceEditorProps)
{
super(props);
let ds = (this.props.dataSource as CensusDataSource);
this.api = ds.getAPI();
this.api.getDatasets().then(
(result: { dataset: CensusApiDataSet[] }) => { this.state = { datasets: result.dataset } }
);
ds.dataSet.addGroupedCallback(this, this.getGeographies, true);
ds.geographicScope.addGroupedCallback(this, this.updateRequiresAndOptional, true);
}
updateRequiresAndOptional=()=>
{
let ds = (this.props.dataSource as CensusDataSource);
this.api.getGeographies(ds.dataSet.value).then(
(geographies:({[id:string]: CensusApiGeography}))=>
{
let geography = geographies[ds.geographicScope.value];
if (geography)
{
this.setState({ optional: geography.optionalWithWCFor, requires: geography.requires });
}
}
)
}
static isUsableFamily(family: string): boolean {
return family && (family.indexOf("acs") == 0 || family.indexOf("sf") == 0);
}
static isInFamily(family: string, dataset: CensusApiDataSet): boolean {
return family && dataset && ((family == "All") || (dataset.c_dataset.indexOf(family) != -1));
}
static isOfVintage(vintage: string, dataset: CensusApiDataSet): boolean {
return vintage && dataset && ((vintage == "All") || (dataset.c_vintage !== undefined && dataset.c_vintage.toString() == vintage));
}
private api: CensusApi;
state: ICensusDataSourceEditorState = {};
private | ():string[]
{
let raw_datasets = this.state.datasets;
if (!raw_datasets)
return ["All"];
let families_set = new Set<string>(_.flatten(_.map(raw_datasets, (d) => d.c_dataset)).filter(CensusDataSourceEditor.isUsableFamily));
let families_list = _.sortBy(Array.from(families_set));
families_list.unshift("All");
return families_list;
}
private getDataVintages(family:string):string[]
{
let raw_datasets = this.state.datasets;
if (!raw_datasets || !family)
return ["All"];
let datasetsInFamily = raw_datasets.filter(CensusDataSourceEditor.isInFamily.bind(null, family));
let vintages_set = new Set<string>(datasetsInFamily.map((d) => d.c_vintage !== undefined && d.c_vintage.toString()));
let vintages_list = _.sortBy(Array.from(vintages_set));
vintages_list.unshift("All");
return vintages_list;
}
private getDatasets(family:string, vintage:string)
{
let raw_datasets = this.state.datasets;
let ds = this.props.dataSource as CensusDataSource;
if (!raw_datasets || !family || !vintage)
return [{ value: ds.dataSet.value, label: ds.dataSet.value}];
let filterFunc = (dataset: CensusApiDataSet) => CensusDataSourceEditor.isInFamily(family, dataset) && CensusDataSourceEditor.isOfVintage(vintage, dataset);
let makeEntry = (dataset: CensusApiDataSet) => { return { value: dataset.identifier, label: dataset.title }; };
return _.sortBy(raw_datasets.filter(filterFunc).map(makeEntry), "label");
}
private getDataset(datasetName:string)
{
let raw_datasets = this.state.datasets;
if (!raw_datasets)
return null;
return raw_datasets.filter((dataset) => dataset.identifier === datasetName);
}
private getGeographies(dataSet: string)
{
let ds = (this.props.dataSource as CensusDataSource);
this.api.getGeographies(ds.dataSet.value).then(
(geographies: { [id: string]: { name: string } }) => {
let tempGeographies = new Array<{ value: string, label: string }>();
for (let id in geographies) {
tempGeographies.push({ value: id, label: geographies[id].name });
}
tempGeographies = _.sortBy(tempGeographies, "value");
if (!_.isEqual(this.state.geographies, tempGeographies))
this.setState({ geographies: tempGeographies });
});
}
dataFamilyChanged=(selectedItem:any)=>
{
this.setState({ dataFamily: (selectedItem as string) });
}
dataVintageChanged=(selectedItem:any)=>
{
this.setState({ dataVintage: (selectedItem as string)});
}
get editorFields(): [React.ReactChild, React.ReactChild][] {
let ds = (this.props.dataSource as CensusDataSource);
this.api = ds.getAPI();
let datasets = this.getDatasets("All", "All");
let dataset = _.first(this.getDataset(ds.dataSet.value));
let datasetLabel: string = dataset ? dataset.title : "";
return [
this.getLabelEditor(ds.label),
[
Weave.lang("API key"),
<StatefulTextField style={{width: "100%"}}
ref={WeaveReactUtils.linkReactStateRef(this, { content: ds.apiKey }) }/>
],
[
<HBox className="weave-padded-hbox" style={{alignItems: "center", justifyContent: "flex-end"}}>
{Weave.lang("Key namespace")}
<HelpIcon>{Weave.lang("Key namespaces are used to link tables using matching key columns.")}</HelpIcon>
</HBox>,
<KeyTypeInput style={{width: "100%"}}
keyTypeProperty={ds.keyType}/>
],
[
Weave.lang("Dataset"),
<ComboBox className="search" style={{width: "100%"}}
ref={WeaveReactUtils.linkReactStateRef(this, { value: ds.dataSet }) }
selectFirstOnInvalid
options={datasets || [{value: dataset, label: datasetLabel}]}/>
],
[
Weave.lang("Geographic scope"),
<ComboBox style={{width: "100%"}}
ref={WeaveReactUtils.linkReactStateRef(this, {value: ds.geographicScope })}
selectFirstOnInvalid
options={this.state.geographies || [{value: ds.geographicScope.value, label: ds.geographicScope.value}]}/>
],
];
}
renderFields(): JSX.Element {
return (
<VBox>
{
super.renderFields()
}
<CensusGeographyFilter filterLinkableVariable={(this.props.dataSource as CensusDataSource).geographicFilters}
optional={this.state.optional}
requires={this.state.requires || null}/>
</VBox>
)
}
}
| getDataFamilies | identifier_name |
CensusDataSourceEditor.tsx | import * as React from "react";
import * as weavejs from "weavejs";
import * as _ from "lodash";
import Weave = weavejs.Weave;
import StatefulTextField = weavejs.ui.StatefulTextField;
import ComboBox = weavejs.ui.ComboBox;
import WeaveReactUtils = weavejs.util.WeaveReactUtils
import HBox = weavejs.ui.flexbox.HBox;
import VBox = weavejs.ui.flexbox.VBox;
import HelpIcon = weavejs.ui.HelpIcon;
import EntityNode = weavejs.data.hierarchy.EntityNode;
import EntityType = weavejs.api.data.EntityType;
import IWeaveTreeNode = weavejs.api.data.IWeaveTreeNode;
import ColumnUtils = weavejs.data.ColumnUtils;
import IQualifiedKey = weavejs.api.data.IQualifiedKey;
import ColumnTreeNode = weavejs.data.hierarchy.ColumnTreeNode;
import StandardLib = weavejs.util.StandardLib;
import CensusDataSource = weavejs.data.source.CensusDataSource;
import CensusApi = weavejs.data.source.CensusApi;
import DataSourceEditor, {IDataSourceEditorState, IDataSourceEditorProps} from "weaveapp/editor/DataSourceEditor";
import {CensusGeographyFilter} from "weaveapp/editor/CensusGeographyFilter";
import KeyTypeInput from "weaveapp/ui/KeyTypeInput";
import CensusApiDataSet = weavejs.data.source.CensusApiDataSet;
import CensusApiGeography = weavejs.data.source.CensusApiGeography;
export interface ICensusDataSourceEditorState extends IDataSourceEditorState
{
dataFamily?: string;
dataVintage?: string;
geographies?: {value: string, label: string }[];
datasets?: CensusApiDataSet[];
optional?: string; /* Optional geography filter name */
requires?: string[]; /* Required geography filter names */
}
export default class CensusDataSourceEditor extends DataSourceEditor<ICensusDataSourceEditorState> | this.api = ds.getAPI();
this.api.getDatasets().then(
(result: { dataset: CensusApiDataSet[] }) => { this.state = { datasets: result.dataset } }
);
ds.dataSet.addGroupedCallback(this, this.getGeographies, true);
ds.geographicScope.addGroupedCallback(this, this.updateRequiresAndOptional, true);
}
updateRequiresAndOptional=()=>
{
let ds = (this.props.dataSource as CensusDataSource);
this.api.getGeographies(ds.dataSet.value).then(
(geographies:({[id:string]: CensusApiGeography}))=>
{
let geography = geographies[ds.geographicScope.value];
if (geography)
{
this.setState({ optional: geography.optionalWithWCFor, requires: geography.requires });
}
}
)
}
static isUsableFamily(family: string): boolean {
return family && (family.indexOf("acs") == 0 || family.indexOf("sf") == 0);
}
static isInFamily(family: string, dataset: CensusApiDataSet): boolean {
return family && dataset && ((family == "All") || (dataset.c_dataset.indexOf(family) != -1));
}
static isOfVintage(vintage: string, dataset: CensusApiDataSet): boolean {
return vintage && dataset && ((vintage == "All") || (dataset.c_vintage !== undefined && dataset.c_vintage.toString() == vintage));
}
private api: CensusApi;
state: ICensusDataSourceEditorState = {};
private getDataFamilies():string[]
{
let raw_datasets = this.state.datasets;
if (!raw_datasets)
return ["All"];
let families_set = new Set<string>(_.flatten(_.map(raw_datasets, (d) => d.c_dataset)).filter(CensusDataSourceEditor.isUsableFamily));
let families_list = _.sortBy(Array.from(families_set));
families_list.unshift("All");
return families_list;
}
private getDataVintages(family:string):string[]
{
let raw_datasets = this.state.datasets;
if (!raw_datasets || !family)
return ["All"];
let datasetsInFamily = raw_datasets.filter(CensusDataSourceEditor.isInFamily.bind(null, family));
let vintages_set = new Set<string>(datasetsInFamily.map((d) => d.c_vintage !== undefined && d.c_vintage.toString()));
let vintages_list = _.sortBy(Array.from(vintages_set));
vintages_list.unshift("All");
return vintages_list;
}
private getDatasets(family:string, vintage:string)
{
let raw_datasets = this.state.datasets;
let ds = this.props.dataSource as CensusDataSource;
if (!raw_datasets || !family || !vintage)
return [{ value: ds.dataSet.value, label: ds.dataSet.value}];
let filterFunc = (dataset: CensusApiDataSet) => CensusDataSourceEditor.isInFamily(family, dataset) && CensusDataSourceEditor.isOfVintage(vintage, dataset);
let makeEntry = (dataset: CensusApiDataSet) => { return { value: dataset.identifier, label: dataset.title }; };
return _.sortBy(raw_datasets.filter(filterFunc).map(makeEntry), "label");
}
private getDataset(datasetName:string)
{
let raw_datasets = this.state.datasets;
if (!raw_datasets)
return null;
return raw_datasets.filter((dataset) => dataset.identifier === datasetName);
}
private getGeographies(dataSet: string)
{
let ds = (this.props.dataSource as CensusDataSource);
this.api.getGeographies(ds.dataSet.value).then(
(geographies: { [id: string]: { name: string } }) => {
let tempGeographies = new Array<{ value: string, label: string }>();
for (let id in geographies) {
tempGeographies.push({ value: id, label: geographies[id].name });
}
tempGeographies = _.sortBy(tempGeographies, "value");
if (!_.isEqual(this.state.geographies, tempGeographies))
this.setState({ geographies: tempGeographies });
});
}
dataFamilyChanged=(selectedItem:any)=>
{
this.setState({ dataFamily: (selectedItem as string) });
}
dataVintageChanged=(selectedItem:any)=>
{
this.setState({ dataVintage: (selectedItem as string)});
}
get editorFields(): [React.ReactChild, React.ReactChild][] {
let ds = (this.props.dataSource as CensusDataSource);
this.api = ds.getAPI();
let datasets = this.getDatasets("All", "All");
let dataset = _.first(this.getDataset(ds.dataSet.value));
let datasetLabel: string = dataset ? dataset.title : "";
return [
this.getLabelEditor(ds.label),
[
Weave.lang("API key"),
<StatefulTextField style={{width: "100%"}}
ref={WeaveReactUtils.linkReactStateRef(this, { content: ds.apiKey }) }/>
],
[
<HBox className="weave-padded-hbox" style={{alignItems: "center", justifyContent: "flex-end"}}>
{Weave.lang("Key namespace")}
<HelpIcon>{Weave.lang("Key namespaces are used to link tables using matching key columns.")}</HelpIcon>
</HBox>,
<KeyTypeInput style={{width: "100%"}}
keyTypeProperty={ds.keyType}/>
],
[
Weave.lang("Dataset"),
<ComboBox className="search" style={{width: "100%"}}
ref={WeaveReactUtils.linkReactStateRef(this, { value: ds.dataSet }) }
selectFirstOnInvalid
options={datasets || [{value: dataset, label: datasetLabel}]}/>
],
[
Weave.lang("Geographic scope"),
<ComboBox style={{width: "100%"}}
ref={WeaveReactUtils.linkReactStateRef(this, {value: ds.geographicScope })}
selectFirstOnInvalid
options={this.state.geographies || [{value: ds.geographicScope.value, label: ds.geographicScope.value}]}/>
],
];
}
renderFields(): JSX.Element {
return (
<VBox>
{
super.renderFields()
}
<CensusGeographyFilter filterLinkableVariable={(this.props.dataSource as CensusDataSource).geographicFilters}
optional={this.state.optional}
requires={this.state.requires || null}/>
</VBox>
)
}
} | {
constructor(props:IDataSourceEditorProps)
{
super(props);
let ds = (this.props.dataSource as CensusDataSource); | random_line_split |
test_marshal.py | from __future__ import print_function, absolute_import
import random
import unittest
from pysmoke import marshal
from pysmoke.smoke import ffi, Type, TypedValue, pystring, smokec, not_implemented, charp, dbg
from pysmoke import QtCore, QtGui
qtcore = QtCore.__binding__
qtgui = QtGui.__binding__
class MarshalTestCase(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_qstring(self):
qstr = marshal.QString.from_py('aqstring')
print(qstr)
pstr = marshal.QString.to_py(qstr)
#dbg()
self.assertEqual(pstr, 'aqstring') | import gc; gc.collect()
qstr2 = marshal.QString.from_py(pstr)
print('QS:', qstr, pstr, qstr2, marshal.QString.to_py(qstr))
obj = QtGui.QObject()
print('obj', obj.__cval__.value.s_voidp)
obj.setObjectName('my_object')
self.assertEqual(obj.objectName(), 'my_object')
if __name__ == '__main__':
unittest.main() | random_line_split | |
test_marshal.py | from __future__ import print_function, absolute_import
import random
import unittest
from pysmoke import marshal
from pysmoke.smoke import ffi, Type, TypedValue, pystring, smokec, not_implemented, charp, dbg
from pysmoke import QtCore, QtGui
qtcore = QtCore.__binding__
qtgui = QtGui.__binding__
class MarshalTestCase(unittest.TestCase):
def | (self):
pass
def tearDown(self):
pass
def test_qstring(self):
qstr = marshal.QString.from_py('aqstring')
print(qstr)
pstr = marshal.QString.to_py(qstr)
#dbg()
self.assertEqual(pstr, 'aqstring')
import gc; gc.collect()
qstr2 = marshal.QString.from_py(pstr)
print('QS:', qstr, pstr, qstr2, marshal.QString.to_py(qstr))
obj = QtGui.QObject()
print('obj', obj.__cval__.value.s_voidp)
obj.setObjectName('my_object')
self.assertEqual(obj.objectName(), 'my_object')
if __name__ == '__main__':
unittest.main()
| setUp | identifier_name |
test_marshal.py | from __future__ import print_function, absolute_import
import random
import unittest
from pysmoke import marshal
from pysmoke.smoke import ffi, Type, TypedValue, pystring, smokec, not_implemented, charp, dbg
from pysmoke import QtCore, QtGui
qtcore = QtCore.__binding__
qtgui = QtGui.__binding__
class MarshalTestCase(unittest.TestCase):
|
if __name__ == '__main__':
unittest.main()
| def setUp(self):
pass
def tearDown(self):
pass
def test_qstring(self):
qstr = marshal.QString.from_py('aqstring')
print(qstr)
pstr = marshal.QString.to_py(qstr)
#dbg()
self.assertEqual(pstr, 'aqstring')
import gc; gc.collect()
qstr2 = marshal.QString.from_py(pstr)
print('QS:', qstr, pstr, qstr2, marshal.QString.to_py(qstr))
obj = QtGui.QObject()
print('obj', obj.__cval__.value.s_voidp)
obj.setObjectName('my_object')
self.assertEqual(obj.objectName(), 'my_object') | identifier_body |
test_marshal.py | from __future__ import print_function, absolute_import
import random
import unittest
from pysmoke import marshal
from pysmoke.smoke import ffi, Type, TypedValue, pystring, smokec, not_implemented, charp, dbg
from pysmoke import QtCore, QtGui
qtcore = QtCore.__binding__
qtgui = QtGui.__binding__
class MarshalTestCase(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_qstring(self):
qstr = marshal.QString.from_py('aqstring')
print(qstr)
pstr = marshal.QString.to_py(qstr)
#dbg()
self.assertEqual(pstr, 'aqstring')
import gc; gc.collect()
qstr2 = marshal.QString.from_py(pstr)
print('QS:', qstr, pstr, qstr2, marshal.QString.to_py(qstr))
obj = QtGui.QObject()
print('obj', obj.__cval__.value.s_voidp)
obj.setObjectName('my_object')
self.assertEqual(obj.objectName(), 'my_object')
if __name__ == '__main__':
| unittest.main() | conditional_block | |
toolkit.py | from abc import abstractmethod
from threading import Timer
from ctx.uncertainty.measurers import clear_dobson_paddy
class Event:
def __init__(self, type, **kwargs):
self.type = type
self.properties = kwargs
class Observer:
def update(self):
raise NotImplementedError("Not implemented")
class Observable:
def __init__(self):
self._observers = []
def register(self, observer):
self._observers.append(observer)
def notify(self, event):
event.source = self
for observer in self._observers:
observer.update(event)
class Widget(Observable, Observer):
@abstractmethod
def update(self, event):
pass
def __init__(self, type, status_name, *generators):
super(Widget, self).__init__()
self.type = type
self.generators = generators
self.status = None
self.status_name = status_name
for generator in generators:
generator.register(self)
def get_property(self, type):
|
class Generator(Observable):
def __init__(self, type, relevance, threshold, certainty_measurer=clear_dobson_paddy):
super().__init__()
self.certainty_measurer = certainty_measurer
self.property = None
self.type = type
self.relevance = relevance
self.threshold = threshold
def generate(self):
# generate a dict, e.g.: {"value": 12, "certainty" : 0.9}
raise NotImplementedError("Not implemented")
def has_acceptable_certainty(self, new_property):
certainty_level = self.certainty_measurer(self.relevance, new_property['accuracy'])
is_acceptable = certainty_level > self.threshold
return is_acceptable
def start(self, delay=5):
new_property = self.generate()
if new_property['value'] != self.property and self.has_acceptable_certainty(new_property):
self.property = new_property['value']
event = Event(self.type, property=new_property['value'])
super().notify(event)
timer_task = Timer(delay, lambda: self.start(delay), ())
timer_task.start()
| for generator in self.generators:
if generator.type == type:
return generator.property | identifier_body |
toolkit.py | from abc import abstractmethod
from threading import Timer
from ctx.uncertainty.measurers import clear_dobson_paddy
class Event:
def __init__(self, type, **kwargs):
self.type = type
self.properties = kwargs
class | :
def update(self):
raise NotImplementedError("Not implemented")
class Observable:
def __init__(self):
self._observers = []
def register(self, observer):
self._observers.append(observer)
def notify(self, event):
event.source = self
for observer in self._observers:
observer.update(event)
class Widget(Observable, Observer):
@abstractmethod
def update(self, event):
pass
def __init__(self, type, status_name, *generators):
super(Widget, self).__init__()
self.type = type
self.generators = generators
self.status = None
self.status_name = status_name
for generator in generators:
generator.register(self)
def get_property(self, type):
for generator in self.generators:
if generator.type == type:
return generator.property
class Generator(Observable):
def __init__(self, type, relevance, threshold, certainty_measurer=clear_dobson_paddy):
super().__init__()
self.certainty_measurer = certainty_measurer
self.property = None
self.type = type
self.relevance = relevance
self.threshold = threshold
def generate(self):
# generate a dict, e.g.: {"value": 12, "certainty" : 0.9}
raise NotImplementedError("Not implemented")
def has_acceptable_certainty(self, new_property):
certainty_level = self.certainty_measurer(self.relevance, new_property['accuracy'])
is_acceptable = certainty_level > self.threshold
return is_acceptable
def start(self, delay=5):
new_property = self.generate()
if new_property['value'] != self.property and self.has_acceptable_certainty(new_property):
self.property = new_property['value']
event = Event(self.type, property=new_property['value'])
super().notify(event)
timer_task = Timer(delay, lambda: self.start(delay), ())
timer_task.start()
| Observer | identifier_name |
toolkit.py | from abc import abstractmethod
from threading import Timer
from ctx.uncertainty.measurers import clear_dobson_paddy
class Event:
def __init__(self, type, **kwargs):
self.type = type
self.properties = kwargs
class Observer:
def update(self):
raise NotImplementedError("Not implemented")
| def register(self, observer):
self._observers.append(observer)
def notify(self, event):
event.source = self
for observer in self._observers:
observer.update(event)
class Widget(Observable, Observer):
@abstractmethod
def update(self, event):
pass
def __init__(self, type, status_name, *generators):
super(Widget, self).__init__()
self.type = type
self.generators = generators
self.status = None
self.status_name = status_name
for generator in generators:
generator.register(self)
def get_property(self, type):
for generator in self.generators:
if generator.type == type:
return generator.property
class Generator(Observable):
def __init__(self, type, relevance, threshold, certainty_measurer=clear_dobson_paddy):
super().__init__()
self.certainty_measurer = certainty_measurer
self.property = None
self.type = type
self.relevance = relevance
self.threshold = threshold
def generate(self):
# generate a dict, e.g.: {"value": 12, "certainty" : 0.9}
raise NotImplementedError("Not implemented")
def has_acceptable_certainty(self, new_property):
certainty_level = self.certainty_measurer(self.relevance, new_property['accuracy'])
is_acceptable = certainty_level > self.threshold
return is_acceptable
def start(self, delay=5):
new_property = self.generate()
if new_property['value'] != self.property and self.has_acceptable_certainty(new_property):
self.property = new_property['value']
event = Event(self.type, property=new_property['value'])
super().notify(event)
timer_task = Timer(delay, lambda: self.start(delay), ())
timer_task.start() |
class Observable:
def __init__(self):
self._observers = []
| random_line_split |
toolkit.py | from abc import abstractmethod
from threading import Timer
from ctx.uncertainty.measurers import clear_dobson_paddy
class Event:
def __init__(self, type, **kwargs):
self.type = type
self.properties = kwargs
class Observer:
def update(self):
raise NotImplementedError("Not implemented")
class Observable:
def __init__(self):
self._observers = []
def register(self, observer):
self._observers.append(observer)
def notify(self, event):
event.source = self
for observer in self._observers:
observer.update(event)
class Widget(Observable, Observer):
@abstractmethod
def update(self, event):
pass
def __init__(self, type, status_name, *generators):
super(Widget, self).__init__()
self.type = type
self.generators = generators
self.status = None
self.status_name = status_name
for generator in generators:
generator.register(self)
def get_property(self, type):
for generator in self.generators:
|
class Generator(Observable):
def __init__(self, type, relevance, threshold, certainty_measurer=clear_dobson_paddy):
super().__init__()
self.certainty_measurer = certainty_measurer
self.property = None
self.type = type
self.relevance = relevance
self.threshold = threshold
def generate(self):
# generate a dict, e.g.: {"value": 12, "certainty" : 0.9}
raise NotImplementedError("Not implemented")
def has_acceptable_certainty(self, new_property):
certainty_level = self.certainty_measurer(self.relevance, new_property['accuracy'])
is_acceptable = certainty_level > self.threshold
return is_acceptable
def start(self, delay=5):
new_property = self.generate()
if new_property['value'] != self.property and self.has_acceptable_certainty(new_property):
self.property = new_property['value']
event = Event(self.type, property=new_property['value'])
super().notify(event)
timer_task = Timer(delay, lambda: self.start(delay), ())
timer_task.start()
| if generator.type == type:
return generator.property | conditional_block |
index.js | var assert = require('assert')
var Completion = require('./lib/completion')
var locale = require('./lib/locale')
var Parser = require('./lib/parser')
var path = require('path')
var Usage = require('./lib/usage')
var Validation = require('./lib/validation')
var Y18n = require('y18n')
Argv(process.argv.slice(2))
var exports = module.exports = Argv
function Argv (processArgs, cwd) {
processArgs = processArgs || [] // handle calling yargs().
var self = {}
var completion = null
var usage = null
var validation = null
var y18n = Y18n({
directory: path.resolve(__dirname, './locales'),
updateFiles: false
})
if (!cwd) cwd = process.cwd()
self.$0 = process.argv
.slice(0, 2)
.map(function (x, i) {
// ignore the node bin, specify this in your
// bin file with #!/usr/bin/env node
if (i === 0 && /\b(node|iojs)$/.test(x)) return
var b = rebase(cwd, x)
return x.match(/^\//) && b.length < x.length
? b : x
})
.join(' ').trim()
if (process.env._ !== undefined && process.argv[1] === process.env._) {
self.$0 = process.env._.replace(
path.dirname(process.execPath) + '/', ''
)
}
var options
self.resetOptions = self.reset = function () {
// put yargs back into its initial
// state, this is useful for creating a
// nested CLI.
options = {
array: [],
boolean: [],
string: [],
narg: {},
key: {},
alias: {},
default: {},
defaultDescription: {},
choices: {},
requiresArg: [],
count: [],
normalize: [],
config: []
}
usage = Usage(self, y18n) // handle usage output.
validation = Validation(self, usage, y18n) // handle arg validation.
completion = Completion(self, usage)
demanded = {}
exitProcess = true
strict = false
helpOpt = null
versionOpt = null
commandHandlers = {}
self.parsed = false
return self
}
self.resetOptions()
self.boolean = function (bools) {
options.boolean.push.apply(options.boolean, [].concat(bools))
return self
}
self.array = function (arrays) {
options.array.push.apply(options.array, [].concat(arrays))
return self
}
self.nargs = function (key, n) {
if (typeof key === 'object') {
Object.keys(key).forEach(function (k) {
self.nargs(k, key[k])
})
} else {
options.narg[key] = n
}
return self
}
self.choices = function (key, values) {
if (typeof key === 'object') {
Object.keys(key).forEach(function (k) {
self.choices(k, key[k])
})
} else {
options.choices[key] = (options.choices[key] || []).concat(values)
}
return self
}
self.normalize = function (strings) {
options.normalize.push.apply(options.normalize, [].concat(strings))
return self
}
self.config = function (key, msg) {
self.describe(key, msg || usage.deferY18nLookup('Path to JSON config file'))
options.config.push.apply(options.config, [].concat(key))
return self
}
self.example = function (cmd, description) {
usage.example(cmd, description)
return self
}
self.command = function (cmd, description, fn) {
if (description !== false) {
usage.command(cmd, description)
}
if (fn) commandHandlers[cmd] = fn
return self
}
var commandHandlers = {}
self.getCommandHandlers = function () {
return commandHandlers
}
self.string = function (strings) {
options.string.push.apply(options.string, [].concat(strings))
return self
}
self.default = function (key, value, defaultDescription) {
if (typeof key === 'object') {
Object.keys(key).forEach(function (k) {
self.default(k, key[k])
})
} else {
if (defaultDescription) options.defaultDescription[key] = defaultDescription
if (typeof value === 'function') {
if (!options.defaultDescription[key]) options.defaultDescription[key] = usage.functionDescription(value)
value = value.call()
}
options.default[key] = value
}
return self
}
self.alias = function (x, y) {
if (typeof x === 'object') {
Object.keys(x).forEach(function (key) {
self.alias(key, x[key])
})
} else {
options.alias[x] = (options.alias[x] || []).concat(y)
}
return self
}
self.count = function (counts) {
options.count.push.apply(options.count, [].concat(counts))
return self
}
var demanded = {}
self.demand = self.required = self.require = function (keys, max, msg) {
// you can optionally provide a 'max' key,
// which will raise an exception if too many '_'
// options are provided.
if (typeof max !== 'number') {
msg = max
max = Infinity
}
if (typeof keys === 'number') {
if (!demanded._) demanded._ = { count: 0, msg: null, max: max }
demanded._.count = keys
demanded._.msg = msg
} else if (Array.isArray(keys)) {
keys.forEach(function (key) {
self.demand(key, msg)
})
} else {
if (typeof msg === 'string') {
demanded[keys] = { msg: msg }
} else if (msg === true || typeof msg === 'undefined') {
demanded[keys] = { msg: undefined }
}
}
return self
}
self.getDemanded = function () {
return demanded
}
self.requiresArg = function (requiresArgs) {
options.requiresArg.push.apply(options.requiresArg, [].concat(requiresArgs))
return self
}
self.implies = function (key, value) {
validation.implies(key, value)
return self
}
self.usage = function (msg, opts) {
if (!opts && typeof msg === 'object') {
opts = msg
msg = null
}
usage.usage(msg)
if (opts) self.options(opts)
return self
}
self.epilogue = self.epilog = function (msg) {
usage.epilog(msg)
return self
}
self.fail = function (f) {
usage.failFn(f)
return self
}
self.check = function (f) {
validation.check(f)
return self
}
self.defaults = self.default
self.describe = function (key, desc) {
options.key[key] = true
usage.describe(key, desc)
return self
}
self.parse = function (args) {
return parseArgs(args)
}
self.option = self.options = function (key, opt) {
if (typeof key === 'object') {
Object.keys(key).forEach(function (k) {
self.options(k, key[k])
})
} else {
assert(typeof opt === 'object', 'second argument to option must be an object')
options.key[key] = true // track manually set keys.
if (opt.alias) self.alias(key, opt.alias)
var demand = opt.demand || opt.required || opt.require
if (demand) {
self.demand(key, demand)
} if ('config' in opt) {
self.config(key)
} if ('default' in opt) {
self.default(key, opt.default)
} if ('nargs' in opt) {
self.nargs(key, opt.nargs)
} if ('choices' in opt) {
self.choices(key, opt.choices)
} if (opt.boolean || opt.type === 'boolean') {
self.boolean(key)
if (opt.alias) self.boolean(opt.alias)
} if (opt.array || opt.type === 'array') {
self.array(key)
if (opt.alias) self.array(opt.alias)
} if (opt.string || opt.type === 'string') {
self.string(key)
if (opt.alias) self.string(opt.alias)
} if (opt.count || opt.type === 'count') {
self.count(key)
} if (opt.defaultDescription) {
options.defaultDescription[key] = opt.defaultDescription
}
var desc = opt.describe || opt.description || opt.desc
if (desc) {
self.describe(key, desc)
}
if (opt.requiresArg) {
self.requiresArg(key)
}
}
return self
}
self.getOptions = function () {
return options
}
self.wrap = function (cols) {
usage.wrap(cols)
return self
}
var strict = false
self.strict = function () {
strict = true
return self
}
self.getStrict = function () {
return strict
}
self.showHelp = function (level) {
if (!self.parsed) parseArgs(processArgs) // run parser, if it has not already been executed.
usage.showHelp(level)
return self
}
var versionOpt = null
self.version = function (ver, opt, msg) {
versionOpt = opt || 'version'
usage.version(ver)
self.boolean(versionOpt)
self.describe(versionOpt, msg || usage.deferY18nLookup('Show version number'))
return self
}
var helpOpt = null
self.addHelpOpt = function (opt, msg) {
helpOpt = opt
self.boolean(opt)
self.describe(opt, msg || usage.deferY18nLookup('Show help'))
return self
}
self.showHelpOnFail = function (enabled, message) {
usage.showHelpOnFail(enabled, message)
return self
}
var exitProcess = true
self.exitProcess = function (enabled) {
if (typeof enabled !== 'boolean') {
enabled = true
}
exitProcess = enabled
return self
}
self.getExitProcess = function () {
return exitProcess
}
self.help = function () {
if (arguments.length > 0) return self.addHelpOpt.apply(self, arguments)
if (!self.parsed) parseArgs(processArgs) // run parser, if it has not already been executed.
return usage.help()
}
var completionCommand = null
self.completion = function (cmd, desc, fn) {
// a function to execute when generating
// completions can be provided as the second
// or third argument to completion.
if (typeof desc === 'function') {
fn = desc
desc = null
}
// register the completion command.
completionCommand = cmd || 'completion'
if (!desc && desc !== false) {
desc = 'generate bash completion script'
}
self.command(completionCommand, desc)
// a function can be provided
if (fn) completion.registerFunction(fn)
return self
}
self.showCompletionScript = function ($0) {
$0 = $0 || self.$0
console.log(completion.generateCompletionScript($0))
return self
}
self.locale = function (locale) {
if (arguments.length === 0) {
guessLocale()
return y18n.getLocale()
}
detectLocale = false
y18n.setLocale(locale)
return self
}
self.updateStrings = self.updateLocale = function (obj) {
detectLocale = false
y18n.updateLocale(obj)
return self
}
var detectLocale = true
self.detectLocale = function (detect) {
detectLocale = detect
return self
}
self.getDetectLocale = function () {
return detectLocale
}
self.getUsageInstance = function () {
return usage
}
self.getValidationInstance = function () {
return validation
}
self.terminalWidth = function () {
return require('window-size').width
}
Object.defineProperty(self, 'argv', {
get: function () {
var args = null
try {
args = parseArgs(processArgs)
} catch (err) {
usage.fail(err.message)
}
return args
},
enumerable: true
})
function parseArgs (args) {
var parsed = Parser(args, options, y18n)
var argv = parsed.argv
var aliases = parsed.aliases
argv.$0 = self.$0
self.parsed = parsed
guessLocale() // guess locale lazily, so that it can be turned off in chain.
// while building up the argv object, there
// are two passes through the parser. If completion
// is being performed short-circuit on the first pass.
if (completionCommand &&
(process.argv.join(' ')).indexOf(completion.completionKey) !== -1 &&
!argv[completion.completionKey]) {
return argv
}
// if there's a handler associated with a
// command defer processing to it.
var handlerKeys = Object.keys(self.getCommandHandlers())
for (var i = 0, command; (command = handlerKeys[i]) !== undefined; i++) {
if (~argv._.indexOf(command)) {
self.getCommandHandlers()[command](self.reset())
return self.argv
}
}
// generate a completion script for adding to ~/.bashrc.
if (completionCommand && ~argv._.indexOf(completionCommand) && !argv[completion.completionKey]) {
self.showCompletionScript()
if (exitProcess) {
process.exit(0)
}
}
// we must run completions first, a user might
// want to complete the --help or --version option.
if (completion.completionKey in argv) {
// we allow for asynchronous completions,
// e.g., loading in a list of commands from an API.
completion.getCompletion(function (completions) {
;(completions || []).forEach(function (completion) {
console.log(completion)
})
if (exitProcess) {
process.exit(0)
}
})
return
}
Object.keys(argv).forEach(function (key) {
if (key === helpOpt && argv[key]) {
self.showHelp('log') | process.exit(0)
}
} else if (key === versionOpt && argv[key]) {
usage.showVersion()
if (exitProcess) {
process.exit(0)
}
}
})
if (parsed.error) throw parsed.error
// if we're executed via bash completion, don't
// bother with validation.
if (!argv[completion.completionKey]) {
validation.nonOptionCount(argv)
validation.missingArgumentValue(argv)
validation.requiredArguments(argv)
if (strict) validation.unknownArguments(argv, aliases)
validation.customChecks(argv, aliases)
validation.limitedChoices(argv)
validation.implications(argv)
}
setPlaceholderKeys(argv)
return argv
}
function guessLocale () {
if (!detectLocale) return
self.locale(locale())
}
function setPlaceholderKeys (argv) {
Object.keys(options.key).forEach(function (key) {
if (typeof argv[key] === 'undefined') argv[key] = undefined
})
}
sigletonify(self)
return self
}
// rebase an absolute path to a relative one with respect to a base directory
// exported for tests
exports.rebase = rebase
function rebase (base, dir) {
return path.relative(base, dir)
}
/* Hack an instance of Argv with process.argv into Argv
so people can do
require('yargs')(['--beeble=1','-z','zizzle']).argv
to parse a list of args and
require('yargs').argv
to get a parsed version of process.argv.
*/
function sigletonify (inst) {
Object.keys(inst).forEach(function (key) {
if (key === 'argv') {
Argv.__defineGetter__(key, inst.__lookupGetter__(key))
} else {
Argv[key] = typeof inst[key] === 'function'
? inst[key].bind(inst)
: inst[key]
}
})
} | if (exitProcess) { | random_line_split |
index.js | var assert = require('assert')
var Completion = require('./lib/completion')
var locale = require('./lib/locale')
var Parser = require('./lib/parser')
var path = require('path')
var Usage = require('./lib/usage')
var Validation = require('./lib/validation')
var Y18n = require('y18n')
Argv(process.argv.slice(2))
var exports = module.exports = Argv
function Argv (processArgs, cwd) {
processArgs = processArgs || [] // handle calling yargs().
var self = {}
var completion = null
var usage = null
var validation = null
var y18n = Y18n({
directory: path.resolve(__dirname, './locales'),
updateFiles: false
})
if (!cwd) cwd = process.cwd()
self.$0 = process.argv
.slice(0, 2)
.map(function (x, i) {
// ignore the node bin, specify this in your
// bin file with #!/usr/bin/env node
if (i === 0 && /\b(node|iojs)$/.test(x)) return
var b = rebase(cwd, x)
return x.match(/^\//) && b.length < x.length
? b : x
})
.join(' ').trim()
if (process.env._ !== undefined && process.argv[1] === process.env._) {
self.$0 = process.env._.replace(
path.dirname(process.execPath) + '/', ''
)
}
var options
self.resetOptions = self.reset = function () {
// put yargs back into its initial
// state, this is useful for creating a
// nested CLI.
options = {
array: [],
boolean: [],
string: [],
narg: {},
key: {},
alias: {},
default: {},
defaultDescription: {},
choices: {},
requiresArg: [],
count: [],
normalize: [],
config: []
}
usage = Usage(self, y18n) // handle usage output.
validation = Validation(self, usage, y18n) // handle arg validation.
completion = Completion(self, usage)
demanded = {}
exitProcess = true
strict = false
helpOpt = null
versionOpt = null
commandHandlers = {}
self.parsed = false
return self
}
self.resetOptions()
self.boolean = function (bools) {
options.boolean.push.apply(options.boolean, [].concat(bools))
return self
}
self.array = function (arrays) {
options.array.push.apply(options.array, [].concat(arrays))
return self
}
self.nargs = function (key, n) {
if (typeof key === 'object') {
Object.keys(key).forEach(function (k) {
self.nargs(k, key[k])
})
} else {
options.narg[key] = n
}
return self
}
self.choices = function (key, values) {
if (typeof key === 'object') {
Object.keys(key).forEach(function (k) {
self.choices(k, key[k])
})
} else {
options.choices[key] = (options.choices[key] || []).concat(values)
}
return self
}
self.normalize = function (strings) {
options.normalize.push.apply(options.normalize, [].concat(strings))
return self
}
self.config = function (key, msg) {
self.describe(key, msg || usage.deferY18nLookup('Path to JSON config file'))
options.config.push.apply(options.config, [].concat(key))
return self
}
self.example = function (cmd, description) {
usage.example(cmd, description)
return self
}
self.command = function (cmd, description, fn) {
if (description !== false) {
usage.command(cmd, description)
}
if (fn) commandHandlers[cmd] = fn
return self
}
var commandHandlers = {}
self.getCommandHandlers = function () {
return commandHandlers
}
self.string = function (strings) {
options.string.push.apply(options.string, [].concat(strings))
return self
}
self.default = function (key, value, defaultDescription) {
if (typeof key === 'object') {
Object.keys(key).forEach(function (k) {
self.default(k, key[k])
})
} else {
if (defaultDescription) options.defaultDescription[key] = defaultDescription
if (typeof value === 'function') {
if (!options.defaultDescription[key]) options.defaultDescription[key] = usage.functionDescription(value)
value = value.call()
}
options.default[key] = value
}
return self
}
self.alias = function (x, y) {
if (typeof x === 'object') {
Object.keys(x).forEach(function (key) {
self.alias(key, x[key])
})
} else {
options.alias[x] = (options.alias[x] || []).concat(y)
}
return self
}
self.count = function (counts) {
options.count.push.apply(options.count, [].concat(counts))
return self
}
var demanded = {}
self.demand = self.required = self.require = function (keys, max, msg) {
// you can optionally provide a 'max' key,
// which will raise an exception if too many '_'
// options are provided.
if (typeof max !== 'number') {
msg = max
max = Infinity
}
if (typeof keys === 'number') {
if (!demanded._) demanded._ = { count: 0, msg: null, max: max }
demanded._.count = keys
demanded._.msg = msg
} else if (Array.isArray(keys)) {
keys.forEach(function (key) {
self.demand(key, msg)
})
} else {
if (typeof msg === 'string') {
demanded[keys] = { msg: msg }
} else if (msg === true || typeof msg === 'undefined') {
demanded[keys] = { msg: undefined }
}
}
return self
}
self.getDemanded = function () {
return demanded
}
self.requiresArg = function (requiresArgs) {
options.requiresArg.push.apply(options.requiresArg, [].concat(requiresArgs))
return self
}
self.implies = function (key, value) {
validation.implies(key, value)
return self
}
self.usage = function (msg, opts) {
if (!opts && typeof msg === 'object') {
opts = msg
msg = null
}
usage.usage(msg)
if (opts) self.options(opts)
return self
}
self.epilogue = self.epilog = function (msg) {
usage.epilog(msg)
return self
}
self.fail = function (f) {
usage.failFn(f)
return self
}
self.check = function (f) {
validation.check(f)
return self
}
self.defaults = self.default
self.describe = function (key, desc) {
options.key[key] = true
usage.describe(key, desc)
return self
}
self.parse = function (args) {
return parseArgs(args)
}
self.option = self.options = function (key, opt) {
if (typeof key === 'object') {
Object.keys(key).forEach(function (k) {
self.options(k, key[k])
})
} else {
assert(typeof opt === 'object', 'second argument to option must be an object')
options.key[key] = true // track manually set keys.
if (opt.alias) self.alias(key, opt.alias)
var demand = opt.demand || opt.required || opt.require
if (demand) {
self.demand(key, demand)
} if ('config' in opt) {
self.config(key)
} if ('default' in opt) {
self.default(key, opt.default)
} if ('nargs' in opt) {
self.nargs(key, opt.nargs)
} if ('choices' in opt) {
self.choices(key, opt.choices)
} if (opt.boolean || opt.type === 'boolean') {
self.boolean(key)
if (opt.alias) self.boolean(opt.alias)
} if (opt.array || opt.type === 'array') {
self.array(key)
if (opt.alias) self.array(opt.alias)
} if (opt.string || opt.type === 'string') {
self.string(key)
if (opt.alias) self.string(opt.alias)
} if (opt.count || opt.type === 'count') {
self.count(key)
} if (opt.defaultDescription) {
options.defaultDescription[key] = opt.defaultDescription
}
var desc = opt.describe || opt.description || opt.desc
if (desc) {
self.describe(key, desc)
}
if (opt.requiresArg) {
self.requiresArg(key)
}
}
return self
}
self.getOptions = function () {
return options
}
self.wrap = function (cols) {
usage.wrap(cols)
return self
}
var strict = false
self.strict = function () {
strict = true
return self
}
self.getStrict = function () {
return strict
}
self.showHelp = function (level) {
if (!self.parsed) parseArgs(processArgs) // run parser, if it has not already been executed.
usage.showHelp(level)
return self
}
var versionOpt = null
self.version = function (ver, opt, msg) {
versionOpt = opt || 'version'
usage.version(ver)
self.boolean(versionOpt)
self.describe(versionOpt, msg || usage.deferY18nLookup('Show version number'))
return self
}
var helpOpt = null
self.addHelpOpt = function (opt, msg) {
helpOpt = opt
self.boolean(opt)
self.describe(opt, msg || usage.deferY18nLookup('Show help'))
return self
}
self.showHelpOnFail = function (enabled, message) {
usage.showHelpOnFail(enabled, message)
return self
}
var exitProcess = true
self.exitProcess = function (enabled) {
if (typeof enabled !== 'boolean') {
enabled = true
}
exitProcess = enabled
return self
}
self.getExitProcess = function () {
return exitProcess
}
self.help = function () {
if (arguments.length > 0) return self.addHelpOpt.apply(self, arguments)
if (!self.parsed) parseArgs(processArgs) // run parser, if it has not already been executed.
return usage.help()
}
var completionCommand = null
self.completion = function (cmd, desc, fn) {
// a function to execute when generating
// completions can be provided as the second
// or third argument to completion.
if (typeof desc === 'function') {
fn = desc
desc = null
}
// register the completion command.
completionCommand = cmd || 'completion'
if (!desc && desc !== false) {
desc = 'generate bash completion script'
}
self.command(completionCommand, desc)
// a function can be provided
if (fn) completion.registerFunction(fn)
return self
}
self.showCompletionScript = function ($0) {
$0 = $0 || self.$0
console.log(completion.generateCompletionScript($0))
return self
}
self.locale = function (locale) {
if (arguments.length === 0) {
guessLocale()
return y18n.getLocale()
}
detectLocale = false
y18n.setLocale(locale)
return self
}
self.updateStrings = self.updateLocale = function (obj) {
detectLocale = false
y18n.updateLocale(obj)
return self
}
var detectLocale = true
self.detectLocale = function (detect) {
detectLocale = detect
return self
}
self.getDetectLocale = function () {
return detectLocale
}
self.getUsageInstance = function () {
return usage
}
self.getValidationInstance = function () {
return validation
}
self.terminalWidth = function () {
return require('window-size').width
}
Object.defineProperty(self, 'argv', {
get: function () {
var args = null
try {
args = parseArgs(processArgs)
} catch (err) {
usage.fail(err.message)
}
return args
},
enumerable: true
})
function parseArgs (args) {
var parsed = Parser(args, options, y18n)
var argv = parsed.argv
var aliases = parsed.aliases
argv.$0 = self.$0
self.parsed = parsed
guessLocale() // guess locale lazily, so that it can be turned off in chain.
// while building up the argv object, there
// are two passes through the parser. If completion
// is being performed short-circuit on the first pass.
if (completionCommand &&
(process.argv.join(' ')).indexOf(completion.completionKey) !== -1 &&
!argv[completion.completionKey]) {
return argv
}
// if there's a handler associated with a
// command defer processing to it.
var handlerKeys = Object.keys(self.getCommandHandlers())
for (var i = 0, command; (command = handlerKeys[i]) !== undefined; i++) {
if (~argv._.indexOf(command)) {
self.getCommandHandlers()[command](self.reset())
return self.argv
}
}
// generate a completion script for adding to ~/.bashrc.
if (completionCommand && ~argv._.indexOf(completionCommand) && !argv[completion.completionKey]) {
self.showCompletionScript()
if (exitProcess) {
process.exit(0)
}
}
// we must run completions first, a user might
// want to complete the --help or --version option.
if (completion.completionKey in argv) {
// we allow for asynchronous completions,
// e.g., loading in a list of commands from an API.
completion.getCompletion(function (completions) {
;(completions || []).forEach(function (completion) {
console.log(completion)
})
if (exitProcess) {
process.exit(0)
}
})
return
}
Object.keys(argv).forEach(function (key) {
if (key === helpOpt && argv[key]) {
self.showHelp('log')
if (exitProcess) {
process.exit(0)
}
} else if (key === versionOpt && argv[key]) {
usage.showVersion()
if (exitProcess) {
process.exit(0)
}
}
})
if (parsed.error) throw parsed.error
// if we're executed via bash completion, don't
// bother with validation.
if (!argv[completion.completionKey]) {
validation.nonOptionCount(argv)
validation.missingArgumentValue(argv)
validation.requiredArguments(argv)
if (strict) validation.unknownArguments(argv, aliases)
validation.customChecks(argv, aliases)
validation.limitedChoices(argv)
validation.implications(argv)
}
setPlaceholderKeys(argv)
return argv
}
function guessLocale () {
if (!detectLocale) return
self.locale(locale())
}
function | (argv) {
Object.keys(options.key).forEach(function (key) {
if (typeof argv[key] === 'undefined') argv[key] = undefined
})
}
sigletonify(self)
return self
}
// rebase an absolute path to a relative one with respect to a base directory
// exported for tests
exports.rebase = rebase
function rebase (base, dir) {
return path.relative(base, dir)
}
/* Hack an instance of Argv with process.argv into Argv
so people can do
require('yargs')(['--beeble=1','-z','zizzle']).argv
to parse a list of args and
require('yargs').argv
to get a parsed version of process.argv.
*/
function sigletonify (inst) {
Object.keys(inst).forEach(function (key) {
if (key === 'argv') {
Argv.__defineGetter__(key, inst.__lookupGetter__(key))
} else {
Argv[key] = typeof inst[key] === 'function'
? inst[key].bind(inst)
: inst[key]
}
})
}
| setPlaceholderKeys | identifier_name |
index.js | var assert = require('assert')
var Completion = require('./lib/completion')
var locale = require('./lib/locale')
var Parser = require('./lib/parser')
var path = require('path')
var Usage = require('./lib/usage')
var Validation = require('./lib/validation')
var Y18n = require('y18n')
Argv(process.argv.slice(2))
var exports = module.exports = Argv
function Argv (processArgs, cwd) {
processArgs = processArgs || [] // handle calling yargs().
var self = {}
var completion = null
var usage = null
var validation = null
var y18n = Y18n({
directory: path.resolve(__dirname, './locales'),
updateFiles: false
})
if (!cwd) cwd = process.cwd()
self.$0 = process.argv
.slice(0, 2)
.map(function (x, i) {
// ignore the node bin, specify this in your
// bin file with #!/usr/bin/env node
if (i === 0 && /\b(node|iojs)$/.test(x)) return
var b = rebase(cwd, x)
return x.match(/^\//) && b.length < x.length
? b : x
})
.join(' ').trim()
if (process.env._ !== undefined && process.argv[1] === process.env._) {
self.$0 = process.env._.replace(
path.dirname(process.execPath) + '/', ''
)
}
var options
self.resetOptions = self.reset = function () {
// put yargs back into its initial
// state, this is useful for creating a
// nested CLI.
options = {
array: [],
boolean: [],
string: [],
narg: {},
key: {},
alias: {},
default: {},
defaultDescription: {},
choices: {},
requiresArg: [],
count: [],
normalize: [],
config: []
}
usage = Usage(self, y18n) // handle usage output.
validation = Validation(self, usage, y18n) // handle arg validation.
completion = Completion(self, usage)
demanded = {}
exitProcess = true
strict = false
helpOpt = null
versionOpt = null
commandHandlers = {}
self.parsed = false
return self
}
self.resetOptions()
self.boolean = function (bools) {
options.boolean.push.apply(options.boolean, [].concat(bools))
return self
}
self.array = function (arrays) {
options.array.push.apply(options.array, [].concat(arrays))
return self
}
self.nargs = function (key, n) {
if (typeof key === 'object') {
Object.keys(key).forEach(function (k) {
self.nargs(k, key[k])
})
} else {
options.narg[key] = n
}
return self
}
self.choices = function (key, values) {
if (typeof key === 'object') {
Object.keys(key).forEach(function (k) {
self.choices(k, key[k])
})
} else {
options.choices[key] = (options.choices[key] || []).concat(values)
}
return self
}
self.normalize = function (strings) {
options.normalize.push.apply(options.normalize, [].concat(strings))
return self
}
self.config = function (key, msg) {
self.describe(key, msg || usage.deferY18nLookup('Path to JSON config file'))
options.config.push.apply(options.config, [].concat(key))
return self
}
self.example = function (cmd, description) {
usage.example(cmd, description)
return self
}
self.command = function (cmd, description, fn) {
if (description !== false) {
usage.command(cmd, description)
}
if (fn) commandHandlers[cmd] = fn
return self
}
var commandHandlers = {}
self.getCommandHandlers = function () {
return commandHandlers
}
self.string = function (strings) {
options.string.push.apply(options.string, [].concat(strings))
return self
}
self.default = function (key, value, defaultDescription) {
if (typeof key === 'object') {
Object.keys(key).forEach(function (k) {
self.default(k, key[k])
})
} else {
if (defaultDescription) options.defaultDescription[key] = defaultDescription
if (typeof value === 'function') {
if (!options.defaultDescription[key]) options.defaultDescription[key] = usage.functionDescription(value)
value = value.call()
}
options.default[key] = value
}
return self
}
self.alias = function (x, y) {
if (typeof x === 'object') {
Object.keys(x).forEach(function (key) {
self.alias(key, x[key])
})
} else {
options.alias[x] = (options.alias[x] || []).concat(y)
}
return self
}
self.count = function (counts) {
options.count.push.apply(options.count, [].concat(counts))
return self
}
var demanded = {}
self.demand = self.required = self.require = function (keys, max, msg) {
// you can optionally provide a 'max' key,
// which will raise an exception if too many '_'
// options are provided.
if (typeof max !== 'number') {
msg = max
max = Infinity
}
if (typeof keys === 'number') {
if (!demanded._) demanded._ = { count: 0, msg: null, max: max }
demanded._.count = keys
demanded._.msg = msg
} else if (Array.isArray(keys)) {
keys.forEach(function (key) {
self.demand(key, msg)
})
} else {
if (typeof msg === 'string') {
demanded[keys] = { msg: msg }
} else if (msg === true || typeof msg === 'undefined') {
demanded[keys] = { msg: undefined }
}
}
return self
}
self.getDemanded = function () {
return demanded
}
self.requiresArg = function (requiresArgs) {
options.requiresArg.push.apply(options.requiresArg, [].concat(requiresArgs))
return self
}
self.implies = function (key, value) {
validation.implies(key, value)
return self
}
self.usage = function (msg, opts) {
if (!opts && typeof msg === 'object') {
opts = msg
msg = null
}
usage.usage(msg)
if (opts) self.options(opts)
return self
}
self.epilogue = self.epilog = function (msg) {
usage.epilog(msg)
return self
}
self.fail = function (f) {
usage.failFn(f)
return self
}
self.check = function (f) {
validation.check(f)
return self
}
self.defaults = self.default
self.describe = function (key, desc) {
options.key[key] = true
usage.describe(key, desc)
return self
}
self.parse = function (args) {
return parseArgs(args)
}
self.option = self.options = function (key, opt) {
if (typeof key === 'object') {
Object.keys(key).forEach(function (k) {
self.options(k, key[k])
})
} else {
assert(typeof opt === 'object', 'second argument to option must be an object')
options.key[key] = true // track manually set keys.
if (opt.alias) self.alias(key, opt.alias)
var demand = opt.demand || opt.required || opt.require
if (demand) {
self.demand(key, demand)
} if ('config' in opt) {
self.config(key)
} if ('default' in opt) {
self.default(key, opt.default)
} if ('nargs' in opt) {
self.nargs(key, opt.nargs)
} if ('choices' in opt) {
self.choices(key, opt.choices)
} if (opt.boolean || opt.type === 'boolean') {
self.boolean(key)
if (opt.alias) self.boolean(opt.alias)
} if (opt.array || opt.type === 'array') {
self.array(key)
if (opt.alias) self.array(opt.alias)
} if (opt.string || opt.type === 'string') {
self.string(key)
if (opt.alias) self.string(opt.alias)
} if (opt.count || opt.type === 'count') {
self.count(key)
} if (opt.defaultDescription) {
options.defaultDescription[key] = opt.defaultDescription
}
var desc = opt.describe || opt.description || opt.desc
if (desc) {
self.describe(key, desc)
}
if (opt.requiresArg) {
self.requiresArg(key)
}
}
return self
}
self.getOptions = function () {
return options
}
self.wrap = function (cols) {
usage.wrap(cols)
return self
}
var strict = false
self.strict = function () {
strict = true
return self
}
self.getStrict = function () {
return strict
}
self.showHelp = function (level) {
if (!self.parsed) parseArgs(processArgs) // run parser, if it has not already been executed.
usage.showHelp(level)
return self
}
var versionOpt = null
self.version = function (ver, opt, msg) {
versionOpt = opt || 'version'
usage.version(ver)
self.boolean(versionOpt)
self.describe(versionOpt, msg || usage.deferY18nLookup('Show version number'))
return self
}
var helpOpt = null
self.addHelpOpt = function (opt, msg) {
helpOpt = opt
self.boolean(opt)
self.describe(opt, msg || usage.deferY18nLookup('Show help'))
return self
}
self.showHelpOnFail = function (enabled, message) {
usage.showHelpOnFail(enabled, message)
return self
}
var exitProcess = true
self.exitProcess = function (enabled) {
if (typeof enabled !== 'boolean') {
enabled = true
}
exitProcess = enabled
return self
}
self.getExitProcess = function () {
return exitProcess
}
self.help = function () {
if (arguments.length > 0) return self.addHelpOpt.apply(self, arguments)
if (!self.parsed) parseArgs(processArgs) // run parser, if it has not already been executed.
return usage.help()
}
var completionCommand = null
self.completion = function (cmd, desc, fn) {
// a function to execute when generating
// completions can be provided as the second
// or third argument to completion.
if (typeof desc === 'function') {
fn = desc
desc = null
}
// register the completion command.
completionCommand = cmd || 'completion'
if (!desc && desc !== false) {
desc = 'generate bash completion script'
}
self.command(completionCommand, desc)
// a function can be provided
if (fn) completion.registerFunction(fn)
return self
}
self.showCompletionScript = function ($0) {
$0 = $0 || self.$0
console.log(completion.generateCompletionScript($0))
return self
}
self.locale = function (locale) {
if (arguments.length === 0) {
guessLocale()
return y18n.getLocale()
}
detectLocale = false
y18n.setLocale(locale)
return self
}
self.updateStrings = self.updateLocale = function (obj) {
detectLocale = false
y18n.updateLocale(obj)
return self
}
var detectLocale = true
self.detectLocale = function (detect) {
detectLocale = detect
return self
}
self.getDetectLocale = function () {
return detectLocale
}
self.getUsageInstance = function () {
return usage
}
self.getValidationInstance = function () {
return validation
}
self.terminalWidth = function () {
return require('window-size').width
}
Object.defineProperty(self, 'argv', {
get: function () {
var args = null
try {
args = parseArgs(processArgs)
} catch (err) {
usage.fail(err.message)
}
return args
},
enumerable: true
})
function parseArgs (args) {
var parsed = Parser(args, options, y18n)
var argv = parsed.argv
var aliases = parsed.aliases
argv.$0 = self.$0
self.parsed = parsed
guessLocale() // guess locale lazily, so that it can be turned off in chain.
// while building up the argv object, there
// are two passes through the parser. If completion
// is being performed short-circuit on the first pass.
if (completionCommand &&
(process.argv.join(' ')).indexOf(completion.completionKey) !== -1 &&
!argv[completion.completionKey]) {
return argv
}
// if there's a handler associated with a
// command defer processing to it.
var handlerKeys = Object.keys(self.getCommandHandlers())
for (var i = 0, command; (command = handlerKeys[i]) !== undefined; i++) {
if (~argv._.indexOf(command)) {
self.getCommandHandlers()[command](self.reset())
return self.argv
}
}
// generate a completion script for adding to ~/.bashrc.
if (completionCommand && ~argv._.indexOf(completionCommand) && !argv[completion.completionKey]) {
self.showCompletionScript()
if (exitProcess) {
process.exit(0)
}
}
// we must run completions first, a user might
// want to complete the --help or --version option.
if (completion.completionKey in argv) {
// we allow for asynchronous completions,
// e.g., loading in a list of commands from an API.
completion.getCompletion(function (completions) {
;(completions || []).forEach(function (completion) {
console.log(completion)
})
if (exitProcess) {
process.exit(0)
}
})
return
}
Object.keys(argv).forEach(function (key) {
if (key === helpOpt && argv[key]) {
self.showHelp('log')
if (exitProcess) {
process.exit(0)
}
} else if (key === versionOpt && argv[key]) {
usage.showVersion()
if (exitProcess) {
process.exit(0)
}
}
})
if (parsed.error) throw parsed.error
// if we're executed via bash completion, don't
// bother with validation.
if (!argv[completion.completionKey]) {
validation.nonOptionCount(argv)
validation.missingArgumentValue(argv)
validation.requiredArguments(argv)
if (strict) validation.unknownArguments(argv, aliases)
validation.customChecks(argv, aliases)
validation.limitedChoices(argv)
validation.implications(argv)
}
setPlaceholderKeys(argv)
return argv
}
function guessLocale () {
if (!detectLocale) return
self.locale(locale())
}
function setPlaceholderKeys (argv) {
Object.keys(options.key).forEach(function (key) {
if (typeof argv[key] === 'undefined') argv[key] = undefined
})
}
sigletonify(self)
return self
}
// rebase an absolute path to a relative one with respect to a base directory
// exported for tests
exports.rebase = rebase
function rebase (base, dir) |
/* Hack an instance of Argv with process.argv into Argv
so people can do
require('yargs')(['--beeble=1','-z','zizzle']).argv
to parse a list of args and
require('yargs').argv
to get a parsed version of process.argv.
*/
function sigletonify (inst) {
Object.keys(inst).forEach(function (key) {
if (key === 'argv') {
Argv.__defineGetter__(key, inst.__lookupGetter__(key))
} else {
Argv[key] = typeof inst[key] === 'function'
? inst[key].bind(inst)
: inst[key]
}
})
}
| {
return path.relative(base, dir)
} | identifier_body |
mappers.js | import mapValues from 'lodash/mapValues';
import snakeCase from 'lodash/snakeCase';
function ensureTypeSnakeCase(value) |
export function assessmentMetaDataState(data) {
const blankState = {
assessment: false,
assessmentIds: [],
masteryModel: null,
randomize: false,
};
if (typeof data.assessmentmetadata === 'undefined') {
return blankState;
}
// Data is from a serializer for a one to many key, so it will return an array of length 0 or 1
const assessmentMetaData = data.assessmentmetadata[0];
if (!assessmentMetaData) {
return blankState;
}
const assessmentIds = assessmentMetaData.assessment_item_ids;
const masteryModel = mapValues(assessmentMetaData.mastery_model, ensureTypeSnakeCase);
if (!assessmentIds.length || !Object.keys(masteryModel).length) {
return blankState;
}
return {
assessment: true,
assessmentIds,
masteryModel,
randomize: assessmentMetaData.randomize,
};
}
| {
if (typeof value === 'string') {
return snakeCase(value);
}
return value;
} | identifier_body |
mappers.js | import mapValues from 'lodash/mapValues';
import snakeCase from 'lodash/snakeCase';
function | (value) {
if (typeof value === 'string') {
return snakeCase(value);
}
return value;
}
export function assessmentMetaDataState(data) {
const blankState = {
assessment: false,
assessmentIds: [],
masteryModel: null,
randomize: false,
};
if (typeof data.assessmentmetadata === 'undefined') {
return blankState;
}
// Data is from a serializer for a one to many key, so it will return an array of length 0 or 1
const assessmentMetaData = data.assessmentmetadata[0];
if (!assessmentMetaData) {
return blankState;
}
const assessmentIds = assessmentMetaData.assessment_item_ids;
const masteryModel = mapValues(assessmentMetaData.mastery_model, ensureTypeSnakeCase);
if (!assessmentIds.length || !Object.keys(masteryModel).length) {
return blankState;
}
return {
assessment: true,
assessmentIds,
masteryModel,
randomize: assessmentMetaData.randomize,
};
}
| ensureTypeSnakeCase | identifier_name |
mappers.js | import mapValues from 'lodash/mapValues';
import snakeCase from 'lodash/snakeCase';
function ensureTypeSnakeCase(value) {
if (typeof value === 'string') {
return snakeCase(value);
}
return value;
}
export function assessmentMetaDataState(data) {
const blankState = {
assessment: false,
assessmentIds: [],
masteryModel: null,
randomize: false,
};
if (typeof data.assessmentmetadata === 'undefined') {
return blankState;
}
// Data is from a serializer for a one to many key, so it will return an array of length 0 or 1 | const assessmentIds = assessmentMetaData.assessment_item_ids;
const masteryModel = mapValues(assessmentMetaData.mastery_model, ensureTypeSnakeCase);
if (!assessmentIds.length || !Object.keys(masteryModel).length) {
return blankState;
}
return {
assessment: true,
assessmentIds,
masteryModel,
randomize: assessmentMetaData.randomize,
};
} | const assessmentMetaData = data.assessmentmetadata[0];
if (!assessmentMetaData) {
return blankState;
} | random_line_split |
mappers.js | import mapValues from 'lodash/mapValues';
import snakeCase from 'lodash/snakeCase';
function ensureTypeSnakeCase(value) {
if (typeof value === 'string') |
return value;
}
export function assessmentMetaDataState(data) {
const blankState = {
assessment: false,
assessmentIds: [],
masteryModel: null,
randomize: false,
};
if (typeof data.assessmentmetadata === 'undefined') {
return blankState;
}
// Data is from a serializer for a one to many key, so it will return an array of length 0 or 1
const assessmentMetaData = data.assessmentmetadata[0];
if (!assessmentMetaData) {
return blankState;
}
const assessmentIds = assessmentMetaData.assessment_item_ids;
const masteryModel = mapValues(assessmentMetaData.mastery_model, ensureTypeSnakeCase);
if (!assessmentIds.length || !Object.keys(masteryModel).length) {
return blankState;
}
return {
assessment: true,
assessmentIds,
masteryModel,
randomize: assessmentMetaData.randomize,
};
}
| {
return snakeCase(value);
} | conditional_block |
index.ts | import { EMPTY_P } from '../../../utils/const'
import Editor from '../../../editor/index'
import $, { DomElement } from '../../../utils/dom-core'
function | (editor: Editor) {
function quoteEnter(e: Event) {
const $selectElem = editor.selection.getSelectionContainerElem() as DomElement
const $topSelectElem = editor.selection.getSelectionRangeTopNodes()[0]
// 对quote的enter进行特殊处理
//最后一行为空标签时再按会出跳出blockquote
if ($topSelectElem?.getNodeName() === 'BLOCKQUOTE') {
// firefox下点击引用按钮会选中外容器<blockquote></blockquote>
if ($selectElem.getNodeName() === 'BLOCKQUOTE') {
const selectNode = $selectElem.childNodes()?.getNode() as Node
editor.selection.moveCursor(selectNode)
}
if ($selectElem.text() === '') {
e.preventDefault()
$selectElem.remove()
const $newLine = $(EMPTY_P)
$newLine.insertAfter($topSelectElem)
// 将光标移动br前面
editor.selection.moveCursor($newLine.getNode(), 0)
}
// 当blockQuote中没有内容回车后移除blockquote
if ($topSelectElem.text() === '') {
$topSelectElem.remove()
}
}
}
editor.txt.eventHooks.enterDownEvents.push(quoteEnter)
}
export default bindEvent
| bindEvent | identifier_name |
index.ts | import { EMPTY_P } from '../../../utils/const'
import Editor from '../../../editor/index'
import $, { DomElement } from '../../../utils/dom-core'
function bindEvent(editor: Editor) {
function quoteEnter(e: Event) | {
const $selectElem = editor.selection.getSelectionContainerElem() as DomElement
const $topSelectElem = editor.selection.getSelectionRangeTopNodes()[0]
// 对quote的enter进行特殊处理
//最后一行为空标签时再按会出跳出blockquote
if ($topSelectElem?.getNodeName() === 'BLOCKQUOTE') {
// firefox下点击引用按钮会选中外容器<blockquote></blockquote>
if ($selectElem.getNodeName() === 'BLOCKQUOTE') {
const selectNode = $selectElem.childNodes()?.getNode() as Node
editor.selection.moveCursor(selectNode)
}
if ($selectElem.text() === '') {
e.preventDefault()
$selectElem.remove()
const $newLine = $(EMPTY_P)
$newLine.insertAfter($topSelectElem)
// 将光标移动br前面
editor.selection.moveCursor($newLine.getNode(), 0)
}
// 当blockQuote中没有内容回车后移除blockquote
if ($topSelectElem.text() === '') {
$topSelectElem.remove()
}
}
}
editor.txt.eventHooks.enterDownEvents.push(quoteEnter)
}
export default bindEvent
| identifier_body | |
index.ts | import { EMPTY_P } from '../../../utils/const'
import Editor from '../../../editor/index'
import $, { DomElement } from '../../../utils/dom-core'
function bindEvent(editor: Editor) {
function quoteEnter(e: Event) {
const $selectElem = editor.selection.getSelectionContainerElem() as DomElement
const $topSelectElem = editor.selection.getSelectionRangeTopNodes()[0]
// 对quote的enter进行特殊处理
//最后一行为空标签时再按会出跳出blockquote
if ($topSelectElem?.getNodeName() === 'BLOCKQUOTE') {
// firefox下点击引用按钮会选中外容器<blockquote></blockquote>
if ($selectElem.getNodeName() === 'BLOCKQUOTE') {
const selectNode = $selectElem.childNodes()?.getNode() | efault()
$selectElem.remove()
const $newLine = $(EMPTY_P)
$newLine.insertAfter($topSelectElem)
// 将光标移动br前面
editor.selection.moveCursor($newLine.getNode(), 0)
}
// 当blockQuote中没有内容回车后移除blockquote
if ($topSelectElem.text() === '') {
$topSelectElem.remove()
}
}
}
editor.txt.eventHooks.enterDownEvents.push(quoteEnter)
}
export default bindEvent
| as Node
editor.selection.moveCursor(selectNode)
}
if ($selectElem.text() === '') {
e.preventD | conditional_block |
index.ts | import { EMPTY_P } from '../../../utils/const'
import Editor from '../../../editor/index'
import $, { DomElement } from '../../../utils/dom-core'
function bindEvent(editor: Editor) {
function quoteEnter(e: Event) {
const $selectElem = editor.selection.getSelectionContainerElem() as DomElement
const $topSelectElem = editor.selection.getSelectionRangeTopNodes()[0]
// 对quote的enter进行特殊处理
//最后一行为空标签时再按会出跳出blockquote
if ($topSelectElem?.getNodeName() === 'BLOCKQUOTE') {
// firefox下点击引用按钮会选中外容器<blockquote></blockquote>
if ($selectElem.getNodeName() === 'BLOCKQUOTE') {
const selectNode = $selectElem.childNodes()?.getNode() as Node
editor.selection.moveCursor(selectNode)
}
if ($selectElem.text() === '') {
e.preventDefault()
$selectElem.remove()
const $newLine = $(EMPTY_P) | // 当blockQuote中没有内容回车后移除blockquote
if ($topSelectElem.text() === '') {
$topSelectElem.remove()
}
}
}
editor.txt.eventHooks.enterDownEvents.push(quoteEnter)
}
export default bindEvent | $newLine.insertAfter($topSelectElem)
// 将光标移动br前面
editor.selection.moveCursor($newLine.getNode(), 0)
}
| random_line_split |
methods.py | import json
from math import ceil
from asyncpg import Connection
from qllr.common import MATCH_LIST_ITEM_COUNT
from qllr.db import cache
from qllr.settings import PLAYER_COUNT_PER_PAGE
KEEPING_TIME = 60 * 60 * 24 * 30
SQL_TOP_PLAYERS_BY_GAMETYPE = """
SELECT
p.steam_id,
p.name,
p.model,
gr.rating,
gr.deviation,
gr.n,
count(*) OVER () AS count,
ROW_NUMBER() OVER (ORDER BY gr.rating DESC) AS rank
FROM
players p
LEFT JOIN (SUBQUERY) gr ON
gr.steam_id = p.steam_id
WHERE
gr.n >= 10 AND
gr.last_played_timestamp > LEAST( $1, (
SELECT timestamp
FROM matches
WHERE gametype_id = $2
ORDER BY timestamp DESC
LIMIT 1 OFFSET {OFFSET}
)) AND
gr.gametype_id = $2
ORDER BY gr.rating DESC
""".format(
OFFSET=int(MATCH_LIST_ITEM_COUNT)
).replace(
"(SUBQUERY)", "({SUBQUERY})"
)
SQL_TOP_PLAYERS_BY_GAMETYPE_R1 = SQL_TOP_PLAYERS_BY_GAMETYPE.format(
SUBQUERY="""
SELECT
steam_id,
r1_mean AS rating,
r1_deviation AS deviation,
last_played_timestamp,
gametype_id,
n
FROM
gametype_ratings
"""
)
SQL_TOP_PLAYERS_BY_GAMETYPE_R2 = SQL_TOP_PLAYERS_BY_GAMETYPE.format(
SUBQUERY="""
SELECT
steam_id,
r2_value AS rating,
0 AS deviation,
last_played_timestamp,
gametype_id,
n
FROM
gametype_ratings
"""
)
def get_sql_top_players_query_by_gametype_id(gametype_id: int):
if cache.USE_AVG_PERF[gametype_id]:
return SQL_TOP_PLAYERS_BY_GAMETYPE_R2
else:
return SQL_TOP_PLAYERS_BY_GAMETYPE_R1
async def get_list(con: Connection, gametype_id: int, page: int, show_inactive=False):
await con.set_type_codec(
"json", encoder=json.dumps, decoder=json.loads, schema="pg_catalog"
)
query = get_sql_top_players_query_by_gametype_id(
gametype_id
) + "LIMIT {LIMIT} OFFSET {OFFSET}".format(
LIMIT=int(PLAYER_COUNT_PER_PAGE), OFFSET=int(PLAYER_COUNT_PER_PAGE * page)
)
start_timestamp = 0
if show_inactive is False:
start_timestamp = cache.LAST_GAME_TIMESTAMPS[gametype_id] - KEEPING_TIME
result = []
player_count = 0
async for row in con.cursor(query, start_timestamp, gametype_id):
if row[0] != None:
|
player_count = row[6]
steam_ids = list(map(lambda player: int(player["_id"]), result))
query = """
SELECT
s.steam_id,
CEIL(AVG(CASE
WHEN m.team1_score > m.team2_score AND s.team = 1 THEN 1
WHEN m.team2_score > m.team1_score AND s.team = 2 THEN 1
ELSE 0
END)*100)
FROM
matches m
LEFT JOIN scoreboards s ON s.match_id = m.match_id
WHERE
m.gametype_id = $1 AND s.steam_id = ANY($2)
GROUP BY s.steam_id;
"""
for row in await con.fetch(query, gametype_id, steam_ids):
try:
result_index = steam_ids.index(row[0])
result[result_index]["win_ratio"] = int(row[1])
except ValueError:
pass # must not happen
return {
"ok": True,
"response": result,
"page_count": ceil(player_count / PLAYER_COUNT_PER_PAGE),
}
| result.append(
{
"_id": str(row[0]),
"name": row[1],
"model": (
row[2] + ("/default" if row[2].find("/") == -1 else "")
).lower(),
"rating": round(row[3], 2),
"rd": round(row[4], 2),
"n": row[5],
"rank": row[7],
}
) | conditional_block |
methods.py | from asyncpg import Connection
from qllr.common import MATCH_LIST_ITEM_COUNT
from qllr.db import cache
from qllr.settings import PLAYER_COUNT_PER_PAGE
KEEPING_TIME = 60 * 60 * 24 * 30
SQL_TOP_PLAYERS_BY_GAMETYPE = """
SELECT
p.steam_id,
p.name,
p.model,
gr.rating,
gr.deviation,
gr.n,
count(*) OVER () AS count,
ROW_NUMBER() OVER (ORDER BY gr.rating DESC) AS rank
FROM
players p
LEFT JOIN (SUBQUERY) gr ON
gr.steam_id = p.steam_id
WHERE
gr.n >= 10 AND
gr.last_played_timestamp > LEAST( $1, (
SELECT timestamp
FROM matches
WHERE gametype_id = $2
ORDER BY timestamp DESC
LIMIT 1 OFFSET {OFFSET}
)) AND
gr.gametype_id = $2
ORDER BY gr.rating DESC
""".format(
OFFSET=int(MATCH_LIST_ITEM_COUNT)
).replace(
"(SUBQUERY)", "({SUBQUERY})"
)
SQL_TOP_PLAYERS_BY_GAMETYPE_R1 = SQL_TOP_PLAYERS_BY_GAMETYPE.format(
SUBQUERY="""
SELECT
steam_id,
r1_mean AS rating,
r1_deviation AS deviation,
last_played_timestamp,
gametype_id,
n
FROM
gametype_ratings
"""
)
SQL_TOP_PLAYERS_BY_GAMETYPE_R2 = SQL_TOP_PLAYERS_BY_GAMETYPE.format(
SUBQUERY="""
SELECT
steam_id,
r2_value AS rating,
0 AS deviation,
last_played_timestamp,
gametype_id,
n
FROM
gametype_ratings
"""
)
def get_sql_top_players_query_by_gametype_id(gametype_id: int):
if cache.USE_AVG_PERF[gametype_id]:
return SQL_TOP_PLAYERS_BY_GAMETYPE_R2
else:
return SQL_TOP_PLAYERS_BY_GAMETYPE_R1
async def get_list(con: Connection, gametype_id: int, page: int, show_inactive=False):
await con.set_type_codec(
"json", encoder=json.dumps, decoder=json.loads, schema="pg_catalog"
)
query = get_sql_top_players_query_by_gametype_id(
gametype_id
) + "LIMIT {LIMIT} OFFSET {OFFSET}".format(
LIMIT=int(PLAYER_COUNT_PER_PAGE), OFFSET=int(PLAYER_COUNT_PER_PAGE * page)
)
start_timestamp = 0
if show_inactive is False:
start_timestamp = cache.LAST_GAME_TIMESTAMPS[gametype_id] - KEEPING_TIME
result = []
player_count = 0
async for row in con.cursor(query, start_timestamp, gametype_id):
if row[0] != None:
result.append(
{
"_id": str(row[0]),
"name": row[1],
"model": (
row[2] + ("/default" if row[2].find("/") == -1 else "")
).lower(),
"rating": round(row[3], 2),
"rd": round(row[4], 2),
"n": row[5],
"rank": row[7],
}
)
player_count = row[6]
steam_ids = list(map(lambda player: int(player["_id"]), result))
query = """
SELECT
s.steam_id,
CEIL(AVG(CASE
WHEN m.team1_score > m.team2_score AND s.team = 1 THEN 1
WHEN m.team2_score > m.team1_score AND s.team = 2 THEN 1
ELSE 0
END)*100)
FROM
matches m
LEFT JOIN scoreboards s ON s.match_id = m.match_id
WHERE
m.gametype_id = $1 AND s.steam_id = ANY($2)
GROUP BY s.steam_id;
"""
for row in await con.fetch(query, gametype_id, steam_ids):
try:
result_index = steam_ids.index(row[0])
result[result_index]["win_ratio"] = int(row[1])
except ValueError:
pass # must not happen
return {
"ok": True,
"response": result,
"page_count": ceil(player_count / PLAYER_COUNT_PER_PAGE),
} | import json
from math import ceil
| random_line_split | |
methods.py | import json
from math import ceil
from asyncpg import Connection
from qllr.common import MATCH_LIST_ITEM_COUNT
from qllr.db import cache
from qllr.settings import PLAYER_COUNT_PER_PAGE
KEEPING_TIME = 60 * 60 * 24 * 30
SQL_TOP_PLAYERS_BY_GAMETYPE = """
SELECT
p.steam_id,
p.name,
p.model,
gr.rating,
gr.deviation,
gr.n,
count(*) OVER () AS count,
ROW_NUMBER() OVER (ORDER BY gr.rating DESC) AS rank
FROM
players p
LEFT JOIN (SUBQUERY) gr ON
gr.steam_id = p.steam_id
WHERE
gr.n >= 10 AND
gr.last_played_timestamp > LEAST( $1, (
SELECT timestamp
FROM matches
WHERE gametype_id = $2
ORDER BY timestamp DESC
LIMIT 1 OFFSET {OFFSET}
)) AND
gr.gametype_id = $2
ORDER BY gr.rating DESC
""".format(
OFFSET=int(MATCH_LIST_ITEM_COUNT)
).replace(
"(SUBQUERY)", "({SUBQUERY})"
)
SQL_TOP_PLAYERS_BY_GAMETYPE_R1 = SQL_TOP_PLAYERS_BY_GAMETYPE.format(
SUBQUERY="""
SELECT
steam_id,
r1_mean AS rating,
r1_deviation AS deviation,
last_played_timestamp,
gametype_id,
n
FROM
gametype_ratings
"""
)
SQL_TOP_PLAYERS_BY_GAMETYPE_R2 = SQL_TOP_PLAYERS_BY_GAMETYPE.format(
SUBQUERY="""
SELECT
steam_id,
r2_value AS rating,
0 AS deviation,
last_played_timestamp,
gametype_id,
n
FROM
gametype_ratings
"""
)
def get_sql_top_players_query_by_gametype_id(gametype_id: int):
if cache.USE_AVG_PERF[gametype_id]:
return SQL_TOP_PLAYERS_BY_GAMETYPE_R2
else:
return SQL_TOP_PLAYERS_BY_GAMETYPE_R1
async def | (con: Connection, gametype_id: int, page: int, show_inactive=False):
await con.set_type_codec(
"json", encoder=json.dumps, decoder=json.loads, schema="pg_catalog"
)
query = get_sql_top_players_query_by_gametype_id(
gametype_id
) + "LIMIT {LIMIT} OFFSET {OFFSET}".format(
LIMIT=int(PLAYER_COUNT_PER_PAGE), OFFSET=int(PLAYER_COUNT_PER_PAGE * page)
)
start_timestamp = 0
if show_inactive is False:
start_timestamp = cache.LAST_GAME_TIMESTAMPS[gametype_id] - KEEPING_TIME
result = []
player_count = 0
async for row in con.cursor(query, start_timestamp, gametype_id):
if row[0] != None:
result.append(
{
"_id": str(row[0]),
"name": row[1],
"model": (
row[2] + ("/default" if row[2].find("/") == -1 else "")
).lower(),
"rating": round(row[3], 2),
"rd": round(row[4], 2),
"n": row[5],
"rank": row[7],
}
)
player_count = row[6]
steam_ids = list(map(lambda player: int(player["_id"]), result))
query = """
SELECT
s.steam_id,
CEIL(AVG(CASE
WHEN m.team1_score > m.team2_score AND s.team = 1 THEN 1
WHEN m.team2_score > m.team1_score AND s.team = 2 THEN 1
ELSE 0
END)*100)
FROM
matches m
LEFT JOIN scoreboards s ON s.match_id = m.match_id
WHERE
m.gametype_id = $1 AND s.steam_id = ANY($2)
GROUP BY s.steam_id;
"""
for row in await con.fetch(query, gametype_id, steam_ids):
try:
result_index = steam_ids.index(row[0])
result[result_index]["win_ratio"] = int(row[1])
except ValueError:
pass # must not happen
return {
"ok": True,
"response": result,
"page_count": ceil(player_count / PLAYER_COUNT_PER_PAGE),
}
| get_list | identifier_name |
methods.py | import json
from math import ceil
from asyncpg import Connection
from qllr.common import MATCH_LIST_ITEM_COUNT
from qllr.db import cache
from qllr.settings import PLAYER_COUNT_PER_PAGE
KEEPING_TIME = 60 * 60 * 24 * 30
SQL_TOP_PLAYERS_BY_GAMETYPE = """
SELECT
p.steam_id,
p.name,
p.model,
gr.rating,
gr.deviation,
gr.n,
count(*) OVER () AS count,
ROW_NUMBER() OVER (ORDER BY gr.rating DESC) AS rank
FROM
players p
LEFT JOIN (SUBQUERY) gr ON
gr.steam_id = p.steam_id
WHERE
gr.n >= 10 AND
gr.last_played_timestamp > LEAST( $1, (
SELECT timestamp
FROM matches
WHERE gametype_id = $2
ORDER BY timestamp DESC
LIMIT 1 OFFSET {OFFSET}
)) AND
gr.gametype_id = $2
ORDER BY gr.rating DESC
""".format(
OFFSET=int(MATCH_LIST_ITEM_COUNT)
).replace(
"(SUBQUERY)", "({SUBQUERY})"
)
SQL_TOP_PLAYERS_BY_GAMETYPE_R1 = SQL_TOP_PLAYERS_BY_GAMETYPE.format(
SUBQUERY="""
SELECT
steam_id,
r1_mean AS rating,
r1_deviation AS deviation,
last_played_timestamp,
gametype_id,
n
FROM
gametype_ratings
"""
)
SQL_TOP_PLAYERS_BY_GAMETYPE_R2 = SQL_TOP_PLAYERS_BY_GAMETYPE.format(
SUBQUERY="""
SELECT
steam_id,
r2_value AS rating,
0 AS deviation,
last_played_timestamp,
gametype_id,
n
FROM
gametype_ratings
"""
)
def get_sql_top_players_query_by_gametype_id(gametype_id: int):
if cache.USE_AVG_PERF[gametype_id]:
return SQL_TOP_PLAYERS_BY_GAMETYPE_R2
else:
return SQL_TOP_PLAYERS_BY_GAMETYPE_R1
async def get_list(con: Connection, gametype_id: int, page: int, show_inactive=False):
| await con.set_type_codec(
"json", encoder=json.dumps, decoder=json.loads, schema="pg_catalog"
)
query = get_sql_top_players_query_by_gametype_id(
gametype_id
) + "LIMIT {LIMIT} OFFSET {OFFSET}".format(
LIMIT=int(PLAYER_COUNT_PER_PAGE), OFFSET=int(PLAYER_COUNT_PER_PAGE * page)
)
start_timestamp = 0
if show_inactive is False:
start_timestamp = cache.LAST_GAME_TIMESTAMPS[gametype_id] - KEEPING_TIME
result = []
player_count = 0
async for row in con.cursor(query, start_timestamp, gametype_id):
if row[0] != None:
result.append(
{
"_id": str(row[0]),
"name": row[1],
"model": (
row[2] + ("/default" if row[2].find("/") == -1 else "")
).lower(),
"rating": round(row[3], 2),
"rd": round(row[4], 2),
"n": row[5],
"rank": row[7],
}
)
player_count = row[6]
steam_ids = list(map(lambda player: int(player["_id"]), result))
query = """
SELECT
s.steam_id,
CEIL(AVG(CASE
WHEN m.team1_score > m.team2_score AND s.team = 1 THEN 1
WHEN m.team2_score > m.team1_score AND s.team = 2 THEN 1
ELSE 0
END)*100)
FROM
matches m
LEFT JOIN scoreboards s ON s.match_id = m.match_id
WHERE
m.gametype_id = $1 AND s.steam_id = ANY($2)
GROUP BY s.steam_id;
"""
for row in await con.fetch(query, gametype_id, steam_ids):
try:
result_index = steam_ids.index(row[0])
result[result_index]["win_ratio"] = int(row[1])
except ValueError:
pass # must not happen
return {
"ok": True,
"response": result,
"page_count": ceil(player_count / PLAYER_COUNT_PER_PAGE),
} | identifier_body | |
downloadFiles.py | # -*- coding: utf-8 -*-
import datetime
import os
import urllib.request
import re
def getUrlList(url):
ret = []
baseUrl = re.findall('(http://.+?)/',url)[0]
httpRequest = urllib.request.Request(url)
httpResponse = urllib.request.urlopen(httpRequest)
html = httpResponse.read().decode('utf-8').split('>')
for h in html:
#find link for pdf like <a href='*.pdf'>*</a> also .xls, xlsx, .zip
u = re.findall('<a href="(.+\.)(pdf|xls|xlsx|zip)"', h)
if len(u) > 0:
ret.append(baseUrl + u[0][0] + u[0][1])
return ret
def downloadFiles_exe(url, downloadPath):
try:
urllib.request.urlretrieve(url,downloadPath)
print('[ok] %s' % url)
except:
print('[ng] %s' % url)
return
def | (url, downloadDir):
print('[start] %s' % url)
ul = getUrlList(url)
for u in ul:
f = re.findall('http://.+/(.+\.)(pdf|xls|xlsx|zip)',u)[0]
downloadFiles_exe(u, os.path.join(downloadDir, f[0]+f[1]))
return
def main(argv):
if len(argv) == 3:
urlList = argv[1]
downloadDir = argv[2]
else:
f = open('win.ini','r')
s = f.read().split('\n')
urlList = s[0]
downloadDir = s[1]
f.close()
today = datetime.datetime.today().strftime('%Y%m%d')
#./dl/yyyyMMdd/
os.mkdir(os.path.join(downloadDir, today))
for line in open(urlList,'r'):
#tag \t url
l = line.split('\t')
os.mkdir(os.path.join(downloadDir, today, l[0]))
downloadFiles(l[1], os.path.join(downloadDir, today, l[0]))
print(os.path.join(downloadDir, today))
return
if __name__ == "__main__":
import sys
#python3 donwloadFiles.py ../etc/urlList.tsv ../dl/
main(sys.argv)
| downloadFiles | identifier_name |
downloadFiles.py | # -*- coding: utf-8 -*-
import datetime
import os
import urllib.request
import re
def getUrlList(url):
ret = []
baseUrl = re.findall('(http://.+?)/',url)[0]
httpRequest = urllib.request.Request(url)
httpResponse = urllib.request.urlopen(httpRequest)
html = httpResponse.read().decode('utf-8').split('>')
for h in html:
#find link for pdf like <a href='*.pdf'>*</a> also .xls, xlsx, .zip
u = re.findall('<a href="(.+\.)(pdf|xls|xlsx|zip)"', h)
if len(u) > 0:
| ret.append(baseUrl + u[0][0] + u[0][1])
return ret
def downloadFiles_exe(url, downloadPath):
try:
urllib.request.urlretrieve(url,downloadPath)
print('[ok] %s' % url)
except:
print('[ng] %s' % url)
return
def downloadFiles(url, downloadDir):
print('[start] %s' % url)
ul = getUrlList(url)
for u in ul:
f = re.findall('http://.+/(.+\.)(pdf|xls|xlsx|zip)',u)[0]
downloadFiles_exe(u, os.path.join(downloadDir, f[0]+f[1]))
return
def main(argv):
if len(argv) == 3:
urlList = argv[1]
downloadDir = argv[2]
else:
f = open('win.ini','r')
s = f.read().split('\n')
urlList = s[0]
downloadDir = s[1]
f.close()
today = datetime.datetime.today().strftime('%Y%m%d')
#./dl/yyyyMMdd/
os.mkdir(os.path.join(downloadDir, today))
for line in open(urlList,'r'):
#tag \t url
l = line.split('\t')
os.mkdir(os.path.join(downloadDir, today, l[0]))
downloadFiles(l[1], os.path.join(downloadDir, today, l[0]))
print(os.path.join(downloadDir, today))
return
if __name__ == "__main__":
import sys
#python3 donwloadFiles.py ../etc/urlList.tsv ../dl/
main(sys.argv) | random_line_split | |
downloadFiles.py | # -*- coding: utf-8 -*-
import datetime
import os
import urllib.request
import re
def getUrlList(url):
ret = []
baseUrl = re.findall('(http://.+?)/',url)[0]
httpRequest = urllib.request.Request(url)
httpResponse = urllib.request.urlopen(httpRequest)
html = httpResponse.read().decode('utf-8').split('>')
for h in html:
#find link for pdf like <a href='*.pdf'>*</a> also .xls, xlsx, .zip
u = re.findall('<a href="(.+\.)(pdf|xls|xlsx|zip)"', h)
if len(u) > 0:
ret.append(baseUrl + u[0][0] + u[0][1])
return ret
def downloadFiles_exe(url, downloadPath):
|
def downloadFiles(url, downloadDir):
print('[start] %s' % url)
ul = getUrlList(url)
for u in ul:
f = re.findall('http://.+/(.+\.)(pdf|xls|xlsx|zip)',u)[0]
downloadFiles_exe(u, os.path.join(downloadDir, f[0]+f[1]))
return
def main(argv):
if len(argv) == 3:
urlList = argv[1]
downloadDir = argv[2]
else:
f = open('win.ini','r')
s = f.read().split('\n')
urlList = s[0]
downloadDir = s[1]
f.close()
today = datetime.datetime.today().strftime('%Y%m%d')
#./dl/yyyyMMdd/
os.mkdir(os.path.join(downloadDir, today))
for line in open(urlList,'r'):
#tag \t url
l = line.split('\t')
os.mkdir(os.path.join(downloadDir, today, l[0]))
downloadFiles(l[1], os.path.join(downloadDir, today, l[0]))
print(os.path.join(downloadDir, today))
return
if __name__ == "__main__":
import sys
#python3 donwloadFiles.py ../etc/urlList.tsv ../dl/
main(sys.argv)
| try:
urllib.request.urlretrieve(url,downloadPath)
print('[ok] %s' % url)
except:
print('[ng] %s' % url)
return | identifier_body |
downloadFiles.py | # -*- coding: utf-8 -*-
import datetime
import os
import urllib.request
import re
def getUrlList(url):
ret = []
baseUrl = re.findall('(http://.+?)/',url)[0]
httpRequest = urllib.request.Request(url)
httpResponse = urllib.request.urlopen(httpRequest)
html = httpResponse.read().decode('utf-8').split('>')
for h in html:
#find link for pdf like <a href='*.pdf'>*</a> also .xls, xlsx, .zip
u = re.findall('<a href="(.+\.)(pdf|xls|xlsx|zip)"', h)
if len(u) > 0:
ret.append(baseUrl + u[0][0] + u[0][1])
return ret
def downloadFiles_exe(url, downloadPath):
try:
urllib.request.urlretrieve(url,downloadPath)
print('[ok] %s' % url)
except:
print('[ng] %s' % url)
return
def downloadFiles(url, downloadDir):
print('[start] %s' % url)
ul = getUrlList(url)
for u in ul:
f = re.findall('http://.+/(.+\.)(pdf|xls|xlsx|zip)',u)[0]
downloadFiles_exe(u, os.path.join(downloadDir, f[0]+f[1]))
return
def main(argv):
if len(argv) == 3:
urlList = argv[1]
downloadDir = argv[2]
else:
|
today = datetime.datetime.today().strftime('%Y%m%d')
#./dl/yyyyMMdd/
os.mkdir(os.path.join(downloadDir, today))
for line in open(urlList,'r'):
#tag \t url
l = line.split('\t')
os.mkdir(os.path.join(downloadDir, today, l[0]))
downloadFiles(l[1], os.path.join(downloadDir, today, l[0]))
print(os.path.join(downloadDir, today))
return
if __name__ == "__main__":
import sys
#python3 donwloadFiles.py ../etc/urlList.tsv ../dl/
main(sys.argv)
| f = open('win.ini','r')
s = f.read().split('\n')
urlList = s[0]
downloadDir = s[1]
f.close() | conditional_block |
cssparse.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
/// Some little helpers for hooking up the HTML parser with the CSS parser.
use std::cell::Cell;
use std::comm;
use std::comm::Port;
use std::task;
use style::Stylesheet;
use servo_net::resource_task::{Load, ProgressMsg, Payload, Done, ResourceTask};
use extra::url::Url;
/// Where a style sheet comes from.
pub enum StylesheetProvenance {
UrlProvenance(Url),
InlineProvenance(Url, ~str),
}
pub fn spawn_css_parser(provenance: StylesheetProvenance,
resource_task: ResourceTask)
-> Port<Stylesheet> {
let (result_port, result_chan) = comm::stream();
let provenance_cell = Cell::new(provenance); | do task::spawn {
// TODO: CSS parsing should take a base URL.
let _url = do provenance_cell.with_ref |p| {
match *p {
UrlProvenance(ref the_url) => (*the_url).clone(),
InlineProvenance(ref the_url, _) => (*the_url).clone()
}
};
let sheet = match provenance_cell.take() {
UrlProvenance(url) => {
debug!("cssparse: loading style sheet at {:s}", url.to_str());
let (input_port, input_chan) = comm::stream();
resource_task.send(Load(url, input_chan));
Stylesheet::from_iter(ProgressMsgPortIterator {
progress_port: input_port.recv().progress_port
})
}
InlineProvenance(_, data) => {
Stylesheet::from_str(data)
}
};
result_chan.send(sheet);
}
return result_port;
}
struct ProgressMsgPortIterator {
progress_port: Port<ProgressMsg>
}
impl Iterator<~[u8]> for ProgressMsgPortIterator {
fn next(&mut self) -> Option<~[u8]> {
match self.progress_port.recv() {
Payload(data) => Some(data),
Done(*) => None
}
}
} | random_line_split | |
cssparse.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
/// Some little helpers for hooking up the HTML parser with the CSS parser.
use std::cell::Cell;
use std::comm;
use std::comm::Port;
use std::task;
use style::Stylesheet;
use servo_net::resource_task::{Load, ProgressMsg, Payload, Done, ResourceTask};
use extra::url::Url;
/// Where a style sheet comes from.
pub enum StylesheetProvenance {
UrlProvenance(Url),
InlineProvenance(Url, ~str),
}
pub fn | (provenance: StylesheetProvenance,
resource_task: ResourceTask)
-> Port<Stylesheet> {
let (result_port, result_chan) = comm::stream();
let provenance_cell = Cell::new(provenance);
do task::spawn {
// TODO: CSS parsing should take a base URL.
let _url = do provenance_cell.with_ref |p| {
match *p {
UrlProvenance(ref the_url) => (*the_url).clone(),
InlineProvenance(ref the_url, _) => (*the_url).clone()
}
};
let sheet = match provenance_cell.take() {
UrlProvenance(url) => {
debug!("cssparse: loading style sheet at {:s}", url.to_str());
let (input_port, input_chan) = comm::stream();
resource_task.send(Load(url, input_chan));
Stylesheet::from_iter(ProgressMsgPortIterator {
progress_port: input_port.recv().progress_port
})
}
InlineProvenance(_, data) => {
Stylesheet::from_str(data)
}
};
result_chan.send(sheet);
}
return result_port;
}
struct ProgressMsgPortIterator {
progress_port: Port<ProgressMsg>
}
impl Iterator<~[u8]> for ProgressMsgPortIterator {
fn next(&mut self) -> Option<~[u8]> {
match self.progress_port.recv() {
Payload(data) => Some(data),
Done(*) => None
}
}
}
| spawn_css_parser | identifier_name |
fluent-ffmpeg-tests.ts | import ffmpeg = require('fluent-ffmpeg');
import { createWriteStream } from 'fs';
const stream = createWriteStream('outputfile.divx');
ffmpeg('/path/to/file.avi')
.output('outputfile.mp4')
.output(stream);
ffmpeg('/path/to/file.avi')
// You may pass a pipe() options object when using a stream
.output(stream, { end: true });
// Output-related methods apply to the last output added
ffmpeg('/path/to/file.avi')
.output('outputfile.mp4')
.audioCodec('libfaac')
.videoCodec('libx264')
.size('320x200')
.output(stream)
.preset('divx')
.size('640x480');
// Use the run() method to run commands with multiple outputs
ffmpeg('/path/to/file.avi')
.output('outputfile.mp4')
.output(stream)
.on('end', () => {
console.log('Finished processing');
})
.run();
// Create a command to convert source.avi to MP4
const command = ffmpeg('/path/to/source.avi')
.audioCodec('libfaac')
.videoCodec('libx264')
.format('mp4');
// Create a clone to save a small resized version
command.clone()
.size('320x200')
.save('/path/to/output-small.mp4');
// Create a clone to save a medium resized version
command.clone()
.size('640x400')
.save('/path/to/output-medium.mp4');
// Save a converted version with the original size
command.save('/path/to/output-original-size.mp4');
ffmpeg.ffprobe('/path/to/file.avi', (err, metadata) => {
console.dir(metadata);
});
ffmpeg.setFfmpegPath('path/to/ffmpeg');
ffmpeg.setFfprobePath('path/to/ffprobe');
ffmpeg.setFfmpegPath('path/to/ffmpeg');
ffmpeg.getAvailableFormats((err, formats) => {
console.log('Available formats:');
console.dir(formats);
});
ffmpeg.getAvailableCodecs((err, codecs) => {
console.log('Available codecs:');
console.dir(codecs); | ffmpeg.getAvailableEncoders((err, encoders) => {
console.log('Available encoders:');
console.dir(encoders);
});
ffmpeg.getAvailableFilters((err, filters) => {
console.log("Available filters:");
console.dir(filters);
}); | });
| random_line_split |
menu.test.ts | import '@toba/test';
import { MockRequest, MockResponse } from '@toba/test';
import { Header } from '@toba/tools';
import { menu } from '../controllers/';
import { Page } from '../views/';
const req = new MockRequest();
const res = new MockResponse(req);
beforeEach(() => {
res.reset();
req.reset();
});
test('builds data for main menu', done => {
res.onEnd = () => {
expect(res).toRenderTemplate(Page.PostMenuData);
expect(res.headers).toHaveKeyValue(Header.Vary, Header.Accept.Encoding);
expect(res.rendered.context).toHaveProperty('blog');
done();
};
menu.data(req, res); | expect(res).toRenderTemplate(Page.MobileMenuData);
expect(res.rendered.context).toHaveProperty('blog');
done();
};
menu.mobile(req, res);
}); | });
it('renders mobile menu', done => {
res.onEnd = () => { | random_line_split |
apsim75.py | import glob
import os
import shutil
import sys
import tarfile
import traceback
from model import Model
from subprocess import Popen, PIPE
class Apsim75(Model):
def run(self, latidx, lonidx):
try:
apsim_bin = self.config.get('executable')
# The apsim 'executable' is a gzipped tarball that needs to be extracted into the current working directory
tar = tarfile.open(apsim_bin)
tar.extractall()
tar.close()
model_dir = 'Model' | for xml_file in glob.glob('*.xml'):
if os.path.basename(xml_file) == 'Apsim.xml':
continue
old_xml = '%s/%s' % (model_dir, os.path.basename(xml_file))
if os.path.isfile(old_xml):
os.remove(old_xml)
if os.path.islink(xml_file):
link = os.readlink(xml_file)
shutil.copy(link, model_dir)
else:
shutil.copy(xml_file, model_dir)
# Create sim files
p = Popen('source paths.sh ; mono Model/ApsimToSim.exe Generic.apsim', shell=True, executable='/bin/bash', stdout=PIPE, stderr=PIPE)
stdout, stderr = p.communicate()
stdout_file = open('RESULT.OUT', 'w')
stdout_file.write(stdout)
if p.returncode != 0:
rc = p.returncode
# Run apsim for each sim file
for sim in glob.glob('*.sim'):
p = Popen('source paths.sh ; Model/ApsimModel.exe %s' % sim, shell=True, executable='/bin/bash', stdout=PIPE, stderr=PIPE)
stdout, stderr = p.communicate()
stdout_file.write(stdout)
if p.returncode != 0:
rc = p.returncode
stdout_file.close()
return True
except:
print "[%s]: %s" % (os.path.basename(__file__), traceback.format_exc())
return False | random_line_split | |
apsim75.py | import glob
import os
import shutil
import sys
import tarfile
import traceback
from model import Model
from subprocess import Popen, PIPE
class Apsim75(Model):
| def run(self, latidx, lonidx):
try:
apsim_bin = self.config.get('executable')
# The apsim 'executable' is a gzipped tarball that needs to be extracted into the current working directory
tar = tarfile.open(apsim_bin)
tar.extractall()
tar.close()
model_dir = 'Model'
for xml_file in glob.glob('*.xml'):
if os.path.basename(xml_file) == 'Apsim.xml':
continue
old_xml = '%s/%s' % (model_dir, os.path.basename(xml_file))
if os.path.isfile(old_xml):
os.remove(old_xml)
if os.path.islink(xml_file):
link = os.readlink(xml_file)
shutil.copy(link, model_dir)
else:
shutil.copy(xml_file, model_dir)
# Create sim files
p = Popen('source paths.sh ; mono Model/ApsimToSim.exe Generic.apsim', shell=True, executable='/bin/bash', stdout=PIPE, stderr=PIPE)
stdout, stderr = p.communicate()
stdout_file = open('RESULT.OUT', 'w')
stdout_file.write(stdout)
if p.returncode != 0:
rc = p.returncode
# Run apsim for each sim file
for sim in glob.glob('*.sim'):
p = Popen('source paths.sh ; Model/ApsimModel.exe %s' % sim, shell=True, executable='/bin/bash', stdout=PIPE, stderr=PIPE)
stdout, stderr = p.communicate()
stdout_file.write(stdout)
if p.returncode != 0:
rc = p.returncode
stdout_file.close()
return True
except:
print "[%s]: %s" % (os.path.basename(__file__), traceback.format_exc())
return False | identifier_body | |
apsim75.py | import glob
import os
import shutil
import sys
import tarfile
import traceback
from model import Model
from subprocess import Popen, PIPE
class Apsim75(Model):
def run(self, latidx, lonidx):
try:
apsim_bin = self.config.get('executable')
# The apsim 'executable' is a gzipped tarball that needs to be extracted into the current working directory
tar = tarfile.open(apsim_bin)
tar.extractall()
tar.close()
model_dir = 'Model'
for xml_file in glob.glob('*.xml'):
if os.path.basename(xml_file) == 'Apsim.xml':
continue
old_xml = '%s/%s' % (model_dir, os.path.basename(xml_file))
if os.path.isfile(old_xml):
os.remove(old_xml)
if os.path.islink(xml_file):
link = os.readlink(xml_file)
shutil.copy(link, model_dir)
else:
|
# Create sim files
p = Popen('source paths.sh ; mono Model/ApsimToSim.exe Generic.apsim', shell=True, executable='/bin/bash', stdout=PIPE, stderr=PIPE)
stdout, stderr = p.communicate()
stdout_file = open('RESULT.OUT', 'w')
stdout_file.write(stdout)
if p.returncode != 0:
rc = p.returncode
# Run apsim for each sim file
for sim in glob.glob('*.sim'):
p = Popen('source paths.sh ; Model/ApsimModel.exe %s' % sim, shell=True, executable='/bin/bash', stdout=PIPE, stderr=PIPE)
stdout, stderr = p.communicate()
stdout_file.write(stdout)
if p.returncode != 0:
rc = p.returncode
stdout_file.close()
return True
except:
print "[%s]: %s" % (os.path.basename(__file__), traceback.format_exc())
return False
| shutil.copy(xml_file, model_dir) | conditional_block |
apsim75.py | import glob
import os
import shutil
import sys
import tarfile
import traceback
from model import Model
from subprocess import Popen, PIPE
class | (Model):
def run(self, latidx, lonidx):
try:
apsim_bin = self.config.get('executable')
# The apsim 'executable' is a gzipped tarball that needs to be extracted into the current working directory
tar = tarfile.open(apsim_bin)
tar.extractall()
tar.close()
model_dir = 'Model'
for xml_file in glob.glob('*.xml'):
if os.path.basename(xml_file) == 'Apsim.xml':
continue
old_xml = '%s/%s' % (model_dir, os.path.basename(xml_file))
if os.path.isfile(old_xml):
os.remove(old_xml)
if os.path.islink(xml_file):
link = os.readlink(xml_file)
shutil.copy(link, model_dir)
else:
shutil.copy(xml_file, model_dir)
# Create sim files
p = Popen('source paths.sh ; mono Model/ApsimToSim.exe Generic.apsim', shell=True, executable='/bin/bash', stdout=PIPE, stderr=PIPE)
stdout, stderr = p.communicate()
stdout_file = open('RESULT.OUT', 'w')
stdout_file.write(stdout)
if p.returncode != 0:
rc = p.returncode
# Run apsim for each sim file
for sim in glob.glob('*.sim'):
p = Popen('source paths.sh ; Model/ApsimModel.exe %s' % sim, shell=True, executable='/bin/bash', stdout=PIPE, stderr=PIPE)
stdout, stderr = p.communicate()
stdout_file.write(stdout)
if p.returncode != 0:
rc = p.returncode
stdout_file.close()
return True
except:
print "[%s]: %s" % (os.path.basename(__file__), traceback.format_exc())
return False
| Apsim75 | identifier_name |
int32_test.py | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License. | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.compiler.tensorrt.test import tf_trt_integration_test_base as trt_test
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn
from tensorflow.python.platform import test
class ExcludeUnsupportedInt32Test(trt_test.TfTrtIntegrationTestBase):
"""Test exclusion of ops which are not supported in INT32 mode by TF-TRT"""
def _ConstOp(self, shape, dtype):
return constant_op.constant(np.random.randn(*shape), dtype=dtype)
def GraphFn(self, x):
dtype = x.dtype
b = self._ConstOp((4, 10), dtype)
x = math_ops.matmul(x, b)
b = self._ConstOp((10,), dtype)
x = nn.bias_add(x, b)
return array_ops.identity(x, name='output_0')
def GetParams(self):
return self.BuildParams(self.GraphFn, dtypes.int32, [[100, 4]], [[100, 10]])
def GetConversionParams(self, run_params):
"""Return a ConversionParams for test."""
conversion_params = super(ExcludeUnsupportedInt32Test,
self).GetConversionParams(run_params)
return conversion_params._replace(
max_batch_size=100,
maximum_cached_engines=1,
# Disable layout optimizer, since it will convert BiasAdd with NHWC
# format to NCHW format under four dimentional input.
rewriter_config_template=trt_test.OptimizerDisabledRewriterConfig())
def ExpectedEnginesToBuild(self, run_params):
"""Return the expected engines to build."""
return []
if __name__ == '__main__':
test.main() | # ==============================================================================
"""Test conversion of graphs involving INT32 tensors and operations."""
| random_line_split |
int32_test.py | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Test conversion of graphs involving INT32 tensors and operations."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.compiler.tensorrt.test import tf_trt_integration_test_base as trt_test
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn
from tensorflow.python.platform import test
class ExcludeUnsupportedInt32Test(trt_test.TfTrtIntegrationTestBase):
"""Test exclusion of ops which are not supported in INT32 mode by TF-TRT"""
def _ConstOp(self, shape, dtype):
return constant_op.constant(np.random.randn(*shape), dtype=dtype)
def GraphFn(self, x):
|
def GetParams(self):
return self.BuildParams(self.GraphFn, dtypes.int32, [[100, 4]], [[100, 10]])
def GetConversionParams(self, run_params):
"""Return a ConversionParams for test."""
conversion_params = super(ExcludeUnsupportedInt32Test,
self).GetConversionParams(run_params)
return conversion_params._replace(
max_batch_size=100,
maximum_cached_engines=1,
# Disable layout optimizer, since it will convert BiasAdd with NHWC
# format to NCHW format under four dimentional input.
rewriter_config_template=trt_test.OptimizerDisabledRewriterConfig())
def ExpectedEnginesToBuild(self, run_params):
"""Return the expected engines to build."""
return []
if __name__ == '__main__':
test.main()
| dtype = x.dtype
b = self._ConstOp((4, 10), dtype)
x = math_ops.matmul(x, b)
b = self._ConstOp((10,), dtype)
x = nn.bias_add(x, b)
return array_ops.identity(x, name='output_0') | identifier_body |
int32_test.py | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Test conversion of graphs involving INT32 tensors and operations."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.compiler.tensorrt.test import tf_trt_integration_test_base as trt_test
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn
from tensorflow.python.platform import test
class ExcludeUnsupportedInt32Test(trt_test.TfTrtIntegrationTestBase):
"""Test exclusion of ops which are not supported in INT32 mode by TF-TRT"""
def _ConstOp(self, shape, dtype):
return constant_op.constant(np.random.randn(*shape), dtype=dtype)
def GraphFn(self, x):
dtype = x.dtype
b = self._ConstOp((4, 10), dtype)
x = math_ops.matmul(x, b)
b = self._ConstOp((10,), dtype)
x = nn.bias_add(x, b)
return array_ops.identity(x, name='output_0')
def GetParams(self):
return self.BuildParams(self.GraphFn, dtypes.int32, [[100, 4]], [[100, 10]])
def GetConversionParams(self, run_params):
"""Return a ConversionParams for test."""
conversion_params = super(ExcludeUnsupportedInt32Test,
self).GetConversionParams(run_params)
return conversion_params._replace(
max_batch_size=100,
maximum_cached_engines=1,
# Disable layout optimizer, since it will convert BiasAdd with NHWC
# format to NCHW format under four dimentional input.
rewriter_config_template=trt_test.OptimizerDisabledRewriterConfig())
def | (self, run_params):
"""Return the expected engines to build."""
return []
if __name__ == '__main__':
test.main()
| ExpectedEnginesToBuild | identifier_name |
int32_test.py | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Test conversion of graphs involving INT32 tensors and operations."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.compiler.tensorrt.test import tf_trt_integration_test_base as trt_test
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn
from tensorflow.python.platform import test
class ExcludeUnsupportedInt32Test(trt_test.TfTrtIntegrationTestBase):
"""Test exclusion of ops which are not supported in INT32 mode by TF-TRT"""
def _ConstOp(self, shape, dtype):
return constant_op.constant(np.random.randn(*shape), dtype=dtype)
def GraphFn(self, x):
dtype = x.dtype
b = self._ConstOp((4, 10), dtype)
x = math_ops.matmul(x, b)
b = self._ConstOp((10,), dtype)
x = nn.bias_add(x, b)
return array_ops.identity(x, name='output_0')
def GetParams(self):
return self.BuildParams(self.GraphFn, dtypes.int32, [[100, 4]], [[100, 10]])
def GetConversionParams(self, run_params):
"""Return a ConversionParams for test."""
conversion_params = super(ExcludeUnsupportedInt32Test,
self).GetConversionParams(run_params)
return conversion_params._replace(
max_batch_size=100,
maximum_cached_engines=1,
# Disable layout optimizer, since it will convert BiasAdd with NHWC
# format to NCHW format under four dimentional input.
rewriter_config_template=trt_test.OptimizerDisabledRewriterConfig())
def ExpectedEnginesToBuild(self, run_params):
"""Return the expected engines to build."""
return []
if __name__ == '__main__':
| test.main() | conditional_block | |
certificates.py | # pylint: disable=C0111
# pylint: disable=W0621
from lettuce import world, step
from lettuce.django import django_url
from course_modes.models import CourseMode
from nose.tools import assert_equal
UPSELL_LINK_CSS = '.message-upsell a.action-upgrade[href*="edx/999/Certificates"]'
def create_cert_course():
world.clear_courses()
org = 'edx'
number = '999'
name = 'Certificates'
course_id = '{org}/{number}/{name}'.format(
org=org, number=number, name=name)
world.scenario_dict['course_id'] = course_id
world.scenario_dict['COURSE'] = world.CourseFactory.create(
org=org, number=number, display_name=name)
audit_mode = world.CourseModeFactory.create(
course_id=course_id,
mode_slug='audit',
mode_display_name='audit course',
min_price=0,
)
assert isinstance(audit_mode, CourseMode)
verfied_mode = world.CourseModeFactory.create(
course_id=course_id,
mode_slug='verified',
mode_display_name='verified cert course',
min_price=16,
suggested_prices='32,64,128',
currency='usd',
)
assert isinstance(verfied_mode, CourseMode)
def register():
url = 'courses/{org}/{number}/{name}/about'.format(
org='edx', number='999', name='Certificates')
world.browser.visit(django_url(url))
world.css_click('section.intro a.register')
assert world.is_css_present('section.wrapper h3.title')
@step(u'the course has an honor mode')
def the_course_has_an_honor_mode(step):
create_cert_course()
honor_mode = world.CourseModeFactory.create(
course_id=world.scenario_dict['course_id'],
mode_slug='honor',
mode_display_name='honor mode',
min_price=0,
)
assert isinstance(honor_mode, CourseMode)
@step(u'I select the audit track$')
def select_the_audit_track(step):
create_cert_course()
register()
btn_css = 'input[value="Select Audit"]'
world.wait(1) # TODO remove this after troubleshooting JZ
world.css_find(btn_css)
world.css_click(btn_css)
def select_contribution(amount=32):
radio_css = 'input[value="{}"]'.format(amount)
world.css_click(radio_css)
assert world.css_find(radio_css).selected
def click_verified_track_button():
world.wait_for_ajax_complete()
btn_css = 'input[value="Select Certificate"]'
world.css_click(btn_css)
@step(u'I select the verified track for upgrade')
def select_verified_track_upgrade(step):
select_contribution(32)
world.wait_for_ajax_complete()
btn_css = 'input[value="Upgrade Your Registration"]'
world.css_click(btn_css)
# TODO: might want to change this depending on the changes for upgrade
assert world.is_css_present('section.progress')
@step(u'I select the verified track$')
def select_the_verified_track(step):
create_cert_course()
register()
select_contribution(32)
click_verified_track_button()
assert world.is_css_present('section.progress')
@step(u'I should see the course on my dashboard$')
def should_see_the_course_on_my_dashboard(step):
course_css = 'li.course-item'
assert world.is_css_present(course_css)
@step(u'I go to step "([^"]*)"$')
def goto_next_step(step, step_num):
btn_css = {
'1': '#face_next_button',
'2': '#face_next_link',
'3': '#photo_id_next_link',
'4': '#pay_button',
}
next_css = {
'1': 'div#wrapper-facephoto.carousel-active',
'2': 'div#wrapper-idphoto.carousel-active',
'3': 'div#wrapper-review.carousel-active',
'4': 'div#wrapper-review.carousel-active',
}
world.css_click(btn_css[step_num])
# Pressing the button will advance the carousel to the next item
# and give the wrapper div the "carousel-active" class
assert world.css_find(next_css[step_num])
@step(u'I capture my "([^"]*)" photo$')
def capture_my_photo(step, name):
# Hard coded red dot image
image_data = 'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAUAAAAFCAYAAACNbyblAAAAHElEQVQI12P4//8/w38GIAXDIBKE0DHxgljNBAAO9TXL0Y4OHwAAAABJRU5ErkJggg=='
snapshot_script = "$('#{}_image')[0].src = '{}';".format(name, image_data)
# Mirror the javascript of the photo_verification.html page
world.browser.execute_script(snapshot_script)
world.browser.execute_script("$('#{}_capture_button').hide();".format(name))
world.browser.execute_script("$('#{}_reset_button').show();".format(name))
world.browser.execute_script("$('#{}_approve_button').show();".format(name))
assert world.css_find('#{}_approve_button'.format(name))
@step(u'I approve my "([^"]*)" photo$')
def approve_my_photo(step, name):
button_css = {
'face': 'div#wrapper-facephoto li.control-approve',
'photo_id': 'div#wrapper-idphoto li.control-approve',
}
wrapper_css = {
'face': 'div#wrapper-facephoto',
'photo_id': 'div#wrapper-idphoto',
}
# Make sure that the carousel is in the right place
assert world.css_has_class(wrapper_css[name], 'carousel-active')
assert world.css_find(button_css[name])
# HACK: for now don't bother clicking the approve button for
# id_photo, because it is sending you back to Step 1.
# Come back and figure it out later. JZ Aug 29 2013
if name=='face':
|
# Make sure you didn't advance the carousel
assert world.css_has_class(wrapper_css[name], 'carousel-active')
@step(u'I select a contribution amount$')
def select_contribution_amount(step):
select_contribution(32)
@step(u'I confirm that the details match$')
def confirm_details_match(step):
# First you need to scroll down on the page
# to make the element visible?
# Currently chrome is failing with ElementNotVisibleException
world.browser.execute_script("window.scrollTo(0,1024)")
cb_css = 'input#confirm_pics_good'
world.css_click(cb_css)
assert world.css_find(cb_css).checked
@step(u'I am at the payment page')
def at_the_payment_page(step):
world.wait_for_present('input[name=transactionSignature]')
@step(u'I submit valid payment information$')
def submit_payment(step):
# First make sure that the page is done if it still executing
# an ajax query.
world.wait_for_ajax_complete()
button_css = 'input[value=Submit]'
world.css_click(button_css)
@step(u'I have submitted face and ID photos$')
def submitted_face_and_id_photos(step):
step.given('I am logged in')
step.given('I select the verified track')
step.given('I go to step "1"')
step.given('I capture my "face" photo')
step.given('I approve my "face" photo')
step.given('I go to step "2"')
step.given('I capture my "photo_id" photo')
step.given('I approve my "photo_id" photo')
step.given('I go to step "3"')
@step(u'I have submitted photos to verify my identity')
def submitted_photos_to_verify_my_identity(step):
step.given('I have submitted face and ID photos')
step.given('I select a contribution amount')
step.given('I confirm that the details match')
step.given('I go to step "4"')
@step(u'I submit my photos and confirm')
def submit_photos_and_confirm(step):
step.given('I go to step "1"')
step.given('I capture my "face" photo')
step.given('I approve my "face" photo')
step.given('I go to step "2"')
step.given('I capture my "photo_id" photo')
step.given('I approve my "photo_id" photo')
step.given('I go to step "3"')
step.given('I select a contribution amount')
step.given('I confirm that the details match')
step.given('I go to step "4"')
@step(u'I see that my payment was successful')
def see_that_my_payment_was_successful(step):
title = world.css_find('div.wrapper-content-main h3.title')
assert_equal(title.text, u'Congratulations! You are now verified on edX.')
@step(u'I navigate to my dashboard')
def navigate_to_my_dashboard(step):
world.css_click('span.avatar')
assert world.css_find('section.my-courses')
@step(u'I see the course on my dashboard')
def see_the_course_on_my_dashboard(step):
course_link_css = 'section.my-courses a[href*="edx/999/Certificates"]'
assert world.is_css_present(course_link_css)
@step(u'I see the upsell link on my dashboard')
def see_upsell_link_on_my_dashboard(step):
course_link_css = UPSELL_LINK_CSS
assert world.is_css_present(course_link_css)
@step(u'I do not see the upsell link on my dashboard')
def see_upsell_link_on_my_dashboard(step):
course_link_css = UPSELL_LINK_CSS
assert world.is_css_not_present(course_link_css)
@step(u'I select the upsell link on my dashboard')
def see_upsell_link_on_my_dashboard(step):
# expand the upsell section
world.css_click('.message-upsell')
course_link_css = UPSELL_LINK_CSS
# click the actual link
world.css_click(course_link_css)
@step(u'I see that I am on the verified track')
def see_that_i_am_on_the_verified_track(step):
id_verified_css = 'li.course-item article.course.verified'
assert world.is_css_present(id_verified_css)
@step(u'I leave the flow and return$')
def leave_the_flow_and_return(step):
world.visit('verify_student/verified/edx/999/Certificates/')
@step(u'I am at the verified page$')
def see_the_payment_page(step):
assert world.css_find('button#pay_button')
@step(u'I edit my name$')
def edit_my_name(step):
btn_css = 'a.retake-photos'
world.css_click(btn_css)
@step(u'I select the honor code option$')
def give_a_reason_why_i_cannot_pay(step):
register()
link_css = 'h5 i.expandable-icon'
world.css_click(link_css)
cb_css = 'input#honor-code'
world.css_click(cb_css)
btn_css = 'input[value="Select Certificate"]'
world.css_click(btn_css)
| world.css_click(button_css[name]) | conditional_block |
certificates.py | # pylint: disable=C0111
# pylint: disable=W0621
from lettuce import world, step
from lettuce.django import django_url
from course_modes.models import CourseMode
from nose.tools import assert_equal
UPSELL_LINK_CSS = '.message-upsell a.action-upgrade[href*="edx/999/Certificates"]'
def create_cert_course():
world.clear_courses()
org = 'edx'
number = '999'
name = 'Certificates'
course_id = '{org}/{number}/{name}'.format(
org=org, number=number, name=name)
world.scenario_dict['course_id'] = course_id
world.scenario_dict['COURSE'] = world.CourseFactory.create(
org=org, number=number, display_name=name)
audit_mode = world.CourseModeFactory.create(
course_id=course_id,
mode_slug='audit',
mode_display_name='audit course',
min_price=0,
)
assert isinstance(audit_mode, CourseMode)
verfied_mode = world.CourseModeFactory.create(
course_id=course_id,
mode_slug='verified',
mode_display_name='verified cert course',
min_price=16,
suggested_prices='32,64,128',
currency='usd',
)
assert isinstance(verfied_mode, CourseMode)
def register():
url = 'courses/{org}/{number}/{name}/about'.format(
org='edx', number='999', name='Certificates')
world.browser.visit(django_url(url))
world.css_click('section.intro a.register')
assert world.is_css_present('section.wrapper h3.title')
@step(u'the course has an honor mode')
def the_course_has_an_honor_mode(step):
create_cert_course()
honor_mode = world.CourseModeFactory.create(
course_id=world.scenario_dict['course_id'],
mode_slug='honor',
mode_display_name='honor mode',
min_price=0,
)
assert isinstance(honor_mode, CourseMode)
@step(u'I select the audit track$')
def select_the_audit_track(step):
create_cert_course()
register()
btn_css = 'input[value="Select Audit"]'
world.wait(1) # TODO remove this after troubleshooting JZ
world.css_find(btn_css)
world.css_click(btn_css)
def select_contribution(amount=32):
radio_css = 'input[value="{}"]'.format(amount)
world.css_click(radio_css)
assert world.css_find(radio_css).selected
def click_verified_track_button():
world.wait_for_ajax_complete()
btn_css = 'input[value="Select Certificate"]'
world.css_click(btn_css)
@step(u'I select the verified track for upgrade')
def select_verified_track_upgrade(step):
select_contribution(32)
world.wait_for_ajax_complete()
btn_css = 'input[value="Upgrade Your Registration"]'
world.css_click(btn_css)
# TODO: might want to change this depending on the changes for upgrade
assert world.is_css_present('section.progress')
@step(u'I select the verified track$')
def select_the_verified_track(step):
|
@step(u'I should see the course on my dashboard$')
def should_see_the_course_on_my_dashboard(step):
course_css = 'li.course-item'
assert world.is_css_present(course_css)
@step(u'I go to step "([^"]*)"$')
def goto_next_step(step, step_num):
btn_css = {
'1': '#face_next_button',
'2': '#face_next_link',
'3': '#photo_id_next_link',
'4': '#pay_button',
}
next_css = {
'1': 'div#wrapper-facephoto.carousel-active',
'2': 'div#wrapper-idphoto.carousel-active',
'3': 'div#wrapper-review.carousel-active',
'4': 'div#wrapper-review.carousel-active',
}
world.css_click(btn_css[step_num])
# Pressing the button will advance the carousel to the next item
# and give the wrapper div the "carousel-active" class
assert world.css_find(next_css[step_num])
@step(u'I capture my "([^"]*)" photo$')
def capture_my_photo(step, name):
# Hard coded red dot image
image_data = 'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAUAAAAFCAYAAACNbyblAAAAHElEQVQI12P4//8/w38GIAXDIBKE0DHxgljNBAAO9TXL0Y4OHwAAAABJRU5ErkJggg=='
snapshot_script = "$('#{}_image')[0].src = '{}';".format(name, image_data)
# Mirror the javascript of the photo_verification.html page
world.browser.execute_script(snapshot_script)
world.browser.execute_script("$('#{}_capture_button').hide();".format(name))
world.browser.execute_script("$('#{}_reset_button').show();".format(name))
world.browser.execute_script("$('#{}_approve_button').show();".format(name))
assert world.css_find('#{}_approve_button'.format(name))
@step(u'I approve my "([^"]*)" photo$')
def approve_my_photo(step, name):
button_css = {
'face': 'div#wrapper-facephoto li.control-approve',
'photo_id': 'div#wrapper-idphoto li.control-approve',
}
wrapper_css = {
'face': 'div#wrapper-facephoto',
'photo_id': 'div#wrapper-idphoto',
}
# Make sure that the carousel is in the right place
assert world.css_has_class(wrapper_css[name], 'carousel-active')
assert world.css_find(button_css[name])
# HACK: for now don't bother clicking the approve button for
# id_photo, because it is sending you back to Step 1.
# Come back and figure it out later. JZ Aug 29 2013
if name=='face':
world.css_click(button_css[name])
# Make sure you didn't advance the carousel
assert world.css_has_class(wrapper_css[name], 'carousel-active')
@step(u'I select a contribution amount$')
def select_contribution_amount(step):
select_contribution(32)
@step(u'I confirm that the details match$')
def confirm_details_match(step):
# First you need to scroll down on the page
# to make the element visible?
# Currently chrome is failing with ElementNotVisibleException
world.browser.execute_script("window.scrollTo(0,1024)")
cb_css = 'input#confirm_pics_good'
world.css_click(cb_css)
assert world.css_find(cb_css).checked
@step(u'I am at the payment page')
def at_the_payment_page(step):
world.wait_for_present('input[name=transactionSignature]')
@step(u'I submit valid payment information$')
def submit_payment(step):
# First make sure that the page is done if it still executing
# an ajax query.
world.wait_for_ajax_complete()
button_css = 'input[value=Submit]'
world.css_click(button_css)
@step(u'I have submitted face and ID photos$')
def submitted_face_and_id_photos(step):
step.given('I am logged in')
step.given('I select the verified track')
step.given('I go to step "1"')
step.given('I capture my "face" photo')
step.given('I approve my "face" photo')
step.given('I go to step "2"')
step.given('I capture my "photo_id" photo')
step.given('I approve my "photo_id" photo')
step.given('I go to step "3"')
@step(u'I have submitted photos to verify my identity')
def submitted_photos_to_verify_my_identity(step):
step.given('I have submitted face and ID photos')
step.given('I select a contribution amount')
step.given('I confirm that the details match')
step.given('I go to step "4"')
@step(u'I submit my photos and confirm')
def submit_photos_and_confirm(step):
step.given('I go to step "1"')
step.given('I capture my "face" photo')
step.given('I approve my "face" photo')
step.given('I go to step "2"')
step.given('I capture my "photo_id" photo')
step.given('I approve my "photo_id" photo')
step.given('I go to step "3"')
step.given('I select a contribution amount')
step.given('I confirm that the details match')
step.given('I go to step "4"')
@step(u'I see that my payment was successful')
def see_that_my_payment_was_successful(step):
title = world.css_find('div.wrapper-content-main h3.title')
assert_equal(title.text, u'Congratulations! You are now verified on edX.')
@step(u'I navigate to my dashboard')
def navigate_to_my_dashboard(step):
world.css_click('span.avatar')
assert world.css_find('section.my-courses')
@step(u'I see the course on my dashboard')
def see_the_course_on_my_dashboard(step):
course_link_css = 'section.my-courses a[href*="edx/999/Certificates"]'
assert world.is_css_present(course_link_css)
@step(u'I see the upsell link on my dashboard')
def see_upsell_link_on_my_dashboard(step):
course_link_css = UPSELL_LINK_CSS
assert world.is_css_present(course_link_css)
@step(u'I do not see the upsell link on my dashboard')
def see_upsell_link_on_my_dashboard(step):
course_link_css = UPSELL_LINK_CSS
assert world.is_css_not_present(course_link_css)
@step(u'I select the upsell link on my dashboard')
def see_upsell_link_on_my_dashboard(step):
# expand the upsell section
world.css_click('.message-upsell')
course_link_css = UPSELL_LINK_CSS
# click the actual link
world.css_click(course_link_css)
@step(u'I see that I am on the verified track')
def see_that_i_am_on_the_verified_track(step):
id_verified_css = 'li.course-item article.course.verified'
assert world.is_css_present(id_verified_css)
@step(u'I leave the flow and return$')
def leave_the_flow_and_return(step):
world.visit('verify_student/verified/edx/999/Certificates/')
@step(u'I am at the verified page$')
def see_the_payment_page(step):
assert world.css_find('button#pay_button')
@step(u'I edit my name$')
def edit_my_name(step):
btn_css = 'a.retake-photos'
world.css_click(btn_css)
@step(u'I select the honor code option$')
def give_a_reason_why_i_cannot_pay(step):
register()
link_css = 'h5 i.expandable-icon'
world.css_click(link_css)
cb_css = 'input#honor-code'
world.css_click(cb_css)
btn_css = 'input[value="Select Certificate"]'
world.css_click(btn_css)
| create_cert_course()
register()
select_contribution(32)
click_verified_track_button()
assert world.is_css_present('section.progress') | identifier_body |
certificates.py | # pylint: disable=C0111
# pylint: disable=W0621
from lettuce import world, step
from lettuce.django import django_url
from course_modes.models import CourseMode
from nose.tools import assert_equal
UPSELL_LINK_CSS = '.message-upsell a.action-upgrade[href*="edx/999/Certificates"]'
def create_cert_course():
world.clear_courses()
org = 'edx'
number = '999'
name = 'Certificates'
course_id = '{org}/{number}/{name}'.format(
org=org, number=number, name=name)
world.scenario_dict['course_id'] = course_id
world.scenario_dict['COURSE'] = world.CourseFactory.create(
org=org, number=number, display_name=name)
audit_mode = world.CourseModeFactory.create(
course_id=course_id,
mode_slug='audit',
mode_display_name='audit course',
min_price=0,
)
assert isinstance(audit_mode, CourseMode)
verfied_mode = world.CourseModeFactory.create(
course_id=course_id,
mode_slug='verified',
mode_display_name='verified cert course',
min_price=16,
suggested_prices='32,64,128',
currency='usd',
)
assert isinstance(verfied_mode, CourseMode)
def register():
url = 'courses/{org}/{number}/{name}/about'.format(
org='edx', number='999', name='Certificates')
world.browser.visit(django_url(url))
world.css_click('section.intro a.register')
assert world.is_css_present('section.wrapper h3.title')
@step(u'the course has an honor mode')
def the_course_has_an_honor_mode(step):
create_cert_course()
honor_mode = world.CourseModeFactory.create(
course_id=world.scenario_dict['course_id'],
mode_slug='honor',
mode_display_name='honor mode',
min_price=0,
)
assert isinstance(honor_mode, CourseMode)
@step(u'I select the audit track$')
def select_the_audit_track(step):
create_cert_course()
register()
btn_css = 'input[value="Select Audit"]'
world.wait(1) # TODO remove this after troubleshooting JZ
world.css_find(btn_css)
world.css_click(btn_css)
def select_contribution(amount=32):
radio_css = 'input[value="{}"]'.format(amount)
world.css_click(radio_css)
assert world.css_find(radio_css).selected
def click_verified_track_button():
world.wait_for_ajax_complete()
btn_css = 'input[value="Select Certificate"]'
world.css_click(btn_css)
@step(u'I select the verified track for upgrade')
def select_verified_track_upgrade(step):
select_contribution(32)
world.wait_for_ajax_complete()
btn_css = 'input[value="Upgrade Your Registration"]'
world.css_click(btn_css)
# TODO: might want to change this depending on the changes for upgrade
assert world.is_css_present('section.progress')
@step(u'I select the verified track$')
def select_the_verified_track(step):
create_cert_course()
register()
select_contribution(32)
click_verified_track_button()
assert world.is_css_present('section.progress')
@step(u'I should see the course on my dashboard$')
def should_see_the_course_on_my_dashboard(step):
course_css = 'li.course-item'
assert world.is_css_present(course_css)
@step(u'I go to step "([^"]*)"$')
def goto_next_step(step, step_num):
btn_css = {
'1': '#face_next_button',
'2': '#face_next_link',
'3': '#photo_id_next_link',
'4': '#pay_button',
}
next_css = {
'1': 'div#wrapper-facephoto.carousel-active',
'2': 'div#wrapper-idphoto.carousel-active',
'3': 'div#wrapper-review.carousel-active',
'4': 'div#wrapper-review.carousel-active',
}
world.css_click(btn_css[step_num])
# Pressing the button will advance the carousel to the next item
# and give the wrapper div the "carousel-active" class
assert world.css_find(next_css[step_num])
@step(u'I capture my "([^"]*)" photo$')
def capture_my_photo(step, name):
# Hard coded red dot image
image_data = 'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAUAAAAFCAYAAACNbyblAAAAHElEQVQI12P4//8/w38GIAXDIBKE0DHxgljNBAAO9TXL0Y4OHwAAAABJRU5ErkJggg=='
snapshot_script = "$('#{}_image')[0].src = '{}';".format(name, image_data)
# Mirror the javascript of the photo_verification.html page
world.browser.execute_script(snapshot_script)
world.browser.execute_script("$('#{}_capture_button').hide();".format(name))
world.browser.execute_script("$('#{}_reset_button').show();".format(name))
world.browser.execute_script("$('#{}_approve_button').show();".format(name))
assert world.css_find('#{}_approve_button'.format(name))
@step(u'I approve my "([^"]*)" photo$')
def approve_my_photo(step, name):
button_css = {
'face': 'div#wrapper-facephoto li.control-approve',
'photo_id': 'div#wrapper-idphoto li.control-approve',
}
wrapper_css = {
'face': 'div#wrapper-facephoto',
'photo_id': 'div#wrapper-idphoto',
}
# Make sure that the carousel is in the right place
assert world.css_has_class(wrapper_css[name], 'carousel-active')
assert world.css_find(button_css[name])
# HACK: for now don't bother clicking the approve button for
# id_photo, because it is sending you back to Step 1.
# Come back and figure it out later. JZ Aug 29 2013
if name=='face':
world.css_click(button_css[name])
# Make sure you didn't advance the carousel
assert world.css_has_class(wrapper_css[name], 'carousel-active')
@step(u'I select a contribution amount$')
def select_contribution_amount(step):
select_contribution(32)
@step(u'I confirm that the details match$')
def confirm_details_match(step):
# First you need to scroll down on the page
# to make the element visible?
# Currently chrome is failing with ElementNotVisibleException
world.browser.execute_script("window.scrollTo(0,1024)")
cb_css = 'input#confirm_pics_good'
world.css_click(cb_css)
assert world.css_find(cb_css).checked
@step(u'I am at the payment page')
def | (step):
world.wait_for_present('input[name=transactionSignature]')
@step(u'I submit valid payment information$')
def submit_payment(step):
# First make sure that the page is done if it still executing
# an ajax query.
world.wait_for_ajax_complete()
button_css = 'input[value=Submit]'
world.css_click(button_css)
@step(u'I have submitted face and ID photos$')
def submitted_face_and_id_photos(step):
step.given('I am logged in')
step.given('I select the verified track')
step.given('I go to step "1"')
step.given('I capture my "face" photo')
step.given('I approve my "face" photo')
step.given('I go to step "2"')
step.given('I capture my "photo_id" photo')
step.given('I approve my "photo_id" photo')
step.given('I go to step "3"')
@step(u'I have submitted photos to verify my identity')
def submitted_photos_to_verify_my_identity(step):
step.given('I have submitted face and ID photos')
step.given('I select a contribution amount')
step.given('I confirm that the details match')
step.given('I go to step "4"')
@step(u'I submit my photos and confirm')
def submit_photos_and_confirm(step):
step.given('I go to step "1"')
step.given('I capture my "face" photo')
step.given('I approve my "face" photo')
step.given('I go to step "2"')
step.given('I capture my "photo_id" photo')
step.given('I approve my "photo_id" photo')
step.given('I go to step "3"')
step.given('I select a contribution amount')
step.given('I confirm that the details match')
step.given('I go to step "4"')
@step(u'I see that my payment was successful')
def see_that_my_payment_was_successful(step):
title = world.css_find('div.wrapper-content-main h3.title')
assert_equal(title.text, u'Congratulations! You are now verified on edX.')
@step(u'I navigate to my dashboard')
def navigate_to_my_dashboard(step):
world.css_click('span.avatar')
assert world.css_find('section.my-courses')
@step(u'I see the course on my dashboard')
def see_the_course_on_my_dashboard(step):
course_link_css = 'section.my-courses a[href*="edx/999/Certificates"]'
assert world.is_css_present(course_link_css)
@step(u'I see the upsell link on my dashboard')
def see_upsell_link_on_my_dashboard(step):
course_link_css = UPSELL_LINK_CSS
assert world.is_css_present(course_link_css)
@step(u'I do not see the upsell link on my dashboard')
def see_upsell_link_on_my_dashboard(step):
course_link_css = UPSELL_LINK_CSS
assert world.is_css_not_present(course_link_css)
@step(u'I select the upsell link on my dashboard')
def see_upsell_link_on_my_dashboard(step):
# expand the upsell section
world.css_click('.message-upsell')
course_link_css = UPSELL_LINK_CSS
# click the actual link
world.css_click(course_link_css)
@step(u'I see that I am on the verified track')
def see_that_i_am_on_the_verified_track(step):
id_verified_css = 'li.course-item article.course.verified'
assert world.is_css_present(id_verified_css)
@step(u'I leave the flow and return$')
def leave_the_flow_and_return(step):
world.visit('verify_student/verified/edx/999/Certificates/')
@step(u'I am at the verified page$')
def see_the_payment_page(step):
assert world.css_find('button#pay_button')
@step(u'I edit my name$')
def edit_my_name(step):
btn_css = 'a.retake-photos'
world.css_click(btn_css)
@step(u'I select the honor code option$')
def give_a_reason_why_i_cannot_pay(step):
register()
link_css = 'h5 i.expandable-icon'
world.css_click(link_css)
cb_css = 'input#honor-code'
world.css_click(cb_css)
btn_css = 'input[value="Select Certificate"]'
world.css_click(btn_css)
| at_the_payment_page | identifier_name |
certificates.py | # pylint: disable=C0111
# pylint: disable=W0621
from lettuce import world, step
from lettuce.django import django_url
from course_modes.models import CourseMode
from nose.tools import assert_equal
UPSELL_LINK_CSS = '.message-upsell a.action-upgrade[href*="edx/999/Certificates"]'
def create_cert_course():
world.clear_courses()
org = 'edx'
number = '999'
name = 'Certificates'
course_id = '{org}/{number}/{name}'.format(
org=org, number=number, name=name)
world.scenario_dict['course_id'] = course_id
world.scenario_dict['COURSE'] = world.CourseFactory.create(
org=org, number=number, display_name=name)
audit_mode = world.CourseModeFactory.create(
course_id=course_id,
mode_slug='audit',
mode_display_name='audit course',
min_price=0,
)
assert isinstance(audit_mode, CourseMode)
verfied_mode = world.CourseModeFactory.create(
course_id=course_id,
mode_slug='verified',
mode_display_name='verified cert course',
min_price=16,
suggested_prices='32,64,128',
currency='usd',
)
assert isinstance(verfied_mode, CourseMode)
def register():
url = 'courses/{org}/{number}/{name}/about'.format(
org='edx', number='999', name='Certificates')
world.browser.visit(django_url(url))
world.css_click('section.intro a.register')
assert world.is_css_present('section.wrapper h3.title')
@step(u'the course has an honor mode')
def the_course_has_an_honor_mode(step):
create_cert_course()
honor_mode = world.CourseModeFactory.create(
course_id=world.scenario_dict['course_id'],
mode_slug='honor',
mode_display_name='honor mode',
min_price=0,
)
assert isinstance(honor_mode, CourseMode)
@step(u'I select the audit track$')
def select_the_audit_track(step):
create_cert_course()
register()
btn_css = 'input[value="Select Audit"]'
world.wait(1) # TODO remove this after troubleshooting JZ
world.css_find(btn_css)
world.css_click(btn_css)
def select_contribution(amount=32):
radio_css = 'input[value="{}"]'.format(amount) | assert world.css_find(radio_css).selected
def click_verified_track_button():
world.wait_for_ajax_complete()
btn_css = 'input[value="Select Certificate"]'
world.css_click(btn_css)
@step(u'I select the verified track for upgrade')
def select_verified_track_upgrade(step):
select_contribution(32)
world.wait_for_ajax_complete()
btn_css = 'input[value="Upgrade Your Registration"]'
world.css_click(btn_css)
# TODO: might want to change this depending on the changes for upgrade
assert world.is_css_present('section.progress')
@step(u'I select the verified track$')
def select_the_verified_track(step):
create_cert_course()
register()
select_contribution(32)
click_verified_track_button()
assert world.is_css_present('section.progress')
@step(u'I should see the course on my dashboard$')
def should_see_the_course_on_my_dashboard(step):
course_css = 'li.course-item'
assert world.is_css_present(course_css)
@step(u'I go to step "([^"]*)"$')
def goto_next_step(step, step_num):
btn_css = {
'1': '#face_next_button',
'2': '#face_next_link',
'3': '#photo_id_next_link',
'4': '#pay_button',
}
next_css = {
'1': 'div#wrapper-facephoto.carousel-active',
'2': 'div#wrapper-idphoto.carousel-active',
'3': 'div#wrapper-review.carousel-active',
'4': 'div#wrapper-review.carousel-active',
}
world.css_click(btn_css[step_num])
# Pressing the button will advance the carousel to the next item
# and give the wrapper div the "carousel-active" class
assert world.css_find(next_css[step_num])
@step(u'I capture my "([^"]*)" photo$')
def capture_my_photo(step, name):
# Hard coded red dot image
image_data = 'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAUAAAAFCAYAAACNbyblAAAAHElEQVQI12P4//8/w38GIAXDIBKE0DHxgljNBAAO9TXL0Y4OHwAAAABJRU5ErkJggg=='
snapshot_script = "$('#{}_image')[0].src = '{}';".format(name, image_data)
# Mirror the javascript of the photo_verification.html page
world.browser.execute_script(snapshot_script)
world.browser.execute_script("$('#{}_capture_button').hide();".format(name))
world.browser.execute_script("$('#{}_reset_button').show();".format(name))
world.browser.execute_script("$('#{}_approve_button').show();".format(name))
assert world.css_find('#{}_approve_button'.format(name))
@step(u'I approve my "([^"]*)" photo$')
def approve_my_photo(step, name):
button_css = {
'face': 'div#wrapper-facephoto li.control-approve',
'photo_id': 'div#wrapper-idphoto li.control-approve',
}
wrapper_css = {
'face': 'div#wrapper-facephoto',
'photo_id': 'div#wrapper-idphoto',
}
# Make sure that the carousel is in the right place
assert world.css_has_class(wrapper_css[name], 'carousel-active')
assert world.css_find(button_css[name])
# HACK: for now don't bother clicking the approve button for
# id_photo, because it is sending you back to Step 1.
# Come back and figure it out later. JZ Aug 29 2013
if name=='face':
world.css_click(button_css[name])
# Make sure you didn't advance the carousel
assert world.css_has_class(wrapper_css[name], 'carousel-active')
@step(u'I select a contribution amount$')
def select_contribution_amount(step):
select_contribution(32)
@step(u'I confirm that the details match$')
def confirm_details_match(step):
# First you need to scroll down on the page
# to make the element visible?
# Currently chrome is failing with ElementNotVisibleException
world.browser.execute_script("window.scrollTo(0,1024)")
cb_css = 'input#confirm_pics_good'
world.css_click(cb_css)
assert world.css_find(cb_css).checked
@step(u'I am at the payment page')
def at_the_payment_page(step):
world.wait_for_present('input[name=transactionSignature]')
@step(u'I submit valid payment information$')
def submit_payment(step):
# First make sure that the page is done if it still executing
# an ajax query.
world.wait_for_ajax_complete()
button_css = 'input[value=Submit]'
world.css_click(button_css)
@step(u'I have submitted face and ID photos$')
def submitted_face_and_id_photos(step):
step.given('I am logged in')
step.given('I select the verified track')
step.given('I go to step "1"')
step.given('I capture my "face" photo')
step.given('I approve my "face" photo')
step.given('I go to step "2"')
step.given('I capture my "photo_id" photo')
step.given('I approve my "photo_id" photo')
step.given('I go to step "3"')
@step(u'I have submitted photos to verify my identity')
def submitted_photos_to_verify_my_identity(step):
step.given('I have submitted face and ID photos')
step.given('I select a contribution amount')
step.given('I confirm that the details match')
step.given('I go to step "4"')
@step(u'I submit my photos and confirm')
def submit_photos_and_confirm(step):
step.given('I go to step "1"')
step.given('I capture my "face" photo')
step.given('I approve my "face" photo')
step.given('I go to step "2"')
step.given('I capture my "photo_id" photo')
step.given('I approve my "photo_id" photo')
step.given('I go to step "3"')
step.given('I select a contribution amount')
step.given('I confirm that the details match')
step.given('I go to step "4"')
@step(u'I see that my payment was successful')
def see_that_my_payment_was_successful(step):
title = world.css_find('div.wrapper-content-main h3.title')
assert_equal(title.text, u'Congratulations! You are now verified on edX.')
@step(u'I navigate to my dashboard')
def navigate_to_my_dashboard(step):
world.css_click('span.avatar')
assert world.css_find('section.my-courses')
@step(u'I see the course on my dashboard')
def see_the_course_on_my_dashboard(step):
course_link_css = 'section.my-courses a[href*="edx/999/Certificates"]'
assert world.is_css_present(course_link_css)
@step(u'I see the upsell link on my dashboard')
def see_upsell_link_on_my_dashboard(step):
course_link_css = UPSELL_LINK_CSS
assert world.is_css_present(course_link_css)
@step(u'I do not see the upsell link on my dashboard')
def see_upsell_link_on_my_dashboard(step):
course_link_css = UPSELL_LINK_CSS
assert world.is_css_not_present(course_link_css)
@step(u'I select the upsell link on my dashboard')
def see_upsell_link_on_my_dashboard(step):
# expand the upsell section
world.css_click('.message-upsell')
course_link_css = UPSELL_LINK_CSS
# click the actual link
world.css_click(course_link_css)
@step(u'I see that I am on the verified track')
def see_that_i_am_on_the_verified_track(step):
id_verified_css = 'li.course-item article.course.verified'
assert world.is_css_present(id_verified_css)
@step(u'I leave the flow and return$')
def leave_the_flow_and_return(step):
world.visit('verify_student/verified/edx/999/Certificates/')
@step(u'I am at the verified page$')
def see_the_payment_page(step):
assert world.css_find('button#pay_button')
@step(u'I edit my name$')
def edit_my_name(step):
btn_css = 'a.retake-photos'
world.css_click(btn_css)
@step(u'I select the honor code option$')
def give_a_reason_why_i_cannot_pay(step):
register()
link_css = 'h5 i.expandable-icon'
world.css_click(link_css)
cb_css = 'input#honor-code'
world.css_click(cb_css)
btn_css = 'input[value="Select Certificate"]'
world.css_click(btn_css) | world.css_click(radio_css) | random_line_split |
char.rs | // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Character manipulation (`char` type, Unicode Scalar Value)
//!
//! This module provides the `CharExt` trait, as well as its
//! implementation for the primitive `char` type, in order to allow
//! basic character manipulation.
//!
//! A `char` actually represents a
//! *[Unicode Scalar
//! Value](http://www.unicode.org/glossary/#unicode_scalar_value)*, as it can
//! contain any Unicode code point except high-surrogate and low-surrogate code
//! points.
//!
//! As such, only values in the ranges \[0x0,0xD7FF\] and \[0xE000,0x10FFFF\]
//! (inclusive) are allowed. A `char` can always be safely cast to a `u32`;
//! however the converse is not always true due to the above range limits
//! and, as such, should be performed via the `from_u32` function.
#![stable(feature = "rust1", since = "1.0.0")]
#![doc(primitive = "char")]
use core::char::CharExt as C;
use core::option::Option::{self, Some, None};
use core::iter::Iterator;
use tables::{derived_property, property, general_category, conversions, charwidth};
// stable reexports
pub use core::char::{MAX, from_u32, from_digit, EscapeUnicode, EscapeDefault};
// unstable reexports
#[allow(deprecated)]
pub use normalize::{decompose_canonical, decompose_compatible, compose};
#[allow(deprecated)]
pub use tables::normalization::canonical_combining_class;
pub use tables::UNICODE_VERSION;
/// An iterator over the lowercase mapping of a given character, returned from
/// the [`to_lowercase` method](../primitive.char.html#method.to_lowercase) on
/// characters.
#[stable(feature = "rust1", since = "1.0.0")]
pub struct ToLowercase(CaseMappingIter);
#[stable(feature = "rust1", since = "1.0.0")]
impl Iterator for ToLowercase {
type Item = char;
fn next(&mut self) -> Option<char> { self.0.next() }
}
| #[stable(feature = "rust1", since = "1.0.0")]
pub struct ToUppercase(CaseMappingIter);
#[stable(feature = "rust1", since = "1.0.0")]
impl Iterator for ToUppercase {
type Item = char;
fn next(&mut self) -> Option<char> { self.0.next() }
}
/// An iterator over the titlecase mapping of a given character, returned from
/// the [`to_titlecase` method](../primitive.char.html#method.to_titlecase) on
/// characters.
#[unstable(feature = "unicode", reason = "recently added")]
pub struct ToTitlecase(CaseMappingIter);
#[stable(feature = "unicode_case_mapping", since = "1.2.0")]
impl Iterator for ToTitlecase {
type Item = char;
fn next(&mut self) -> Option<char> { self.0.next() }
}
enum CaseMappingIter {
Three(char, char, char),
Two(char, char),
One(char),
Zero
}
impl CaseMappingIter {
fn new(chars: [char; 3]) -> CaseMappingIter {
if chars[2] == '\0' {
if chars[1] == '\0' {
CaseMappingIter::One(chars[0]) // Including if chars[0] == '\0'
} else {
CaseMappingIter::Two(chars[0], chars[1])
}
} else {
CaseMappingIter::Three(chars[0], chars[1], chars[2])
}
}
}
impl Iterator for CaseMappingIter {
type Item = char;
fn next(&mut self) -> Option<char> {
match *self {
CaseMappingIter::Three(a, b, c) => {
*self = CaseMappingIter::Two(b, c);
Some(a)
}
CaseMappingIter::Two(b, c) => {
*self = CaseMappingIter::One(c);
Some(b)
}
CaseMappingIter::One(c) => {
*self = CaseMappingIter::Zero;
Some(c)
}
CaseMappingIter::Zero => None,
}
}
}
#[stable(feature = "rust1", since = "1.0.0")]
#[lang = "char"]
impl char {
/// Checks if a `char` parses as a numeric digit in the given radix.
///
/// Compared to `is_numeric()`, this function only recognizes the characters
/// `0-9`, `a-z` and `A-Z`.
///
/// # Return value
///
/// Returns `true` if `c` is a valid digit under `radix`, and `false`
/// otherwise.
///
/// # Panics
///
/// Panics if given a radix > 36.
///
/// # Examples
///
/// ```
/// let c = '1';
///
/// assert!(c.is_digit(10));
///
/// assert!('f'.is_digit(16));
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn is_digit(self, radix: u32) -> bool { C::is_digit(self, radix) }
/// Converts a character to the corresponding digit.
///
/// # Return value
///
/// If `c` is between '0' and '9', the corresponding value between 0 and
/// 9. If `c` is 'a' or 'A', 10. If `c` is 'b' or 'B', 11, etc. Returns
/// none if the character does not refer to a digit in the given radix.
///
/// # Panics
///
/// Panics if given a radix outside the range [0..36].
///
/// # Examples
///
/// ```
/// let c = '1';
///
/// assert_eq!(c.to_digit(10), Some(1));
///
/// assert_eq!('f'.to_digit(16), Some(15));
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn to_digit(self, radix: u32) -> Option<u32> { C::to_digit(self, radix) }
/// Returns an iterator that yields the hexadecimal Unicode escape of a
/// character, as `char`s.
///
/// All characters are escaped with Rust syntax of the form `\\u{NNNN}`
/// where `NNNN` is the shortest hexadecimal representation of the code
/// point.
///
/// # Examples
///
/// ```
/// for i in '❤'.escape_unicode() {
/// println!("{}", i);
/// }
/// ```
///
/// This prints:
///
/// ```text
/// \
/// u
/// {
/// 2
/// 7
/// 6
/// 4
/// }
/// ```
///
/// Collecting into a `String`:
///
/// ```
/// let heart: String = '❤'.escape_unicode().collect();
///
/// assert_eq!(heart, r"\u{2764}");
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn escape_unicode(self) -> EscapeUnicode { C::escape_unicode(self) }
/// Returns an iterator that yields the 'default' ASCII and
/// C++11-like literal escape of a character, as `char`s.
///
/// The default is chosen with a bias toward producing literals that are
/// legal in a variety of languages, including C++11 and similar C-family
/// languages. The exact rules are:
///
/// * Tab, CR and LF are escaped as '\t', '\r' and '\n' respectively.
/// * Single-quote, double-quote and backslash chars are backslash-
/// escaped.
/// * Any other chars in the range [0x20,0x7e] are not escaped.
/// * Any other chars are given hex Unicode escapes; see `escape_unicode`.
///
/// # Examples
///
/// ```
/// for i in '"'.escape_default() {
/// println!("{}", i);
/// }
/// ```
///
/// This prints:
///
/// ```text
/// \
/// "
/// ```
///
/// Collecting into a `String`:
///
/// ```
/// let quote: String = '"'.escape_default().collect();
///
/// assert_eq!(quote, "\\\"");
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn escape_default(self) -> EscapeDefault { C::escape_default(self) }
/// Returns the number of bytes this character would need if encoded in
/// UTF-8.
///
/// # Examples
///
/// ```
/// let n = 'ß'.len_utf8();
///
/// assert_eq!(n, 2);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn len_utf8(self) -> usize { C::len_utf8(self) }
/// Returns the number of 16-bit code units this character would need if
/// encoded in UTF-16.
///
/// # Examples
///
/// ```
/// let n = 'ß'.len_utf16();
///
/// assert_eq!(n, 1);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn len_utf16(self) -> usize { C::len_utf16(self) }
/// Encodes this character as UTF-8 into the provided byte buffer, and then
/// returns the number of bytes written.
///
/// If the buffer is not large enough, nothing will be written into it and a
/// `None` will be returned. A buffer of length four is large enough to
/// encode any `char`.
///
/// # Examples
///
/// In both of these examples, 'ß' takes two bytes to encode.
///
/// ```
/// # #![feature(unicode)]
/// let mut b = [0; 2];
///
/// let result = 'ß'.encode_utf8(&mut b);
///
/// assert_eq!(result, Some(2));
/// ```
///
/// A buffer that's too small:
///
/// ```
/// # #![feature(unicode)]
/// let mut b = [0; 1];
///
/// let result = 'ß'.encode_utf8(&mut b);
///
/// assert_eq!(result, None);
/// ```
#[unstable(feature = "unicode",
reason = "pending decision about Iterator/Writer/Reader")]
pub fn encode_utf8(self, dst: &mut [u8]) -> Option<usize> { C::encode_utf8(self, dst) }
/// Encodes this character as UTF-16 into the provided `u16` buffer, and
/// then returns the number of `u16`s written.
///
/// If the buffer is not large enough, nothing will be written into it and a
/// `None` will be returned. A buffer of length 2 is large enough to encode
/// any `char`.
///
/// # Examples
///
/// In both of these examples, 'ß' takes one `u16` to encode.
///
/// ```
/// # #![feature(unicode)]
/// let mut b = [0; 1];
///
/// let result = 'ß'.encode_utf16(&mut b);
///
/// assert_eq!(result, Some(1));
/// ```
///
/// A buffer that's too small:
///
/// ```
/// # #![feature(unicode)]
/// let mut b = [0; 0];
///
/// let result = 'ß'.encode_utf8(&mut b);
///
/// assert_eq!(result, None);
/// ```
#[unstable(feature = "unicode",
reason = "pending decision about Iterator/Writer/Reader")]
pub fn encode_utf16(self, dst: &mut [u16]) -> Option<usize> { C::encode_utf16(self, dst) }
/// Returns whether the specified character is considered a Unicode
/// alphabetic code point.
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn is_alphabetic(self) -> bool {
match self {
'a' ... 'z' | 'A' ... 'Z' => true,
c if c > '\x7f' => derived_property::Alphabetic(c),
_ => false
}
}
/// Returns whether the specified character satisfies the 'XID_Start'
/// Unicode property.
///
/// 'XID_Start' is a Unicode Derived Property specified in
/// [UAX #31](http://unicode.org/reports/tr31/#NFKC_Modifications),
/// mostly similar to ID_Start but modified for closure under NFKx.
#[unstable(feature = "unicode",
reason = "mainly needed for compiler internals")]
#[inline]
pub fn is_xid_start(self) -> bool { derived_property::XID_Start(self) }
/// Returns whether the specified `char` satisfies the 'XID_Continue'
/// Unicode property.
///
/// 'XID_Continue' is a Unicode Derived Property specified in
/// [UAX #31](http://unicode.org/reports/tr31/#NFKC_Modifications),
/// mostly similar to 'ID_Continue' but modified for closure under NFKx.
#[unstable(feature = "unicode",
reason = "mainly needed for compiler internals")]
#[inline]
pub fn is_xid_continue(self) -> bool { derived_property::XID_Continue(self) }
/// Indicates whether a character is in lowercase.
///
/// This is defined according to the terms of the Unicode Derived Core
/// Property `Lowercase`.
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn is_lowercase(self) -> bool {
match self {
'a' ... 'z' => true,
c if c > '\x7f' => derived_property::Lowercase(c),
_ => false
}
}
/// Indicates whether a character is in uppercase.
///
/// This is defined according to the terms of the Unicode Derived Core
/// Property `Uppercase`.
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn is_uppercase(self) -> bool {
match self {
'A' ... 'Z' => true,
c if c > '\x7f' => derived_property::Uppercase(c),
_ => false
}
}
/// Indicates whether a character is whitespace.
///
/// Whitespace is defined in terms of the Unicode Property `White_Space`.
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn is_whitespace(self) -> bool {
match self {
' ' | '\x09' ... '\x0d' => true,
c if c > '\x7f' => property::White_Space(c),
_ => false
}
}
/// Indicates whether a character is alphanumeric.
///
/// Alphanumericness is defined in terms of the Unicode General Categories
/// 'Nd', 'Nl', 'No' and the Derived Core Property 'Alphabetic'.
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn is_alphanumeric(self) -> bool {
self.is_alphabetic() || self.is_numeric()
}
/// Indicates whether a character is a control code point.
///
/// Control code points are defined in terms of the Unicode General
/// Category `Cc`.
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn is_control(self) -> bool { general_category::Cc(self) }
/// Indicates whether the character is numeric (Nd, Nl, or No).
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn is_numeric(self) -> bool {
match self {
'0' ... '9' => true,
c if c > '\x7f' => general_category::N(c),
_ => false
}
}
/// Converts a character to its lowercase equivalent.
///
/// This performs complex unconditional mappings with no tailoring.
/// See `to_uppercase()` for references and more information.
///
/// # Return value
///
/// Returns an iterator which yields the characters corresponding to the
/// lowercase equivalent of the character. If no conversion is possible then
/// an iterator with just the input character is returned.
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn to_lowercase(self) -> ToLowercase {
ToLowercase(CaseMappingIter::new(conversions::to_lower(self)))
}
/// Converts a character to its titlecase equivalent.
///
/// This performs complex unconditional mappings with no tailoring.
/// See `to_uppercase()` for references and more information.
///
/// This differs from `to_uppercase()` since Unicode contains
/// digraphs and ligature characters.
/// For example, U+01F3 “dz” and U+FB01 “fi”
/// map to U+01F1 “DZ” and U+0046 U+0069 “Fi”, respectively.
///
/// # Return value
///
/// Returns an iterator which yields the characters corresponding to the
/// lowercase equivalent of the character. If no conversion is possible then
/// an iterator with just the input character is returned.
#[unstable(feature = "unicode", reason = "recently added")]
#[inline]
pub fn to_titlecase(self) -> ToTitlecase {
ToTitlecase(CaseMappingIter::new(conversions::to_title(self)))
}
/// Converts a character to its uppercase equivalent.
///
/// This performs complex unconditional mappings with no tailoring:
/// it maps one Unicode character to its uppercase equivalent
/// according to the Unicode database [1]
/// and the additional complex mappings [`SpecialCasing.txt`].
/// Conditional mappings (based on context or language) are not considerd here.
///
/// A full reference can be found here [2].
///
/// # Return value
///
/// Returns an iterator which yields the characters corresponding to the
/// uppercase equivalent of the character. If no conversion is possible then
/// an iterator with just the input character is returned.
///
/// [1]: ftp://ftp.unicode.org/Public/UNIDATA/UnicodeData.txt
///
/// [`SpecialCasing.txt`]: ftp://ftp.unicode.org/Public/UNIDATA/SpecialCasing.txt
///
/// [2]: http://www.unicode.org/versions/Unicode7.0.0/ch03.pdf#G33992
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn to_uppercase(self) -> ToUppercase {
ToUppercase(CaseMappingIter::new(conversions::to_upper(self)))
}
/// Returns this character's displayed width in columns, or `None` if it is a
/// control character other than `'\x00'`.
///
/// `is_cjk` determines behavior for characters in the Ambiguous category:
/// if `is_cjk` is `true`, these are 2 columns wide; otherwise, they are 1.
/// In CJK contexts, `is_cjk` should be `true`, else it should be `false`.
/// [Unicode Standard Annex #11](http://www.unicode.org/reports/tr11/)
/// recommends that these characters be treated as 1 column (i.e.,
/// `is_cjk` = `false`) if the context cannot be reliably determined.
#[deprecated(reason = "use the crates.io `unicode-width` library instead",
since = "1.0.0")]
#[unstable(feature = "unicode",
reason = "needs expert opinion. is_cjk flag stands out as ugly")]
pub fn width(self, is_cjk: bool) -> Option<usize> { charwidth::width(self, is_cjk) }
} | /// An iterator over the uppercase mapping of a given character, returned from
/// the [`to_uppercase` method](../primitive.char.html#method.to_uppercase) on
/// characters. | random_line_split |
char.rs | // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Character manipulation (`char` type, Unicode Scalar Value)
//!
//! This module provides the `CharExt` trait, as well as its
//! implementation for the primitive `char` type, in order to allow
//! basic character manipulation.
//!
//! A `char` actually represents a
//! *[Unicode Scalar
//! Value](http://www.unicode.org/glossary/#unicode_scalar_value)*, as it can
//! contain any Unicode code point except high-surrogate and low-surrogate code
//! points.
//!
//! As such, only values in the ranges \[0x0,0xD7FF\] and \[0xE000,0x10FFFF\]
//! (inclusive) are allowed. A `char` can always be safely cast to a `u32`;
//! however the converse is not always true due to the above range limits
//! and, as such, should be performed via the `from_u32` function.
#![stable(feature = "rust1", since = "1.0.0")]
#![doc(primitive = "char")]
use core::char::CharExt as C;
use core::option::Option::{self, Some, None};
use core::iter::Iterator;
use tables::{derived_property, property, general_category, conversions, charwidth};
// stable reexports
pub use core::char::{MAX, from_u32, from_digit, EscapeUnicode, EscapeDefault};
// unstable reexports
#[allow(deprecated)]
pub use normalize::{decompose_canonical, decompose_compatible, compose};
#[allow(deprecated)]
pub use tables::normalization::canonical_combining_class;
pub use tables::UNICODE_VERSION;
/// An iterator over the lowercase mapping of a given character, returned from
/// the [`to_lowercase` method](../primitive.char.html#method.to_lowercase) on
/// characters.
#[stable(feature = "rust1", since = "1.0.0")]
pub struct ToLowercase(CaseMappingIter);
#[stable(feature = "rust1", since = "1.0.0")]
impl Iterator for ToLowercase {
type Item = char;
fn next(&mut self) -> Option<char> { self.0.next() }
}
/// An iterator over the uppercase mapping of a given character, returned from
/// the [`to_uppercase` method](../primitive.char.html#method.to_uppercase) on
/// characters.
#[stable(feature = "rust1", since = "1.0.0")]
pub struct ToUppercase(CaseMappingIter);
#[stable(feature = "rust1", since = "1.0.0")]
impl Iterator for ToUppercase {
type Item = char;
fn next(&mut self) -> Option<char> { self.0.next() }
}
/// An iterator over the titlecase mapping of a given character, returned from
/// the [`to_titlecase` method](../primitive.char.html#method.to_titlecase) on
/// characters.
#[unstable(feature = "unicode", reason = "recently added")]
pub struct ToTitlecase(CaseMappingIter);
#[stable(feature = "unicode_case_mapping", since = "1.2.0")]
impl Iterator for ToTitlecase {
type Item = char;
fn next(&mut self) -> Option<char> { self.0.next() }
}
enum CaseMappingIter {
Three(char, char, char),
Two(char, char),
One(char),
Zero
}
impl CaseMappingIter {
fn new(chars: [char; 3]) -> CaseMappingIter {
if chars[2] == '\0' {
if chars[1] == '\0' {
CaseMappingIter::One(chars[0]) // Including if chars[0] == '\0'
} else {
CaseMappingIter::Two(chars[0], chars[1])
}
} else {
CaseMappingIter::Three(chars[0], chars[1], chars[2])
}
}
}
impl Iterator for CaseMappingIter {
type Item = char;
fn next(&mut self) -> Option<char> {
match *self {
CaseMappingIter::Three(a, b, c) => {
*self = CaseMappingIter::Two(b, c);
Some(a)
}
CaseMappingIter::Two(b, c) => {
*self = CaseMappingIter::One(c);
Some(b)
}
CaseMappingIter::One(c) => {
*self = CaseMappingIter::Zero;
Some(c)
}
CaseMappingIter::Zero => None,
}
}
}
#[stable(feature = "rust1", since = "1.0.0")]
#[lang = "char"]
impl char {
/// Checks if a `char` parses as a numeric digit in the given radix.
///
/// Compared to `is_numeric()`, this function only recognizes the characters
/// `0-9`, `a-z` and `A-Z`.
///
/// # Return value
///
/// Returns `true` if `c` is a valid digit under `radix`, and `false`
/// otherwise.
///
/// # Panics
///
/// Panics if given a radix > 36.
///
/// # Examples
///
/// ```
/// let c = '1';
///
/// assert!(c.is_digit(10));
///
/// assert!('f'.is_digit(16));
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn is_digit(self, radix: u32) -> bool { C::is_digit(self, radix) }
/// Converts a character to the corresponding digit.
///
/// # Return value
///
/// If `c` is between '0' and '9', the corresponding value between 0 and
/// 9. If `c` is 'a' or 'A', 10. If `c` is 'b' or 'B', 11, etc. Returns
/// none if the character does not refer to a digit in the given radix.
///
/// # Panics
///
/// Panics if given a radix outside the range [0..36].
///
/// # Examples
///
/// ```
/// let c = '1';
///
/// assert_eq!(c.to_digit(10), Some(1));
///
/// assert_eq!('f'.to_digit(16), Some(15));
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn to_digit(self, radix: u32) -> Option<u32> { C::to_digit(self, radix) }
/// Returns an iterator that yields the hexadecimal Unicode escape of a
/// character, as `char`s.
///
/// All characters are escaped with Rust syntax of the form `\\u{NNNN}`
/// where `NNNN` is the shortest hexadecimal representation of the code
/// point.
///
/// # Examples
///
/// ```
/// for i in '❤'.escape_unicode() {
/// println!("{}", i);
/// }
/// ```
///
/// This prints:
///
/// ```text
/// \
/// u
/// {
/// 2
/// 7
/// 6
/// 4
/// }
/// ```
///
/// Collecting into a `String`:
///
/// ```
/// let heart: String = '❤'.escape_unicode().collect();
///
/// assert_eq!(heart, r"\u{2764}");
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn escape_unicode(self) -> EscapeUnicode { C::escape_unicode(self) }
/// Returns an iterator that yields the 'default' ASCII and
/// C++11-like literal escape of a character, as `char`s.
///
/// The default is chosen with a bias toward producing literals that are
/// legal in a variety of languages, including C++11 and similar C-family
/// languages. The exact rules are:
///
/// * Tab, CR and LF are escaped as '\t', '\r' and '\n' respectively.
/// * Single-quote, double-quote and backslash chars are backslash-
/// escaped.
/// * Any other chars in the range [0x20,0x7e] are not escaped.
/// * Any other chars are given hex Unicode escapes; see `escape_unicode`.
///
/// # Examples
///
/// ```
/// for i in '"'.escape_default() {
/// println!("{}", i);
/// }
/// ```
///
/// This prints:
///
/// ```text
/// \
/// "
/// ```
///
/// Collecting into a `String`:
///
/// ```
/// let quote: String = '"'.escape_default().collect();
///
/// assert_eq!(quote, "\\\"");
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn escape_default(self) -> EscapeDefault { C::escape_default(self) }
/// Returns the number of bytes this character would need if encoded in
/// UTF-8.
///
/// # Examples
///
/// ```
/// let n = 'ß'.len_utf8();
///
/// assert_eq!(n, 2);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn len_utf8(self) -> usize { C::len_utf8(self) }
/// Returns the number of 16-bit code units this character would need if
/// encoded in UTF-16.
///
/// # Examples
///
/// ```
/// let n = 'ß'.len_utf16();
///
/// assert_eq!(n, 1);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn len_utf16(self) -> usize { C::len_utf16(self) }
/// Encodes this character as UTF-8 into the provided byte buffer, and then
/// returns the number of bytes written.
///
/// If the buffer is not large enough, nothing will be written into it and a
/// `None` will be returned. A buffer of length four is large enough to
/// encode any `char`.
///
/// # Examples
///
/// In both of these examples, 'ß' takes two bytes to encode.
///
/// ```
/// # #![feature(unicode)]
/// let mut b = [0; 2];
///
/// let result = 'ß'.encode_utf8(&mut b);
///
/// assert_eq!(result, Some(2));
/// ```
///
/// A buffer that's too small:
///
/// ```
/// # #![feature(unicode)]
/// let mut b = [0; 1];
///
/// let result = 'ß'.encode_utf8(&mut b);
///
/// assert_eq!(result, None);
/// ```
#[unstable(feature = "unicode",
reason = "pending decision about Iterator/Writer/Reader")]
pub fn encode_utf8(self, dst: &mut [u8]) -> Option<usize> { C::encode_utf8(self, dst) }
/// Encodes this character as UTF-16 into the provided `u16` buffer, and
/// then returns the number of `u16`s written.
///
/// If the buffer is not large enough, nothing will be written into it and a
/// `None` will be returned. A buffer of length 2 is large enough to encode
/// any `char`.
///
/// # Examples
///
/// In both of these examples, 'ß' takes one `u16` to encode.
///
/// ```
/// # #![feature(unicode)]
/// let mut b = [0; 1];
///
/// let result = 'ß'.encode_utf16(&mut b);
///
/// assert_eq!(result, Some(1));
/// ```
///
/// A buffer that's too small:
///
/// ```
/// # #![feature(unicode)]
/// let mut b = [0; 0];
///
/// let result = 'ß'.encode_utf8(&mut b);
///
/// assert_eq!(result, None);
/// ```
#[unstable(feature = "unicode",
reason = "pending decision about Iterator/Writer/Reader")]
pub fn encode_utf16(self, dst: &mut [u16]) -> Option<usize> { C::encode_utf16(self, dst) }
/// Returns whether the specified character is considered a Unicode
/// alphabetic code point.
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn is_alphabetic(self) -> bool {
match self {
'a' ... 'z' | 'A' ... 'Z' => true,
c if c > '\x7f' => derived_property::Alphabetic(c),
_ => false
}
}
/// Returns whether the specified character satisfies the 'XID_Start'
/// Unicode property.
///
/// 'XID_Start' is a Unicode Derived Property specified in
/// [UAX #31](http://unicode.org/reports/tr31/#NFKC_Modifications),
/// mostly similar to ID_Start but modified for closure under NFKx.
#[unstable(feature = "unicode",
reason = "mainly needed for compiler internals")]
#[inline]
pub fn is_xid_start(self) -> bool { derived_property::XID_Start(self) }
/// Returns whether the specified `char` satisfies the 'XID_Continue'
/// Unicode property.
///
/// 'XID_Continue' is a Unicode Derived Property specified in
/// [UAX #31](http://unicode.org/reports/tr31/#NFKC_Modifications),
/// mostly similar to 'ID_Continue' but modified for closure under NFKx.
#[unstable(feature = "unicode",
reason = "mainly needed for compiler internals")]
#[inline]
pub fn is_xid_continue(self) -> bool { derived_property::XID_Continue(self) }
/// Indicates whether a character is in lowercase.
///
/// This is defined according to the terms of the Unicode Derived Core
/// Property `Lowercase`.
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn is_lowercase(self) -> bool {
match self {
'a' ... 'z' => true,
c if c > '\x7f' => derived_property::Lowercase(c),
_ => false
}
}
/// Indicates whether a character is in uppercase.
///
/// This is defined according to the terms of the Unicode Derived Core
/// Property `Uppercase`.
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn is_uppercase(self) -> bool {
match self {
'A' ... 'Z' => true,
c if c > '\x7f' => derived_property::Uppercase(c),
_ => false
}
}
/// Indicates whether a character is whitespace.
///
/// Whitespace is defined in terms of the Unicode Property `White_Space`.
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn is_whitespace(self) -> bool {
match self {
' ' | '\x09' ... '\x0d' => true,
c if c > '\x7f' => property::White_Space(c),
_ => false
}
}
/// Indicates whether a character is alphanumeric.
///
/// Alphanumericness is defined in terms of the Unicode General Categories
/// 'Nd', 'Nl', 'No' and the Derived Core Property 'Alphabetic'.
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn is_alphanumeric(self) -> bool {
self.is_alphabetic() || self.is_numeric()
}
/// Indicates whether a character is a control code point.
///
/// Control code points are defined in terms of the Unicode General
/// Category `Cc`.
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn is_control(self) -> bool { general_category::Cc(self) }
/// Indicates whether the character is numeric (Nd, Nl, or No).
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn is_numeric(self) -> bool {
match self {
'0' ... '9' => true,
c if c > '\x7f' => general_category::N(c),
_ => false
}
}
/// Converts a character to its lowercase equivalent.
///
/// This performs complex unconditional mappings with no tailoring.
/// See `to_uppercase()` for references and more information.
///
/// # Return value
///
/// Returns an iterator which yields the characters corresponding to the
/// lowercase equivalent of the character. If no conversion is possible then
/// an iterator with just the input character is returned.
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn to_lowercase(self) -> ToLowercase {
ToLowercase(CaseMappingIter::new(conversions::to_lower(self)))
}
/// Converts a character to its titlecase equivalent.
///
/// This performs complex unconditional mappings with no tailoring.
/// See `to_uppercase()` for references and more information.
///
/// This differs from `to_uppercase()` since Unicode contains
/// digraphs and ligature characters.
/// For example, U+01F3 “dz” and U+FB01 “fi”
/// map to U+01F1 “DZ” and U+0046 U+0069 “Fi”, respectively.
///
/// # Return value
///
/// Returns an iterator which yields the characters corresponding to the
/// lowercase equivalent of the character. If no conversion is possible then
/// an iterator with just the input character is returned.
#[unstable(feature = "unicode", reason = "recently added")]
#[inline]
pub fn to_titlecase(self) -> ToTitlecase {
ToTitlecase(CaseMappingIter::new(conversions::to_title(self)))
}
/// Converts a character to its uppercase equivalent.
///
/// This performs complex unconditional mappings with no tailoring:
/// it maps one Unicode character to its uppercase equivalent
/// according to the Unicode database [1]
/// and the additional complex mappings [`SpecialCasing.txt`].
/// Conditional mappings (based on context or language) are not considerd here.
///
/// A full reference can be found here [2].
///
/// # Return value
///
/// Returns an iterator which yields the characters corresponding to the
/// uppercase equivalent of the character. If no conversion is possible then
/// an iterator with just the input character is returned.
///
/// [1]: ftp://ftp.unicode.org/Public/UNIDATA/UnicodeData.txt
///
/// [`SpecialCasing.txt`]: ftp://ftp.unicode.org/Public/UNIDATA/SpecialCasing.txt
///
/// [2]: http://www.unicode.org/versions/Unicode7.0.0/ch03.pdf#G33992
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn to_uppercase(self) -> ToUppercase {
ToUppercase(CaseMappin | 's displayed width in columns, or `None` if it is a
/// control character other than `'\x00'`.
///
/// `is_cjk` determines behavior for characters in the Ambiguous category:
/// if `is_cjk` is `true`, these are 2 columns wide; otherwise, they are 1.
/// In CJK contexts, `is_cjk` should be `true`, else it should be `false`.
/// [Unicode Standard Annex #11](http://www.unicode.org/reports/tr11/)
/// recommends that these characters be treated as 1 column (i.e.,
/// `is_cjk` = `false`) if the context cannot be reliably determined.
#[deprecated(reason = "use the crates.io `unicode-width` library instead",
since = "1.0.0")]
#[unstable(feature = "unicode",
reason = "needs expert opinion. is_cjk flag stands out as ugly")]
pub fn width(self, is_cjk: bool) -> Option<usize> { charwidth::width(self, is_cjk) }
}
| gIter::new(conversions::to_upper(self)))
}
/// Returns this character | identifier_body |
char.rs | // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Character manipulation (`char` type, Unicode Scalar Value)
//!
//! This module provides the `CharExt` trait, as well as its
//! implementation for the primitive `char` type, in order to allow
//! basic character manipulation.
//!
//! A `char` actually represents a
//! *[Unicode Scalar
//! Value](http://www.unicode.org/glossary/#unicode_scalar_value)*, as it can
//! contain any Unicode code point except high-surrogate and low-surrogate code
//! points.
//!
//! As such, only values in the ranges \[0x0,0xD7FF\] and \[0xE000,0x10FFFF\]
//! (inclusive) are allowed. A `char` can always be safely cast to a `u32`;
//! however the converse is not always true due to the above range limits
//! and, as such, should be performed via the `from_u32` function.
#![stable(feature = "rust1", since = "1.0.0")]
#![doc(primitive = "char")]
use core::char::CharExt as C;
use core::option::Option::{self, Some, None};
use core::iter::Iterator;
use tables::{derived_property, property, general_category, conversions, charwidth};
// stable reexports
pub use core::char::{MAX, from_u32, from_digit, EscapeUnicode, EscapeDefault};
// unstable reexports
#[allow(deprecated)]
pub use normalize::{decompose_canonical, decompose_compatible, compose};
#[allow(deprecated)]
pub use tables::normalization::canonical_combining_class;
pub use tables::UNICODE_VERSION;
/// An iterator over the lowercase mapping of a given character, returned from
/// the [`to_lowercase` method](../primitive.char.html#method.to_lowercase) on
/// characters.
#[stable(feature = "rust1", since = "1.0.0")]
pub struct ToLowercase(CaseMappingIter);
#[stable(feature = "rust1", since = "1.0.0")]
impl Iterator for ToLowercase {
type Item = char;
fn next(&mut self) -> Option<char> { self.0.next() }
}
/// An iterator over the uppercase mapping of a given character, returned from
/// the [`to_uppercase` method](../primitive.char.html#method.to_uppercase) on
/// characters.
#[stable(feature = "rust1", since = "1.0.0")]
pub struct ToUppercase(CaseMappingIter);
#[stable(feature = "rust1", since = "1.0.0")]
impl Iterator for ToUppercase {
type Item = char;
fn next(&mut self) -> Option<char> { self.0.next() }
}
/// An iterator over the titlecase mapping of a given character, returned from
/// the [`to_titlecase` method](../primitive.char.html#method.to_titlecase) on
/// characters.
#[unstable(feature = "unicode", reason = "recently added")]
pub struct ToTitlecase(CaseMappingIter);
#[stable(feature = "unicode_case_mapping", since = "1.2.0")]
impl Iterator for ToTitlecase {
type Item = char;
fn next(&mut self) -> Option<char> { self.0.next() }
}
enum CaseMappingIter {
Three(char, char, char),
Two(char, char),
One(char),
Zero
}
impl CaseMappingIter {
fn new(chars: [char; 3]) -> CaseMappingIter {
if chars[2] == '\0' {
if chars[1] == '\0' {
CaseMappingIter::One(chars[0]) // Including if chars[0] == '\0'
} else {
CaseMappingIter::Two(chars[0], chars[1])
}
} else {
CaseMappingIter::Three(chars[0], chars[1], chars[2])
}
}
}
impl Iterator for CaseMappingIter {
type Item = char;
fn next(&mut self) -> Option<char> {
match *self {
CaseMappingIter::Three(a, b, c) => {
*self = CaseMappingIter::Two(b, c);
Some(a)
}
CaseMappingIter::Two(b, c) => {
*self = CaseMappingIter::One(c);
Some(b)
}
CaseMappingIter::One(c) => {
*self = CaseMappingIter::Zero;
Some(c)
}
CaseMappingIter::Zero => None,
}
}
}
#[stable(feature = "rust1", since = "1.0.0")]
#[lang = "char"]
impl char {
/// Checks if a `char` parses as a numeric digit in the given radix.
///
/// Compared to `is_numeric()`, this function only recognizes the characters
/// `0-9`, `a-z` and `A-Z`.
///
/// # Return value
///
/// Returns `true` if `c` is a valid digit under `radix`, and `false`
/// otherwise.
///
/// # Panics
///
/// Panics if given a radix > 36.
///
/// # Examples
///
/// ```
/// let c = '1';
///
/// assert!(c.is_digit(10));
///
/// assert!('f'.is_digit(16));
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn is_digit(self, radix: u32) -> bool { C::is_digit(self, radix) }
/// Converts a character to the corresponding digit.
///
/// # Return value
///
/// If `c` is between '0' and '9', the corresponding value between 0 and
/// 9. If `c` is 'a' or 'A', 10. If `c` is 'b' or 'B', 11, etc. Returns
/// none if the character does not refer to a digit in the given radix.
///
/// # Panics
///
/// Panics if given a radix outside the range [0..36].
///
/// # Examples
///
/// ```
/// let c = '1';
///
/// assert_eq!(c.to_digit(10), Some(1));
///
/// assert_eq!('f'.to_digit(16), Some(15));
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn to_digit(self, radix: u32) -> Option<u32> { C::to_digit(self, radix) }
/// Returns an iterator that yields the hexadecimal Unicode escape of a
/// character, as `char`s.
///
/// All characters are escaped with Rust syntax of the form `\\u{NNNN}`
/// where `NNNN` is the shortest hexadecimal representation of the code
/// point.
///
/// # Examples
///
/// ```
/// for i in '❤'.escape_unicode() {
/// println!("{}", i);
/// }
/// ```
///
/// This prints:
///
/// ```text
/// \
/// u
/// {
/// 2
/// 7
/// 6
/// 4
/// }
/// ```
///
/// Collecting into a `String`:
///
/// ```
/// let heart: String = '❤'.escape_unicode().collect();
///
/// assert_eq!(heart, r"\u{2764}");
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn escape_unicode(self) -> EscapeUnicode { C::escape_unicode(self) }
/// Returns an iterator that yields the 'default' ASCII and
/// C++11-like literal escape of a character, as `char`s.
///
/// The default is chosen with a bias toward producing literals that are
/// legal in a variety of languages, including C++11 and similar C-family
/// languages. The exact rules are:
///
/// * Tab, CR and LF are escaped as '\t', '\r' and '\n' respectively.
/// * Single-quote, double-quote and backslash chars are backslash-
/// escaped.
/// * Any other chars in the range [0x20,0x7e] are not escaped.
/// * Any other chars are given hex Unicode escapes; see `escape_unicode`.
///
/// # Examples
///
/// ```
/// for i in '"'.escape_default() {
/// println!("{}", i);
/// }
/// ```
///
/// This prints:
///
/// ```text
/// \
/// "
/// ```
///
/// Collecting into a `String`:
///
/// ```
/// let quote: String = '"'.escape_default().collect();
///
/// assert_eq!(quote, "\\\"");
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn esca | f) -> EscapeDefault { C::escape_default(self) }
/// Returns the number of bytes this character would need if encoded in
/// UTF-8.
///
/// # Examples
///
/// ```
/// let n = 'ß'.len_utf8();
///
/// assert_eq!(n, 2);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn len_utf8(self) -> usize { C::len_utf8(self) }
/// Returns the number of 16-bit code units this character would need if
/// encoded in UTF-16.
///
/// # Examples
///
/// ```
/// let n = 'ß'.len_utf16();
///
/// assert_eq!(n, 1);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn len_utf16(self) -> usize { C::len_utf16(self) }
/// Encodes this character as UTF-8 into the provided byte buffer, and then
/// returns the number of bytes written.
///
/// If the buffer is not large enough, nothing will be written into it and a
/// `None` will be returned. A buffer of length four is large enough to
/// encode any `char`.
///
/// # Examples
///
/// In both of these examples, 'ß' takes two bytes to encode.
///
/// ```
/// # #![feature(unicode)]
/// let mut b = [0; 2];
///
/// let result = 'ß'.encode_utf8(&mut b);
///
/// assert_eq!(result, Some(2));
/// ```
///
/// A buffer that's too small:
///
/// ```
/// # #![feature(unicode)]
/// let mut b = [0; 1];
///
/// let result = 'ß'.encode_utf8(&mut b);
///
/// assert_eq!(result, None);
/// ```
#[unstable(feature = "unicode",
reason = "pending decision about Iterator/Writer/Reader")]
pub fn encode_utf8(self, dst: &mut [u8]) -> Option<usize> { C::encode_utf8(self, dst) }
/// Encodes this character as UTF-16 into the provided `u16` buffer, and
/// then returns the number of `u16`s written.
///
/// If the buffer is not large enough, nothing will be written into it and a
/// `None` will be returned. A buffer of length 2 is large enough to encode
/// any `char`.
///
/// # Examples
///
/// In both of these examples, 'ß' takes one `u16` to encode.
///
/// ```
/// # #![feature(unicode)]
/// let mut b = [0; 1];
///
/// let result = 'ß'.encode_utf16(&mut b);
///
/// assert_eq!(result, Some(1));
/// ```
///
/// A buffer that's too small:
///
/// ```
/// # #![feature(unicode)]
/// let mut b = [0; 0];
///
/// let result = 'ß'.encode_utf8(&mut b);
///
/// assert_eq!(result, None);
/// ```
#[unstable(feature = "unicode",
reason = "pending decision about Iterator/Writer/Reader")]
pub fn encode_utf16(self, dst: &mut [u16]) -> Option<usize> { C::encode_utf16(self, dst) }
/// Returns whether the specified character is considered a Unicode
/// alphabetic code point.
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn is_alphabetic(self) -> bool {
match self {
'a' ... 'z' | 'A' ... 'Z' => true,
c if c > '\x7f' => derived_property::Alphabetic(c),
_ => false
}
}
/// Returns whether the specified character satisfies the 'XID_Start'
/// Unicode property.
///
/// 'XID_Start' is a Unicode Derived Property specified in
/// [UAX #31](http://unicode.org/reports/tr31/#NFKC_Modifications),
/// mostly similar to ID_Start but modified for closure under NFKx.
#[unstable(feature = "unicode",
reason = "mainly needed for compiler internals")]
#[inline]
pub fn is_xid_start(self) -> bool { derived_property::XID_Start(self) }
/// Returns whether the specified `char` satisfies the 'XID_Continue'
/// Unicode property.
///
/// 'XID_Continue' is a Unicode Derived Property specified in
/// [UAX #31](http://unicode.org/reports/tr31/#NFKC_Modifications),
/// mostly similar to 'ID_Continue' but modified for closure under NFKx.
#[unstable(feature = "unicode",
reason = "mainly needed for compiler internals")]
#[inline]
pub fn is_xid_continue(self) -> bool { derived_property::XID_Continue(self) }
/// Indicates whether a character is in lowercase.
///
/// This is defined according to the terms of the Unicode Derived Core
/// Property `Lowercase`.
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn is_lowercase(self) -> bool {
match self {
'a' ... 'z' => true,
c if c > '\x7f' => derived_property::Lowercase(c),
_ => false
}
}
/// Indicates whether a character is in uppercase.
///
/// This is defined according to the terms of the Unicode Derived Core
/// Property `Uppercase`.
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn is_uppercase(self) -> bool {
match self {
'A' ... 'Z' => true,
c if c > '\x7f' => derived_property::Uppercase(c),
_ => false
}
}
/// Indicates whether a character is whitespace.
///
/// Whitespace is defined in terms of the Unicode Property `White_Space`.
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn is_whitespace(self) -> bool {
match self {
' ' | '\x09' ... '\x0d' => true,
c if c > '\x7f' => property::White_Space(c),
_ => false
}
}
/// Indicates whether a character is alphanumeric.
///
/// Alphanumericness is defined in terms of the Unicode General Categories
/// 'Nd', 'Nl', 'No' and the Derived Core Property 'Alphabetic'.
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn is_alphanumeric(self) -> bool {
self.is_alphabetic() || self.is_numeric()
}
/// Indicates whether a character is a control code point.
///
/// Control code points are defined in terms of the Unicode General
/// Category `Cc`.
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn is_control(self) -> bool { general_category::Cc(self) }
/// Indicates whether the character is numeric (Nd, Nl, or No).
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn is_numeric(self) -> bool {
match self {
'0' ... '9' => true,
c if c > '\x7f' => general_category::N(c),
_ => false
}
}
/// Converts a character to its lowercase equivalent.
///
/// This performs complex unconditional mappings with no tailoring.
/// See `to_uppercase()` for references and more information.
///
/// # Return value
///
/// Returns an iterator which yields the characters corresponding to the
/// lowercase equivalent of the character. If no conversion is possible then
/// an iterator with just the input character is returned.
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn to_lowercase(self) -> ToLowercase {
ToLowercase(CaseMappingIter::new(conversions::to_lower(self)))
}
/// Converts a character to its titlecase equivalent.
///
/// This performs complex unconditional mappings with no tailoring.
/// See `to_uppercase()` for references and more information.
///
/// This differs from `to_uppercase()` since Unicode contains
/// digraphs and ligature characters.
/// For example, U+01F3 “dz” and U+FB01 “fi”
/// map to U+01F1 “DZ” and U+0046 U+0069 “Fi”, respectively.
///
/// # Return value
///
/// Returns an iterator which yields the characters corresponding to the
/// lowercase equivalent of the character. If no conversion is possible then
/// an iterator with just the input character is returned.
#[unstable(feature = "unicode", reason = "recently added")]
#[inline]
pub fn to_titlecase(self) -> ToTitlecase {
ToTitlecase(CaseMappingIter::new(conversions::to_title(self)))
}
/// Converts a character to its uppercase equivalent.
///
/// This performs complex unconditional mappings with no tailoring:
/// it maps one Unicode character to its uppercase equivalent
/// according to the Unicode database [1]
/// and the additional complex mappings [`SpecialCasing.txt`].
/// Conditional mappings (based on context or language) are not considerd here.
///
/// A full reference can be found here [2].
///
/// # Return value
///
/// Returns an iterator which yields the characters corresponding to the
/// uppercase equivalent of the character. If no conversion is possible then
/// an iterator with just the input character is returned.
///
/// [1]: ftp://ftp.unicode.org/Public/UNIDATA/UnicodeData.txt
///
/// [`SpecialCasing.txt`]: ftp://ftp.unicode.org/Public/UNIDATA/SpecialCasing.txt
///
/// [2]: http://www.unicode.org/versions/Unicode7.0.0/ch03.pdf#G33992
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn to_uppercase(self) -> ToUppercase {
ToUppercase(CaseMappingIter::new(conversions::to_upper(self)))
}
/// Returns this character's displayed width in columns, or `None` if it is a
/// control character other than `'\x00'`.
///
/// `is_cjk` determines behavior for characters in the Ambiguous category:
/// if `is_cjk` is `true`, these are 2 columns wide; otherwise, they are 1.
/// In CJK contexts, `is_cjk` should be `true`, else it should be `false`.
/// [Unicode Standard Annex #11](http://www.unicode.org/reports/tr11/)
/// recommends that these characters be treated as 1 column (i.e.,
/// `is_cjk` = `false`) if the context cannot be reliably determined.
#[deprecated(reason = "use the crates.io `unicode-width` library instead",
since = "1.0.0")]
#[unstable(feature = "unicode",
reason = "needs expert opinion. is_cjk flag stands out as ugly")]
pub fn width(self, is_cjk: bool) -> Option<usize> { charwidth::width(self, is_cjk) }
}
| pe_default(sel | identifier_name |
basename.rs | #![crate_name = "uu_basename"]
/*
* This file is part of the uutils coreutils package.
*
* (c) Jimmy Lu <jimmy.lu.2011@gmail.com>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
#[macro_use]
extern crate uucore;
use std::io::Write;
use std::path::{is_separator, PathBuf};
static NAME: &'static str = "basename";
static SYNTAX: &'static str = "NAME [SUFFIX]";
static SUMMARY: &'static str = "Print NAME with any leading directory components removed
If specified, also remove a trailing SUFFIX";
static LONG_HELP: &'static str = "";
pub fn uumain(args: Vec<String>) -> i32 {
//
// Argument parsing
//
let matches = new_coreopts!(SYNTAX, SUMMARY, LONG_HELP)
.optflag("a", "multiple", "Support more than one argument. Treat every argument as a name.")
.optopt("s", "suffix", "Remove a trailing suffix. This option implies the -a option.", "SUFFIX")
.optflag("z", "zero", "Output a zero byte (ASCII NUL) at the end of each line, rather than a newline.")
.parse(args);
// too few arguments
if matches.free.len() < 1 {
crash!(
1,
"{0}: {1}\nTry '{0} --help' for more information.",
NAME,
"missing operand"
);
}
let opt_s = matches.opt_present("s");
let opt_a = matches.opt_present("a");
let opt_z = matches.opt_present("z");
let multiple_paths = opt_s || opt_a;
// too many arguments
if !multiple_paths && matches.free.len() > 2 {
crash!(
1,
"{0}: extra operand '{1}'\nTry '{0} --help' for more information.",
NAME,
matches.free[2]
);
}
let suffix = if opt_s {
matches.opt_str("s").unwrap()
} else if !opt_a && matches.free.len() > 1 {
matches.free[1].clone()
} else {
"".to_owned()
};
//
// Main Program Processing
//
let paths = if multiple_paths {
&matches.free[..]
} else {
&matches.free[0..1]
};
let line_ending = if opt_z { "\0" } else { "\n" };
for path in paths {
print!("{}{}", basename(&path, &suffix), line_ending);
}
0
}
fn basename(fullname: &str, suffix: &str) -> String {
// Remove all platform-specific path separators from the end
let mut path: String = fullname.chars().rev().skip_while(|&ch| is_separator(ch)).collect();
// Undo reverse
path = path.chars().rev().collect();
// Convert to path buffer and get last path component
let pb = PathBuf::from(path);
match pb.components().last() {
Some(c) => strip_suffix(c.as_os_str().to_str().unwrap(), suffix),
None => "".to_owned()
}
}
| if name.ends_with(suffix) {
return name[..name.len() - suffix.len()].to_owned();
}
name.to_owned()
} | fn strip_suffix(name: &str, suffix: &str) -> String {
if name == suffix {
return name.to_owned();
}
| random_line_split |
basename.rs | #![crate_name = "uu_basename"]
/*
* This file is part of the uutils coreutils package.
*
* (c) Jimmy Lu <jimmy.lu.2011@gmail.com>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
#[macro_use]
extern crate uucore;
use std::io::Write;
use std::path::{is_separator, PathBuf};
static NAME: &'static str = "basename";
static SYNTAX: &'static str = "NAME [SUFFIX]";
static SUMMARY: &'static str = "Print NAME with any leading directory components removed
If specified, also remove a trailing SUFFIX";
static LONG_HELP: &'static str = "";
pub fn uumain(args: Vec<String>) -> i32 {
//
// Argument parsing
//
let matches = new_coreopts!(SYNTAX, SUMMARY, LONG_HELP)
.optflag("a", "multiple", "Support more than one argument. Treat every argument as a name.")
.optopt("s", "suffix", "Remove a trailing suffix. This option implies the -a option.", "SUFFIX")
.optflag("z", "zero", "Output a zero byte (ASCII NUL) at the end of each line, rather than a newline.")
.parse(args);
// too few arguments
if matches.free.len() < 1 {
crash!(
1,
"{0}: {1}\nTry '{0} --help' for more information.",
NAME,
"missing operand"
);
}
let opt_s = matches.opt_present("s");
let opt_a = matches.opt_present("a");
let opt_z = matches.opt_present("z");
let multiple_paths = opt_s || opt_a;
// too many arguments
if !multiple_paths && matches.free.len() > 2 {
crash!(
1,
"{0}: extra operand '{1}'\nTry '{0} --help' for more information.",
NAME,
matches.free[2]
);
}
let suffix = if opt_s {
matches.opt_str("s").unwrap()
} else if !opt_a && matches.free.len() > 1 {
matches.free[1].clone()
} else {
"".to_owned()
};
//
// Main Program Processing
//
let paths = if multiple_paths {
&matches.free[..]
} else {
&matches.free[0..1]
};
let line_ending = if opt_z { "\0" } else { "\n" };
for path in paths {
print!("{}{}", basename(&path, &suffix), line_ending);
}
0
}
fn basename(fullname: &str, suffix: &str) -> String {
// Remove all platform-specific path separators from the end
let mut path: String = fullname.chars().rev().skip_while(|&ch| is_separator(ch)).collect();
// Undo reverse
path = path.chars().rev().collect();
// Convert to path buffer and get last path component
let pb = PathBuf::from(path);
match pb.components().last() {
Some(c) => strip_suffix(c.as_os_str().to_str().unwrap(), suffix),
None => "".to_owned()
}
}
fn strip_suffix(name: &str, suffix: &str) -> String | {
if name == suffix {
return name.to_owned();
}
if name.ends_with(suffix) {
return name[..name.len() - suffix.len()].to_owned();
}
name.to_owned()
} | identifier_body | |
basename.rs | #![crate_name = "uu_basename"]
/*
* This file is part of the uutils coreutils package.
*
* (c) Jimmy Lu <jimmy.lu.2011@gmail.com>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
#[macro_use]
extern crate uucore;
use std::io::Write;
use std::path::{is_separator, PathBuf};
static NAME: &'static str = "basename";
static SYNTAX: &'static str = "NAME [SUFFIX]";
static SUMMARY: &'static str = "Print NAME with any leading directory components removed
If specified, also remove a trailing SUFFIX";
static LONG_HELP: &'static str = "";
pub fn | (args: Vec<String>) -> i32 {
//
// Argument parsing
//
let matches = new_coreopts!(SYNTAX, SUMMARY, LONG_HELP)
.optflag("a", "multiple", "Support more than one argument. Treat every argument as a name.")
.optopt("s", "suffix", "Remove a trailing suffix. This option implies the -a option.", "SUFFIX")
.optflag("z", "zero", "Output a zero byte (ASCII NUL) at the end of each line, rather than a newline.")
.parse(args);
// too few arguments
if matches.free.len() < 1 {
crash!(
1,
"{0}: {1}\nTry '{0} --help' for more information.",
NAME,
"missing operand"
);
}
let opt_s = matches.opt_present("s");
let opt_a = matches.opt_present("a");
let opt_z = matches.opt_present("z");
let multiple_paths = opt_s || opt_a;
// too many arguments
if !multiple_paths && matches.free.len() > 2 {
crash!(
1,
"{0}: extra operand '{1}'\nTry '{0} --help' for more information.",
NAME,
matches.free[2]
);
}
let suffix = if opt_s {
matches.opt_str("s").unwrap()
} else if !opt_a && matches.free.len() > 1 {
matches.free[1].clone()
} else {
"".to_owned()
};
//
// Main Program Processing
//
let paths = if multiple_paths {
&matches.free[..]
} else {
&matches.free[0..1]
};
let line_ending = if opt_z { "\0" } else { "\n" };
for path in paths {
print!("{}{}", basename(&path, &suffix), line_ending);
}
0
}
fn basename(fullname: &str, suffix: &str) -> String {
// Remove all platform-specific path separators from the end
let mut path: String = fullname.chars().rev().skip_while(|&ch| is_separator(ch)).collect();
// Undo reverse
path = path.chars().rev().collect();
// Convert to path buffer and get last path component
let pb = PathBuf::from(path);
match pb.components().last() {
Some(c) => strip_suffix(c.as_os_str().to_str().unwrap(), suffix),
None => "".to_owned()
}
}
fn strip_suffix(name: &str, suffix: &str) -> String {
if name == suffix {
return name.to_owned();
}
if name.ends_with(suffix) {
return name[..name.len() - suffix.len()].to_owned();
}
name.to_owned()
}
| uumain | identifier_name |
basename.rs | #![crate_name = "uu_basename"]
/*
* This file is part of the uutils coreutils package.
*
* (c) Jimmy Lu <jimmy.lu.2011@gmail.com>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
#[macro_use]
extern crate uucore;
use std::io::Write;
use std::path::{is_separator, PathBuf};
static NAME: &'static str = "basename";
static SYNTAX: &'static str = "NAME [SUFFIX]";
static SUMMARY: &'static str = "Print NAME with any leading directory components removed
If specified, also remove a trailing SUFFIX";
static LONG_HELP: &'static str = "";
pub fn uumain(args: Vec<String>) -> i32 {
//
// Argument parsing
//
let matches = new_coreopts!(SYNTAX, SUMMARY, LONG_HELP)
.optflag("a", "multiple", "Support more than one argument. Treat every argument as a name.")
.optopt("s", "suffix", "Remove a trailing suffix. This option implies the -a option.", "SUFFIX")
.optflag("z", "zero", "Output a zero byte (ASCII NUL) at the end of each line, rather than a newline.")
.parse(args);
// too few arguments
if matches.free.len() < 1 {
crash!(
1,
"{0}: {1}\nTry '{0} --help' for more information.",
NAME,
"missing operand"
);
}
let opt_s = matches.opt_present("s");
let opt_a = matches.opt_present("a");
let opt_z = matches.opt_present("z");
let multiple_paths = opt_s || opt_a;
// too many arguments
if !multiple_paths && matches.free.len() > 2 {
crash!(
1,
"{0}: extra operand '{1}'\nTry '{0} --help' for more information.",
NAME,
matches.free[2]
);
}
let suffix = if opt_s {
matches.opt_str("s").unwrap()
} else if !opt_a && matches.free.len() > 1 {
matches.free[1].clone()
} else {
"".to_owned()
};
//
// Main Program Processing
//
let paths = if multiple_paths {
&matches.free[..]
} else {
&matches.free[0..1]
};
let line_ending = if opt_z { "\0" } else | ;
for path in paths {
print!("{}{}", basename(&path, &suffix), line_ending);
}
0
}
fn basename(fullname: &str, suffix: &str) -> String {
// Remove all platform-specific path separators from the end
let mut path: String = fullname.chars().rev().skip_while(|&ch| is_separator(ch)).collect();
// Undo reverse
path = path.chars().rev().collect();
// Convert to path buffer and get last path component
let pb = PathBuf::from(path);
match pb.components().last() {
Some(c) => strip_suffix(c.as_os_str().to_str().unwrap(), suffix),
None => "".to_owned()
}
}
fn strip_suffix(name: &str, suffix: &str) -> String {
if name == suffix {
return name.to_owned();
}
if name.ends_with(suffix) {
return name[..name.len() - suffix.len()].to_owned();
}
name.to_owned()
}
| { "\n" } | conditional_block |
Suite.ts | import o from "ospec"
import "./common/LoggerTest.js"
import "./common/BirthdayUtilsTest"
import "./rest/EntityRestClientTest"
import "./crypto/CryptoFacadeTest.js"
import "./crypto/CompatibilityTest"
import "./error/RestErrorTest"
import "./error/TutanotaErrorTest"
import "./rest/RestClientTest"
import "./rest/EntityRestCacheTest"
import "./worker/EventBusClientTest"
import "./worker/search/TokenizerTest"
import "./worker/search/IndexerTest"
import "./worker/search/IndexerCoreTest"
import "./worker/search/ContactIndexerTest"
import "./worker/search/GroupInfoIndexerTest"
import "./worker/search/MailIndexerTest"
import "./worker/search/IndexUtilsTest"
import "./worker/search/SearchFacadeTest"
import "./worker/search/SuggestionFacadeTest"
import "./worker/search/SearchIndexEncodingTest"
import "./common/SwTest"
import "./worker/search/EventQueueTest"
import "./common/IndexerDebugLoggerTest"
import "./worker/facades/MailFacadeTest"
import "./worker/facades/CalendarFacadeTest"
import "./worker/SuspensionHandlerTest"
import "./worker/ConfigurationDbTest"
import "./worker/CompressionTest"
import "../api/common/PlainTextSearchTest"
import "../api/common/EntityUtilsTest"
import "./rest/CborDateEncoderTest.js"
import "./worker/utils/SleepDetectorTest.js"
import {preTest, reportTest} from "./TestUtils"
import {random} from "@tutao/tutanota-crypto"
import * as td from "testdouble"
(async function () {
const {WorkerImpl} = await import("../../src/api/worker/WorkerImpl")
globalThis.testWorker = WorkerImpl
if (typeof process != "undefined") {
if (process.argv.includes("-i")) {
console.log("\nRunning with integration tests because was run with -i\n")
await import("./common/WorkerTest")
await import("./common/IntegrationTest")
} else |
}
// setup the Entropy for all testcases
await random.addEntropy([{data: 36, entropy: 256, source: "key"}])
preTest()
o.before(function () {
// testdouble complains about certain mocking related code smells, and also prints a warning whenever you replace a property on an object.
// it's very very noisy, so we turn it off
td.config({
ignoreWarnings: true
})
})
o.afterEach(function () {
td.reset()
})
// @ts-ignore
o.run(reportTest)
})() | {
console.log("\nRunning without integration tests because run without -i\n")
} | conditional_block |
Suite.ts | import o from "ospec"
import "./common/LoggerTest.js"
import "./common/BirthdayUtilsTest"
import "./rest/EntityRestClientTest"
import "./crypto/CryptoFacadeTest.js"
import "./crypto/CompatibilityTest"
import "./error/RestErrorTest"
import "./error/TutanotaErrorTest"
import "./rest/RestClientTest"
import "./rest/EntityRestCacheTest"
import "./worker/EventBusClientTest"
import "./worker/search/TokenizerTest"
import "./worker/search/IndexerTest"
import "./worker/search/IndexerCoreTest"
import "./worker/search/ContactIndexerTest"
import "./worker/search/GroupInfoIndexerTest"
import "./worker/search/MailIndexerTest"
import "./worker/search/IndexUtilsTest"
import "./worker/search/SearchFacadeTest"
import "./worker/search/SuggestionFacadeTest"
import "./worker/search/SearchIndexEncodingTest"
import "./common/SwTest"
import "./worker/search/EventQueueTest"
import "./common/IndexerDebugLoggerTest"
import "./worker/facades/MailFacadeTest"
import "./worker/facades/CalendarFacadeTest"
import "./worker/SuspensionHandlerTest"
import "./worker/ConfigurationDbTest"
import "./worker/CompressionTest"
import "../api/common/PlainTextSearchTest"
import "../api/common/EntityUtilsTest"
import "./rest/CborDateEncoderTest.js"
import "./worker/utils/SleepDetectorTest.js"
import {preTest, reportTest} from "./TestUtils"
import {random} from "@tutao/tutanota-crypto"
import * as td from "testdouble"
(async function () {
const {WorkerImpl} = await import("../../src/api/worker/WorkerImpl")
globalThis.testWorker = WorkerImpl
if (typeof process != "undefined") {
if (process.argv.includes("-i")) {
console.log("\nRunning with integration tests because was run with -i\n")
await import("./common/WorkerTest")
await import("./common/IntegrationTest")
} else {
console.log("\nRunning without integration tests because run without -i\n")
}
}
// setup the Entropy for all testcases
await random.addEntropy([{data: 36, entropy: 256, source: "key"}])
preTest()
o.before(function () {
// testdouble complains about certain mocking related code smells, and also prints a warning whenever you replace a property on an object.
// it's very very noisy, so we turn it off
td.config({
ignoreWarnings: true
})
})
o.afterEach(function () { | td.reset()
})
// @ts-ignore
o.run(reportTest)
})() | random_line_split | |
filecache.py | # Bulletproof Arma Launcher
# Copyright (C) 2017 Lukasz Taczuk
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
from __future__ import unicode_literals
import errno
import hashlib
import os
from utils import paths
from utils import context
def get_cache_directory():
return paths.get_launcher_directory('filecache')
def map_file(url):
"""Get the path where the file should be stored in the cache."""
file_name = hashlib.sha256(url).hexdigest()
return os.path.join(get_cache_directory(), file_name)
def get_file(url):
|
def save_file(url, data):
"""Save the file contents to the cache.
The contents of the file are saved to a temporary file and then moved to
ensure that no truncated file is present in the cache.
"""
# Ensure the directory exists
paths.mkdir_p(get_cache_directory())
path = map_file(url)
tmp_path = path + '_tmp'
f = open(tmp_path, 'wb')
f.write(data)
f.close()
# Ensure the file does not exist (would raise an exception on Windows
with context.ignore_nosuchfile_exception():
os.unlink(path)
os.rename(tmp_path, path)
| """Get the file contents from the cache or None if the file is not present
in the cache.
"""
path = map_file(url)
f = None
try:
f = open(path, 'rb')
return f.read()
except IOError as ex:
if ex.errno == errno.ENOENT: # No such file
return None
raise
finally:
if f:
f.close() | identifier_body |
filecache.py | # Bulletproof Arma Launcher
# Copyright (C) 2017 Lukasz Taczuk
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
from __future__ import unicode_literals
import errno
import hashlib
import os
from utils import paths
from utils import context
def get_cache_directory():
return paths.get_launcher_directory('filecache')
def map_file(url):
"""Get the path where the file should be stored in the cache."""
file_name = hashlib.sha256(url).hexdigest()
return os.path.join(get_cache_directory(), file_name)
def get_file(url):
"""Get the file contents from the cache or None if the file is not present
in the cache.
"""
path = map_file(url)
f = None
try:
f = open(path, 'rb')
return f.read()
except IOError as ex:
if ex.errno == errno.ENOENT: # No such file
return None
raise |
def save_file(url, data):
"""Save the file contents to the cache.
The contents of the file are saved to a temporary file and then moved to
ensure that no truncated file is present in the cache.
"""
# Ensure the directory exists
paths.mkdir_p(get_cache_directory())
path = map_file(url)
tmp_path = path + '_tmp'
f = open(tmp_path, 'wb')
f.write(data)
f.close()
# Ensure the file does not exist (would raise an exception on Windows
with context.ignore_nosuchfile_exception():
os.unlink(path)
os.rename(tmp_path, path) |
finally:
if f:
f.close() | random_line_split |
filecache.py | # Bulletproof Arma Launcher
# Copyright (C) 2017 Lukasz Taczuk
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
from __future__ import unicode_literals
import errno
import hashlib
import os
from utils import paths
from utils import context
def get_cache_directory():
return paths.get_launcher_directory('filecache')
def | (url):
"""Get the path where the file should be stored in the cache."""
file_name = hashlib.sha256(url).hexdigest()
return os.path.join(get_cache_directory(), file_name)
def get_file(url):
"""Get the file contents from the cache or None if the file is not present
in the cache.
"""
path = map_file(url)
f = None
try:
f = open(path, 'rb')
return f.read()
except IOError as ex:
if ex.errno == errno.ENOENT: # No such file
return None
raise
finally:
if f:
f.close()
def save_file(url, data):
"""Save the file contents to the cache.
The contents of the file are saved to a temporary file and then moved to
ensure that no truncated file is present in the cache.
"""
# Ensure the directory exists
paths.mkdir_p(get_cache_directory())
path = map_file(url)
tmp_path = path + '_tmp'
f = open(tmp_path, 'wb')
f.write(data)
f.close()
# Ensure the file does not exist (would raise an exception on Windows
with context.ignore_nosuchfile_exception():
os.unlink(path)
os.rename(tmp_path, path)
| map_file | identifier_name |
filecache.py | # Bulletproof Arma Launcher
# Copyright (C) 2017 Lukasz Taczuk
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
from __future__ import unicode_literals
import errno
import hashlib
import os
from utils import paths
from utils import context
def get_cache_directory():
return paths.get_launcher_directory('filecache')
def map_file(url):
"""Get the path where the file should be stored in the cache."""
file_name = hashlib.sha256(url).hexdigest()
return os.path.join(get_cache_directory(), file_name)
def get_file(url):
"""Get the file contents from the cache or None if the file is not present
in the cache.
"""
path = map_file(url)
f = None
try:
f = open(path, 'rb')
return f.read()
except IOError as ex:
if ex.errno == errno.ENOENT: # No such file
return None
raise
finally:
if f:
|
def save_file(url, data):
"""Save the file contents to the cache.
The contents of the file are saved to a temporary file and then moved to
ensure that no truncated file is present in the cache.
"""
# Ensure the directory exists
paths.mkdir_p(get_cache_directory())
path = map_file(url)
tmp_path = path + '_tmp'
f = open(tmp_path, 'wb')
f.write(data)
f.close()
# Ensure the file does not exist (would raise an exception on Windows
with context.ignore_nosuchfile_exception():
os.unlink(path)
os.rename(tmp_path, path)
| f.close() | conditional_block |
triggerHandler.js | /* global logger, processWebhookMessage */
import moment from 'moment';
RocketChat.integrations.triggerHandler = new class RocketChatIntegrationHandler {
constructor() {
this.vm = Npm.require('vm');
this.successResults = [200, 201, 202];
this.compiledScripts = {};
this.triggers = {};
RocketChat.models.Integrations.find({type: 'webhook-outgoing'}).observe({
added: (record) => {
this.addIntegration(record);
},
changed: (record) => {
this.removeIntegration(record);
this.addIntegration(record);
},
removed: (record) => {
this.removeIntegration(record);
}
});
}
addIntegration(record) {
logger.outgoing.debug(`Adding the integration ${ record.name } of the event ${ record.event }!`);
let channels;
if (record.event && !RocketChat.integrations.outgoingEvents[record.event].use.channel) {
logger.outgoing.debug('The integration doesnt rely on channels.');
//We don't use any channels, so it's special ;)
channels = ['__any'];
} else if (_.isEmpty(record.channel)) {
logger.outgoing.debug('The integration had an empty channel property, so it is going on all the public channels.');
channels = ['all_public_channels'];
} else {
logger.outgoing.debug('The integration is going on these channels:', record.channel);
channels = [].concat(record.channel);
}
for (const channel of channels) {
if (!this.triggers[channel]) {
this.triggers[channel] = {};
}
this.triggers[channel][record._id] = record;
}
}
removeIntegration(record) {
for (const trigger of Object.values(this.triggers)) {
delete trigger[record._id];
}
}
isTriggerEnabled(trigger) {
for (const trig of Object.values(this.triggers)) {
if (trig[trigger._id]) {
return trig[trigger._id].enabled;
}
}
return false;
}
updateHistory({ historyId, step, integration, event, data, triggerWord, ranPrepareScript, prepareSentMessage, processSentMessage, resultMessage, finished, url, httpCallData, httpError, httpResult, error, errorStack }) {
const history = {
type: 'outgoing-webhook',
step
};
// Usually is only added on initial insert
if (integration) {
history.integration = integration;
}
// Usually is only added on initial insert
if (event) {
history.event = event;
}
if (data) {
history.data = data;
if (data.user) {
history.data.user = _.omit(data.user, ['meta', '$loki', 'services']);
}
if (data.room) {
history.data.room = _.omit(data.room, ['meta', '$loki', 'usernames']);
history.data.room.usernames = ['this_will_be_filled_in_with_usernames_when_replayed'];
}
}
if (triggerWord) {
history.triggerWord = triggerWord;
}
if (typeof ranPrepareScript !== 'undefined') {
history.ranPrepareScript = ranPrepareScript;
}
if (prepareSentMessage) {
history.prepareSentMessage = prepareSentMessage;
}
if (processSentMessage) {
history.processSentMessage = processSentMessage;
}
if (resultMessage) {
history.resultMessage = resultMessage;
}
if (typeof finished !== 'undefined') {
history.finished = finished;
}
if (url) {
history.url = url;
}
if (typeof httpCallData !== 'undefined') {
history.httpCallData = httpCallData;
}
if (httpError) {
history.httpError = httpError;
}
if (typeof httpResult !== 'undefined') |
if (typeof error !== 'undefined') {
history.error = error;
}
if (typeof errorStack !== 'undefined') {
history.errorStack = errorStack;
}
if (historyId) {
RocketChat.models.IntegrationHistory.update({ _id: historyId }, { $set: history });
return historyId;
} else {
history._createdAt = new Date();
return RocketChat.models.IntegrationHistory.insert(Object.assign({ _id: Random.id() }, history));
}
}
//Trigger is the trigger, nameOrId is a string which is used to try and find a room, room is a room, message is a message, and data contains "user_name" if trigger.impersonateUser is truthful.
sendMessage({ trigger, nameOrId = '', room, message, data }) {
let user;
//Try to find the user who we are impersonating
if (trigger.impersonateUser) {
user = RocketChat.models.Users.findOneByUsername(data.user_name);
}
//If they don't exist (aka the trigger didn't contain a user) then we set the user based upon the
//configured username for the integration since this is required at all times.
if (!user) {
user = RocketChat.models.Users.findOneByUsername(trigger.username);
}
let tmpRoom;
if (nameOrId || trigger.targetRoom) {
tmpRoom = RocketChat.getRoomByNameOrIdWithOptionToJoin({ currentUserId: user._id, nameOrId: nameOrId || trigger.targetRoom, errorOnEmpty: false }) || room;
} else {
tmpRoom = room;
}
//If no room could be found, we won't be sending any messages but we'll warn in the logs
if (!tmpRoom) {
logger.outgoing.warn(`The Integration "${ trigger.name }" doesn't have a room configured nor did it provide a room to send the message to.`);
return;
}
logger.outgoing.debug(`Found a room for ${ trigger.name } which is: ${ tmpRoom.name } with a type of ${ tmpRoom.t }`);
message.bot = { i: trigger._id };
const defaultValues = {
alias: trigger.alias,
avatar: trigger.avatar,
emoji: trigger.emoji
};
if (tmpRoom.t === 'd') {
message.channel = `@${ tmpRoom._id }`;
} else {
message.channel = `#${ tmpRoom._id }`;
}
message = processWebhookMessage(message, user, defaultValues);
return message;
}
buildSandbox(store = {}) {
const sandbox = {
_, s, console, moment,
Store: {
set: (key, val) => store[key] = val,
get: (key) => store[key]
},
HTTP: (method, url, options) => {
try {
return {
result: HTTP.call(method, url, options)
};
} catch (error) {
return { error };
}
}
};
Object.keys(RocketChat.models).filter(k => !k.startsWith('_')).forEach(k => {
sandbox[k] = RocketChat.models[k];
});
return { store, sandbox };
}
getIntegrationScript(integration) {
const compiledScript = this.compiledScripts[integration._id];
if (compiledScript && +compiledScript._updatedAt === +integration._updatedAt) {
return compiledScript.script;
}
const script = integration.scriptCompiled;
const { store, sandbox } = this.buildSandbox();
let vmScript;
try {
logger.outgoing.info('Will evaluate script of Trigger', integration.name);
logger.outgoing.debug(script);
vmScript = this.vm.createScript(script, 'script.js');
vmScript.runInNewContext(sandbox);
if (sandbox.Script) {
this.compiledScripts[integration._id] = {
script: new sandbox.Script(),
store,
_updatedAt: integration._updatedAt
};
return this.compiledScripts[integration._id].script;
}
} catch (e) {
logger.outgoing.error(`Error evaluating Script in Trigger ${ integration.name }:`);
logger.outgoing.error(script.replace(/^/gm, ' '));
logger.outgoing.error('Stack Trace:');
logger.outgoing.error(e.stack.replace(/^/gm, ' '));
throw new Meteor.Error('error-evaluating-script');
}
if (!sandbox.Script) {
logger.outgoing.error(`Class "Script" not in Trigger ${ integration.name }:`);
throw new Meteor.Error('class-script-not-found');
}
}
hasScriptAndMethod(integration, method) {
if (integration.scriptEnabled !== true || !integration.scriptCompiled || integration.scriptCompiled.trim() === '') {
return false;
}
let script;
try {
script = this.getIntegrationScript(integration);
} catch (e) {
return false;
}
return typeof script[method] !== 'undefined';
}
	// Runs `method` from the integration's compiled script inside a VM sandbox
	// (3 second timeout). Returns the script's result, or undefined when the
	// script can't be obtained, the method is missing, or execution throws.
	// Progress and failures are recorded against `historyId`.
	executeScript(integration, method, params, historyId) {
		let script;
		try {
			script = this.getIntegrationScript(integration);
		} catch (e) {
			this.updateHistory({ historyId, step: 'execute-script-getting-script', error: true, errorStack: e });
			return;
		}
		if (!script[method]) {
			logger.outgoing.error(`Method "${ method }" no found in the Integration "${ integration.name }"`);
			this.updateHistory({ historyId, step: `execute-script-no-method-${ method }` });
			return;
		}
		try {
			// Reuse the integration's persistent store so `Store` state
			// survives between invocations of the same compiled script.
			const { sandbox } = this.buildSandbox(this.compiledScripts[integration._id].store);
			sandbox.script = script;
			sandbox.method = method;
			sandbox.params = params;
			this.updateHistory({ historyId, step: `execute-script-before-running-${ method }` });
			const result = this.vm.runInNewContext('script[method](params)', sandbox, { timeout: 3000 });
			logger.outgoing.debug(`Script method "${ method }" result of the Integration "${ integration.name }" is:`);
			logger.outgoing.debug(result);
			return result;
		} catch (e) {
			this.updateHistory({ historyId, step: `execute-script-error-running-${ method }`, error: true, errorStack: e.stack.replace(/^/gm, ' ') });
			logger.outgoing.error(`Error running Script in the Integration ${ integration.name }:`);
			logger.outgoing.debug(integration.scriptCompiled.replace(/^/gm, ' ')); // Only output the compiled script if debugging is enabled, so the logs don't get spammed.
			logger.outgoing.error('Stack:');
			logger.outgoing.error(e.stack.replace(/^/gm, ' '));
			return;
		}
	}
eventNameArgumentsToObject() {
const argObject = {
event: arguments[0]
};
switch (argObject.event) {
case 'sendMessage':
if (arguments.length >= 3) {
argObject.message = arguments[1];
argObject.room = arguments[2];
}
break;
case 'fileUploaded':
if (arguments.length >= 2) {
const arghhh = arguments[1];
argObject.user = arghhh.user;
argObject.room = arghhh.room;
argObject.message = arghhh.message;
}
break;
case 'roomArchived':
if (arguments.length >= 3) {
argObject.room = arguments[1];
argObject.user = arguments[2];
}
break;
case 'roomCreated':
if (arguments.length >= 3) {
argObject.owner = arguments[1];
argObject.room = arguments[2];
}
break;
case 'roomJoined':
case 'roomLeft':
if (arguments.length >= 3) {
argObject.user = arguments[1];
argObject.room = arguments[2];
}
break;
case 'userCreated':
if (arguments.length >= 2) {
argObject.user = arguments[1];
}
break;
default:
logger.outgoing.warn(`An Unhandled Trigger Event was called: ${ argObject.event }`);
argObject.event = undefined;
break;
}
logger.outgoing.debug(`Got the event arguments for the event: ${ argObject.event }`, argObject);
return argObject;
}
	// Mutates `data` in place, flattening the event's message/room/user/owner
	// into the field names the outgoing webhook payload expects
	// (channel_id, user_name, text, etc.). Unknown events leave `data` untouched.
	mapEventArgsToData(data, { event, message, room, owner, user }) {
		switch (event) {
			case 'sendMessage':
				data.channel_id = room._id;
				data.channel_name = room.name;
				data.message_id = message._id;
				data.timestamp = message.ts;
				data.user_id = message.u._id;
				data.user_name = message.u.username;
				data.text = message.msg;
				if (message.alias) {
					data.alias = message.alias;
				}
				if (message.bot) {
					data.bot = message.bot;
				}
				break;
			case 'fileUploaded':
				// Same flat fields as sendMessage, plus the full objects.
				data.channel_id = room._id;
				data.channel_name = room.name;
				data.message_id = message._id;
				data.timestamp = message.ts;
				data.user_id = message.u._id;
				data.user_name = message.u.username;
				data.text = message.msg;
				data.user = user;
				data.room = room;
				data.message = message;
				if (message.alias) {
					data.alias = message.alias;
				}
				if (message.bot) {
					data.bot = message.bot;
				}
				break;
			case 'roomCreated':
				data.channel_id = room._id;
				data.channel_name = room.name;
				data.timestamp = room.ts;
				data.user_id = owner._id;
				data.user_name = owner.username;
				data.owner = owner;
				data.room = room;
				break;
			case 'roomArchived':
			case 'roomJoined':
			case 'roomLeft':
				// These events carry no message, so the timestamp is "now".
				data.timestamp = new Date();
				data.channel_id = room._id;
				data.channel_name = room.name;
				data.user_id = user._id;
				data.user_name = user.username;
				data.user = user;
				data.room = room;
				if (user.type === 'bot') {
					data.bot = true;
				}
				break;
			case 'userCreated':
				data.timestamp = user.createdAt;
				data.user_id = user._id;
				data.user_name = user.username;
				data.user = user;
				if (user.type === 'bot') {
					data.bot = true;
				}
				break;
			default:
				break;
		}
	}
	// Entry point for all outgoing-webhook events. Collects every registered
	// trigger that could apply to the event's room (by room id, room name,
	// direct-message partner, or the all_* catch-all buckets plus __any),
	// then executes those whose event matches and that are enabled.
	executeTriggers() {
		logger.outgoing.debug('Execute Trigger:', arguments[0]);
		const argObject = this.eventNameArgumentsToObject(...arguments);
		const { event, message, room } = argObject;
		//Each type of event should have an event and a room attached, otherwise we
		//wouldn't know how to handle the trigger nor would we have anywhere to send the
		//result of the integration
		if (!event) {
			return;
		}
		const triggersToExecute = [];
		logger.outgoing.debug('Starting search for triggers for the room:', room ? room._id : '__any');
		if (room) {
			switch (room.t) {
				case 'd':
					// Direct message: triggers may be registered against either
					// participant's username (the "@name" keys).
					const id = room._id.replace(message.u._id, '');
					const username = _.without(room.usernames, message.u.username)[0];
					if (this.triggers[`@${ id }`]) {
						for (const trigger of Object.values(this.triggers[`@${ id }`])) {
							triggersToExecute.push(trigger);
						}
					}
					if (this.triggers.all_direct_messages) {
						for (const trigger of Object.values(this.triggers.all_direct_messages)) {
							triggersToExecute.push(trigger);
						}
					}
					if (id !== username && this.triggers[`@${ username }`]) {
						for (const trigger of Object.values(this.triggers[`@${ username }`])) {
							triggersToExecute.push(trigger);
						}
					}
					break;
				case 'c':
					// Public channel: catch-all bucket, then by id, then by name.
					if (this.triggers.all_public_channels) {
						for (const trigger of Object.values(this.triggers.all_public_channels)) {
							triggersToExecute.push(trigger);
						}
					}
					if (this.triggers[`#${ room._id }`]) {
						for (const trigger of Object.values(this.triggers[`#${ room._id }`])) {
							triggersToExecute.push(trigger);
						}
					}
					if (room._id !== room.name && this.triggers[`#${ room.name }`]) {
						for (const trigger of Object.values(this.triggers[`#${ room.name }`])) {
							triggersToExecute.push(trigger);
						}
					}
					break;
				default:
					// Anything else is treated as a private group.
					if (this.triggers.all_private_groups) {
						for (const trigger of Object.values(this.triggers.all_private_groups)) {
							triggersToExecute.push(trigger);
						}
					}
					if (this.triggers[`#${ room._id }`]) {
						for (const trigger of Object.values(this.triggers[`#${ room._id }`])) {
							triggersToExecute.push(trigger);
						}
					}
					if (room._id !== room.name && this.triggers[`#${ room.name }`]) {
						for (const trigger of Object.values(this.triggers[`#${ room.name }`])) {
							triggersToExecute.push(trigger);
						}
					}
					break;
			}
		}
		if (this.triggers.__any) {
			//For outgoing integration which don't rely on rooms.
			for (const trigger of Object.values(this.triggers.__any)) {
				triggersToExecute.push(trigger);
			}
		}
		logger.outgoing.debug(`Found ${ triggersToExecute.length } to iterate over and see if the match the event.`);
		for (const triggerToExecute of triggersToExecute) {
			logger.outgoing.debug(`Is "${ triggerToExecute.name }" enabled, ${ triggerToExecute.enabled }, and what is the event? ${ triggerToExecute.event }`);
			if (triggerToExecute.enabled === true && triggerToExecute.event === event) {
				this.executeTrigger(triggerToExecute, argObject);
			}
		}
	}
executeTrigger(trigger, argObject) {
for (const url of trigger.urls) {
this.executeTriggerUrl(url, trigger, argObject, 0);
}
}
executeTriggerUrl(url, trigger, { event, message, room, owner, user }, theHistoryId, tries = 0) {
if (!this.isTriggerEnabled(trigger)) {
logger.outgoing.warn(`The trigger "${ trigger.name }" is no longer enabled, stopping execution of it at try: ${ tries }`);
return;
}
logger.outgoing.debug(`Starting to execute trigger: ${ trigger.name } (${ trigger._id })`);
let word;
//Not all triggers/events support triggerWords
if (RocketChat.integrations.outgoingEvents[event].use.triggerWords) {
if (trigger.triggerWords && trigger.triggerWords.length > 0) {
for (const triggerWord of trigger.triggerWords) {
if (!trigger.triggerWordAnywhere && message.msg.indexOf(triggerWord) === 0) {
word = triggerWord;
break;
} else if (trigger.triggerWordAnywhere && message.msg.includes(triggerWord)) {
word = triggerWord;
break;
}
}
// Stop if there are triggerWords but none match
if (!word) {
logger.outgoing.debug(`The trigger word which "${ trigger.name }" was expecting could not be found, not executing.`);
return;
}
}
}
const historyId = this.updateHistory({ step: 'start-execute-trigger-url', integration: trigger, event });
const data = {
token: trigger.token,
bot: false
};
if (word) {
data.trigger_word = word;
}
this.mapEventArgsToData(data, { trigger, event, message, room, owner, user });
this.updateHistory({ historyId, step: 'mapped-args-to-data', data, triggerWord: word });
logger.outgoing.info(`Will be executing the Integration "${ trigger.name }" to the url: ${ url }`);
logger.outgoing.debug(data);
let opts = {
params: {},
method: 'POST',
url,
data,
auth: undefined,
npmRequestOptions: {
rejectUnauthorized: !RocketChat.settings.get('Allow_Invalid_SelfSigned_Certs'),
strictSSL: !RocketChat.settings.get('Allow_Invalid_SelfSigned_Certs')
},
headers: {
'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2227.0 Safari/537.36'
}
};
if (this.hasScriptAndMethod(trigger, 'prepare_outgoing_request')) {
opts = this.executeScript(trigger, 'prepare_outgoing_request', { request: opts }, historyId);
}
this.updateHistory({ historyId, step: 'after-maybe-ran-prepare', ranPrepareScript: true });
if (!opts) {
this.updateHistory({ historyId, step: 'after-prepare-no-opts', finished: true });
return;
}
if (opts.message) {
const prepareMessage = this.sendMessage({ trigger, room, message: opts.message, data });
this.updateHistory({ historyId, step: 'after-prepare-send-message', prepareSentMessage: prepareMessage });
}
if (!opts.url || !opts.method) {
this.updateHistory({ historyId, step: 'after-prepare-no-url_or_method', finished: true });
return;
}
this.updateHistory({ historyId, step: 'pre-http-call', url: opts.url, httpCallData: opts.data });
HTTP.call(opts.method, opts.url, opts, (error, result) => {
if (!result) {
logger.outgoing.warn(`Result for the Integration ${ trigger.name } to ${ url } is empty`);
} else {
logger.outgoing.info(`Status code for the Integration ${ trigger.name } to ${ url } is ${ result.statusCode }`);
}
this.updateHistory({ historyId, step: 'after-http-call', httpError: error, httpResult: result });
if (this.hasScriptAndMethod(trigger, 'process_outgoing_response')) {
const sandbox = {
request: opts,
response: {
error,
status_code: result ? result.statusCode : undefined, //These values will be undefined to close issues #4175, #5762, and #5896
content: result ? result.data : undefined,
content_raw: result ? result.content : undefined,
headers: result ? result.headers : {}
}
};
const scriptResult = this.executeScript(trigger, 'process_outgoing_response', sandbox, historyId);
if (scriptResult && scriptResult.content) {
const resultMessage = this.sendMessage({ trigger, room, message: scriptResult.content, data });
this.updateHistory({ historyId, step: 'after-process-send-message', processSentMessage: resultMessage, finished: true });
return;
}
if (scriptResult === false) {
this.updateHistory({ historyId, step: 'after-process-false-result', finished: true });
return;
}
}
// if the result contained nothing or wasn't a successful statusCode
if (!result || !this.successResults.includes(result.statusCode)) {
if (error) {
logger.outgoing.error(`Error for the Integration "${ trigger.name }" to ${ url } is:`);
logger.outgoing.error(error);
}
if (result) {
logger.outgoing.error(`Error for the Integration "${ trigger.name }" to ${ url } is:`);
logger.outgoing.error(result);
if (result.statusCode === 410) {
this.updateHistory({ historyId, step: 'after-process-http-status-410', error: true });
logger.outgoing.error(`Disabling the Integration "${ trigger.name }" because the status code was 401 (Gone).`);
RocketChat.models.Integrations.update({ _id: trigger._id }, { $set: { enabled: false }});
return;
}
if (result.statusCode === 500) {
this.updateHistory({ historyId, step: 'after-process-http-status-500', error: true });
logger.outgoing.error(`Error "500" for the Integration "${ trigger.name }" to ${ url }.`);
logger.outgoing.error(result.content);
return;
}
}
if (trigger.retryFailedCalls) {
if (tries < trigger.retryCount && trigger.retryDelay) {
this.updateHistory({ historyId, error: true, step: `going-to-retry-${ tries + 1 }` });
let waitTime;
switch (trigger.retryDelay) {
case 'powers-of-ten':
// Try again in 0.1s, 1s, 10s, 1m40s, 16m40s, 2h46m40s, 27h46m40s, etc
waitTime = Math.pow(10, tries + 2);
break;
case 'powers-of-two':
// 2 seconds, 4 seconds, 8 seconds
waitTime = Math.pow(2, tries + 1) * 1000;
break;
case 'increments-of-two':
// 2 second, 4 seconds, 6 seconds, etc
waitTime = (tries + 1) * 2 * 1000;
break;
default:
const er = new Error('The integration\'s retryDelay setting is invalid.');
this.updateHistory({ historyId, step: 'failed-and-retry-delay-is-invalid', error: true, errorStack: er.stack });
return;
}
logger.outgoing.info(`Trying the Integration ${ trigger.name } to ${ url } again in ${ waitTime } milliseconds.`);
Meteor.setTimeout(() => {
this.executeTriggerUrl(url, trigger, { event, message, room, owner, user }, historyId, tries + 1);
}, waitTime);
} else {
this.updateHistory({ historyId, step: 'too-many-retries', error: true });
}
} else {
this.updateHistory({ historyId, step: 'failed-and-not-configured-to-retry', error: true });
}
return;
}
//process outgoing webhook response as a new message
if (result && this.successResults.includes(result.statusCode)) {
if (result && result.data && (result.data.text || result.data.attachments)) {
const resultMsg = this.sendMessage({ trigger, room, message: result.data, data });
this.updateHistory({ historyId, step: 'url-response-sent-message', resultMessage: resultMsg, finished: true });
}
}
});
}
replay(integration, history) {
if (!integration || integration.type !== 'webhook-outgoing') {
throw new Meteor.Error('integration-type-must-be-outgoing', 'The integration type to replay must be an outgoing webhook.');
}
if (!history || !history.data) {
throw new Meteor.Error('history-data-must-be-defined', 'The history data must be defined to replay an integration.');
}
const event = history.event;
const message = RocketChat.models.Messages.findOneById(history.data.message_id);
const room = RocketChat.models.Rooms.findOneById(history.data.channel_id);
const user = RocketChat.models.Users.findOneById(history.data.user_id);
let owner;
if (history.data.owner && history.data.owner._id) {
owner = RocketChat.models.Users.findOneById(history.data.owner._id);
}
this.executeTriggerUrl(history.url, integration, { event, message, room, owner, user });
}
};
| {
history.httpResult = httpResult;
} | conditional_block |
triggerHandler.js | /* global logger, processWebhookMessage */
import moment from 'moment';
RocketChat.integrations.triggerHandler = new class RocketChatIntegrationHandler {
constructor() {
this.vm = Npm.require('vm');
this.successResults = [200, 201, 202];
this.compiledScripts = {};
this.triggers = {};
RocketChat.models.Integrations.find({type: 'webhook-outgoing'}).observe({
added: (record) => {
this.addIntegration(record);
},
changed: (record) => {
this.removeIntegration(record);
this.addIntegration(record);
},
removed: (record) => {
this.removeIntegration(record);
}
});
}
addIntegration(record) {
logger.outgoing.debug(`Adding the integration ${ record.name } of the event ${ record.event }!`);
let channels;
if (record.event && !RocketChat.integrations.outgoingEvents[record.event].use.channel) {
logger.outgoing.debug('The integration doesnt rely on channels.');
//We don't use any channels, so it's special ;)
channels = ['__any'];
} else if (_.isEmpty(record.channel)) {
logger.outgoing.debug('The integration had an empty channel property, so it is going on all the public channels.');
channels = ['all_public_channels'];
} else {
logger.outgoing.debug('The integration is going on these channels:', record.channel);
channels = [].concat(record.channel);
}
for (const channel of channels) {
if (!this.triggers[channel]) {
this.triggers[channel] = {};
}
this.triggers[channel][record._id] = record;
}
}
removeIntegration(record) |
isTriggerEnabled(trigger) {
for (const trig of Object.values(this.triggers)) {
if (trig[trigger._id]) {
return trig[trigger._id].enabled;
}
}
return false;
}
	// Builds a partial history document from whichever named fields were
	// provided and either $sets it onto an existing record (when historyId is
	// given) or inserts a new one. Returns the history record's _id either way.
	// Note the mix of truthiness checks and `typeof !== 'undefined'` checks:
	// boolean flags like `finished`/`error` must be storable as false.
	updateHistory({ historyId, step, integration, event, data, triggerWord, ranPrepareScript, prepareSentMessage, processSentMessage, resultMessage, finished, url, httpCallData, httpError, httpResult, error, errorStack }) {
		const history = {
			type: 'outgoing-webhook',
			step
		};
		// Usually is only added on initial insert
		if (integration) {
			history.integration = integration;
		}
		// Usually is only added on initial insert
		if (event) {
			history.event = event;
		}
		if (data) {
			history.data = data;
			// Strip fields that are large or sensitive before persisting.
			if (data.user) {
				history.data.user = _.omit(data.user, ['meta', '$loki', 'services']);
			}
			if (data.room) {
				history.data.room = _.omit(data.room, ['meta', '$loki', 'usernames']);
				history.data.room.usernames = ['this_will_be_filled_in_with_usernames_when_replayed'];
			}
		}
		if (triggerWord) {
			history.triggerWord = triggerWord;
		}
		if (typeof ranPrepareScript !== 'undefined') {
			history.ranPrepareScript = ranPrepareScript;
		}
		if (prepareSentMessage) {
			history.prepareSentMessage = prepareSentMessage;
		}
		if (processSentMessage) {
			history.processSentMessage = processSentMessage;
		}
		if (resultMessage) {
			history.resultMessage = resultMessage;
		}
		if (typeof finished !== 'undefined') {
			history.finished = finished;
		}
		if (url) {
			history.url = url;
		}
		if (typeof httpCallData !== 'undefined') {
			history.httpCallData = httpCallData;
		}
		if (httpError) {
			history.httpError = httpError;
		}
		if (typeof httpResult !== 'undefined') {
			history.httpResult = httpResult;
		}
		if (typeof error !== 'undefined') {
			history.error = error;
		}
		if (typeof errorStack !== 'undefined') {
			history.errorStack = errorStack;
		}
		if (historyId) {
			RocketChat.models.IntegrationHistory.update({ _id: historyId }, { $set: history });
			return historyId;
		} else {
			history._createdAt = new Date();
			return RocketChat.models.IntegrationHistory.insert(Object.assign({ _id: Random.id() }, history));
		}
	}
	// Sends a message on behalf of a trigger. `trigger` is the integration
	// record; `nameOrId` is a room name/id hint; `room` is a fallback room
	// object; `message` is the message to send; `data` supplies `user_name`
	// when trigger.impersonateUser is truthy. Returns the processed message,
	// or undefined when no target room could be resolved.
	sendMessage({ trigger, nameOrId = '', room, message, data }) {
		let user;
		//Try to find the user who we are impersonating
		if (trigger.impersonateUser) {
			user = RocketChat.models.Users.findOneByUsername(data.user_name);
		}
		//If they don't exist (aka the trigger didn't contain a user) then we set the user based upon the
		//configured username for the integration since this is required at all times.
		if (!user) {
			user = RocketChat.models.Users.findOneByUsername(trigger.username);
		}
		let tmpRoom;
		if (nameOrId || trigger.targetRoom) {
			// The explicit hint wins over the trigger's configured room; fall
			// back to the event's room if neither resolves.
			tmpRoom = RocketChat.getRoomByNameOrIdWithOptionToJoin({ currentUserId: user._id, nameOrId: nameOrId || trigger.targetRoom, errorOnEmpty: false }) || room;
		} else {
			tmpRoom = room;
		}
		//If no room could be found, we won't be sending any messages but we'll warn in the logs
		if (!tmpRoom) {
			logger.outgoing.warn(`The Integration "${ trigger.name }" doesn't have a room configured nor did it provide a room to send the message to.`);
			return;
		}
		logger.outgoing.debug(`Found a room for ${ trigger.name } which is: ${ tmpRoom.name } with a type of ${ tmpRoom.t }`);
		// Tag the message with the integration id so it is identifiable as bot output.
		message.bot = { i: trigger._id };
		const defaultValues = {
			alias: trigger.alias,
			avatar: trigger.avatar,
			emoji: trigger.emoji
		};
		if (tmpRoom.t === 'd') {
			message.channel = `@${ tmpRoom._id }`;
		} else {
			message.channel = `#${ tmpRoom._id }`;
		}
		message = processWebhookMessage(message, user, defaultValues);
		return message;
	}
buildSandbox(store = {}) {
const sandbox = {
_, s, console, moment,
Store: {
set: (key, val) => store[key] = val,
get: (key) => store[key]
},
HTTP: (method, url, options) => {
try {
return {
result: HTTP.call(method, url, options)
};
} catch (error) {
return { error };
}
}
};
Object.keys(RocketChat.models).filter(k => !k.startsWith('_')).forEach(k => {
sandbox[k] = RocketChat.models[k];
});
return { store, sandbox };
}
	// Returns the cached Script instance for the integration, recompiling the
	// integration's scriptCompiled source in a VM sandbox when the cache is
	// missing or stale (compared by _updatedAt). Throws a Meteor.Error when
	// the source fails to evaluate or does not define a `Script` class.
	getIntegrationScript(integration) {
		const compiledScript = this.compiledScripts[integration._id];
		// `+date` coerces to epoch millis, so this compares timestamps by value.
		if (compiledScript && +compiledScript._updatedAt === +integration._updatedAt) {
			return compiledScript.script;
		}
		const script = integration.scriptCompiled;
		const { store, sandbox } = this.buildSandbox();
		let vmScript;
		try {
			logger.outgoing.info('Will evaluate script of Trigger', integration.name);
			logger.outgoing.debug(script);
			vmScript = this.vm.createScript(script, 'script.js');
			vmScript.runInNewContext(sandbox);
			// Evaluating the source is expected to define a Script class on the sandbox.
			if (sandbox.Script) {
				this.compiledScripts[integration._id] = {
					script: new sandbox.Script(),
					store,
					_updatedAt: integration._updatedAt
				};
				return this.compiledScripts[integration._id].script;
			}
		} catch (e) {
			logger.outgoing.error(`Error evaluating Script in Trigger ${ integration.name }:`);
			logger.outgoing.error(script.replace(/^/gm, ' '));
			logger.outgoing.error('Stack Trace:');
			logger.outgoing.error(e.stack.replace(/^/gm, ' '));
			throw new Meteor.Error('error-evaluating-script');
		}
		// Reached only when evaluation succeeded but no Script class was defined.
		if (!sandbox.Script) {
			logger.outgoing.error(`Class "Script" not in Trigger ${ integration.name }:`);
			throw new Meteor.Error('class-script-not-found');
		}
	}
	// Returns true only when the integration has an enabled, non-empty
	// compiled script that defines `method`; false on any failure,
	// including a script that does not evaluate.
	hasScriptAndMethod(integration, method) {
		if (integration.scriptEnabled !== true || !integration.scriptCompiled || integration.scriptCompiled.trim() === '') {
			return false;
		}
		let script;
		try {
			script = this.getIntegrationScript(integration);
		} catch (e) {
			// A broken script simply has no callable methods.
			return false;
		}
		return typeof script[method] !== 'undefined';
	}
	// Runs `method` from the integration's compiled script inside a VM sandbox
	// (3 second timeout). Returns the script's result, or undefined when the
	// script can't be obtained, the method is missing, or execution throws.
	// Progress and failures are recorded against `historyId`.
	executeScript(integration, method, params, historyId) {
		let script;
		try {
			script = this.getIntegrationScript(integration);
		} catch (e) {
			this.updateHistory({ historyId, step: 'execute-script-getting-script', error: true, errorStack: e });
			return;
		}
		if (!script[method]) {
			logger.outgoing.error(`Method "${ method }" no found in the Integration "${ integration.name }"`);
			this.updateHistory({ historyId, step: `execute-script-no-method-${ method }` });
			return;
		}
		try {
			// Reuse the integration's persistent store so `Store` state
			// survives between invocations of the same compiled script.
			const { sandbox } = this.buildSandbox(this.compiledScripts[integration._id].store);
			sandbox.script = script;
			sandbox.method = method;
			sandbox.params = params;
			this.updateHistory({ historyId, step: `execute-script-before-running-${ method }` });
			const result = this.vm.runInNewContext('script[method](params)', sandbox, { timeout: 3000 });
			logger.outgoing.debug(`Script method "${ method }" result of the Integration "${ integration.name }" is:`);
			logger.outgoing.debug(result);
			return result;
		} catch (e) {
			this.updateHistory({ historyId, step: `execute-script-error-running-${ method }`, error: true, errorStack: e.stack.replace(/^/gm, ' ') });
			logger.outgoing.error(`Error running Script in the Integration ${ integration.name }:`);
			logger.outgoing.debug(integration.scriptCompiled.replace(/^/gm, ' ')); // Only output the compiled script if debugging is enabled, so the logs don't get spammed.
			logger.outgoing.error('Stack:');
			logger.outgoing.error(e.stack.replace(/^/gm, ' '));
			return;
		}
	}
eventNameArgumentsToObject() {
const argObject = {
event: arguments[0]
};
switch (argObject.event) {
case 'sendMessage':
if (arguments.length >= 3) {
argObject.message = arguments[1];
argObject.room = arguments[2];
}
break;
case 'fileUploaded':
if (arguments.length >= 2) {
const arghhh = arguments[1];
argObject.user = arghhh.user;
argObject.room = arghhh.room;
argObject.message = arghhh.message;
}
break;
case 'roomArchived':
if (arguments.length >= 3) {
argObject.room = arguments[1];
argObject.user = arguments[2];
}
break;
case 'roomCreated':
if (arguments.length >= 3) {
argObject.owner = arguments[1];
argObject.room = arguments[2];
}
break;
case 'roomJoined':
case 'roomLeft':
if (arguments.length >= 3) {
argObject.user = arguments[1];
argObject.room = arguments[2];
}
break;
case 'userCreated':
if (arguments.length >= 2) {
argObject.user = arguments[1];
}
break;
default:
logger.outgoing.warn(`An Unhandled Trigger Event was called: ${ argObject.event }`);
argObject.event = undefined;
break;
}
logger.outgoing.debug(`Got the event arguments for the event: ${ argObject.event }`, argObject);
return argObject;
}
	// Mutates `data` in place, flattening the event's message/room/user/owner
	// into the field names the outgoing webhook payload expects
	// (channel_id, user_name, text, etc.). Unknown events leave `data` untouched.
	mapEventArgsToData(data, { event, message, room, owner, user }) {
		switch (event) {
			case 'sendMessage':
				data.channel_id = room._id;
				data.channel_name = room.name;
				data.message_id = message._id;
				data.timestamp = message.ts;
				data.user_id = message.u._id;
				data.user_name = message.u.username;
				data.text = message.msg;
				if (message.alias) {
					data.alias = message.alias;
				}
				if (message.bot) {
					data.bot = message.bot;
				}
				break;
			case 'fileUploaded':
				// Same flat fields as sendMessage, plus the full objects.
				data.channel_id = room._id;
				data.channel_name = room.name;
				data.message_id = message._id;
				data.timestamp = message.ts;
				data.user_id = message.u._id;
				data.user_name = message.u.username;
				data.text = message.msg;
				data.user = user;
				data.room = room;
				data.message = message;
				if (message.alias) {
					data.alias = message.alias;
				}
				if (message.bot) {
					data.bot = message.bot;
				}
				break;
			case 'roomCreated':
				data.channel_id = room._id;
				data.channel_name = room.name;
				data.timestamp = room.ts;
				data.user_id = owner._id;
				data.user_name = owner.username;
				data.owner = owner;
				data.room = room;
				break;
			case 'roomArchived':
			case 'roomJoined':
			case 'roomLeft':
				// These events carry no message, so the timestamp is "now".
				data.timestamp = new Date();
				data.channel_id = room._id;
				data.channel_name = room.name;
				data.user_id = user._id;
				data.user_name = user.username;
				data.user = user;
				data.room = room;
				if (user.type === 'bot') {
					data.bot = true;
				}
				break;
			case 'userCreated':
				data.timestamp = user.createdAt;
				data.user_id = user._id;
				data.user_name = user.username;
				data.user = user;
				if (user.type === 'bot') {
					data.bot = true;
				}
				break;
			default:
				break;
		}
	}
	// Entry point for all outgoing-webhook events. Collects every registered
	// trigger that could apply to the event's room (by room id, room name,
	// direct-message partner, or the all_* catch-all buckets plus __any),
	// then executes those whose event matches and that are enabled.
	executeTriggers() {
		logger.outgoing.debug('Execute Trigger:', arguments[0]);
		const argObject = this.eventNameArgumentsToObject(...arguments);
		const { event, message, room } = argObject;
		//Each type of event should have an event and a room attached, otherwise we
		//wouldn't know how to handle the trigger nor would we have anywhere to send the
		//result of the integration
		if (!event) {
			return;
		}
		const triggersToExecute = [];
		logger.outgoing.debug('Starting search for triggers for the room:', room ? room._id : '__any');
		if (room) {
			switch (room.t) {
				case 'd':
					// Direct message: triggers may be registered against either
					// participant's username (the "@name" keys).
					const id = room._id.replace(message.u._id, '');
					const username = _.without(room.usernames, message.u.username)[0];
					if (this.triggers[`@${ id }`]) {
						for (const trigger of Object.values(this.triggers[`@${ id }`])) {
							triggersToExecute.push(trigger);
						}
					}
					if (this.triggers.all_direct_messages) {
						for (const trigger of Object.values(this.triggers.all_direct_messages)) {
							triggersToExecute.push(trigger);
						}
					}
					if (id !== username && this.triggers[`@${ username }`]) {
						for (const trigger of Object.values(this.triggers[`@${ username }`])) {
							triggersToExecute.push(trigger);
						}
					}
					break;
				case 'c':
					// Public channel: catch-all bucket, then by id, then by name.
					if (this.triggers.all_public_channels) {
						for (const trigger of Object.values(this.triggers.all_public_channels)) {
							triggersToExecute.push(trigger);
						}
					}
					if (this.triggers[`#${ room._id }`]) {
						for (const trigger of Object.values(this.triggers[`#${ room._id }`])) {
							triggersToExecute.push(trigger);
						}
					}
					if (room._id !== room.name && this.triggers[`#${ room.name }`]) {
						for (const trigger of Object.values(this.triggers[`#${ room.name }`])) {
							triggersToExecute.push(trigger);
						}
					}
					break;
				default:
					// Anything else is treated as a private group.
					if (this.triggers.all_private_groups) {
						for (const trigger of Object.values(this.triggers.all_private_groups)) {
							triggersToExecute.push(trigger);
						}
					}
					if (this.triggers[`#${ room._id }`]) {
						for (const trigger of Object.values(this.triggers[`#${ room._id }`])) {
							triggersToExecute.push(trigger);
						}
					}
					if (room._id !== room.name && this.triggers[`#${ room.name }`]) {
						for (const trigger of Object.values(this.triggers[`#${ room.name }`])) {
							triggersToExecute.push(trigger);
						}
					}
					break;
			}
		}
		if (this.triggers.__any) {
			//For outgoing integration which don't rely on rooms.
			for (const trigger of Object.values(this.triggers.__any)) {
				triggersToExecute.push(trigger);
			}
		}
		logger.outgoing.debug(`Found ${ triggersToExecute.length } to iterate over and see if the match the event.`);
		for (const triggerToExecute of triggersToExecute) {
			logger.outgoing.debug(`Is "${ triggerToExecute.name }" enabled, ${ triggerToExecute.enabled }, and what is the event? ${ triggerToExecute.event }`);
			if (triggerToExecute.enabled === true && triggerToExecute.event === event) {
				this.executeTrigger(triggerToExecute, argObject);
			}
		}
	}
executeTrigger(trigger, argObject) {
for (const url of trigger.urls) {
this.executeTriggerUrl(url, trigger, argObject, 0);
}
}
executeTriggerUrl(url, trigger, { event, message, room, owner, user }, theHistoryId, tries = 0) {
if (!this.isTriggerEnabled(trigger)) {
logger.outgoing.warn(`The trigger "${ trigger.name }" is no longer enabled, stopping execution of it at try: ${ tries }`);
return;
}
logger.outgoing.debug(`Starting to execute trigger: ${ trigger.name } (${ trigger._id })`);
let word;
//Not all triggers/events support triggerWords
if (RocketChat.integrations.outgoingEvents[event].use.triggerWords) {
if (trigger.triggerWords && trigger.triggerWords.length > 0) {
for (const triggerWord of trigger.triggerWords) {
if (!trigger.triggerWordAnywhere && message.msg.indexOf(triggerWord) === 0) {
word = triggerWord;
break;
} else if (trigger.triggerWordAnywhere && message.msg.includes(triggerWord)) {
word = triggerWord;
break;
}
}
// Stop if there are triggerWords but none match
if (!word) {
logger.outgoing.debug(`The trigger word which "${ trigger.name }" was expecting could not be found, not executing.`);
return;
}
}
}
const historyId = this.updateHistory({ step: 'start-execute-trigger-url', integration: trigger, event });
const data = {
token: trigger.token,
bot: false
};
if (word) {
data.trigger_word = word;
}
this.mapEventArgsToData(data, { trigger, event, message, room, owner, user });
this.updateHistory({ historyId, step: 'mapped-args-to-data', data, triggerWord: word });
logger.outgoing.info(`Will be executing the Integration "${ trigger.name }" to the url: ${ url }`);
logger.outgoing.debug(data);
let opts = {
params: {},
method: 'POST',
url,
data,
auth: undefined,
npmRequestOptions: {
rejectUnauthorized: !RocketChat.settings.get('Allow_Invalid_SelfSigned_Certs'),
strictSSL: !RocketChat.settings.get('Allow_Invalid_SelfSigned_Certs')
},
headers: {
'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2227.0 Safari/537.36'
}
};
if (this.hasScriptAndMethod(trigger, 'prepare_outgoing_request')) {
opts = this.executeScript(trigger, 'prepare_outgoing_request', { request: opts }, historyId);
}
this.updateHistory({ historyId, step: 'after-maybe-ran-prepare', ranPrepareScript: true });
if (!opts) {
this.updateHistory({ historyId, step: 'after-prepare-no-opts', finished: true });
return;
}
if (opts.message) {
const prepareMessage = this.sendMessage({ trigger, room, message: opts.message, data });
this.updateHistory({ historyId, step: 'after-prepare-send-message', prepareSentMessage: prepareMessage });
}
if (!opts.url || !opts.method) {
this.updateHistory({ historyId, step: 'after-prepare-no-url_or_method', finished: true });
return;
}
this.updateHistory({ historyId, step: 'pre-http-call', url: opts.url, httpCallData: opts.data });
HTTP.call(opts.method, opts.url, opts, (error, result) => {
if (!result) {
logger.outgoing.warn(`Result for the Integration ${ trigger.name } to ${ url } is empty`);
} else {
logger.outgoing.info(`Status code for the Integration ${ trigger.name } to ${ url } is ${ result.statusCode }`);
}
this.updateHistory({ historyId, step: 'after-http-call', httpError: error, httpResult: result });
if (this.hasScriptAndMethod(trigger, 'process_outgoing_response')) {
const sandbox = {
request: opts,
response: {
error,
status_code: result ? result.statusCode : undefined, //These values will be undefined to close issues #4175, #5762, and #5896
content: result ? result.data : undefined,
content_raw: result ? result.content : undefined,
headers: result ? result.headers : {}
}
};
const scriptResult = this.executeScript(trigger, 'process_outgoing_response', sandbox, historyId);
if (scriptResult && scriptResult.content) {
const resultMessage = this.sendMessage({ trigger, room, message: scriptResult.content, data });
this.updateHistory({ historyId, step: 'after-process-send-message', processSentMessage: resultMessage, finished: true });
return;
}
if (scriptResult === false) {
this.updateHistory({ historyId, step: 'after-process-false-result', finished: true });
return;
}
}
// if the result contained nothing or wasn't a successful statusCode
if (!result || !this.successResults.includes(result.statusCode)) {
if (error) {
logger.outgoing.error(`Error for the Integration "${ trigger.name }" to ${ url } is:`);
logger.outgoing.error(error);
}
if (result) {
logger.outgoing.error(`Error for the Integration "${ trigger.name }" to ${ url } is:`);
logger.outgoing.error(result);
if (result.statusCode === 410) {
this.updateHistory({ historyId, step: 'after-process-http-status-410', error: true });
logger.outgoing.error(`Disabling the Integration "${ trigger.name }" because the status code was 401 (Gone).`);
RocketChat.models.Integrations.update({ _id: trigger._id }, { $set: { enabled: false }});
return;
}
if (result.statusCode === 500) {
this.updateHistory({ historyId, step: 'after-process-http-status-500', error: true });
logger.outgoing.error(`Error "500" for the Integration "${ trigger.name }" to ${ url }.`);
logger.outgoing.error(result.content);
return;
}
}
if (trigger.retryFailedCalls) {
if (tries < trigger.retryCount && trigger.retryDelay) {
this.updateHistory({ historyId, error: true, step: `going-to-retry-${ tries + 1 }` });
let waitTime;
switch (trigger.retryDelay) {
case 'powers-of-ten':
// Try again in 0.1s, 1s, 10s, 1m40s, 16m40s, 2h46m40s, 27h46m40s, etc
waitTime = Math.pow(10, tries + 2);
break;
case 'powers-of-two':
// 2 seconds, 4 seconds, 8 seconds
waitTime = Math.pow(2, tries + 1) * 1000;
break;
case 'increments-of-two':
// 2 second, 4 seconds, 6 seconds, etc
waitTime = (tries + 1) * 2 * 1000;
break;
default:
const er = new Error('The integration\'s retryDelay setting is invalid.');
this.updateHistory({ historyId, step: 'failed-and-retry-delay-is-invalid', error: true, errorStack: er.stack });
return;
}
logger.outgoing.info(`Trying the Integration ${ trigger.name } to ${ url } again in ${ waitTime } milliseconds.`);
Meteor.setTimeout(() => {
this.executeTriggerUrl(url, trigger, { event, message, room, owner, user }, historyId, tries + 1);
}, waitTime);
} else {
this.updateHistory({ historyId, step: 'too-many-retries', error: true });
}
} else {
this.updateHistory({ historyId, step: 'failed-and-not-configured-to-retry', error: true });
}
return;
}
//process outgoing webhook response as a new message
if (result && this.successResults.includes(result.statusCode)) {
if (result && result.data && (result.data.text || result.data.attachments)) {
const resultMsg = this.sendMessage({ trigger, room, message: result.data, data });
this.updateHistory({ historyId, step: 'url-response-sent-message', resultMessage: resultMsg, finished: true });
}
}
});
}
replay(integration, history) {
if (!integration || integration.type !== 'webhook-outgoing') {
throw new Meteor.Error('integration-type-must-be-outgoing', 'The integration type to replay must be an outgoing webhook.');
}
if (!history || !history.data) {
throw new Meteor.Error('history-data-must-be-defined', 'The history data must be defined to replay an integration.');
}
const event = history.event;
const message = RocketChat.models.Messages.findOneById(history.data.message_id);
const room = RocketChat.models.Rooms.findOneById(history.data.channel_id);
const user = RocketChat.models.Users.findOneById(history.data.user_id);
let owner;
if (history.data.owner && history.data.owner._id) {
owner = RocketChat.models.Users.findOneById(history.data.owner._id);
}
this.executeTriggerUrl(history.url, integration, { event, message, room, owner, user });
}
};
| {
for (const trigger of Object.values(this.triggers)) {
delete trigger[record._id];
}
} | identifier_body |
triggerHandler.js | /* global logger, processWebhookMessage */
import moment from 'moment';
RocketChat.integrations.triggerHandler = new class RocketChatIntegrationHandler {
constructor() {
this.vm = Npm.require('vm');
this.successResults = [200, 201, 202];
this.compiledScripts = {};
this.triggers = {};
RocketChat.models.Integrations.find({type: 'webhook-outgoing'}).observe({
added: (record) => {
this.addIntegration(record);
},
changed: (record) => {
this.removeIntegration(record);
this.addIntegration(record);
},
removed: (record) => {
this.removeIntegration(record);
}
});
}
addIntegration(record) {
logger.outgoing.debug(`Adding the integration ${ record.name } of the event ${ record.event }!`);
let channels;
if (record.event && !RocketChat.integrations.outgoingEvents[record.event].use.channel) {
logger.outgoing.debug('The integration doesnt rely on channels.');
//We don't use any channels, so it's special ;)
channels = ['__any'];
} else if (_.isEmpty(record.channel)) {
logger.outgoing.debug('The integration had an empty channel property, so it is going on all the public channels.');
channels = ['all_public_channels'];
} else {
logger.outgoing.debug('The integration is going on these channels:', record.channel);
channels = [].concat(record.channel);
}
for (const channel of channels) {
if (!this.triggers[channel]) {
this.triggers[channel] = {};
}
this.triggers[channel][record._id] = record;
}
}
removeIntegration(record) {
for (const trigger of Object.values(this.triggers)) {
delete trigger[record._id];
}
}
isTriggerEnabled(trigger) {
for (const trig of Object.values(this.triggers)) {
if (trig[trigger._id]) {
return trig[trigger._id].enabled;
}
}
return false;
}
| ({ historyId, step, integration, event, data, triggerWord, ranPrepareScript, prepareSentMessage, processSentMessage, resultMessage, finished, url, httpCallData, httpError, httpResult, error, errorStack }) {
const history = {
type: 'outgoing-webhook',
step
};
// Usually is only added on initial insert
if (integration) {
history.integration = integration;
}
// Usually is only added on initial insert
if (event) {
history.event = event;
}
if (data) {
history.data = data;
if (data.user) {
history.data.user = _.omit(data.user, ['meta', '$loki', 'services']);
}
if (data.room) {
history.data.room = _.omit(data.room, ['meta', '$loki', 'usernames']);
history.data.room.usernames = ['this_will_be_filled_in_with_usernames_when_replayed'];
}
}
if (triggerWord) {
history.triggerWord = triggerWord;
}
if (typeof ranPrepareScript !== 'undefined') {
history.ranPrepareScript = ranPrepareScript;
}
if (prepareSentMessage) {
history.prepareSentMessage = prepareSentMessage;
}
if (processSentMessage) {
history.processSentMessage = processSentMessage;
}
if (resultMessage) {
history.resultMessage = resultMessage;
}
if (typeof finished !== 'undefined') {
history.finished = finished;
}
if (url) {
history.url = url;
}
if (typeof httpCallData !== 'undefined') {
history.httpCallData = httpCallData;
}
if (httpError) {
history.httpError = httpError;
}
if (typeof httpResult !== 'undefined') {
history.httpResult = httpResult;
}
if (typeof error !== 'undefined') {
history.error = error;
}
if (typeof errorStack !== 'undefined') {
history.errorStack = errorStack;
}
if (historyId) {
RocketChat.models.IntegrationHistory.update({ _id: historyId }, { $set: history });
return historyId;
} else {
history._createdAt = new Date();
return RocketChat.models.IntegrationHistory.insert(Object.assign({ _id: Random.id() }, history));
}
}
//Trigger is the trigger, nameOrId is a string which is used to try and find a room, room is a room, message is a message, and data contains "user_name" if trigger.impersonateUser is truthful.
sendMessage({ trigger, nameOrId = '', room, message, data }) {
let user;
//Try to find the user who we are impersonating
if (trigger.impersonateUser) {
user = RocketChat.models.Users.findOneByUsername(data.user_name);
}
//If they don't exist (aka the trigger didn't contain a user) then we set the user based upon the
//configured username for the integration since this is required at all times.
if (!user) {
user = RocketChat.models.Users.findOneByUsername(trigger.username);
}
let tmpRoom;
if (nameOrId || trigger.targetRoom) {
tmpRoom = RocketChat.getRoomByNameOrIdWithOptionToJoin({ currentUserId: user._id, nameOrId: nameOrId || trigger.targetRoom, errorOnEmpty: false }) || room;
} else {
tmpRoom = room;
}
//If no room could be found, we won't be sending any messages but we'll warn in the logs
if (!tmpRoom) {
logger.outgoing.warn(`The Integration "${ trigger.name }" doesn't have a room configured nor did it provide a room to send the message to.`);
return;
}
logger.outgoing.debug(`Found a room for ${ trigger.name } which is: ${ tmpRoom.name } with a type of ${ tmpRoom.t }`);
message.bot = { i: trigger._id };
const defaultValues = {
alias: trigger.alias,
avatar: trigger.avatar,
emoji: trigger.emoji
};
if (tmpRoom.t === 'd') {
message.channel = `@${ tmpRoom._id }`;
} else {
message.channel = `#${ tmpRoom._id }`;
}
message = processWebhookMessage(message, user, defaultValues);
return message;
}
buildSandbox(store = {}) {
const sandbox = {
_, s, console, moment,
Store: {
set: (key, val) => store[key] = val,
get: (key) => store[key]
},
HTTP: (method, url, options) => {
try {
return {
result: HTTP.call(method, url, options)
};
} catch (error) {
return { error };
}
}
};
Object.keys(RocketChat.models).filter(k => !k.startsWith('_')).forEach(k => {
sandbox[k] = RocketChat.models[k];
});
return { store, sandbox };
}
getIntegrationScript(integration) {
const compiledScript = this.compiledScripts[integration._id];
if (compiledScript && +compiledScript._updatedAt === +integration._updatedAt) {
return compiledScript.script;
}
const script = integration.scriptCompiled;
const { store, sandbox } = this.buildSandbox();
let vmScript;
try {
logger.outgoing.info('Will evaluate script of Trigger', integration.name);
logger.outgoing.debug(script);
vmScript = this.vm.createScript(script, 'script.js');
vmScript.runInNewContext(sandbox);
if (sandbox.Script) {
this.compiledScripts[integration._id] = {
script: new sandbox.Script(),
store,
_updatedAt: integration._updatedAt
};
return this.compiledScripts[integration._id].script;
}
} catch (e) {
logger.outgoing.error(`Error evaluating Script in Trigger ${ integration.name }:`);
logger.outgoing.error(script.replace(/^/gm, ' '));
logger.outgoing.error('Stack Trace:');
logger.outgoing.error(e.stack.replace(/^/gm, ' '));
throw new Meteor.Error('error-evaluating-script');
}
if (!sandbox.Script) {
logger.outgoing.error(`Class "Script" not in Trigger ${ integration.name }:`);
throw new Meteor.Error('class-script-not-found');
}
}
hasScriptAndMethod(integration, method) {
if (integration.scriptEnabled !== true || !integration.scriptCompiled || integration.scriptCompiled.trim() === '') {
return false;
}
let script;
try {
script = this.getIntegrationScript(integration);
} catch (e) {
return false;
}
return typeof script[method] !== 'undefined';
}
executeScript(integration, method, params, historyId) {
let script;
try {
script = this.getIntegrationScript(integration);
} catch (e) {
this.updateHistory({ historyId, step: 'execute-script-getting-script', error: true, errorStack: e });
return;
}
if (!script[method]) {
logger.outgoing.error(`Method "${ method }" no found in the Integration "${ integration.name }"`);
this.updateHistory({ historyId, step: `execute-script-no-method-${ method }` });
return;
}
try {
const { sandbox } = this.buildSandbox(this.compiledScripts[integration._id].store);
sandbox.script = script;
sandbox.method = method;
sandbox.params = params;
this.updateHistory({ historyId, step: `execute-script-before-running-${ method }` });
const result = this.vm.runInNewContext('script[method](params)', sandbox, { timeout: 3000 });
logger.outgoing.debug(`Script method "${ method }" result of the Integration "${ integration.name }" is:`);
logger.outgoing.debug(result);
return result;
} catch (e) {
this.updateHistory({ historyId, step: `execute-script-error-running-${ method }`, error: true, errorStack: e.stack.replace(/^/gm, ' ') });
logger.outgoing.error(`Error running Script in the Integration ${ integration.name }:`);
logger.outgoing.debug(integration.scriptCompiled.replace(/^/gm, ' ')); // Only output the compiled script if debugging is enabled, so the logs don't get spammed.
logger.outgoing.error('Stack:');
logger.outgoing.error(e.stack.replace(/^/gm, ' '));
return;
}
}
eventNameArgumentsToObject() {
const argObject = {
event: arguments[0]
};
switch (argObject.event) {
case 'sendMessage':
if (arguments.length >= 3) {
argObject.message = arguments[1];
argObject.room = arguments[2];
}
break;
case 'fileUploaded':
if (arguments.length >= 2) {
const arghhh = arguments[1];
argObject.user = arghhh.user;
argObject.room = arghhh.room;
argObject.message = arghhh.message;
}
break;
case 'roomArchived':
if (arguments.length >= 3) {
argObject.room = arguments[1];
argObject.user = arguments[2];
}
break;
case 'roomCreated':
if (arguments.length >= 3) {
argObject.owner = arguments[1];
argObject.room = arguments[2];
}
break;
case 'roomJoined':
case 'roomLeft':
if (arguments.length >= 3) {
argObject.user = arguments[1];
argObject.room = arguments[2];
}
break;
case 'userCreated':
if (arguments.length >= 2) {
argObject.user = arguments[1];
}
break;
default:
logger.outgoing.warn(`An Unhandled Trigger Event was called: ${ argObject.event }`);
argObject.event = undefined;
break;
}
logger.outgoing.debug(`Got the event arguments for the event: ${ argObject.event }`, argObject);
return argObject;
}
mapEventArgsToData(data, { event, message, room, owner, user }) {
switch (event) {
case 'sendMessage':
data.channel_id = room._id;
data.channel_name = room.name;
data.message_id = message._id;
data.timestamp = message.ts;
data.user_id = message.u._id;
data.user_name = message.u.username;
data.text = message.msg;
if (message.alias) {
data.alias = message.alias;
}
if (message.bot) {
data.bot = message.bot;
}
break;
case 'fileUploaded':
data.channel_id = room._id;
data.channel_name = room.name;
data.message_id = message._id;
data.timestamp = message.ts;
data.user_id = message.u._id;
data.user_name = message.u.username;
data.text = message.msg;
data.user = user;
data.room = room;
data.message = message;
if (message.alias) {
data.alias = message.alias;
}
if (message.bot) {
data.bot = message.bot;
}
break;
case 'roomCreated':
data.channel_id = room._id;
data.channel_name = room.name;
data.timestamp = room.ts;
data.user_id = owner._id;
data.user_name = owner.username;
data.owner = owner;
data.room = room;
break;
case 'roomArchived':
case 'roomJoined':
case 'roomLeft':
data.timestamp = new Date();
data.channel_id = room._id;
data.channel_name = room.name;
data.user_id = user._id;
data.user_name = user.username;
data.user = user;
data.room = room;
if (user.type === 'bot') {
data.bot = true;
}
break;
case 'userCreated':
data.timestamp = user.createdAt;
data.user_id = user._id;
data.user_name = user.username;
data.user = user;
if (user.type === 'bot') {
data.bot = true;
}
break;
default:
break;
}
}
executeTriggers() {
logger.outgoing.debug('Execute Trigger:', arguments[0]);
const argObject = this.eventNameArgumentsToObject(...arguments);
const { event, message, room } = argObject;
//Each type of event should have an event and a room attached, otherwise we
//wouldn't know how to handle the trigger nor would we have anywhere to send the
//result of the integration
if (!event) {
return;
}
const triggersToExecute = [];
logger.outgoing.debug('Starting search for triggers for the room:', room ? room._id : '__any');
if (room) {
switch (room.t) {
case 'd':
const id = room._id.replace(message.u._id, '');
const username = _.without(room.usernames, message.u.username)[0];
if (this.triggers[`@${ id }`]) {
for (const trigger of Object.values(this.triggers[`@${ id }`])) {
triggersToExecute.push(trigger);
}
}
if (this.triggers.all_direct_messages) {
for (const trigger of Object.values(this.triggers.all_direct_messages)) {
triggersToExecute.push(trigger);
}
}
if (id !== username && this.triggers[`@${ username }`]) {
for (const trigger of Object.values(this.triggers[`@${ username }`])) {
triggersToExecute.push(trigger);
}
}
break;
case 'c':
if (this.triggers.all_public_channels) {
for (const trigger of Object.values(this.triggers.all_public_channels)) {
triggersToExecute.push(trigger);
}
}
if (this.triggers[`#${ room._id }`]) {
for (const trigger of Object.values(this.triggers[`#${ room._id }`])) {
triggersToExecute.push(trigger);
}
}
if (room._id !== room.name && this.triggers[`#${ room.name }`]) {
for (const trigger of Object.values(this.triggers[`#${ room.name }`])) {
triggersToExecute.push(trigger);
}
}
break;
default:
if (this.triggers.all_private_groups) {
for (const trigger of Object.values(this.triggers.all_private_groups)) {
triggersToExecute.push(trigger);
}
}
if (this.triggers[`#${ room._id }`]) {
for (const trigger of Object.values(this.triggers[`#${ room._id }`])) {
triggersToExecute.push(trigger);
}
}
if (room._id !== room.name && this.triggers[`#${ room.name }`]) {
for (const trigger of Object.values(this.triggers[`#${ room.name }`])) {
triggersToExecute.push(trigger);
}
}
break;
}
}
if (this.triggers.__any) {
//For outgoing integration which don't rely on rooms.
for (const trigger of Object.values(this.triggers.__any)) {
triggersToExecute.push(trigger);
}
}
logger.outgoing.debug(`Found ${ triggersToExecute.length } to iterate over and see if the match the event.`);
for (const triggerToExecute of triggersToExecute) {
logger.outgoing.debug(`Is "${ triggerToExecute.name }" enabled, ${ triggerToExecute.enabled }, and what is the event? ${ triggerToExecute.event }`);
if (triggerToExecute.enabled === true && triggerToExecute.event === event) {
this.executeTrigger(triggerToExecute, argObject);
}
}
}
executeTrigger(trigger, argObject) {
for (const url of trigger.urls) {
this.executeTriggerUrl(url, trigger, argObject, 0);
}
}
executeTriggerUrl(url, trigger, { event, message, room, owner, user }, theHistoryId, tries = 0) {
if (!this.isTriggerEnabled(trigger)) {
logger.outgoing.warn(`The trigger "${ trigger.name }" is no longer enabled, stopping execution of it at try: ${ tries }`);
return;
}
logger.outgoing.debug(`Starting to execute trigger: ${ trigger.name } (${ trigger._id })`);
let word;
//Not all triggers/events support triggerWords
if (RocketChat.integrations.outgoingEvents[event].use.triggerWords) {
if (trigger.triggerWords && trigger.triggerWords.length > 0) {
for (const triggerWord of trigger.triggerWords) {
if (!trigger.triggerWordAnywhere && message.msg.indexOf(triggerWord) === 0) {
word = triggerWord;
break;
} else if (trigger.triggerWordAnywhere && message.msg.includes(triggerWord)) {
word = triggerWord;
break;
}
}
// Stop if there are triggerWords but none match
if (!word) {
logger.outgoing.debug(`The trigger word which "${ trigger.name }" was expecting could not be found, not executing.`);
return;
}
}
}
const historyId = this.updateHistory({ step: 'start-execute-trigger-url', integration: trigger, event });
const data = {
token: trigger.token,
bot: false
};
if (word) {
data.trigger_word = word;
}
this.mapEventArgsToData(data, { trigger, event, message, room, owner, user });
this.updateHistory({ historyId, step: 'mapped-args-to-data', data, triggerWord: word });
logger.outgoing.info(`Will be executing the Integration "${ trigger.name }" to the url: ${ url }`);
logger.outgoing.debug(data);
let opts = {
params: {},
method: 'POST',
url,
data,
auth: undefined,
npmRequestOptions: {
rejectUnauthorized: !RocketChat.settings.get('Allow_Invalid_SelfSigned_Certs'),
strictSSL: !RocketChat.settings.get('Allow_Invalid_SelfSigned_Certs')
},
headers: {
'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2227.0 Safari/537.36'
}
};
if (this.hasScriptAndMethod(trigger, 'prepare_outgoing_request')) {
opts = this.executeScript(trigger, 'prepare_outgoing_request', { request: opts }, historyId);
}
this.updateHistory({ historyId, step: 'after-maybe-ran-prepare', ranPrepareScript: true });
if (!opts) {
this.updateHistory({ historyId, step: 'after-prepare-no-opts', finished: true });
return;
}
if (opts.message) {
const prepareMessage = this.sendMessage({ trigger, room, message: opts.message, data });
this.updateHistory({ historyId, step: 'after-prepare-send-message', prepareSentMessage: prepareMessage });
}
if (!opts.url || !opts.method) {
this.updateHistory({ historyId, step: 'after-prepare-no-url_or_method', finished: true });
return;
}
this.updateHistory({ historyId, step: 'pre-http-call', url: opts.url, httpCallData: opts.data });
HTTP.call(opts.method, opts.url, opts, (error, result) => {
if (!result) {
logger.outgoing.warn(`Result for the Integration ${ trigger.name } to ${ url } is empty`);
} else {
logger.outgoing.info(`Status code for the Integration ${ trigger.name } to ${ url } is ${ result.statusCode }`);
}
this.updateHistory({ historyId, step: 'after-http-call', httpError: error, httpResult: result });
if (this.hasScriptAndMethod(trigger, 'process_outgoing_response')) {
const sandbox = {
request: opts,
response: {
error,
status_code: result ? result.statusCode : undefined, //These values will be undefined to close issues #4175, #5762, and #5896
content: result ? result.data : undefined,
content_raw: result ? result.content : undefined,
headers: result ? result.headers : {}
}
};
const scriptResult = this.executeScript(trigger, 'process_outgoing_response', sandbox, historyId);
if (scriptResult && scriptResult.content) {
const resultMessage = this.sendMessage({ trigger, room, message: scriptResult.content, data });
this.updateHistory({ historyId, step: 'after-process-send-message', processSentMessage: resultMessage, finished: true });
return;
}
if (scriptResult === false) {
this.updateHistory({ historyId, step: 'after-process-false-result', finished: true });
return;
}
}
// if the result contained nothing or wasn't a successful statusCode
if (!result || !this.successResults.includes(result.statusCode)) {
if (error) {
logger.outgoing.error(`Error for the Integration "${ trigger.name }" to ${ url } is:`);
logger.outgoing.error(error);
}
if (result) {
logger.outgoing.error(`Error for the Integration "${ trigger.name }" to ${ url } is:`);
logger.outgoing.error(result);
if (result.statusCode === 410) {
this.updateHistory({ historyId, step: 'after-process-http-status-410', error: true });
logger.outgoing.error(`Disabling the Integration "${ trigger.name }" because the status code was 401 (Gone).`);
RocketChat.models.Integrations.update({ _id: trigger._id }, { $set: { enabled: false }});
return;
}
if (result.statusCode === 500) {
this.updateHistory({ historyId, step: 'after-process-http-status-500', error: true });
logger.outgoing.error(`Error "500" for the Integration "${ trigger.name }" to ${ url }.`);
logger.outgoing.error(result.content);
return;
}
}
if (trigger.retryFailedCalls) {
if (tries < trigger.retryCount && trigger.retryDelay) {
this.updateHistory({ historyId, error: true, step: `going-to-retry-${ tries + 1 }` });
let waitTime;
switch (trigger.retryDelay) {
case 'powers-of-ten':
// Try again in 0.1s, 1s, 10s, 1m40s, 16m40s, 2h46m40s, 27h46m40s, etc
waitTime = Math.pow(10, tries + 2);
break;
case 'powers-of-two':
// 2 seconds, 4 seconds, 8 seconds
waitTime = Math.pow(2, tries + 1) * 1000;
break;
case 'increments-of-two':
// 2 second, 4 seconds, 6 seconds, etc
waitTime = (tries + 1) * 2 * 1000;
break;
default:
const er = new Error('The integration\'s retryDelay setting is invalid.');
this.updateHistory({ historyId, step: 'failed-and-retry-delay-is-invalid', error: true, errorStack: er.stack });
return;
}
logger.outgoing.info(`Trying the Integration ${ trigger.name } to ${ url } again in ${ waitTime } milliseconds.`);
Meteor.setTimeout(() => {
this.executeTriggerUrl(url, trigger, { event, message, room, owner, user }, historyId, tries + 1);
}, waitTime);
} else {
this.updateHistory({ historyId, step: 'too-many-retries', error: true });
}
} else {
this.updateHistory({ historyId, step: 'failed-and-not-configured-to-retry', error: true });
}
return;
}
//process outgoing webhook response as a new message
if (result && this.successResults.includes(result.statusCode)) {
if (result && result.data && (result.data.text || result.data.attachments)) {
const resultMsg = this.sendMessage({ trigger, room, message: result.data, data });
this.updateHistory({ historyId, step: 'url-response-sent-message', resultMessage: resultMsg, finished: true });
}
}
});
}
replay(integration, history) {
if (!integration || integration.type !== 'webhook-outgoing') {
throw new Meteor.Error('integration-type-must-be-outgoing', 'The integration type to replay must be an outgoing webhook.');
}
if (!history || !history.data) {
throw new Meteor.Error('history-data-must-be-defined', 'The history data must be defined to replay an integration.');
}
const event = history.event;
const message = RocketChat.models.Messages.findOneById(history.data.message_id);
const room = RocketChat.models.Rooms.findOneById(history.data.channel_id);
const user = RocketChat.models.Users.findOneById(history.data.user_id);
let owner;
if (history.data.owner && history.data.owner._id) {
owner = RocketChat.models.Users.findOneById(history.data.owner._id);
}
this.executeTriggerUrl(history.url, integration, { event, message, room, owner, user });
}
};
| updateHistory | identifier_name |
triggerHandler.js | /* global logger, processWebhookMessage */
import moment from 'moment';
RocketChat.integrations.triggerHandler = new class RocketChatIntegrationHandler {
constructor() {
this.vm = Npm.require('vm');
this.successResults = [200, 201, 202];
this.compiledScripts = {};
this.triggers = {};
RocketChat.models.Integrations.find({type: 'webhook-outgoing'}).observe({
added: (record) => {
this.addIntegration(record);
},
changed: (record) => {
this.removeIntegration(record);
this.addIntegration(record);
},
removed: (record) => {
this.removeIntegration(record);
}
});
}
addIntegration(record) {
logger.outgoing.debug(`Adding the integration ${ record.name } of the event ${ record.event }!`);
let channels;
if (record.event && !RocketChat.integrations.outgoingEvents[record.event].use.channel) {
logger.outgoing.debug('The integration doesnt rely on channels.');
//We don't use any channels, so it's special ;)
channels = ['__any'];
} else if (_.isEmpty(record.channel)) {
logger.outgoing.debug('The integration had an empty channel property, so it is going on all the public channels.');
channels = ['all_public_channels'];
} else {
logger.outgoing.debug('The integration is going on these channels:', record.channel);
channels = [].concat(record.channel);
}
for (const channel of channels) {
if (!this.triggers[channel]) {
this.triggers[channel] = {};
}
this.triggers[channel][record._id] = record;
}
}
removeIntegration(record) {
for (const trigger of Object.values(this.triggers)) {
delete trigger[record._id];
}
}
isTriggerEnabled(trigger) {
for (const trig of Object.values(this.triggers)) {
if (trig[trigger._id]) {
return trig[trigger._id].enabled;
}
}
return false;
}
updateHistory({ historyId, step, integration, event, data, triggerWord, ranPrepareScript, prepareSentMessage, processSentMessage, resultMessage, finished, url, httpCallData, httpError, httpResult, error, errorStack }) {
const history = {
type: 'outgoing-webhook',
step
};
// Usually is only added on initial insert
if (integration) {
history.integration = integration;
}
// Usually is only added on initial insert
if (event) {
history.event = event;
}
if (data) {
history.data = data;
if (data.user) {
history.data.user = _.omit(data.user, ['meta', '$loki', 'services']);
}
if (data.room) {
history.data.room = _.omit(data.room, ['meta', '$loki', 'usernames']);
history.data.room.usernames = ['this_will_be_filled_in_with_usernames_when_replayed'];
}
}
if (triggerWord) {
history.triggerWord = triggerWord;
}
if (typeof ranPrepareScript !== 'undefined') {
history.ranPrepareScript = ranPrepareScript;
}
if (prepareSentMessage) {
history.prepareSentMessage = prepareSentMessage;
}
if (processSentMessage) {
history.processSentMessage = processSentMessage;
}
if (resultMessage) {
history.resultMessage = resultMessage;
}
if (typeof finished !== 'undefined') {
history.finished = finished;
}
if (url) {
history.url = url;
}
if (typeof httpCallData !== 'undefined') {
history.httpCallData = httpCallData;
}
if (httpError) {
history.httpError = httpError;
}
if (typeof httpResult !== 'undefined') {
history.httpResult = httpResult;
}
if (typeof error !== 'undefined') {
history.error = error;
}
if (typeof errorStack !== 'undefined') {
history.errorStack = errorStack;
}
if (historyId) {
RocketChat.models.IntegrationHistory.update({ _id: historyId }, { $set: history });
return historyId;
} else {
history._createdAt = new Date();
return RocketChat.models.IntegrationHistory.insert(Object.assign({ _id: Random.id() }, history));
}
}
//Trigger is the trigger, nameOrId is a string which is used to try and find a room, room is a room, message is a message, and data contains "user_name" if trigger.impersonateUser is truthful.
sendMessage({ trigger, nameOrId = '', room, message, data }) {
let user;
//Try to find the user who we are impersonating
if (trigger.impersonateUser) {
user = RocketChat.models.Users.findOneByUsername(data.user_name);
}
//If they don't exist (aka the trigger didn't contain a user) then we set the user based upon the
//configured username for the integration since this is required at all times.
if (!user) {
user = RocketChat.models.Users.findOneByUsername(trigger.username);
}
let tmpRoom;
if (nameOrId || trigger.targetRoom) {
tmpRoom = RocketChat.getRoomByNameOrIdWithOptionToJoin({ currentUserId: user._id, nameOrId: nameOrId || trigger.targetRoom, errorOnEmpty: false }) || room;
} else {
tmpRoom = room;
}
//If no room could be found, we won't be sending any messages but we'll warn in the logs
if (!tmpRoom) {
logger.outgoing.warn(`The Integration "${ trigger.name }" doesn't have a room configured nor did it provide a room to send the message to.`);
return;
}
logger.outgoing.debug(`Found a room for ${ trigger.name } which is: ${ tmpRoom.name } with a type of ${ tmpRoom.t }`);
message.bot = { i: trigger._id };
const defaultValues = {
alias: trigger.alias,
avatar: trigger.avatar,
emoji: trigger.emoji
};
if (tmpRoom.t === 'd') {
message.channel = `@${ tmpRoom._id }`;
} else {
message.channel = `#${ tmpRoom._id }`;
}
message = processWebhookMessage(message, user, defaultValues);
return message;
}
buildSandbox(store = {}) {
const sandbox = {
_, s, console, moment,
Store: {
set: (key, val) => store[key] = val,
get: (key) => store[key]
},
HTTP: (method, url, options) => {
try {
return {
result: HTTP.call(method, url, options)
};
} catch (error) {
return { error };
}
}
};
Object.keys(RocketChat.models).filter(k => !k.startsWith('_')).forEach(k => {
sandbox[k] = RocketChat.models[k];
});
return { store, sandbox };
}
getIntegrationScript(integration) {
const compiledScript = this.compiledScripts[integration._id];
if (compiledScript && +compiledScript._updatedAt === +integration._updatedAt) {
return compiledScript.script;
}
const script = integration.scriptCompiled;
const { store, sandbox } = this.buildSandbox();
let vmScript;
try {
logger.outgoing.info('Will evaluate script of Trigger', integration.name);
logger.outgoing.debug(script);
vmScript = this.vm.createScript(script, 'script.js');
vmScript.runInNewContext(sandbox);
if (sandbox.Script) {
this.compiledScripts[integration._id] = {
script: new sandbox.Script(),
store,
_updatedAt: integration._updatedAt
};
return this.compiledScripts[integration._id].script;
}
} catch (e) {
logger.outgoing.error(`Error evaluating Script in Trigger ${ integration.name }:`);
logger.outgoing.error(script.replace(/^/gm, ' '));
logger.outgoing.error('Stack Trace:');
logger.outgoing.error(e.stack.replace(/^/gm, ' '));
throw new Meteor.Error('error-evaluating-script');
}
if (!sandbox.Script) {
logger.outgoing.error(`Class "Script" not in Trigger ${ integration.name }:`);
throw new Meteor.Error('class-script-not-found');
}
}
hasScriptAndMethod(integration, method) {
if (integration.scriptEnabled !== true || !integration.scriptCompiled || integration.scriptCompiled.trim() === '') {
return false;
}
let script;
try {
script = this.getIntegrationScript(integration);
} catch (e) {
return false;
}
return typeof script[method] !== 'undefined';
}
executeScript(integration, method, params, historyId) {
let script;
try {
script = this.getIntegrationScript(integration);
} catch (e) {
this.updateHistory({ historyId, step: 'execute-script-getting-script', error: true, errorStack: e });
return;
}
if (!script[method]) {
logger.outgoing.error(`Method "${ method }" no found in the Integration "${ integration.name }"`);
this.updateHistory({ historyId, step: `execute-script-no-method-${ method }` });
return;
}
try {
const { sandbox } = this.buildSandbox(this.compiledScripts[integration._id].store);
sandbox.script = script;
sandbox.method = method;
sandbox.params = params;
this.updateHistory({ historyId, step: `execute-script-before-running-${ method }` });
const result = this.vm.runInNewContext('script[method](params)', sandbox, { timeout: 3000 });
logger.outgoing.debug(`Script method "${ method }" result of the Integration "${ integration.name }" is:`);
logger.outgoing.debug(result);
return result;
} catch (e) {
this.updateHistory({ historyId, step: `execute-script-error-running-${ method }`, error: true, errorStack: e.stack.replace(/^/gm, ' ') });
logger.outgoing.error(`Error running Script in the Integration ${ integration.name }:`);
logger.outgoing.debug(integration.scriptCompiled.replace(/^/gm, ' ')); // Only output the compiled script if debugging is enabled, so the logs don't get spammed.
logger.outgoing.error('Stack:');
logger.outgoing.error(e.stack.replace(/^/gm, ' '));
return;
}
}
eventNameArgumentsToObject() {
const argObject = {
event: arguments[0]
};
switch (argObject.event) {
case 'sendMessage':
if (arguments.length >= 3) {
argObject.message = arguments[1];
argObject.room = arguments[2];
}
break;
case 'fileUploaded':
if (arguments.length >= 2) {
const arghhh = arguments[1];
argObject.user = arghhh.user;
argObject.room = arghhh.room;
argObject.message = arghhh.message;
}
break;
case 'roomArchived':
if (arguments.length >= 3) {
argObject.room = arguments[1];
argObject.user = arguments[2];
}
break;
case 'roomCreated':
if (arguments.length >= 3) {
argObject.owner = arguments[1];
argObject.room = arguments[2];
}
break;
case 'roomJoined':
case 'roomLeft':
if (arguments.length >= 3) {
argObject.user = arguments[1];
argObject.room = arguments[2];
}
break;
case 'userCreated':
if (arguments.length >= 2) {
argObject.user = arguments[1];
}
break;
default:
logger.outgoing.warn(`An Unhandled Trigger Event was called: ${ argObject.event }`);
argObject.event = undefined;
break;
}
logger.outgoing.debug(`Got the event arguments for the event: ${ argObject.event }`, argObject);
return argObject;
}
mapEventArgsToData(data, { event, message, room, owner, user }) {
switch (event) {
case 'sendMessage':
data.channel_id = room._id;
data.channel_name = room.name;
data.message_id = message._id;
data.timestamp = message.ts;
data.user_id = message.u._id;
data.user_name = message.u.username;
data.text = message.msg;
if (message.alias) {
data.alias = message.alias;
}
if (message.bot) {
data.bot = message.bot;
}
break;
case 'fileUploaded':
data.channel_id = room._id;
data.channel_name = room.name;
data.message_id = message._id;
data.timestamp = message.ts;
data.user_id = message.u._id;
data.user_name = message.u.username;
data.text = message.msg;
data.user = user;
data.room = room;
data.message = message;
if (message.alias) {
data.alias = message.alias;
}
if (message.bot) {
data.bot = message.bot;
}
break;
case 'roomCreated':
data.channel_id = room._id;
data.channel_name = room.name;
data.timestamp = room.ts;
data.user_id = owner._id;
data.user_name = owner.username;
data.owner = owner;
data.room = room;
break;
case 'roomArchived':
case 'roomJoined':
case 'roomLeft':
data.timestamp = new Date();
data.channel_id = room._id;
data.channel_name = room.name;
data.user_id = user._id;
data.user_name = user.username;
data.user = user;
data.room = room;
if (user.type === 'bot') {
data.bot = true;
}
break;
case 'userCreated':
data.timestamp = user.createdAt;
data.user_id = user._id;
data.user_name = user.username;
data.user = user;
if (user.type === 'bot') {
data.bot = true;
}
break;
default:
break;
}
}
executeTriggers() {
logger.outgoing.debug('Execute Trigger:', arguments[0]);
const argObject = this.eventNameArgumentsToObject(...arguments);
const { event, message, room } = argObject; | if (!event) {
return;
}
const triggersToExecute = [];
logger.outgoing.debug('Starting search for triggers for the room:', room ? room._id : '__any');
if (room) {
switch (room.t) {
case 'd':
const id = room._id.replace(message.u._id, '');
const username = _.without(room.usernames, message.u.username)[0];
if (this.triggers[`@${ id }`]) {
for (const trigger of Object.values(this.triggers[`@${ id }`])) {
triggersToExecute.push(trigger);
}
}
if (this.triggers.all_direct_messages) {
for (const trigger of Object.values(this.triggers.all_direct_messages)) {
triggersToExecute.push(trigger);
}
}
if (id !== username && this.triggers[`@${ username }`]) {
for (const trigger of Object.values(this.triggers[`@${ username }`])) {
triggersToExecute.push(trigger);
}
}
break;
case 'c':
if (this.triggers.all_public_channels) {
for (const trigger of Object.values(this.triggers.all_public_channels)) {
triggersToExecute.push(trigger);
}
}
if (this.triggers[`#${ room._id }`]) {
for (const trigger of Object.values(this.triggers[`#${ room._id }`])) {
triggersToExecute.push(trigger);
}
}
if (room._id !== room.name && this.triggers[`#${ room.name }`]) {
for (const trigger of Object.values(this.triggers[`#${ room.name }`])) {
triggersToExecute.push(trigger);
}
}
break;
default:
if (this.triggers.all_private_groups) {
for (const trigger of Object.values(this.triggers.all_private_groups)) {
triggersToExecute.push(trigger);
}
}
if (this.triggers[`#${ room._id }`]) {
for (const trigger of Object.values(this.triggers[`#${ room._id }`])) {
triggersToExecute.push(trigger);
}
}
if (room._id !== room.name && this.triggers[`#${ room.name }`]) {
for (const trigger of Object.values(this.triggers[`#${ room.name }`])) {
triggersToExecute.push(trigger);
}
}
break;
}
}
if (this.triggers.__any) {
//For outgoing integration which don't rely on rooms.
for (const trigger of Object.values(this.triggers.__any)) {
triggersToExecute.push(trigger);
}
}
logger.outgoing.debug(`Found ${ triggersToExecute.length } to iterate over and see if the match the event.`);
for (const triggerToExecute of triggersToExecute) {
logger.outgoing.debug(`Is "${ triggerToExecute.name }" enabled, ${ triggerToExecute.enabled }, and what is the event? ${ triggerToExecute.event }`);
if (triggerToExecute.enabled === true && triggerToExecute.event === event) {
this.executeTrigger(triggerToExecute, argObject);
}
}
}
executeTrigger(trigger, argObject) {
for (const url of trigger.urls) {
this.executeTriggerUrl(url, trigger, argObject, 0);
}
}
executeTriggerUrl(url, trigger, { event, message, room, owner, user }, theHistoryId, tries = 0) {
if (!this.isTriggerEnabled(trigger)) {
logger.outgoing.warn(`The trigger "${ trigger.name }" is no longer enabled, stopping execution of it at try: ${ tries }`);
return;
}
logger.outgoing.debug(`Starting to execute trigger: ${ trigger.name } (${ trigger._id })`);
let word;
//Not all triggers/events support triggerWords
if (RocketChat.integrations.outgoingEvents[event].use.triggerWords) {
if (trigger.triggerWords && trigger.triggerWords.length > 0) {
for (const triggerWord of trigger.triggerWords) {
if (!trigger.triggerWordAnywhere && message.msg.indexOf(triggerWord) === 0) {
word = triggerWord;
break;
} else if (trigger.triggerWordAnywhere && message.msg.includes(triggerWord)) {
word = triggerWord;
break;
}
}
// Stop if there are triggerWords but none match
if (!word) {
logger.outgoing.debug(`The trigger word which "${ trigger.name }" was expecting could not be found, not executing.`);
return;
}
}
}
const historyId = this.updateHistory({ step: 'start-execute-trigger-url', integration: trigger, event });
const data = {
token: trigger.token,
bot: false
};
if (word) {
data.trigger_word = word;
}
this.mapEventArgsToData(data, { trigger, event, message, room, owner, user });
this.updateHistory({ historyId, step: 'mapped-args-to-data', data, triggerWord: word });
logger.outgoing.info(`Will be executing the Integration "${ trigger.name }" to the url: ${ url }`);
logger.outgoing.debug(data);
let opts = {
params: {},
method: 'POST',
url,
data,
auth: undefined,
npmRequestOptions: {
rejectUnauthorized: !RocketChat.settings.get('Allow_Invalid_SelfSigned_Certs'),
strictSSL: !RocketChat.settings.get('Allow_Invalid_SelfSigned_Certs')
},
headers: {
'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2227.0 Safari/537.36'
}
};
if (this.hasScriptAndMethod(trigger, 'prepare_outgoing_request')) {
opts = this.executeScript(trigger, 'prepare_outgoing_request', { request: opts }, historyId);
}
this.updateHistory({ historyId, step: 'after-maybe-ran-prepare', ranPrepareScript: true });
if (!opts) {
this.updateHistory({ historyId, step: 'after-prepare-no-opts', finished: true });
return;
}
if (opts.message) {
const prepareMessage = this.sendMessage({ trigger, room, message: opts.message, data });
this.updateHistory({ historyId, step: 'after-prepare-send-message', prepareSentMessage: prepareMessage });
}
if (!opts.url || !opts.method) {
this.updateHistory({ historyId, step: 'after-prepare-no-url_or_method', finished: true });
return;
}
this.updateHistory({ historyId, step: 'pre-http-call', url: opts.url, httpCallData: opts.data });
HTTP.call(opts.method, opts.url, opts, (error, result) => {
if (!result) {
logger.outgoing.warn(`Result for the Integration ${ trigger.name } to ${ url } is empty`);
} else {
logger.outgoing.info(`Status code for the Integration ${ trigger.name } to ${ url } is ${ result.statusCode }`);
}
this.updateHistory({ historyId, step: 'after-http-call', httpError: error, httpResult: result });
if (this.hasScriptAndMethod(trigger, 'process_outgoing_response')) {
const sandbox = {
request: opts,
response: {
error,
status_code: result ? result.statusCode : undefined, //These values will be undefined to close issues #4175, #5762, and #5896
content: result ? result.data : undefined,
content_raw: result ? result.content : undefined,
headers: result ? result.headers : {}
}
};
const scriptResult = this.executeScript(trigger, 'process_outgoing_response', sandbox, historyId);
if (scriptResult && scriptResult.content) {
const resultMessage = this.sendMessage({ trigger, room, message: scriptResult.content, data });
this.updateHistory({ historyId, step: 'after-process-send-message', processSentMessage: resultMessage, finished: true });
return;
}
if (scriptResult === false) {
this.updateHistory({ historyId, step: 'after-process-false-result', finished: true });
return;
}
}
// if the result contained nothing or wasn't a successful statusCode
if (!result || !this.successResults.includes(result.statusCode)) {
if (error) {
logger.outgoing.error(`Error for the Integration "${ trigger.name }" to ${ url } is:`);
logger.outgoing.error(error);
}
if (result) {
logger.outgoing.error(`Error for the Integration "${ trigger.name }" to ${ url } is:`);
logger.outgoing.error(result);
if (result.statusCode === 410) {
this.updateHistory({ historyId, step: 'after-process-http-status-410', error: true });
logger.outgoing.error(`Disabling the Integration "${ trigger.name }" because the status code was 401 (Gone).`);
RocketChat.models.Integrations.update({ _id: trigger._id }, { $set: { enabled: false }});
return;
}
if (result.statusCode === 500) {
this.updateHistory({ historyId, step: 'after-process-http-status-500', error: true });
logger.outgoing.error(`Error "500" for the Integration "${ trigger.name }" to ${ url }.`);
logger.outgoing.error(result.content);
return;
}
}
if (trigger.retryFailedCalls) {
if (tries < trigger.retryCount && trigger.retryDelay) {
this.updateHistory({ historyId, error: true, step: `going-to-retry-${ tries + 1 }` });
let waitTime;
switch (trigger.retryDelay) {
case 'powers-of-ten':
// Try again in 0.1s, 1s, 10s, 1m40s, 16m40s, 2h46m40s, 27h46m40s, etc
waitTime = Math.pow(10, tries + 2);
break;
case 'powers-of-two':
// 2 seconds, 4 seconds, 8 seconds
waitTime = Math.pow(2, tries + 1) * 1000;
break;
case 'increments-of-two':
// 2 second, 4 seconds, 6 seconds, etc
waitTime = (tries + 1) * 2 * 1000;
break;
default:
const er = new Error('The integration\'s retryDelay setting is invalid.');
this.updateHistory({ historyId, step: 'failed-and-retry-delay-is-invalid', error: true, errorStack: er.stack });
return;
}
logger.outgoing.info(`Trying the Integration ${ trigger.name } to ${ url } again in ${ waitTime } milliseconds.`);
Meteor.setTimeout(() => {
this.executeTriggerUrl(url, trigger, { event, message, room, owner, user }, historyId, tries + 1);
}, waitTime);
} else {
this.updateHistory({ historyId, step: 'too-many-retries', error: true });
}
} else {
this.updateHistory({ historyId, step: 'failed-and-not-configured-to-retry', error: true });
}
return;
}
//process outgoing webhook response as a new message
if (result && this.successResults.includes(result.statusCode)) {
if (result && result.data && (result.data.text || result.data.attachments)) {
const resultMsg = this.sendMessage({ trigger, room, message: result.data, data });
this.updateHistory({ historyId, step: 'url-response-sent-message', resultMessage: resultMsg, finished: true });
}
}
});
}
replay(integration, history) {
if (!integration || integration.type !== 'webhook-outgoing') {
throw new Meteor.Error('integration-type-must-be-outgoing', 'The integration type to replay must be an outgoing webhook.');
}
if (!history || !history.data) {
throw new Meteor.Error('history-data-must-be-defined', 'The history data must be defined to replay an integration.');
}
const event = history.event;
const message = RocketChat.models.Messages.findOneById(history.data.message_id);
const room = RocketChat.models.Rooms.findOneById(history.data.channel_id);
const user = RocketChat.models.Users.findOneById(history.data.user_id);
let owner;
if (history.data.owner && history.data.owner._id) {
owner = RocketChat.models.Users.findOneById(history.data.owner._id);
}
this.executeTriggerUrl(history.url, integration, { event, message, room, owner, user });
}
}; |
//Each type of event should have an event and a room attached, otherwise we
//wouldn't know how to handle the trigger nor would we have anywhere to send the
//result of the integration | random_line_split |
pad-test.js | var assert = require('chai').assert;
var Pad = require('../lib/pad');
describe('Pad', function() {
it('should be an object', function() {
var pad = new Pad();
assert.isObject(pad);
});
it('should have a x coordinate of 310 by default', function() {
var terminator = new Pad();
assert.equal(terminator.x, 310);
});
it('should have a y coordinate of 470 by default', function() {
var jon = new Pad();
assert.equal(jon.y, 470);
});
it('should have a r value of 23 by default', function() {
var terminator = new Pad();
assert.equal(terminator.r, 23);
});
it('should have a sAngle value of 0 by default', function() {
var jon = new Pad();
assert.equal(jon.sAngle, 0);
}); |
it('should have an eAngle value of 2*Math.PI by default', function() {
var jon = new Pad();
assert.equal(jon.eAngle, 2*Math.PI);
});
it('should have a draw function', function(){
var jon = new Pad();
assert.isFunction(jon.draw);
});
}); | random_line_split | |
views.py | from django.http import HttpResponse
from django.core.servers.basehttp import FileWrapper
from django.contrib.auth.models import User
from django.shortcuts import render_to_response, redirect, get_object_or_404
from requests import get
from urllib import urlretrieve
from common.models import Repository
from common.util import get_context
def cgit_url(user_name, repo_name, method, path, query=None):
url = 'http://localhost:8080/view'
if method == 'summary':
base = '%s/%s/%s' %(url, user_name, repo_name)
else:
base = '%s/%s/%s/%s' %(url, user_name, repo_name, method)
if path is not None:
base = '%s/%s' %(base, path)
if query is not None and len(query)>1:
base = "%s?%s" % (base, query)
print base
return base
def cumulative_path(path):
if path is None or len(path) == 0:
return path
c = [path[0]]
for part in path[1:]:
c.append('%s/%s'%(c[-1], part))
return c
def view_index(request):
return redirect('index')
def user_index(request, user_name):
return redirect('repo_list', user_name)
def repo_plain(request, user_name, repo_name, path, prefix='plain'):
|
def repo_snapshot(request, user_name, repo_name, path):
user = request.user
owner = get_object_or_404(User, username=user_name)
repo = get_object_or_404(Repository, owner=owner, name=repo_name)
collaborators = repo.collaborators.all()
access = repo.user_access(user)
if access is None:
return HttpResponse('Not authorized', status=401)
query = request.GET.urlencode()
filename = path.split('/')[-1]
url = cgit_url(user_name, repo_name, 'snapshot', path, query)
(fname, info) = urlretrieve(url)
response = HttpResponse(FileWrapper(open(fname)), content_type='application/force-download')
response['Content-Disposition'] = 'attachment; filename="%s"' % filename
return response
def repo_browse(request, user_name, repo_name, method='summary', path=None):
user = request.user
owner = get_object_or_404(User, username=user_name)
repo = get_object_or_404(Repository, owner=owner, name=repo_name)
collaborators = repo.collaborators.all()
access = repo.user_access(user)
if access is None:
return HttpResponse('Not authorized', status=401)
commit_id = request.GET.get('id')
q = request.GET.get('q', '')
qtype = request.GET.get('qt', 'grep')
messages = {
'grep' : 'Log Message',
'author': 'Author',
'committer' : 'Committer',
'range' : 'Range' }
search_text = messages.get(qtype, messages['grep'])
if method == 'tree':
file_path = path.split('/')
path_parts = cumulative_path(file_path)
file_path = zip(file_path, path_parts)
else:
file_path = None
query = request.GET.urlencode()
url = cgit_url(user_name, repo_name, method, path, query)
text = get(url)
context = get_context(request, {'owner': owner, 'repo_html':text.text, 'repo':repo,
'access':access, 'id':commit_id, 'method':method,
'q':q, 'qtype':qtype, 'search_text':search_text, 'file_path':file_path})
return render_to_response('viewer/repo_view.html', context)
| user = request.user
owner = get_object_or_404(User, username=user_name)
repo = get_object_or_404(Repository, owner=owner, name=repo_name)
collaborators = repo.collaborators.all()
access = repo.user_access(user)
if access is None:
return HttpResponse('Not authorized', status=401)
query = request.GET.urlencode()
print query
url = cgit_url(user_name, repo_name, prefix, path, query)
(fname, info) = urlretrieve(url)
response = HttpResponse(FileWrapper(open(fname)), content_type='text/plain')
return response | identifier_body |
views.py | from django.http import HttpResponse
from django.core.servers.basehttp import FileWrapper
from django.contrib.auth.models import User
from django.shortcuts import render_to_response, redirect, get_object_or_404
from requests import get
from urllib import urlretrieve
from common.models import Repository
from common.util import get_context
def cgit_url(user_name, repo_name, method, path, query=None):
url = 'http://localhost:8080/view'
if method == 'summary':
base = '%s/%s/%s' %(url, user_name, repo_name)
else:
base = '%s/%s/%s/%s' %(url, user_name, repo_name, method)
if path is not None:
base = '%s/%s' %(base, path)
if query is not None and len(query)>1:
base = "%s?%s" % (base, query)
print base
return base
def cumulative_path(path):
if path is None or len(path) == 0:
return path
c = [path[0]]
for part in path[1:]:
c.append('%s/%s'%(c[-1], part))
return c
def view_index(request):
return redirect('index')
def user_index(request, user_name):
return redirect('repo_list', user_name)
def repo_plain(request, user_name, repo_name, path, prefix='plain'):
user = request.user
owner = get_object_or_404(User, username=user_name)
repo = get_object_or_404(Repository, owner=owner, name=repo_name)
collaborators = repo.collaborators.all()
access = repo.user_access(user)
if access is None:
return HttpResponse('Not authorized', status=401)
query = request.GET.urlencode()
print query
url = cgit_url(user_name, repo_name, prefix, path, query)
(fname, info) = urlretrieve(url)
response = HttpResponse(FileWrapper(open(fname)), content_type='text/plain')
return response
def repo_snapshot(request, user_name, repo_name, path):
user = request.user
owner = get_object_or_404(User, username=user_name)
repo = get_object_or_404(Repository, owner=owner, name=repo_name)
collaborators = repo.collaborators.all() | return HttpResponse('Not authorized', status=401)
query = request.GET.urlencode()
filename = path.split('/')[-1]
url = cgit_url(user_name, repo_name, 'snapshot', path, query)
(fname, info) = urlretrieve(url)
response = HttpResponse(FileWrapper(open(fname)), content_type='application/force-download')
response['Content-Disposition'] = 'attachment; filename="%s"' % filename
return response
def repo_browse(request, user_name, repo_name, method='summary', path=None):
user = request.user
owner = get_object_or_404(User, username=user_name)
repo = get_object_or_404(Repository, owner=owner, name=repo_name)
collaborators = repo.collaborators.all()
access = repo.user_access(user)
if access is None:
return HttpResponse('Not authorized', status=401)
commit_id = request.GET.get('id')
q = request.GET.get('q', '')
qtype = request.GET.get('qt', 'grep')
messages = {
'grep' : 'Log Message',
'author': 'Author',
'committer' : 'Committer',
'range' : 'Range' }
search_text = messages.get(qtype, messages['grep'])
if method == 'tree':
file_path = path.split('/')
path_parts = cumulative_path(file_path)
file_path = zip(file_path, path_parts)
else:
file_path = None
query = request.GET.urlencode()
url = cgit_url(user_name, repo_name, method, path, query)
text = get(url)
context = get_context(request, {'owner': owner, 'repo_html':text.text, 'repo':repo,
'access':access, 'id':commit_id, 'method':method,
'q':q, 'qtype':qtype, 'search_text':search_text, 'file_path':file_path})
return render_to_response('viewer/repo_view.html', context) |
access = repo.user_access(user)
if access is None: | random_line_split |
views.py | from django.http import HttpResponse
from django.core.servers.basehttp import FileWrapper
from django.contrib.auth.models import User
from django.shortcuts import render_to_response, redirect, get_object_or_404
from requests import get
from urllib import urlretrieve
from common.models import Repository
from common.util import get_context
def cgit_url(user_name, repo_name, method, path, query=None):
url = 'http://localhost:8080/view'
if method == 'summary':
base = '%s/%s/%s' %(url, user_name, repo_name)
else:
base = '%s/%s/%s/%s' %(url, user_name, repo_name, method)
if path is not None:
base = '%s/%s' %(base, path)
if query is not None and len(query)>1:
base = "%s?%s" % (base, query)
print base
return base
def cumulative_path(path):
if path is None or len(path) == 0:
return path
c = [path[0]]
for part in path[1:]:
c.append('%s/%s'%(c[-1], part))
return c
def view_index(request):
return redirect('index')
def user_index(request, user_name):
return redirect('repo_list', user_name)
def repo_plain(request, user_name, repo_name, path, prefix='plain'):
user = request.user
owner = get_object_or_404(User, username=user_name)
repo = get_object_or_404(Repository, owner=owner, name=repo_name)
collaborators = repo.collaborators.all()
access = repo.user_access(user)
if access is None:
|
query = request.GET.urlencode()
print query
url = cgit_url(user_name, repo_name, prefix, path, query)
(fname, info) = urlretrieve(url)
response = HttpResponse(FileWrapper(open(fname)), content_type='text/plain')
return response
def repo_snapshot(request, user_name, repo_name, path):
user = request.user
owner = get_object_or_404(User, username=user_name)
repo = get_object_or_404(Repository, owner=owner, name=repo_name)
collaborators = repo.collaborators.all()
access = repo.user_access(user)
if access is None:
return HttpResponse('Not authorized', status=401)
query = request.GET.urlencode()
filename = path.split('/')[-1]
url = cgit_url(user_name, repo_name, 'snapshot', path, query)
(fname, info) = urlretrieve(url)
response = HttpResponse(FileWrapper(open(fname)), content_type='application/force-download')
response['Content-Disposition'] = 'attachment; filename="%s"' % filename
return response
def repo_browse(request, user_name, repo_name, method='summary', path=None):
user = request.user
owner = get_object_or_404(User, username=user_name)
repo = get_object_or_404(Repository, owner=owner, name=repo_name)
collaborators = repo.collaborators.all()
access = repo.user_access(user)
if access is None:
return HttpResponse('Not authorized', status=401)
commit_id = request.GET.get('id')
q = request.GET.get('q', '')
qtype = request.GET.get('qt', 'grep')
messages = {
'grep' : 'Log Message',
'author': 'Author',
'committer' : 'Committer',
'range' : 'Range' }
search_text = messages.get(qtype, messages['grep'])
if method == 'tree':
file_path = path.split('/')
path_parts = cumulative_path(file_path)
file_path = zip(file_path, path_parts)
else:
file_path = None
query = request.GET.urlencode()
url = cgit_url(user_name, repo_name, method, path, query)
text = get(url)
context = get_context(request, {'owner': owner, 'repo_html':text.text, 'repo':repo,
'access':access, 'id':commit_id, 'method':method,
'q':q, 'qtype':qtype, 'search_text':search_text, 'file_path':file_path})
return render_to_response('viewer/repo_view.html', context)
| return HttpResponse('Not authorized', status=401) | conditional_block |
views.py | from django.http import HttpResponse
from django.core.servers.basehttp import FileWrapper
from django.contrib.auth.models import User
from django.shortcuts import render_to_response, redirect, get_object_or_404
from requests import get
from urllib import urlretrieve
from common.models import Repository
from common.util import get_context
def cgit_url(user_name, repo_name, method, path, query=None):
url = 'http://localhost:8080/view'
if method == 'summary':
base = '%s/%s/%s' %(url, user_name, repo_name)
else:
base = '%s/%s/%s/%s' %(url, user_name, repo_name, method)
if path is not None:
base = '%s/%s' %(base, path)
if query is not None and len(query)>1:
base = "%s?%s" % (base, query)
print base
return base
def cumulative_path(path):
if path is None or len(path) == 0:
return path
c = [path[0]]
for part in path[1:]:
c.append('%s/%s'%(c[-1], part))
return c
def view_index(request):
return redirect('index')
def user_index(request, user_name):
return redirect('repo_list', user_name)
def repo_plain(request, user_name, repo_name, path, prefix='plain'):
user = request.user
owner = get_object_or_404(User, username=user_name)
repo = get_object_or_404(Repository, owner=owner, name=repo_name)
collaborators = repo.collaborators.all()
access = repo.user_access(user)
if access is None:
return HttpResponse('Not authorized', status=401)
query = request.GET.urlencode()
print query
url = cgit_url(user_name, repo_name, prefix, path, query)
(fname, info) = urlretrieve(url)
response = HttpResponse(FileWrapper(open(fname)), content_type='text/plain')
return response
def repo_snapshot(request, user_name, repo_name, path):
user = request.user
owner = get_object_or_404(User, username=user_name)
repo = get_object_or_404(Repository, owner=owner, name=repo_name)
collaborators = repo.collaborators.all()
access = repo.user_access(user)
if access is None:
return HttpResponse('Not authorized', status=401)
query = request.GET.urlencode()
filename = path.split('/')[-1]
url = cgit_url(user_name, repo_name, 'snapshot', path, query)
(fname, info) = urlretrieve(url)
response = HttpResponse(FileWrapper(open(fname)), content_type='application/force-download')
response['Content-Disposition'] = 'attachment; filename="%s"' % filename
return response
def | (request, user_name, repo_name, method='summary', path=None):
user = request.user
owner = get_object_or_404(User, username=user_name)
repo = get_object_or_404(Repository, owner=owner, name=repo_name)
collaborators = repo.collaborators.all()
access = repo.user_access(user)
if access is None:
return HttpResponse('Not authorized', status=401)
commit_id = request.GET.get('id')
q = request.GET.get('q', '')
qtype = request.GET.get('qt', 'grep')
messages = {
'grep' : 'Log Message',
'author': 'Author',
'committer' : 'Committer',
'range' : 'Range' }
search_text = messages.get(qtype, messages['grep'])
if method == 'tree':
file_path = path.split('/')
path_parts = cumulative_path(file_path)
file_path = zip(file_path, path_parts)
else:
file_path = None
query = request.GET.urlencode()
url = cgit_url(user_name, repo_name, method, path, query)
text = get(url)
context = get_context(request, {'owner': owner, 'repo_html':text.text, 'repo':repo,
'access':access, 'id':commit_id, 'method':method,
'q':q, 'qtype':qtype, 'search_text':search_text, 'file_path':file_path})
return render_to_response('viewer/repo_view.html', context)
| repo_browse | identifier_name |
structarm__dct4__instance__q31.js | var structarm__dct4__instance__q31 =
| [ "N", "structarm__dct4__instance__q31.html#a46a9f136457350676e2bfd3768ff9d6d", null ],
[ "Nby2", "structarm__dct4__instance__q31.html#a32d3268ba4629908dba056599f0a904d", null ],
[ "normalize", "structarm__dct4__instance__q31.html#ac80ff7b28fca36aeef74dea12e8312dd", null ],
[ "pCfft", "structarm__dct4__instance__q31.html#ac96579cfb28d08bb11dd2fe4c6303833", null ],
[ "pCosFactor", "structarm__dct4__instance__q31.html#af97204d1838925621fc82021a0c2d6c1", null ],
[ "pRfft", "structarm__dct4__instance__q31.html#af1487dab5e7963b85dc0fdc6bf492542", null ],
[ "pTwiddle", "structarm__dct4__instance__q31.html#a7db236e22673146bb1d2c962f0713f08", null ]
]; | [
| random_line_split |
test.py | # Copyright (c) 2006 The Regents of The University of Michigan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Joel Hestness
options.clusters = 4 | options.options = '1024 1 0' | options.cmd = 'gem5_gpu_bh' | random_line_split |
markers.py | # The plot server must be running
# Go to http://localhost:5006/bokeh to view this plot
from numpy.random import random
from bokeh.plotting import *
def mscatter(p, x, y, typestr): |
def mtext(p, x, y, textstr):
p.text(x, y, text=textstr,
text_color="#449944", text_align="center", text_font_size="10pt")
output_server("markers")
p = figure(title="markers.py example")
N = 10
mscatter(p, random(N)+2, random(N)+1, "circle")
mscatter(p, random(N)+4, random(N)+1, "square")
mscatter(p, random(N)+6, random(N)+1, "triangle")
mscatter(p, random(N)+8, random(N)+1, "asterisk")
mscatter(p, random(N)+2, random(N)+4, "circle_x")
mscatter(p, random(N)+4, random(N)+4, "square_x")
mscatter(p, random(N)+6, random(N)+4, "inverted_triangle")
mscatter(p, random(N)+8, random(N)+4, "x")
mscatter(p, random(N)+2, random(N)+7, "circle_cross")
mscatter(p, random(N)+4, random(N)+7, "square_cross")
mscatter(p, random(N)+6, random(N)+7, "diamond")
mscatter(p, random(N)+8, random(N)+7, "cross")
mtext(p, [2.5], [0.5], "circle / o")
mtext(p, [4.5], [0.5], "square")
mtext(p, [6.5], [0.5], "triangle")
mtext(p, [8.5], [0.5], "asterisk / *")
mtext(p, [2.5], [3.5], "circle_x / ox")
mtext(p, [4.5], [3.5], "square_x")
mtext(p, [6.5], [3.5], "inverted_triangle")
mtext(p, [8.5], [3.5], "x")
mtext(p, [2.5], [6.5], "circle_cross / o+")
mtext(p, [4.5], [6.5], "square_cross")
mtext(p, [6.5], [6.5], "diamond")
mtext(p, [8.5], [6.5], "cross / +")
show(p) # open a browser | p.scatter(x, y, marker=typestr,
line_color="#6666ee", fill_color="#ee6666", fill_alpha=0.5, size=12) | random_line_split |
markers.py | # The plot server must be running
# Go to http://localhost:5006/bokeh to view this plot
from numpy.random import random
from bokeh.plotting import *
def | (p, x, y, typestr):
p.scatter(x, y, marker=typestr,
line_color="#6666ee", fill_color="#ee6666", fill_alpha=0.5, size=12)
def mtext(p, x, y, textstr):
p.text(x, y, text=textstr,
text_color="#449944", text_align="center", text_font_size="10pt")
output_server("markers")
p = figure(title="markers.py example")
N = 10
mscatter(p, random(N)+2, random(N)+1, "circle")
mscatter(p, random(N)+4, random(N)+1, "square")
mscatter(p, random(N)+6, random(N)+1, "triangle")
mscatter(p, random(N)+8, random(N)+1, "asterisk")
mscatter(p, random(N)+2, random(N)+4, "circle_x")
mscatter(p, random(N)+4, random(N)+4, "square_x")
mscatter(p, random(N)+6, random(N)+4, "inverted_triangle")
mscatter(p, random(N)+8, random(N)+4, "x")
mscatter(p, random(N)+2, random(N)+7, "circle_cross")
mscatter(p, random(N)+4, random(N)+7, "square_cross")
mscatter(p, random(N)+6, random(N)+7, "diamond")
mscatter(p, random(N)+8, random(N)+7, "cross")
mtext(p, [2.5], [0.5], "circle / o")
mtext(p, [4.5], [0.5], "square")
mtext(p, [6.5], [0.5], "triangle")
mtext(p, [8.5], [0.5], "asterisk / *")
mtext(p, [2.5], [3.5], "circle_x / ox")
mtext(p, [4.5], [3.5], "square_x")
mtext(p, [6.5], [3.5], "inverted_triangle")
mtext(p, [8.5], [3.5], "x")
mtext(p, [2.5], [6.5], "circle_cross / o+")
mtext(p, [4.5], [6.5], "square_cross")
mtext(p, [6.5], [6.5], "diamond")
mtext(p, [8.5], [6.5], "cross / +")
show(p) # open a browser
| mscatter | identifier_name |
markers.py | # The plot server must be running
# Go to http://localhost:5006/bokeh to view this plot
from numpy.random import random
from bokeh.plotting import *
def mscatter(p, x, y, typestr):
|
def mtext(p, x, y, textstr):
p.text(x, y, text=textstr,
text_color="#449944", text_align="center", text_font_size="10pt")
output_server("markers")
p = figure(title="markers.py example")
N = 10
mscatter(p, random(N)+2, random(N)+1, "circle")
mscatter(p, random(N)+4, random(N)+1, "square")
mscatter(p, random(N)+6, random(N)+1, "triangle")
mscatter(p, random(N)+8, random(N)+1, "asterisk")
mscatter(p, random(N)+2, random(N)+4, "circle_x")
mscatter(p, random(N)+4, random(N)+4, "square_x")
mscatter(p, random(N)+6, random(N)+4, "inverted_triangle")
mscatter(p, random(N)+8, random(N)+4, "x")
mscatter(p, random(N)+2, random(N)+7, "circle_cross")
mscatter(p, random(N)+4, random(N)+7, "square_cross")
mscatter(p, random(N)+6, random(N)+7, "diamond")
mscatter(p, random(N)+8, random(N)+7, "cross")
mtext(p, [2.5], [0.5], "circle / o")
mtext(p, [4.5], [0.5], "square")
mtext(p, [6.5], [0.5], "triangle")
mtext(p, [8.5], [0.5], "asterisk / *")
mtext(p, [2.5], [3.5], "circle_x / ox")
mtext(p, [4.5], [3.5], "square_x")
mtext(p, [6.5], [3.5], "inverted_triangle")
mtext(p, [8.5], [3.5], "x")
mtext(p, [2.5], [6.5], "circle_cross / o+")
mtext(p, [4.5], [6.5], "square_cross")
mtext(p, [6.5], [6.5], "diamond")
mtext(p, [8.5], [6.5], "cross / +")
show(p) # open a browser
| p.scatter(x, y, marker=typestr,
line_color="#6666ee", fill_color="#ee6666", fill_alpha=0.5, size=12) | identifier_body |
camera_utils.py | from bpy_extras.view3d_utils import location_3d_to_region_2d
def render_get_resolution_(r):
xres = int(r.resolution_x * r.resolution_percentage * 0.01)
yres = int(r.resolution_y * r.resolution_percentage * 0.01)
return xres, yres
def render_get_aspect_(r, camera=None, x=-1, y=-1):
if x != -1 and y != -1:
xratio = x * r.pixel_aspect_x / 200.0
yratio = y * r.pixel_aspect_y / 200.0
else:
xres, yres = render_get_resolution_(r)
xratio = xres * r.pixel_aspect_x / 200.0
yratio = yres * r.pixel_aspect_y / 200.0
if camera is None or camera.type != 'PERSP':
fit = 'AUTO'
else:
fit = camera.sensor_fit
if fit == 'HORIZONTAL' or fit == 'AUTO' and xratio > yratio:
aspectratio = xratio / yratio
xaspect = aspectratio
yaspect = 1.0
elif fit == 'VERTICAL' or fit == 'AUTO' and yratio > xratio:
aspectratio = yratio / xratio
xaspect = 1.0
yaspect = aspectratio
else:
aspectratio = xaspect = yaspect = 1.0
return xaspect, yaspect, aspectratio
def | (ob, render, region, region_data, scene):
# Code reference:
# https://blender.stackexchange.com/questions/6377/coordinates-of-corners-of-camera-view-border
cam = ob.data
frame = cam.view_frame(scene=scene)
# move from object-space into world-space
frame = [ob.matrix_world @ v for v in frame]
# move into pixelspace
frame_px = [location_3d_to_region_2d(region, region_data, v) for v in frame]
min_x = -1
min_y = -1
max_x = -1
max_y = -1
for v in frame_px:
if min_x == -1:
min_x = v[0]
elif min_x > v[0]:
min_x = v[0]
if max_x < v[0]:
max_x = v[0]
if min_y == -1:
min_y = v[1]
elif min_y > v[1]:
min_y = v[1]
if max_y < v[1]:
max_y = v[1]
cam_width = max_x - min_x
cam_height = max_y - min_y
x0 = min_x + render.border_min_x * cam_width
x1 = min_x + render.border_max_x * cam_width
y0 = min_y + render.border_min_y * cam_height
y1 = min_y + render.border_max_y * cam_height
return (x0, x1, y0, y1) | get_viewport_cam_borders | identifier_name |
camera_utils.py | from bpy_extras.view3d_utils import location_3d_to_region_2d
def render_get_resolution_(r):
xres = int(r.resolution_x * r.resolution_percentage * 0.01)
yres = int(r.resolution_y * r.resolution_percentage * 0.01)
return xres, yres
def render_get_aspect_(r, camera=None, x=-1, y=-1):
if x != -1 and y != -1:
xratio = x * r.pixel_aspect_x / 200.0
yratio = y * r.pixel_aspect_y / 200.0
else:
xres, yres = render_get_resolution_(r)
xratio = xres * r.pixel_aspect_x / 200.0
yratio = yres * r.pixel_aspect_y / 200.0
if camera is None or camera.type != 'PERSP':
fit = 'AUTO'
else:
fit = camera.sensor_fit
if fit == 'HORIZONTAL' or fit == 'AUTO' and xratio > yratio:
aspectratio = xratio / yratio
xaspect = aspectratio
yaspect = 1.0
elif fit == 'VERTICAL' or fit == 'AUTO' and yratio > xratio:
aspectratio = yratio / xratio
xaspect = 1.0
yaspect = aspectratio
else:
aspectratio = xaspect = yaspect = 1.0
return xaspect, yaspect, aspectratio
def get_viewport_cam_borders(ob, render, region, region_data, scene):
# Code reference:
# https://blender.stackexchange.com/questions/6377/coordinates-of-corners-of-camera-view-border
| cam = ob.data
frame = cam.view_frame(scene=scene)
# move from object-space into world-space
frame = [ob.matrix_world @ v for v in frame]
# move into pixelspace
frame_px = [location_3d_to_region_2d(region, region_data, v) for v in frame]
min_x = -1
min_y = -1
max_x = -1
max_y = -1
for v in frame_px:
if min_x == -1:
min_x = v[0]
elif min_x > v[0]:
min_x = v[0]
if max_x < v[0]:
max_x = v[0]
if min_y == -1:
min_y = v[1]
elif min_y > v[1]:
min_y = v[1]
if max_y < v[1]:
max_y = v[1]
cam_width = max_x - min_x
cam_height = max_y - min_y
x0 = min_x + render.border_min_x * cam_width
x1 = min_x + render.border_max_x * cam_width
y0 = min_y + render.border_min_y * cam_height
y1 = min_y + render.border_max_y * cam_height
return (x0, x1, y0, y1) | identifier_body | |
camera_utils.py | from bpy_extras.view3d_utils import location_3d_to_region_2d
def render_get_resolution_(r):
xres = int(r.resolution_x * r.resolution_percentage * 0.01)
yres = int(r.resolution_y * r.resolution_percentage * 0.01)
return xres, yres
def render_get_aspect_(r, camera=None, x=-1, y=-1):
if x != -1 and y != -1:
xratio = x * r.pixel_aspect_x / 200.0
yratio = y * r.pixel_aspect_y / 200.0
else:
xres, yres = render_get_resolution_(r)
xratio = xres * r.pixel_aspect_x / 200.0
yratio = yres * r.pixel_aspect_y / 200.0
if camera is None or camera.type != 'PERSP':
fit = 'AUTO'
else:
fit = camera.sensor_fit
if fit == 'HORIZONTAL' or fit == 'AUTO' and xratio > yratio:
aspectratio = xratio / yratio
xaspect = aspectratio
yaspect = 1.0
elif fit == 'VERTICAL' or fit == 'AUTO' and yratio > xratio:
aspectratio = yratio / xratio
xaspect = 1.0
yaspect = aspectratio
else:
aspectratio = xaspect = yaspect = 1.0
return xaspect, yaspect, aspectratio
| # https://blender.stackexchange.com/questions/6377/coordinates-of-corners-of-camera-view-border
cam = ob.data
frame = cam.view_frame(scene=scene)
# move from object-space into world-space
frame = [ob.matrix_world @ v for v in frame]
# move into pixelspace
frame_px = [location_3d_to_region_2d(region, region_data, v) for v in frame]
min_x = -1
min_y = -1
max_x = -1
max_y = -1
for v in frame_px:
if min_x == -1:
min_x = v[0]
elif min_x > v[0]:
min_x = v[0]
if max_x < v[0]:
max_x = v[0]
if min_y == -1:
min_y = v[1]
elif min_y > v[1]:
min_y = v[1]
if max_y < v[1]:
max_y = v[1]
cam_width = max_x - min_x
cam_height = max_y - min_y
x0 = min_x + render.border_min_x * cam_width
x1 = min_x + render.border_max_x * cam_width
y0 = min_y + render.border_min_y * cam_height
y1 = min_y + render.border_max_y * cam_height
return (x0, x1, y0, y1) |
def get_viewport_cam_borders(ob, render, region, region_data, scene):
# Code reference: | random_line_split |
camera_utils.py | from bpy_extras.view3d_utils import location_3d_to_region_2d
def render_get_resolution_(r):
xres = int(r.resolution_x * r.resolution_percentage * 0.01)
yres = int(r.resolution_y * r.resolution_percentage * 0.01)
return xres, yres
def render_get_aspect_(r, camera=None, x=-1, y=-1):
if x != -1 and y != -1:
xratio = x * r.pixel_aspect_x / 200.0
yratio = y * r.pixel_aspect_y / 200.0
else:
xres, yres = render_get_resolution_(r)
xratio = xres * r.pixel_aspect_x / 200.0
yratio = yres * r.pixel_aspect_y / 200.0
if camera is None or camera.type != 'PERSP':
fit = 'AUTO'
else:
|
if fit == 'HORIZONTAL' or fit == 'AUTO' and xratio > yratio:
aspectratio = xratio / yratio
xaspect = aspectratio
yaspect = 1.0
elif fit == 'VERTICAL' or fit == 'AUTO' and yratio > xratio:
aspectratio = yratio / xratio
xaspect = 1.0
yaspect = aspectratio
else:
aspectratio = xaspect = yaspect = 1.0
return xaspect, yaspect, aspectratio
def get_viewport_cam_borders(ob, render, region, region_data, scene):
# Code reference:
# https://blender.stackexchange.com/questions/6377/coordinates-of-corners-of-camera-view-border
cam = ob.data
frame = cam.view_frame(scene=scene)
# move from object-space into world-space
frame = [ob.matrix_world @ v for v in frame]
# move into pixelspace
frame_px = [location_3d_to_region_2d(region, region_data, v) for v in frame]
min_x = -1
min_y = -1
max_x = -1
max_y = -1
for v in frame_px:
if min_x == -1:
min_x = v[0]
elif min_x > v[0]:
min_x = v[0]
if max_x < v[0]:
max_x = v[0]
if min_y == -1:
min_y = v[1]
elif min_y > v[1]:
min_y = v[1]
if max_y < v[1]:
max_y = v[1]
cam_width = max_x - min_x
cam_height = max_y - min_y
x0 = min_x + render.border_min_x * cam_width
x1 = min_x + render.border_max_x * cam_width
y0 = min_y + render.border_min_y * cam_height
y1 = min_y + render.border_max_y * cam_height
return (x0, x1, y0, y1) | fit = camera.sensor_fit | conditional_block |
address-select.component.spec.ts | /*
* Copyright (c) 2014-2021 Bjoern Kimminich.
* SPDX-License-Identifier: MIT
*/
import { TranslateModule, TranslateService } from '@ngx-translate/core'
import { HttpClientTestingModule } from '@angular/common/http/testing'
import { MatCardModule } from '@angular/material/card'
import { MatFormFieldModule } from '@angular/material/form-field'
import { ComponentFixture, TestBed, waitForAsync } from '@angular/core/testing'
import { MatInputModule } from '@angular/material/input'
import { ReactiveFormsModule } from '@angular/forms'
import { BrowserAnimationsModule } from '@angular/platform-browser/animations'
import { MatTableModule } from '@angular/material/table'
import { MatExpansionModule } from '@angular/material/expansion'
import { MatDividerModule } from '@angular/material/divider'
import { MatRadioModule } from '@angular/material/radio'
import { MatDialogModule } from '@angular/material/dialog'
import { AddressComponent } from '../address/address.component'
import { AddressSelectComponent } from './address-select.component'
import { RouterTestingModule } from '@angular/router/testing'
import { DeliveryMethodComponent } from '../delivery-method/delivery-method.component'
import { MatIconModule } from '@angular/material/icon'
import { MatTooltipModule } from '@angular/material/tooltip'
import { MatCheckboxModule } from '@angular/material/checkbox'
import { EventEmitter } from '@angular/core'
import { of } from 'rxjs'
import { MatSnackBar } from '@angular/material/snack-bar'
describe('AddressSelectComponent', () => {
let component: AddressSelectComponent
let fixture: ComponentFixture<AddressSelectComponent>
let snackBar: any
let translateService
beforeEach(waitForAsync(() => {
translateService = jasmine.createSpyObj('TranslateService', ['get'])
translateService.get.and.returnValue(of({}))
translateService.onLangChange = new EventEmitter()
translateService.onTranslationChange = new EventEmitter()
translateService.onDefaultLangChange = new EventEmitter()
snackBar = jasmine.createSpyObj('MatSnackBar', ['open'])
snackBar.open.and.returnValue(null)
TestBed.configureTestingModule({
imports: [
RouterTestingModule.withRoutes([
{ path: 'delivery-method', component: DeliveryMethodComponent }
]),
TranslateModule.forRoot(),
HttpClientTestingModule,
ReactiveFormsModule,
BrowserAnimationsModule,
MatCardModule,
MatTableModule,
MatFormFieldModule,
MatInputModule,
MatExpansionModule,
MatDividerModule,
MatRadioModule,
MatDialogModule,
MatIconModule,
MatTooltipModule,
MatCheckboxModule
], | { provide: MatSnackBar, useValue: snackBar }]
})
.compileComponents()
}))
beforeEach(() => {
fixture = TestBed.createComponent(AddressSelectComponent)
component = fixture.componentInstance
fixture.detectChanges()
})
it('should create', () => {
expect(component).toBeTruthy()
})
it('should store address id on calling getMessage', () => {
component.getMessage(1)
expect(component.addressId).toBe(1)
})
}) | declarations: [AddressSelectComponent, AddressComponent, DeliveryMethodComponent],
providers: [{ provide: TranslateService, useValue: translateService }, | random_line_split |
serial.js | /**
* Filter to keep request sequence.
*/
var logger = require('pomelo-logger').getLogger('pomelo', __filename);
var taskManager = require('../../common/manager/taskManager');
module.exports = function() {
return new Filter();
};
var Filter = function() {
};
/**
* request serialization after filter
*/
Filter.prototype.before = function(msg, session, next) {
taskManager.addTask(session.id, function(task) {
session.__serialTask__ = task;
next();
}, function() {
logger.error('[serial filter] msg timeout, msg:' + JSON.stringify(msg));
});
};
/**
* request serialization after filter
*/
Filter.prototype.after = function(err, msg, session, resp, next) {
var task = session.__serialTask__;
if(task) |
next(err);
};
| {
if(!task.done() && !err) {
err = new Error('task time out. msg:' + JSON.stringify(msg));
}
} | conditional_block |
serial.js | /**
* Filter to keep request sequence.
*/
var logger = require('pomelo-logger').getLogger('pomelo', __filename);
var taskManager = require('../../common/manager/taskManager');
module.exports = function() {
return new Filter();
};
var Filter = function() {
};
/**
* request serialization after filter
*/
Filter.prototype.before = function(msg, session, next) {
taskManager.addTask(session.id, function(task) { | };
/**
* request serialization after filter
*/
Filter.prototype.after = function(err, msg, session, resp, next) {
var task = session.__serialTask__;
if(task) {
if(!task.done() && !err) {
err = new Error('task time out. msg:' + JSON.stringify(msg));
}
}
next(err);
}; | session.__serialTask__ = task;
next();
}, function() {
logger.error('[serial filter] msg timeout, msg:' + JSON.stringify(msg));
}); | random_line_split |
ThreeMFWriter.py | # Copyright (c) 2015 Ultimaker B.V.
# Uranium is released under the terms of the AGPLv3 or higher.
from UM.Mesh.MeshWriter import MeshWriter
from UM.Math.Vector import Vector
from UM.Logger import Logger
from UM.Math.Matrix import Matrix
from UM.Application import Application
import UM.Scene.SceneNode
import Savitar
import numpy
MYPY = False
try:
if not MYPY:
import xml.etree.cElementTree as ET
except ImportError:
Logger.log("w", "Unable to load cElementTree, switching to slower version")
import xml.etree.ElementTree as ET
import zipfile
import UM.Application
class ThreeMFWriter(MeshWriter):
def __init__(self):
super().__init__()
self._namespaces = {
"3mf": "http://schemas.microsoft.com/3dmanufacturing/core/2015/02",
"content-types": "http://schemas.openxmlformats.org/package/2006/content-types",
"relationships": "http://schemas.openxmlformats.org/package/2006/relationships",
"cura": "http://software.ultimaker.com/xml/cura/3mf/2015/10"
}
self._unit_matrix_string = self._convertMatrixToString(Matrix())
self._archive = None
self._store_archive = False
def _convertMatrixToString(self, matrix):
result = ""
result += str(matrix._data[0, 0]) + " "
result += str(matrix._data[1, 0]) + " "
result += str(matrix._data[2, 0]) + " "
result += str(matrix._data[0, 1]) + " "
result += str(matrix._data[1, 1]) + " "
result += str(matrix._data[2, 1]) + " "
result += str(matrix._data[0, 2]) + " "
result += str(matrix._data[1, 2]) + " "
result += str(matrix._data[2, 2]) + " "
result += str(matrix._data[0, 3]) + " "
result += str(matrix._data[1, 3]) + " "
result += str(matrix._data[2, 3])
return result
## Should we store the archive
# Note that if this is true, the archive will not be closed.
# The object that set this parameter is then responsible for closing it correctly!
def setStoreArchive(self, store_archive):
self._store_archive = store_archive
## Convenience function that converts an Uranium SceneNode object to a SavitarSceneNode
# \returns Uranium Scenen node.
def _convertUMNodeToSavitarNode(self, um_node, transformation = Matrix()):
if type(um_node) is not UM.Scene.SceneNode.SceneNode:
return None
savitar_node = Savitar.SceneNode()
node_matrix = um_node.getLocalTransformation()
matrix_string = self._convertMatrixToString(node_matrix.preMultiply(transformation))
savitar_node.setTransformation(matrix_string)
mesh_data = um_node.getMeshData()
if mesh_data is not None:
savitar_node.getMeshData().setVerticesFromBytes(mesh_data.getVerticesAsByteArray())
indices_array = mesh_data.getIndicesAsByteArray()
if indices_array is not None:
savitar_node.getMeshData().setFacesFromBytes(indices_array)
else:
savitar_node.getMeshData().setFacesFromBytes(numpy.arange(mesh_data.getVertices().size / 3, dtype=numpy.int32).tostring())
# Handle per object settings (if any)
stack = um_node.callDecoration("getStack")
if stack is not None:
changed_setting_keys = set(stack.getTop().getAllKeys())
# Ensure that we save the extruder used for this object.
if stack.getProperty("machine_extruder_count", "value") > 1:
changed_setting_keys.add("extruder_nr")
# Get values for all changed settings & save them.
for key in changed_setting_keys:
savitar_node.setSetting(key, str(stack.getProperty(key, "value")))
for child_node in um_node.getChildren():
savitar_child_node = self._convertUMNodeToSavitarNode(child_node)
if savitar_child_node is not None:
savitar_node.addChild(savitar_child_node)
return savitar_node
def getArchive(self):
return self._archive
def | (self, stream, nodes, mode = MeshWriter.OutputMode.BinaryMode):
self._archive = None # Reset archive
archive = zipfile.ZipFile(stream, "w", compression = zipfile.ZIP_DEFLATED)
try:
model_file = zipfile.ZipInfo("3D/3dmodel.model")
# Because zipfile is stupid and ignores archive-level compression settings when writing with ZipInfo.
model_file.compress_type = zipfile.ZIP_DEFLATED
# Create content types file
content_types_file = zipfile.ZipInfo("[Content_Types].xml")
content_types_file.compress_type = zipfile.ZIP_DEFLATED
content_types = ET.Element("Types", xmlns = self._namespaces["content-types"])
rels_type = ET.SubElement(content_types, "Default", Extension = "rels", ContentType = "application/vnd.openxmlformats-package.relationships+xml")
model_type = ET.SubElement(content_types, "Default", Extension = "model", ContentType = "application/vnd.ms-package.3dmanufacturing-3dmodel+xml")
# Create _rels/.rels file
relations_file = zipfile.ZipInfo("_rels/.rels")
relations_file.compress_type = zipfile.ZIP_DEFLATED
relations_element = ET.Element("Relationships", xmlns = self._namespaces["relationships"])
model_relation_element = ET.SubElement(relations_element, "Relationship", Target = "/3D/3dmodel.model", Id = "rel0", Type = "http://schemas.microsoft.com/3dmanufacturing/2013/01/3dmodel")
savitar_scene = Savitar.Scene()
transformation_matrix = Matrix()
transformation_matrix._data[1, 1] = 0
transformation_matrix._data[1, 2] = -1
transformation_matrix._data[2, 1] = 1
transformation_matrix._data[2, 2] = 0
global_container_stack = Application.getInstance().getGlobalContainerStack()
# Second step: 3MF defines the left corner of the machine as center, whereas cura uses the center of the
# build volume.
if global_container_stack:
translation_vector = Vector(x=global_container_stack.getProperty("machine_width", "value") / 2,
y=global_container_stack.getProperty("machine_depth", "value") / 2,
z=0)
translation_matrix = Matrix()
translation_matrix.setByTranslation(translation_vector)
transformation_matrix.preMultiply(translation_matrix)
root_node = UM.Application.Application.getInstance().getController().getScene().getRoot()
for node in nodes:
if node == root_node:
for root_child in node.getChildren():
savitar_node = self._convertUMNodeToSavitarNode(root_child, transformation_matrix)
if savitar_node:
savitar_scene.addSceneNode(savitar_node)
else:
savitar_node = self._convertUMNodeToSavitarNode(node, transformation_matrix)
if savitar_node:
savitar_scene.addSceneNode(savitar_node)
parser = Savitar.ThreeMFParser()
scene_string = parser.sceneToString(savitar_scene)
archive.writestr(model_file, scene_string)
archive.writestr(content_types_file, b'<?xml version="1.0" encoding="UTF-8"?> \n' + ET.tostring(content_types))
archive.writestr(relations_file, b'<?xml version="1.0" encoding="UTF-8"?> \n' + ET.tostring(relations_element))
except Exception as e:
Logger.logException("e", "Error writing zip file")
return False
finally:
if not self._store_archive:
archive.close()
else:
self._archive = archive
return True
| write | identifier_name |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.