file_name large_stringlengths 4 140 | prefix large_stringlengths 0 39k | suffix large_stringlengths 0 36.1k | middle large_stringlengths 0 29.4k | fim_type large_stringclasses 4
values |
|---|---|---|---|---|
isMatch.ts | /**
* Performs a partial deep comparison between object and source to
* determine if object contains equivalent property values.
*
* Partial comparisons will match empty array and empty object
* source values against any array or object value, respectively.
*
* @category Language
*
* First version: July 14, 2017
* Last updated : July 14, 2017
*
* @export
* @param {object} input
* @param {object} source
* @returns {boolean}
*/
import { isEqual } from './isEqual';
import { keysIn } from './keysIn';
export function | (input: object, source: object): boolean {
const sourceKey: PropertyKey[]
= keysIn({ source, goDeep: true, enumOnly: true });
if (sourceKey.length === 0) return true;
const inputKey: PropertyKey[]
= keysIn({ source: input, goDeep: true, enumOnly: true });
if (sourceKey.length > inputKey.length) return false;
let key: PropertyKey;
for (key of sourceKey) {
if (!inputKey.includes(key) || !isEqual(input[key], source[key]))
return false;
}
return true;
}
| isMatch | identifier_name |
class_study5.py | #!/usr/bin/env python
# -*- coding:utf-8 -*-
'''
私有方法和私有字段
私有字段:self.__Thailand
私有字段是不能直接被对象和类进行访问的,需要通过动态方法访问
同样私有字段也是可以通过特性的方法访问的
私有方法:
私有方法是不能直接被对象和类进行访问的,通过使用动态方法进行访问
japan._Provice__sha() #显示调用私有方法,但是不建议这么使用
私有字段使用字段,可以让被人访问,但是不可以让别人改动
'''
class Provice:
memo = '省份之一' #这个值是属于类的 静态字段
def __init__(self,name,capital,leader,flag): #slef就是你以后创建对象的对象值
self.Name = name #这个值属于对象 动态字段
self.Capital = capital
self.Leader = leader
self.__Thailand = flag #私有字段
def show(self): #访问私有字段
print self.__Thailand
def __sha(self): #定义私有方法
print '我是Alex'
def Foo2(self): #通过动态方法访问私有方法
self.__sha()
@property #通过特性访问私有字段
def Thailand(self):
return self.__Thailand
#hn = Provice('河南','郑州','李克强')
#sd = Provice('山东','济南','习近平')
japan = Provice('日本','东京','安倍',True)
#print japan.__Thailand #访问报错 AttributeError: Provice instance has no attribute '__Thailand'
#对象是不能直接访问私有字段的
japan.show() #私有字段需要使用动态方法进行访问输出
japan.Foo2() #私有方法通过使用动态方法进行访问输出
japan._Provice__sha() #显示调用私有方法,但是不建议这么使用
print japan.Thailand #通过特性访问私有字段 | identifier_body | ||
class_study5.py | #!/usr/bin/env python
# -*- coding:utf-8 -*-
'''
私有方法和私有字段
私有字段:self.__Thailand
私有字段是不能直接被对象和类进行访问的,需要通过动态方法访问
同样私有字段也是可以通过特性的方法访问的
私有方法:
私有方法是不能直接被对象和类进行访问的,通过使用动态方法进行访问
japan._Provice__sha() #显示调用私有方法,但是不建议这么使用
私有字段使用字段,可以让被人访问,但是不可以让别人改动
'''
class Provice:
memo = '省份之一' #这个值是属于类的 静态字段
def __init__(self,name,capital,leader,flag): #slef就是你以后创建对象的对象值
self.Name = name #这个值属于对象 动态字段
self.Capital = capital
self.Leader = leader
| def show(self): #访问私有字段
print self.__Thailand
def __sha(self): #定义私有方法
print '我是Alex'
def Foo2(self): #通过动态方法访问私有方法
self.__sha()
@property #通过特性访问私有字段
def Thailand(self):
return self.__Thailand
#hn = Provice('河南','郑州','李克强')
#sd = Provice('山东','济南','习近平')
japan = Provice('日本','东京','安倍',True)
#print japan.__Thailand #访问报错 AttributeError: Provice instance has no attribute '__Thailand'
#对象是不能直接访问私有字段的
japan.show() #私有字段需要使用动态方法进行访问输出
japan.Foo2() #私有方法通过使用动态方法进行访问输出
japan._Provice__sha() #显示调用私有方法,但是不建议这么使用
print japan.Thailand #通过特性访问私有字段 | self.__Thailand = flag #私有字段
| random_line_split |
class_study5.py | #!/usr/bin/env python
# -*- coding:utf-8 -*-
'''
私有方法和私有字段
私有字段:self.__Thailand
私有字段是不能直接被对象和类进行访问的,需要通过动态方法访问
同样私有字段也是可以通过特性的方法访问的
私有方法:
私有方法是不能直接被对象和类进行访问的,通过使用动态方法进行访问
japan._Provice__sha() #显示调用私有方法,但是不建议这么使用
私有字段使用字段,可以让被人访问,但是不可以让别人改动
'''
class Provice:
memo = '省份之一' #这个值是属于类的 静态字段
def __init__(self,name,capital,leader,flag): #slef就是你以后创建对象的对象值
self.Name = name #这个值属于对象 动态字段
self.Capital = capital
self.Leader = leader
self.__Thailand = flag #私有字段
def show(self): #访问私有字段
print self.__Thailand
def __sha(self): #定义私有方法
print '我是Alex'
def Foo2(self): #通过动态方法访问私有方法
self.__sha()
@property #通过特性访问私有字段
def Thailand(self):
return self.__Thailand
#hn = Provice('河南','郑州','李克强')
#sd = Provice('山 | 济南','习近平')
japan = Provice('日本','东京','安倍',True)
#print japan.__Thailand #访问报错 AttributeError: Provice instance has no attribute '__Thailand'
#对象是不能直接访问私有字段的
japan.show() #私有字段需要使用动态方法进行访问输出
japan.Foo2() #私有方法通过使用动态方法进行访问输出
japan._Provice__sha() #显示调用私有方法,但是不建议这么使用
print japan.Thailand #通过特性访问私有字段 | 东',' | identifier_name |
accesslog.js | /**
* @param {string} value
* @returns {RegExp}
* */
/**
* @param {RegExp | string } re
* @returns {string}
*/
function source(re) {
if (!re) return null;
if (typeof re === "string") return re;
return re.source;
}
/**
* @param {...(RegExp | string) } args
* @returns {string}
*/
function concat(...args) {
const joined = args.map((x) => source(x)).join("");
return joined;
}
/**
* Any of the passed expresssions may match
*
* Creates a huge this | this | that | that match
* @param {(RegExp | string)[] } args
* @returns {string}
*/
function either(...args) {
const joined = '(' + args.map((x) => source(x)).join("|") + ")";
return joined;
}
/*
Language: Apache Access Log
Author: Oleg Efimov <efimovov@gmail.com>
Description: Apache/Nginx Access Logs
Website: https://httpd.apache.org/docs/2.4/logs.html#accesslog
Audit: 2020
*/
/** @type LanguageFn */
function | (_hljs) {
// https://developer.mozilla.org/en-US/docs/Web/HTTP/Methods
const HTTP_VERBS = [
"GET",
"POST",
"HEAD",
"PUT",
"DELETE",
"CONNECT",
"OPTIONS",
"PATCH",
"TRACE"
];
return {
name: 'Apache Access Log',
contains: [
// IP
{
className: 'number',
begin: /^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}(:\d{1,5})?\b/,
relevance: 5
},
// Other numbers
{
className: 'number',
begin: /\b\d+\b/,
relevance: 0
},
// Requests
{
className: 'string',
begin: concat(/"/, either(...HTTP_VERBS)),
end: /"/,
keywords: HTTP_VERBS,
illegal: /\n/,
relevance: 5,
contains: [
{
begin: /HTTP\/[12]\.\d'/,
relevance: 5
}
]
},
// Dates
{
className: 'string',
// dates must have a certain length, this prevents matching
// simple array accesses a[123] and [] and other common patterns
// found in other languages
begin: /\[\d[^\]\n]{8,}\]/,
illegal: /\n/,
relevance: 1
},
{
className: 'string',
begin: /\[/,
end: /\]/,
illegal: /\n/,
relevance: 0
},
// User agent / relevance boost
{
className: 'string',
begin: /"Mozilla\/\d\.\d \(/,
end: /"/,
illegal: /\n/,
relevance: 3
},
// Strings
{
className: 'string',
begin: /"/,
end: /"/,
illegal: /\n/,
relevance: 0
}
]
};
}
module.exports = accesslog;
| accesslog | identifier_name |
accesslog.js | /**
* @param {string} value
* @returns {RegExp}
* */
/**
* @param {RegExp | string } re
* @returns {string}
*/
function source(re) {
if (!re) return null;
if (typeof re === "string") return re;
return re.source;
}
/**
* @param {...(RegExp | string) } args
* @returns {string}
*/
function concat(...args) {
const joined = args.map((x) => source(x)).join("");
return joined;
}
/**
* Any of the passed expresssions may match
*
* Creates a huge this | this | that | that match
* @param {(RegExp | string)[] } args
* @returns {string} |
/*
Language: Apache Access Log
Author: Oleg Efimov <efimovov@gmail.com>
Description: Apache/Nginx Access Logs
Website: https://httpd.apache.org/docs/2.4/logs.html#accesslog
Audit: 2020
*/
/** @type LanguageFn */
function accesslog(_hljs) {
// https://developer.mozilla.org/en-US/docs/Web/HTTP/Methods
const HTTP_VERBS = [
"GET",
"POST",
"HEAD",
"PUT",
"DELETE",
"CONNECT",
"OPTIONS",
"PATCH",
"TRACE"
];
return {
name: 'Apache Access Log',
contains: [
// IP
{
className: 'number',
begin: /^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}(:\d{1,5})?\b/,
relevance: 5
},
// Other numbers
{
className: 'number',
begin: /\b\d+\b/,
relevance: 0
},
// Requests
{
className: 'string',
begin: concat(/"/, either(...HTTP_VERBS)),
end: /"/,
keywords: HTTP_VERBS,
illegal: /\n/,
relevance: 5,
contains: [
{
begin: /HTTP\/[12]\.\d'/,
relevance: 5
}
]
},
// Dates
{
className: 'string',
// dates must have a certain length, this prevents matching
// simple array accesses a[123] and [] and other common patterns
// found in other languages
begin: /\[\d[^\]\n]{8,}\]/,
illegal: /\n/,
relevance: 1
},
{
className: 'string',
begin: /\[/,
end: /\]/,
illegal: /\n/,
relevance: 0
},
// User agent / relevance boost
{
className: 'string',
begin: /"Mozilla\/\d\.\d \(/,
end: /"/,
illegal: /\n/,
relevance: 3
},
// Strings
{
className: 'string',
begin: /"/,
end: /"/,
illegal: /\n/,
relevance: 0
}
]
};
}
module.exports = accesslog; | */
function either(...args) {
const joined = '(' + args.map((x) => source(x)).join("|") + ")";
return joined;
} | random_line_split |
accesslog.js | /**
* @param {string} value
* @returns {RegExp}
* */
/**
* @param {RegExp | string } re
* @returns {string}
*/
function source(re) {
if (!re) return null;
if (typeof re === "string") return re;
return re.source;
}
/**
* @param {...(RegExp | string) } args
* @returns {string}
*/
function concat(...args) |
/**
* Any of the passed expresssions may match
*
* Creates a huge this | this | that | that match
* @param {(RegExp | string)[] } args
* @returns {string}
*/
function either(...args) {
const joined = '(' + args.map((x) => source(x)).join("|") + ")";
return joined;
}
/*
Language: Apache Access Log
Author: Oleg Efimov <efimovov@gmail.com>
Description: Apache/Nginx Access Logs
Website: https://httpd.apache.org/docs/2.4/logs.html#accesslog
Audit: 2020
*/
/** @type LanguageFn */
function accesslog(_hljs) {
// https://developer.mozilla.org/en-US/docs/Web/HTTP/Methods
const HTTP_VERBS = [
"GET",
"POST",
"HEAD",
"PUT",
"DELETE",
"CONNECT",
"OPTIONS",
"PATCH",
"TRACE"
];
return {
name: 'Apache Access Log',
contains: [
// IP
{
className: 'number',
begin: /^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}(:\d{1,5})?\b/,
relevance: 5
},
// Other numbers
{
className: 'number',
begin: /\b\d+\b/,
relevance: 0
},
// Requests
{
className: 'string',
begin: concat(/"/, either(...HTTP_VERBS)),
end: /"/,
keywords: HTTP_VERBS,
illegal: /\n/,
relevance: 5,
contains: [
{
begin: /HTTP\/[12]\.\d'/,
relevance: 5
}
]
},
// Dates
{
className: 'string',
// dates must have a certain length, this prevents matching
// simple array accesses a[123] and [] and other common patterns
// found in other languages
begin: /\[\d[^\]\n]{8,}\]/,
illegal: /\n/,
relevance: 1
},
{
className: 'string',
begin: /\[/,
end: /\]/,
illegal: /\n/,
relevance: 0
},
// User agent / relevance boost
{
className: 'string',
begin: /"Mozilla\/\d\.\d \(/,
end: /"/,
illegal: /\n/,
relevance: 3
},
// Strings
{
className: 'string',
begin: /"/,
end: /"/,
illegal: /\n/,
relevance: 0
}
]
};
}
module.exports = accesslog;
| {
const joined = args.map((x) => source(x)).join("");
return joined;
} | identifier_body |
user.py | #!usr/bin/env python
# -*- coding:utf-8 -*-
"""
@author: magic
"""
from django.contrib import admin
from blog.models import User
from django.contrib.auth.admin import UserAdmin
from django.utils.translation import ugettext, ugettext_lazy as _
class BlogUserAdmin(UserAdmin):
filesets = (
(None, {'fields': ('username', 'email', 'password')}),
(_('Personal info'), {'fields': ('email', 'qq', 'phone')}),
(_('Permissions'), {'fields': ('is_active', 'is_staff', 'is_superuser',
'groups', 'user_permissions')}),
(_('Important dates'), {'fields': {'last_login', 'date_joined'}}),
)
add_fieldsets = (
(None, {
'classes': ('wide', ),
'fields': ('username', 'email', 'password1', 'password2'), | )
admin.site.register(User, BlogUserAdmin) | }), | random_line_split |
user.py | #!usr/bin/env python
# -*- coding:utf-8 -*-
"""
@author: magic
"""
from django.contrib import admin
from blog.models import User
from django.contrib.auth.admin import UserAdmin
from django.utils.translation import ugettext, ugettext_lazy as _
class BlogUserAdmin(UserAdmin):
|
admin.site.register(User, BlogUserAdmin) | filesets = (
(None, {'fields': ('username', 'email', 'password')}),
(_('Personal info'), {'fields': ('email', 'qq', 'phone')}),
(_('Permissions'), {'fields': ('is_active', 'is_staff', 'is_superuser',
'groups', 'user_permissions')}),
(_('Important dates'), {'fields': {'last_login', 'date_joined'}}),
)
add_fieldsets = (
(None, {
'classes': ('wide', ),
'fields': ('username', 'email', 'password1', 'password2'),
}),
) | identifier_body |
user.py | #!usr/bin/env python
# -*- coding:utf-8 -*-
"""
@author: magic
"""
from django.contrib import admin
from blog.models import User
from django.contrib.auth.admin import UserAdmin
from django.utils.translation import ugettext, ugettext_lazy as _
class | (UserAdmin):
filesets = (
(None, {'fields': ('username', 'email', 'password')}),
(_('Personal info'), {'fields': ('email', 'qq', 'phone')}),
(_('Permissions'), {'fields': ('is_active', 'is_staff', 'is_superuser',
'groups', 'user_permissions')}),
(_('Important dates'), {'fields': {'last_login', 'date_joined'}}),
)
add_fieldsets = (
(None, {
'classes': ('wide', ),
'fields': ('username', 'email', 'password1', 'password2'),
}),
)
admin.site.register(User, BlogUserAdmin) | BlogUserAdmin | identifier_name |
index.ts | export { W3W_REGEX } from './constants';
export {
autosuggest,
AutosuggestOptions,
AutosuggestResponse,
} from './requests/autosuggest';
export { autosuggestSelection } from './requests/autosuggest-selection';
export {
availableLanguages,
AvailableLanguagesResponse,
} from './requests/available-languages';
export { convertTo3wa, convertTo3waGeoJson } from './requests/convert-to-3wa';
export {
convertToCoordinates, | gridSection,
gridSectionGeoJson,
GridSectionGeoJsonResponse,
GridSectionJsonResponse,
} from './requests/grid-section';
export {
Bounds,
Coordinates,
LocationJsonResponse,
LocationGeoJsonResponse,
LocationProperties,
ResponseFormat,
} from './types';
export {
setOptions,
getOptions,
ApiOptions,
getWords,
valid3wa,
} from './utils'; | convertToCoordinatesGeoJson,
} from './requests/convert-to-coordinates';
export { | random_line_split |
Settings.ts | /**
* Copyright (c) Tiny Technologies, Inc. All rights reserved. | */
import Editor from 'tinymce/core/api/Editor';
const shouldIndentOnTab = (editor: Editor) => editor.getParam('lists_indent_on_tab', true);
const getForcedRootBlock = (editor: Editor): string => {
const block = editor.getParam('forced_root_block', 'p');
if (block === false) {
return '';
} else if (block === true) {
return 'p';
} else {
return block;
}
};
const getForcedRootBlockAttrs = (editor: Editor): Record<string, string> => editor.getParam('forced_root_block_attrs', {});
export {
shouldIndentOnTab,
getForcedRootBlock,
getForcedRootBlockAttrs
}; | * Licensed under the LGPL or a commercial license.
* For LGPL see License.txt in the project root for license information.
* For commercial licenses see https://www.tiny.cloud/ | random_line_split |
Settings.ts | /**
* Copyright (c) Tiny Technologies, Inc. All rights reserved.
* Licensed under the LGPL or a commercial license.
* For LGPL see License.txt in the project root for license information.
* For commercial licenses see https://www.tiny.cloud/
*/
import Editor from 'tinymce/core/api/Editor';
const shouldIndentOnTab = (editor: Editor) => editor.getParam('lists_indent_on_tab', true);
const getForcedRootBlock = (editor: Editor): string => {
const block = editor.getParam('forced_root_block', 'p');
if (block === false) {
return '';
} else if (block === true) {
return 'p';
} else |
};
const getForcedRootBlockAttrs = (editor: Editor): Record<string, string> => editor.getParam('forced_root_block_attrs', {});
export {
shouldIndentOnTab,
getForcedRootBlock,
getForcedRootBlockAttrs
};
| {
return block;
} | conditional_block |
travis_test_script.py | import sys, subprocess, time
"""
This script is made as a wrapper for sc2 bots to set a timeout to the bots (in case they cant find the last enemy structure or the game is ending in a draw)
Usage:
cd into python-sc2/ directory
docker build -t test_image -f test/Dockerfile .
docker run test_image -c "python test/travis_test_script.py test/autotest_bot.py"
"""
retries = 2
timeout_time = 3*60 # My maxout bot took 110 - 140 real seconds for 7 minutes in game time
if len(sys.argv) > 1:
# Attempt to run process with retries and timeouts
t0 = time.time()
process, result = None, None
for i in range(retries):
t0 = time.time()
process = subprocess.Popen(["python", sys.argv[1]], stdout=subprocess.PIPE)
try:
# Stop the current bot if the timeout was reached - the bot needs to finish a game within 3 minutes real time
result = process.communicate(timeout=timeout_time)
except subprocess.TimeoutExpired:
continue
out, err = result
result = out.decode("utf-8")
# Break as the bot run was successful
break
if process.returncode is not None:
# Reformat the output into a list
print_output: str = result
linebreaks = [
["\r\n", print_output.count("\r\n")],
["\r", print_output.count("\r")],
["\n", print_output.count("\n")],
]
most_linebreaks_type = max(linebreaks, key=lambda x: x[1])
linebreak_type, linebreak_count = most_linebreaks_type
output_as_list = print_output.split(linebreak_type)
print("Travis test script, bot output:\r\n{}".format("\r\n".join(output_as_list)))
# Bot was not successfully run in time, returncode will be None
if process.returncode is None or process.returncode != 0:
print("Exiting with exit code 5, error: Attempted to launch script {} timed out after {} seconds. Retries completed: {}".format(sys.argv[1], timeout_time, retries))
exit(5)
# process.returncode will always return 0 if the game was run successfully or if there was a python error (in this case it returns as defeat)
print("Returncode: {}".format(process.returncode))
print("Game took {} real time seconds".format(round(time.time() - t0, 1)))
if process is not None and process.returncode == 0:
for line in output_as_list:
# This will throw an error if a bot is called Traceback
if "Traceback " in line:
|
print("Exiting with exit code 0")
exit(0)
# Exit code 1: game crashed I think
print("Exiting with exit code 1")
exit(1)
# Exit code 2: bot was not launched
print("Exiting with exit code 2")
exit(2) | print("Exiting with exit code 3")
exit(3) | conditional_block |
travis_test_script.py | import sys, subprocess, time
""" | docker build -t test_image -f test/Dockerfile .
docker run test_image -c "python test/travis_test_script.py test/autotest_bot.py"
"""
retries = 2
timeout_time = 3*60 # My maxout bot took 110 - 140 real seconds for 7 minutes in game time
if len(sys.argv) > 1:
# Attempt to run process with retries and timeouts
t0 = time.time()
process, result = None, None
for i in range(retries):
t0 = time.time()
process = subprocess.Popen(["python", sys.argv[1]], stdout=subprocess.PIPE)
try:
# Stop the current bot if the timeout was reached - the bot needs to finish a game within 3 minutes real time
result = process.communicate(timeout=timeout_time)
except subprocess.TimeoutExpired:
continue
out, err = result
result = out.decode("utf-8")
# Break as the bot run was successful
break
if process.returncode is not None:
# Reformat the output into a list
print_output: str = result
linebreaks = [
["\r\n", print_output.count("\r\n")],
["\r", print_output.count("\r")],
["\n", print_output.count("\n")],
]
most_linebreaks_type = max(linebreaks, key=lambda x: x[1])
linebreak_type, linebreak_count = most_linebreaks_type
output_as_list = print_output.split(linebreak_type)
print("Travis test script, bot output:\r\n{}".format("\r\n".join(output_as_list)))
# Bot was not successfully run in time, returncode will be None
if process.returncode is None or process.returncode != 0:
print("Exiting with exit code 5, error: Attempted to launch script {} timed out after {} seconds. Retries completed: {}".format(sys.argv[1], timeout_time, retries))
exit(5)
# process.returncode will always return 0 if the game was run successfully or if there was a python error (in this case it returns as defeat)
print("Returncode: {}".format(process.returncode))
print("Game took {} real time seconds".format(round(time.time() - t0, 1)))
if process is not None and process.returncode == 0:
for line in output_as_list:
# This will throw an error if a bot is called Traceback
if "Traceback " in line:
print("Exiting with exit code 3")
exit(3)
print("Exiting with exit code 0")
exit(0)
# Exit code 1: game crashed I think
print("Exiting with exit code 1")
exit(1)
# Exit code 2: bot was not launched
print("Exiting with exit code 2")
exit(2) | This script is made as a wrapper for sc2 bots to set a timeout to the bots (in case they cant find the last enemy structure or the game is ending in a draw)
Usage:
cd into python-sc2/ directory | random_line_split |
poly.rs | // Copyright (C) 2020 Inderjit Gill <email@indy.io>
// This file is part of Seni
// Seni is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Seni is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
use crate::error::{Error, Result};
use crate::matrix::Matrix;
use crate::render_list::RenderList;
use crate::rgb::Rgb;
use crate::uvmapper::UvMapping;
use log::error;
pub fn | (
render_list: &mut RenderList,
matrix: &Matrix,
coords: &[(f32, f32)],
colours: &[Rgb],
uvm: &UvMapping,
) -> Result<()> {
let num_vertices = coords.len();
if colours.len() != num_vertices {
error!("render_poly: coords and colours length mismatch");
return Err(Error::Geometry);
} else if num_vertices < 3 {
return Ok(());
}
let (x, y) = coords[0];
render_list.prepare_to_add_triangle_strip(matrix, num_vertices, x, y)?;
let rp = render_list
.render_packets
.last_mut()
.ok_or(Error::Geometry)?;
let rpg = rp.get_mut_render_packet_geometry()?;
for i in 0..num_vertices {
let (x, y) = coords[i];
rpg.add_vertex(matrix, x, y, &colours[i], uvm.map[4], uvm.map[5])
}
Ok(())
}
| render | identifier_name |
poly.rs | // Copyright (C) 2020 Inderjit Gill <email@indy.io>
// This file is part of Seni
// Seni is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Seni is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
use crate::error::{Error, Result};
use crate::matrix::Matrix;
use crate::render_list::RenderList;
use crate::rgb::Rgb;
use crate::uvmapper::UvMapping;
use log::error;
pub fn render(
render_list: &mut RenderList,
matrix: &Matrix,
coords: &[(f32, f32)],
colours: &[Rgb],
uvm: &UvMapping,
) -> Result<()> | {
let num_vertices = coords.len();
if colours.len() != num_vertices {
error!("render_poly: coords and colours length mismatch");
return Err(Error::Geometry);
} else if num_vertices < 3 {
return Ok(());
}
let (x, y) = coords[0];
render_list.prepare_to_add_triangle_strip(matrix, num_vertices, x, y)?;
let rp = render_list
.render_packets
.last_mut()
.ok_or(Error::Geometry)?;
let rpg = rp.get_mut_render_packet_geometry()?;
for i in 0..num_vertices {
let (x, y) = coords[i];
rpg.add_vertex(matrix, x, y, &colours[i], uvm.map[4], uvm.map[5])
}
Ok(())
} | identifier_body | |
poly.rs | // Copyright (C) 2020 Inderjit Gill <email@indy.io>
// This file is part of Seni
// Seni is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Seni is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
| use crate::render_list::RenderList;
use crate::rgb::Rgb;
use crate::uvmapper::UvMapping;
use log::error;
pub fn render(
render_list: &mut RenderList,
matrix: &Matrix,
coords: &[(f32, f32)],
colours: &[Rgb],
uvm: &UvMapping,
) -> Result<()> {
let num_vertices = coords.len();
if colours.len() != num_vertices {
error!("render_poly: coords and colours length mismatch");
return Err(Error::Geometry);
} else if num_vertices < 3 {
return Ok(());
}
let (x, y) = coords[0];
render_list.prepare_to_add_triangle_strip(matrix, num_vertices, x, y)?;
let rp = render_list
.render_packets
.last_mut()
.ok_or(Error::Geometry)?;
let rpg = rp.get_mut_render_packet_geometry()?;
for i in 0..num_vertices {
let (x, y) = coords[i];
rpg.add_vertex(matrix, x, y, &colours[i], uvm.map[4], uvm.map[5])
}
Ok(())
} | use crate::error::{Error, Result};
use crate::matrix::Matrix; | random_line_split |
era_prep.py | #!/usr/bin/env python
""" This module preprocesses ERA-Interim data, units, accumulated to instantaneous values and timestep interpolation for 6 h to 3 h values.
Example:
as import:
| Attributes:
wd = "/home/joel/sim/topomap_test/"
plotshp = TRUE
Todo:
"""
path2script = "./rsrc/toposcale_pre2.R"
# main
def main(wd, startDate, endDate):
"""Main entry point for the script."""
run_rscript_fileout(path2script,[wd, startDate, endDate])
# functions
def run_rscript_stdout(path2script , args):
""" Function to define comands to run an Rscript. Returns an object. """
import subprocess
command = 'Rscript'
cmd = [command, path2script] + args
print("Running:" + str(cmd))
x = subprocess.check_output(cmd, universal_newlines=True)
return(x)
def run_rscript_fileout(path2script , args):
""" Function to define comands to run an Rscript. Outputs a file. """
import subprocess
command = 'Rscript'
cmd = [command, path2script] + args
print("Running:" + str(cmd))
subprocess.check_output(cmd)
# calling main
if __name__ == '__main__':
import sys
wd = sys.argv[1]
startDate = sys.argv[2]
endDate = sys.argv[3]
main(wd, startDate, endDate) | from getERA import era_prep as prep
prep.main(wd, config['main']['startDate'], config['main']['endDate'])
| random_line_split |
era_prep.py | #!/usr/bin/env python
""" This module preprocesses ERA-Interim data, units, accumulated to instantaneous values and timestep interpolation for 6 h to 3 h values.
Example:
as import:
from getERA import era_prep as prep
prep.main(wd, config['main']['startDate'], config['main']['endDate'])
Attributes:
wd = "/home/joel/sim/topomap_test/"
plotshp = TRUE
Todo:
"""
path2script = "./rsrc/toposcale_pre2.R"
# main
def main(wd, startDate, endDate):
"""Main entry point for the script."""
run_rscript_fileout(path2script,[wd, startDate, endDate])
# functions
def run_rscript_stdout(path2script , args):
""" Function to define comands to run an Rscript. Returns an object. """
import subprocess
command = 'Rscript'
cmd = [command, path2script] + args
print("Running:" + str(cmd))
x = subprocess.check_output(cmd, universal_newlines=True)
return(x)
def run_rscript_fileout(path2script , args):
""" Function to define comands to run an Rscript. Outputs a file. """
import subprocess
command = 'Rscript'
cmd = [command, path2script] + args
print("Running:" + str(cmd))
subprocess.check_output(cmd)
# calling main
if __name__ == '__main__':
| import sys
wd = sys.argv[1]
startDate = sys.argv[2]
endDate = sys.argv[3]
main(wd, startDate, endDate) | conditional_block | |
era_prep.py | #!/usr/bin/env python
""" This module preprocesses ERA-Interim data, units, accumulated to instantaneous values and timestep interpolation for 6 h to 3 h values.
Example:
as import:
from getERA import era_prep as prep
prep.main(wd, config['main']['startDate'], config['main']['endDate'])
Attributes:
wd = "/home/joel/sim/topomap_test/"
plotshp = TRUE
Todo:
"""
path2script = "./rsrc/toposcale_pre2.R"
# main
def | (wd, startDate, endDate):
"""Main entry point for the script."""
run_rscript_fileout(path2script,[wd, startDate, endDate])
# functions
def run_rscript_stdout(path2script , args):
""" Function to define comands to run an Rscript. Returns an object. """
import subprocess
command = 'Rscript'
cmd = [command, path2script] + args
print("Running:" + str(cmd))
x = subprocess.check_output(cmd, universal_newlines=True)
return(x)
def run_rscript_fileout(path2script , args):
""" Function to define comands to run an Rscript. Outputs a file. """
import subprocess
command = 'Rscript'
cmd = [command, path2script] + args
print("Running:" + str(cmd))
subprocess.check_output(cmd)
# calling main
if __name__ == '__main__':
import sys
wd = sys.argv[1]
startDate = sys.argv[2]
endDate = sys.argv[3]
main(wd, startDate, endDate)
| main | identifier_name |
era_prep.py | #!/usr/bin/env python
""" This module preprocesses ERA-Interim data, units, accumulated to instantaneous values and timestep interpolation for 6 h to 3 h values.
Example:
as import:
from getERA import era_prep as prep
prep.main(wd, config['main']['startDate'], config['main']['endDate'])
Attributes:
wd = "/home/joel/sim/topomap_test/"
plotshp = TRUE
Todo:
"""
path2script = "./rsrc/toposcale_pre2.R"
# main
def main(wd, startDate, endDate):
"""Main entry point for the script."""
run_rscript_fileout(path2script,[wd, startDate, endDate])
# functions
def run_rscript_stdout(path2script , args):
""" Function to define comands to run an Rscript. Returns an object. """
import subprocess
command = 'Rscript'
cmd = [command, path2script] + args
print("Running:" + str(cmd))
x = subprocess.check_output(cmd, universal_newlines=True)
return(x)
def run_rscript_fileout(path2script , args):
|
# calling main
if __name__ == '__main__':
import sys
wd = sys.argv[1]
startDate = sys.argv[2]
endDate = sys.argv[3]
main(wd, startDate, endDate)
| """ Function to define comands to run an Rscript. Outputs a file. """
import subprocess
command = 'Rscript'
cmd = [command, path2script] + args
print("Running:" + str(cmd))
subprocess.check_output(cmd) | identifier_body |
files.py | import six
#==============================================================================
# https://docs.python.org/2/library/csv.html
#==============================================================================
if six.PY2:
import csv
import codecs
import cStringIO
class UTF8Recoder:
"""
Iterator that reads an encoded stream and reencodes the input to UTF-8
"""
def __init__(self, f, encoding):
self.reader = codecs.getreader(encoding)(f)
def __iter__(self):
return self
def next(self):
return self.reader.next().encode("utf-8")
class UnicodeReader:
"""
A CSV reader which will iterate over lines in the CSV file "f",
which is encoded in the given encoding.
"""
def __init__(self, f, dialect=csv.excel, encoding="utf-8", **kwds):
f = UTF8Recoder(f, encoding)
self.reader = csv.reader(f, dialect=dialect, **kwds)
def next(self):
row = self.reader.next()
return [unicode(s, "utf-8") for s in row]
def __iter__(self):
return self
class UnicodeWriter:
"""
A CSV writer which will write rows to CSV file "f",
which is encoded in the given encoding.
"""
def | (self, f, dialect=csv.excel, encoding="utf-8", **kwds):
# Redirect output to a queue
self.queue = cStringIO.StringIO()
self.writer = csv.writer(self.queue, dialect=dialect, **kwds)
self.stream = f
self.encoder = codecs.getincrementalencoder(encoding)()
def writerow(self, row):
self.writer.writerow([s.encode("utf-8") for s in row])
# Fetch UTF-8 output from the queue ...
data = self.queue.getvalue()
data = data.decode("utf-8")
# ... and reencode it into the target encoding
data = self.encoder.encode(data)
# write to the target stream
self.stream.write(data)
# empty queue
self.queue.truncate(0)
def writerows(self, rows):
for row in rows:
self.writerow(row)
| __init__ | identifier_name |
files.py | import six
#==============================================================================
# https://docs.python.org/2/library/csv.html
#==============================================================================
if six.PY2:
import csv
import codecs
import cStringIO
class UTF8Recoder:
"""
Iterator that reads an encoded stream and reencodes the input to UTF-8
"""
def __init__(self, f, encoding):
self.reader = codecs.getreader(encoding)(f)
def __iter__(self):
return self
def next(self):
return self.reader.next().encode("utf-8")
class UnicodeReader:
"""
A CSV reader which will iterate over lines in the CSV file "f",
which is encoded in the given encoding.
"""
def __init__(self, f, dialect=csv.excel, encoding="utf-8", **kwds):
|
def next(self):
row = self.reader.next()
return [unicode(s, "utf-8") for s in row]
def __iter__(self):
return self
class UnicodeWriter:
"""
A CSV writer which will write rows to CSV file "f",
which is encoded in the given encoding.
"""
def __init__(self, f, dialect=csv.excel, encoding="utf-8", **kwds):
# Redirect output to a queue
self.queue = cStringIO.StringIO()
self.writer = csv.writer(self.queue, dialect=dialect, **kwds)
self.stream = f
self.encoder = codecs.getincrementalencoder(encoding)()
def writerow(self, row):
self.writer.writerow([s.encode("utf-8") for s in row])
# Fetch UTF-8 output from the queue ...
data = self.queue.getvalue()
data = data.decode("utf-8")
# ... and reencode it into the target encoding
data = self.encoder.encode(data)
# write to the target stream
self.stream.write(data)
# empty queue
self.queue.truncate(0)
def writerows(self, rows):
for row in rows:
self.writerow(row)
| f = UTF8Recoder(f, encoding)
self.reader = csv.reader(f, dialect=dialect, **kwds) | identifier_body |
files.py | import six
#==============================================================================
# https://docs.python.org/2/library/csv.html
#==============================================================================
if six.PY2:
| import csv
import codecs
import cStringIO
class UTF8Recoder:
"""
Iterator that reads an encoded stream and reencodes the input to UTF-8
"""
def __init__(self, f, encoding):
self.reader = codecs.getreader(encoding)(f)
def __iter__(self):
return self
def next(self):
return self.reader.next().encode("utf-8")
class UnicodeReader:
"""
A CSV reader which will iterate over lines in the CSV file "f",
which is encoded in the given encoding.
"""
def __init__(self, f, dialect=csv.excel, encoding="utf-8", **kwds):
f = UTF8Recoder(f, encoding)
self.reader = csv.reader(f, dialect=dialect, **kwds)
def next(self):
row = self.reader.next()
return [unicode(s, "utf-8") for s in row]
def __iter__(self):
return self
class UnicodeWriter:
"""
A CSV writer which will write rows to CSV file "f",
which is encoded in the given encoding.
"""
def __init__(self, f, dialect=csv.excel, encoding="utf-8", **kwds):
# Redirect output to a queue
self.queue = cStringIO.StringIO()
self.writer = csv.writer(self.queue, dialect=dialect, **kwds)
self.stream = f
self.encoder = codecs.getincrementalencoder(encoding)()
def writerow(self, row):
self.writer.writerow([s.encode("utf-8") for s in row])
# Fetch UTF-8 output from the queue ...
data = self.queue.getvalue()
data = data.decode("utf-8")
# ... and reencode it into the target encoding
data = self.encoder.encode(data)
# write to the target stream
self.stream.write(data)
# empty queue
self.queue.truncate(0)
def writerows(self, rows):
for row in rows:
self.writerow(row) | conditional_block | |
files.py | import six
#==============================================================================
# https://docs.python.org/2/library/csv.html
#==============================================================================
if six.PY2:
import csv
import codecs
import cStringIO
class UTF8Recoder:
"""
Iterator that reads an encoded stream and reencodes the input to UTF-8
"""
def __init__(self, f, encoding):
self.reader = codecs.getreader(encoding)(f)
def __iter__(self):
return self
def next(self):
return self.reader.next().encode("utf-8")
class UnicodeReader:
"""
A CSV reader which will iterate over lines in the CSV file "f",
which is encoded in the given encoding.
"""
def __init__(self, f, dialect=csv.excel, encoding="utf-8", **kwds):
f = UTF8Recoder(f, encoding)
self.reader = csv.reader(f, dialect=dialect, **kwds)
def next(self):
row = self.reader.next()
return [unicode(s, "utf-8") for s in row]
def __iter__(self):
return self
class UnicodeWriter: | """
def __init__(self, f, dialect=csv.excel, encoding="utf-8", **kwds):
# Redirect output to a queue
self.queue = cStringIO.StringIO()
self.writer = csv.writer(self.queue, dialect=dialect, **kwds)
self.stream = f
self.encoder = codecs.getincrementalencoder(encoding)()
def writerow(self, row):
self.writer.writerow([s.encode("utf-8") for s in row])
# Fetch UTF-8 output from the queue ...
data = self.queue.getvalue()
data = data.decode("utf-8")
# ... and reencode it into the target encoding
data = self.encoder.encode(data)
# write to the target stream
self.stream.write(data)
# empty queue
self.queue.truncate(0)
def writerows(self, rows):
for row in rows:
self.writerow(row) | """
A CSV writer which will write rows to CSV file "f",
which is encoded in the given encoding. | random_line_split |
shell.py | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
An interactive shell.
This file is designed to be launched as a PYTHONSTARTUP script.
""" |
import atexit
import os
import platform
import warnings
import py4j
from pyspark import SparkConf
from pyspark.context import SparkContext
from pyspark.sql import SparkSession, SQLContext
if os.environ.get("SPARK_EXECUTOR_URI"):
SparkContext.setSystemProperty("spark.executor.uri", os.environ["SPARK_EXECUTOR_URI"])
SparkContext._ensure_initialized()
try:
# Try to access HiveConf, it will raise exception if Hive is not added
conf = SparkConf()
if conf.get('spark.sql.catalogImplementation', 'hive').lower() == 'hive':
SparkContext._jvm.org.apache.hadoop.hive.conf.HiveConf()
spark = SparkSession.builder\
.enableHiveSupport()\
.getOrCreate()
else:
spark = SparkSession.builder.getOrCreate()
except py4j.protocol.Py4JError:
if conf.get('spark.sql.catalogImplementation', '').lower() == 'hive':
warnings.warn("Fall back to non-hive support because failing to access HiveConf, "
"please make sure you build spark with hive")
spark = SparkSession.builder.getOrCreate()
except TypeError:
if conf.get('spark.sql.catalogImplementation', '').lower() == 'hive':
warnings.warn("Fall back to non-hive support because failing to access HiveConf, "
"please make sure you build spark with hive")
spark = SparkSession.builder.getOrCreate()
sc = spark.sparkContext
sql = spark.sql
atexit.register(lambda: sc.stop())
# for compatibility
sqlContext = spark._wrapped
sqlCtx = sqlContext
print("""Welcome to
____ __
/ __/__ ___ _____/ /__
_\ \/ _ \/ _ `/ __/ '_/
/__ / .__/\_,_/_/ /_/\_\ version %s
/_/
""" % sc.version)
print("Using Python version %s (%s, %s)" % (
platform.python_version(),
platform.python_build()[0],
platform.python_build()[1]))
print("SparkSession available as 'spark'.")
# The ./bin/pyspark script stores the old PYTHONSTARTUP value in OLD_PYTHONSTARTUP,
# which allows us to execute the user's PYTHONSTARTUP file:
_pythonstartup = os.environ.get('OLD_PYTHONSTARTUP')
if _pythonstartup and os.path.isfile(_pythonstartup):
with open(_pythonstartup) as f:
code = compile(f.read(), _pythonstartup, 'exec')
exec(code) | random_line_split | |
shell.py | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
An interactive shell.
This file is designed to be launched as a PYTHONSTARTUP script.
"""
import atexit
import os
import platform
import warnings
import py4j
from pyspark import SparkConf
from pyspark.context import SparkContext
from pyspark.sql import SparkSession, SQLContext
if os.environ.get("SPARK_EXECUTOR_URI"):
SparkContext.setSystemProperty("spark.executor.uri", os.environ["SPARK_EXECUTOR_URI"])
SparkContext._ensure_initialized()
try:
# Try to access HiveConf, it will raise exception if Hive is not added
conf = SparkConf()
if conf.get('spark.sql.catalogImplementation', 'hive').lower() == 'hive':
|
else:
spark = SparkSession.builder.getOrCreate()
except py4j.protocol.Py4JError:
if conf.get('spark.sql.catalogImplementation', '').lower() == 'hive':
warnings.warn("Fall back to non-hive support because failing to access HiveConf, "
"please make sure you build spark with hive")
spark = SparkSession.builder.getOrCreate()
except TypeError:
if conf.get('spark.sql.catalogImplementation', '').lower() == 'hive':
warnings.warn("Fall back to non-hive support because failing to access HiveConf, "
"please make sure you build spark with hive")
spark = SparkSession.builder.getOrCreate()
sc = spark.sparkContext
sql = spark.sql
atexit.register(lambda: sc.stop())
# for compatibility
sqlContext = spark._wrapped
sqlCtx = sqlContext
print("""Welcome to
____ __
/ __/__ ___ _____/ /__
_\ \/ _ \/ _ `/ __/ '_/
/__ / .__/\_,_/_/ /_/\_\ version %s
/_/
""" % sc.version)
print("Using Python version %s (%s, %s)" % (
platform.python_version(),
platform.python_build()[0],
platform.python_build()[1]))
print("SparkSession available as 'spark'.")
# The ./bin/pyspark script stores the old PYTHONSTARTUP value in OLD_PYTHONSTARTUP,
# which allows us to execute the user's PYTHONSTARTUP file:
_pythonstartup = os.environ.get('OLD_PYTHONSTARTUP')
if _pythonstartup and os.path.isfile(_pythonstartup):
with open(_pythonstartup) as f:
code = compile(f.read(), _pythonstartup, 'exec')
exec(code)
| SparkContext._jvm.org.apache.hadoop.hive.conf.HiveConf()
spark = SparkSession.builder\
.enableHiveSupport()\
.getOrCreate() | conditional_block |
__init__.py | """The met component."""
from datetime import timedelta
import logging
from random import randrange
import metno
from homeassistant.const import (
CONF_ELEVATION,
CONF_LATITUDE,
CONF_LONGITUDE,
EVENT_CORE_CONFIG_UPDATE,
LENGTH_FEET,
LENGTH_METERS,
)
from homeassistant.core import Config, HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from homeassistant.util.distance import convert as convert_distance
import homeassistant.util.dt as dt_util
from .const import CONF_TRACK_HOME, DOMAIN
URL = "https://aa015h6buqvih86i1.api.met.no/weatherapi/locationforecast/2.0/complete"
_LOGGER = logging.getLogger(__name__)
async def async_setup(hass: HomeAssistant, config: Config) -> bool:
"""Set up configured Met."""
hass.data.setdefault(DOMAIN, {})
return True
async def async_setup_entry(hass, config_entry):
"""Set up Met as config entry."""
coordinator = MetDataUpdateCoordinator(hass, config_entry)
await coordinator.async_refresh()
if not coordinator.last_update_success:
raise ConfigEntryNotReady
if config_entry.data.get(CONF_TRACK_HOME, False):
coordinator.track_home()
hass.data[DOMAIN][config_entry.entry_id] = coordinator
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(config_entry, "weather")
)
return True
async def async_unload_entry(hass, config_entry): | hass.data[DOMAIN][config_entry.entry_id].untrack_home()
hass.data[DOMAIN].pop(config_entry.entry_id)
return True
class MetDataUpdateCoordinator(DataUpdateCoordinator):
"""Class to manage fetching Met data."""
def __init__(self, hass, config_entry):
"""Initialize global Met data updater."""
self._unsub_track_home = None
self.weather = MetWeatherData(
hass, config_entry.data, hass.config.units.is_metric
)
self.weather.init_data()
update_interval = timedelta(minutes=randrange(55, 65))
super().__init__(hass, _LOGGER, name=DOMAIN, update_interval=update_interval)
async def _async_update_data(self):
"""Fetch data from Met."""
try:
return await self.weather.fetch_data()
except Exception as err:
raise UpdateFailed(f"Update failed: {err}") from err
def track_home(self):
"""Start tracking changes to HA home setting."""
if self._unsub_track_home:
return
async def _async_update_weather_data(_event=None):
"""Update weather data."""
self.weather.init_data()
await self.async_refresh()
self._unsub_track_home = self.hass.bus.async_listen(
EVENT_CORE_CONFIG_UPDATE, _async_update_weather_data
)
def untrack_home(self):
"""Stop tracking changes to HA home setting."""
if self._unsub_track_home:
self._unsub_track_home()
self._unsub_track_home = None
class MetWeatherData:
"""Keep data for Met.no weather entities."""
def __init__(self, hass, config, is_metric):
"""Initialise the weather entity data."""
self.hass = hass
self._config = config
self._is_metric = is_metric
self._weather_data = None
self.current_weather_data = {}
self.daily_forecast = None
self.hourly_forecast = None
def init_data(self):
"""Weather data inialization - get the coordinates."""
if self._config.get(CONF_TRACK_HOME, False):
latitude = self.hass.config.latitude
longitude = self.hass.config.longitude
elevation = self.hass.config.elevation
else:
latitude = self._config[CONF_LATITUDE]
longitude = self._config[CONF_LONGITUDE]
elevation = self._config[CONF_ELEVATION]
if not self._is_metric:
elevation = int(
round(convert_distance(elevation, LENGTH_FEET, LENGTH_METERS))
)
coordinates = {
"lat": str(latitude),
"lon": str(longitude),
"msl": str(elevation),
}
self._weather_data = metno.MetWeatherData(
coordinates, async_get_clientsession(self.hass), api_url=URL
)
async def fetch_data(self):
"""Fetch data from API - (current weather and forecast)."""
await self._weather_data.fetching_data()
self.current_weather_data = self._weather_data.get_current_weather()
time_zone = dt_util.DEFAULT_TIME_ZONE
self.daily_forecast = self._weather_data.get_forecast(time_zone, False)
self.hourly_forecast = self._weather_data.get_forecast(time_zone, True)
return self | """Unload a config entry."""
await hass.config_entries.async_forward_entry_unload(config_entry, "weather") | random_line_split |
__init__.py | """The met component."""
from datetime import timedelta
import logging
from random import randrange
import metno
from homeassistant.const import (
CONF_ELEVATION,
CONF_LATITUDE,
CONF_LONGITUDE,
EVENT_CORE_CONFIG_UPDATE,
LENGTH_FEET,
LENGTH_METERS,
)
from homeassistant.core import Config, HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from homeassistant.util.distance import convert as convert_distance
import homeassistant.util.dt as dt_util
from .const import CONF_TRACK_HOME, DOMAIN
URL = "https://aa015h6buqvih86i1.api.met.no/weatherapi/locationforecast/2.0/complete"
_LOGGER = logging.getLogger(__name__)
async def async_setup(hass: HomeAssistant, config: Config) -> bool:
"""Set up configured Met."""
hass.data.setdefault(DOMAIN, {})
return True
async def async_setup_entry(hass, config_entry):
"""Set up Met as config entry."""
coordinator = MetDataUpdateCoordinator(hass, config_entry)
await coordinator.async_refresh()
if not coordinator.last_update_success:
raise ConfigEntryNotReady
if config_entry.data.get(CONF_TRACK_HOME, False):
coordinator.track_home()
hass.data[DOMAIN][config_entry.entry_id] = coordinator
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(config_entry, "weather")
)
return True
async def async_unload_entry(hass, config_entry):
"""Unload a config entry."""
await hass.config_entries.async_forward_entry_unload(config_entry, "weather")
hass.data[DOMAIN][config_entry.entry_id].untrack_home()
hass.data[DOMAIN].pop(config_entry.entry_id)
return True
class MetDataUpdateCoordinator(DataUpdateCoordinator):
"""Class to manage fetching Met data."""
def __init__(self, hass, config_entry):
"""Initialize global Met data updater."""
self._unsub_track_home = None
self.weather = MetWeatherData(
hass, config_entry.data, hass.config.units.is_metric
)
self.weather.init_data()
update_interval = timedelta(minutes=randrange(55, 65))
super().__init__(hass, _LOGGER, name=DOMAIN, update_interval=update_interval)
async def _async_update_data(self):
"""Fetch data from Met."""
try:
return await self.weather.fetch_data()
except Exception as err:
raise UpdateFailed(f"Update failed: {err}") from err
def track_home(self):
"""Start tracking changes to HA home setting."""
if self._unsub_track_home:
return
async def _async_update_weather_data(_event=None):
"""Update weather data."""
self.weather.init_data()
await self.async_refresh()
self._unsub_track_home = self.hass.bus.async_listen(
EVENT_CORE_CONFIG_UPDATE, _async_update_weather_data
)
def untrack_home(self):
"""Stop tracking changes to HA home setting."""
if self._unsub_track_home:
|
class MetWeatherData:
"""Keep data for Met.no weather entities."""
def __init__(self, hass, config, is_metric):
"""Initialise the weather entity data."""
self.hass = hass
self._config = config
self._is_metric = is_metric
self._weather_data = None
self.current_weather_data = {}
self.daily_forecast = None
self.hourly_forecast = None
def init_data(self):
"""Weather data inialization - get the coordinates."""
if self._config.get(CONF_TRACK_HOME, False):
latitude = self.hass.config.latitude
longitude = self.hass.config.longitude
elevation = self.hass.config.elevation
else:
latitude = self._config[CONF_LATITUDE]
longitude = self._config[CONF_LONGITUDE]
elevation = self._config[CONF_ELEVATION]
if not self._is_metric:
elevation = int(
round(convert_distance(elevation, LENGTH_FEET, LENGTH_METERS))
)
coordinates = {
"lat": str(latitude),
"lon": str(longitude),
"msl": str(elevation),
}
self._weather_data = metno.MetWeatherData(
coordinates, async_get_clientsession(self.hass), api_url=URL
)
async def fetch_data(self):
"""Fetch data from API - (current weather and forecast)."""
await self._weather_data.fetching_data()
self.current_weather_data = self._weather_data.get_current_weather()
time_zone = dt_util.DEFAULT_TIME_ZONE
self.daily_forecast = self._weather_data.get_forecast(time_zone, False)
self.hourly_forecast = self._weather_data.get_forecast(time_zone, True)
return self
| self._unsub_track_home()
self._unsub_track_home = None | conditional_block |
__init__.py | """The met component."""
from datetime import timedelta
import logging
from random import randrange
import metno
from homeassistant.const import (
CONF_ELEVATION,
CONF_LATITUDE,
CONF_LONGITUDE,
EVENT_CORE_CONFIG_UPDATE,
LENGTH_FEET,
LENGTH_METERS,
)
from homeassistant.core import Config, HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from homeassistant.util.distance import convert as convert_distance
import homeassistant.util.dt as dt_util
from .const import CONF_TRACK_HOME, DOMAIN
URL = "https://aa015h6buqvih86i1.api.met.no/weatherapi/locationforecast/2.0/complete"
_LOGGER = logging.getLogger(__name__)
async def async_setup(hass: HomeAssistant, config: Config) -> bool:
"""Set up configured Met."""
hass.data.setdefault(DOMAIN, {})
return True
async def async_setup_entry(hass, config_entry):
"""Set up Met as config entry."""
coordinator = MetDataUpdateCoordinator(hass, config_entry)
await coordinator.async_refresh()
if not coordinator.last_update_success:
raise ConfigEntryNotReady
if config_entry.data.get(CONF_TRACK_HOME, False):
coordinator.track_home()
hass.data[DOMAIN][config_entry.entry_id] = coordinator
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(config_entry, "weather")
)
return True
async def async_unload_entry(hass, config_entry):
"""Unload a config entry."""
await hass.config_entries.async_forward_entry_unload(config_entry, "weather")
hass.data[DOMAIN][config_entry.entry_id].untrack_home()
hass.data[DOMAIN].pop(config_entry.entry_id)
return True
class MetDataUpdateCoordinator(DataUpdateCoordinator):
"""Class to manage fetching Met data."""
def __init__(self, hass, config_entry):
"""Initialize global Met data updater."""
self._unsub_track_home = None
self.weather = MetWeatherData(
hass, config_entry.data, hass.config.units.is_metric
)
self.weather.init_data()
update_interval = timedelta(minutes=randrange(55, 65))
super().__init__(hass, _LOGGER, name=DOMAIN, update_interval=update_interval)
async def _async_update_data(self):
"""Fetch data from Met."""
try:
return await self.weather.fetch_data()
except Exception as err:
raise UpdateFailed(f"Update failed: {err}") from err
def track_home(self):
"""Start tracking changes to HA home setting."""
if self._unsub_track_home:
return
async def _async_update_weather_data(_event=None):
"""Update weather data."""
self.weather.init_data()
await self.async_refresh()
self._unsub_track_home = self.hass.bus.async_listen(
EVENT_CORE_CONFIG_UPDATE, _async_update_weather_data
)
def untrack_home(self):
"""Stop tracking changes to HA home setting."""
if self._unsub_track_home:
self._unsub_track_home()
self._unsub_track_home = None
class MetWeatherData:
"""Keep data for Met.no weather entities."""
def | (self, hass, config, is_metric):
"""Initialise the weather entity data."""
self.hass = hass
self._config = config
self._is_metric = is_metric
self._weather_data = None
self.current_weather_data = {}
self.daily_forecast = None
self.hourly_forecast = None
def init_data(self):
"""Weather data inialization - get the coordinates."""
if self._config.get(CONF_TRACK_HOME, False):
latitude = self.hass.config.latitude
longitude = self.hass.config.longitude
elevation = self.hass.config.elevation
else:
latitude = self._config[CONF_LATITUDE]
longitude = self._config[CONF_LONGITUDE]
elevation = self._config[CONF_ELEVATION]
if not self._is_metric:
elevation = int(
round(convert_distance(elevation, LENGTH_FEET, LENGTH_METERS))
)
coordinates = {
"lat": str(latitude),
"lon": str(longitude),
"msl": str(elevation),
}
self._weather_data = metno.MetWeatherData(
coordinates, async_get_clientsession(self.hass), api_url=URL
)
async def fetch_data(self):
"""Fetch data from API - (current weather and forecast)."""
await self._weather_data.fetching_data()
self.current_weather_data = self._weather_data.get_current_weather()
time_zone = dt_util.DEFAULT_TIME_ZONE
self.daily_forecast = self._weather_data.get_forecast(time_zone, False)
self.hourly_forecast = self._weather_data.get_forecast(time_zone, True)
return self
| __init__ | identifier_name |
__init__.py | """The met component."""
from datetime import timedelta
import logging
from random import randrange
import metno
from homeassistant.const import (
CONF_ELEVATION,
CONF_LATITUDE,
CONF_LONGITUDE,
EVENT_CORE_CONFIG_UPDATE,
LENGTH_FEET,
LENGTH_METERS,
)
from homeassistant.core import Config, HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from homeassistant.util.distance import convert as convert_distance
import homeassistant.util.dt as dt_util
from .const import CONF_TRACK_HOME, DOMAIN
URL = "https://aa015h6buqvih86i1.api.met.no/weatherapi/locationforecast/2.0/complete"
_LOGGER = logging.getLogger(__name__)
async def async_setup(hass: HomeAssistant, config: Config) -> bool:
"""Set up configured Met."""
hass.data.setdefault(DOMAIN, {})
return True
async def async_setup_entry(hass, config_entry):
"""Set up Met as config entry."""
coordinator = MetDataUpdateCoordinator(hass, config_entry)
await coordinator.async_refresh()
if not coordinator.last_update_success:
raise ConfigEntryNotReady
if config_entry.data.get(CONF_TRACK_HOME, False):
coordinator.track_home()
hass.data[DOMAIN][config_entry.entry_id] = coordinator
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(config_entry, "weather")
)
return True
async def async_unload_entry(hass, config_entry):
"""Unload a config entry."""
await hass.config_entries.async_forward_entry_unload(config_entry, "weather")
hass.data[DOMAIN][config_entry.entry_id].untrack_home()
hass.data[DOMAIN].pop(config_entry.entry_id)
return True
class MetDataUpdateCoordinator(DataUpdateCoordinator):
|
class MetWeatherData:
"""Keep data for Met.no weather entities."""
def __init__(self, hass, config, is_metric):
"""Initialise the weather entity data."""
self.hass = hass
self._config = config
self._is_metric = is_metric
self._weather_data = None
self.current_weather_data = {}
self.daily_forecast = None
self.hourly_forecast = None
def init_data(self):
"""Weather data inialization - get the coordinates."""
if self._config.get(CONF_TRACK_HOME, False):
latitude = self.hass.config.latitude
longitude = self.hass.config.longitude
elevation = self.hass.config.elevation
else:
latitude = self._config[CONF_LATITUDE]
longitude = self._config[CONF_LONGITUDE]
elevation = self._config[CONF_ELEVATION]
if not self._is_metric:
elevation = int(
round(convert_distance(elevation, LENGTH_FEET, LENGTH_METERS))
)
coordinates = {
"lat": str(latitude),
"lon": str(longitude),
"msl": str(elevation),
}
self._weather_data = metno.MetWeatherData(
coordinates, async_get_clientsession(self.hass), api_url=URL
)
async def fetch_data(self):
"""Fetch data from API - (current weather and forecast)."""
await self._weather_data.fetching_data()
self.current_weather_data = self._weather_data.get_current_weather()
time_zone = dt_util.DEFAULT_TIME_ZONE
self.daily_forecast = self._weather_data.get_forecast(time_zone, False)
self.hourly_forecast = self._weather_data.get_forecast(time_zone, True)
return self
| """Class to manage fetching Met data."""
def __init__(self, hass, config_entry):
"""Initialize global Met data updater."""
self._unsub_track_home = None
self.weather = MetWeatherData(
hass, config_entry.data, hass.config.units.is_metric
)
self.weather.init_data()
update_interval = timedelta(minutes=randrange(55, 65))
super().__init__(hass, _LOGGER, name=DOMAIN, update_interval=update_interval)
async def _async_update_data(self):
"""Fetch data from Met."""
try:
return await self.weather.fetch_data()
except Exception as err:
raise UpdateFailed(f"Update failed: {err}") from err
def track_home(self):
"""Start tracking changes to HA home setting."""
if self._unsub_track_home:
return
async def _async_update_weather_data(_event=None):
"""Update weather data."""
self.weather.init_data()
await self.async_refresh()
self._unsub_track_home = self.hass.bus.async_listen(
EVENT_CORE_CONFIG_UPDATE, _async_update_weather_data
)
def untrack_home(self):
"""Stop tracking changes to HA home setting."""
if self._unsub_track_home:
self._unsub_track_home()
self._unsub_track_home = None | identifier_body |
sph_hamsi_test.rs | extern crate sphlib;
extern crate libc;
use sphlib::{sph_hamsi, utils};
#[test]
fn will_be_224_hash() {
let dest = sph_hamsi::hamsi224_init_load_close("");
let actual = utils::to_hex_hash(&dest);
assert_eq!("b9f6eb1a9b990373f9d2cb125584333c69a3d41ae291845f05da221f", actual.to_string());
}
#[test]
fn will_be_256_hash() |
#[test]
fn will_be_384_hash() {
let dest = sph_hamsi::hamsi384_init_load_close("");
let actual = utils::to_hex_hash(&dest);
assert_eq!("3943cd34e3b96b197a8bf4bac7aa982d18530dd12f41136b26d7e88759255f21153f4a4bd02e523612b8427f9dd96c8d", actual.to_string());
}
#[test]
fn will_be_512_hash() {
let dest = sph_hamsi::hamsi512_init_load_close("");
let actual = utils::to_hex_hash(&dest);
assert_eq!("5cd7436a91e27fc809d7015c3407540633dab391127113ce6ba360f0c1e35f404510834a551610d6e871e75651ea381a8ba628af1dcf2b2be13af2eb6247290f", actual.to_string());
}
| {
let dest = sph_hamsi::hamsi256_init_load_close("");
let actual = utils::to_hex_hash(&dest);
assert_eq!("750e9ec469f4db626bee7e0c10ddaa1bd01fe194b94efbabebd24764dc2b13e9", actual.to_string());
} | identifier_body |
sph_hamsi_test.rs | extern crate sphlib;
extern crate libc;
use sphlib::{sph_hamsi, utils};
#[test]
fn | () {
let dest = sph_hamsi::hamsi224_init_load_close("");
let actual = utils::to_hex_hash(&dest);
assert_eq!("b9f6eb1a9b990373f9d2cb125584333c69a3d41ae291845f05da221f", actual.to_string());
}
#[test]
fn will_be_256_hash() {
let dest = sph_hamsi::hamsi256_init_load_close("");
let actual = utils::to_hex_hash(&dest);
assert_eq!("750e9ec469f4db626bee7e0c10ddaa1bd01fe194b94efbabebd24764dc2b13e9", actual.to_string());
}
#[test]
fn will_be_384_hash() {
let dest = sph_hamsi::hamsi384_init_load_close("");
let actual = utils::to_hex_hash(&dest);
assert_eq!("3943cd34e3b96b197a8bf4bac7aa982d18530dd12f41136b26d7e88759255f21153f4a4bd02e523612b8427f9dd96c8d", actual.to_string());
}
#[test]
fn will_be_512_hash() {
let dest = sph_hamsi::hamsi512_init_load_close("");
let actual = utils::to_hex_hash(&dest);
assert_eq!("5cd7436a91e27fc809d7015c3407540633dab391127113ce6ba360f0c1e35f404510834a551610d6e871e75651ea381a8ba628af1dcf2b2be13af2eb6247290f", actual.to_string());
}
| will_be_224_hash | identifier_name |
sph_hamsi_test.rs | extern crate sphlib;
extern crate libc;
use sphlib::{sph_hamsi, utils};
#[test]
fn will_be_224_hash() {
let dest = sph_hamsi::hamsi224_init_load_close("");
let actual = utils::to_hex_hash(&dest);
assert_eq!("b9f6eb1a9b990373f9d2cb125584333c69a3d41ae291845f05da221f", actual.to_string());
}
#[test]
fn will_be_256_hash() {
let dest = sph_hamsi::hamsi256_init_load_close("");
let actual = utils::to_hex_hash(&dest);
assert_eq!("750e9ec469f4db626bee7e0c10ddaa1bd01fe194b94efbabebd24764dc2b13e9", actual.to_string());
}
| }
#[test]
fn will_be_512_hash() {
let dest = sph_hamsi::hamsi512_init_load_close("");
let actual = utils::to_hex_hash(&dest);
assert_eq!("5cd7436a91e27fc809d7015c3407540633dab391127113ce6ba360f0c1e35f404510834a551610d6e871e75651ea381a8ba628af1dcf2b2be13af2eb6247290f", actual.to_string());
} | #[test]
fn will_be_384_hash() {
let dest = sph_hamsi::hamsi384_init_load_close("");
let actual = utils::to_hex_hash(&dest);
assert_eq!("3943cd34e3b96b197a8bf4bac7aa982d18530dd12f41136b26d7e88759255f21153f4a4bd02e523612b8427f9dd96c8d", actual.to_string()); | random_line_split |
test_json_protocol.py | # -*- coding: utf-8 -*-
from thriftpy.protocol import TJSONProtocol
from thriftpy.thrift import TPayload, TType
from thriftpy.transport import TMemoryBuffer
from thriftpy._compat import u
import thriftpy.protocol.json as proto
class TItem(TPayload):
thrift_spec = {
1: (TType.I32, "id"),
2: (TType.LIST, "phones", (TType.STRING)),
}
default_spec = [("id", None), ("phones", None)]
def test_map_to_obj():
val = [{"key": "ratio", "value": "0.618"}]
spec = [TType.STRING, TType.DOUBLE]
obj = proto.map_to_obj(val, spec)
assert {"ratio": 0.618} == obj
| obj = {"ratio": 0.618}
spec = [TType.STRING, TType.DOUBLE]
json = proto.map_to_json(obj, spec)
assert [{"key": "ratio", "value": 0.618}] == json
def test_list_to_obj():
val = [4, 8, 4, 12, 67]
spec = TType.I32
obj = proto.list_to_obj(val, spec)
assert [4, 8, 4, 12, 67] == obj
def test_list_to_json():
val = [4, 8, 4, 12, 67]
spec = TType.I32
json = proto.list_to_json(val, spec)
assert [4, 8, 4, 12, 67] == json
def test_struct_to_json():
obj = TItem(id=13, phones=["5234", "12346456"])
json = proto.struct_to_json(obj)
assert {"id": 13, "phones": ["5234", "12346456"]} == json
def test_struct_to_obj():
json = {"id": 13, "phones": ["5234", "12346456"]}
obj = TItem()
obj = proto.struct_to_obj(json, obj)
assert obj.id == 13 and obj.phones == ["5234", "12346456"]
def test_json_proto_api_write():
obj = TItem(id=13, phones=["5234", "12346456"])
trans = TMemoryBuffer()
p = TJSONProtocol(trans)
p.write_struct(obj)
data = trans.getvalue().decode("utf-8")
length = data[0:4]
import json
data = json.loads(data[4:])
assert length == "\x00\x00\x00S" and data == {
"metadata": {"version": 1},
"payload": {"phones": ["5234", "12346456"], "id": 13}}
def test_json_proto_api_read():
obj = TItem(id=13, phones=["5234", "12346456"])
trans = TMemoryBuffer()
p = TJSONProtocol(trans)
p.write_struct(obj)
obj2 = TItem()
obj2 = p.read_struct(obj2)
assert obj.id == 13 and obj.phones == ["5234", "12346456"]
def test_unicode_string():
class Foo(TPayload):
thrift_spec = {
1: (TType.STRING, "name")
}
default_spec = [("name", None)]
trans = TMemoryBuffer()
p = TJSONProtocol(trans)
foo = Foo(name=u('pão de açúcar'))
foo.write(p)
foo2 = Foo()
foo2.read(p)
assert foo == foo2 |
def test_map_to_json(): | random_line_split |
test_json_protocol.py | # -*- coding: utf-8 -*-
from thriftpy.protocol import TJSONProtocol
from thriftpy.thrift import TPayload, TType
from thriftpy.transport import TMemoryBuffer
from thriftpy._compat import u
import thriftpy.protocol.json as proto
class TItem(TPayload):
thrift_spec = {
1: (TType.I32, "id"),
2: (TType.LIST, "phones", (TType.STRING)),
}
default_spec = [("id", None), ("phones", None)]
def test_map_to_obj():
val = [{"key": "ratio", "value": "0.618"}]
spec = [TType.STRING, TType.DOUBLE]
obj = proto.map_to_obj(val, spec)
assert {"ratio": 0.618} == obj
def test_map_to_json():
obj = {"ratio": 0.618}
spec = [TType.STRING, TType.DOUBLE]
json = proto.map_to_json(obj, spec)
assert [{"key": "ratio", "value": 0.618}] == json
def test_list_to_obj():
val = [4, 8, 4, 12, 67]
spec = TType.I32
obj = proto.list_to_obj(val, spec)
assert [4, 8, 4, 12, 67] == obj
def test_list_to_json():
|
def test_struct_to_json():
obj = TItem(id=13, phones=["5234", "12346456"])
json = proto.struct_to_json(obj)
assert {"id": 13, "phones": ["5234", "12346456"]} == json
def test_struct_to_obj():
json = {"id": 13, "phones": ["5234", "12346456"]}
obj = TItem()
obj = proto.struct_to_obj(json, obj)
assert obj.id == 13 and obj.phones == ["5234", "12346456"]
def test_json_proto_api_write():
obj = TItem(id=13, phones=["5234", "12346456"])
trans = TMemoryBuffer()
p = TJSONProtocol(trans)
p.write_struct(obj)
data = trans.getvalue().decode("utf-8")
length = data[0:4]
import json
data = json.loads(data[4:])
assert length == "\x00\x00\x00S" and data == {
"metadata": {"version": 1},
"payload": {"phones": ["5234", "12346456"], "id": 13}}
def test_json_proto_api_read():
obj = TItem(id=13, phones=["5234", "12346456"])
trans = TMemoryBuffer()
p = TJSONProtocol(trans)
p.write_struct(obj)
obj2 = TItem()
obj2 = p.read_struct(obj2)
assert obj.id == 13 and obj.phones == ["5234", "12346456"]
def test_unicode_string():
class Foo(TPayload):
thrift_spec = {
1: (TType.STRING, "name")
}
default_spec = [("name", None)]
trans = TMemoryBuffer()
p = TJSONProtocol(trans)
foo = Foo(name=u('pão de açúcar'))
foo.write(p)
foo2 = Foo()
foo2.read(p)
assert foo == foo2
| val = [4, 8, 4, 12, 67]
spec = TType.I32
json = proto.list_to_json(val, spec)
assert [4, 8, 4, 12, 67] == json | identifier_body |
test_json_protocol.py | # -*- coding: utf-8 -*-
from thriftpy.protocol import TJSONProtocol
from thriftpy.thrift import TPayload, TType
from thriftpy.transport import TMemoryBuffer
from thriftpy._compat import u
import thriftpy.protocol.json as proto
class TItem(TPayload):
thrift_spec = {
1: (TType.I32, "id"),
2: (TType.LIST, "phones", (TType.STRING)),
}
default_spec = [("id", None), ("phones", None)]
def test_map_to_obj():
val = [{"key": "ratio", "value": "0.618"}]
spec = [TType.STRING, TType.DOUBLE]
obj = proto.map_to_obj(val, spec)
assert {"ratio": 0.618} == obj
def test_map_to_json():
obj = {"ratio": 0.618}
spec = [TType.STRING, TType.DOUBLE]
json = proto.map_to_json(obj, spec)
assert [{"key": "ratio", "value": 0.618}] == json
def test_list_to_obj():
val = [4, 8, 4, 12, 67]
spec = TType.I32
obj = proto.list_to_obj(val, spec)
assert [4, 8, 4, 12, 67] == obj
def test_list_to_json():
val = [4, 8, 4, 12, 67]
spec = TType.I32
json = proto.list_to_json(val, spec)
assert [4, 8, 4, 12, 67] == json
def test_struct_to_json():
obj = TItem(id=13, phones=["5234", "12346456"])
json = proto.struct_to_json(obj)
assert {"id": 13, "phones": ["5234", "12346456"]} == json
def test_struct_to_obj():
json = {"id": 13, "phones": ["5234", "12346456"]}
obj = TItem()
obj = proto.struct_to_obj(json, obj)
assert obj.id == 13 and obj.phones == ["5234", "12346456"]
def test_json_proto_api_write():
obj = TItem(id=13, phones=["5234", "12346456"])
trans = TMemoryBuffer()
p = TJSONProtocol(trans)
p.write_struct(obj)
data = trans.getvalue().decode("utf-8")
length = data[0:4]
import json
data = json.loads(data[4:])
assert length == "\x00\x00\x00S" and data == {
"metadata": {"version": 1},
"payload": {"phones": ["5234", "12346456"], "id": 13}}
def test_json_proto_api_read():
obj = TItem(id=13, phones=["5234", "12346456"])
trans = TMemoryBuffer()
p = TJSONProtocol(trans)
p.write_struct(obj)
obj2 = TItem()
obj2 = p.read_struct(obj2)
assert obj.id == 13 and obj.phones == ["5234", "12346456"]
def test_unicode_string():
class | (TPayload):
thrift_spec = {
1: (TType.STRING, "name")
}
default_spec = [("name", None)]
trans = TMemoryBuffer()
p = TJSONProtocol(trans)
foo = Foo(name=u('pão de açúcar'))
foo.write(p)
foo2 = Foo()
foo2.read(p)
assert foo == foo2
| Foo | identifier_name |
events.js | function runTest(config,qualifier) | {
var testname = testnamePrefix( qualifier, config.keysystem ) + ', basic events';
var configuration = getSimpleConfigurationForContent( config.content );
if ( config.initDataType && config.initData ) configuration.initDataTypes = [ config.initDataType ]
async_test(function(test)
{
var initDataType;
var initData;
var mediaKeySession;
function processMessage(event)
{
assert_true(event instanceof window.MediaKeyMessageEvent);
assert_equals(event.target, mediaKeySession);
assert_equals(event.type, 'message');
assert_any( assert_equals,
event.messageType,
[ 'license-request', 'individualization-request' ] );
config.messagehandler( event.messageType, event.message ).then( function( response ) {
waitForEventAndRunStep('keystatuseschange', mediaKeySession, test.step_func(processKeyStatusesChange), test);
mediaKeySession.update( response ).catch(function(error) {
forceTestFailureFromPromise(test, error);
});
});
}
function processKeyStatusesChange(event)
{
assert_true(event instanceof Event);
assert_equals(event.target, mediaKeySession);
assert_equals(event.type, 'keystatuseschange');
test.done();
}
navigator.requestMediaKeySystemAccess( config.keysystem, [ configuration ] ).then(function(access) {
initDataType = access.getConfiguration().initDataTypes[0];
if ( config.initDataType && config.initData ) {
initData = config.initData;
} else {
initData = getInitData(config.content, initDataType);
}
return access.createMediaKeys();
}).then(test.step_func(function(mediaKeys) {
mediaKeySession = mediaKeys.createSession();
waitForEventAndRunStep('message', mediaKeySession, test.step_func(processMessage), test);
return mediaKeySession.generateRequest(initDataType, initData);
})).catch(test.step_func(function(error) {
forceTestFailureFromPromise(test, error);
}));
}, testname );
} | identifier_body | |
events.js | function runTest(config,qualifier) {
var testname = testnamePrefix( qualifier, config.keysystem ) + ', basic events';
var configuration = getSimpleConfigurationForContent( config.content );
if ( config.initDataType && config.initData ) configuration.initDataTypes = [ config.initDataType ]
async_test(function(test)
{
var initDataType;
var initData;
var mediaKeySession;
function processMessage(event)
{
assert_true(event instanceof window.MediaKeyMessageEvent);
assert_equals(event.target, mediaKeySession);
assert_equals(event.type, 'message');
assert_any( assert_equals,
event.messageType,
[ 'license-request', 'individualization-request' ] );
config.messagehandler( event.messageType, event.message ).then( function( response ) {
waitForEventAndRunStep('keystatuseschange', mediaKeySession, test.step_func(processKeyStatusesChange), test);
mediaKeySession.update( response ).catch(function(error) {
forceTestFailureFromPromise(test, error);
});
});
}
function processKeyStatusesChange(event)
{
assert_true(event instanceof Event);
assert_equals(event.target, mediaKeySession);
assert_equals(event.type, 'keystatuseschange');
test.done();
}
navigator.requestMediaKeySystemAccess( config.keysystem, [ configuration ] ).then(function(access) {
initDataType = access.getConfiguration().initDataTypes[0];
if ( config.initDataType && config.initData ) {
initData = config.initData;
} else {
initData = getInitData(config.content, initDataType);
}
| return mediaKeySession.generateRequest(initDataType, initData);
})).catch(test.step_func(function(error) {
forceTestFailureFromPromise(test, error);
}));
}, testname );
} | return access.createMediaKeys();
}).then(test.step_func(function(mediaKeys) {
mediaKeySession = mediaKeys.createSession();
waitForEventAndRunStep('message', mediaKeySession, test.step_func(processMessage), test); | random_line_split |
events.js | function | (config,qualifier) {
var testname = testnamePrefix( qualifier, config.keysystem ) + ', basic events';
var configuration = getSimpleConfigurationForContent( config.content );
if ( config.initDataType && config.initData ) configuration.initDataTypes = [ config.initDataType ]
async_test(function(test)
{
var initDataType;
var initData;
var mediaKeySession;
function processMessage(event)
{
assert_true(event instanceof window.MediaKeyMessageEvent);
assert_equals(event.target, mediaKeySession);
assert_equals(event.type, 'message');
assert_any( assert_equals,
event.messageType,
[ 'license-request', 'individualization-request' ] );
config.messagehandler( event.messageType, event.message ).then( function( response ) {
waitForEventAndRunStep('keystatuseschange', mediaKeySession, test.step_func(processKeyStatusesChange), test);
mediaKeySession.update( response ).catch(function(error) {
forceTestFailureFromPromise(test, error);
});
});
}
function processKeyStatusesChange(event)
{
assert_true(event instanceof Event);
assert_equals(event.target, mediaKeySession);
assert_equals(event.type, 'keystatuseschange');
test.done();
}
navigator.requestMediaKeySystemAccess( config.keysystem, [ configuration ] ).then(function(access) {
initDataType = access.getConfiguration().initDataTypes[0];
if ( config.initDataType && config.initData ) {
initData = config.initData;
} else {
initData = getInitData(config.content, initDataType);
}
return access.createMediaKeys();
}).then(test.step_func(function(mediaKeys) {
mediaKeySession = mediaKeys.createSession();
waitForEventAndRunStep('message', mediaKeySession, test.step_func(processMessage), test);
return mediaKeySession.generateRequest(initDataType, initData);
})).catch(test.step_func(function(error) {
forceTestFailureFromPromise(test, error);
}));
}, testname );
}
| runTest | identifier_name |
conf.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# -*- coding: utf-8 -*-
#
# This file is based upon the file generated by sphinx-quickstart. However,
# where sphinx-quickstart hardcodes values in this file that you input, this
# file has been changed to pull from your module's metadata module.
#
# This file is execfile()d with the current directory set to its containing
# dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import os
import sys
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('../..'))
# Import project metadata
from fedora_college import metadata
# -- General configuration ----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx',
'sphinx.ext.todo', 'sphinx.ext.coverage', 'sphinx.ext.viewcode']
# show todos
todo_include_todos = True
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = metadata.project
copyright = metadata.copyright
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = metadata.version
# The full version, including alpha/beta/rc tags.
release = metadata.version
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output --------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'nature'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None | # The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = metadata.project_no_spaces + 'doc'
# -- Options for LaTeX output -------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author,
# documentclass [howto/manual]).
latex_documents = [
('index', metadata.project_no_spaces + '.tex',
metadata.project + ' Documentation', metadata.authors_string,
'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output -------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', metadata.package, metadata.project + ' Documentation',
metadata.authors_string, 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -----------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', metadata.project_no_spaces,
metadata.project + ' Documentation', metadata.authors_string,
metadata.project_no_spaces, metadata.description, 'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {
'python': ('http://docs.python.org/', None),
}
# Extra local configuration. This is useful for placing the class description
# in the class docstring and the __init__ parameter documentation in the
# __init__ docstring. See
# <http://sphinx-doc.org/ext/autodoc.html#confval-autoclass_content> for more
# information.
autoclass_content = 'both' | random_line_split | |
Gruntfile.js | /* recebe o objeto grunt como parâmetro*/
module.exports = function(grunt) {
grunt.initConfig({
clean: {
dist: {
src: 'dist'
}
},
copy: {
public: {
cwd: 'public',
src: '**',
dest: 'dist',
expand: true
}
},
useminPrepare: {
html: 'dist/**/*.html'
},
usemin: {
html: 'dist/**/*.html'
},
imagemin: {
public: {
xpand: true,
cwd: 'dist/img',
src: '**/*.{png, jpg, gif}',
dest: 'dist/img'
}
},
rev: {
options: {
encoding: 'utf8',
algorithm: 'md5',
length: 8
},
imagens: {
src: ['dist/img/**/*.{png,jpg,gif}']
},
minificados: {
src: ['dist/js/**/*.min.js', 'dist/css/**/*.min.css']
}
},
coffee: {
compilar: {
expand: true,
cwd: 'public/coffee',
src: ['**/*.coffee'],
dest: 'public/js',
ext: '.js'
}
},
less: {
compilar: {
expand: true,
cwd: 'public/less',
src: ['**/*.less'],
dest: 'public/css',
ext: '.css'
}
},
watch: {
coffee: {
options: {
event: ['added', 'changed']
},
files: 'public/coffee/**/*.coffee',
tasks: 'coffee:compilar'
},
less: {
options: {
event: ['added', 'changed']
},
files: 'public/less/**/*.less',
tasks: 'less:compilar'
},
js: {
options: {
event: ['changed']
},
files: 'public/js/**/*.js',
tasks: 'jshint:js'
}
},
jshint: {
js: {
src: ['public/js/**/*.js']
}
},
browserSync: {
public: {
bsFiles: { | options: {
watchTask: true,
server: {
baseDir: "public"
}
}
}
}
});
grunt.registerTask('dist', ['clean', 'copy']);
grunt.registerTask('minifica', ['useminPrepare', 'concat', 'uglify', 'cssmin', 'rev:imagens','rev:minificados', 'usemin', 'imagemin']);
grunt.registerTask('default', ['dist', 'minifica']);
// task para desenvolvimento
grunt.registerTask('dev', ['browserSync', 'watch']);
grunt.loadNpmTasks('grunt-contrib-copy');
grunt.loadNpmTasks('grunt-contrib-clean');
grunt.loadNpmTasks('grunt-contrib-concat');
grunt.loadNpmTasks('grunt-contrib-cssmin');
grunt.loadNpmTasks('grunt-contrib-imagemin');
grunt.loadNpmTasks('grunt-contrib-uglify');
grunt.loadNpmTasks('grunt-usemin');
grunt.loadNpmTasks('grunt-rev');
grunt.loadNpmTasks('grunt-contrib-coffee');
grunt.loadNpmTasks('grunt-contrib-less');
grunt.loadNpmTasks('grunt-contrib-watch');
grunt.loadNpmTasks('grunt-contrib-jshint');
grunt.loadNpmTasks('grunt-browser-sync');
} | src: ['public/**/*']
}, | random_line_split |
jqt.activityIndicator.js | /*
_/ _/_/ _/_/_/_/_/ _/
_/ _/ _/ _/_/ _/ _/ _/_/_/ _/_/_/
_/ _/ _/_/ _/ _/ _/ _/ _/ _/ _/ _/
_/ _/ _/ _/ _/ _/ _/ _/ _/ _/ _/
_/ _/_/ _/ _/ _/_/ _/_/_/ _/_/_/ _/ _/
_/
_/
Created by David Kaneda <http://www.davidkaneda.com>
Documentation and issue tracking on Google Code <http://code.google.com/p/jqtouch/>
Special thanks to Jonathan Stark <http://jonathanstark.com/>
and pinch/zoom <http://www.pinchzoom.com/>
(c) 2009 by jQTouch project members.
See LICENSE.txt for license.
activityIndicator - Daniel J. Pinter - DataZombies
Based on http://starkravingcoder.blogspot.com/2007/09/canvas-loading-indicator.html
Object Properties (all properties are set in the canvas tag):
animating
is the object in motion? Use object methods to change - true/false - Default = false
barHeight
height of the bars in px - Default = 5
barWidth
width of the bars in px - Default = 2
color
uses canvas's style attribute to set the bar color - in rgb() - Default = 0, 0, 0 (black)
direction
the direction the object rotates - counterclockwise/clockwise - Default = clockwise
innerRadius
radius of the hole in the middle in px - Default = 5
numberOfBars
how many bars the object has - Default = 12
speed
how fast the object rotates - larger numbers are slower - Default = 50
xPos
x-position on canvas in px - Default = center of canvas
yPos
y-position on canvas in px - Default = middle of canvas
Object Methods:
start()
begins the object's rotation
stop()
ends the object's rotation
Object Instantiation:
var aiGreenStar = new activityIndicator($('#GreenStar'));
Bind Object to Events via jQuery:
$('#page1').bind('pageAnimationStart', function (e, data) {if (data.direction === 'in'){aiGreenStar.start();}});
$('#page').bind('pageAnimationEnd', function (e, data) {if (data.direction === 'out'){aiGreenStar.stop();}});
Canvas tag with Object's ID:
This displays an green asterisk-like (*) activityIndicator in the top left corner of a 100 x 250 canvas.
<canvas id="GreenStar" height="100" width="250" barHeight="10" barWidth="3" style="color:rgb(0,255,0);"
direction="counterclockwise" innerRadius="5" numberOfBars="6" speed="50" xPos="30" yPos="45"></canvas>
*/
function activityIndicator(canvas) {
var animating = false;
var barHeight = $(canvas).attr('barHeight') - 0;
var barWidth = $(canvas).attr('barWidth') - 0;
var color = $(canvas).css('color');
var context = $(canvas).get(0).getContext('2d');
var direction = $(canvas).attr('direction');
var innerRadius = $(canvas).attr('innerRadius') - 0;
var numberOfBars = $(canvas).attr('numberOfBars') - 0;
var speed = $(canvas).attr('speed') - 0;
var xPos = $(canvas).attr('xPos') - 0;
var yPos = $(canvas).attr('yPos') - 0;
var offset = 0;
if (isNaN(barHeight)) {barHeight = 5;}
if (isNaN(barWidth)) {barWidth = 2;}
var a = color.indexOf('(') + 1;
var b = a;
if (a !== -1) {
if (color.substr(0, 4) === 'rgb('){
b = color.lastIndexOf(')') - a;
} else if (color.substr(0, 5) === 'rgba(') {
b = color.lastIndexOf(',') - a;
}
color = b > a ? color.substr(a, b) + ', ' : '0, 0, 0, ';
} else {
color = '0, 0, 0, ';
}
switch (direction){
case 'counterclockwise':
direction = -1;
break;
case 'clockwise': default:
direction = 1;
break;
}
if (isNaN(innerRadius)) {innerRadius = 5;}
if (isNaN(numberOfBars)) {numberOfBars = 12;}
if (isNaN(speed)) {speed = 50;}
if (isNaN(xPos)) {xPos = $(canvas).attr('width') / 2;}
if (isNaN(yPos)) {yPos = $(canvas).attr('height') / 2;}
function clear() {context.clearRect(0, 0, context.canvas.clientWidth, context.canvas.clientHeight);}; | clear();
context.save();
context.translate(xPos, yPos);
for (var i = 0; i < numberOfBars; i++) {
var angle = 2 * ((offset + i) % numberOfBars) * Math.PI / numberOfBars;
context.save();
context.translate((innerRadius * Math.sin(-angle)), (innerRadius * Math.cos(-angle)));
context.rotate(angle);
context.fillStyle = 'rgba(' + color + (numberOfBars + 1 - i) / (numberOfBars + 1) + ')';
context.fillRect(-barWidth / 2, 0, barWidth, barHeight);
context.restore();
}
context.restore();
};
function animate() {
if (!animating) {return;};
offset = (offset + direction) % numberOfBars;
draw(offset);
setTimeout(animate, speed);
};
function start(){
animating = true;
animate();
};
function stop(){
animating = false;
clear();
};
return {
start: start,
stop: stop
};
}; |
function draw(offset) { | random_line_split |
jqt.activityIndicator.js | /*
_/ _/_/ _/_/_/_/_/ _/
_/ _/ _/ _/_/ _/ _/ _/_/_/ _/_/_/
_/ _/ _/_/ _/ _/ _/ _/ _/ _/ _/ _/
_/ _/ _/ _/ _/ _/ _/ _/ _/ _/ _/
_/ _/_/ _/ _/ _/_/ _/_/_/ _/_/_/ _/ _/
_/
_/
Created by David Kaneda <http://www.davidkaneda.com>
Documentation and issue tracking on Google Code <http://code.google.com/p/jqtouch/>
Special thanks to Jonathan Stark <http://jonathanstark.com/>
and pinch/zoom <http://www.pinchzoom.com/>
(c) 2009 by jQTouch project members.
See LICENSE.txt for license.
activityIndicator - Daniel J. Pinter - DataZombies
Based on http://starkravingcoder.blogspot.com/2007/09/canvas-loading-indicator.html
Object Properties (all properties are set in the canvas tag):
animating
is the object in motion? Use object methods to change - true/false - Default = false
barHeight
height of the bars in px - Default = 5
barWidth
width of the bars in px - Default = 2
color
uses canvas's style attribute to set the bar color - in rgb() - Default = 0, 0, 0 (black)
direction
the direction the object rotates - counterclockwise/clockwise - Default = clockwise
innerRadius
radius of the hole in the middle in px - Default = 5
numberOfBars
how many bars the object has - Default = 12
speed
how fast the object rotates - larger numbers are slower - Default = 50
xPos
x-position on canvas in px - Default = center of canvas
yPos
y-position on canvas in px - Default = middle of canvas
Object Methods:
start()
begins the object's rotation
stop()
ends the object's rotation
Object Instantiation:
var aiGreenStar = new activityIndicator($('#GreenStar'));
Bind Object to Events via jQuery:
$('#page1').bind('pageAnimationStart', function (e, data) {if (data.direction === 'in'){aiGreenStar.start();}});
$('#page').bind('pageAnimationEnd', function (e, data) {if (data.direction === 'out'){aiGreenStar.stop();}});
Canvas tag with Object's ID:
This displays an green asterisk-like (*) activityIndicator in the top left corner of a 100 x 250 canvas.
<canvas id="GreenStar" height="100" width="250" barHeight="10" barWidth="3" style="color:rgb(0,255,0);"
direction="counterclockwise" innerRadius="5" numberOfBars="6" speed="50" xPos="30" yPos="45"></canvas>
*/
function activityIndicator(canvas) {
var animating = false;
var barHeight = $(canvas).attr('barHeight') - 0;
var barWidth = $(canvas).attr('barWidth') - 0;
var color = $(canvas).css('color');
var context = $(canvas).get(0).getContext('2d');
var direction = $(canvas).attr('direction');
var innerRadius = $(canvas).attr('innerRadius') - 0;
var numberOfBars = $(canvas).attr('numberOfBars') - 0;
var speed = $(canvas).attr('speed') - 0;
var xPos = $(canvas).attr('xPos') - 0;
var yPos = $(canvas).attr('yPos') - 0;
var offset = 0;
if (isNaN(barHeight)) {barHeight = 5;}
if (isNaN(barWidth)) {barWidth = 2;}
var a = color.indexOf('(') + 1;
var b = a;
if (a !== -1) {
if (color.substr(0, 4) === 'rgb('){
b = color.lastIndexOf(')') - a;
} else if (color.substr(0, 5) === 'rgba(') {
b = color.lastIndexOf(',') - a;
}
color = b > a ? color.substr(a, b) + ', ' : '0, 0, 0, ';
} else {
color = '0, 0, 0, ';
}
switch (direction){
case 'counterclockwise':
direction = -1;
break;
case 'clockwise': default:
direction = 1;
break;
}
if (isNaN(innerRadius)) {innerRadius = 5;}
if (isNaN(numberOfBars)) {numberOfBars = 12;}
if (isNaN(speed)) {speed = 50;}
if (isNaN(xPos)) {xPos = $(canvas).attr('width') / 2;}
if (isNaN(yPos)) {yPos = $(canvas).attr('height') / 2;}
function clear() {context.clearRect(0, 0, context.canvas.clientWidth, context.canvas.clientHeight);};
function draw(offset) {
clear();
context.save();
context.translate(xPos, yPos);
for (var i = 0; i < numberOfBars; i++) {
var angle = 2 * ((offset + i) % numberOfBars) * Math.PI / numberOfBars;
context.save();
context.translate((innerRadius * Math.sin(-angle)), (innerRadius * Math.cos(-angle)));
context.rotate(angle);
context.fillStyle = 'rgba(' + color + (numberOfBars + 1 - i) / (numberOfBars + 1) + ')';
context.fillRect(-barWidth / 2, 0, barWidth, barHeight);
context.restore();
}
context.restore();
};
function animate() {
if (!animating) | ;
offset = (offset + direction) % numberOfBars;
draw(offset);
setTimeout(animate, speed);
};
function start(){
animating = true;
animate();
};
function stop(){
animating = false;
clear();
};
return {
start: start,
stop: stop
};
}; | {return;} | conditional_block |
jqt.activityIndicator.js | /*
_/ _/_/ _/_/_/_/_/ _/
_/ _/ _/ _/_/ _/ _/ _/_/_/ _/_/_/
_/ _/ _/_/ _/ _/ _/ _/ _/ _/ _/ _/
_/ _/ _/ _/ _/ _/ _/ _/ _/ _/ _/
_/ _/_/ _/ _/ _/_/ _/_/_/ _/_/_/ _/ _/
_/
_/
Created by David Kaneda <http://www.davidkaneda.com>
Documentation and issue tracking on Google Code <http://code.google.com/p/jqtouch/>
Special thanks to Jonathan Stark <http://jonathanstark.com/>
and pinch/zoom <http://www.pinchzoom.com/>
(c) 2009 by jQTouch project members.
See LICENSE.txt for license.
activityIndicator - Daniel J. Pinter - DataZombies
Based on http://starkravingcoder.blogspot.com/2007/09/canvas-loading-indicator.html
Object Properties (all properties are set in the canvas tag):
animating
is the object in motion? Use object methods to change - true/false - Default = false
barHeight
height of the bars in px - Default = 5
barWidth
width of the bars in px - Default = 2
color
uses canvas's style attribute to set the bar color - in rgb() - Default = 0, 0, 0 (black)
direction
the direction the object rotates - counterclockwise/clockwise - Default = clockwise
innerRadius
radius of the hole in the middle in px - Default = 5
numberOfBars
how many bars the object has - Default = 12
speed
how fast the object rotates - larger numbers are slower - Default = 50
xPos
x-position on canvas in px - Default = center of canvas
yPos
y-position on canvas in px - Default = middle of canvas
Object Methods:
start()
begins the object's rotation
stop()
ends the object's rotation
Object Instantiation:
var aiGreenStar = new activityIndicator($('#GreenStar'));
Bind Object to Events via jQuery:
$('#page1').bind('pageAnimationStart', function (e, data) {if (data.direction === 'in'){aiGreenStar.start();}});
$('#page').bind('pageAnimationEnd', function (e, data) {if (data.direction === 'out'){aiGreenStar.stop();}});
Canvas tag with Object's ID:
This displays an green asterisk-like (*) activityIndicator in the top left corner of a 100 x 250 canvas.
<canvas id="GreenStar" height="100" width="250" barHeight="10" barWidth="3" style="color:rgb(0,255,0);"
direction="counterclockwise" innerRadius="5" numberOfBars="6" speed="50" xPos="30" yPos="45"></canvas>
*/
function activityIndicator(canvas) {
var animating = false;
var barHeight = $(canvas).attr('barHeight') - 0;
var barWidth = $(canvas).attr('barWidth') - 0;
var color = $(canvas).css('color');
var context = $(canvas).get(0).getContext('2d');
var direction = $(canvas).attr('direction');
var innerRadius = $(canvas).attr('innerRadius') - 0;
var numberOfBars = $(canvas).attr('numberOfBars') - 0;
var speed = $(canvas).attr('speed') - 0;
var xPos = $(canvas).attr('xPos') - 0;
var yPos = $(canvas).attr('yPos') - 0;
var offset = 0;
if (isNaN(barHeight)) {barHeight = 5;}
if (isNaN(barWidth)) {barWidth = 2;}
var a = color.indexOf('(') + 1;
var b = a;
if (a !== -1) {
if (color.substr(0, 4) === 'rgb('){
b = color.lastIndexOf(')') - a;
} else if (color.substr(0, 5) === 'rgba(') {
b = color.lastIndexOf(',') - a;
}
color = b > a ? color.substr(a, b) + ', ' : '0, 0, 0, ';
} else {
color = '0, 0, 0, ';
}
switch (direction){
case 'counterclockwise':
direction = -1;
break;
case 'clockwise': default:
direction = 1;
break;
}
if (isNaN(innerRadius)) {innerRadius = 5;}
if (isNaN(numberOfBars)) {numberOfBars = 12;}
if (isNaN(speed)) {speed = 50;}
if (isNaN(xPos)) {xPos = $(canvas).attr('width') / 2;}
if (isNaN(yPos)) {yPos = $(canvas).attr('height') / 2;}
function clear() {context.clearRect(0, 0, context.canvas.clientWidth, context.canvas.clientHeight);};
function draw(offset) {
clear();
context.save();
context.translate(xPos, yPos);
for (var i = 0; i < numberOfBars; i++) {
var angle = 2 * ((offset + i) % numberOfBars) * Math.PI / numberOfBars;
context.save();
context.translate((innerRadius * Math.sin(-angle)), (innerRadius * Math.cos(-angle)));
context.rotate(angle);
context.fillStyle = 'rgba(' + color + (numberOfBars + 1 - i) / (numberOfBars + 1) + ')';
context.fillRect(-barWidth / 2, 0, barWidth, barHeight);
context.restore();
}
context.restore();
};
function animate() | ;
function start(){
animating = true;
animate();
};
function stop(){
animating = false;
clear();
};
return {
start: start,
stop: stop
};
}; | {
if (!animating) {return;};
offset = (offset + direction) % numberOfBars;
draw(offset);
setTimeout(animate, speed);
} | identifier_body |
jqt.activityIndicator.js | /*
_/ _/_/ _/_/_/_/_/ _/
_/ _/ _/ _/_/ _/ _/ _/_/_/ _/_/_/
_/ _/ _/_/ _/ _/ _/ _/ _/ _/ _/ _/
_/ _/ _/ _/ _/ _/ _/ _/ _/ _/ _/
_/ _/_/ _/ _/ _/_/ _/_/_/ _/_/_/ _/ _/
_/
_/
Created by David Kaneda <http://www.davidkaneda.com>
Documentation and issue tracking on Google Code <http://code.google.com/p/jqtouch/>
Special thanks to Jonathan Stark <http://jonathanstark.com/>
and pinch/zoom <http://www.pinchzoom.com/>
(c) 2009 by jQTouch project members.
See LICENSE.txt for license.
activityIndicator - Daniel J. Pinter - DataZombies
Based on http://starkravingcoder.blogspot.com/2007/09/canvas-loading-indicator.html
Object Properties (all properties are set in the canvas tag):
animating
is the object in motion? Use object methods to change - true/false - Default = false
barHeight
height of the bars in px - Default = 5
barWidth
width of the bars in px - Default = 2
color
uses canvas's style attribute to set the bar color - in rgb() - Default = 0, 0, 0 (black)
direction
the direction the object rotates - counterclockwise/clockwise - Default = clockwise
innerRadius
radius of the hole in the middle in px - Default = 5
numberOfBars
how many bars the object has - Default = 12
speed
how fast the object rotates - larger numbers are slower - Default = 50
xPos
x-position on canvas in px - Default = center of canvas
yPos
y-position on canvas in px - Default = middle of canvas
Object Methods:
start()
begins the object's rotation
stop()
ends the object's rotation
Object Instantiation:
var aiGreenStar = new activityIndicator($('#GreenStar'));
Bind Object to Events via jQuery:
$('#page1').bind('pageAnimationStart', function (e, data) {if (data.direction === 'in'){aiGreenStar.start();}});
$('#page').bind('pageAnimationEnd', function (e, data) {if (data.direction === 'out'){aiGreenStar.stop();}});
Canvas tag with Object's ID:
This displays an green asterisk-like (*) activityIndicator in the top left corner of a 100 x 250 canvas.
<canvas id="GreenStar" height="100" width="250" barHeight="10" barWidth="3" style="color:rgb(0,255,0);"
direction="counterclockwise" innerRadius="5" numberOfBars="6" speed="50" xPos="30" yPos="45"></canvas>
*/
function | (canvas) {
var animating = false;
var barHeight = $(canvas).attr('barHeight') - 0;
var barWidth = $(canvas).attr('barWidth') - 0;
var color = $(canvas).css('color');
var context = $(canvas).get(0).getContext('2d');
var direction = $(canvas).attr('direction');
var innerRadius = $(canvas).attr('innerRadius') - 0;
var numberOfBars = $(canvas).attr('numberOfBars') - 0;
var speed = $(canvas).attr('speed') - 0;
var xPos = $(canvas).attr('xPos') - 0;
var yPos = $(canvas).attr('yPos') - 0;
var offset = 0;
if (isNaN(barHeight)) {barHeight = 5;}
if (isNaN(barWidth)) {barWidth = 2;}
var a = color.indexOf('(') + 1;
var b = a;
if (a !== -1) {
if (color.substr(0, 4) === 'rgb('){
b = color.lastIndexOf(')') - a;
} else if (color.substr(0, 5) === 'rgba(') {
b = color.lastIndexOf(',') - a;
}
color = b > a ? color.substr(a, b) + ', ' : '0, 0, 0, ';
} else {
color = '0, 0, 0, ';
}
switch (direction){
case 'counterclockwise':
direction = -1;
break;
case 'clockwise': default:
direction = 1;
break;
}
if (isNaN(innerRadius)) {innerRadius = 5;}
if (isNaN(numberOfBars)) {numberOfBars = 12;}
if (isNaN(speed)) {speed = 50;}
if (isNaN(xPos)) {xPos = $(canvas).attr('width') / 2;}
if (isNaN(yPos)) {yPos = $(canvas).attr('height') / 2;}
function clear() {context.clearRect(0, 0, context.canvas.clientWidth, context.canvas.clientHeight);};
function draw(offset) {
clear();
context.save();
context.translate(xPos, yPos);
for (var i = 0; i < numberOfBars; i++) {
var angle = 2 * ((offset + i) % numberOfBars) * Math.PI / numberOfBars;
context.save();
context.translate((innerRadius * Math.sin(-angle)), (innerRadius * Math.cos(-angle)));
context.rotate(angle);
context.fillStyle = 'rgba(' + color + (numberOfBars + 1 - i) / (numberOfBars + 1) + ')';
context.fillRect(-barWidth / 2, 0, barWidth, barHeight);
context.restore();
}
context.restore();
};
function animate() {
if (!animating) {return;};
offset = (offset + direction) % numberOfBars;
draw(offset);
setTimeout(animate, speed);
};
function start(){
animating = true;
animate();
};
function stop(){
animating = false;
clear();
};
return {
start: start,
stop: stop
};
}; | activityIndicator | identifier_name |
match.rs | // rustfmt-normalize_comments: true
// Match expressions.
fn foo() {
// A match expression.
match x {
// Some comment.
a => foo(),
b if 0 < 42 => foo(),
c => { // Another comment.
// Comment.
an_expression;
foo()
}
Foo(ref bar) =>
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,
Pattern1 | Pattern2 | Pattern3 => false,
Paternnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnn |
Paternnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnn => {
blah
}
Patternnnnnnnnnnnnnnnnnnn |
Patternnnnnnnnnnnnnnnnnnn |
Patternnnnnnnnnnnnnnnnnnn |
Patternnnnnnnnnnnnnnnnnnn => meh,
Patternnnnnnnnnnnnnnnnnnn |
Patternnnnnnnnnnnnnnnnnnn if looooooooooooooooooong_guard => meh,
Patternnnnnnnnnnnnnnnnnnnnnnnnn |
Patternnnnnnnnnnnnnnnnnnnnnnnnn if looooooooooooooooooooooooooooooooooooooooong_guard =>
meh,
// Test that earlier patterns can take the guard space
(aaaa, bbbbb, ccccccc, aaaaa, bbbbbbbb, cccccc, aaaa, bbbbbbbb, cccccc, dddddd) |
Patternnnnnnnnnnnnnnnnnnnnnnnnn if loooooooooooooooooooooooooooooooooooooooooong_guard => {}
_ => {}
ast::PathParameters::AngleBracketedParameters(ref data) if data.lifetimes.len() > 0 ||
data.types.len() > 0 ||
data.bindings.len() > 0 => {}
}
let whatever = match something {
/// DOC COMMENT!
Some(_) => 42,
// Comment on an attribute.
#[an_attribute]
// Comment after an attribute.
None => 0,
#[rustfmt::skip]
Blurb => { }
};
}
// Test that a match on an overflow line is laid out properly.
fn main() {
let sub_span =
match xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx {
Some(sub_span) => Some(sub_span),
None => sub_span,
};
}
// Test that one-line bodies align.
fn main() {
match r {
Variableeeeeeeeeeeeeeeeee => ( "variable",
vec!("id", "name", "qualname",
"value", "type", "scopeid"),
true,
true),
Enummmmmmmmmmmmmmmmmmmmm => ("enum",
vec!("id","qualname","scopeid","value"),
true,
true),
Variantttttttttttttttttttttttt => ("variant",
vec!("id",
"name",
"qualname",
"type",
"value",
"scopeid"),
true,
true),
};
match x{
y=>{/*Block with comment. Preserve me.*/ }
z=>{stmt();} }
}
fn matches() {
match 1 {
-1 => 10,
1 => 1, // foo
2 => 2,
// bar
3 => 3,
_ => 0 // baz
}
}
fn match_skip() {
let _ = match Some(1) {
#[rustfmt::skip]
Some( n ) => n,
None => 1,
};
}
fn issue339() {
match a {
b => {}
c => { }
d => {
}
e => {
}
// collapsing here is safe
ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff => {
}
// collapsing here exceeds line length
ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffg => {
}
h => { // comment above block
}
i => {
} // comment below block
j => {
// comment inside block
}
j2 => {
// comments inside...
} // ... and after
// TODO uncomment when vertical whitespace is handled better
// k => {
//
// // comment with WS above
// }
// l => {
// // comment with ws below
//
// }
m => {
} n => { } o =>
{
}
p => { // Don't collapse me
} q => { } r =>
{
}
s => 0, // s comment
// t comment
t => 1,
u => 2,
v => {
} /* funky block
* comment */
// final comment
}
}
fn issue355() {
match mac {
a => println!("a", b),
b => vec!(1, 2),
c => vec!(3; 4),
d => {
println!("a", b)
}
e => {
vec!(1, 2)
}
f => {
vec!(3; 4)
}
h => println!("a", b), // h comment
i => vec!(1, 2), // i comment
j => vec!(3; 4), // j comment
// k comment
k => println!("a", b),
// l comment
l => vec!(1, 2),
// m comment
m => vec!(3; 4),
// Rewrite splits macro
nnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnn => println!("a", b),
// Rewrite splits macro
oooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooo => vec!(1, 2),
// Macro support fails to recognise this macro as splittable
// We push the whole expr to a new line, TODO split this macro as well
pppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppp => vec!(3; 4),
// q, r and s: Rewrite splits match arm
qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqq => println!("a", b),
rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr => vec!(1, 2),
ssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssss => vec!(3; 4),
// Funky bracketing styles
t => println!{"a", b},
u => vec!{1, 2},
v => vec!{3; 4},
w => println!["a", b],
x => vec![1, 2],
y =>vec![3; 4],
// Brackets with comments
tc => println!{"a", b}, // comment
uc => vec!{1, 2}, // comment
vc =>vec!{3; 4}, // comment
wc =>println!["a", b], // comment
xc => vec![1,2], // comment
yc => vec![3; 4], // comment
yd =>
looooooooooooooooooooooooooooooooooooooooooooooooooooooooong_func(aaaaaaaaaa,
bbbbbbbbbb,
cccccccccc,
dddddddddd),
}
}
fn issue280() {
{
match x {
CompressionMode::DiscardNewline | CompressionMode::CompressWhitespaceNewline => ch ==
'\n',
ast::ItemConst(ref typ, ref expr) => self.process_static_or_const_item(item,
&typ,
&expr),
}
}
}
fn issue383() {
match resolution.last_private {LastImport{..} => false, _ => true};
}
fn issue507() {
match 1 {
1 => unsafe { std::intrinsics::abort() },
_ => (),
}
}
fn issue508() {
match s.type_id() {
Some(NodeTypeId::Element(ElementTypeId::HTMLElement(
HTMLElementTypeId::HTMLCanvasElement))) => true,
Some(NodeTypeId::Element(ElementTypeId::HTMLElement(
HTMLElementTypeId::HTMLObjectElement))) => s.has_object_data(),
Some(NodeTypeId::Element(_)) => false,
}
}
fn issue496() {{{{
match def {
def::DefConst(def_id) | def::DefAssociatedConst(def_id) =>
match const_eval::lookup_const_by_id(cx.tcx, def_id, Some(self.pat.id)) {
Some(const_expr) => { x }}}}}}}
fn issue494() {
{
match stmt.node {
hir::StmtExpr(ref expr, id) | hir::StmtSemi(ref expr, id) =>
result.push(
StmtRef::Mirror(
Box::new(Stmt { span: stmt.span,
kind: StmtKind::Expr {
scope: cx.tcx.region_maps.node_extent(id),
expr: expr.to_ref() } }))),
}
}
}
fn issue386() {
match foo {
BiEq | BiLt | BiLe | BiNe | BiGt | BiGe =>
true,
BiAnd | BiOr | BiAdd | BiSub | BiMul | BiDiv | BiRem |
BiBitXor | BiBitAnd | BiBitOr | BiShl | BiShr =>
false,
}
}
fn guards() {
match foo {
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa if foooooooooooooo && barrrrrrrrrrrr => {}
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa | aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa if foooooooooooooo && barrrrrrrrrrrr => {}
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
if fooooooooooooooooooooo &&
(bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb || cccccccccccccccccccccccccccccccccccccccc) => {}
}
}
fn issue1371() |
fn issue1395() {
let bar = Some(true);
let foo = Some(true);
let mut x = false;
bar.and_then(|_| {
match foo {
None => None,
Some(b) => {
x = true;
Some(b)
}
}
});
}
fn issue1456() {
Ok(Recording {
artists: match reader.evaluate(".//mb:recording/mb:artist-credit/mb:name-credit")? {
Nodeset(nodeset) => {
let res: Result<Vec<ArtistRef>, ReadError> = nodeset
.iter()
.map(|node| {
XPathNodeReader::new(node, &context).and_then(|r| ArtistRef::from_xml(&r))
})
.collect();
res?
}
_ => Vec::new(),
},
})
}
fn issue1460() {
let _ = match foo {
REORDER_BUFFER_CHANGE_INTERNAL_SPEC_INSERT => "internal_spec_insert_internal_spec_insert_internal_spec_insert",
_ => "reorder_something",
};
}
fn issue525() {
foobar(f, "{}", match *self {
TaskState::Started => "started",
TaskState::Success => "success",
TaskState::Failed => "failed",
});
}
// #1838, #1839
fn match_with_near_max_width() {
let (this_line_uses_99_characters_and_is_formatted_properly, x012345) = match some_expression {
_ => unimplemented!(),
};
let (should_be_formatted_like_the_line_above_using_100_characters, x0) = match some_expression {
_ => unimplemented!(),
};
let (should_put_the_brace_on_the_next_line_using_101_characters, x0000) = match some_expression
{
_ => unimplemented!(),
};
match m {
Variant::Tag | Variant::Tag2 | Variant::Tag3 | Variant::Tag4 | Variant::Tag5 | Variant::Tag6 =>
{}
}
}
fn match_with_trailing_spaces() {
match x {
Some(..) => 0,
None => 1,
}
}
fn issue_2099() {
let a = match x {
};
let b = match x {
};
match x {}
}
// #2021
impl<'tcx> Const<'tcx> {
pub fn from_constval<'a>() -> Const<'tcx> {
let val = match *cv {
ConstVal::Variant(_) | ConstVal::Aggregate(..) | ConstVal::Unevaluated(..) => bug!("MIR must not use `{:?}` (aggregates are expanded to MIR rvalues)", cv),
};
}
}
// #2151
fn issue_2151() {
match either {
x => {
}y => ()
}
}
// #2152
fn issue_2152() {
match m {
"aaaaaaaaaaaaa" | "bbbbbbbbbbbbb" | "cccccccccccccccccccccccccccccccccccccccccccc" if true => {}
"bind" | "writev" | "readv" | "sendmsg" | "recvmsg" if android && (aarch64 || x86_64) => true,
}
}
// #2376
// Preserve block around expressions with condition.
fn issue_2376() {
let mut x = None;
match x {
Some(0) => {
for i in 1..11 {
x = Some(i);
}
}
Some(ref mut y) => {
while *y < 10 {
*y += 1;
}
}
None => {
while let None = x {
x = Some(10);
}
}
}
}
// #2621
// Strip leading `|` in match arm patterns
fn issue_2621() {
let x = Foo::A;
match x {
Foo::A => println!("No vert single condition"),
Foo::B | Foo::C => println!("Center vert two conditions"),
| Foo::D => println!("Preceding vert single condition"),
| Foo::E
| Foo::F => println!("Preceding vert over two lines"),
Foo::G |
Foo::H => println!("Trailing vert over two lines"),
// Comment on its own line
| Foo::I => println!("With comment"), // Comment after line
}
}
fn issue_2377() {
match tok {
Tok::Not
| Tok::BNot
| Tok::Plus
| Tok::Minus
| Tok::PlusPlus
| Tok::MinusMinus
| Tok::Void
| Tok::Delete if prec <= 16 => {
// code here...
}
Tok::TypeOf if prec <= 16 => {}
}
}
// #3040
fn issue_3040() {
{
match foo {
DevtoolScriptControlMsg::WantsLiveNotifications(id, to_send) => {
match documents.find_window(id) {
Some(window) => devtools::handle_wants_live_notifications(window.upcast(), to_send),
None => return warn!("Message sent to closed pipeline {}.", id),
}
}
}
}
}
// #3030
fn issue_3030() {
match input.trim().parse::<f64>() {
Ok(val)
if !(
// A valid number is the same as what rust considers to be valid,
// except for +1., NaN, and Infinity.
val.is_infinite() || val
.is_nan() || input.ends_with(".") || input.starts_with("+")
)
=> {
}
}
}
fn issue_3005() {
match *token {
Token::Dimension {
value, ref unit, ..
} if num_context.is_ok(context.parsing_mode, value) =>
{
return NoCalcLength::parse_dimension(context, value, unit)
.map(LengthOrPercentage::Length)
.map_err(|()| location.new_unexpected_token_error(token.clone()));
},
}
}
// #3774
fn issue_3774() {
{
{
{
match foo {
Lam(_, _, _) | Pi(_, _, _) | Let(_, _, _, _) | Embed(_) | Var(_) => unreachab(),
Lam(_, _, _) | Pi(_, _, _) | Let(_, _, _, _) | Embed(_) | Var(_) => unreacha!(),
Lam(_, _, _) | Pi(_, _, _) | Let(_, _, _, _) | Embed(_) | Var(_) => unreachabl(),
Lam(_, _, _) | Pi(_, _, _) | Let(_, _, _, _) | Embed(_) | Var(_) => unreachae!(),
Lam(_, _, _) | Pi(_, _, _) | Let(_, _, _, _) | Embed(_) | Var(_) => unreachable(),
Lam(_, _, _) | Pi(_, _, _) | Let(_, _, _, _) | Embed(_) | Var(_) => unreachable!(),
Lam(_, _, _) | Pi(_, _, _) | Let(_, _, _, _) | Embed(_) | Var(_) => rrunreachable!(),
}
}
}
}
}
| {
Some(match type_ {
sfEvtClosed => Closed,
sfEvtResized => {
let e = unsafe { *event.size.as_ref() };
Resized {
width: e.width,
height: e.height,
}
}
sfEvtLostFocus => LostFocus,
sfEvtGainedFocus => GainedFocus,
sfEvtTextEntered => {
TextEntered {
unicode: unsafe {
::std::char::from_u32((*event.text.as_ref()).unicode)
.expect("Invalid unicode encountered on TextEntered event")
},
}
}
sfEvtKeyPressed => {
let e = unsafe { event.key.as_ref() };
KeyPressed {
code: unsafe { ::std::mem::transmute(e.code) },
alt: e.alt.to_bool(),
ctrl: e.control.to_bool(),
shift: e.shift.to_bool(),
system: e.system.to_bool(),
}
}
sfEvtKeyReleased => {
let e = unsafe { event.key.as_ref() };
KeyReleased {
code: unsafe { ::std::mem::transmute(e.code) },
alt: e.alt.to_bool(),
ctrl: e.control.to_bool(),
shift: e.shift.to_bool(),
system: e.system.to_bool(),
}
}
})
} | identifier_body |
match.rs | // rustfmt-normalize_comments: true
// Match expressions.
fn foo() {
// A match expression.
match x {
// Some comment.
a => foo(),
b if 0 < 42 => foo(),
c => { // Another comment.
// Comment.
an_expression;
foo()
}
Foo(ref bar) =>
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,
Pattern1 | Pattern2 | Pattern3 => false,
Paternnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnn |
Paternnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnn => {
blah
}
Patternnnnnnnnnnnnnnnnnnn |
Patternnnnnnnnnnnnnnnnnnn |
Patternnnnnnnnnnnnnnnnnnn |
Patternnnnnnnnnnnnnnnnnnn => meh,
Patternnnnnnnnnnnnnnnnnnn |
Patternnnnnnnnnnnnnnnnnnn if looooooooooooooooooong_guard => meh,
Patternnnnnnnnnnnnnnnnnnnnnnnnn |
Patternnnnnnnnnnnnnnnnnnnnnnnnn if looooooooooooooooooooooooooooooooooooooooong_guard =>
meh,
// Test that earlier patterns can take the guard space
(aaaa, bbbbb, ccccccc, aaaaa, bbbbbbbb, cccccc, aaaa, bbbbbbbb, cccccc, dddddd) |
Patternnnnnnnnnnnnnnnnnnnnnnnnn if loooooooooooooooooooooooooooooooooooooooooong_guard => {}
_ => {}
ast::PathParameters::AngleBracketedParameters(ref data) if data.lifetimes.len() > 0 ||
data.types.len() > 0 ||
data.bindings.len() > 0 => {}
}
let whatever = match something {
/// DOC COMMENT!
Some(_) => 42,
// Comment on an attribute.
#[an_attribute]
// Comment after an attribute.
None => 0,
#[rustfmt::skip]
Blurb => { }
};
}
// Test that a match on an overflow line is laid out properly.
fn main() {
let sub_span =
match xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx {
Some(sub_span) => Some(sub_span),
None => sub_span,
};
}
// Test that one-line bodies align.
fn main() {
match r {
Variableeeeeeeeeeeeeeeeee => ( "variable",
vec!("id", "name", "qualname",
"value", "type", "scopeid"),
true,
true),
Enummmmmmmmmmmmmmmmmmmmm => ("enum",
vec!("id","qualname","scopeid","value"),
true,
true),
Variantttttttttttttttttttttttt => ("variant",
vec!("id",
"name",
"qualname",
"type",
"value",
"scopeid"),
true,
true),
};
match x{
y=>{/*Block with comment. Preserve me.*/ }
z=>{stmt();} }
}
fn matches() {
match 1 {
-1 => 10,
1 => 1, // foo
2 => 2,
// bar
3 => 3,
_ => 0 // baz
}
}
fn match_skip() {
let _ = match Some(1) {
#[rustfmt::skip]
Some( n ) => n,
None => 1,
};
}
fn issue339() {
match a {
b => {}
c => { }
d => {
}
e => {
}
// collapsing here is safe
ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff => {
}
// collapsing here exceeds line length
ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffg => {
}
h => { // comment above block
}
i => {
} // comment below block
j => {
// comment inside block
}
j2 => {
// comments inside...
} // ... and after
// TODO uncomment when vertical whitespace is handled better
// k => {
//
// // comment with WS above
// }
// l => {
// // comment with ws below
//
// }
m => {
} n => { } o =>
{
}
p => { // Don't collapse me
} q => { } r =>
{
}
s => 0, // s comment
// t comment
t => 1,
u => 2,
v => {
} /* funky block
* comment */
// final comment
}
}
fn issue355() {
match mac {
a => println!("a", b),
b => vec!(1, 2),
c => vec!(3; 4),
d => {
println!("a", b)
}
e => {
vec!(1, 2)
}
f => {
vec!(3; 4)
}
h => println!("a", b), // h comment
i => vec!(1, 2), // i comment
j => vec!(3; 4), // j comment
// k comment
k => println!("a", b),
// l comment
l => vec!(1, 2),
// m comment
m => vec!(3; 4),
// Rewrite splits macro
nnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnn => println!("a", b),
// Rewrite splits macro
oooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooo => vec!(1, 2),
// Macro support fails to recognise this macro as splittable
// We push the whole expr to a new line, TODO split this macro as well
pppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppp => vec!(3; 4),
// q, r and s: Rewrite splits match arm
qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqq => println!("a", b),
rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr => vec!(1, 2),
ssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssss => vec!(3; 4),
// Funky bracketing styles
t => println!{"a", b},
u => vec!{1, 2},
v => vec!{3; 4},
w => println!["a", b],
x => vec![1, 2],
y =>vec![3; 4],
// Brackets with comments
tc => println!{"a", b}, // comment
uc => vec!{1, 2}, // comment
vc =>vec!{3; 4}, // comment
wc =>println!["a", b], // comment
xc => vec![1,2], // comment
yc => vec![3; 4], // comment
yd =>
looooooooooooooooooooooooooooooooooooooooooooooooooooooooong_func(aaaaaaaaaa,
bbbbbbbbbb,
cccccccccc,
dddddddddd),
}
}
fn issue280() {
{
match x {
CompressionMode::DiscardNewline | CompressionMode::CompressWhitespaceNewline => ch ==
'\n',
ast::ItemConst(ref typ, ref expr) => self.process_static_or_const_item(item,
&typ,
&expr),
}
}
}
fn issue383() {
match resolution.last_private {LastImport{..} => false, _ => true};
}
fn issue507() {
match 1 {
1 => unsafe { std::intrinsics::abort() },
_ => (),
}
}
fn issue508() {
match s.type_id() {
Some(NodeTypeId::Element(ElementTypeId::HTMLElement(
HTMLElementTypeId::HTMLCanvasElement))) => true,
Some(NodeTypeId::Element(ElementTypeId::HTMLElement(
HTMLElementTypeId::HTMLObjectElement))) => s.has_object_data(),
Some(NodeTypeId::Element(_)) => false,
}
}
fn issue496() {{{{
match def {
def::DefConst(def_id) | def::DefAssociatedConst(def_id) =>
match const_eval::lookup_const_by_id(cx.tcx, def_id, Some(self.pat.id)) {
Some(const_expr) => { x }}}}}}}
fn issue494() {
{
match stmt.node {
hir::StmtExpr(ref expr, id) | hir::StmtSemi(ref expr, id) =>
result.push(
StmtRef::Mirror(
Box::new(Stmt { span: stmt.span,
kind: StmtKind::Expr {
scope: cx.tcx.region_maps.node_extent(id),
expr: expr.to_ref() } }))),
}
}
}
fn issue386() {
match foo {
BiEq | BiLt | BiLe | BiNe | BiGt | BiGe =>
true,
BiAnd | BiOr | BiAdd | BiSub | BiMul | BiDiv | BiRem |
BiBitXor | BiBitAnd | BiBitOr | BiShl | BiShr =>
false,
}
}
fn guards() {
match foo {
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa if foooooooooooooo && barrrrrrrrrrrr => {}
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa | aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa if foooooooooooooo && barrrrrrrrrrrr => {}
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
if fooooooooooooooooooooo &&
(bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb || cccccccccccccccccccccccccccccccccccccccc) => {}
}
}
fn issue1371() {
Some(match type_ {
sfEvtClosed => Closed,
sfEvtResized => {
let e = unsafe { *event.size.as_ref() };
Resized {
width: e.width,
height: e.height,
}
}
sfEvtLostFocus => LostFocus,
sfEvtGainedFocus => GainedFocus,
sfEvtTextEntered => {
TextEntered {
unicode: unsafe {
::std::char::from_u32((*event.text.as_ref()).unicode)
.expect("Invalid unicode encountered on TextEntered event")
},
}
}
sfEvtKeyPressed => {
let e = unsafe { event.key.as_ref() };
KeyPressed {
code: unsafe { ::std::mem::transmute(e.code) },
alt: e.alt.to_bool(),
ctrl: e.control.to_bool(),
shift: e.shift.to_bool(),
system: e.system.to_bool(),
}
}
sfEvtKeyReleased => {
let e = unsafe { event.key.as_ref() };
KeyReleased {
code: unsafe { ::std::mem::transmute(e.code) },
alt: e.alt.to_bool(),
ctrl: e.control.to_bool(),
shift: e.shift.to_bool(),
system: e.system.to_bool(),
}
}
})
}
fn issue1395() {
let bar = Some(true);
let foo = Some(true);
let mut x = false;
bar.and_then(|_| {
match foo {
None => None,
Some(b) => {
x = true;
Some(b)
}
}
});
}
fn issue1456() {
Ok(Recording {
artists: match reader.evaluate(".//mb:recording/mb:artist-credit/mb:name-credit")? {
Nodeset(nodeset) => {
let res: Result<Vec<ArtistRef>, ReadError> = nodeset
.iter()
.map(|node| {
XPathNodeReader::new(node, &context).and_then(|r| ArtistRef::from_xml(&r))
})
.collect();
res?
}
_ => Vec::new(),
},
})
}
fn issue1460() {
let _ = match foo {
REORDER_BUFFER_CHANGE_INTERNAL_SPEC_INSERT => "internal_spec_insert_internal_spec_insert_internal_spec_insert",
_ => "reorder_something",
};
}
fn | () {
foobar(f, "{}", match *self {
TaskState::Started => "started",
TaskState::Success => "success",
TaskState::Failed => "failed",
});
}
// #1838, #1839
fn match_with_near_max_width() {
let (this_line_uses_99_characters_and_is_formatted_properly, x012345) = match some_expression {
_ => unimplemented!(),
};
let (should_be_formatted_like_the_line_above_using_100_characters, x0) = match some_expression {
_ => unimplemented!(),
};
let (should_put_the_brace_on_the_next_line_using_101_characters, x0000) = match some_expression
{
_ => unimplemented!(),
};
match m {
Variant::Tag | Variant::Tag2 | Variant::Tag3 | Variant::Tag4 | Variant::Tag5 | Variant::Tag6 =>
{}
}
}
fn match_with_trailing_spaces() {
match x {
Some(..) => 0,
None => 1,
}
}
fn issue_2099() {
let a = match x {
};
let b = match x {
};
match x {}
}
// #2021
impl<'tcx> Const<'tcx> {
pub fn from_constval<'a>() -> Const<'tcx> {
let val = match *cv {
ConstVal::Variant(_) | ConstVal::Aggregate(..) | ConstVal::Unevaluated(..) => bug!("MIR must not use `{:?}` (aggregates are expanded to MIR rvalues)", cv),
};
}
}
// #2151
fn issue_2151() {
match either {
x => {
}y => ()
}
}
// #2152
fn issue_2152() {
match m {
"aaaaaaaaaaaaa" | "bbbbbbbbbbbbb" | "cccccccccccccccccccccccccccccccccccccccccccc" if true => {}
"bind" | "writev" | "readv" | "sendmsg" | "recvmsg" if android && (aarch64 || x86_64) => true,
}
}
// #2376
// Preserve block around expressions with condition.
fn issue_2376() {
let mut x = None;
match x {
Some(0) => {
for i in 1..11 {
x = Some(i);
}
}
Some(ref mut y) => {
while *y < 10 {
*y += 1;
}
}
None => {
while let None = x {
x = Some(10);
}
}
}
}
// #2621
// Strip leading `|` in match arm patterns
fn issue_2621() {
let x = Foo::A;
match x {
Foo::A => println!("No vert single condition"),
Foo::B | Foo::C => println!("Center vert two conditions"),
| Foo::D => println!("Preceding vert single condition"),
| Foo::E
| Foo::F => println!("Preceding vert over two lines"),
Foo::G |
Foo::H => println!("Trailing vert over two lines"),
// Comment on its own line
| Foo::I => println!("With comment"), // Comment after line
}
}
fn issue_2377() {
match tok {
Tok::Not
| Tok::BNot
| Tok::Plus
| Tok::Minus
| Tok::PlusPlus
| Tok::MinusMinus
| Tok::Void
| Tok::Delete if prec <= 16 => {
// code here...
}
Tok::TypeOf if prec <= 16 => {}
}
}
// #3040
fn issue_3040() {
{
match foo {
DevtoolScriptControlMsg::WantsLiveNotifications(id, to_send) => {
match documents.find_window(id) {
Some(window) => devtools::handle_wants_live_notifications(window.upcast(), to_send),
None => return warn!("Message sent to closed pipeline {}.", id),
}
}
}
}
}
// #3030
fn issue_3030() {
match input.trim().parse::<f64>() {
Ok(val)
if !(
// A valid number is the same as what rust considers to be valid,
// except for +1., NaN, and Infinity.
val.is_infinite() || val
.is_nan() || input.ends_with(".") || input.starts_with("+")
)
=> {
}
}
}
fn issue_3005() {
match *token {
Token::Dimension {
value, ref unit, ..
} if num_context.is_ok(context.parsing_mode, value) =>
{
return NoCalcLength::parse_dimension(context, value, unit)
.map(LengthOrPercentage::Length)
.map_err(|()| location.new_unexpected_token_error(token.clone()));
},
}
}
// #3774
fn issue_3774() {
{
{
{
match foo {
Lam(_, _, _) | Pi(_, _, _) | Let(_, _, _, _) | Embed(_) | Var(_) => unreachab(),
Lam(_, _, _) | Pi(_, _, _) | Let(_, _, _, _) | Embed(_) | Var(_) => unreacha!(),
Lam(_, _, _) | Pi(_, _, _) | Let(_, _, _, _) | Embed(_) | Var(_) => unreachabl(),
Lam(_, _, _) | Pi(_, _, _) | Let(_, _, _, _) | Embed(_) | Var(_) => unreachae!(),
Lam(_, _, _) | Pi(_, _, _) | Let(_, _, _, _) | Embed(_) | Var(_) => unreachable(),
Lam(_, _, _) | Pi(_, _, _) | Let(_, _, _, _) | Embed(_) | Var(_) => unreachable!(),
Lam(_, _, _) | Pi(_, _, _) | Let(_, _, _, _) | Embed(_) | Var(_) => rrunreachable!(),
}
}
}
}
}
| issue525 | identifier_name |
match.rs | // rustfmt-normalize_comments: true
// Match expressions.
fn foo() {
// A match expression.
match x {
// Some comment.
a => foo(),
b if 0 < 42 => foo(),
c => { // Another comment.
// Comment.
an_expression;
foo()
}
Foo(ref bar) =>
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,
Pattern1 | Pattern2 | Pattern3 => false,
Paternnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnn |
Paternnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnn => {
blah
}
Patternnnnnnnnnnnnnnnnnnn |
Patternnnnnnnnnnnnnnnnnnn |
Patternnnnnnnnnnnnnnnnnnn |
Patternnnnnnnnnnnnnnnnnnn => meh,
Patternnnnnnnnnnnnnnnnnnn |
Patternnnnnnnnnnnnnnnnnnn if looooooooooooooooooong_guard => meh,
Patternnnnnnnnnnnnnnnnnnnnnnnnn |
Patternnnnnnnnnnnnnnnnnnnnnnnnn if looooooooooooooooooooooooooooooooooooooooong_guard =>
meh,
// Test that earlier patterns can take the guard space
(aaaa, bbbbb, ccccccc, aaaaa, bbbbbbbb, cccccc, aaaa, bbbbbbbb, cccccc, dddddd) |
Patternnnnnnnnnnnnnnnnnnnnnnnnn if loooooooooooooooooooooooooooooooooooooooooong_guard => {}
_ => {}
ast::PathParameters::AngleBracketedParameters(ref data) if data.lifetimes.len() > 0 ||
data.types.len() > 0 ||
data.bindings.len() > 0 => {}
}
let whatever = match something {
/// DOC COMMENT!
Some(_) => 42,
// Comment on an attribute.
#[an_attribute]
// Comment after an attribute.
None => 0,
#[rustfmt::skip]
Blurb => { }
};
}
// Test that a match on an overflow line is laid out properly.
fn main() {
let sub_span =
match xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx {
Some(sub_span) => Some(sub_span),
None => sub_span,
};
}
// Test that one-line bodies align.
fn main() {
match r {
Variableeeeeeeeeeeeeeeeee => ( "variable",
vec!("id", "name", "qualname",
"value", "type", "scopeid"),
true,
true),
Enummmmmmmmmmmmmmmmmmmmm => ("enum",
vec!("id","qualname","scopeid","value"),
true,
true),
Variantttttttttttttttttttttttt => ("variant",
vec!("id",
"name",
"qualname",
"type",
"value",
"scopeid"),
true,
true),
};
match x{
y=>{/*Block with comment. Preserve me.*/ }
z=>{stmt();} }
}
fn matches() {
match 1 {
-1 => 10,
1 => 1, // foo
2 => 2,
// bar
3 => 3,
_ => 0 // baz
}
}
fn match_skip() {
let _ = match Some(1) {
#[rustfmt::skip]
Some( n ) => n,
None => 1,
};
}
fn issue339() {
match a {
b => {}
c => { }
d => {
}
e => {
}
// collapsing here is safe
ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff => {
}
// collapsing here exceeds line length
ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffg => {
}
h => { // comment above block
}
i => {
} // comment below block
j => {
// comment inside block
}
j2 => {
// comments inside...
} // ... and after
// TODO uncomment when vertical whitespace is handled better
// k => {
//
// // comment with WS above | m => {
} n => { } o =>
{
}
p => { // Don't collapse me
} q => { } r =>
{
}
s => 0, // s comment
// t comment
t => 1,
u => 2,
v => {
} /* funky block
* comment */
// final comment
}
}
fn issue355() {
match mac {
a => println!("a", b),
b => vec!(1, 2),
c => vec!(3; 4),
d => {
println!("a", b)
}
e => {
vec!(1, 2)
}
f => {
vec!(3; 4)
}
h => println!("a", b), // h comment
i => vec!(1, 2), // i comment
j => vec!(3; 4), // j comment
// k comment
k => println!("a", b),
// l comment
l => vec!(1, 2),
// m comment
m => vec!(3; 4),
// Rewrite splits macro
nnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnn => println!("a", b),
// Rewrite splits macro
oooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooo => vec!(1, 2),
// Macro support fails to recognise this macro as splittable
// We push the whole expr to a new line, TODO split this macro as well
pppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppp => vec!(3; 4),
// q, r and s: Rewrite splits match arm
qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqq => println!("a", b),
rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr => vec!(1, 2),
ssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssss => vec!(3; 4),
// Funky bracketing styles
t => println!{"a", b},
u => vec!{1, 2},
v => vec!{3; 4},
w => println!["a", b],
x => vec![1, 2],
y =>vec![3; 4],
// Brackets with comments
tc => println!{"a", b}, // comment
uc => vec!{1, 2}, // comment
vc =>vec!{3; 4}, // comment
wc =>println!["a", b], // comment
xc => vec![1,2], // comment
yc => vec![3; 4], // comment
yd =>
looooooooooooooooooooooooooooooooooooooooooooooooooooooooong_func(aaaaaaaaaa,
bbbbbbbbbb,
cccccccccc,
dddddddddd),
}
}
fn issue280() {
{
match x {
CompressionMode::DiscardNewline | CompressionMode::CompressWhitespaceNewline => ch ==
'\n',
ast::ItemConst(ref typ, ref expr) => self.process_static_or_const_item(item,
&typ,
&expr),
}
}
}
fn issue383() {
match resolution.last_private {LastImport{..} => false, _ => true};
}
fn issue507() {
match 1 {
1 => unsafe { std::intrinsics::abort() },
_ => (),
}
}
fn issue508() {
match s.type_id() {
Some(NodeTypeId::Element(ElementTypeId::HTMLElement(
HTMLElementTypeId::HTMLCanvasElement))) => true,
Some(NodeTypeId::Element(ElementTypeId::HTMLElement(
HTMLElementTypeId::HTMLObjectElement))) => s.has_object_data(),
Some(NodeTypeId::Element(_)) => false,
}
}
fn issue496() {{{{
match def {
def::DefConst(def_id) | def::DefAssociatedConst(def_id) =>
match const_eval::lookup_const_by_id(cx.tcx, def_id, Some(self.pat.id)) {
Some(const_expr) => { x }}}}}}}
fn issue494() {
{
match stmt.node {
hir::StmtExpr(ref expr, id) | hir::StmtSemi(ref expr, id) =>
result.push(
StmtRef::Mirror(
Box::new(Stmt { span: stmt.span,
kind: StmtKind::Expr {
scope: cx.tcx.region_maps.node_extent(id),
expr: expr.to_ref() } }))),
}
}
}
fn issue386() {
match foo {
BiEq | BiLt | BiLe | BiNe | BiGt | BiGe =>
true,
BiAnd | BiOr | BiAdd | BiSub | BiMul | BiDiv | BiRem |
BiBitXor | BiBitAnd | BiBitOr | BiShl | BiShr =>
false,
}
}
fn guards() {
match foo {
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa if foooooooooooooo && barrrrrrrrrrrr => {}
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa | aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa if foooooooooooooo && barrrrrrrrrrrr => {}
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
if fooooooooooooooooooooo &&
(bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb || cccccccccccccccccccccccccccccccccccccccc) => {}
}
}
fn issue1371() {
Some(match type_ {
sfEvtClosed => Closed,
sfEvtResized => {
let e = unsafe { *event.size.as_ref() };
Resized {
width: e.width,
height: e.height,
}
}
sfEvtLostFocus => LostFocus,
sfEvtGainedFocus => GainedFocus,
sfEvtTextEntered => {
TextEntered {
unicode: unsafe {
::std::char::from_u32((*event.text.as_ref()).unicode)
.expect("Invalid unicode encountered on TextEntered event")
},
}
}
sfEvtKeyPressed => {
let e = unsafe { event.key.as_ref() };
KeyPressed {
code: unsafe { ::std::mem::transmute(e.code) },
alt: e.alt.to_bool(),
ctrl: e.control.to_bool(),
shift: e.shift.to_bool(),
system: e.system.to_bool(),
}
}
sfEvtKeyReleased => {
let e = unsafe { event.key.as_ref() };
KeyReleased {
code: unsafe { ::std::mem::transmute(e.code) },
alt: e.alt.to_bool(),
ctrl: e.control.to_bool(),
shift: e.shift.to_bool(),
system: e.system.to_bool(),
}
}
})
}
fn issue1395() {
let bar = Some(true);
let foo = Some(true);
let mut x = false;
bar.and_then(|_| {
match foo {
None => None,
Some(b) => {
x = true;
Some(b)
}
}
});
}
fn issue1456() {
Ok(Recording {
artists: match reader.evaluate(".//mb:recording/mb:artist-credit/mb:name-credit")? {
Nodeset(nodeset) => {
let res: Result<Vec<ArtistRef>, ReadError> = nodeset
.iter()
.map(|node| {
XPathNodeReader::new(node, &context).and_then(|r| ArtistRef::from_xml(&r))
})
.collect();
res?
}
_ => Vec::new(),
},
})
}
fn issue1460() {
let _ = match foo {
REORDER_BUFFER_CHANGE_INTERNAL_SPEC_INSERT => "internal_spec_insert_internal_spec_insert_internal_spec_insert",
_ => "reorder_something",
};
}
fn issue525() {
foobar(f, "{}", match *self {
TaskState::Started => "started",
TaskState::Success => "success",
TaskState::Failed => "failed",
});
}
// #1838, #1839
fn match_with_near_max_width() {
let (this_line_uses_99_characters_and_is_formatted_properly, x012345) = match some_expression {
_ => unimplemented!(),
};
let (should_be_formatted_like_the_line_above_using_100_characters, x0) = match some_expression {
_ => unimplemented!(),
};
let (should_put_the_brace_on_the_next_line_using_101_characters, x0000) = match some_expression
{
_ => unimplemented!(),
};
match m {
Variant::Tag | Variant::Tag2 | Variant::Tag3 | Variant::Tag4 | Variant::Tag5 | Variant::Tag6 =>
{}
}
}
fn match_with_trailing_spaces() {
match x {
Some(..) => 0,
None => 1,
}
}
fn issue_2099() {
let a = match x {
};
let b = match x {
};
match x {}
}
// #2021
impl<'tcx> Const<'tcx> {
pub fn from_constval<'a>() -> Const<'tcx> {
let val = match *cv {
ConstVal::Variant(_) | ConstVal::Aggregate(..) | ConstVal::Unevaluated(..) => bug!("MIR must not use `{:?}` (aggregates are expanded to MIR rvalues)", cv),
};
}
}
// #2151
fn issue_2151() {
match either {
x => {
}y => ()
}
}
// #2152
fn issue_2152() {
match m {
"aaaaaaaaaaaaa" | "bbbbbbbbbbbbb" | "cccccccccccccccccccccccccccccccccccccccccccc" if true => {}
"bind" | "writev" | "readv" | "sendmsg" | "recvmsg" if android && (aarch64 || x86_64) => true,
}
}
// #2376
// Preserve block around expressions with condition.
fn issue_2376() {
let mut x = None;
match x {
Some(0) => {
for i in 1..11 {
x = Some(i);
}
}
Some(ref mut y) => {
while *y < 10 {
*y += 1;
}
}
None => {
while let None = x {
x = Some(10);
}
}
}
}
// #2621
// Strip leading `|` in match arm patterns
fn issue_2621() {
let x = Foo::A;
match x {
Foo::A => println!("No vert single condition"),
Foo::B | Foo::C => println!("Center vert two conditions"),
| Foo::D => println!("Preceding vert single condition"),
| Foo::E
| Foo::F => println!("Preceding vert over two lines"),
Foo::G |
Foo::H => println!("Trailing vert over two lines"),
// Comment on its own line
| Foo::I => println!("With comment"), // Comment after line
}
}
fn issue_2377() {
match tok {
Tok::Not
| Tok::BNot
| Tok::Plus
| Tok::Minus
| Tok::PlusPlus
| Tok::MinusMinus
| Tok::Void
| Tok::Delete if prec <= 16 => {
// code here...
}
Tok::TypeOf if prec <= 16 => {}
}
}
// #3040
fn issue_3040() {
{
match foo {
DevtoolScriptControlMsg::WantsLiveNotifications(id, to_send) => {
match documents.find_window(id) {
Some(window) => devtools::handle_wants_live_notifications(window.upcast(), to_send),
None => return warn!("Message sent to closed pipeline {}.", id),
}
}
}
}
}
// #3030
fn issue_3030() {
match input.trim().parse::<f64>() {
Ok(val)
if !(
// A valid number is the same as what rust considers to be valid,
// except for +1., NaN, and Infinity.
val.is_infinite() || val
.is_nan() || input.ends_with(".") || input.starts_with("+")
)
=> {
}
}
}
fn issue_3005() {
match *token {
Token::Dimension {
value, ref unit, ..
} if num_context.is_ok(context.parsing_mode, value) =>
{
return NoCalcLength::parse_dimension(context, value, unit)
.map(LengthOrPercentage::Length)
.map_err(|()| location.new_unexpected_token_error(token.clone()));
},
}
}
// #3774
fn issue_3774() {
{
{
{
match foo {
Lam(_, _, _) | Pi(_, _, _) | Let(_, _, _, _) | Embed(_) | Var(_) => unreachab(),
Lam(_, _, _) | Pi(_, _, _) | Let(_, _, _, _) | Embed(_) | Var(_) => unreacha!(),
Lam(_, _, _) | Pi(_, _, _) | Let(_, _, _, _) | Embed(_) | Var(_) => unreachabl(),
Lam(_, _, _) | Pi(_, _, _) | Let(_, _, _, _) | Embed(_) | Var(_) => unreachae!(),
Lam(_, _, _) | Pi(_, _, _) | Let(_, _, _, _) | Embed(_) | Var(_) => unreachable(),
Lam(_, _, _) | Pi(_, _, _) | Let(_, _, _, _) | Embed(_) | Var(_) => unreachable!(),
Lam(_, _, _) | Pi(_, _, _) | Let(_, _, _, _) | Embed(_) | Var(_) => rrunreachable!(),
}
}
}
}
} | // }
// l => {
// // comment with ws below
//
// } | random_line_split |
HTMLViewer.py | #!/usr/bin/python3
# -*- coding: utf-8 -*-
"""
Copyright (C) 2014 Technische Universität Berlin,
Fakultät IV - Elektrotechnik und Informatik,
Fachgebiet Regelungssysteme,
Einsteinufer 17, D-10587 Berlin, Germany
This file is part of PaPI.
PaPI is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
PaPI is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with PaPI. If not, see <http://www.gnu.org/licenses/>.
Contributors:
<Stefan Ruppin
"""
from PyQt5.QtWebKitWidgets import QWebView
from PyQt5 import QtCore
from papi.plugin.base_classes.vip_base import vip_base
class HTMLViewer(vip_base):
def cb_initialize_plugin(self):
# ---------------------------
# Read configuration
# ---------------------------
self.config = self.pl_get_current_config_ref()
content = self.config['content']['value']
isUrl = self.config['isUrl']['value']
# --------------------------------
# Create Widget
# --------------------------------
self.WebView = QWebView()
# This call is important, because the background structure needs to know the used widget!
# In the background the qmidiwindow will becreated and the widget will be added
self.pl_set_widget_for_internal_usage( self.WebView )
print(isUrl)
if isUrl == '1':
url = QtCore.QUrl(content)
self.WebView.load(url)
else:
self.WebView.setHtml(content)
self.WebView.setContextMenuPolicy(QtCore.Qt.CustomContextMenu)
self.WebView.customContextMenuRequested.connect(self.show_context_menu)
return True
def show_context_menu(self, pos):
gloPos = self.WebView.mapToGlobal(pos)
self.cmenu = self.pl_create_control_context_menu()
self.cmenu.exec_(gloPos)
def cb_pause(self):
# will be called, when plugin gets paused
# can be used to get plugin in a defined state before pause
# e.a.
pass
def cb_resume(self):
# will be called when plugin gets resumed
# can be used to wake up the plugin from defined pause state
# e.a. reopen communication ports, files etc.
pa | def cb_execute(self, Data=None, block_name = None, plugin_uname = None):
# Do main work here!
# If this plugin is an IOP plugin, then there will be no Data parameter because it wont get data
# If this plugin is a DPP, then it will get Data with data
# param: Data is a Data hash and block_name is the block_name of Data origin
# Data is a hash, so use ist like: Data[CORE_TIME_SIGNAL] = [t1, t2, ...] where CORE_TIME_SIGNAL is a signal_name
# hash signal_name: value
# Data could have multiple types stored in it e.a. Data['d1'] = int, Data['d2'] = []
pass
def cb_set_parameter(self, name, value):
# attetion: value is a string and need to be processed !
# if name == 'irgendeinParameter':
# do that .... with value
pass
def cb_quit(self):
# do something before plugin will close, e.a. close connections ...
pass
def cb_get_plugin_configuration(self):
#
# Implement a own part of the config
# config is a hash of hass object
# config_parameter_name : {}
# config[config_parameter_name]['value'] NEEDS TO BE IMPLEMENTED
# configs can be marked as advanced for create dialog
# http://utilitymill.com/utility/Regex_For_Range
config = {
"content": {
'value': """<p> Insert your html code here </p>""",
'display_text' : 'HTML Content',
'advanced' : 'HTLM',
'tooltip' : 'Plain html code to be displayed'
},
"isUrl": {
'value': "0",
'display_text': "Content==Url?",
'tooltip': "Set to 1 if the content is an url that should be loaded",
'advanced' : 'HTML'
},
}
return config
def cb_plugin_meta_updated(self):
"""
Whenever the meta information is updated this function is called (if implemented).
:return:
"""
#dplugin_info = self.dplugin_info
pass
| ss
| identifier_body |
HTMLViewer.py | #!/usr/bin/python3
# -*- coding: utf-8 -*-
"""
Copyright (C) 2014 Technische Universität Berlin,
Fakultät IV - Elektrotechnik und Informatik,
Fachgebiet Regelungssysteme,
Einsteinufer 17, D-10587 Berlin, Germany
This file is part of PaPI.
PaPI is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
PaPI is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with PaPI. If not, see <http://www.gnu.org/licenses/>.
Contributors:
<Stefan Ruppin
"""
from PyQt5.QtWebKitWidgets import QWebView
from PyQt5 import QtCore
from papi.plugin.base_classes.vip_base import vip_base
class HTMLViewer(vip_base):
def cb_initialize_plugin(self):
# ---------------------------
# Read configuration
# ---------------------------
self.config = self.pl_get_current_config_ref()
content = self.config['content']['value']
isUrl = self.config['isUrl']['value']
# --------------------------------
# Create Widget
# --------------------------------
self.WebView = QWebView()
# This call is important, because the background structure needs to know the used widget!
# In the background the qmidiwindow will becreated and the widget will be added
self.pl_set_widget_for_internal_usage( self.WebView )
print(isUrl)
if isUrl == '1':
url = QtCore.QUrl(content)
self.WebView.load(url)
else:
self.WebView.setHtml(content)
self.WebView.setContextMenuPolicy(QtCore.Qt.CustomContextMenu)
self.WebView.customContextMenuRequested.connect(self.show_context_menu)
return True
def show_context_menu(self, pos):
gloPos = self.WebView.mapToGlobal(pos)
self.cmenu = self.pl_create_control_context_menu()
self.cmenu.exec_(gloPos)
def cb_pause(self):
# will be called, when plugin gets paused
# can be used to get plugin in a defined state before pause
# e.a.
pass
def cb_resume(self):
# will be called when plugin gets resumed
# can be used to wake up the plugin from defined pause state
# e.a. reopen communication ports, files etc.
pass
def cb_execute(self, Data=None, block_name = None, plugin_uname = None):
# Do main work here!
# If this plugin is an IOP plugin, then there will be no Data parameter because it wont get data
# If this plugin is a DPP, then it will get Data with data
# param: Data is a Data hash and block_name is the block_name of Data origin
# Data is a hash, so use ist like: Data[CORE_TIME_SIGNAL] = [t1, t2, ...] where CORE_TIME_SIGNAL is a signal_name
# hash signal_name: value
# Data could have multiple types stored in it e.a. Data['d1'] = int, Data['d2'] = []
pass
def cb | elf, name, value):
# attetion: value is a string and need to be processed !
# if name == 'irgendeinParameter':
# do that .... with value
pass
def cb_quit(self):
# do something before plugin will close, e.a. close connections ...
pass
def cb_get_plugin_configuration(self):
#
# Implement a own part of the config
# config is a hash of hass object
# config_parameter_name : {}
# config[config_parameter_name]['value'] NEEDS TO BE IMPLEMENTED
# configs can be marked as advanced for create dialog
# http://utilitymill.com/utility/Regex_For_Range
config = {
"content": {
'value': """<p> Insert your html code here </p>""",
'display_text' : 'HTML Content',
'advanced' : 'HTLM',
'tooltip' : 'Plain html code to be displayed'
},
"isUrl": {
'value': "0",
'display_text': "Content==Url?",
'tooltip': "Set to 1 if the content is an url that should be loaded",
'advanced' : 'HTML'
},
}
return config
def cb_plugin_meta_updated(self):
"""
Whenever the meta information is updated this function is called (if implemented).
:return:
"""
#dplugin_info = self.dplugin_info
pass
| _set_parameter(s | identifier_name |
HTMLViewer.py | #!/usr/bin/python3
# -*- coding: utf-8 -*-
"""
Copyright (C) 2014 Technische Universität Berlin,
Fakultät IV - Elektrotechnik und Informatik,
Fachgebiet Regelungssysteme,
Einsteinufer 17, D-10587 Berlin, Germany
This file is part of PaPI.
PaPI is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
PaPI is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with PaPI. If not, see <http://www.gnu.org/licenses/>.
Contributors:
<Stefan Ruppin
"""
from PyQt5.QtWebKitWidgets import QWebView
from PyQt5 import QtCore
from papi.plugin.base_classes.vip_base import vip_base
class HTMLViewer(vip_base):
def cb_initialize_plugin(self):
# ---------------------------
# Read configuration
# ---------------------------
self.config = self.pl_get_current_config_ref()
content = self.config['content']['value']
isUrl = self.config['isUrl']['value']
# --------------------------------
# Create Widget
# --------------------------------
self.WebView = QWebView()
# This call is important, because the background structure needs to know the used widget!
# In the background the qmidiwindow will becreated and the widget will be added
self.pl_set_widget_for_internal_usage( self.WebView )
print(isUrl)
if isUrl == '1':
url = QtCore.QUrl(content)
self.WebView.load(url)
else:
self.WebView.setHtml(content)
self.WebView.setContextMenuPolicy(QtCore.Qt.CustomContextMenu)
self.WebView.customContextMenuRequested.connect(self.show_context_menu)
return True
def show_context_menu(self, pos):
gloPos = self.WebView.mapToGlobal(pos)
self.cmenu = self.pl_create_control_context_menu()
self.cmenu.exec_(gloPos)
def cb_pause(self):
# will be called, when plugin gets paused
# can be used to get plugin in a defined state before pause
# e.a.
pass
def cb_resume(self):
# will be called when plugin gets resumed
# can be used to wake up the plugin from defined pause state
# e.a. reopen communication ports, files etc.
pass
def cb_execute(self, Data=None, block_name = None, plugin_uname = None):
# Do main work here!
# If this plugin is an IOP plugin, then there will be no Data parameter because it wont get data
# If this plugin is a DPP, then it will get Data with data
# param: Data is a Data hash and block_name is the block_name of Data origin
# Data is a hash, so use ist like: Data[CORE_TIME_SIGNAL] = [t1, t2, ...] where CORE_TIME_SIGNAL is a signal_name
# hash signal_name: value
# Data could have multiple types stored in it e.a. Data['d1'] = int, Data['d2'] = []
pass
def cb_set_parameter(self, name, value):
# attetion: value is a string and need to be processed !
# if name == 'irgendeinParameter':
# do that .... with value
pass
def cb_quit(self):
# do something before plugin will close, e.a. close connections ...
pass
def cb_get_plugin_configuration(self):
#
# Implement a own part of the config
# config is a hash of hass object
# config_parameter_name : {}
# config[config_parameter_name]['value'] NEEDS TO BE IMPLEMENTED
# configs can be marked as advanced for create dialog
# http://utilitymill.com/utility/Regex_For_Range
config = {
"content": {
'value': """<p> Insert your html code here </p>""",
'display_text' : 'HTML Content',
'advanced' : 'HTLM',
'tooltip' : 'Plain html code to be displayed'
},
"isUrl": {
'value': "0",
'display_text': "Content==Url?", | 'tooltip': "Set to 1 if the content is an url that should be loaded",
'advanced' : 'HTML'
},
}
return config
def cb_plugin_meta_updated(self):
"""
Whenever the meta information is updated this function is called (if implemented).
:return:
"""
#dplugin_info = self.dplugin_info
pass | random_line_split | |
HTMLViewer.py | #!/usr/bin/python3
# -*- coding: utf-8 -*-
"""
Copyright (C) 2014 Technische Universität Berlin,
Fakultät IV - Elektrotechnik und Informatik,
Fachgebiet Regelungssysteme,
Einsteinufer 17, D-10587 Berlin, Germany
This file is part of PaPI.
PaPI is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
PaPI is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with PaPI. If not, see <http://www.gnu.org/licenses/>.
Contributors:
<Stefan Ruppin
"""
from PyQt5.QtWebKitWidgets import QWebView
from PyQt5 import QtCore
from papi.plugin.base_classes.vip_base import vip_base
class HTMLViewer(vip_base):
def cb_initialize_plugin(self):
# ---------------------------
# Read configuration
# ---------------------------
self.config = self.pl_get_current_config_ref()
content = self.config['content']['value']
isUrl = self.config['isUrl']['value']
# --------------------------------
# Create Widget
# --------------------------------
self.WebView = QWebView()
# This call is important, because the background structure needs to know the used widget!
# In the background the qmidiwindow will becreated and the widget will be added
self.pl_set_widget_for_internal_usage( self.WebView )
print(isUrl)
if isUrl == '1':
ur | else:
self.WebView.setHtml(content)
self.WebView.setContextMenuPolicy(QtCore.Qt.CustomContextMenu)
self.WebView.customContextMenuRequested.connect(self.show_context_menu)
return True
def show_context_menu(self, pos):
gloPos = self.WebView.mapToGlobal(pos)
self.cmenu = self.pl_create_control_context_menu()
self.cmenu.exec_(gloPos)
def cb_pause(self):
# will be called, when plugin gets paused
# can be used to get plugin in a defined state before pause
# e.a.
pass
def cb_resume(self):
# will be called when plugin gets resumed
# can be used to wake up the plugin from defined pause state
# e.a. reopen communication ports, files etc.
pass
def cb_execute(self, Data=None, block_name = None, plugin_uname = None):
# Do main work here!
# If this plugin is an IOP plugin, then there will be no Data parameter because it wont get data
# If this plugin is a DPP, then it will get Data with data
# param: Data is a Data hash and block_name is the block_name of Data origin
# Data is a hash, so use ist like: Data[CORE_TIME_SIGNAL] = [t1, t2, ...] where CORE_TIME_SIGNAL is a signal_name
# hash signal_name: value
# Data could have multiple types stored in it e.a. Data['d1'] = int, Data['d2'] = []
pass
def cb_set_parameter(self, name, value):
# attetion: value is a string and need to be processed !
# if name == 'irgendeinParameter':
# do that .... with value
pass
def cb_quit(self):
# do something before plugin will close, e.a. close connections ...
pass
def cb_get_plugin_configuration(self):
#
# Implement a own part of the config
# config is a hash of hass object
# config_parameter_name : {}
# config[config_parameter_name]['value'] NEEDS TO BE IMPLEMENTED
# configs can be marked as advanced for create dialog
# http://utilitymill.com/utility/Regex_For_Range
config = {
"content": {
'value': """<p> Insert your html code here </p>""",
'display_text' : 'HTML Content',
'advanced' : 'HTLM',
'tooltip' : 'Plain html code to be displayed'
},
"isUrl": {
'value': "0",
'display_text': "Content==Url?",
'tooltip': "Set to 1 if the content is an url that should be loaded",
'advanced' : 'HTML'
},
}
return config
def cb_plugin_meta_updated(self):
"""
Whenever the meta information is updated this function is called (if implemented).
:return:
"""
#dplugin_info = self.dplugin_info
pass
| l = QtCore.QUrl(content)
self.WebView.load(url)
| conditional_block |
saveSimu.py | #!/usr/bin/python
import Sofa
import Flexible.IO
import sys,os
datadir=os.path.dirname(os.path.realpath(__file__))+"/data/"
nbFrames = 10
nbGaussPoints = 1
file_Dof= datadir + "dofs.py"
file_SF= datadir + "SF.py"
file_GP= datadir + "GP.py"
# scene creation method
def createScene(rootNode):
rootNode.createObject('RequiredPlugin', pluginName="image")
rootNode.createObject('RequiredPlugin', pluginName="Flexible")
rootNode.createObject('VisualStyle', displayFlags="showBehaviorModels showVisual")
rootNode.findData('dt').value=0.05
rootNode.findData('gravity').value='0 -9.8 0'
rootNode.createObject('EulerImplicit',rayleighStiffness="0",rayleighMass="0")
rootNode.createObject('CGLinearSolver', iterations=25, tolerance=1.0e-9, threshold=1.0e-9)
snode = rootNode.createChild('Flexible')
snode.createObject('MeshObjLoader', name="loader", filename="mesh/torus.obj", triangulate="1")
snode.createObject('MeshToImageEngine', template="ImageUC", name="rasterizer", src="@loader", voxelSize="0.1", padSize="1", rotateImage="true")
snode.createObject('ImageContainer', template="ImageUC", name="image", src="@rasterizer", drawBB="false")
snode.createObject('ImageSampler', template="ImageUC", name="sampler", src="@image", method="1", param=str(nbFrames), fixedPosition="", printLog="false")
snode.createObject('MergeMeshes', name="merged", nbMeshes="2", position1="@sampler.fixedPosition", position2="@sampler.position")
snode.createObject('MechanicalObject', template="Affine", name="dof", showObject="true", showObjectScale="0.7", src="@merged")
sf = snode.createObject('VoronoiShapeFunction', name="SF", position="@dof.rest_position", src="@image", method="0", nbRef="4")
snode.createObject('BoxROI', template="Vec3d", box="0 -2 0 5 2 5", position="@merged.position", name="FixedROI")
snode.createObject('FixedConstraint', indices="@FixedROI.indices")
bnode = snode.createChild('behavior')
gp = bnode.createObject('ImageGaussPointSampler', name="sampler", indices="@../SF.indices", weights="@../SF.weights", transform="@../SF.transform", method="2", order="4" ,showSamplesScale="0", targetNumber=str(nbGaussPoints),clearData="1")
bnode.createObject('MechanicalObject', template="F332")
bnode.createObject('LinearMapping', template="Affine,F332")
Enode = bnode.createChild('E')
Enode.createObject('MechanicalObject', template="E332" )
Enode.createObject('GreenStrainMapping', template="F332,E332" )
Enode.createObject('HookeForceField', template="E332", youngModulus="2000.0" ,poissonRatio="0.2", viscosity="0")
cnode = snode.createChild('collision')
cnode.createObject('Mesh', name="mesh", src="@../loader")
cnode.createObject('MechanicalObject', template="Vec3d", name="pts")
cnode.createObject('UniformMass', totalMass="20")
cnode.createObject('LinearMapping', template="Affine,Vec3d")
vnode = cnode.createChild('visual')
vnode.createObject('VisualModel', color="1 8e-1 8e-1")
vnode.createObject('IdentityMapping')
snode.createObject('PythonScriptController', filename=__file__, classname="simu_save", variables="dof SF behavior/sampler")
print "ctrl+SPACE will dump the simulation model in /data"
return 0
#-----------------------------------------------------------------------------------------------------------------------------------------
class simu_save(Sofa.PythonScriptController):
def createGraph(self,node):
self.node=node
self.dof = node.getObject(self.findData('variables').value[0][0])
self.sf = node.getObject(self.findData('variables').value[1][0])
self.gp = node.getObject(self.findData('variables').value[2][0])
if self.dof==None or self.sf==None or self.gp==None:
|
return 0
def onKeyPressed(self,k):
if self.dof==None or self.sf==None or self.gp==None:
return 0
if ord(k)==32: # ctrl+SPACE
Flexible.IO.export_AffineFrames(self.dof, file_Dof)
Flexible.IO.export_ImageShapeFunction(self.node, self.sf, file_SF)
Flexible.IO.export_GaussPoints(self.gp, file_GP)
print "Simulation state saved";
return 0
| print "PythonScriptController: components in variables not found"
return 0 | conditional_block |
saveSimu.py | #!/usr/bin/python
import Sofa
import Flexible.IO
import sys,os
datadir=os.path.dirname(os.path.realpath(__file__))+"/data/"
nbFrames = 10
nbGaussPoints = 1
file_Dof= datadir + "dofs.py"
file_SF= datadir + "SF.py"
file_GP= datadir + "GP.py"
# scene creation method
def createScene(rootNode):
rootNode.createObject('RequiredPlugin', pluginName="image")
rootNode.createObject('RequiredPlugin', pluginName="Flexible")
rootNode.createObject('VisualStyle', displayFlags="showBehaviorModels showVisual")
rootNode.findData('dt').value=0.05
rootNode.findData('gravity').value='0 -9.8 0'
rootNode.createObject('EulerImplicit',rayleighStiffness="0",rayleighMass="0")
rootNode.createObject('CGLinearSolver', iterations=25, tolerance=1.0e-9, threshold=1.0e-9)
snode = rootNode.createChild('Flexible')
snode.createObject('MeshObjLoader', name="loader", filename="mesh/torus.obj", triangulate="1")
snode.createObject('MeshToImageEngine', template="ImageUC", name="rasterizer", src="@loader", voxelSize="0.1", padSize="1", rotateImage="true")
snode.createObject('ImageContainer', template="ImageUC", name="image", src="@rasterizer", drawBB="false")
snode.createObject('ImageSampler', template="ImageUC", name="sampler", src="@image", method="1", param=str(nbFrames), fixedPosition="", printLog="false")
snode.createObject('MergeMeshes', name="merged", nbMeshes="2", position1="@sampler.fixedPosition", position2="@sampler.position")
snode.createObject('MechanicalObject', template="Affine", name="dof", showObject="true", showObjectScale="0.7", src="@merged")
sf = snode.createObject('VoronoiShapeFunction', name="SF", position="@dof.rest_position", src="@image", method="0", nbRef="4")
snode.createObject('BoxROI', template="Vec3d", box="0 -2 0 5 2 5", position="@merged.position", name="FixedROI")
snode.createObject('FixedConstraint', indices="@FixedROI.indices")
bnode = snode.createChild('behavior')
gp = bnode.createObject('ImageGaussPointSampler', name="sampler", indices="@../SF.indices", weights="@../SF.weights", transform="@../SF.transform", method="2", order="4" ,showSamplesScale="0", targetNumber=str(nbGaussPoints),clearData="1")
bnode.createObject('MechanicalObject', template="F332")
bnode.createObject('LinearMapping', template="Affine,F332")
Enode = bnode.createChild('E')
Enode.createObject('MechanicalObject', template="E332" )
Enode.createObject('GreenStrainMapping', template="F332,E332" )
Enode.createObject('HookeForceField', template="E332", youngModulus="2000.0" ,poissonRatio="0.2", viscosity="0")
cnode = snode.createChild('collision')
cnode.createObject('Mesh', name="mesh", src="@../loader")
cnode.createObject('MechanicalObject', template="Vec3d", name="pts")
cnode.createObject('UniformMass', totalMass="20")
cnode.createObject('LinearMapping', template="Affine,Vec3d")
vnode = cnode.createChild('visual')
vnode.createObject('VisualModel', color="1 8e-1 8e-1")
vnode.createObject('IdentityMapping')
snode.createObject('PythonScriptController', filename=__file__, classname="simu_save", variables="dof SF behavior/sampler")
print "ctrl+SPACE will dump the simulation model in /data"
return 0
#-----------------------------------------------------------------------------------------------------------------------------------------
class simu_save(Sofa.PythonScriptController):
def createGraph(self,node):
self.node=node
self.dof = node.getObject(self.findData('variables').value[0][0])
self.sf = node.getObject(self.findData('variables').value[1][0])
self.gp = node.getObject(self.findData('variables').value[2][0])
if self.dof==None or self.sf==None or self.gp==None:
print "PythonScriptController: components in variables not found"
return 0
return 0
def onKeyPressed(self,k):
| if self.dof==None or self.sf==None or self.gp==None:
return 0
if ord(k)==32: # ctrl+SPACE
Flexible.IO.export_AffineFrames(self.dof, file_Dof)
Flexible.IO.export_ImageShapeFunction(self.node, self.sf, file_SF)
Flexible.IO.export_GaussPoints(self.gp, file_GP)
print "Simulation state saved";
return 0 | identifier_body | |
saveSimu.py | #!/usr/bin/python
import Sofa
import Flexible.IO
import sys,os
datadir=os.path.dirname(os.path.realpath(__file__))+"/data/"
nbFrames = 10
nbGaussPoints = 1
file_Dof= datadir + "dofs.py"
file_SF= datadir + "SF.py"
file_GP= datadir + "GP.py"
# scene creation method
def createScene(rootNode):
rootNode.createObject('RequiredPlugin', pluginName="image")
rootNode.createObject('RequiredPlugin', pluginName="Flexible")
rootNode.createObject('VisualStyle', displayFlags="showBehaviorModels showVisual")
rootNode.findData('dt').value=0.05
rootNode.findData('gravity').value='0 -9.8 0'
rootNode.createObject('EulerImplicit',rayleighStiffness="0",rayleighMass="0")
rootNode.createObject('CGLinearSolver', iterations=25, tolerance=1.0e-9, threshold=1.0e-9)
snode = rootNode.createChild('Flexible')
snode.createObject('MeshObjLoader', name="loader", filename="mesh/torus.obj", triangulate="1")
snode.createObject('MeshToImageEngine', template="ImageUC", name="rasterizer", src="@loader", voxelSize="0.1", padSize="1", rotateImage="true")
snode.createObject('ImageContainer', template="ImageUC", name="image", src="@rasterizer", drawBB="false")
snode.createObject('ImageSampler', template="ImageUC", name="sampler", src="@image", method="1", param=str(nbFrames), fixedPosition="", printLog="false")
snode.createObject('MergeMeshes', name="merged", nbMeshes="2", position1="@sampler.fixedPosition", position2="@sampler.position")
snode.createObject('MechanicalObject', template="Affine", name="dof", showObject="true", showObjectScale="0.7", src="@merged")
sf = snode.createObject('VoronoiShapeFunction', name="SF", position="@dof.rest_position", src="@image", method="0", nbRef="4")
snode.createObject('BoxROI', template="Vec3d", box="0 -2 0 5 2 5", position="@merged.position", name="FixedROI")
snode.createObject('FixedConstraint', indices="@FixedROI.indices")
bnode = snode.createChild('behavior')
gp = bnode.createObject('ImageGaussPointSampler', name="sampler", indices="@../SF.indices", weights="@../SF.weights", transform="@../SF.transform", method="2", order="4" ,showSamplesScale="0", targetNumber=str(nbGaussPoints),clearData="1")
bnode.createObject('MechanicalObject', template="F332")
bnode.createObject('LinearMapping', template="Affine,F332")
Enode = bnode.createChild('E')
Enode.createObject('MechanicalObject', template="E332" )
Enode.createObject('GreenStrainMapping', template="F332,E332" )
Enode.createObject('HookeForceField', template="E332", youngModulus="2000.0" ,poissonRatio="0.2", viscosity="0")
cnode = snode.createChild('collision')
cnode.createObject('Mesh', name="mesh", src="@../loader")
cnode.createObject('MechanicalObject', template="Vec3d", name="pts")
cnode.createObject('UniformMass', totalMass="20")
cnode.createObject('LinearMapping', template="Affine,Vec3d")
vnode = cnode.createChild('visual')
vnode.createObject('VisualModel', color="1 8e-1 8e-1")
vnode.createObject('IdentityMapping')
snode.createObject('PythonScriptController', filename=__file__, classname="simu_save", variables="dof SF behavior/sampler")
print "ctrl+SPACE will dump the simulation model in /data"
return 0
|
class simu_save(Sofa.PythonScriptController):
def createGraph(self,node):
self.node=node
self.dof = node.getObject(self.findData('variables').value[0][0])
self.sf = node.getObject(self.findData('variables').value[1][0])
self.gp = node.getObject(self.findData('variables').value[2][0])
if self.dof==None or self.sf==None or self.gp==None:
print "PythonScriptController: components in variables not found"
return 0
return 0
def onKeyPressed(self,k):
if self.dof==None or self.sf==None or self.gp==None:
return 0
if ord(k)==32: # ctrl+SPACE
Flexible.IO.export_AffineFrames(self.dof, file_Dof)
Flexible.IO.export_ImageShapeFunction(self.node, self.sf, file_SF)
Flexible.IO.export_GaussPoints(self.gp, file_GP)
print "Simulation state saved";
return 0 | #----------------------------------------------------------------------------------------------------------------------------------------- | random_line_split |
saveSimu.py | #!/usr/bin/python
import Sofa
import Flexible.IO
import sys,os
datadir=os.path.dirname(os.path.realpath(__file__))+"/data/"
nbFrames = 10
nbGaussPoints = 1
file_Dof= datadir + "dofs.py"
file_SF= datadir + "SF.py"
file_GP= datadir + "GP.py"
# scene creation method
def createScene(rootNode):
rootNode.createObject('RequiredPlugin', pluginName="image")
rootNode.createObject('RequiredPlugin', pluginName="Flexible")
rootNode.createObject('VisualStyle', displayFlags="showBehaviorModels showVisual")
rootNode.findData('dt').value=0.05
rootNode.findData('gravity').value='0 -9.8 0'
rootNode.createObject('EulerImplicit',rayleighStiffness="0",rayleighMass="0")
rootNode.createObject('CGLinearSolver', iterations=25, tolerance=1.0e-9, threshold=1.0e-9)
snode = rootNode.createChild('Flexible')
snode.createObject('MeshObjLoader', name="loader", filename="mesh/torus.obj", triangulate="1")
snode.createObject('MeshToImageEngine', template="ImageUC", name="rasterizer", src="@loader", voxelSize="0.1", padSize="1", rotateImage="true")
snode.createObject('ImageContainer', template="ImageUC", name="image", src="@rasterizer", drawBB="false")
snode.createObject('ImageSampler', template="ImageUC", name="sampler", src="@image", method="1", param=str(nbFrames), fixedPosition="", printLog="false")
snode.createObject('MergeMeshes', name="merged", nbMeshes="2", position1="@sampler.fixedPosition", position2="@sampler.position")
snode.createObject('MechanicalObject', template="Affine", name="dof", showObject="true", showObjectScale="0.7", src="@merged")
sf = snode.createObject('VoronoiShapeFunction', name="SF", position="@dof.rest_position", src="@image", method="0", nbRef="4")
snode.createObject('BoxROI', template="Vec3d", box="0 -2 0 5 2 5", position="@merged.position", name="FixedROI")
snode.createObject('FixedConstraint', indices="@FixedROI.indices")
bnode = snode.createChild('behavior')
gp = bnode.createObject('ImageGaussPointSampler', name="sampler", indices="@../SF.indices", weights="@../SF.weights", transform="@../SF.transform", method="2", order="4" ,showSamplesScale="0", targetNumber=str(nbGaussPoints),clearData="1")
bnode.createObject('MechanicalObject', template="F332")
bnode.createObject('LinearMapping', template="Affine,F332")
Enode = bnode.createChild('E')
Enode.createObject('MechanicalObject', template="E332" )
Enode.createObject('GreenStrainMapping', template="F332,E332" )
Enode.createObject('HookeForceField', template="E332", youngModulus="2000.0" ,poissonRatio="0.2", viscosity="0")
cnode = snode.createChild('collision')
cnode.createObject('Mesh', name="mesh", src="@../loader")
cnode.createObject('MechanicalObject', template="Vec3d", name="pts")
cnode.createObject('UniformMass', totalMass="20")
cnode.createObject('LinearMapping', template="Affine,Vec3d")
vnode = cnode.createChild('visual')
vnode.createObject('VisualModel', color="1 8e-1 8e-1")
vnode.createObject('IdentityMapping')
snode.createObject('PythonScriptController', filename=__file__, classname="simu_save", variables="dof SF behavior/sampler")
print "ctrl+SPACE will dump the simulation model in /data"
return 0
#-----------------------------------------------------------------------------------------------------------------------------------------
class | (Sofa.PythonScriptController):
def createGraph(self,node):
self.node=node
self.dof = node.getObject(self.findData('variables').value[0][0])
self.sf = node.getObject(self.findData('variables').value[1][0])
self.gp = node.getObject(self.findData('variables').value[2][0])
if self.dof==None or self.sf==None or self.gp==None:
print "PythonScriptController: components in variables not found"
return 0
return 0
def onKeyPressed(self,k):
if self.dof==None or self.sf==None or self.gp==None:
return 0
if ord(k)==32: # ctrl+SPACE
Flexible.IO.export_AffineFrames(self.dof, file_Dof)
Flexible.IO.export_ImageShapeFunction(self.node, self.sf, file_SF)
Flexible.IO.export_GaussPoints(self.gp, file_GP)
print "Simulation state saved";
return 0
| simu_save | identifier_name |
index.js | import React, { Component } from 'react';
import { TouchableHighlight, Text } from 'react-native';
import All from 'rnx-ui/All';
import ImgRollView from 'rnx-ui/ImgRollView';
import Alert from 'rnx-ui/Alert';
import { NavBar, Icon } from '../../component';
import Router from '../../router';
import style from './styles';
class Page extends Component {
static section = 'Data Entry';
constructor(props) {
super(props);
this.state = {
uriSelected: [],
visible: false,
};
this.onSelect = this.onSelect.bind(this);
this.toggleURIList = this.toggleURIList.bind(this);
}
| (data) {
const { uriSelected } = data;
this.setState({ uriSelected });
}
toggleURIList() {
this.setState({
visible: !this.state.visible,
});
}
render() {
const { uriSelected, visible } = this.state;
return (
<All>
<NavBar title="ImgRollView" />
<ImgRollView
onSelect={this.onSelect}
style={style.imgRollViewStyle}
maxSelected={5}
iconSelected={<Icon name="fa-check" style={style.iconStyle} />}
iconUnSelected={<Icon name="fa-check" style={style.iconStyle} />}
/>
<TouchableHighlight onPress={this.toggleURIList} style={style.bottomBar}>
<Text style={style.bottomBarText}>
请选择图片:{uriSelected.length}/5
</Text>
</TouchableHighlight>
<Alert
visible={visible}
message={uriSelected.join('\n')}
onPress={this.toggleURIList}
/>
</All>
);
}
}
Router.register('ImgRollView', Page);
export default Page;
| onSelect | identifier_name |
index.js | import React, { Component } from 'react';
import { TouchableHighlight, Text } from 'react-native';
import All from 'rnx-ui/All';
import ImgRollView from 'rnx-ui/ImgRollView';
import Alert from 'rnx-ui/Alert';
import { NavBar, Icon } from '../../component';
import Router from '../../router';
import style from './styles';
class Page extends Component {
static section = 'Data Entry';
constructor(props) {
super(props);
this.state = {
uriSelected: [],
visible: false,
};
this.onSelect = this.onSelect.bind(this);
this.toggleURIList = this.toggleURIList.bind(this);
}
onSelect(data) {
const { uriSelected } = data;
this.setState({ uriSelected });
}
toggleURIList() {
this.setState({
visible: !this.state.visible,
});
}
render() {
const { uriSelected, visible } = this.state;
return (
<All>
<NavBar title="ImgRollView" />
<ImgRollView
onSelect={this.onSelect}
style={style.imgRollViewStyle}
maxSelected={5}
iconSelected={<Icon name="fa-check" style={style.iconStyle} />}
iconUnSelected={<Icon name="fa-check" style={style.iconStyle} />}
/>
<TouchableHighlight onPress={this.toggleURIList} style={style.bottomBar}>
<Text style={style.bottomBarText}>
请选择图片:{uriSelected.length}/5
</Text>
</TouchableHighlight>
<Alert
visible={visible}
message={uriSelected.join('\n')} | onPress={this.toggleURIList}
/>
</All>
);
}
}
Router.register('ImgRollView', Page);
export default Page; | random_line_split | |
index.js | import React, { Component } from 'react';
import { TouchableHighlight, Text } from 'react-native';
import All from 'rnx-ui/All';
import ImgRollView from 'rnx-ui/ImgRollView';
import Alert from 'rnx-ui/Alert';
import { NavBar, Icon } from '../../component';
import Router from '../../router';
import style from './styles';
class Page extends Component {
static section = 'Data Entry';
constructor(props) |
onSelect(data) {
const { uriSelected } = data;
this.setState({ uriSelected });
}
toggleURIList() {
this.setState({
visible: !this.state.visible,
});
}
render() {
const { uriSelected, visible } = this.state;
return (
<All>
<NavBar title="ImgRollView" />
<ImgRollView
onSelect={this.onSelect}
style={style.imgRollViewStyle}
maxSelected={5}
iconSelected={<Icon name="fa-check" style={style.iconStyle} />}
iconUnSelected={<Icon name="fa-check" style={style.iconStyle} />}
/>
<TouchableHighlight onPress={this.toggleURIList} style={style.bottomBar}>
<Text style={style.bottomBarText}>
请选择图片:{uriSelected.length}/5
</Text>
</TouchableHighlight>
<Alert
visible={visible}
message={uriSelected.join('\n')}
onPress={this.toggleURIList}
/>
</All>
);
}
}
Router.register('ImgRollView', Page);
export default Page;
| {
super(props);
this.state = {
uriSelected: [],
visible: false,
};
this.onSelect = this.onSelect.bind(this);
this.toggleURIList = this.toggleURIList.bind(this);
} | identifier_body |
views.py | # -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from django.core.urlresolvers import reverse
from django.views.generic import DetailView, ListView, RedirectView, UpdateView
from django.contrib.auth.mixins import LoginRequiredMixin
from .models import User
class UserDetailView(LoginRequiredMixin, DetailView):
model = User
# These next two lines tell the view to index lookups by username
slug_field = "username"
slug_url_kwarg = "username"
class UserRedirectView(LoginRequiredMixin, RedirectView):
permanent = False
def get_redirect_url(self):
return reverse("users:detail",
kwargs={"username": self.request.user.username})
class UserUpdateView(LoginRequiredMixin, UpdateView):
fields = ['name', ]
# we already imported User in the view code above, remember?
model = User
# send the user back to their own page after a successful update
def | (self):
return reverse("users:detail",
kwargs={"username": self.request.user.username})
def get_object(self):
# Only get the User record for the user making the request
return User.objects.get(username=self.request.user.username)
class UserListView(LoginRequiredMixin, ListView):
model = User
# These next two lines tell the view to index lookups by username
slug_field = "username"
slug_url_kwarg = "username"
| get_success_url | identifier_name |
views.py | # -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from django.core.urlresolvers import reverse
from django.views.generic import DetailView, ListView, RedirectView, UpdateView
from django.contrib.auth.mixins import LoginRequiredMixin
from .models import User
class UserDetailView(LoginRequiredMixin, DetailView):
model = User
# These next two lines tell the view to index lookups by username
slug_field = "username"
slug_url_kwarg = "username"
class UserRedirectView(LoginRequiredMixin, RedirectView):
permanent = False
def get_redirect_url(self):
|
class UserUpdateView(LoginRequiredMixin, UpdateView):
fields = ['name', ]
# we already imported User in the view code above, remember?
model = User
# send the user back to their own page after a successful update
def get_success_url(self):
return reverse("users:detail",
kwargs={"username": self.request.user.username})
def get_object(self):
# Only get the User record for the user making the request
return User.objects.get(username=self.request.user.username)
class UserListView(LoginRequiredMixin, ListView):
model = User
# These next two lines tell the view to index lookups by username
slug_field = "username"
slug_url_kwarg = "username"
| return reverse("users:detail",
kwargs={"username": self.request.user.username}) | identifier_body |
views.py | # -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from django.core.urlresolvers import reverse
from django.views.generic import DetailView, ListView, RedirectView, UpdateView
from django.contrib.auth.mixins import LoginRequiredMixin
from .models import User
class UserDetailView(LoginRequiredMixin, DetailView):
model = User
# These next two lines tell the view to index lookups by username
slug_field = "username"
slug_url_kwarg = "username"
class UserRedirectView(LoginRequiredMixin, RedirectView):
permanent = False
def get_redirect_url(self):
return reverse("users:detail",
kwargs={"username": self.request.user.username})
class UserUpdateView(LoginRequiredMixin, UpdateView):
fields = ['name', ]
# we already imported User in the view code above, remember?
model = User
# send the user back to their own page after a successful update
def get_success_url(self):
return reverse("users:detail",
kwargs={"username": self.request.user.username}) | # Only get the User record for the user making the request
return User.objects.get(username=self.request.user.username)
class UserListView(LoginRequiredMixin, ListView):
model = User
# These next two lines tell the view to index lookups by username
slug_field = "username"
slug_url_kwarg = "username" |
def get_object(self): | random_line_split |
resources.py | # Autogenerated by the mkresources management command 2014-11-13 23:53
from tastypie.resources import ModelResource
from tastypie.fields import ToOneField, ToManyField
from tastypie.constants import ALL, ALL_WITH_RELATIONS
from ietf import api
from ietf.message.models import * # pyflakes:ignore
from ietf.person.resources import PersonResource
from ietf.group.resources import GroupResource
from ietf.doc.resources import DocumentResource
class MessageResource(ModelResource):
by = ToOneField(PersonResource, 'by')
related_groups = ToManyField(GroupResource, 'related_groups', null=True)
related_docs = ToManyField(DocumentResource, 'related_docs', null=True)
class Meta:
queryset = Message.objects.all()
serializer = api.Serializer()
#resource_name = 'message'
filtering = {
"id": ALL,
"time": ALL,
"subject": ALL,
"frm": ALL,
"to": ALL,
"cc": ALL,
"bcc": ALL,
"reply_to": ALL,
"body": ALL,
"content_type": ALL,
"by": ALL_WITH_RELATIONS,
"related_groups": ALL_WITH_RELATIONS,
"related_docs": ALL_WITH_RELATIONS,
}
api.message.register(MessageResource())
from ietf.person.resources import PersonResource
class | (ModelResource):
by = ToOneField(PersonResource, 'by')
message = ToOneField(MessageResource, 'message')
class Meta:
queryset = SendQueue.objects.all()
serializer = api.Serializer()
#resource_name = 'sendqueue'
filtering = {
"id": ALL,
"time": ALL,
"send_at": ALL,
"sent_at": ALL,
"note": ALL,
"by": ALL_WITH_RELATIONS,
"message": ALL_WITH_RELATIONS,
}
api.message.register(SendQueueResource())
| SendQueueResource | identifier_name |
resources.py | # Autogenerated by the mkresources management command 2014-11-13 23:53
from tastypie.resources import ModelResource
from tastypie.fields import ToOneField, ToManyField
from tastypie.constants import ALL, ALL_WITH_RELATIONS
from ietf import api
from ietf.message.models import * # pyflakes:ignore
from ietf.person.resources import PersonResource
from ietf.group.resources import GroupResource
from ietf.doc.resources import DocumentResource
class MessageResource(ModelResource):
by = ToOneField(PersonResource, 'by')
related_groups = ToManyField(GroupResource, 'related_groups', null=True)
related_docs = ToManyField(DocumentResource, 'related_docs', null=True)
class Meta:
queryset = Message.objects.all()
serializer = api.Serializer()
#resource_name = 'message'
filtering = {
"id": ALL,
"time": ALL,
"subject": ALL,
"frm": ALL,
"to": ALL,
"cc": ALL,
"bcc": ALL,
"reply_to": ALL,
"body": ALL,
"content_type": ALL,
"by": ALL_WITH_RELATIONS, | "related_docs": ALL_WITH_RELATIONS,
}
api.message.register(MessageResource())
from ietf.person.resources import PersonResource
class SendQueueResource(ModelResource):
by = ToOneField(PersonResource, 'by')
message = ToOneField(MessageResource, 'message')
class Meta:
queryset = SendQueue.objects.all()
serializer = api.Serializer()
#resource_name = 'sendqueue'
filtering = {
"id": ALL,
"time": ALL,
"send_at": ALL,
"sent_at": ALL,
"note": ALL,
"by": ALL_WITH_RELATIONS,
"message": ALL_WITH_RELATIONS,
}
api.message.register(SendQueueResource()) | "related_groups": ALL_WITH_RELATIONS, | random_line_split |
resources.py | # Autogenerated by the mkresources management command 2014-11-13 23:53
from tastypie.resources import ModelResource
from tastypie.fields import ToOneField, ToManyField
from tastypie.constants import ALL, ALL_WITH_RELATIONS
from ietf import api
from ietf.message.models import * # pyflakes:ignore
from ietf.person.resources import PersonResource
from ietf.group.resources import GroupResource
from ietf.doc.resources import DocumentResource
class MessageResource(ModelResource):
by = ToOneField(PersonResource, 'by')
related_groups = ToManyField(GroupResource, 'related_groups', null=True)
related_docs = ToManyField(DocumentResource, 'related_docs', null=True)
class Meta:
queryset = Message.objects.all()
serializer = api.Serializer()
#resource_name = 'message'
filtering = {
"id": ALL,
"time": ALL,
"subject": ALL,
"frm": ALL,
"to": ALL,
"cc": ALL,
"bcc": ALL,
"reply_to": ALL,
"body": ALL,
"content_type": ALL,
"by": ALL_WITH_RELATIONS,
"related_groups": ALL_WITH_RELATIONS,
"related_docs": ALL_WITH_RELATIONS,
}
api.message.register(MessageResource())
from ietf.person.resources import PersonResource
class SendQueueResource(ModelResource):
|
api.message.register(SendQueueResource())
| by = ToOneField(PersonResource, 'by')
message = ToOneField(MessageResource, 'message')
class Meta:
queryset = SendQueue.objects.all()
serializer = api.Serializer()
#resource_name = 'sendqueue'
filtering = {
"id": ALL,
"time": ALL,
"send_at": ALL,
"sent_at": ALL,
"note": ALL,
"by": ALL_WITH_RELATIONS,
"message": ALL_WITH_RELATIONS,
} | identifier_body |
TreeTraversal.py | import Queue
class Graph:
def __init__(self, number_of_vertices):
self.number_of_vertices = number_of_vertices
self.vertex_details = {}
self.visited = {}
def add_edge(self, vertex_label, edge):
if self.vertex_details.has_key(vertex_label):
self.vertex_details[vertex_label].append(edge)
else:
self.vertex_details[vertex_label] = []
self.vertex_details[vertex_label].append(edge)
self.visited[vertex_label] = 0
def bfs(self, starting_vertex):
print "Starting breath first search from vertex: ", starting_vertex
bfs_queue = Queue.Queue()
bfs_trace = []
bfs_queue.put(starting_vertex)
self.visited[starting_vertex] = 1
while(not bfs_queue.empty()):
current_vertex = bfs_queue.get()
bfs_trace.append(current_vertex)
adjacent_vertices = self.vertex_details[current_vertex]
for adjacent_vertex in adjacent_vertices:
if self.visited[adjacent_vertex] == 0:
bfs_queue.put(adjacent_vertex)
self.visited[adjacent_vertex] = 1
return bfs_trace
def dfs(self, vertex):
self.visited[vertex] = 1
print vertex," ",
adjacent_vertices = self.vertex_details[vertex]
for adjacent_vertex in adjacent_vertices:
if self.visited[adjacent_vertex] == 0:
self.dfs(adjacent_vertex)
def print_bfs(self, bfs_trace):
print bfs_trace
def | ():
g = Graph(4)
g.add_edge(0, 1);
g.add_edge(0, 2);
g.add_edge(1, 2);
g.add_edge(2, 0);
g.add_edge(2, 3);
g.add_edge(3, 3);
# bfs_trace = g.bfs(2)
# g.print_bfs(bfs_trace)
g.dfs(2)
if __name__ == '__main__':
main()
| main | identifier_name |
TreeTraversal.py | import Queue
class Graph:
def __init__(self, number_of_vertices):
self.number_of_vertices = number_of_vertices
self.vertex_details = {}
self.visited = {}
def add_edge(self, vertex_label, edge):
if self.vertex_details.has_key(vertex_label):
self.vertex_details[vertex_label].append(edge)
else:
self.vertex_details[vertex_label] = []
self.vertex_details[vertex_label].append(edge)
self.visited[vertex_label] = 0
def bfs(self, starting_vertex):
|
def dfs(self, vertex):
self.visited[vertex] = 1
print vertex," ",
adjacent_vertices = self.vertex_details[vertex]
for adjacent_vertex in adjacent_vertices:
if self.visited[adjacent_vertex] == 0:
self.dfs(adjacent_vertex)
def print_bfs(self, bfs_trace):
print bfs_trace
def main():
g = Graph(4)
g.add_edge(0, 1);
g.add_edge(0, 2);
g.add_edge(1, 2);
g.add_edge(2, 0);
g.add_edge(2, 3);
g.add_edge(3, 3);
# bfs_trace = g.bfs(2)
# g.print_bfs(bfs_trace)
g.dfs(2)
if __name__ == '__main__':
main()
| print "Starting breath first search from vertex: ", starting_vertex
bfs_queue = Queue.Queue()
bfs_trace = []
bfs_queue.put(starting_vertex)
self.visited[starting_vertex] = 1
while(not bfs_queue.empty()):
current_vertex = bfs_queue.get()
bfs_trace.append(current_vertex)
adjacent_vertices = self.vertex_details[current_vertex]
for adjacent_vertex in adjacent_vertices:
if self.visited[adjacent_vertex] == 0:
bfs_queue.put(adjacent_vertex)
self.visited[adjacent_vertex] = 1
return bfs_trace | identifier_body |
TreeTraversal.py | import Queue
class Graph:
def __init__(self, number_of_vertices):
self.number_of_vertices = number_of_vertices
self.vertex_details = {}
self.visited = {}
def add_edge(self, vertex_label, edge):
if self.vertex_details.has_key(vertex_label):
self.vertex_details[vertex_label].append(edge)
else:
self.vertex_details[vertex_label] = []
self.vertex_details[vertex_label].append(edge)
self.visited[vertex_label] = 0
def bfs(self, starting_vertex):
print "Starting breath first search from vertex: ", starting_vertex
bfs_queue = Queue.Queue()
bfs_trace = []
bfs_queue.put(starting_vertex)
self.visited[starting_vertex] = 1
while(not bfs_queue.empty()):
|
return bfs_trace
def dfs(self, vertex):
self.visited[vertex] = 1
print vertex," ",
adjacent_vertices = self.vertex_details[vertex]
for adjacent_vertex in adjacent_vertices:
if self.visited[adjacent_vertex] == 0:
self.dfs(adjacent_vertex)
def print_bfs(self, bfs_trace):
print bfs_trace
def main():
g = Graph(4)
g.add_edge(0, 1);
g.add_edge(0, 2);
g.add_edge(1, 2);
g.add_edge(2, 0);
g.add_edge(2, 3);
g.add_edge(3, 3);
# bfs_trace = g.bfs(2)
# g.print_bfs(bfs_trace)
g.dfs(2)
if __name__ == '__main__':
main()
| current_vertex = bfs_queue.get()
bfs_trace.append(current_vertex)
adjacent_vertices = self.vertex_details[current_vertex]
for adjacent_vertex in adjacent_vertices:
if self.visited[adjacent_vertex] == 0:
bfs_queue.put(adjacent_vertex)
self.visited[adjacent_vertex] = 1 | conditional_block |
TreeTraversal.py | import Queue
class Graph:
def __init__(self, number_of_vertices):
self.number_of_vertices = number_of_vertices
self.vertex_details = {}
self.visited = {}
def add_edge(self, vertex_label, edge):
if self.vertex_details.has_key(vertex_label):
self.vertex_details[vertex_label].append(edge)
else:
self.vertex_details[vertex_label] = []
self.vertex_details[vertex_label].append(edge)
self.visited[vertex_label] = 0
def bfs(self, starting_vertex):
print "Starting breath first search from vertex: ", starting_vertex
bfs_queue = Queue.Queue()
bfs_trace = []
bfs_queue.put(starting_vertex)
self.visited[starting_vertex] = 1
while(not bfs_queue.empty()):
current_vertex = bfs_queue.get()
bfs_trace.append(current_vertex)
adjacent_vertices = self.vertex_details[current_vertex]
for adjacent_vertex in adjacent_vertices:
if self.visited[adjacent_vertex] == 0:
bfs_queue.put(adjacent_vertex)
self.visited[adjacent_vertex] = 1
return bfs_trace
def dfs(self, vertex):
self.visited[vertex] = 1
print vertex," ",
adjacent_vertices = self.vertex_details[vertex]
for adjacent_vertex in adjacent_vertices:
if self.visited[adjacent_vertex] == 0:
self.dfs(adjacent_vertex)
def print_bfs(self, bfs_trace):
print bfs_trace |
def main():
g = Graph(4)
g.add_edge(0, 1);
g.add_edge(0, 2);
g.add_edge(1, 2);
g.add_edge(2, 0);
g.add_edge(2, 3);
g.add_edge(3, 3);
# bfs_trace = g.bfs(2)
# g.print_bfs(bfs_trace)
g.dfs(2)
if __name__ == '__main__':
main() | random_line_split | |
char_indexing.rs | use std::ops::Range;
pub(crate) trait CharIndexable<'b> {
fn char_index(&'b self, range: Range<usize>) -> &'b str;
}
pub struct CharIndexableStr<'a> {
s: &'a str,
indices: Vec<usize>,
}
impl CharIndexableStr<'_> {
pub(crate) fn char_count(&self) -> usize {
self.indices.len()
}
}
impl<'a> From<&'a str> for CharIndexableStr<'a> {
fn | (s: &'a str) -> Self {
CharIndexableStr {
indices: s.char_indices().map(|(i, _c)| i).collect(),
s,
}
}
}
impl<'a, 'b: 'a> CharIndexable<'b> for CharIndexableStr<'a> {
fn char_index(&'b self, range: Range<usize>) -> &'b str {
if range.end >= self.indices.len() {
&self.s[self.indices[range.start]..]
} else {
&self.s[self.indices[range.start]..self.indices[range.end]]
}
}
}
| from | identifier_name |
char_indexing.rs | use std::ops::Range;
pub(crate) trait CharIndexable<'b> {
fn char_index(&'b self, range: Range<usize>) -> &'b str;
}
pub struct CharIndexableStr<'a> {
s: &'a str,
indices: Vec<usize>,
}
impl CharIndexableStr<'_> {
pub(crate) fn char_count(&self) -> usize {
self.indices.len()
}
}
impl<'a> From<&'a str> for CharIndexableStr<'a> {
fn from(s: &'a str) -> Self {
CharIndexableStr {
indices: s.char_indices().map(|(i, _c)| i).collect(),
s,
}
}
}
impl<'a, 'b: 'a> CharIndexable<'b> for CharIndexableStr<'a> {
fn char_index(&'b self, range: Range<usize>) -> &'b str {
if range.end >= self.indices.len() {
&self.s[self.indices[range.start]..]
} else |
}
}
| {
&self.s[self.indices[range.start]..self.indices[range.end]]
} | conditional_block |
char_indexing.rs | use std::ops::Range;
pub(crate) trait CharIndexable<'b> {
fn char_index(&'b self, range: Range<usize>) -> &'b str;
}
pub struct CharIndexableStr<'a> {
s: &'a str,
indices: Vec<usize>,
}
impl CharIndexableStr<'_> {
pub(crate) fn char_count(&self) -> usize {
self.indices.len()
}
}
impl<'a> From<&'a str> for CharIndexableStr<'a> {
fn from(s: &'a str) -> Self {
CharIndexableStr {
indices: s.char_indices().map(|(i, _c)| i).collect(),
s,
}
}
}
impl<'a, 'b: 'a> CharIndexable<'b> for CharIndexableStr<'a> {
fn char_index(&'b self, range: Range<usize>) -> &'b str {
if range.end >= self.indices.len() { | &self.s[self.indices[range.start]..self.indices[range.end]]
}
}
} | &self.s[self.indices[range.start]..]
} else { | random_line_split |
estr-slice.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
pub fn main() | {
let x = &"hello";
let v = &"hello";
let y : &str = &"there";
info!(x);
info!(y);
assert_eq!(x[0], 'h' as u8);
assert_eq!(x[4], 'o' as u8);
let z : &str = &"thing";
assert_eq!(v, x);
assert!(x != z);
let a = &"aaaa";
let b = &"bbbb";
let c = &"cccc";
let cc = &"ccccc";
info!(a);
assert!(a < b);
assert!(a <= b);
assert!(a != b);
assert!(b >= a);
assert!(b > a);
info!(b);
assert!(a < c);
assert!(a <= c);
assert!(a != c);
assert!(c >= a);
assert!(c > a);
info!(c);
assert!(c < cc);
assert!(c <= cc);
assert!(c != cc);
assert!(cc >= c);
assert!(cc > c);
info!(cc);
} | identifier_body | |
estr-slice.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
pub fn main() {
let x = &"hello";
let v = &"hello";
let y : &str = &"there";
info!(x);
info!(y);
assert_eq!(x[0], 'h' as u8);
assert_eq!(x[4], 'o' as u8);
let z : &str = &"thing";
assert_eq!(v, x);
assert!(x != z);
let a = &"aaaa";
let b = &"bbbb";
let c = &"cccc";
let cc = &"ccccc";
info!(a);
assert!(a < b);
assert!(a <= b);
assert!(a != b);
assert!(b >= a);
assert!(b > a);
info!(b);
assert!(a < c); | assert!(c > a);
info!(c);
assert!(c < cc);
assert!(c <= cc);
assert!(c != cc);
assert!(cc >= c);
assert!(cc > c);
info!(cc);
} | assert!(a <= c);
assert!(a != c);
assert!(c >= a); | random_line_split |
estr-slice.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
pub fn | () {
let x = &"hello";
let v = &"hello";
let y : &str = &"there";
info!(x);
info!(y);
assert_eq!(x[0], 'h' as u8);
assert_eq!(x[4], 'o' as u8);
let z : &str = &"thing";
assert_eq!(v, x);
assert!(x != z);
let a = &"aaaa";
let b = &"bbbb";
let c = &"cccc";
let cc = &"ccccc";
info!(a);
assert!(a < b);
assert!(a <= b);
assert!(a != b);
assert!(b >= a);
assert!(b > a);
info!(b);
assert!(a < c);
assert!(a <= c);
assert!(a != c);
assert!(c >= a);
assert!(c > a);
info!(c);
assert!(c < cc);
assert!(c <= cc);
assert!(c != cc);
assert!(cc >= c);
assert!(cc > c);
info!(cc);
}
| main | identifier_name |
localizations.ts | /*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import { createDecorator } from 'vs/platform/instantiation/common/instantiation';
import { Event } from 'vs/base/common/event';
export interface ILocalization {
languageId: string;
languageName?: string;
localizedLanguageName?: string;
translations: ITranslation[];
minimalTranslations?: { [key: string]: string };
}
export interface ITranslation {
id: string;
path: string;
}
export const ILocalizationsService = createDecorator<ILocalizationsService>('localizationsService');
export interface ILocalizationsService {
readonly _serviceBrand: undefined;
readonly onDidLanguagesChange: Event<void>;
getLanguageIds(): Promise<string[]>;
}
export function isValidLocalization(localization: ILocalization): boolean {
if (typeof localization.languageId !== 'string') {
return false;
}
if (!Array.isArray(localization.translations) || localization.translations.length === 0) {
return false;
}
for (const translation of localization.translations) {
if (typeof translation.id !== 'string') {
return false;
}
if (typeof translation.path !== 'string') {
return false;
}
}
if (localization.languageName && typeof localization.languageName !== 'string') {
return false;
}
if (localization.localizedLanguageName && typeof localization.localizedLanguageName !== 'string') { | return false;
}
return true;
} | random_line_split | |
localizations.ts | /*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import { createDecorator } from 'vs/platform/instantiation/common/instantiation';
import { Event } from 'vs/base/common/event';
export interface ILocalization {
languageId: string;
languageName?: string;
localizedLanguageName?: string;
translations: ITranslation[];
minimalTranslations?: { [key: string]: string };
}
export interface ITranslation {
id: string;
path: string;
}
export const ILocalizationsService = createDecorator<ILocalizationsService>('localizationsService');
export interface ILocalizationsService {
readonly _serviceBrand: undefined;
readonly onDidLanguagesChange: Event<void>;
getLanguageIds(): Promise<string[]>;
}
export function isValidLocalization(localization: ILocalization): boolean {
if (typeof localization.languageId !== 'string') {
return false;
}
if (!Array.isArray(localization.translations) || localization.translations.length === 0) {
return false;
}
for (const translation of localization.translations) {
if (typeof translation.id !== 'string') {
return false;
}
if (typeof translation.path !== 'string') {
return false;
}
}
if (localization.languageName && typeof localization.languageName !== 'string') |
if (localization.localizedLanguageName && typeof localization.localizedLanguageName !== 'string') {
return false;
}
return true;
}
| {
return false;
} | conditional_block |
localizations.ts | /*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import { createDecorator } from 'vs/platform/instantiation/common/instantiation';
import { Event } from 'vs/base/common/event';
export interface ILocalization {
languageId: string;
languageName?: string;
localizedLanguageName?: string;
translations: ITranslation[];
minimalTranslations?: { [key: string]: string };
}
export interface ITranslation {
id: string;
path: string;
}
export const ILocalizationsService = createDecorator<ILocalizationsService>('localizationsService');
export interface ILocalizationsService {
readonly _serviceBrand: undefined;
readonly onDidLanguagesChange: Event<void>;
getLanguageIds(): Promise<string[]>;
}
export function | (localization: ILocalization): boolean {
if (typeof localization.languageId !== 'string') {
return false;
}
if (!Array.isArray(localization.translations) || localization.translations.length === 0) {
return false;
}
for (const translation of localization.translations) {
if (typeof translation.id !== 'string') {
return false;
}
if (typeof translation.path !== 'string') {
return false;
}
}
if (localization.languageName && typeof localization.languageName !== 'string') {
return false;
}
if (localization.localizedLanguageName && typeof localization.localizedLanguageName !== 'string') {
return false;
}
return true;
}
| isValidLocalization | identifier_name |
localizations.ts | /*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import { createDecorator } from 'vs/platform/instantiation/common/instantiation';
import { Event } from 'vs/base/common/event';
export interface ILocalization {
languageId: string;
languageName?: string;
localizedLanguageName?: string;
translations: ITranslation[];
minimalTranslations?: { [key: string]: string };
}
export interface ITranslation {
id: string;
path: string;
}
export const ILocalizationsService = createDecorator<ILocalizationsService>('localizationsService');
export interface ILocalizationsService {
readonly _serviceBrand: undefined;
readonly onDidLanguagesChange: Event<void>;
getLanguageIds(): Promise<string[]>;
}
export function isValidLocalization(localization: ILocalization): boolean | {
if (typeof localization.languageId !== 'string') {
return false;
}
if (!Array.isArray(localization.translations) || localization.translations.length === 0) {
return false;
}
for (const translation of localization.translations) {
if (typeof translation.id !== 'string') {
return false;
}
if (typeof translation.path !== 'string') {
return false;
}
}
if (localization.languageName && typeof localization.languageName !== 'string') {
return false;
}
if (localization.localizedLanguageName && typeof localization.localizedLanguageName !== 'string') {
return false;
}
return true;
} | identifier_body | |
hash-chain.py | #-*- coding: utf-8 -*-
class Hash():
def __init__(self):
self.size = 20
self.slots = []
for i in xrange(0, 20):
self.slots.append([])
def __setitem__(self, key, value):
chain = self.slots[self.hash(key)]
for data in chain:
if data[0] == key:
data[1] = value
return True
chain.append([key, value])
def __getitem__(self, key):
chain = self.slots[self.hash(key)]
for data in chain:
if data[0] == key:
return data[1]
return None
def delete(self, key):
slot = self.hash(key)
chain = self.slots[slot]
for i, data in enumerate(chain):
|
raise ValueError("Key %s if not found." % key)
def hash(self, key):
return self.stoi(key) % self.size
def stoi(self, key):
inte = 0
for c in key:
inte = inte + ord(c)
return inte
h = Hash()
h["fuck"] = val = {"name": "jerry"}
h["ucfk"] = val2 = {"name": "lucy"}
h["ufck"] = val3 = {"name": "tony"}
h["uckf"] = val4 = {"name": "honey"}
assert h["fuck"] == val
assert h["ucfk"] == val2
assert h["ufck"] == val3
assert h["uckf"] == val4
h["love"] = "you"
h.delete("love")
assert h["love"] == None
h["you"] = "cool"
h["uoy"] = "sucks"
assert h["you"] == "cool"
assert h["uoy"] == "sucks"
h.delete("you")
assert h["you"] == None
h["uoy"] = "Fool"
assert h["uoy"] == "Fool"
| if data[0] == key:
del chain[i]
return True | conditional_block |
hash-chain.py | #-*- coding: utf-8 -*-
class Hash():
def __init__(self):
self.size = 20
self.slots = []
for i in xrange(0, 20):
self.slots.append([])
def __setitem__(self, key, value):
chain = self.slots[self.hash(key)]
for data in chain:
if data[0] == key:
data[1] = value
return True
chain.append([key, value])
def __getitem__(self, key):
chain = self.slots[self.hash(key)]
for data in chain:
if data[0] == key:
return data[1]
return None
def delete(self, key):
slot = self.hash(key)
chain = self.slots[slot]
for i, data in enumerate(chain):
if data[0] == key:
del chain[i]
return True
raise ValueError("Key %s if not found." % key)
def hash(self, key):
return self.stoi(key) % self.size
def stoi(self, key):
inte = 0
for c in key:
inte = inte + ord(c)
return inte
h = Hash()
h["fuck"] = val = {"name": "jerry"}
h["ucfk"] = val2 = {"name": "lucy"}
h["ufck"] = val3 = {"name": "tony"}
h["uckf"] = val4 = {"name": "honey"}
assert h["fuck"] == val
assert h["ucfk"] == val2
assert h["ufck"] == val3
assert h["uckf"] == val4
h["love"] = "you"
h.delete("love")
assert h["love"] == None
h["you"] = "cool" | assert h["uoy"] == "sucks"
h.delete("you")
assert h["you"] == None
h["uoy"] = "Fool"
assert h["uoy"] == "Fool" | h["uoy"] = "sucks"
assert h["you"] == "cool" | random_line_split |
hash-chain.py | #-*- coding: utf-8 -*-
class | ():
def __init__(self):
self.size = 20
self.slots = []
for i in xrange(0, 20):
self.slots.append([])
def __setitem__(self, key, value):
chain = self.slots[self.hash(key)]
for data in chain:
if data[0] == key:
data[1] = value
return True
chain.append([key, value])
def __getitem__(self, key):
chain = self.slots[self.hash(key)]
for data in chain:
if data[0] == key:
return data[1]
return None
def delete(self, key):
slot = self.hash(key)
chain = self.slots[slot]
for i, data in enumerate(chain):
if data[0] == key:
del chain[i]
return True
raise ValueError("Key %s if not found." % key)
def hash(self, key):
return self.stoi(key) % self.size
def stoi(self, key):
inte = 0
for c in key:
inte = inte + ord(c)
return inte
h = Hash()
h["fuck"] = val = {"name": "jerry"}
h["ucfk"] = val2 = {"name": "lucy"}
h["ufck"] = val3 = {"name": "tony"}
h["uckf"] = val4 = {"name": "honey"}
assert h["fuck"] == val
assert h["ucfk"] == val2
assert h["ufck"] == val3
assert h["uckf"] == val4
h["love"] = "you"
h.delete("love")
assert h["love"] == None
h["you"] = "cool"
h["uoy"] = "sucks"
assert h["you"] == "cool"
assert h["uoy"] == "sucks"
h.delete("you")
assert h["you"] == None
h["uoy"] = "Fool"
assert h["uoy"] == "Fool"
| Hash | identifier_name |
hash-chain.py | #-*- coding: utf-8 -*-
class Hash():
def __init__(self):
|
def __setitem__(self, key, value):
chain = self.slots[self.hash(key)]
for data in chain:
if data[0] == key:
data[1] = value
return True
chain.append([key, value])
def __getitem__(self, key):
chain = self.slots[self.hash(key)]
for data in chain:
if data[0] == key:
return data[1]
return None
def delete(self, key):
slot = self.hash(key)
chain = self.slots[slot]
for i, data in enumerate(chain):
if data[0] == key:
del chain[i]
return True
raise ValueError("Key %s if not found." % key)
def hash(self, key):
return self.stoi(key) % self.size
def stoi(self, key):
inte = 0
for c in key:
inte = inte + ord(c)
return inte
h = Hash()
h["fuck"] = val = {"name": "jerry"}
h["ucfk"] = val2 = {"name": "lucy"}
h["ufck"] = val3 = {"name": "tony"}
h["uckf"] = val4 = {"name": "honey"}
assert h["fuck"] == val
assert h["ucfk"] == val2
assert h["ufck"] == val3
assert h["uckf"] == val4
h["love"] = "you"
h.delete("love")
assert h["love"] == None
h["you"] = "cool"
h["uoy"] = "sucks"
assert h["you"] == "cool"
assert h["uoy"] == "sucks"
h.delete("you")
assert h["you"] == None
h["uoy"] = "Fool"
assert h["uoy"] == "Fool"
| self.size = 20
self.slots = []
for i in xrange(0, 20):
self.slots.append([]) | identifier_body |
quick_test.py | import sys
import os
from ..data.molecular_species import molecular_species
from ..data.reaction_mechanism_class import reaction_mechanism
from ..data.condition_class import condition
from ..data.reagent import reagent
from ..data.puzzle_class import puzzle
from ..data.solution_class import solution
def | (class_obj):
return class_obj.__name__
# depends on JSON base class
for class_being_tested in [molecular_species, condition, reaction_mechanism, reagent, puzzle, solution]:
system_output = sys.stdout # store stdout
sys.stdout = open(os.getcwd() + "/testing_result_" + name(class_being_tested) + ".txt", "w") # pipe to file
test_result = class_being_tested.test()
sys.stdout.close() # close file
sys.stdout = system_output #replace stdout
if test_result:
print("PASSED", name(class_being_tested), sep=" ")
else:
print("FAILED", name(class_being_tested), sep=" ")
| name | identifier_name |
quick_test.py | import sys
import os
from ..data.molecular_species import molecular_species
from ..data.reaction_mechanism_class import reaction_mechanism
from ..data.condition_class import condition
from ..data.reagent import reagent
from ..data.puzzle_class import puzzle
from ..data.solution_class import solution
def name(class_obj):
return class_obj.__name__
# depends on JSON base class
for class_being_tested in [molecular_species, condition, reaction_mechanism, reagent, puzzle, solution]:
system_output = sys.stdout # store stdout
sys.stdout = open(os.getcwd() + "/testing_result_" + name(class_being_tested) + ".txt", "w") # pipe to file
test_result = class_being_tested.test()
sys.stdout.close() # close file
sys.stdout = system_output #replace stdout
if test_result:
print("PASSED", name(class_being_tested), sep=" ")
else:
| print("FAILED", name(class_being_tested), sep=" ") | conditional_block | |
quick_test.py | import sys
import os
from ..data.molecular_species import molecular_species
from ..data.reaction_mechanism_class import reaction_mechanism
from ..data.condition_class import condition
from ..data.reagent import reagent
from ..data.puzzle_class import puzzle
from ..data.solution_class import solution
def name(class_obj):
return class_obj.__name__
# depends on JSON base class
for class_being_tested in [molecular_species, condition, reaction_mechanism, reagent, puzzle, solution]:
system_output = sys.stdout # store stdout | test_result = class_being_tested.test()
sys.stdout.close() # close file
sys.stdout = system_output #replace stdout
if test_result:
print("PASSED", name(class_being_tested), sep=" ")
else:
print("FAILED", name(class_being_tested), sep=" ") | sys.stdout = open(os.getcwd() + "/testing_result_" + name(class_being_tested) + ".txt", "w") # pipe to file | random_line_split |
quick_test.py | import sys
import os
from ..data.molecular_species import molecular_species
from ..data.reaction_mechanism_class import reaction_mechanism
from ..data.condition_class import condition
from ..data.reagent import reagent
from ..data.puzzle_class import puzzle
from ..data.solution_class import solution
def name(class_obj):
|
# depends on JSON base class
for class_being_tested in [molecular_species, condition, reaction_mechanism, reagent, puzzle, solution]:
system_output = sys.stdout # store stdout
sys.stdout = open(os.getcwd() + "/testing_result_" + name(class_being_tested) + ".txt", "w") # pipe to file
test_result = class_being_tested.test()
sys.stdout.close() # close file
sys.stdout = system_output #replace stdout
if test_result:
print("PASSED", name(class_being_tested), sep=" ")
else:
print("FAILED", name(class_being_tested), sep=" ")
| return class_obj.__name__ | identifier_body |
moosetree.py | # moosetree.py ---
#
# Filename: moosetree.py
# Description:
# Author: subhasis ray
# Maintainer:
# Created: Tue Jun 23 18:54:14 2009 (+0530)
# Version:
# Last-Updated: Sun Jul 5 01:35:11 2009 (+0530)
# By: subhasis ray
# Update #: 137
# URL:
# Keywords:
# Compatibility:
#
#
# Commentary:
#
#
#
#
# Change log:
#
#
#
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street, Fifth
# Floor, Boston, MA 02110-1301, USA.
#
#
# Code:
import moose
import sys
from PyQt4 import QtCore, QtGui
class MooseTreeItem(QtGui.QTreeWidgetItem):
def __init__(self, *args):
QtGui.QTreeWidgetItem.__init__(self, *args)
self.mooseObj_ = None
def setMooseObject(self, mooseObject):
if isinstance(mooseObject, moose.Id):
self.mooseObj_ = moose.Neutral(mooseObject)
elif isinstance(mooseObject, moose.PyMooseBase):
self.mooseObj_ = mooseObject
else:
raise Error
self.setText(0, QtCore.QString(self.mooseObj_.name))
self.setToolTip(0, QtCore.QString('class:' + self.mooseObj_.className))
def getMooseObject(self):
return self.mooseObj_
def | (self, text):
self.setText(0, QtCore.QString(self.mooseObj_.name))
class MooseTreeWidget(QtGui.QTreeWidget):
def __init__(self, *args):
QtGui.QTreeWidget.__init__(self, *args)
self.rootObject = moose.Neutral('/')
self.itemList = []
self.setupTree(self.rootObject, self, self.itemList)
self.setCurrentItem(self.itemList[0]) # Make root the default item
def setupTree(self, mooseObject, parent, itemlist):
item = MooseTreeItem(parent)
item.setMooseObject(mooseObject)
itemlist.append(item)
for child in mooseObject.children():
childObj = moose.Neutral(child)
self.setupTree(childObj, item, itemlist)
return item
def recreateTree(self):
self.clear()
self.itemList = []
self.setupTree(moose.Neutral('/'), self, self.itemList)
def insertMooseObjectSlot(self, class_name):
try:
class_name = str(class_name)
class_obj = eval('moose.' + class_name)
current = self.currentItem()
new_item = MooseTreeItem(current)
parent = current.getMooseObject()
# print 'creating new', class_name, 'under', parent.path
new_obj = class_obj(class_name, parent)
new_item.setMooseObject(new_obj)
current.addChild(new_item)
self.itemList.append(new_item)
except AttributeError:
print class_name, ': no such class in module moose'
if __name__ == '__main__':
c = moose.Compartment("c")
d = moose.HHChannel("chan", c)
app = QtGui.QApplication(sys.argv)
widget = MooseTreeWidget()
# widget = QtGui.QTreeWidget()
# items = []
# root = moose.Neutral('/')
# parent = widget
# item = setupTree(root, widget, items)
# while stack:
# mooseObject = stack.pop()
# item = QtGui.QTreeWidgetItem(parent)
# item.setText(0, widget.tr(mooseObject.name))
# parent = item
# for child in mooseObject.children():
# stack.append(moose.Neutral(child))
widget.show()
sys.exit(app.exec_())
#
# moosetree.py ends here
| updateSlot | identifier_name |
moosetree.py | # moosetree.py ---
#
# Filename: moosetree.py
# Description:
# Author: subhasis ray
# Maintainer:
# Created: Tue Jun 23 18:54:14 2009 (+0530)
# Version:
# Last-Updated: Sun Jul 5 01:35:11 2009 (+0530)
# By: subhasis ray
# Update #: 137
# URL:
# Keywords:
# Compatibility:
#
#
# Commentary:
#
#
#
#
# Change log:
#
#
#
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street, Fifth
# Floor, Boston, MA 02110-1301, USA.
#
#
# Code:
import moose
import sys
from PyQt4 import QtCore, QtGui
class MooseTreeItem(QtGui.QTreeWidgetItem):
def __init__(self, *args):
QtGui.QTreeWidgetItem.__init__(self, *args)
self.mooseObj_ = None
def setMooseObject(self, mooseObject):
if isinstance(mooseObject, moose.Id):
self.mooseObj_ = moose.Neutral(mooseObject)
elif isinstance(mooseObject, moose.PyMooseBase):
self.mooseObj_ = mooseObject
else:
raise Error
self.setText(0, QtCore.QString(self.mooseObj_.name))
self.setToolTip(0, QtCore.QString('class:' + self.mooseObj_.className))
def getMooseObject(self):
return self.mooseObj_
def updateSlot(self, text):
self.setText(0, QtCore.QString(self.mooseObj_.name))
class MooseTreeWidget(QtGui.QTreeWidget):
def __init__(self, *args):
|
def setupTree(self, mooseObject, parent, itemlist):
item = MooseTreeItem(parent)
item.setMooseObject(mooseObject)
itemlist.append(item)
for child in mooseObject.children():
childObj = moose.Neutral(child)
self.setupTree(childObj, item, itemlist)
return item
def recreateTree(self):
self.clear()
self.itemList = []
self.setupTree(moose.Neutral('/'), self, self.itemList)
def insertMooseObjectSlot(self, class_name):
try:
class_name = str(class_name)
class_obj = eval('moose.' + class_name)
current = self.currentItem()
new_item = MooseTreeItem(current)
parent = current.getMooseObject()
# print 'creating new', class_name, 'under', parent.path
new_obj = class_obj(class_name, parent)
new_item.setMooseObject(new_obj)
current.addChild(new_item)
self.itemList.append(new_item)
except AttributeError:
print class_name, ': no such class in module moose'
if __name__ == '__main__':
c = moose.Compartment("c")
d = moose.HHChannel("chan", c)
app = QtGui.QApplication(sys.argv)
widget = MooseTreeWidget()
# widget = QtGui.QTreeWidget()
# items = []
# root = moose.Neutral('/')
# parent = widget
# item = setupTree(root, widget, items)
# while stack:
# mooseObject = stack.pop()
# item = QtGui.QTreeWidgetItem(parent)
# item.setText(0, widget.tr(mooseObject.name))
# parent = item
# for child in mooseObject.children():
# stack.append(moose.Neutral(child))
widget.show()
sys.exit(app.exec_())
#
# moosetree.py ends here
| QtGui.QTreeWidget.__init__(self, *args)
self.rootObject = moose.Neutral('/')
self.itemList = []
self.setupTree(self.rootObject, self, self.itemList)
self.setCurrentItem(self.itemList[0]) # Make root the default item | identifier_body |
moosetree.py | # moosetree.py ---
#
# Filename: moosetree.py
# Description:
# Author: subhasis ray
# Maintainer:
# Created: Tue Jun 23 18:54:14 2009 (+0530)
# Version:
# Last-Updated: Sun Jul 5 01:35:11 2009 (+0530)
# By: subhasis ray
# Update #: 137
# URL:
# Keywords:
# Compatibility:
#
#
# Commentary:
#
#
#
#
# Change log:
#
#
#
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street, Fifth
# Floor, Boston, MA 02110-1301, USA.
#
#
# Code:
import moose
import sys
from PyQt4 import QtCore, QtGui
class MooseTreeItem(QtGui.QTreeWidgetItem):
def __init__(self, *args):
QtGui.QTreeWidgetItem.__init__(self, *args)
self.mooseObj_ = None
def setMooseObject(self, mooseObject):
if isinstance(mooseObject, moose.Id):
self.mooseObj_ = moose.Neutral(mooseObject)
elif isinstance(mooseObject, moose.PyMooseBase):
|
else:
raise Error
self.setText(0, QtCore.QString(self.mooseObj_.name))
self.setToolTip(0, QtCore.QString('class:' + self.mooseObj_.className))
def getMooseObject(self):
return self.mooseObj_
def updateSlot(self, text):
self.setText(0, QtCore.QString(self.mooseObj_.name))
class MooseTreeWidget(QtGui.QTreeWidget):
def __init__(self, *args):
QtGui.QTreeWidget.__init__(self, *args)
self.rootObject = moose.Neutral('/')
self.itemList = []
self.setupTree(self.rootObject, self, self.itemList)
self.setCurrentItem(self.itemList[0]) # Make root the default item
def setupTree(self, mooseObject, parent, itemlist):
item = MooseTreeItem(parent)
item.setMooseObject(mooseObject)
itemlist.append(item)
for child in mooseObject.children():
childObj = moose.Neutral(child)
self.setupTree(childObj, item, itemlist)
return item
def recreateTree(self):
self.clear()
self.itemList = []
self.setupTree(moose.Neutral('/'), self, self.itemList)
def insertMooseObjectSlot(self, class_name):
try:
class_name = str(class_name)
class_obj = eval('moose.' + class_name)
current = self.currentItem()
new_item = MooseTreeItem(current)
parent = current.getMooseObject()
# print 'creating new', class_name, 'under', parent.path
new_obj = class_obj(class_name, parent)
new_item.setMooseObject(new_obj)
current.addChild(new_item)
self.itemList.append(new_item)
except AttributeError:
print class_name, ': no such class in module moose'
if __name__ == '__main__':
c = moose.Compartment("c")
d = moose.HHChannel("chan", c)
app = QtGui.QApplication(sys.argv)
widget = MooseTreeWidget()
# widget = QtGui.QTreeWidget()
# items = []
# root = moose.Neutral('/')
# parent = widget
# item = setupTree(root, widget, items)
# while stack:
# mooseObject = stack.pop()
# item = QtGui.QTreeWidgetItem(parent)
# item.setText(0, widget.tr(mooseObject.name))
# parent = item
# for child in mooseObject.children():
# stack.append(moose.Neutral(child))
widget.show()
sys.exit(app.exec_())
#
# moosetree.py ends here
| self.mooseObj_ = mooseObject | conditional_block |
moosetree.py | # moosetree.py ---
#
# Filename: moosetree.py
# Description:
# Author: subhasis ray
# Maintainer:
# Created: Tue Jun 23 18:54:14 2009 (+0530)
# Version:
# Last-Updated: Sun Jul 5 01:35:11 2009 (+0530)
# By: subhasis ray
# Update #: 137
# URL:
# Keywords: | # Compatibility:
#
#
# Commentary:
#
#
#
#
# Change log:
#
#
#
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street, Fifth
# Floor, Boston, MA 02110-1301, USA.
#
#
# Code:
import moose
import sys
from PyQt4 import QtCore, QtGui
class MooseTreeItem(QtGui.QTreeWidgetItem):
    """Tree-widget item wrapping a single MOOSE object.

    Holds a reference to the underlying moose object and mirrors its
    name and class name in the item's display text and tooltip.
    """

    def __init__(self, *args):
        QtGui.QTreeWidgetItem.__init__(self, *args)
        self.mooseObj_ = None  # underlying moose object, set via setMooseObject()

    def setMooseObject(self, mooseObject):
        """Attach a moose object to this item and refresh text/tooltip.

        Accepts either a moose.Id (wrapped in a Neutral so we get the full
        object interface) or any moose.PyMooseBase instance.

        Raises TypeError for any other argument type.
        """
        if isinstance(mooseObject, moose.Id):
            self.mooseObj_ = moose.Neutral(mooseObject)
        elif isinstance(mooseObject, moose.PyMooseBase):
            self.mooseObj_ = mooseObject
        else:
            # BUG FIX: the original did `raise Error`, but `Error` is an
            # undefined name, so this branch crashed with NameError instead
            # of reporting the actual problem.
            raise TypeError('expected moose.Id or moose.PyMooseBase, got %r'
                            % (type(mooseObject),))
        self.setText(0, QtCore.QString(self.mooseObj_.name))
        self.setToolTip(0, QtCore.QString('class:' + self.mooseObj_.className))

    def getMooseObject(self):
        """Return the wrapped moose object (None if none has been set)."""
        return self.mooseObj_

    def updateSlot(self, text):
        """Refresh the displayed text from the moose object's current name.

        NOTE(review): the `text` argument is ignored; the name is re-read
        from the moose object itself.
        """
        self.setText(0, QtCore.QString(self.mooseObj_.name))
class MooseTreeWidget(QtGui.QTreeWidget):
    """Tree view of the MOOSE element hierarchy rooted at '/'."""

    def __init__(self, *args):
        QtGui.QTreeWidget.__init__(self, *args)
        # Root of the moose object tree; all elements live under '/'.
        self.rootObject = moose.Neutral('/')
        # Flat list of every MooseTreeItem created, in creation order.
        self.itemList = []
        self.setupTree(self.rootObject, self, self.itemList)
        self.setCurrentItem(self.itemList[0]) # Make root the default item

    def setupTree(self, mooseObject, parent, itemlist):
        # Depth-first construction: create an item for mooseObject under
        # parent, record it in itemlist, then recurse into each child.
        item = MooseTreeItem(parent)
        item.setMooseObject(mooseObject)
        itemlist.append(item)
        for child in mooseObject.children():
            childObj = moose.Neutral(child)
            self.setupTree(childObj, item, itemlist)
        return item

    def recreateTree(self):
        # Discard all items and rebuild the whole view from '/'.
        self.clear()
        self.itemList = []
        self.setupTree(moose.Neutral('/'), self, self.itemList)

    def insertMooseObjectSlot(self, class_name):
        # Create a new moose object of class `class_name` under the moose
        # object of the currently selected item, and add a matching child
        # item to the tree.
        try:
            class_name = str(class_name)
            # NOTE(review): eval on a caller-supplied class name; if the name
            # can come from untrusted input, getattr(moose, class_name)
            # would be the safer lookup.
            class_obj = eval('moose.' + class_name)
            current = self.currentItem()
            new_item = MooseTreeItem(current)
            parent = current.getMooseObject()
            # print 'creating new', class_name, 'under', parent.path
            new_obj = class_obj(class_name, parent)
            new_item.setMooseObject(new_obj)
            current.addChild(new_item)
            self.itemList.append(new_item)
        except AttributeError:
            # Lookup failed: moose has no class of that name.
            print class_name, ': no such class in module moose'
if __name__ == '__main__':
    # Manual smoke test: create a couple of moose objects so the tree has
    # something to show, then display the widget in a Qt event loop.
    c = moose.Compartment("c")
    d = moose.HHChannel("chan", c)
    app = QtGui.QApplication(sys.argv)
    widget = MooseTreeWidget()
    # widget = QtGui.QTreeWidget()
    # items = []
    # root = moose.Neutral('/')
    # parent = widget
    # item = setupTree(root, widget, items)
    # while stack:
    #     mooseObject = stack.pop()
    #     item = QtGui.QTreeWidgetItem(parent)
    #     item.setText(0, widget.tr(mooseObject.name))
    #     parent = item
    #     for child in mooseObject.children():
    #         stack.append(moose.Neutral(child))
    widget.show()
    sys.exit(app.exec_())
#
# moosetree.py ends here | random_line_split | |
infer_regusage.rs | //! Infers how each function uses every register
//! For every function, patch all of its call sites to ignore registers that the
//! callee doesn't read and to preserve register values that the callee
//! preserves. Then, record which registers it reads and which registers it
//! preserves.
//!
//! After this, all functions should have mutable [`regusage`][RadecoFunction::regusage]s.
//!
//! This analysis is super conservative; for example, if a function preserves a
//! register by pushing it onto the stack and popping it back right before
//! returning, it is considered to be read and not preserved because we can't
//! guarantee that that stack location is never subsequently read or modified.
//! See #147 for further discussion
use analysis::analyzer::{
all, Action, Analyzer, AnalyzerInfo, AnalyzerKind, AnalyzerResult, Change, FuncAnalyzer,
ModuleAnalyzer,
};
use analysis::dce::DCE;
use analysis::inst_combine::Combiner;
use frontend::radeco_containers::{RadecoFunction, RadecoModule};
use middle::ir;
use middle::regfile::*;
use middle::ssa::cfg_traits::*;
use middle::ssa::ssa_traits::*;
use middle::ssa::ssastorage::SSAStorage;
use middle::ssa::utils;
use petgraph::visit::{DfsPostOrder, Walker};
use std::any::Any;
use std::collections::{BTreeMap, HashSet};
const NAME: &str = "inferer";
const REQUIRES: &[AnalyzerKind] = &[];
pub const INFO: AnalyzerInfo = AnalyzerInfo {
name: NAME,
kind: AnalyzerKind::Inferer,
requires: REQUIRES,
uses_policy: false,
};
#[derive(Debug)]
pub struct Inferer {
/// Register file of the current architecture.
reginfo: SubRegisterFile,
/// Addresses of the functions we've already analyzed
analyzed: HashSet<u64>,
}
impl Analyzer for Inferer {
fn info(&self) -> &'static AnalyzerInfo {
&INFO
}
fn as_any(&self) -> &dyn Any {
self
}
}
impl ModuleAnalyzer for Inferer {
/// Calls `patch_fn`, `dce::collect`, and `analyze_fn` on every function,
/// callees first
fn analyze<T: FnMut(Box<Change>) -> Action>(
&mut self,
rmod: &mut RadecoModule,
_policy: Option<T>,
) -> Option<Box<AnalyzerResult>> {
// for imports, *ASSUME* that the callconv that r2 says is correct
let mut new_analyzed = Vec::new();
{
let imp_ru_iter = rmod.imports.iter().filter_map(|(&imp_addr, imp_info)| {
let imp_rfn = imp_info.rfn.borrow();
let regusage = self.reginfo.r2callconv_to_register_usage(
imp_rfn.callconv.as_ref()?, // ignore imports without callconvs
&*imp_rfn.callconv_name,
)?;
Some((imp_addr, regusage))
});
for (imp_addr, imp_ru) in imp_ru_iter {
rmod.functions.get_mut(&imp_addr).unwrap().regusage = imp_ru;
new_analyzed.push(imp_addr);
}
}
for func in new_analyzed {
self.analyzed.insert(func);
}
let mut dfs_wi = DfsPostOrder::empty(&rmod.callgraph).iter(&rmod.callgraph);
// pick a function ...
for fn_ni in rmod.callgraph.node_indices() {
// ... and start a dfs on it
dfs_wi.inner_mut().move_to(fn_ni);
while let Some(fn_to_anal) = dfs_wi.next() {
let fn_addr = rmod.callgraph[fn_to_anal];
// ignore functions already in `call_convs` (probably because its an import)
if !self.analyzed.contains(&fn_addr) {
self.patch_fn(fn_addr, &mut rmod.functions);
let rfn = &mut rmod.functions.get_mut(&fn_addr).unwrap();
let mut dce = DCE::new();
dce.analyze(rfn, Some(all));
let mut combiner = Combiner::new();
combiner.analyze(rfn, Some(all));
let ru = self.analyze_fn(rfn, &self.reginfo).unwrap_or_else(|| {
radeco_err!("Failed to analyze fn: {:?} (@ {:#X})", rfn.name, fn_addr);
// if analysis failed, default to "reads and clobbers everything"
self.reginfo.new_register_usage()
});
rfn.regusage = ru;
self.analyzed.insert(fn_addr);
}
}
}
None
}
}
impl Inferer {
pub fn new(reginfo: SubRegisterFile) -> Inferer {
Inferer {
reginfo: reginfo,
analyzed: HashSet::new(),
}
}
/// Using the callconv info we've gathered so far, patch-up call sites to
/// to remove arguments that the callee doesn't read and make values in
/// callee-saved registers be preserved across the call.
fn patch_fn(&self, fn_addr: u64, fn_map: &mut BTreeMap<u64, RadecoFunction>) -> () {
radeco_trace!("patching calls in fn: {}", fn_map[&fn_addr].name);
for node in fn_map[&fn_addr].ssa().inorder_walk() {
if let Ok(NodeType::Op(ir::MOpcode::OpCall)) =
fn_map[&fn_addr].ssa().node_data(node).map(|nd| nd.nt)
{
self.patch_call_node(node, fn_addr, fn_map)
.unwrap_or_else(|| {
radeco_warn!(
"failed to remove unused args for call at {:#X}",
fn_map[&fn_addr].ssa().address(node).unwrap()
);
});
}
}
}
fn | (
&self,
call_node: <SSAStorage as SSA>::ValueRef,
fn_addr: u64,
fn_map: &mut BTreeMap<u64, RadecoFunction>,
) -> Option<()> {
// bail on indirect or weird call
let (call_tgt_addr, call_reg_map) = direct_call_info(fn_map[&fn_addr].ssa(), call_node)?;
// remove unread args
for (regid, &op_node) in &call_reg_map {
if fn_map[&call_tgt_addr].regusage.is_ignored(regid) {
fn_map
.get_mut(&fn_addr)
.unwrap()
.ssa_mut()
.op_unuse(call_node, op_node);
}
}
// bridge preserved registers
for (regid, (use_node, _)) in utils::call_rets(call_node, fn_map[&fn_addr].ssa()) {
if fn_map[&call_tgt_addr].regusage.is_preserved(regid) {
fn_map
.get_mut(&fn_addr)
.unwrap()
.ssa_mut()
.replace_value(use_node, call_reg_map[regid]);
}
}
Some(())
}
fn analyze_fn(&self, rfn: &RadecoFunction, reginfo: &SubRegisterFile) -> Option<RegisterUsage> {
radeco_trace!("analyzing fn: {}", rfn.name);
let ssa = rfn.ssa();
let entry_regstate_node = ssa.registers_in(ssa.entry_node()?)?;
let exit_regstate_node = ssa.registers_in(ssa.exit_node()?)?;
// some registers may not be present in the entry node;
// this means that the function neither reads nor preserves that register
let entry_regstate = utils::register_state_info(entry_regstate_node, ssa);
let exit_regstate = utils::register_state_info(exit_regstate_node, ssa);
let mut ret = reginfo.new_register_usage();
ret.set_all_ignored();
for regid in ssa.regfile.iter_register_ids() {
// ignore registers not in entry regstate
if let Some(&(reg_val_entry, _)) = entry_regstate.get(regid) {
// bail if a register isn't present in exit regstate
let &(reg_val_exit, _) = exit_regstate.get(regid)?;
if reg_val_exit == reg_val_entry {
ret.set_preserved(regid);
}
// find all uses, ignoring entry/exit register state
let mut uses_iter = ssa
.uses_of(reg_val_entry)
.into_iter()
.filter(|&n| n != entry_regstate_node && n != exit_regstate_node);
if uses_iter.next().is_some() {
ret.set_read(regid);
}
}
}
Some(ret)
}
}
fn direct_call_info(
ssa: &SSAStorage,
call_node: <SSAStorage as SSA>::ValueRef,
) -> Option<(u64, RegisterMap<<SSAStorage as SSA>::ValueRef>)> {
let callinfo = utils::call_info(call_node, ssa)?;
Some((ssa.constant(callinfo.target)?, callinfo.register_args))
}
| patch_call_node | identifier_name |
infer_regusage.rs | //! Infers how each function uses every register
//! For every function, patch all of its call sites to ignore registers that the
//! callee doesn't read and to preserve register values that the callee
//! preserves. Then, record which registers it reads and which registers it
//! preserves.
//!
//! After this, all functions should have mutable [`regusage`][RadecoFunction::regusage]s.
//!
//! This analysis is super conservative; for example, if a function preserves a
//! register by pushing it onto the stack and popping it back right before
//! returning, it is considered to be read and not preserved because we can't
//! guarantee that that stack location is never subsequently read or modified.
//! See #147 for further discussion
use analysis::analyzer::{
all, Action, Analyzer, AnalyzerInfo, AnalyzerKind, AnalyzerResult, Change, FuncAnalyzer,
ModuleAnalyzer,
};
use analysis::dce::DCE;
use analysis::inst_combine::Combiner;
use frontend::radeco_containers::{RadecoFunction, RadecoModule};
use middle::ir;
use middle::regfile::*;
use middle::ssa::cfg_traits::*;
use middle::ssa::ssa_traits::*;
use middle::ssa::ssastorage::SSAStorage;
use middle::ssa::utils;
use petgraph::visit::{DfsPostOrder, Walker};
use std::any::Any;
use std::collections::{BTreeMap, HashSet};
const NAME: &str = "inferer";
const REQUIRES: &[AnalyzerKind] = &[];
pub const INFO: AnalyzerInfo = AnalyzerInfo {
name: NAME,
kind: AnalyzerKind::Inferer,
requires: REQUIRES,
uses_policy: false,
};
#[derive(Debug)]
pub struct Inferer {
/// Register file of the current architecture.
reginfo: SubRegisterFile,
/// Addresses of the functions we've already analyzed
analyzed: HashSet<u64>,
}
impl Analyzer for Inferer {
fn info(&self) -> &'static AnalyzerInfo {
&INFO
}
fn as_any(&self) -> &dyn Any {
self
}
}
impl ModuleAnalyzer for Inferer {
/// Calls `patch_fn`, `dce::collect`, and `analyze_fn` on every function,
/// callees first
fn analyze<T: FnMut(Box<Change>) -> Action>(
&mut self,
rmod: &mut RadecoModule,
_policy: Option<T>,
) -> Option<Box<AnalyzerResult>> {
// for imports, *ASSUME* that the callconv that r2 says is correct
let mut new_analyzed = Vec::new();
{
let imp_ru_iter = rmod.imports.iter().filter_map(|(&imp_addr, imp_info)| {
let imp_rfn = imp_info.rfn.borrow();
let regusage = self.reginfo.r2callconv_to_register_usage(
imp_rfn.callconv.as_ref()?, // ignore imports without callconvs
&*imp_rfn.callconv_name,
)?;
Some((imp_addr, regusage))
});
for (imp_addr, imp_ru) in imp_ru_iter {
rmod.functions.get_mut(&imp_addr).unwrap().regusage = imp_ru;
new_analyzed.push(imp_addr);
}
}
for func in new_analyzed {
self.analyzed.insert(func);
}
let mut dfs_wi = DfsPostOrder::empty(&rmod.callgraph).iter(&rmod.callgraph);
// pick a function ...
for fn_ni in rmod.callgraph.node_indices() {
// ... and start a dfs on it
dfs_wi.inner_mut().move_to(fn_ni);
while let Some(fn_to_anal) = dfs_wi.next() {
let fn_addr = rmod.callgraph[fn_to_anal];
// ignore functions already in `call_convs` (probably because its an import)
if !self.analyzed.contains(&fn_addr) |
}
}
None
}
}
impl Inferer {
pub fn new(reginfo: SubRegisterFile) -> Inferer {
Inferer {
reginfo: reginfo,
analyzed: HashSet::new(),
}
}
/// Using the callconv info we've gathered so far, patch-up call sites to
/// to remove arguments that the callee doesn't read and make values in
/// callee-saved registers be preserved across the call.
fn patch_fn(&self, fn_addr: u64, fn_map: &mut BTreeMap<u64, RadecoFunction>) -> () {
radeco_trace!("patching calls in fn: {}", fn_map[&fn_addr].name);
for node in fn_map[&fn_addr].ssa().inorder_walk() {
if let Ok(NodeType::Op(ir::MOpcode::OpCall)) =
fn_map[&fn_addr].ssa().node_data(node).map(|nd| nd.nt)
{
self.patch_call_node(node, fn_addr, fn_map)
.unwrap_or_else(|| {
radeco_warn!(
"failed to remove unused args for call at {:#X}",
fn_map[&fn_addr].ssa().address(node).unwrap()
);
});
}
}
}
fn patch_call_node(
&self,
call_node: <SSAStorage as SSA>::ValueRef,
fn_addr: u64,
fn_map: &mut BTreeMap<u64, RadecoFunction>,
) -> Option<()> {
// bail on indirect or weird call
let (call_tgt_addr, call_reg_map) = direct_call_info(fn_map[&fn_addr].ssa(), call_node)?;
// remove unread args
for (regid, &op_node) in &call_reg_map {
if fn_map[&call_tgt_addr].regusage.is_ignored(regid) {
fn_map
.get_mut(&fn_addr)
.unwrap()
.ssa_mut()
.op_unuse(call_node, op_node);
}
}
// bridge preserved registers
for (regid, (use_node, _)) in utils::call_rets(call_node, fn_map[&fn_addr].ssa()) {
if fn_map[&call_tgt_addr].regusage.is_preserved(regid) {
fn_map
.get_mut(&fn_addr)
.unwrap()
.ssa_mut()
.replace_value(use_node, call_reg_map[regid]);
}
}
Some(())
}
fn analyze_fn(&self, rfn: &RadecoFunction, reginfo: &SubRegisterFile) -> Option<RegisterUsage> {
radeco_trace!("analyzing fn: {}", rfn.name);
let ssa = rfn.ssa();
let entry_regstate_node = ssa.registers_in(ssa.entry_node()?)?;
let exit_regstate_node = ssa.registers_in(ssa.exit_node()?)?;
// some registers may not be present in the entry node;
// this means that the function neither reads nor preserves that register
let entry_regstate = utils::register_state_info(entry_regstate_node, ssa);
let exit_regstate = utils::register_state_info(exit_regstate_node, ssa);
let mut ret = reginfo.new_register_usage();
ret.set_all_ignored();
for regid in ssa.regfile.iter_register_ids() {
// ignore registers not in entry regstate
if let Some(&(reg_val_entry, _)) = entry_regstate.get(regid) {
// bail if a register isn't present in exit regstate
let &(reg_val_exit, _) = exit_regstate.get(regid)?;
if reg_val_exit == reg_val_entry {
ret.set_preserved(regid);
}
// find all uses, ignoring entry/exit register state
let mut uses_iter = ssa
.uses_of(reg_val_entry)
.into_iter()
.filter(|&n| n != entry_regstate_node && n != exit_regstate_node);
if uses_iter.next().is_some() {
ret.set_read(regid);
}
}
}
Some(ret)
}
}
fn direct_call_info(
ssa: &SSAStorage,
call_node: <SSAStorage as SSA>::ValueRef,
) -> Option<(u64, RegisterMap<<SSAStorage as SSA>::ValueRef>)> {
let callinfo = utils::call_info(call_node, ssa)?;
Some((ssa.constant(callinfo.target)?, callinfo.register_args))
}
| {
self.patch_fn(fn_addr, &mut rmod.functions);
let rfn = &mut rmod.functions.get_mut(&fn_addr).unwrap();
let mut dce = DCE::new();
dce.analyze(rfn, Some(all));
let mut combiner = Combiner::new();
combiner.analyze(rfn, Some(all));
let ru = self.analyze_fn(rfn, &self.reginfo).unwrap_or_else(|| {
radeco_err!("Failed to analyze fn: {:?} (@ {:#X})", rfn.name, fn_addr);
// if analysis failed, default to "reads and clobbers everything"
self.reginfo.new_register_usage()
});
rfn.regusage = ru;
self.analyzed.insert(fn_addr);
} | conditional_block |
infer_regusage.rs | //! Infers how each function uses every register
//! For every function, patch all of its call sites to ignore registers that the
//! callee doesn't read and to preserve register values that the callee
//! preserves. Then, record which registers it reads and which registers it
//! preserves.
//!
//! After this, all functions should have mutable [`regusage`][RadecoFunction::regusage]s.
//!
//! This analysis is super conservative; for example, if a function preserves a
//! register by pushing it onto the stack and popping it back right before
//! returning, it is considered to be read and not preserved because we can't
//! guarantee that that stack location is never subsequently read or modified.
//! See #147 for further discussion
use analysis::analyzer::{
all, Action, Analyzer, AnalyzerInfo, AnalyzerKind, AnalyzerResult, Change, FuncAnalyzer,
ModuleAnalyzer,
};
use analysis::dce::DCE;
use analysis::inst_combine::Combiner;
use frontend::radeco_containers::{RadecoFunction, RadecoModule};
use middle::ir;
use middle::regfile::*;
use middle::ssa::cfg_traits::*;
use middle::ssa::ssa_traits::*;
use middle::ssa::ssastorage::SSAStorage;
use middle::ssa::utils;
use petgraph::visit::{DfsPostOrder, Walker};
use std::any::Any;
use std::collections::{BTreeMap, HashSet};
const NAME: &str = "inferer";
const REQUIRES: &[AnalyzerKind] = &[];
pub const INFO: AnalyzerInfo = AnalyzerInfo {
name: NAME,
kind: AnalyzerKind::Inferer,
requires: REQUIRES,
uses_policy: false,
};
#[derive(Debug)]
pub struct Inferer {
/// Register file of the current architecture.
reginfo: SubRegisterFile,
/// Addresses of the functions we've already analyzed
analyzed: HashSet<u64>,
}
impl Analyzer for Inferer {
fn info(&self) -> &'static AnalyzerInfo {
&INFO
}
fn as_any(&self) -> &dyn Any {
self
}
}
impl ModuleAnalyzer for Inferer {
/// Calls `patch_fn`, `dce::collect`, and `analyze_fn` on every function,
/// callees first
fn analyze<T: FnMut(Box<Change>) -> Action>(
&mut self,
rmod: &mut RadecoModule,
_policy: Option<T>,
) -> Option<Box<AnalyzerResult>> {
// for imports, *ASSUME* that the callconv that r2 says is correct
let mut new_analyzed = Vec::new();
{
let imp_ru_iter = rmod.imports.iter().filter_map(|(&imp_addr, imp_info)| {
let imp_rfn = imp_info.rfn.borrow();
let regusage = self.reginfo.r2callconv_to_register_usage(
imp_rfn.callconv.as_ref()?, // ignore imports without callconvs
&*imp_rfn.callconv_name,
)?;
Some((imp_addr, regusage))
});
for (imp_addr, imp_ru) in imp_ru_iter {
rmod.functions.get_mut(&imp_addr).unwrap().regusage = imp_ru;
new_analyzed.push(imp_addr);
}
}
for func in new_analyzed {
self.analyzed.insert(func);
}
let mut dfs_wi = DfsPostOrder::empty(&rmod.callgraph).iter(&rmod.callgraph);
// pick a function ...
for fn_ni in rmod.callgraph.node_indices() {
// ... and start a dfs on it
dfs_wi.inner_mut().move_to(fn_ni);
while let Some(fn_to_anal) = dfs_wi.next() {
let fn_addr = rmod.callgraph[fn_to_anal];
// ignore functions already in `call_convs` (probably because its an import)
if !self.analyzed.contains(&fn_addr) {
self.patch_fn(fn_addr, &mut rmod.functions);
let rfn = &mut rmod.functions.get_mut(&fn_addr).unwrap();
let mut dce = DCE::new();
dce.analyze(rfn, Some(all));
let mut combiner = Combiner::new();
combiner.analyze(rfn, Some(all));
let ru = self.analyze_fn(rfn, &self.reginfo).unwrap_or_else(|| {
radeco_err!("Failed to analyze fn: {:?} (@ {:#X})", rfn.name, fn_addr);
// if analysis failed, default to "reads and clobbers everything"
self.reginfo.new_register_usage()
});
rfn.regusage = ru;
self.analyzed.insert(fn_addr);
}
}
}
None
}
}
impl Inferer {
pub fn new(reginfo: SubRegisterFile) -> Inferer {
Inferer {
reginfo: reginfo,
analyzed: HashSet::new(),
}
}
/// Using the callconv info we've gathered so far, patch-up call sites to
/// to remove arguments that the callee doesn't read and make values in
/// callee-saved registers be preserved across the call.
fn patch_fn(&self, fn_addr: u64, fn_map: &mut BTreeMap<u64, RadecoFunction>) -> () {
radeco_trace!("patching calls in fn: {}", fn_map[&fn_addr].name);
for node in fn_map[&fn_addr].ssa().inorder_walk() {
if let Ok(NodeType::Op(ir::MOpcode::OpCall)) =
fn_map[&fn_addr].ssa().node_data(node).map(|nd| nd.nt)
{
self.patch_call_node(node, fn_addr, fn_map)
.unwrap_or_else(|| {
radeco_warn!(
"failed to remove unused args for call at {:#X}",
fn_map[&fn_addr].ssa().address(node).unwrap()
);
});
}
}
}
fn patch_call_node(
&self,
call_node: <SSAStorage as SSA>::ValueRef,
fn_addr: u64,
fn_map: &mut BTreeMap<u64, RadecoFunction>,
) -> Option<()> {
// bail on indirect or weird call
let (call_tgt_addr, call_reg_map) = direct_call_info(fn_map[&fn_addr].ssa(), call_node)?;
// remove unread args
for (regid, &op_node) in &call_reg_map {
if fn_map[&call_tgt_addr].regusage.is_ignored(regid) {
fn_map
.get_mut(&fn_addr)
.unwrap()
.ssa_mut()
.op_unuse(call_node, op_node);
}
}
// bridge preserved registers
for (regid, (use_node, _)) in utils::call_rets(call_node, fn_map[&fn_addr].ssa()) {
if fn_map[&call_tgt_addr].regusage.is_preserved(regid) {
fn_map
.get_mut(&fn_addr)
.unwrap()
.ssa_mut()
.replace_value(use_node, call_reg_map[regid]);
}
}
Some(())
}
fn analyze_fn(&self, rfn: &RadecoFunction, reginfo: &SubRegisterFile) -> Option<RegisterUsage> {
radeco_trace!("analyzing fn: {}", rfn.name);
let ssa = rfn.ssa();
let entry_regstate_node = ssa.registers_in(ssa.entry_node()?)?;
let exit_regstate_node = ssa.registers_in(ssa.exit_node()?)?;
// some registers may not be present in the entry node;
// this means that the function neither reads nor preserves that register | ret.set_all_ignored();
for regid in ssa.regfile.iter_register_ids() {
// ignore registers not in entry regstate
if let Some(&(reg_val_entry, _)) = entry_regstate.get(regid) {
// bail if a register isn't present in exit regstate
let &(reg_val_exit, _) = exit_regstate.get(regid)?;
if reg_val_exit == reg_val_entry {
ret.set_preserved(regid);
}
// find all uses, ignoring entry/exit register state
let mut uses_iter = ssa
.uses_of(reg_val_entry)
.into_iter()
.filter(|&n| n != entry_regstate_node && n != exit_regstate_node);
if uses_iter.next().is_some() {
ret.set_read(regid);
}
}
}
Some(ret)
}
}
fn direct_call_info(
ssa: &SSAStorage,
call_node: <SSAStorage as SSA>::ValueRef,
) -> Option<(u64, RegisterMap<<SSAStorage as SSA>::ValueRef>)> {
let callinfo = utils::call_info(call_node, ssa)?;
Some((ssa.constant(callinfo.target)?, callinfo.register_args))
} | let entry_regstate = utils::register_state_info(entry_regstate_node, ssa);
let exit_regstate = utils::register_state_info(exit_regstate_node, ssa);
let mut ret = reginfo.new_register_usage(); | random_line_split |
infer_regusage.rs | //! Infers how each function uses every register
//! For every function, patch all of its call sites to ignore registers that the
//! callee doesn't read and to preserve register values that the callee
//! preserves. Then, record which registers it reads and which registers it
//! preserves.
//!
//! After this, all functions should have mutable [`regusage`][RadecoFunction::regusage]s.
//!
//! This analysis is super conservative; for example, if a function preserves a
//! register by pushing it onto the stack and popping it back right before
//! returning, it is considered to be read and not preserved because we can't
//! guarantee that that stack location is never subsequently read or modified.
//! See #147 for further discussion
use analysis::analyzer::{
all, Action, Analyzer, AnalyzerInfo, AnalyzerKind, AnalyzerResult, Change, FuncAnalyzer,
ModuleAnalyzer,
};
use analysis::dce::DCE;
use analysis::inst_combine::Combiner;
use frontend::radeco_containers::{RadecoFunction, RadecoModule};
use middle::ir;
use middle::regfile::*;
use middle::ssa::cfg_traits::*;
use middle::ssa::ssa_traits::*;
use middle::ssa::ssastorage::SSAStorage;
use middle::ssa::utils;
use petgraph::visit::{DfsPostOrder, Walker};
use std::any::Any;
use std::collections::{BTreeMap, HashSet};
const NAME: &str = "inferer";
const REQUIRES: &[AnalyzerKind] = &[];
pub const INFO: AnalyzerInfo = AnalyzerInfo {
name: NAME,
kind: AnalyzerKind::Inferer,
requires: REQUIRES,
uses_policy: false,
};
#[derive(Debug)]
pub struct Inferer {
/// Register file of the current architecture.
reginfo: SubRegisterFile,
/// Addresses of the functions we've already analyzed
analyzed: HashSet<u64>,
}
impl Analyzer for Inferer {
    /// Static metadata for this analyzer (name, kind, dependencies).
    fn info(&self) -> &'static AnalyzerInfo {
        &INFO
    }

    /// Expose `self` as `Any` so callers holding an `Analyzer` trait
    /// object can downcast back to `Inferer`.
    fn as_any(&self) -> &dyn Any {
        self
    }
}
impl ModuleAnalyzer for Inferer {
    /// Calls `patch_fn`, `dce::collect`, and `analyze_fn` on every function,
    /// callees first.
    ///
    /// Imported functions are seeded from the calling convention radare2
    /// reports; all other functions are visited in callgraph post-order so
    /// that each callee's register usage is known before its callers'
    /// call sites are patched.
    fn analyze<T: FnMut(Box<Change>) -> Action>(
        &mut self,
        rmod: &mut RadecoModule,
        _policy: Option<T>,
    ) -> Option<Box<AnalyzerResult>> {
        // for imports, *ASSUME* that the callconv that r2 says is correct
        let mut new_analyzed = Vec::new();
        {
            let imp_ru_iter = rmod.imports.iter().filter_map(|(&imp_addr, imp_info)| {
                let imp_rfn = imp_info.rfn.borrow();
                let regusage = self.reginfo.r2callconv_to_register_usage(
                    imp_rfn.callconv.as_ref()?, // ignore imports without callconvs
                    &*imp_rfn.callconv_name,
                )?;
                Some((imp_addr, regusage))
            });
            for (imp_addr, imp_ru) in imp_ru_iter {
                rmod.functions.get_mut(&imp_addr).unwrap().regusage = imp_ru;
                new_analyzed.push(imp_addr);
            }
        }
        // Mark the imports analyzed only after the iterator borrowing
        // `rmod.imports` above has been dropped.
        for func in new_analyzed {
            self.analyzed.insert(func);
        }
        let mut dfs_wi = DfsPostOrder::empty(&rmod.callgraph).iter(&rmod.callgraph);
        // pick a function ...
        for fn_ni in rmod.callgraph.node_indices() {
            // ... and start a dfs on it
            dfs_wi.inner_mut().move_to(fn_ni);
            while let Some(fn_to_anal) = dfs_wi.next() {
                let fn_addr = rmod.callgraph[fn_to_anal];
                // ignore functions already in `call_convs` (probably because its an import)
                if !self.analyzed.contains(&fn_addr) {
                    // Patch this function's call sites first so DCE and the
                    // combiner can simplify before register usage is read off.
                    self.patch_fn(fn_addr, &mut rmod.functions);
                    let rfn = &mut rmod.functions.get_mut(&fn_addr).unwrap();
                    let mut dce = DCE::new();
                    dce.analyze(rfn, Some(all));
                    let mut combiner = Combiner::new();
                    combiner.analyze(rfn, Some(all));
                    let ru = self.analyze_fn(rfn, &self.reginfo).unwrap_or_else(|| {
                        radeco_err!("Failed to analyze fn: {:?} (@ {:#X})", rfn.name, fn_addr);
                        // if analysis failed, default to "reads and clobbers everything"
                        self.reginfo.new_register_usage()
                    });
                    rfn.regusage = ru;
                    self.analyzed.insert(fn_addr);
                }
            }
        }
        None
    }
}
impl Inferer {
pub fn new(reginfo: SubRegisterFile) -> Inferer {
Inferer {
reginfo: reginfo,
analyzed: HashSet::new(),
}
}
/// Using the callconv info we've gathered so far, patch-up call sites to
/// to remove arguments that the callee doesn't read and make values in
/// callee-saved registers be preserved across the call.
fn patch_fn(&self, fn_addr: u64, fn_map: &mut BTreeMap<u64, RadecoFunction>) -> () |
    /// Patch a single `OpCall` node in the function at `fn_addr`: drop
    /// arguments passed in registers the callee never reads, and reroute
    /// uses of callee-preserved registers to the pre-call values.
    ///
    /// Returns `None` (leaving the call untouched) when the call target
    /// cannot be resolved to a constant address, i.e. indirect calls.
    fn patch_call_node(
        &self,
        call_node: <SSAStorage as SSA>::ValueRef,
        fn_addr: u64,
        fn_map: &mut BTreeMap<u64, RadecoFunction>,
    ) -> Option<()> {
        // bail on indirect or weird call
        let (call_tgt_addr, call_reg_map) = direct_call_info(fn_map[&fn_addr].ssa(), call_node)?;
        // remove unread args
        for (regid, &op_node) in &call_reg_map {
            if fn_map[&call_tgt_addr].regusage.is_ignored(regid) {
                fn_map
                    .get_mut(&fn_addr)
                    .unwrap()
                    .ssa_mut()
                    .op_unuse(call_node, op_node);
            }
        }
        // bridge preserved registers: each use of the call's result in a
        // preserved register is replaced by the value that went into the call.
        for (regid, (use_node, _)) in utils::call_rets(call_node, fn_map[&fn_addr].ssa()) {
            if fn_map[&call_tgt_addr].regusage.is_preserved(regid) {
                fn_map
                    .get_mut(&fn_addr)
                    .unwrap()
                    .ssa_mut()
                    .replace_value(use_node, call_reg_map[regid]);
            }
        }
        Some(())
    }
    /// Compute the `RegisterUsage` of `rfn`: a register is *preserved* when
    /// the same SSA value reaches the exit register state as entered, and
    /// *read* when its entry value has any use besides the entry/exit
    /// register-state nodes themselves.
    ///
    /// Returns `None` when the entry/exit register state is unavailable
    /// (or a register present at entry is missing at exit); the caller then
    /// falls back to the conservative "reads and clobbers everything".
    fn analyze_fn(&self, rfn: &RadecoFunction, reginfo: &SubRegisterFile) -> Option<RegisterUsage> {
        radeco_trace!("analyzing fn: {}", rfn.name);
        let ssa = rfn.ssa();
        let entry_regstate_node = ssa.registers_in(ssa.entry_node()?)?;
        let exit_regstate_node = ssa.registers_in(ssa.exit_node()?)?;
        // some registers may not be present in the entry node;
        // this means that the function neither reads nor preserves that register
        let entry_regstate = utils::register_state_info(entry_regstate_node, ssa);
        let exit_regstate = utils::register_state_info(exit_regstate_node, ssa);
        let mut ret = reginfo.new_register_usage();
        ret.set_all_ignored();
        for regid in ssa.regfile.iter_register_ids() {
            // ignore registers not in entry regstate
            if let Some(&(reg_val_entry, _)) = entry_regstate.get(regid) {
                // bail if a register isn't present in exit regstate
                let &(reg_val_exit, _) = exit_regstate.get(regid)?;
                if reg_val_exit == reg_val_entry {
                    // Identical SSA value flows straight through: preserved.
                    ret.set_preserved(regid);
                }
                // find all uses, ignoring entry/exit register state
                let mut uses_iter = ssa
                    .uses_of(reg_val_entry)
                    .into_iter()
                    .filter(|&n| n != entry_regstate_node && n != exit_regstate_node);
                if uses_iter.next().is_some() {
                    ret.set_read(regid);
                }
            }
        }
        Some(ret)
    }
}
/// If `call_node` is a direct call to a constant address, return that target
/// address together with the map from register ids to the argument value
/// nodes passed in those registers. Returns `None` when the target is not a
/// constant (indirect call) or the node carries no call info.
fn direct_call_info(
    ssa: &SSAStorage,
    call_node: <SSAStorage as SSA>::ValueRef,
) -> Option<(u64, RegisterMap<<SSAStorage as SSA>::ValueRef>)> {
    let callinfo = utils::call_info(call_node, ssa)?;
    Some((ssa.constant(callinfo.target)?, callinfo.register_args))
}
| {
radeco_trace!("patching calls in fn: {}", fn_map[&fn_addr].name);
for node in fn_map[&fn_addr].ssa().inorder_walk() {
if let Ok(NodeType::Op(ir::MOpcode::OpCall)) =
fn_map[&fn_addr].ssa().node_data(node).map(|nd| nd.nt)
{
self.patch_call_node(node, fn_addr, fn_map)
.unwrap_or_else(|| {
radeco_warn!(
"failed to remove unused args for call at {:#X}",
fn_map[&fn_addr].ssa().address(node).unwrap()
);
});
}
}
} | identifier_body |
server.py | import socket
from PIL import Image
import io
import os
def string_to_byte(hex_input):
    """Decode a string of hexadecimal digits into a mutable bytearray."""
    decoded = bytes.fromhex(hex_input)
    return bytearray(decoded)
def serve():
host = ""
port = 5001
my_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
my_socket.bind((host, port))
my_socket.listen(1)
conn, address = my_socket.accept()
decoded_data_buffer = ""
decoded_data = ""
print("Connection from: " + str(address))
# Keep on reading untill the program finishes
while True:
img = None
# Read until a colon is found. This signals the image size segment
while not ":" in decoded_data:
data = conn.recv(1)
decoded_data = data.decode()
decoded_data_buffer += decoded_data
print("Config received: ")
print(decoded_data_buffer)
expected_size_str = decoded_data_buffer.replace("config", "").replace(",", "").replace(":", "")
expected_size = int(expected_size_str)
print("Expected hex bytes: ", expected_size)
decoded_data_buffer = ""
hexBytesCount = 0
# Read the amount of hex chars indicated before
while True:
|
print("Read hex bytes: ", len(decoded_data_buffer))
image_bytes = string_to_byte(decoded_data_buffer)
print("Read bytes: ", len(image_bytes))
img = Image.open(io.BytesIO(bytes(image_bytes)))
imageName = "test" + ".jpg"
img.save(imageName)
print("Saved image: ", imageName)
decoded_data_buffer = ""
decoded_data = ""
steering, throttle = (0,0) # TODO: Plug your designed algorithm here.
os.remove('test.jpg')
reply = '{ "steering" : "%f", "throttle" : "%f" }' % (steering, throttle)
reply = reply + '\n'
print(reply)
reply = reply.encode()
conn.send(reply )
conn.close()
if __name__ == '__main__':
    # Entry point: run the blocking image-receiving server loop.
    print("Server Start")
    serve()
| missing_bytes = expected_size - hexBytesCount
data = conn.recv(missing_bytes if missing_bytes < 1024 else 1024)
if not data:
break
print("Read ", hexBytesCount, " out of ", expected_size)
last_read_size = len(data)
hexBytesCount += last_read_size
decoded_data = data.decode()
decoded_data_buffer += decoded_data
if hexBytesCount >= expected_size:
break # We are done! | conditional_block |
server.py | import socket
from PIL import Image
import io
import os
def string_to_byte(hex_input):
return bytearray.fromhex(hex_input)
| port = 5001
my_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
my_socket.bind((host, port))
my_socket.listen(1)
conn, address = my_socket.accept()
decoded_data_buffer = ""
decoded_data = ""
print("Connection from: " + str(address))
# Keep on reading untill the program finishes
while True:
img = None
# Read until a colon is found. This signals the image size segment
while not ":" in decoded_data:
data = conn.recv(1)
decoded_data = data.decode()
decoded_data_buffer += decoded_data
print("Config received: ")
print(decoded_data_buffer)
expected_size_str = decoded_data_buffer.replace("config", "").replace(",", "").replace(":", "")
expected_size = int(expected_size_str)
print("Expected hex bytes: ", expected_size)
decoded_data_buffer = ""
hexBytesCount = 0
# Read the amount of hex chars indicated before
while True:
missing_bytes = expected_size - hexBytesCount
data = conn.recv(missing_bytes if missing_bytes < 1024 else 1024)
if not data:
break
print("Read ", hexBytesCount, " out of ", expected_size)
last_read_size = len(data)
hexBytesCount += last_read_size
decoded_data = data.decode()
decoded_data_buffer += decoded_data
if hexBytesCount >= expected_size:
break # We are done!
print("Read hex bytes: ", len(decoded_data_buffer))
image_bytes = string_to_byte(decoded_data_buffer)
print("Read bytes: ", len(image_bytes))
img = Image.open(io.BytesIO(bytes(image_bytes)))
imageName = "test" + ".jpg"
img.save(imageName)
print("Saved image: ", imageName)
decoded_data_buffer = ""
decoded_data = ""
steering, throttle = (0,0) # TODO: Plug your designed algorithm here.
os.remove('test.jpg')
reply = '{ "steering" : "%f", "throttle" : "%f" }' % (steering, throttle)
reply = reply + '\n'
print(reply)
reply = reply.encode()
conn.send(reply )
conn.close()
if __name__ == '__main__':
print("Server Start")
serve() |
def serve():
host = "" | random_line_split |
server.py | import socket
from PIL import Image
import io
import os
def string_to_byte(hex_input):
|
def serve():
host = ""
port = 5001
my_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
my_socket.bind((host, port))
my_socket.listen(1)
conn, address = my_socket.accept()
decoded_data_buffer = ""
decoded_data = ""
print("Connection from: " + str(address))
# Keep on reading untill the program finishes
while True:
img = None
# Read until a colon is found. This signals the image size segment
while not ":" in decoded_data:
data = conn.recv(1)
decoded_data = data.decode()
decoded_data_buffer += decoded_data
print("Config received: ")
print(decoded_data_buffer)
expected_size_str = decoded_data_buffer.replace("config", "").replace(",", "").replace(":", "")
expected_size = int(expected_size_str)
print("Expected hex bytes: ", expected_size)
decoded_data_buffer = ""
hexBytesCount = 0
# Read the amount of hex chars indicated before
while True:
missing_bytes = expected_size - hexBytesCount
data = conn.recv(missing_bytes if missing_bytes < 1024 else 1024)
if not data:
break
print("Read ", hexBytesCount, " out of ", expected_size)
last_read_size = len(data)
hexBytesCount += last_read_size
decoded_data = data.decode()
decoded_data_buffer += decoded_data
if hexBytesCount >= expected_size:
break # We are done!
print("Read hex bytes: ", len(decoded_data_buffer))
image_bytes = string_to_byte(decoded_data_buffer)
print("Read bytes: ", len(image_bytes))
img = Image.open(io.BytesIO(bytes(image_bytes)))
imageName = "test" + ".jpg"
img.save(imageName)
print("Saved image: ", imageName)
decoded_data_buffer = ""
decoded_data = ""
steering, throttle = (0,0) # TODO: Plug your designed algorithm here.
os.remove('test.jpg')
reply = '{ "steering" : "%f", "throttle" : "%f" }' % (steering, throttle)
reply = reply + '\n'
print(reply)
reply = reply.encode()
conn.send(reply )
conn.close()
if __name__ == '__main__':
print("Server Start")
serve()
| return bytearray.fromhex(hex_input) | identifier_body |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.