file_name large_stringlengths 4 140 | prefix large_stringlengths 0 39k | suffix large_stringlengths 0 36.1k | middle large_stringlengths 0 29.4k | fim_type large_stringclasses 4
values |
|---|---|---|---|---|
job.py | import base64
import pickle
import re
from os import path
from py12306.cluster.cluster import Cluster
from py12306.helpers.api import *
from py12306.app import *
from py12306.helpers.auth_code import AuthCode
from py12306.helpers.event import Event
from py12306.helpers.func import *
from py12306.helpers.request import Request
from py12306.helpers.type import UserType
from py12306.helpers.qrcode import print_qrcode
from py12306.log.order_log import OrderLog
from py12306.log.user_log import UserLog
from py12306.log.common_log import CommonLog
class UserJob:
# heartbeat = 60 * 2 # 心跳保持时长
is_alive = True
check_interval = 5
key = None
user_name = ''
password = ''
type = 'qr'
user = None
info = {} # 用户信息
last_heartbeat = None
is_ready = False
user_loaded = False # 用户是否已加载成功
passengers = []
retry_time = 3
retry_count = 0
login_num = 0 # 尝试登录次数
# Init page
global_repeat_submit_token = None
ticket_info_for_passenger_form = None
order_request_dto = None
cluster = None
lock_init_user_time = 3 * 60
cookie = False
def __init__(self, info):
self.cluster = Cluster()
self.init_data(info)
def init_data(self, info):
self.session = Request()
self.session.add_response_hook(self.response_login_check)
self.key = str(info.get('key'))
self.user_name = info.get('user_name')
self.password = info.get('password')
self.type = info.get('type')
def update_user(self):
from py12306.user.user import User
self.user = User()
self.load_user()
def run(self):
# load user
self.update_user()
self.start()
def start(self):
"""
检测心跳
:return:
"""
while True and self.is_alive:
app_available_check()
if Config().is_slave():
self.load_user_from_remote()
else:
if Config().is_master() and not self.cookie: self.load_user_from_remote() # 主节点加载一次 Cookie
self.check_heartbeat()
if Const.IS_TEST: return
stay_second(self.check_interval)
def check_heartbeat(self):
# 心跳检测
if self.get_last_heartbeat() and (time_int() - self.get_last_heartbeat()) < Config().USER_HEARTBEAT_INTERVAL:
return True
# 只有主节点才能走到这
if self.is_first_time() or not self.check_user_is_login():
if not self.handle_login(): return
self.user_did_load()
message = UserLog.MESSAGE_USER_HEARTBEAT_NORMAL.format(self.get_name(), Config().USER_HEARTBEAT_INTERVAL) | return int(self.cluster.session.get(Cluster.KEY_USER_LAST_HEARTBEAT, 0))
return self.last_heartbeat
def set_last_heartbeat(self, time=None):
time = time if time != None else time_int()
if Config().is_cluster_enabled():
self.cluster.session.set(Cluster.KEY_USER_LAST_HEARTBEAT, time)
self.last_heartbeat = time
# def init_cookies
def is_first_time(self):
if Config().is_cluster_enabled():
return not self.cluster.get_user_cookie(self.key)
return not path.exists(self.get_cookie_path())
def handle_login(self, expire=False):
if expire: UserLog.print_user_expired()
self.is_ready = False
UserLog.print_start_login(user=self)
if self.type == 'qr':
return self.qr_login()
else:
return self.login()
def login(self):
"""
获取验证码结果
:return 权限校验码
"""
data = {
'username': self.user_name,
'password': self.password,
'appid': 'otn'
}
answer = AuthCode.get_auth_code(self.session)
data['answer'] = answer
self.request_device_id()
response = self.session.post(API_BASE_LOGIN.get('url'), data)
result = response.json()
if result.get('result_code') == 0: # 登录成功
"""
login 获得 cookie uamtk
auth/uamtk 不请求,会返回 uamtk票据内容为空
/otn/uamauthclient 能拿到用户名
"""
new_tk = self.auth_uamtk()
user_name = self.auth_uamauthclient(new_tk)
self.update_user_info({'user_name': user_name})
self.login_did_success()
return True
elif result.get('result_code') == 2: # 账号之内错误
# 登录失败,用户名或密码为空
# 密码输入错误
UserLog.add_quick_log(UserLog.MESSAGE_LOGIN_FAIL.format(result.get('result_message'))).flush()
else:
UserLog.add_quick_log(
UserLog.MESSAGE_LOGIN_FAIL.format(result.get('result_message', result.get('message',
CommonLog.MESSAGE_RESPONSE_EMPTY_ERROR)))).flush()
return False
def qr_login(self):
self.request_device_id()
image_uuid, png_path = self.download_code()
while True:
data = {
'RAIL_DEVICEID': self.session.cookies.get('RAIL_DEVICEID'),
'RAIL_EXPIRATION': self.session.cookies.get('RAIL_EXPIRATION'),
'uuid': image_uuid,
'appid': 'otn'
}
response = self.session.post(API_AUTH_QRCODE_CHECK.get('url'), data)
result = response.json()
result_code = int(result.get('result_code'))
if result_code == 0:
time.sleep(2)
elif result_code == 1:
UserLog.add_quick_log('请确认登录').flush()
time.sleep(2)
elif result_code == 2:
break
elif result_code == 3:
try:
os.remove(png_path)
except Exception as e:
UserLog.add_quick_log('无法删除文件: {}'.format(e)).flush()
image_uuid = self.download_code()
try:
os.remove(png_path)
except Exception as e:
UserLog.add_quick_log('无法删除文件: {}'.format(e)).flush()
self.session.get(API_USER_LOGIN, allow_redirects=True)
new_tk = self.auth_uamtk()
user_name = self.auth_uamauthclient(new_tk)
self.update_user_info({'user_name': user_name})
self.session.get(API_USER_LOGIN, allow_redirects=True)
self.login_did_success()
return True
def download_code(self):
try:
UserLog.add_quick_log(UserLog.MESSAGE_QRCODE_DOWNLOADING).flush()
response = self.session.post(API_AUTH_QRCODE_BASE64_DOWNLOAD.get('url'), data={'appid': 'otn'})
result = response.json()
if result.get('result_code') == '0':
img_bytes = base64.b64decode(result.get('image'))
try:
os.mkdir(Config().USER_DATA_DIR + '/qrcode')
except FileExistsError:
pass
png_path = path.normpath(Config().USER_DATA_DIR + '/qrcode/%d.png' % time.time())
with open(png_path, 'wb') as file:
file.write(img_bytes)
file.close()
if os.name == 'nt':
os.startfile(png_path)
else:
print_qrcode(png_path)
UserLog.add_log(UserLog.MESSAGE_QRCODE_DOWNLOADED.format(png_path)).flush()
Notification.send_email_with_qrcode(Config().EMAIL_RECEIVER, '你有新的登录二维码啦!', png_path)
self.retry_count = 0
return result.get('uuid'), png_path
raise KeyError('获取二维码失败: {}'.format(result.get('result_message')))
except Exception as e:
UserLog.add_quick_log(
UserLog.MESSAGE_QRCODE_FAIL.format(e, self.retry_time)).flush()
self.retry_count = self.retry_count + 1
if self.retry_count == 20:
self.retry_count = 0
try:
os.remove(self.get_cookie_path())
except:
pass
time.sleep(self.retry_time)
return self.download_code()
def check_user_is_login(self):
response = self.session.get(API_USER_LOGIN_CHECK)
is_login = response.json().get('data.is_login', False) == 'Y'
if is_login:
self.save_user()
self.set_last_heartbeat()
return self.get_user_info() # 检测应该是不会维持状态,这里再请求下个人中心看有没有用,01-10 看来应该是没用 01-22 有时拿到的状态 是已失效的再加上试试
return is_login
def auth_uamtk(self):
response = self.session.post(API_AUTH_UAMTK.get('url'), {'appid': 'otn'}, headers={
'Referer': 'https://kyfw.12306.cn/otn/passport?redirect=/otn/login/userLogin',
'Origin': 'https://kyfw.12306.cn'
})
result = response.json()
if result.get('newapptk'):
return result.get('newapptk')
# TODO 处理获取失败情况
return False
def auth_uamauthclient(self, tk):
response = self.session.post(API_AUTH_UAMAUTHCLIENT.get('url'), {'tk': tk})
result = response.json()
if result.get('username'):
return result.get('username')
# TODO 处理获取失败情况
return False
def request_device_id(self):
"""
获取加密后的浏览器特征 ID
:return:
"""
response = self.session.get(API_GET_BROWSER_DEVICE_ID)
if response.status_code == 200:
try:
result = json.loads(response.text)
headers = {
"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/94.0.4606.61 Safari/537.36"
}
self.session.headers.update(headers)
response = self.session.get(base64.b64decode(result['id']).decode())
if response.text.find('callbackFunction') >= 0:
result = response.text[18:-2]
result = json.loads(result)
if not Config().is_cache_rail_id_enabled():
self.session.cookies.update({
'RAIL_EXPIRATION': result.get('exp'),
'RAIL_DEVICEID': result.get('dfp'),
})
else:
self.session.cookies.update({
'RAIL_EXPIRATION': Config().RAIL_EXPIRATION,
'RAIL_DEVICEID': Config().RAIL_DEVICEID,
})
except:
return False
def login_did_success(self):
"""
用户登录成功
:return:
"""
self.login_num += 1
self.welcome_user()
self.save_user()
self.get_user_info()
self.set_last_heartbeat()
self.is_ready = True
def welcome_user(self):
UserLog.print_welcome_user(self)
pass
def get_cookie_path(self):
return Config().USER_DATA_DIR + self.user_name + '.cookie'
def update_user_info(self, info):
self.info = {**self.info, **info}
def get_name(self):
return self.info.get('user_name', '')
def save_user(self):
if Config().is_master():
self.cluster.set_user_cookie(self.key, self.session.cookies)
self.cluster.set_user_info(self.key, self.info)
with open(self.get_cookie_path(), 'wb') as f:
pickle.dump(self.session.cookies, f)
def did_loaded_user(self):
"""
恢复用户成功
:return:
"""
UserLog.add_quick_log(UserLog.MESSAGE_LOADED_USER.format(self.user_name)).flush()
if self.check_user_is_login() and self.get_user_info():
UserLog.add_quick_log(UserLog.MESSAGE_LOADED_USER_SUCCESS.format(self.user_name)).flush()
UserLog.print_welcome_user(self)
self.user_did_load()
else:
UserLog.add_quick_log(UserLog.MESSAGE_LOADED_USER_BUT_EXPIRED).flush()
self.set_last_heartbeat(0)
def user_did_load(self):
"""
用户已经加载成功
:return:
"""
self.is_ready = True
if self.user_loaded: return
self.user_loaded = True
Event().user_loaded({'key': self.key}) # 发布通知
def get_user_info(self):
response = self.session.get(API_USER_INFO.get('url'))
result = response.json()
user_data = result.get('data.userDTO.loginUserDTO')
# 子节点访问会导致主节点登录失效 TODO 可快考虑实时同步 cookie
if user_data:
self.update_user_info({**user_data, **{'user_name': user_data.get('name')}})
self.save_user()
return True
return False
def load_user(self):
if Config().is_cluster_enabled(): return
cookie_path = self.get_cookie_path()
if path.exists(cookie_path):
with open(self.get_cookie_path(), 'rb') as f:
cookie = pickle.load(f)
self.cookie = True
self.session.cookies.update(cookie)
self.did_loaded_user()
return True
return None
def load_user_from_remote(self):
cookie = self.cluster.get_user_cookie(self.key)
info = self.cluster.get_user_info(self.key)
if Config().is_slave() and (not cookie or not info):
while True: # 子节点只能取
UserLog.add_quick_log(UserLog.MESSAGE_USER_COOKIE_NOT_FOUND_FROM_REMOTE.format(self.user_name)).flush()
stay_second(self.retry_time)
return self.load_user_from_remote()
if info: self.info = info
if cookie:
self.session.cookies.update(cookie)
if not self.cookie: # 第一次加载
self.cookie = True
if not Config().is_slave():
self.did_loaded_user()
else:
self.is_ready = True # 设置子节点用户 已准备好
UserLog.print_welcome_user(self)
return True
return False
def check_is_ready(self):
return self.is_ready
def wait_for_ready(self):
if self.is_ready: return self
UserLog.add_quick_log(UserLog.MESSAGE_WAIT_USER_INIT_COMPLETE.format(self.retry_time)).flush()
stay_second(self.retry_time)
return self.wait_for_ready()
def destroy(self):
"""
退出用户
:return:
"""
UserLog.add_quick_log(UserLog.MESSAGE_USER_BEING_DESTROY.format(self.user_name)).flush()
self.is_alive = False
def response_login_check(self, response, **kwargs):
if Config().is_master() and response.json().get('data.noLogin') == 'true': # relogin
self.handle_login(expire=True)
def get_user_passengers(self):
if self.passengers: return self.passengers
response = self.session.post(API_USER_PASSENGERS)
result = response.json()
if result.get('data.normal_passengers'):
self.passengers = result.get('data.normal_passengers')
# 将乘客写入到文件
with open(Config().USER_PASSENGERS_FILE % self.user_name, 'w', encoding='utf-8') as f:
f.write(json.dumps(self.passengers, indent=4, ensure_ascii=False))
return self.passengers
else:
UserLog.add_quick_log(
UserLog.MESSAGE_GET_USER_PASSENGERS_FAIL.format(
result.get('messages', CommonLog.MESSAGE_RESPONSE_EMPTY_ERROR), self.retry_time)).flush()
if Config().is_slave():
self.load_user_from_remote() # 加载最新 cookie
stay_second(self.retry_time)
return self.get_user_passengers()
def get_passengers_by_members(self, members):
"""
获取格式化后的乘客信息
:param members:
:return:
[{
name: '项羽',
type: 1,
id_card: 0000000000000000000,
type_text: '成人',
enc_str: 'aaaaaa'
}]
"""
self.get_user_passengers()
results = []
for member in members:
is_member_code = is_number(member)
if not is_member_code:
if member[0] == "*":
audlt = 1
member = member[1:]
else:
audlt = 0
child_check = array_dict_find_by_key_value(results, 'name', member)
if not is_member_code and child_check:
new_member = child_check.copy()
new_member['type'] = UserType.CHILD
new_member['type_text'] = dict_find_key_by_value(UserType.dicts, int(new_member['type']))
else:
if is_member_code:
passenger = array_dict_find_by_key_value(self.passengers, 'code', member)
else:
passenger = array_dict_find_by_key_value(self.passengers, 'passenger_name', member)
if audlt:
passenger['passenger_type'] = UserType.ADULT
if not passenger:
UserLog.add_quick_log(
UserLog.MESSAGE_USER_PASSENGERS_IS_INVALID.format(self.user_name, member)).flush()
return False
new_member = {
'name': passenger.get('passenger_name'),
'id_card': passenger.get('passenger_id_no'),
'id_card_type': passenger.get('passenger_id_type_code'),
'mobile': passenger.get('mobile_no'),
'type': passenger.get('passenger_type'),
'type_text': dict_find_key_by_value(UserType.dicts, int(passenger.get('passenger_type'))),
'enc_str': passenger.get('allEncStr')
}
results.append(new_member)
return results
def request_init_dc_page(self):
"""
请求下单页面 拿到 token
:return:
"""
data = {'_json_att': ''}
response = self.session.post(API_INITDC_URL, data)
html = response.text
token = re.search(r'var globalRepeatSubmitToken = \'(.+?)\'', html)
form = re.search(r'var ticketInfoForPassengerForm *= *(\{.+\})', html)
order = re.search(r'var orderRequestDTO *= *(\{.+\})', html)
# 系统忙,请稍后重试
if html.find('系统忙,请稍后重试') != -1:
OrderLog.add_quick_log(OrderLog.MESSAGE_REQUEST_INIT_DC_PAGE_FAIL).flush() # 重试无用,直接跳过
return False, False, html
try:
self.global_repeat_submit_token = token.groups()[0]
self.ticket_info_for_passenger_form = json.loads(form.groups()[0].replace("'", '"'))
self.order_request_dto = json.loads(order.groups()[0].replace("'", '"'))
except:
return False, False, html # TODO Error
slide_val = re.search(r"var if_check_slide_passcode.*='(\d?)'", html)
is_slide = False
if slide_val:
is_slide = int(slide_val[1]) == 1
return True, is_slide, html | UserLog.add_quick_log(message).flush()
def get_last_heartbeat(self):
if Config().is_cluster_enabled(): | random_line_split |
job.py | import base64
import pickle
import re
from os import path
from py12306.cluster.cluster import Cluster
from py12306.helpers.api import *
from py12306.app import *
from py12306.helpers.auth_code import AuthCode
from py12306.helpers.event import Event
from py12306.helpers.func import *
from py12306.helpers.request import Request
from py12306.helpers.type import UserType
from py12306.helpers.qrcode import print_qrcode
from py12306.log.order_log import OrderLog
from py12306.log.user_log import UserLog
from py12306.log.common_log import CommonLog
class UserJob:
# heartbeat = 60 * 2 # 心跳保持时长
is_alive = True
check_interval = 5
key = None
user_name = ''
password = ''
type = 'qr'
user = None
info = {} # 用户信息
last_heartbeat = None
is_ready = False
user_loaded = False # 用户是否已加载成功
passengers = []
retry_time = 3
retry_count = 0
login_num = 0 # 尝试登录次数
# Init page
global_repeat_submit_token = None
ticket_info_for_passenger_form = None
order_request_dto = None
cluster = None
lock_init_user_time = 3 * 60
cookie = False
def __init__(self, info):
self.cluster = Cluster()
self.init_data(info)
def init_data(self, info):
self.session = Request()
self.session.add_response_hook(self.response_login_check)
self.key = str(info.get('key'))
self.user_name = info.get('user_name')
self.password = info.get('password')
self.type = info.get('type')
def update_user(self):
from py12306.user.user import User
self.user = User()
self.load_user()
def run(self):
# load user
self.update_user()
self.start()
def start(self):
"""
检测心跳
:return:
"""
while True and self.is_alive:
app_available_check()
if Config().is_slave():
self.load_user_from_remote()
else:
if Config().is_master() and not self.cookie: self.load_user_from_remote() # 主节点加载一次 Cookie
self.check_heartbeat()
if Const.IS_TEST: return
stay_second(self.check_interval)
def check_heartbeat(self):
# 心跳检测
if self.get_last_heartbeat() and (time_int() - self.get_last_heartbeat()) < Config().USER_HEARTBEAT_INTERVAL:
return True
# 只有主节点才能走到这
if self.is_first_time() or not self.check_user_is_login():
if not self.handle_login(): return
self.user_did_load()
message = UserLog.MESSAGE_USER_HEARTBEAT_NORMAL.format(self.get_name(), Config().USER_HEARTBEAT_INTERVAL)
UserLog.add_quick_log(message).flush()
def get_last_heartbeat(self):
if Config().is_cluster_enabled():
return int(self.cluster.session.get(Cluster.KEY_USER_LAST_HEARTBEAT, 0))
return self.last_heartbeat
def set_last_heartbeat(self, time=None):
time = time if time != None else time_int()
if Config().is_cluster_enabled():
self.cluster.session.set(Cluster.KEY_USER_LAST_HEARTBEAT, time)
self.last_heartbeat = time
# def init_cookies
def is_first_time(self):
if Config().is_cluster_enabled():
return not self.cluster.get_user_cookie(self.key)
return not path.exists(self.get_cookie_path())
def handle_login(self, expire=False):
if expire: UserLog.print_user_expired()
self.is_ready = False
UserLog.print_start_login(user=self)
if self.type == 'qr':
return self.qr_login()
else:
return self.login()
def login(self):
"""
获取验证码结果
:return 权限校验码
"""
data = {
'username': self.user_name,
'password': self.password,
'appid': 'otn'
}
answer = AuthCode.get_auth_code(self.session)
data['answer'] = answer
self.request_device_id()
response = self.session.post(API_BASE_LOGIN.get('url'), data)
result = response.json()
if result.get('result_code') == 0: # 登录成功
"""
login 获得 cookie uamtk
auth/uamtk 不请求,会返回 uamtk票据内容为空
/otn/uamauthclient 能拿到用户名
"""
new_tk = self.auth_uamtk()
user_name = self.auth_uamauthclient(new_tk)
self.update_user_info({'user_name': user_name})
self.login_did_success()
return True
elif result.get('result_code') == 2: # 账号之内错误
# 登录失败,用户名或密码为空
# 密码输入错误
UserLog.add_quick_log(UserLog.MESSAGE_LOGIN_FAIL.format(result.get('result_message'))).flush()
else:
UserLog.add_quick_log(
UserLog.MESSAGE_LOGIN_FAIL.format(result.get('result_message', result.get('message',
CommonLog.MESSAGE_RESPONSE_EMPTY_ERROR)))).flush()
return False
def qr_login(self):
self.request_device_id()
image_uuid, png_path = self.download_code()
while True:
data = {
'RAIL_DEVICEID': self.session.cookies.get('RAIL_DEVICEID'),
'RAIL_EXPIRATION': self.session.cookies.get('RAIL_EXPIRATION'),
'uuid': image_uuid,
'appid': 'otn'
}
response = self.session.post(API_AUTH_QRCODE_CHECK.get('url'), data)
result = response.json()
result_code = int(result.get('result_code'))
if result_code == 0:
time.sleep(2)
elif result_code == 1:
UserLog.add_quick_log('请确认登录').flush()
time.sleep(2)
elif result_code == 2:
break
elif result_code == 3:
try:
os.remove(png_path)
except Exception as e:
UserLog.add_quick_log('无法删除文件: {}'.format(e)).flush()
image_uuid = self.download_code()
try:
os.remove(png_path)
except Exception as e:
UserLog.add_quick_log('无法删除文件: {}'.format(e)).flush()
self.session.get(API_USER_LOGIN, allow_redirects=True)
new_tk = self.auth_uamtk()
user_name = self.auth_uamauthclient(new_tk)
self.update_user_info({'user_name': user_name})
self.session.get(API_USER_LOGIN, allow_redirects=True)
self.login_did_success()
return True
def download_code(self):
try:
UserLog.add_quick_log(UserLog.MESSAGE_QRCODE_DOWNLOADING).flush()
response = self.session.post(API_AUTH_QRCODE_BASE64_DOWNLOAD.get('url'), data={'appid': 'otn'})
result = response.json()
if result.get('result_code') == '0':
img_bytes = base64.b64decode(result.get('image'))
try:
os.mkdir(Config().USER_DATA_DIR + '/qrcode')
except FileExistsError:
pass
png_path = path.normpath(Config().USER_DATA_DIR + '/qrcode/%d.png' % time.time())
with open(png_path, 'wb') as file:
file.write(img_bytes)
file.close()
if os.name == 'nt':
os.startfile(png_path)
else:
print_qrcode(png_path)
UserLog.add_log(UserLog.MESSAGE_QRCODE_DOWNLOADED.format(png_path)).flush()
Notification.send_email_with_qrcode(Config().EMAIL_RECEIVER, '你有新的登录二维码啦!', png_path)
self.retry_count = 0
return result.get('uuid'), png_path
raise KeyError('获取二维码失败: {}'.format(result.get('result_message')))
except Exception as e:
UserLog.add_quick_log(
UserLog.MESSAGE_QRCODE_FAIL.format(e, self.retry_time)).flush()
self.retry_count = self.retry_count + 1
if self.retry_count == 20:
self.retry_count = 0
try:
os.remove(self.get_cookie_path())
except:
pass
time.sleep(self.retry_time)
return self.download_code()
def check_user_is_login(self):
response = self.session.get(API_USER_LOGIN_CHECK)
is_login = response.json().get('data.is_login', False) == 'Y'
if is_login:
self.save_user()
self.set_last_heartbeat()
return self.get_user_info() # 检测应该是不会维持状态,这里再请求下个人中心看有没有用,01-10 看来应该是没用 01-22 有时拿到的状态 是已失效的再加上试试
return is_login
def auth_uamtk(self):
response = self.session.post(API_AUTH_UAMTK.get('url'), {'appid': 'otn'}, headers={
'Referer': 'https://kyfw.12306.cn/otn/passport?redirect=/otn/login/userLogin',
'Origin': 'https://kyfw.12306.cn'
})
result = response.json()
if result.get('newapptk'):
return result.get('newapptk')
# TODO 处理获取失败情况
return False
def auth_uamauthclient(self, tk):
response = self.session.post(API_AUTH_UAMAUTHCLIENT.get('url'), {'tk': tk})
result = response.json()
if result.get('username'):
return result.get('username')
# TODO 处理获取失败情况
return False
def request_device_id(self):
"""
获取加密后的浏览器特征 ID
:return:
"""
response = self.session.get(API_GET_BROWSER_DEVICE_ID)
if response.status_code == 200:
try:
result = json.loads(response.text)
headers = {
"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/94.0.4606.61 Safari/537.36"
}
self.session.headers.update(headers)
response = self.session.get(base64.b64decode(result['id']).decode())
if response.text.find('callbackFunction') >= 0:
result = response.text[18:-2]
result = json.loads(result)
if not Config().is_cache_rail_id_enabled():
self.session.cookies.update({
'RAIL_EXPIRATION': result.get('exp'),
'RAIL_DEVICEID': result.get('dfp'),
})
else:
self.session.cookies.update({
'RAIL_EXPIRATION': Config().RAIL_EXPIRATION,
'RAIL_DEVICEID': Config().RAIL_DEVICEID,
})
except:
return False
def login_did_success(self):
"""
用户登录成功
:return:
"""
self.login_num += 1
self.welcome_user()
self.save_user()
self.get_user_info()
self.set_last_heartbeat()
self.is_ready = True
def welcome_user(self):
UserLog.print_welcome_user(self)
pass
def get_cookie_path(self):
return Config().USER_DATA_DIR + self.user_name + '.cookie'
def update_user_info(self, info):
self.info = {**self.info, **info}
def get_name(self):
return self.info.get('user_name', '')
def save_user(self):
if Config().is_master():
self.cluster.set_user_cookie(self.key, self.session.cookies)
self.cluster.set_user_info(self.key, self.info)
with open(self.get_cookie_path(), 'wb') as f:
pickle.dump(self.session.cookies, f)
def did_loaded_user(self):
"""
恢复用户成功
:return:
"""
UserLog.add_quick_log(UserLog.MESSAGE_LOADED_USER.format(self.user_name)).flush()
if self.check_user_is_login() and self.get_user_info():
UserLog.add_quick_log(UserLog.MESSAGE_LOADED_USER_SUCCESS.format(self.user_name)).flush()
UserLog.print_welcome_user(self)
self.user_did_load()
else:
UserLog.add_quick_log(UserLog.MESSAGE_LOADED_USER_BUT_EXPIRED).flush()
self.set_last_heartbeat(0)
def user_did_load(self):
"""
用户已经加载成功
:return:
"""
self.is_ready = True
if self.user_loaded: return
self.user_loaded = True
Event().user_loaded({'key': self.key}) # 发布通知
def get_user_info(self):
response = self.session.get(API_USER_INFO.get('url'))
result = response.json()
user_data = result.get('data.userDTO.loginUserDTO')
# 子节点访问会导致主节点登录失效 TODO 可快考虑实时同步 cookie
if user_data:
self.update_user_info({**user_data, **{'user_name': user_data.get('name')}})
self.save_user()
return True
return False
def load_user(self):
if Config().is_cluster_enabled(): return
cookie_path = self.get_cookie_path()
if path.exists(cookie_path):
with open(self.get_cookie_path(), 'rb') as f:
cookie = pickle.load(f)
self.cookie = True
self.session.cookies.update(cookie)
self.did_loaded_user()
return True
return None
def load_user_from_remote(self):
cookie = self.cluster.get_user_cookie(self.key)
info = self.cluster.get_user_info(self.key)
if Config().is_slave() and (not cookie or not info):
while True: # 子节点只能取
UserLog.add_quick_log(UserLog.MESSAGE_USER_COOKIE_NOT_FOUND_FROM_REMOTE.format(self.user_name)).flush()
stay_second(self.retry_time)
return self.load_user_from_remote()
if info: self.info = info
if cookie:
self.session.cookies.update(cookie)
if not self.cookie: # 第一次加载
self.cookie = True
if not Config().is_slave():
self.did_loaded_user()
else:
self.is_ready = True # 设置子节点用户 已准备好
UserLog.print_welcome_user(self)
return True
return False
def check_is_ready(self):
return self.is_ready
def wait_for_ready(self):
if self.is_ready: return self
UserLog.add_quick_log(UserLog.MESSAGE_WAIT_USER_INIT_COMPLETE.format(self.retry_time)).flush()
stay_second(self.retry_time)
return self.wait_for_ready()
def destroy(self):
"""
退出用户
:return:
"""
UserLog.add_quick_log(UserLog.MESSAGE_USER_BEING_DESTROY.format(self.user_name)).flush()
self.is_alive = False
def response_login_check(self, response, **kwargs):
if Config().is_master() and response.json().get('data.noLogin') == 'true': # relogin
self.handle_login(expire=True)
def get_user_passengers(self):
if self.passengers: return self.passengers
response = self.session.post(API_USER_PASSENGERS)
result = response.json()
if result.get('data.normal_passengers'):
self.passengers = result.get('data.normal_passengers')
# 将乘客写入到文件
with open(Config().USER_PASSENGERS_FILE % self.user_name, 'w', encoding='utf-8') as f:
f.write(json.dumps(self.passengers, indent=4, ensure_ascii=False))
return self.passengers
else:
UserLog.add_quick_log(
UserLog.MESSAGE_GET_USER_PASSENGERS_FAIL.format(
result.get('messages', CommonLog.MESSAGE_RESPONSE_EMPTY_ERROR), self.retry_time)).flush()
if Config().is_slave():
self.load_user_from_remote() # 加载最新 cookie
stay_second(self.retry_time)
return self.get_user_passengers()
def get_passengers_by_members(self, members):
"""
获取格式化后的乘客信息
:param members:
:return:
[{
name: '项羽',
type: 1,
id_card: 0000000000000000000,
type_text: '成人',
enc_str: 'aaaaaa'
}]
"""
self.get_user_passengers()
results = []
for member in members:
is_member_code = is_number(member)
if not is_member_code:
if member[0] == "*":
audlt = 1
member = member[1:]
else:
audlt = 0
child_check = array_dict_find_by_key_value(results, 'name', member)
if not is_member_code and child_check:
new_member = child_check.copy()
new_member['type'] = UserType.CHILD
new_member['type_text'] = dict_find_key_by_value(UserType.dicts, int(new_member['type']))
else:
if is_member_code:
passenger = array_dict_find_by_key_value(self.passengers, 'code', member)
else:
passenger = array_dict_find_by_key_value(self.passengers, 'passenger_name', member)
if audlt:
passenger['passenger_type'] = UserType.ADULT
if not passenger:
UserLog.add_quick_log(
UserLog.MESSAGE_USER_PASSENGERS_IS_INVALID.format(self.user_name, member)).flush()
return False
new_member = {
'name': passenger.get('passenger_name'),
'id_card': passenger.get('passenger_id_no'),
'id_card_type': passenger.get('passenger_id_type_code'),
'mobile': passenger.get('mobile_no'),
'type': passenger.get('passenger_type'),
'type_text': dict_find_key_by_value(UserType.dicts, int(passenger.get('passenger_type'))),
'enc_str': passenger.get('allEncStr')
}
results.append(new_member)
return results
def request_init_dc_page(self):
"""
请求下单页面 拿到 token
:return:
"""
data = {'_json_att': ''}
response = self.session.post(API_INITDC_URL, data)
html = response.text
token = re.search(r'var globalRepeatSubmitToken = \'(.+?)\'', html)
form = re.search(r'var ticketInfoForPassengerForm *= *(\{.+\})', html)
order = re.search(r'var orderRequestDTO *= *(\{.+\})', html)
# 系统忙,请稍后重试
if html.find('系统忙,请稍后重试') != -1:
OrderLog.add_quick_log(OrderLog.MESSAGE_REQUEST_INIT_DC_PAGE_FAIL).flush() # 重试无用,直接跳过
return False, False, html
try:
self.global_repeat_submi | t_token = token.groups()[0]
self.ticket_info_for_passenger_form = json.loads(form.groups()[0].replace("'", '"'))
self.order_request_dto = json.loads(order.groups()[0].replace("'", '"'))
except:
return False, False, html # TODO Error
slide_val = re.search(r"var if_check_slide_passcode.*='(\d?)'", html)
is_slide = False
if slide_val:
is_slide = int(slide_val[1]) == 1
return True, is_slide, html
| identifier_body | |
job.py | import base64
import pickle
import re
from os import path
from py12306.cluster.cluster import Cluster
from py12306.helpers.api import *
from py12306.app import *
from py12306.helpers.auth_code import AuthCode
from py12306.helpers.event import Event
from py12306.helpers.func import *
from py12306.helpers.request import Request
from py12306.helpers.type import UserType
from py12306.helpers.qrcode import print_qrcode
from py12306.log.order_log import OrderLog
from py12306.log.user_log import UserLog
from py12306.log.common_log import CommonLog
class UserJob:
# heartbeat = 60 * 2 # 心跳保持时长
is_alive = True
check_interval = 5
key = None
user_name = ''
password = ''
type = 'qr'
user = None
info = {} # 用户信息
last_heartbeat = None
is_ready = False
user_loaded = False # 用户是否已加载成功
passengers = []
retry_time = 3
retry_count = 0
login_num = 0 # 尝试登录次数
# Init page
global_repeat_submit_token = None
ticket_info_for_passenger_form = None
order_request_dto = None
cluster = None
lock_init_user_time = 3 * 60
cookie = False
def __init__(self, info):
self.cluster = Cluster()
self.init_data(info)
def init_data(self, info):
self.session = Request()
self.session.add_response_hook(self.response_login_check)
self.key = str(info.get('key'))
self.user_name = info.get('user_name')
self.password = info.get('password')
self.type = info.get('type')
def update_user(self):
from py12306.user.user import User
self.user = User()
self.load_user()
def run(self):
# load user
self.update_user()
self.start()
def start(self):
"""
检测心跳
:return:
"""
while True and self.is_alive:
app_available_check()
if Config().is_slave():
self.load_user_from_remote()
else:
if Config().is_master() and not self.cookie: self.load_user_from_remote() # 主节点加载一次 Cookie
self.check_heartbeat()
if Const.IS_TEST: return
stay_second(self.check_interval)
def check_heartbeat(self):
# 心跳检测
if self.get_last_heartbeat() and (time_int() - self.get_last_heartbeat()) < Config().USER_HEARTBEAT_INTERVAL:
return True
# 只有主节点才能走到这
if self.is_first_time() or not self.check_user_is_login():
if not self.handle_login(): return
self.user_did_load()
message = UserLog.MESSAGE_USER_HEARTBEAT_NORMAL.format(self.get_name(), Config().USER_HEARTBEAT_INTERVAL)
UserLog.add_quick_log(message).flush()
def get_last_heartbeat(self):
if Config().is_cluster_enabled():
return int(self.cluster.session.get(Cluster.KEY_USER_LAST_HEARTBEAT, 0))
return self.last_heartbeat
def set_last_heartbeat(self, time=None):
time = time if time != None else time_int()
if Config().is_cluster_enabled():
self.cluster.session.set(Cluster.KEY_USER_LAST_HEARTBEAT, time)
self.last_heartbeat = time
# def init_cookies
def is_first_time(self):
if Config().is_cluster_enabled():
return not self.cluster.get_user_cookie(self.key)
return not path.exists(self.get_cookie_path())
def handle_login(self, expire=False):
if expire: UserLog.print_user_expired()
self.is_ready = False
UserLog.print_start_login(user=self)
if self.type == 'qr':
return self.qr_login()
else:
return self.login()
def login(self):
"""
获取验证码结果
:return 权限校验码
"""
data = {
'username': self.user_name,
'password': self.password,
'appid': 'otn'
}
answer = AuthCode.get_auth_code(self.session)
data['answer'] = answer
self.request_device_id()
response = self.session.post(API_BASE_LOGIN.get('url'), data)
result = response.json()
if result.get('result_code') == 0: # 登录成功
"""
login 获得 cookie uamtk
auth/uamtk 不请求,会返回 uamtk票据内容为空
/otn/uamauthclient 能拿到用户名
"""
new_tk = self.auth_uamtk()
user_name = self.auth_uamauthclient(new_tk)
self.update_user_info({'user_name': user_name})
self.login_did_success()
return True
elif result.get('result_code') == 2: # 账号之内错误
# 登录失败,用户名或密码为空
# 密码输入错误
UserLog.add_quick_log(UserLog.MESSAGE_LOGIN_FAIL.format(result.get('result_message'))).flush()
else:
UserLog.add_quick_log(
UserLog.MESSAGE_LOGIN_FAIL.format(result.get('result_message', result.get('message',
CommonLog.MESSAGE_RESPONSE_EMPTY_ERROR)))).flush()
return False
def qr_login(self):
self.request_device_id()
image_uuid, png_path = self.download_code()
while True:
data = {
'RAIL_DEVICEID': self.session.cookies.get('RAIL_DEVICEID'),
'RAIL_EXPIRATION': self.session.cookies.get('RAIL_EXPIRATION'),
'uuid': image_uuid,
'appid': 'otn'
}
response = self.session.post(API_AUTH_QRCODE_CHECK.get('url'), data)
result = response.json()
result_code = int(result.get('result_code'))
if result_code == 0:
time.sleep(2)
elif result_code == 1:
UserLog.add_quick_log('请确认登录').flush()
time.sleep(2)
elif result_code == 2:
break
elif result_code == 3:
try:
os.remove(png_path)
except Exception as e:
UserLog.add_quick_log('无法删除文件: {}'.format(e)).flush()
image_uuid = self.download_code()
try:
os.remove(png_path)
except Exception as e:
UserLog.add_quick_log('无法删除文件: {}'.format(e)).flush()
self.session.get(API_USER_LOGIN, allow_redirects=True)
new_tk = self.auth_uamtk()
user_name = self.auth_uamauthclient(new_tk)
self.update_user_info({'user_name': user_name})
self.session.get(API_USER_LOGIN, allow_redirects=True)
self.login_did_success()
return True
def download_code(self):
try:
UserLog.add_quick_log(UserLog.MESSAGE_QRCODE_DOWNLOADING).flush()
response = self.session.post(API_AUTH_QRCODE_BASE64_DOWNLOAD.get('url'), data={'appid': 'otn'})
result = response.json()
if result.get('result_code') == '0':
img_bytes = base64.b64decode(result.get('image'))
try:
os.mkdir(Config().USER_DATA_DIR + '/qrcode')
except FileExistsError:
pass
png_path = path.normpath(Config().USER_DATA_DIR + '/qrcode/%d.png' % time.time())
with open(png_path, 'wb') as file:
file.write(img_bytes)
file.close()
if os.name == 'nt':
os.startfile(png_path)
else:
print_qrcode(png_path)
UserLog.add_log(UserLog.MESSAGE_QRCODE_DOWNLOADED.format(png_path)).flush()
Notification.send_email_with_qrcode(Config().EMAIL_RECEIVER, '你有新的登录二维码啦!', png_path)
self.retry_count = 0
return result.get('uuid'), png_path
raise KeyError('获取二维码失败: {}'.format(result.get('result_message')))
except Exception as e:
UserLog.add_quick_log(
UserLog.MESSAGE_QRCODE_FAIL.format(e, self.retry_time)).flush()
self.retry_count = self.retry_count + 1
if self.retry_count == 20:
self.retry_count = 0
try:
os.remove(self.get_cookie_path())
except:
pass
time.sleep(self.retry_time)
return self.download_code()
def check_user_is_login(self):
response = self.session.get(API_USER_LOGIN_CHECK)
is_login = response.json().get('data.is_login', False) == 'Y'
if is_login:
self.save_user()
self.set_last_heartbeat()
return self.get_user_info() # 检测应该是不 | 有用,01-10 看来应该是没用 01-22 有时拿到的状态 是已失效的再加上试试
return is_login
def auth_uamtk(self):
response = self.session.post(API_AUTH_UAMTK.get('url'), {'appid': 'otn'}, headers={
'Referer': 'https://kyfw.12306.cn/otn/passport?redirect=/otn/login/userLogin',
'Origin': 'https://kyfw.12306.cn'
})
result = response.json()
if result.get('newapptk'):
return result.get('newapptk')
# TODO 处理获取失败情况
return False
def auth_uamauthclient(self, tk):
response = self.session.post(API_AUTH_UAMAUTHCLIENT.get('url'), {'tk': tk})
result = response.json()
if result.get('username'):
return result.get('username')
# TODO 处理获取失败情况
return False
def request_device_id(self):
"""
获取加密后的浏览器特征 ID
:return:
"""
response = self.session.get(API_GET_BROWSER_DEVICE_ID)
if response.status_code == 200:
try:
result = json.loads(response.text)
headers = {
"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/94.0.4606.61 Safari/537.36"
}
self.session.headers.update(headers)
response = self.session.get(base64.b64decode(result['id']).decode())
if response.text.find('callbackFunction') >= 0:
result = response.text[18:-2]
result = json.loads(result)
if not Config().is_cache_rail_id_enabled():
self.session.cookies.update({
'RAIL_EXPIRATION': result.get('exp'),
'RAIL_DEVICEID': result.get('dfp'),
})
else:
self.session.cookies.update({
'RAIL_EXPIRATION': Config().RAIL_EXPIRATION,
'RAIL_DEVICEID': Config().RAIL_DEVICEID,
})
except:
return False
def login_did_success(self):
"""
用户登录成功
:return:
"""
self.login_num += 1
self.welcome_user()
self.save_user()
self.get_user_info()
self.set_last_heartbeat()
self.is_ready = True
def welcome_user(self):
UserLog.print_welcome_user(self)
pass
def get_cookie_path(self):
return Config().USER_DATA_DIR + self.user_name + '.cookie'
def update_user_info(self, info):
self.info = {**self.info, **info}
def get_name(self):
return self.info.get('user_name', '')
def save_user(self):
if Config().is_master():
self.cluster.set_user_cookie(self.key, self.session.cookies)
self.cluster.set_user_info(self.key, self.info)
with open(self.get_cookie_path(), 'wb') as f:
pickle.dump(self.session.cookies, f)
def did_loaded_user(self):
"""
恢复用户成功
:return:
"""
UserLog.add_quick_log(UserLog.MESSAGE_LOADED_USER.format(self.user_name)).flush()
if self.check_user_is_login() and self.get_user_info():
UserLog.add_quick_log(UserLog.MESSAGE_LOADED_USER_SUCCESS.format(self.user_name)).flush()
UserLog.print_welcome_user(self)
self.user_did_load()
else:
UserLog.add_quick_log(UserLog.MESSAGE_LOADED_USER_BUT_EXPIRED).flush()
self.set_last_heartbeat(0)
def user_did_load(self):
"""
用户已经加载成功
:return:
"""
self.is_ready = True
if self.user_loaded: return
self.user_loaded = True
Event().user_loaded({'key': self.key}) # 发布通知
def get_user_info(self):
response = self.session.get(API_USER_INFO.get('url'))
result = response.json()
user_data = result.get('data.userDTO.loginUserDTO')
# 子节点访问会导致主节点登录失效 TODO 可快考虑实时同步 cookie
if user_data:
self.update_user_info({**user_data, **{'user_name': user_data.get('name')}})
self.save_user()
return True
return False
def load_user(self):
if Config().is_cluster_enabled(): return
cookie_path = self.get_cookie_path()
if path.exists(cookie_path):
with open(self.get_cookie_path(), 'rb') as f:
cookie = pickle.load(f)
self.cookie = True
self.session.cookies.update(cookie)
self.did_loaded_user()
return True
return None
def load_user_from_remote(self):
cookie = self.cluster.get_user_cookie(self.key)
info = self.cluster.get_user_info(self.key)
if Config().is_slave() and (not cookie or not info):
while True: # 子节点只能取
UserLog.add_quick_log(UserLog.MESSAGE_USER_COOKIE_NOT_FOUND_FROM_REMOTE.format(self.user_name)).flush()
stay_second(self.retry_time)
return self.load_user_from_remote()
if info: self.info = info
if cookie:
self.session.cookies.update(cookie)
if not self.cookie: # 第一次加载
self.cookie = True
if not Config().is_slave():
self.did_loaded_user()
else:
self.is_ready = True # 设置子节点用户 已准备好
UserLog.print_welcome_user(self)
return True
return False
def check_is_ready(self):
return self.is_ready
def wait_for_ready(self):
if self.is_ready: return self
UserLog.add_quick_log(UserLog.MESSAGE_WAIT_USER_INIT_COMPLETE.format(self.retry_time)).flush()
stay_second(self.retry_time)
return self.wait_for_ready()
def destroy(self):
"""
退出用户
:return:
"""
UserLog.add_quick_log(UserLog.MESSAGE_USER_BEING_DESTROY.format(self.user_name)).flush()
self.is_alive = False
def response_login_check(self, response, **kwargs):
if Config().is_master() and response.json().get('data.noLogin') == 'true': # relogin
self.handle_login(expire=True)
def get_user_passengers(self):
if self.passengers: return self.passengers
response = self.session.post(API_USER_PASSENGERS)
result = response.json()
if result.get('data.normal_passengers'):
self.passengers = result.get('data.normal_passengers')
# 将乘客写入到文件
with open(Config().USER_PASSENGERS_FILE % self.user_name, 'w', encoding='utf-8') as f:
f.write(json.dumps(self.passengers, indent=4, ensure_ascii=False))
return self.passengers
else:
UserLog.add_quick_log(
UserLog.MESSAGE_GET_USER_PASSENGERS_FAIL.format(
result.get('messages', CommonLog.MESSAGE_RESPONSE_EMPTY_ERROR), self.retry_time)).flush()
if Config().is_slave():
self.load_user_from_remote() # 加载最新 cookie
stay_second(self.retry_time)
return self.get_user_passengers()
def get_passengers_by_members(self, members):
"""
获取格式化后的乘客信息
:param members:
:return:
[{
name: '项羽',
type: 1,
id_card: 0000000000000000000,
type_text: '成人',
enc_str: 'aaaaaa'
}]
"""
self.get_user_passengers()
results = []
for member in members:
is_member_code = is_number(member)
if not is_member_code:
if member[0] == "*":
audlt = 1
member = member[1:]
else:
audlt = 0
child_check = array_dict_find_by_key_value(results, 'name', member)
if not is_member_code and child_check:
new_member = child_check.copy()
new_member['type'] = UserType.CHILD
new_member['type_text'] = dict_find_key_by_value(UserType.dicts, int(new_member['type']))
else:
if is_member_code:
passenger = array_dict_find_by_key_value(self.passengers, 'code', member)
else:
passenger = array_dict_find_by_key_value(self.passengers, 'passenger_name', member)
if audlt:
passenger['passenger_type'] = UserType.ADULT
if not passenger:
UserLog.add_quick_log(
UserLog.MESSAGE_USER_PASSENGERS_IS_INVALID.format(self.user_name, member)).flush()
return False
new_member = {
'name': passenger.get('passenger_name'),
'id_card': passenger.get('passenger_id_no'),
'id_card_type': passenger.get('passenger_id_type_code'),
'mobile': passenger.get('mobile_no'),
'type': passenger.get('passenger_type'),
'type_text': dict_find_key_by_value(UserType.dicts, int(passenger.get('passenger_type'))),
'enc_str': passenger.get('allEncStr')
}
results.append(new_member)
return results
def request_init_dc_page(self):
"""
请求下单页面 拿到 token
:return:
"""
data = {'_json_att': ''}
response = self.session.post(API_INITDC_URL, data)
html = response.text
token = re.search(r'var globalRepeatSubmitToken = \'(.+?)\'', html)
form = re.search(r'var ticketInfoForPassengerForm *= *(\{.+\})', html)
order = re.search(r'var orderRequestDTO *= *(\{.+\})', html)
# 系统忙,请稍后重试
if html.find('系统忙,请稍后重试') != -1:
OrderLog.add_quick_log(OrderLog.MESSAGE_REQUEST_INIT_DC_PAGE_FAIL).flush() # 重试无用,直接跳过
return False, False, html
try:
self.global_repeat_submit_token = token.groups()[0]
self.ticket_info_for_passenger_form = json.loads(form.groups()[0].replace("'", '"'))
self.order_request_dto = json.loads(order.groups()[0].replace("'", '"'))
except:
return False, False, html # TODO Error
slide_val = re.search(r"var if_check_slide_passcode.*='(\d?)'", html)
is_slide = False
if slide_val:
is_slide = int(slide_val[1]) == 1
return True, is_slide, html
| 会维持状态,这里再请求下个人中心看有没 | identifier_name |
jac.py | #!/usr/bin/python -Wall
# ================================================================
# Copyright (c) John Kerl 2007
# kerl.john.r@gmail.com
# ================================================================
from __future__ import division # 1/2 = 0.5, not 0.
from math import *
from sackmat_m import *
import copy
# ----------------------------------------------------------------
# Let
# F: R^m -> R^n
# i.e.
# [ F_1(x_1, ..., x_m) ]
# F(x) = [ : : : ]
# [ F_n(x_1, ..., x_m) ].
# Then Dij = dFi/dxj, i=1..n, j=1..m (an n x m matrix).
# This is numerically approximated (forward-difference approximation) by
# (F(x1,...,xj+h,...,xn) - F(x1,...,xj,...,xn)) / h
# or (centered-difference approximation)
# (F(x1,...,xj+h/2,...,xn) - F(x1,...,xj-h/2,...,xn)) / h.
def jac(F, q, h=1e-6):
m = len(q)
n = len(F(q))
DFq = make_zero_matrix(n, m)
# Centered-difference approximation
h2 = 0.5 * h
for j in range(0, m):
qb = copy.copy(q)
qf = copy.copy(q)
qb[j] -= h2
qf[j] += h2
Fqb = F(qb)
Fqf = F(qf)
for i in range(0, n):
DFq[i][j] = (Fqf[i] - Fqb[i]) / h
return DFq
# ----------------------------------------------------------------
def F1(q):
[x, y, z] = q
#f1 = x**2
#f2 = y**2
#f3 = z**2
#f1 = x**2 * y**2
#f2 = y**2 * z**2
#f3 = z**2 * x**2
f1 = x * y
f2 = y * z
f3 = z * x
#f1 = 1.0 * y * y
#f2 = 2.0 * x
#f3 = 3.0 * z
return [f1, f2, f3]
# ----------------------------------------------------------------
def F2(q):
[x, y, z] = q
return [x**2 + y**2 + z**2]
# ----------------------------------------------------------------
def do_point(F,q):
print "q =", q
DFq = jac(F, q)
print "DFq="
print DFq
#print "det(DFq) =", DFq.det()
# ----------------------------------------------------------------
def do_point_with_det(F,q):
print "-" * 40
print "q =", q
DFq = jac(F, q)
print "DFq="
print DFq
print "det(DFq) =", DFq.det()
# ----------------------------------------------------------------
def frufru():
F = F1
do_point_with_det(F, [0,0,0])
print
do_point_with_det(F, [0,0,1])
do_point_with_det(F, [0,1,0])
do_point_with_det(F, [1,0,0])
print
do_point_with_det(F, [1,1,0])
do_point_with_det(F, [1,0,1])
do_point_with_det(F, [0,1,1])
print
do_point_with_det(F, [1,1,1])
do_point_with_det(F, [1,2,3])
do_point_with_det(F, [sqrt(0.5),sqrt(0.5),0])
a=0.1
do_point_with_det(F, [cos(a),sin(a),0])
a = 0.2
b = 0.3
c = sqrt(1 - a**2 - b**2)
do_point_with_det(F, [a,b,c])
a = 0.8
b = 0.2
c = sqrt(1 - a**2 - b**2)
do_point_with_det(F, [a,b,c])
print
# ----------------------------------------------------------------
def F(q):
[x, y, z] = q
#f1 = x**2
#f2 = y**2
#f3 = z**2
#f1 = x**2 * y**2
#f2 = y**2 * z**2
#f3 = z**2 * x**2
f1 = x * y
f2 = y * z
f3 = z * x
#f1 = 1.0 * y * y
#f2 = 2.0 * x
#f3 = 3.0 * z
return [f1, f2, f3]
# ----------------------------------------------------------------
def G(q):
|
# ----------------------------------------------------------------
def gt_something():
thetalo = 0
thetahi = 2*math.pi
philo = 0
phihi = math.pi
nphi = 12
ntheta = 12
if (len(sys.argv) == 3):
nphi = int(sys.argv[1])
ntheta = int(sys.argv[2])
dtheta = (thetahi-thetalo)/ntheta
dphi = (phihi-philo)/nphi
phi = 0
for ii in range(0, nphi):
theta = 0
for jj in range(0, ntheta):
x = sin(phi) * cos(theta)
y = sin(phi) * sin(theta)
z = cos(phi)
q = [x,y,z]
DF = jac(F, q)
d = DF.det()
# Let G(x,y,z) = x^2 + y^2 + z^2. The unit sphere is the level set
# for G(x,y,z) = 1.
# Tangent plane at (u,v,w):
# dG/dx(x-u) + dG/dy(y-v) + dG/dz(z-w)
# where (u,v,w) are the coordinates of the point q and (x,y,z) are variable.
DG = jac(G, q)
# For DF restricted to this tangent plane:
# * DG (i.e. grad G) is the normal vector
# * This gives a point-normal form for the tangent plane
# * Project the standard basis for R3 onto the tangent plane
# * Row-reduce
DF = jac(F, q)
# * Form an orthonormal basis
# * Compute DF of the basis
# * Row-reduce that to get the rank of DF on TM|q
#print "q = ", q,
#print "det(DF) = ", d
#print "%7.4f %7.4f %7.4f %7.4f %7.4f,%7.4f %7.4f,%7.4f %7.4f,%7.4f" % (
# x,y,z, d, DG[0][0], -DG[0][0]*x, DG[0][1], -DG[0][1]*y, DG[0][2], -DG[0][2]*z)
nx = DG[0][0]
ny = DG[0][1]
nz = DG[0][2]
nml = [nx, ny, nz]
e0 = [1,0,0]
e1 = [0,1,0]
e2 = [0,0,1]
# Project the standard basis for R3 down to the tangent plane TM|q.
proj_e0 = projperp(e0, nml)
proj_e1 = projperp(e1, nml)
proj_e2 = projperp(e2, nml)
proj_e = sackmat([proj_e0, proj_e1, proj_e2])
# Row-reduce, compute rank, and trim
proj_e.row_echelon_form()
rank = proj_e.rank_rr()
proj_e.elements = proj_e.elements[0:rank]
# Orthonormalize
proj_e = gram_schmidt(proj_e)
#print "q=[%7.4f,%7.4f,%7.4f]" % (x, y, z),
#print "nml=[%7.4f,%7.4f,%7.4f]" % (nx, ny, nz),
#print "p0=[%7.4f,%7.4f,%7.4f] p1=[%7.4f,%7.4f,%7.4f]" % (
#proj_e[0][0], proj_e[0][1], proj_e[0][2], proj_e[1][0], proj_e[1][1], proj_e[1][2]),
# Take DF of the orthonormal basis.
proj_e = proj_e.transpose()
proj_e = DF * proj_e
proj_e = proj_e.transpose()
rank = proj_e.rank()
#print "p0=[%7.4f,%7.4f,%7.4f] p1=[%7.4f,%7.4f,%7.4f]" % (
#proj_e[0][0], proj_e[0][1], proj_e[0][2], proj_e[1][0], proj_e[1][1], proj_e[1][2]),
#print "rank=", proj_e.rank_rr(),
#print "d=%11.3e" % (d),
# xxx hack
if (rank == 1):
d = 0.7
#print "%11.3e" % (d),
print "%8.4f" % (d),
#print
theta += dtheta
print
phi += dphi
gt_something()
| [x, y, z] = q
return [x**2 + y**2 + z**2] | identifier_body |
jac.py | #!/usr/bin/python -Wall
# ================================================================
# Copyright (c) John Kerl 2007
# kerl.john.r@gmail.com
# ================================================================
from __future__ import division # 1/2 = 0.5, not 0.
from math import *
from sackmat_m import *
import copy
# ----------------------------------------------------------------
# Let
# F: R^m -> R^n
# i.e.
# [ F_1(x_1, ..., x_m) ]
# F(x) = [ : : : ]
# [ F_n(x_1, ..., x_m) ].
# Then Dij = dFi/dxj, i=1..n, j=1..m (an n x m matrix).
# This is numerically approximated (forward-difference approximation) by
# (F(x1,...,xj+h,...,xn) - F(x1,...,xj,...,xn)) / h
# or (centered-difference approximation)
# (F(x1,...,xj+h/2,...,xn) - F(x1,...,xj-h/2,...,xn)) / h.
def jac(F, q, h=1e-6):
m = len(q)
n = len(F(q))
DFq = make_zero_matrix(n, m)
# Centered-difference approximation
h2 = 0.5 * h
for j in range(0, m):
qb = copy.copy(q)
qf = copy.copy(q)
qb[j] -= h2
qf[j] += h2
Fqb = F(qb)
Fqf = F(qf)
for i in range(0, n):
DFq[i][j] = (Fqf[i] - Fqb[i]) / h
return DFq
# ----------------------------------------------------------------
def F1(q):
[x, y, z] = q
#f1 = x**2
#f2 = y**2
#f3 = z**2
#f1 = x**2 * y**2
#f2 = y**2 * z**2
#f3 = z**2 * x**2
f1 = x * y
f2 = y * z
f3 = z * x
#f1 = 1.0 * y * y
#f2 = 2.0 * x
#f3 = 3.0 * z
return [f1, f2, f3]
# ----------------------------------------------------------------
def F2(q):
[x, y, z] = q
return [x**2 + y**2 + z**2]
# ----------------------------------------------------------------
def do_point(F,q):
print "q =", q
DFq = jac(F, q)
print "DFq="
print DFq
#print "det(DFq) =", DFq.det()
# ----------------------------------------------------------------
def do_point_with_det(F,q):
print "-" * 40
print "q =", q
DFq = jac(F, q)
print "DFq="
print DFq
print "det(DFq) =", DFq.det()
# ----------------------------------------------------------------
def frufru():
F = F1
do_point_with_det(F, [0,0,0])
print
do_point_with_det(F, [0,0,1])
do_point_with_det(F, [0,1,0])
do_point_with_det(F, [1,0,0])
print
do_point_with_det(F, [1,1,0])
do_point_with_det(F, [1,0,1])
do_point_with_det(F, [0,1,1])
print
do_point_with_det(F, [1,1,1])
do_point_with_det(F, [1,2,3])
do_point_with_det(F, [sqrt(0.5),sqrt(0.5),0]) |
a = 0.2
b = 0.3
c = sqrt(1 - a**2 - b**2)
do_point_with_det(F, [a,b,c])
a = 0.8
b = 0.2
c = sqrt(1 - a**2 - b**2)
do_point_with_det(F, [a,b,c])
print
# ----------------------------------------------------------------
def F(q):
[x, y, z] = q
#f1 = x**2
#f2 = y**2
#f3 = z**2
#f1 = x**2 * y**2
#f2 = y**2 * z**2
#f3 = z**2 * x**2
f1 = x * y
f2 = y * z
f3 = z * x
#f1 = 1.0 * y * y
#f2 = 2.0 * x
#f3 = 3.0 * z
return [f1, f2, f3]
# ----------------------------------------------------------------
def G(q):
[x, y, z] = q
return [x**2 + y**2 + z**2]
# ----------------------------------------------------------------
def gt_something():
thetalo = 0
thetahi = 2*math.pi
philo = 0
phihi = math.pi
nphi = 12
ntheta = 12
if (len(sys.argv) == 3):
nphi = int(sys.argv[1])
ntheta = int(sys.argv[2])
dtheta = (thetahi-thetalo)/ntheta
dphi = (phihi-philo)/nphi
phi = 0
for ii in range(0, nphi):
theta = 0
for jj in range(0, ntheta):
x = sin(phi) * cos(theta)
y = sin(phi) * sin(theta)
z = cos(phi)
q = [x,y,z]
DF = jac(F, q)
d = DF.det()
# Let G(x,y,z) = x^2 + y^2 + z^2. The unit sphere is the level set
# for G(x,y,z) = 1.
# Tangent plane at (u,v,w):
# dG/dx(x-u) + dG/dy(y-v) + dG/dz(z-w)
# where (u,v,w) are the coordinates of the point q and (x,y,z) are variable.
DG = jac(G, q)
# For DF restricted to this tangent plane:
# * DG (i.e. grad G) is the normal vector
# * This gives a point-normal form for the tangent plane
# * Project the standard basis for R3 onto the tangent plane
# * Row-reduce
DF = jac(F, q)
# * Form an orthonormal basis
# * Compute DF of the basis
# * Row-reduce that to get the rank of DF on TM|q
#print "q = ", q,
#print "det(DF) = ", d
#print "%7.4f %7.4f %7.4f %7.4f %7.4f,%7.4f %7.4f,%7.4f %7.4f,%7.4f" % (
# x,y,z, d, DG[0][0], -DG[0][0]*x, DG[0][1], -DG[0][1]*y, DG[0][2], -DG[0][2]*z)
nx = DG[0][0]
ny = DG[0][1]
nz = DG[0][2]
nml = [nx, ny, nz]
e0 = [1,0,0]
e1 = [0,1,0]
e2 = [0,0,1]
# Project the standard basis for R3 down to the tangent plane TM|q.
proj_e0 = projperp(e0, nml)
proj_e1 = projperp(e1, nml)
proj_e2 = projperp(e2, nml)
proj_e = sackmat([proj_e0, proj_e1, proj_e2])
# Row-reduce, compute rank, and trim
proj_e.row_echelon_form()
rank = proj_e.rank_rr()
proj_e.elements = proj_e.elements[0:rank]
# Orthonormalize
proj_e = gram_schmidt(proj_e)
#print "q=[%7.4f,%7.4f,%7.4f]" % (x, y, z),
#print "nml=[%7.4f,%7.4f,%7.4f]" % (nx, ny, nz),
#print "p0=[%7.4f,%7.4f,%7.4f] p1=[%7.4f,%7.4f,%7.4f]" % (
#proj_e[0][0], proj_e[0][1], proj_e[0][2], proj_e[1][0], proj_e[1][1], proj_e[1][2]),
# Take DF of the orthonormal basis.
proj_e = proj_e.transpose()
proj_e = DF * proj_e
proj_e = proj_e.transpose()
rank = proj_e.rank()
#print "p0=[%7.4f,%7.4f,%7.4f] p1=[%7.4f,%7.4f,%7.4f]" % (
#proj_e[0][0], proj_e[0][1], proj_e[0][2], proj_e[1][0], proj_e[1][1], proj_e[1][2]),
#print "rank=", proj_e.rank_rr(),
#print "d=%11.3e" % (d),
# xxx hack
if (rank == 1):
d = 0.7
#print "%11.3e" % (d),
print "%8.4f" % (d),
#print
theta += dtheta
print
phi += dphi
gt_something() | a=0.1
do_point_with_det(F, [cos(a),sin(a),0]) | random_line_split |
jac.py | #!/usr/bin/python -Wall
# ================================================================
# Copyright (c) John Kerl 2007
# kerl.john.r@gmail.com
# ================================================================
from __future__ import division # 1/2 = 0.5, not 0.
from math import *
from sackmat_m import *
import copy
# ----------------------------------------------------------------
# Let
# F: R^m -> R^n
# i.e.
# [ F_1(x_1, ..., x_m) ]
# F(x) = [ : : : ]
# [ F_n(x_1, ..., x_m) ].
# Then Dij = dFi/dxj, i=1..n, j=1..m (an n x m matrix).
# This is numerically approximated (forward-difference approximation) by
# (F(x1,...,xj+h,...,xn) - F(x1,...,xj,...,xn)) / h
# or (centered-difference approximation)
# (F(x1,...,xj+h/2,...,xn) - F(x1,...,xj-h/2,...,xn)) / h.
def jac(F, q, h=1e-6):
m = len(q)
n = len(F(q))
DFq = make_zero_matrix(n, m)
# Centered-difference approximation
h2 = 0.5 * h
for j in range(0, m):
qb = copy.copy(q)
qf = copy.copy(q)
qb[j] -= h2
qf[j] += h2
Fqb = F(qb)
Fqf = F(qf)
for i in range(0, n):
DFq[i][j] = (Fqf[i] - Fqb[i]) / h
return DFq
# ----------------------------------------------------------------
def F1(q):
[x, y, z] = q
#f1 = x**2
#f2 = y**2
#f3 = z**2
#f1 = x**2 * y**2
#f2 = y**2 * z**2
#f3 = z**2 * x**2
f1 = x * y
f2 = y * z
f3 = z * x
#f1 = 1.0 * y * y
#f2 = 2.0 * x
#f3 = 3.0 * z
return [f1, f2, f3]
# ----------------------------------------------------------------
def F2(q):
[x, y, z] = q
return [x**2 + y**2 + z**2]
# ----------------------------------------------------------------
def do_point(F,q):
print "q =", q
DFq = jac(F, q)
print "DFq="
print DFq
#print "det(DFq) =", DFq.det()
# ----------------------------------------------------------------
def do_point_with_det(F,q):
print "-" * 40
print "q =", q
DFq = jac(F, q)
print "DFq="
print DFq
print "det(DFq) =", DFq.det()
# ----------------------------------------------------------------
def frufru():
F = F1
do_point_with_det(F, [0,0,0])
print
do_point_with_det(F, [0,0,1])
do_point_with_det(F, [0,1,0])
do_point_with_det(F, [1,0,0])
print
do_point_with_det(F, [1,1,0])
do_point_with_det(F, [1,0,1])
do_point_with_det(F, [0,1,1])
print
do_point_with_det(F, [1,1,1])
do_point_with_det(F, [1,2,3])
do_point_with_det(F, [sqrt(0.5),sqrt(0.5),0])
a=0.1
do_point_with_det(F, [cos(a),sin(a),0])
a = 0.2
b = 0.3
c = sqrt(1 - a**2 - b**2)
do_point_with_det(F, [a,b,c])
a = 0.8
b = 0.2
c = sqrt(1 - a**2 - b**2)
do_point_with_det(F, [a,b,c])
print
# ----------------------------------------------------------------
def F(q):
[x, y, z] = q
#f1 = x**2
#f2 = y**2
#f3 = z**2
#f1 = x**2 * y**2
#f2 = y**2 * z**2
#f3 = z**2 * x**2
f1 = x * y
f2 = y * z
f3 = z * x
#f1 = 1.0 * y * y
#f2 = 2.0 * x
#f3 = 3.0 * z
return [f1, f2, f3]
# ----------------------------------------------------------------
def G(q):
[x, y, z] = q
return [x**2 + y**2 + z**2]
# ----------------------------------------------------------------
def gt_something():
thetalo = 0
thetahi = 2*math.pi
philo = 0
phihi = math.pi
nphi = 12
ntheta = 12
if (len(sys.argv) == 3):
nphi = int(sys.argv[1])
ntheta = int(sys.argv[2])
dtheta = (thetahi-thetalo)/ntheta
dphi = (phihi-philo)/nphi
phi = 0
for ii in range(0, nphi):
theta = 0
for jj in range(0, ntheta):
|
print
phi += dphi
gt_something()
| x = sin(phi) * cos(theta)
y = sin(phi) * sin(theta)
z = cos(phi)
q = [x,y,z]
DF = jac(F, q)
d = DF.det()
# Let G(x,y,z) = x^2 + y^2 + z^2. The unit sphere is the level set
# for G(x,y,z) = 1.
# Tangent plane at (u,v,w):
# dG/dx(x-u) + dG/dy(y-v) + dG/dz(z-w)
# where (u,v,w) are the coordinates of the point q and (x,y,z) are variable.
DG = jac(G, q)
# For DF restricted to this tangent plane:
# * DG (i.e. grad G) is the normal vector
# * This gives a point-normal form for the tangent plane
# * Project the standard basis for R3 onto the tangent plane
# * Row-reduce
DF = jac(F, q)
# * Form an orthonormal basis
# * Compute DF of the basis
# * Row-reduce that to get the rank of DF on TM|q
#print "q = ", q,
#print "det(DF) = ", d
#print "%7.4f %7.4f %7.4f %7.4f %7.4f,%7.4f %7.4f,%7.4f %7.4f,%7.4f" % (
# x,y,z, d, DG[0][0], -DG[0][0]*x, DG[0][1], -DG[0][1]*y, DG[0][2], -DG[0][2]*z)
nx = DG[0][0]
ny = DG[0][1]
nz = DG[0][2]
nml = [nx, ny, nz]
e0 = [1,0,0]
e1 = [0,1,0]
e2 = [0,0,1]
# Project the standard basis for R3 down to the tangent plane TM|q.
proj_e0 = projperp(e0, nml)
proj_e1 = projperp(e1, nml)
proj_e2 = projperp(e2, nml)
proj_e = sackmat([proj_e0, proj_e1, proj_e2])
# Row-reduce, compute rank, and trim
proj_e.row_echelon_form()
rank = proj_e.rank_rr()
proj_e.elements = proj_e.elements[0:rank]
# Orthonormalize
proj_e = gram_schmidt(proj_e)
#print "q=[%7.4f,%7.4f,%7.4f]" % (x, y, z),
#print "nml=[%7.4f,%7.4f,%7.4f]" % (nx, ny, nz),
#print "p0=[%7.4f,%7.4f,%7.4f] p1=[%7.4f,%7.4f,%7.4f]" % (
#proj_e[0][0], proj_e[0][1], proj_e[0][2], proj_e[1][0], proj_e[1][1], proj_e[1][2]),
# Take DF of the orthonormal basis.
proj_e = proj_e.transpose()
proj_e = DF * proj_e
proj_e = proj_e.transpose()
rank = proj_e.rank()
#print "p0=[%7.4f,%7.4f,%7.4f] p1=[%7.4f,%7.4f,%7.4f]" % (
#proj_e[0][0], proj_e[0][1], proj_e[0][2], proj_e[1][0], proj_e[1][1], proj_e[1][2]),
#print "rank=", proj_e.rank_rr(),
#print "d=%11.3e" % (d),
# xxx hack
if (rank == 1):
d = 0.7
#print "%11.3e" % (d),
print "%8.4f" % (d),
#print
theta += dtheta | conditional_block |
jac.py | #!/usr/bin/python -Wall
# ================================================================
# Copyright (c) John Kerl 2007
# kerl.john.r@gmail.com
# ================================================================
from __future__ import division # 1/2 = 0.5, not 0.
from math import *
from sackmat_m import *
import copy
# ----------------------------------------------------------------
# Let
# F: R^m -> R^n
# i.e.
# [ F_1(x_1, ..., x_m) ]
# F(x) = [ : : : ]
# [ F_n(x_1, ..., x_m) ].
# Then Dij = dFi/dxj, i=1..n, j=1..m (an n x m matrix).
# This is numerically approximated (forward-difference approximation) by
# (F(x1,...,xj+h,...,xn) - F(x1,...,xj,...,xn)) / h
# or (centered-difference approximation)
# (F(x1,...,xj+h/2,...,xn) - F(x1,...,xj-h/2,...,xn)) / h.
def jac(F, q, h=1e-6):
m = len(q)
n = len(F(q))
DFq = make_zero_matrix(n, m)
# Centered-difference approximation
h2 = 0.5 * h
for j in range(0, m):
qb = copy.copy(q)
qf = copy.copy(q)
qb[j] -= h2
qf[j] += h2
Fqb = F(qb)
Fqf = F(qf)
for i in range(0, n):
DFq[i][j] = (Fqf[i] - Fqb[i]) / h
return DFq
# ----------------------------------------------------------------
def F1(q):
[x, y, z] = q
#f1 = x**2
#f2 = y**2
#f3 = z**2
#f1 = x**2 * y**2
#f2 = y**2 * z**2
#f3 = z**2 * x**2
f1 = x * y
f2 = y * z
f3 = z * x
#f1 = 1.0 * y * y
#f2 = 2.0 * x
#f3 = 3.0 * z
return [f1, f2, f3]
# ----------------------------------------------------------------
def F2(q):
[x, y, z] = q
return [x**2 + y**2 + z**2]
# ----------------------------------------------------------------
def do_point(F,q):
print "q =", q
DFq = jac(F, q)
print "DFq="
print DFq
#print "det(DFq) =", DFq.det()
# ----------------------------------------------------------------
def do_point_with_det(F,q):
print "-" * 40
print "q =", q
DFq = jac(F, q)
print "DFq="
print DFq
print "det(DFq) =", DFq.det()
# ----------------------------------------------------------------
def | ():
F = F1
do_point_with_det(F, [0,0,0])
print
do_point_with_det(F, [0,0,1])
do_point_with_det(F, [0,1,0])
do_point_with_det(F, [1,0,0])
print
do_point_with_det(F, [1,1,0])
do_point_with_det(F, [1,0,1])
do_point_with_det(F, [0,1,1])
print
do_point_with_det(F, [1,1,1])
do_point_with_det(F, [1,2,3])
do_point_with_det(F, [sqrt(0.5),sqrt(0.5),0])
a=0.1
do_point_with_det(F, [cos(a),sin(a),0])
a = 0.2
b = 0.3
c = sqrt(1 - a**2 - b**2)
do_point_with_det(F, [a,b,c])
a = 0.8
b = 0.2
c = sqrt(1 - a**2 - b**2)
do_point_with_det(F, [a,b,c])
print
# ----------------------------------------------------------------
def F(q):
[x, y, z] = q
#f1 = x**2
#f2 = y**2
#f3 = z**2
#f1 = x**2 * y**2
#f2 = y**2 * z**2
#f3 = z**2 * x**2
f1 = x * y
f2 = y * z
f3 = z * x
#f1 = 1.0 * y * y
#f2 = 2.0 * x
#f3 = 3.0 * z
return [f1, f2, f3]
# ----------------------------------------------------------------
def G(q):
[x, y, z] = q
return [x**2 + y**2 + z**2]
# ----------------------------------------------------------------
def gt_something():
thetalo = 0
thetahi = 2*math.pi
philo = 0
phihi = math.pi
nphi = 12
ntheta = 12
if (len(sys.argv) == 3):
nphi = int(sys.argv[1])
ntheta = int(sys.argv[2])
dtheta = (thetahi-thetalo)/ntheta
dphi = (phihi-philo)/nphi
phi = 0
for ii in range(0, nphi):
theta = 0
for jj in range(0, ntheta):
x = sin(phi) * cos(theta)
y = sin(phi) * sin(theta)
z = cos(phi)
q = [x,y,z]
DF = jac(F, q)
d = DF.det()
# Let G(x,y,z) = x^2 + y^2 + z^2. The unit sphere is the level set
# for G(x,y,z) = 1.
# Tangent plane at (u,v,w):
# dG/dx(x-u) + dG/dy(y-v) + dG/dz(z-w)
# where (u,v,w) are the coordinates of the point q and (x,y,z) are variable.
DG = jac(G, q)
# For DF restricted to this tangent plane:
# * DG (i.e. grad G) is the normal vector
# * This gives a point-normal form for the tangent plane
# * Project the standard basis for R3 onto the tangent plane
# * Row-reduce
DF = jac(F, q)
# * Form an orthonormal basis
# * Compute DF of the basis
# * Row-reduce that to get the rank of DF on TM|q
#print "q = ", q,
#print "det(DF) = ", d
#print "%7.4f %7.4f %7.4f %7.4f %7.4f,%7.4f %7.4f,%7.4f %7.4f,%7.4f" % (
# x,y,z, d, DG[0][0], -DG[0][0]*x, DG[0][1], -DG[0][1]*y, DG[0][2], -DG[0][2]*z)
nx = DG[0][0]
ny = DG[0][1]
nz = DG[0][2]
nml = [nx, ny, nz]
e0 = [1,0,0]
e1 = [0,1,0]
e2 = [0,0,1]
# Project the standard basis for R3 down to the tangent plane TM|q.
proj_e0 = projperp(e0, nml)
proj_e1 = projperp(e1, nml)
proj_e2 = projperp(e2, nml)
proj_e = sackmat([proj_e0, proj_e1, proj_e2])
# Row-reduce, compute rank, and trim
proj_e.row_echelon_form()
rank = proj_e.rank_rr()
proj_e.elements = proj_e.elements[0:rank]
# Orthonormalize
proj_e = gram_schmidt(proj_e)
#print "q=[%7.4f,%7.4f,%7.4f]" % (x, y, z),
#print "nml=[%7.4f,%7.4f,%7.4f]" % (nx, ny, nz),
#print "p0=[%7.4f,%7.4f,%7.4f] p1=[%7.4f,%7.4f,%7.4f]" % (
#proj_e[0][0], proj_e[0][1], proj_e[0][2], proj_e[1][0], proj_e[1][1], proj_e[1][2]),
# Take DF of the orthonormal basis.
proj_e = proj_e.transpose()
proj_e = DF * proj_e
proj_e = proj_e.transpose()
rank = proj_e.rank()
#print "p0=[%7.4f,%7.4f,%7.4f] p1=[%7.4f,%7.4f,%7.4f]" % (
#proj_e[0][0], proj_e[0][1], proj_e[0][2], proj_e[1][0], proj_e[1][1], proj_e[1][2]),
#print "rank=", proj_e.rank_rr(),
#print "d=%11.3e" % (d),
# xxx hack
if (rank == 1):
d = 0.7
#print "%11.3e" % (d),
print "%8.4f" % (d),
#print
theta += dtheta
print
phi += dphi
gt_something()
| frufru | identifier_name |
person.py |
__author__="vvladych"
__date__ ="$09.10.2014 23:01:15$"
from forecastmgmt.dao.db_connection import get_db_connection
import psycopg2.extras
from MDO import MDO
from person_name import PersonName
class Person(MDO):
sql_dict={"get_all":"SELECT sid, common_name, birth_date, birth_place, person_uuid FROM fc_person",
"insert":"INSERT INTO fc_person(common_name, birth_date, birth_place) VALUES(%s,%s,%s) RETURNING sid",
"delete":"DELETE FROM fc_person WHERE sid=%s",
"load":"SELECT sid, common_name, birth_date, birth_place, person_uuid FROM fc_person WHERE sid=%s",
"update_person":"UPDATE fc_person SET common_name=%s, birth_date=%s, birth_place=%s WHERE sid=%s"
}
def __init__(self, sid=None, common_name=None, birth_date=None, birth_place=None, person_uuid=None):
super(Person, self).__init__(Person.sql_dict,sid,person_uuid)
self.common_name=common_name
self.birth_date=birth_date
self.birth_place=birth_place
if sid!=None:
self.names=PersonName().get_all_for_foreign_key(self.sid)
else:
self.names=[]
def load_object_from_db(self,rec):
self.common_name=rec.common_name
self.birth_date=rec.birth_date
self.birth_place=rec.birth_place
self.uuid=rec.person_uuid
self.names=PersonName().get_all_for_foreign_key(self.sid)
def get_insert_data(self):
return (self.common_name,self.birth_date,self.birth_place)
def insert(self):
super(Person, self).insert()
for name in self.names:
name.person_sid=self.sid
name.insert()
get_db_connection().commit()
def add_name(self, person_name_sid, person_name_role, person_sid, namepart_list):
self.names.append(PersonName(person_name_sid, person_name_role, person_sid, namepart_list))
def fabric_method(self,rec):
return Person(rec.sid, rec.common_name, rec.birth_date, rec.birth_place, rec.person_uuid)
def | (self, other):
cur=get_db_connection().cursor(cursor_factory=psycopg2.extras.NamedTupleCursor)
data=(other.common_name, other.birth_date, other.birth_place, self.sid)
cur.execute(Person.sql_dict["update_person"],data)
cur.close()
# update person_names
# delete outdated person_names
for person_name in self.names:
if person_name not in other.names:
person_name.delete()
for person_name in other.names:
if person_name not in self.names:
person_name.insert()
get_db_connection().commit()
| update | identifier_name |
person.py |
__author__="vvladych"
__date__ ="$09.10.2014 23:01:15$"
from forecastmgmt.dao.db_connection import get_db_connection
import psycopg2.extras
from MDO import MDO
from person_name import PersonName
class Person(MDO):
sql_dict={"get_all":"SELECT sid, common_name, birth_date, birth_place, person_uuid FROM fc_person",
"insert":"INSERT INTO fc_person(common_name, birth_date, birth_place) VALUES(%s,%s,%s) RETURNING sid",
"delete":"DELETE FROM fc_person WHERE sid=%s",
"load":"SELECT sid, common_name, birth_date, birth_place, person_uuid FROM fc_person WHERE sid=%s",
"update_person":"UPDATE fc_person SET common_name=%s, birth_date=%s, birth_place=%s WHERE sid=%s"
}
def __init__(self, sid=None, common_name=None, birth_date=None, birth_place=None, person_uuid=None):
super(Person, self).__init__(Person.sql_dict,sid,person_uuid)
self.common_name=common_name
self.birth_date=birth_date
self.birth_place=birth_place
if sid!=None:
|
else:
self.names=[]
def load_object_from_db(self,rec):
self.common_name=rec.common_name
self.birth_date=rec.birth_date
self.birth_place=rec.birth_place
self.uuid=rec.person_uuid
self.names=PersonName().get_all_for_foreign_key(self.sid)
def get_insert_data(self):
return (self.common_name,self.birth_date,self.birth_place)
def insert(self):
super(Person, self).insert()
for name in self.names:
name.person_sid=self.sid
name.insert()
get_db_connection().commit()
def add_name(self, person_name_sid, person_name_role, person_sid, namepart_list):
self.names.append(PersonName(person_name_sid, person_name_role, person_sid, namepart_list))
def fabric_method(self,rec):
return Person(rec.sid, rec.common_name, rec.birth_date, rec.birth_place, rec.person_uuid)
def update(self, other):
cur=get_db_connection().cursor(cursor_factory=psycopg2.extras.NamedTupleCursor)
data=(other.common_name, other.birth_date, other.birth_place, self.sid)
cur.execute(Person.sql_dict["update_person"],data)
cur.close()
# update person_names
# delete outdated person_names
for person_name in self.names:
if person_name not in other.names:
person_name.delete()
for person_name in other.names:
if person_name not in self.names:
person_name.insert()
get_db_connection().commit()
| self.names=PersonName().get_all_for_foreign_key(self.sid) | conditional_block |
person.py | __author__="vvladych"
__date__ ="$09.10.2014 23:01:15$"
from forecastmgmt.dao.db_connection import get_db_connection
import psycopg2.extras
from MDO import MDO
from person_name import PersonName
class Person(MDO):
sql_dict={"get_all":"SELECT sid, common_name, birth_date, birth_place, person_uuid FROM fc_person",
"insert":"INSERT INTO fc_person(common_name, birth_date, birth_place) VALUES(%s,%s,%s) RETURNING sid",
"delete":"DELETE FROM fc_person WHERE sid=%s",
"load":"SELECT sid, common_name, birth_date, birth_place, person_uuid FROM fc_person WHERE sid=%s",
"update_person":"UPDATE fc_person SET common_name=%s, birth_date=%s, birth_place=%s WHERE sid=%s"
}
def __init__(self, sid=None, common_name=None, birth_date=None, birth_place=None, person_uuid=None):
super(Person, self).__init__(Person.sql_dict,sid,person_uuid)
self.common_name=common_name
self.birth_date=birth_date
self.birth_place=birth_place
if sid!=None:
self.names=PersonName().get_all_for_foreign_key(self.sid)
else:
self.names=[]
def load_object_from_db(self,rec):
self.common_name=rec.common_name
self.birth_date=rec.birth_date
self.birth_place=rec.birth_place
self.uuid=rec.person_uuid
self.names=PersonName().get_all_for_foreign_key(self.sid)
def get_insert_data(self):
return (self.common_name,self.birth_date,self.birth_place)
def insert(self):
super(Person, self).insert()
for name in self.names:
name.person_sid=self.sid
name.insert()
get_db_connection().commit()
def add_name(self, person_name_sid, person_name_role, person_sid, namepart_list):
self.names.append(PersonName(person_name_sid, person_name_role, person_sid, namepart_list))
def fabric_method(self,rec): | data=(other.common_name, other.birth_date, other.birth_place, self.sid)
cur.execute(Person.sql_dict["update_person"],data)
cur.close()
# update person_names
# delete outdated person_names
for person_name in self.names:
if person_name not in other.names:
person_name.delete()
for person_name in other.names:
if person_name not in self.names:
person_name.insert()
get_db_connection().commit() | return Person(rec.sid, rec.common_name, rec.birth_date, rec.birth_place, rec.person_uuid)
def update(self, other):
cur=get_db_connection().cursor(cursor_factory=psycopg2.extras.NamedTupleCursor) | random_line_split |
person.py |
__author__="vvladych"
__date__ ="$09.10.2014 23:01:15$"
from forecastmgmt.dao.db_connection import get_db_connection
import psycopg2.extras
from MDO import MDO
from person_name import PersonName
class Person(MDO):
sql_dict={"get_all":"SELECT sid, common_name, birth_date, birth_place, person_uuid FROM fc_person",
"insert":"INSERT INTO fc_person(common_name, birth_date, birth_place) VALUES(%s,%s,%s) RETURNING sid",
"delete":"DELETE FROM fc_person WHERE sid=%s",
"load":"SELECT sid, common_name, birth_date, birth_place, person_uuid FROM fc_person WHERE sid=%s",
"update_person":"UPDATE fc_person SET common_name=%s, birth_date=%s, birth_place=%s WHERE sid=%s"
}
def __init__(self, sid=None, common_name=None, birth_date=None, birth_place=None, person_uuid=None):
super(Person, self).__init__(Person.sql_dict,sid,person_uuid)
self.common_name=common_name
self.birth_date=birth_date
self.birth_place=birth_place
if sid!=None:
self.names=PersonName().get_all_for_foreign_key(self.sid)
else:
self.names=[]
def load_object_from_db(self,rec):
self.common_name=rec.common_name
self.birth_date=rec.birth_date
self.birth_place=rec.birth_place
self.uuid=rec.person_uuid
self.names=PersonName().get_all_for_foreign_key(self.sid)
def get_insert_data(self):
return (self.common_name,self.birth_date,self.birth_place)
def insert(self):
super(Person, self).insert()
for name in self.names:
name.person_sid=self.sid
name.insert()
get_db_connection().commit()
def add_name(self, person_name_sid, person_name_role, person_sid, namepart_list):
self.names.append(PersonName(person_name_sid, person_name_role, person_sid, namepart_list))
def fabric_method(self,rec):
|
def update(self, other):
cur=get_db_connection().cursor(cursor_factory=psycopg2.extras.NamedTupleCursor)
data=(other.common_name, other.birth_date, other.birth_place, self.sid)
cur.execute(Person.sql_dict["update_person"],data)
cur.close()
# update person_names
# delete outdated person_names
for person_name in self.names:
if person_name not in other.names:
person_name.delete()
for person_name in other.names:
if person_name not in self.names:
person_name.insert()
get_db_connection().commit()
| return Person(rec.sid, rec.common_name, rec.birth_date, rec.birth_place, rec.person_uuid) | identifier_body |
expr-block.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license | // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![allow(dead_code)]
// Tests for standalone blocks as expressions
fn test_basic() { let rs: bool = { true }; assert!((rs)); }
struct RS { v1: isize, v2: isize }
fn test_rec() { let rs = { RS {v1: 10, v2: 20} }; assert_eq!(rs.v2, 20); }
fn test_filled_with_stuff() {
let rs = { let mut a = 0; while a < 10 { a += 1; } a };
assert_eq!(rs, 10);
}
pub fn main() { test_basic(); test_rec(); test_filled_with_stuff(); } | random_line_split | |
expr-block.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![allow(dead_code)]
// Tests for standalone blocks as expressions
fn test_basic() |
struct RS { v1: isize, v2: isize }
fn test_rec() { let rs = { RS {v1: 10, v2: 20} }; assert_eq!(rs.v2, 20); }
fn test_filled_with_stuff() {
let rs = { let mut a = 0; while a < 10 { a += 1; } a };
assert_eq!(rs, 10);
}
pub fn main() { test_basic(); test_rec(); test_filled_with_stuff(); }
| { let rs: bool = { true }; assert!((rs)); } | identifier_body |
expr-block.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![allow(dead_code)]
// Tests for standalone blocks as expressions
fn test_basic() { let rs: bool = { true }; assert!((rs)); }
struct RS { v1: isize, v2: isize }
fn test_rec() { let rs = { RS {v1: 10, v2: 20} }; assert_eq!(rs.v2, 20); }
fn test_filled_with_stuff() {
let rs = { let mut a = 0; while a < 10 { a += 1; } a };
assert_eq!(rs, 10);
}
pub fn | () { test_basic(); test_rec(); test_filled_with_stuff(); }
| main | identifier_name |
cfg.py | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Control flow graph analysis.
Given a Python AST we construct a control flow graph, with edges both to the
next and previous statements (so it can easily walk the graph both ways). Its
nodes contain the AST of the statements. It can then perform forward or backward
analysis on this CFG.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from collections import namedtuple
import functools
import operator
import gast
from tensorflow.contrib.autograph.pyct import anno
from tensorflow.contrib.autograph.pyct.static_analysis import activity
class CfgNode(object):
"""A node in the CFG."""
__slots__ = ['next', 'value', 'prev']
def __init__(self, value):
self.next = set()
self.prev = set()
self.value = value
class Cfg(namedtuple('Cfg', ['entry', 'exit'])):
"""A Control Flow Graph.
Each statement is represented as a node. For control flow statements such
as conditionals and loops the conditional itself is a node which either
branches or cycles, respectively.
Attributes:
entry: The entry node, which contains the `gast.arguments` node of the
function definition.
exit: The exit node. This node is special because it has no value (i.e. no
corresponding AST node). This is because Python functions can have
multiple return statements.
"""
pass
class CfgBuilder(gast.NodeVisitor):
"""Construct a control flow graph.
Construct a CFG starting from a FunctionDef node.
Usage:
cfg_obj = CfgBuilder().build_cfg(fndef_node)
"""
def __init__(self):
# The current leaves of the CFG
self.current_leaves = []
# TODO(alexbw): generalize to break, return, continue, yield, etc.
# A stack of lists, tracking continue statements
self.continue_ = []
# A stack of lists tracking break nodes
self.break_ = []
def set_current_leaves(self, cfg_node):
"""Link this cfg_node to the current leaves.
This is the central function for building the CFG. It links the current
head cfg_nodes to the passed cfg_node. It then resets the head to the
passed cfg_node.
Args:
cfg_node: A CfgNode instance.
"""
for head in self.current_leaves:
head.next.add(cfg_node)
# While we're linking the CFG forward, add backlinks
cfg_node.prev.add(head)
self.current_leaves = [cfg_node]
def build_cfg(self, node):
"""Build a CFG for a function.
Implementation of building a CFG for dataflow analysis. See, e.g.:
https://www.seas.harvard.edu/courses/cs252/2011sp/slides/Lec02-Dataflow.pdf
Args:
node: A function definition the body of which to analyze.
Returns:
A CFG object.
Raises:
TypeError: If the input is not a function definition.
"""
if not isinstance(node, gast.FunctionDef):
raise TypeError('input must be a function definition')
entry_cfg_node = CfgNode(node.args)
self.current_leaves = [entry_cfg_node]
self.visit_statements(node.body)
exit_cfg_node = CfgNode(None)
self.set_current_leaves(exit_cfg_node)
return Cfg(entry_cfg_node, exit_cfg_node)
def visit_statements(self, nodes):
for node in nodes:
# Check for control flow
if isinstance(node, (gast.For, gast.While, gast.If, gast.Try, gast.Break,
gast.Continue, gast.With)):
self.visit(node)
else:
expr = CfgNode(node)
self.set_current_leaves(expr)
def generic_visit(self, node):
raise ValueError('unknown control flow')
def visit_If(self, node):
# TODO(alexbw): change this to use immutable tuples instead of lists
# The current head will hold the conditional
test = CfgNode(node.test)
self.set_current_leaves(test)
# Handle the body
self.visit_statements(node.body)
body_exit = self.current_leaves
self.current_leaves = [test]
# Handle the orelse
self.visit_statements(node.orelse)
self.current_leaves.extend(body_exit)
def visit_While(self, node):
test = CfgNode(node.test)
self.set_current_leaves(test)
# Start a new level of nesting
self.break_.append([])
self.continue_.append([])
# Handle the body
self.visit_statements(node.body)
body_exit = self.current_leaves
self.current_leaves.extend(self.continue_.pop())
self.set_current_leaves(test)
# Handle the orelse
self.visit_statements(node.orelse)
# The break statements and the test go to the next node
self.current_leaves.extend(self.break_.pop())
# Body and orelse statements can reach out of the loop
self.current_leaves.extend(body_exit)
def visit_For(self, node):
iter_ = CfgNode(node.iter)
self.set_current_leaves(iter_)
self.break_.append([])
self.continue_.append([])
self.visit_statements(node.body)
body_exit = self.current_leaves
self.current_leaves.extend(self.continue_.pop())
self.set_current_leaves(iter_)
# Handle the orelse
self.visit_statements(node.orelse)
# The break statements and the test go to the next node
self.current_leaves.extend(self.break_.pop())
# Body and orelse statements can reach out of the loop
self.current_leaves.extend(body_exit)
def | (self, node):
self.break_[-1].extend(self.current_leaves)
self.current_leaves[:] = []
def visit_Continue(self, node):
self.continue_[-1].extend(self.current_leaves)
self.current_leaves[:] = []
def visit_Try(self, node):
self.visit_statements(node.body)
body = self.current_leaves
handlers = []
for handler in node.handlers:
self.current_leaves = body[:]
self.visit_statements(handler.body)
handlers.extend(self.current_leaves)
self.current_leaves = body
self.visit_statements(node.orelse)
self.current_leaves = handlers + self.current_leaves
self.visit_statements(node.finalbody)
def visit_With(self, node):
for item in node.items:
self.set_current_leaves(CfgNode(item))
self.visit_statements(node.body)
# TODO(alexbw): once CFG analysis occurs at a block level,
# this extra class will not be necessary
class PropagateAnalysis(gast.NodeVisitor):
"""Port analysis annotations from statements to their enclosing blocks."""
def __init__(self, analysis):
self.transfer_fn = analysis.transfer_fn
self.in_label = analysis.in_label
self.out_label = analysis.out_label
super(PropagateAnalysis, self).__init__()
def visit_If(self, node):
# Depth-first.
self.generic_visit(node)
incoming = anno.getanno(node.body[0], self.in_label)
incoming |= anno.getanno(node.test, self.in_label)
outgoing = anno.getanno(node.body[-1], self.out_label)
outgoing |= anno.getanno(node.test, self.out_label)
if node.orelse:
orelse_outgoing = anno.getanno(node.orelse[-1], self.out_label)
outgoing = self.transfer_fn(outgoing, orelse_outgoing)
anno.setanno(node, self.in_label, incoming)
anno.setanno(node, self.out_label, outgoing)
def visit_For(self, node):
self.generic_visit(node)
incoming = set(anno.getanno(node.body[0], self.in_label))
incoming -= set((anno.getanno(node.target, anno.Basic.QN),))
outgoing = anno.getanno(node.body[-1], self.out_label)
if node.orelse:
orelse_outgoing = anno.getanno(node.orelse[-1], self.out_label)
outgoing = self.transfer_fn(outgoing, orelse_outgoing)
anno.setanno(node, self.in_label, frozenset(incoming))
anno.setanno(node, self.out_label, outgoing)
def visit_While(self, node):
self.generic_visit(node)
incoming = anno.getanno(node.body[0], self.in_label)
incoming |= anno.getanno(node.test, self.in_label)
outgoing = anno.getanno(node.body[-1], self.out_label)
if node.orelse:
orelse_outgoing = anno.getanno(node.orelse[-1], self.out_label)
outgoing = self.transfer_fn(outgoing, orelse_outgoing)
anno.setanno(node, self.in_label, incoming)
anno.setanno(node, self.out_label, outgoing)
def visit_With(self, node):
self.generic_visit(node)
incoming = anno.getanno(node.body[0], self.in_label)
for item in node.items:
incoming |= anno.getanno(item, self.in_label)
outgoing = anno.getanno(node.body[-1], self.out_label)
anno.setanno(node, self.in_label, incoming)
anno.setanno(node, self.out_label, outgoing)
# TODO(alexbw): Abstract the CFG walking machinery into a superclass
# which is parameterized on which fields it selects when walking.
# TODO(alexbw): Abstract the application of dataflow analysis
class Forward(object):
"""Forward analysis on CFG.
Args:
label: A name for this analysis e.g. 'active' for activity analysis. The AST
nodes in the CFG will be given annotations 'name_in', 'name_out',
'name_gen' and 'name_kill' which contain the incoming values, outgoing
values, values generated by the statement, and values deleted by the
statement respectively.
transfer_fn: Either the AND or OR operator. If the AND operator is used it
turns into forward must analysis (i.e. a value will only be carried
forward if it appears on all incoming paths). The OR operator means that
forward may analysis is done (i.e. the union of incoming values will be
taken).
"""
def __init__(self, label, source_info, transfer_fn=operator.or_):
self.transfer_fn = transfer_fn
self.source_info = source_info
self.out_label = label + '_out'
self.in_label = label + '_in'
self.gen_label = label + '_gen'
self.kill_label = label + '_kill'
# TODO(alexbw): see if we can simplify by visiting breadth-first
def visit(self, node):
"""Depth-first walking the CFG, applying dataflow info propagation."""
# node.value is None only for the exit CfgNode.
if not node.value:
return
if anno.hasanno(node.value, self.out_label):
before = hash(anno.getanno(node.value, self.out_label))
else:
before = None
preds = [
anno.getanno(pred.value, self.out_label)
for pred in node.prev
if anno.hasanno(pred.value, self.out_label)
]
if preds:
incoming = functools.reduce(self.transfer_fn, preds[1:], preds[0])
else:
incoming = frozenset()
anno.setanno(node.value, self.in_label, incoming)
gen, kill = self.get_gen_kill(node, incoming)
anno.setanno(node.value, self.gen_label, gen)
anno.setanno(node.value, self.kill_label, kill)
anno.setanno(node.value, self.out_label, (incoming - kill) | gen)
if hash(anno.getanno(node.value, self.out_label)) != before:
for succ in node.next:
self.visit(succ)
def get_gen_kill(self, cfg_node, incoming):
"""Calculate Gen and Kill properties of a CFG node in dataflow analysis.
A function which takes the CFG node as well as a set of incoming
values. It must return a set of newly generated values by the statement as
well as a set of deleted (killed) values.
Args:
cfg_node: A CfgNode instance.
incoming:
"""
raise NotImplementedError()
class Backward(Forward):
"""Backward analysis on CFG."""
def visit(self, cfg_node):
# cfg_node.value is None for the exit node, which will be visited only once
if not cfg_node.value:
for pred in cfg_node.prev:
self.visit(pred)
return
if anno.hasanno(cfg_node.value, self.in_label):
before = hash(anno.getanno(cfg_node.value, self.in_label))
else:
before = None
succs = [
anno.getanno(succ.value, self.in_label)
for succ in cfg_node.next
if anno.hasanno(succ.value, self.in_label)
]
if succs:
incoming = functools.reduce(self.transfer_fn, succs[1:], succs[0])
else:
incoming = frozenset()
anno.setanno(cfg_node.value, self.out_label, incoming)
gen, kill = self.get_gen_kill(cfg_node, incoming)
anno.setanno(cfg_node.value, self.gen_label, gen)
anno.setanno(cfg_node.value, self.kill_label, kill)
anno.setanno(cfg_node.value, self.in_label, (incoming - kill) | gen)
if hash(anno.getanno(cfg_node.value, self.in_label)) != before:
for pred in cfg_node.prev:
self.visit(pred)
def run_analyses(node, analyses):
"""Perform dataflow analysis on all functions within an AST.
Args:
node: An AST node on which to run dataflow analysis.
analyses: Either an instance of the Forward or Backward dataflow analysis
class, or a list or tuple of them.
Returns:
node: The node, but now with annotations on the AST nodes containing the
results of the dataflow analyses.
"""
if not isinstance(analyses, (tuple, list)):
analyses = (analyses,)
for analysis in analyses:
if not isinstance(analysis, (Forward, Backward)):
raise TypeError('not a valid forward analysis object')
for child_node in gast.walk(node):
if isinstance(child_node, gast.FunctionDef):
cfg_obj = CfgBuilder().build_cfg(child_node)
for analysis in analyses:
if isinstance(analysis, Backward):
analysis.visit(cfg_obj.exit)
elif isinstance(analysis, Forward):
analysis.visit(cfg_obj.entry)
for analysis in analyses:
PropagateAnalysis(analysis).visit(node)
return node
class Liveness(Backward):
"""Perform a liveness analysis.
Each statement is annotated with a set of variables that may be used
later in the program.
"""
def __init__(self, source_info):
super(Liveness, self).__init__('live', source_info)
def get_gen_kill(self, node, _):
# A variable's parents are live if it is live
# e.g. x is live if x.y is live. This means gen needs to return
# all parents of a variable (if it's an Attribute or Subscript).
# This doesn't apply to kill (e.g. del x.y doesn't affect liveness of x)
gen = activity.get_read(node.value, self.source_info)
gen = functools.reduce(lambda left, right: left | right.support_set, gen,
gen)
kill = activity.get_updated(node.value, self.source_info)
return gen, kill
class ReachingDefinitions(Forward):
"""Perform reaching definition analysis.
Each statement is annotated with a set of (variable, definition) pairs.
"""
def __init__(self, source_info):
super(ReachingDefinitions, self).__init__('definitions', source_info)
def get_gen_kill(self, node, incoming):
definitions = activity.get_updated(node.value, self.source_info)
gen = frozenset((id_, node.value) for id_ in definitions)
kill = frozenset(def_ for def_ in incoming if def_[0] in definitions)
return gen, kill
class Defined(Forward):
"""Perform defined variable analysis.
Each statement is annotated with a set of variables which are guaranteed to
be defined at that point.
"""
def __init__(self, source_info):
super(Defined, self).__init__(
'defined', source_info, transfer_fn=operator.and_)
def get_gen_kill(self, node, _):
gen = activity.get_updated(node.value, self.source_info)
return gen, frozenset()
| visit_Break | identifier_name |
cfg.py | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Control flow graph analysis.
Given a Python AST we construct a control flow graph, with edges both to the
next and previous statements (so it can easily walk the graph both ways). Its
nodes contain the AST of the statements. It can then perform forward or backward
analysis on this CFG.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from collections import namedtuple
import functools
import operator
import gast
from tensorflow.contrib.autograph.pyct import anno
from tensorflow.contrib.autograph.pyct.static_analysis import activity
class CfgNode(object):
"""A node in the CFG."""
__slots__ = ['next', 'value', 'prev']
def __init__(self, value):
self.next = set()
self.prev = set()
self.value = value
class Cfg(namedtuple('Cfg', ['entry', 'exit'])):
"""A Control Flow Graph.
Each statement is represented as a node. For control flow statements such
as conditionals and loops the conditional itself is a node which either
branches or cycles, respectively.
Attributes:
entry: The entry node, which contains the `gast.arguments` node of the
function definition.
exit: The exit node. This node is special because it has no value (i.e. no
corresponding AST node). This is because Python functions can have
multiple return statements.
"""
pass
class CfgBuilder(gast.NodeVisitor):
"""Construct a control flow graph.
Construct a CFG starting from a FunctionDef node.
Usage:
cfg_obj = CfgBuilder().build_cfg(fndef_node)
"""
def __init__(self):
# The current leaves of the CFG
self.current_leaves = []
# TODO(alexbw): generalize to break, return, continue, yield, etc.
# A stack of lists, tracking continue statements
self.continue_ = []
# A stack of lists tracking break nodes
self.break_ = []
def set_current_leaves(self, cfg_node):
"""Link this cfg_node to the current leaves.
This is the central function for building the CFG. It links the current
head cfg_nodes to the passed cfg_node. It then resets the head to the
passed cfg_node.
Args:
cfg_node: A CfgNode instance.
"""
for head in self.current_leaves:
head.next.add(cfg_node)
# While we're linking the CFG forward, add backlinks
cfg_node.prev.add(head)
self.current_leaves = [cfg_node]
def build_cfg(self, node):
"""Build a CFG for a function.
Implementation of building a CFG for dataflow analysis. See, e.g.:
https://www.seas.harvard.edu/courses/cs252/2011sp/slides/Lec02-Dataflow.pdf
Args:
node: A function definition the body of which to analyze.
Returns:
A CFG object.
Raises:
TypeError: If the input is not a function definition.
"""
if not isinstance(node, gast.FunctionDef):
raise TypeError('input must be a function definition')
entry_cfg_node = CfgNode(node.args)
self.current_leaves = [entry_cfg_node]
self.visit_statements(node.body)
exit_cfg_node = CfgNode(None)
self.set_current_leaves(exit_cfg_node)
return Cfg(entry_cfg_node, exit_cfg_node)
def visit_statements(self, nodes):
for node in nodes:
# Check for control flow
if isinstance(node, (gast.For, gast.While, gast.If, gast.Try, gast.Break,
gast.Continue, gast.With)):
self.visit(node)
else:
expr = CfgNode(node)
self.set_current_leaves(expr)
def generic_visit(self, node):
raise ValueError('unknown control flow')
def visit_If(self, node):
# TODO(alexbw): change this to use immutable tuples instead of lists
# The current head will hold the conditional
test = CfgNode(node.test)
self.set_current_leaves(test)
# Handle the body
self.visit_statements(node.body)
body_exit = self.current_leaves
self.current_leaves = [test]
# Handle the orelse
self.visit_statements(node.orelse)
self.current_leaves.extend(body_exit)
def visit_While(self, node):
test = CfgNode(node.test)
self.set_current_leaves(test)
# Start a new level of nesting
self.break_.append([])
self.continue_.append([])
# Handle the body
self.visit_statements(node.body)
body_exit = self.current_leaves
self.current_leaves.extend(self.continue_.pop())
self.set_current_leaves(test)
# Handle the orelse
self.visit_statements(node.orelse)
# The break statements and the test go to the next node
self.current_leaves.extend(self.break_.pop())
# Body and orelse statements can reach out of the loop
self.current_leaves.extend(body_exit)
def visit_For(self, node):
iter_ = CfgNode(node.iter)
self.set_current_leaves(iter_)
self.break_.append([])
self.continue_.append([])
self.visit_statements(node.body)
body_exit = self.current_leaves
self.current_leaves.extend(self.continue_.pop())
self.set_current_leaves(iter_)
# Handle the orelse
self.visit_statements(node.orelse)
# The break statements and the test go to the next node
self.current_leaves.extend(self.break_.pop())
# Body and orelse statements can reach out of the loop
self.current_leaves.extend(body_exit)
def visit_Break(self, node):
self.break_[-1].extend(self.current_leaves)
self.current_leaves[:] = []
def visit_Continue(self, node):
self.continue_[-1].extend(self.current_leaves)
self.current_leaves[:] = []
def visit_Try(self, node):
self.visit_statements(node.body)
body = self.current_leaves
handlers = []
for handler in node.handlers:
self.current_leaves = body[:]
self.visit_statements(handler.body)
handlers.extend(self.current_leaves)
self.current_leaves = body
self.visit_statements(node.orelse)
self.current_leaves = handlers + self.current_leaves
self.visit_statements(node.finalbody)
def visit_With(self, node):
for item in node.items:
self.set_current_leaves(CfgNode(item))
self.visit_statements(node.body)
# TODO(alexbw): once CFG analysis occurs at a block level,
# this extra class will not be necessary
class PropagateAnalysis(gast.NodeVisitor):
"""Port analysis annotations from statements to their enclosing blocks."""
def __init__(self, analysis):
self.transfer_fn = analysis.transfer_fn
self.in_label = analysis.in_label
self.out_label = analysis.out_label
super(PropagateAnalysis, self).__init__()
def visit_If(self, node):
# Depth-first.
self.generic_visit(node)
incoming = anno.getanno(node.body[0], self.in_label)
incoming |= anno.getanno(node.test, self.in_label)
outgoing = anno.getanno(node.body[-1], self.out_label)
outgoing |= anno.getanno(node.test, self.out_label)
if node.orelse:
orelse_outgoing = anno.getanno(node.orelse[-1], self.out_label)
outgoing = self.transfer_fn(outgoing, orelse_outgoing)
anno.setanno(node, self.in_label, incoming)
anno.setanno(node, self.out_label, outgoing)
def visit_For(self, node):
self.generic_visit(node)
incoming = set(anno.getanno(node.body[0], self.in_label))
incoming -= set((anno.getanno(node.target, anno.Basic.QN),))
outgoing = anno.getanno(node.body[-1], self.out_label)
if node.orelse:
orelse_outgoing = anno.getanno(node.orelse[-1], self.out_label)
outgoing = self.transfer_fn(outgoing, orelse_outgoing)
anno.setanno(node, self.in_label, frozenset(incoming))
anno.setanno(node, self.out_label, outgoing)
def visit_While(self, node):
self.generic_visit(node)
incoming = anno.getanno(node.body[0], self.in_label)
incoming |= anno.getanno(node.test, self.in_label)
outgoing = anno.getanno(node.body[-1], self.out_label)
if node.orelse:
orelse_outgoing = anno.getanno(node.orelse[-1], self.out_label)
outgoing = self.transfer_fn(outgoing, orelse_outgoing)
anno.setanno(node, self.in_label, incoming)
anno.setanno(node, self.out_label, outgoing)
def visit_With(self, node):
self.generic_visit(node)
incoming = anno.getanno(node.body[0], self.in_label)
for item in node.items:
incoming |= anno.getanno(item, self.in_label)
outgoing = anno.getanno(node.body[-1], self.out_label)
anno.setanno(node, self.in_label, incoming)
anno.setanno(node, self.out_label, outgoing)
# TODO(alexbw): Abstract the CFG walking machinery into a superclass
# which is parameterized on which fields it selects when walking.
# TODO(alexbw): Abstract the application of dataflow analysis
class Forward(object):
"""Forward analysis on CFG.
Args:
label: A name for this analysis e.g. 'active' for activity analysis. The AST
nodes in the CFG will be given annotations 'name_in', 'name_out',
'name_gen' and 'name_kill' which contain the incoming values, outgoing
values, values generated by the statement, and values deleted by the
statement respectively.
transfer_fn: Either the AND or OR operator. If the AND operator is used it
turns into forward must analysis (i.e. a value will only be carried
forward if it appears on all incoming paths). The OR operator means that
forward may analysis is done (i.e. the union of incoming values will be
taken).
"""
def __init__(self, label, source_info, transfer_fn=operator.or_):
self.transfer_fn = transfer_fn
self.source_info = source_info
self.out_label = label + '_out'
self.in_label = label + '_in'
self.gen_label = label + '_gen'
self.kill_label = label + '_kill'
# TODO(alexbw): see if we can simplify by visiting breadth-first
def visit(self, node):
|
def get_gen_kill(self, cfg_node, incoming):
"""Calculate Gen and Kill properties of a CFG node in dataflow analysis.
A function which takes the CFG node as well as a set of incoming
values. It must return a set of newly generated values by the statement as
well as a set of deleted (killed) values.
Args:
cfg_node: A CfgNode instance.
incoming:
"""
raise NotImplementedError()
class Backward(Forward):
"""Backward analysis on CFG."""
def visit(self, cfg_node):
# cfg_node.value is None for the exit node, which will be visited only once
if not cfg_node.value:
for pred in cfg_node.prev:
self.visit(pred)
return
if anno.hasanno(cfg_node.value, self.in_label):
before = hash(anno.getanno(cfg_node.value, self.in_label))
else:
before = None
succs = [
anno.getanno(succ.value, self.in_label)
for succ in cfg_node.next
if anno.hasanno(succ.value, self.in_label)
]
if succs:
incoming = functools.reduce(self.transfer_fn, succs[1:], succs[0])
else:
incoming = frozenset()
anno.setanno(cfg_node.value, self.out_label, incoming)
gen, kill = self.get_gen_kill(cfg_node, incoming)
anno.setanno(cfg_node.value, self.gen_label, gen)
anno.setanno(cfg_node.value, self.kill_label, kill)
anno.setanno(cfg_node.value, self.in_label, (incoming - kill) | gen)
if hash(anno.getanno(cfg_node.value, self.in_label)) != before:
for pred in cfg_node.prev:
self.visit(pred)
def run_analyses(node, analyses):
"""Perform dataflow analysis on all functions within an AST.
Args:
node: An AST node on which to run dataflow analysis.
analyses: Either an instance of the Forward or Backward dataflow analysis
class, or a list or tuple of them.
Returns:
node: The node, but now with annotations on the AST nodes containing the
results of the dataflow analyses.
"""
if not isinstance(analyses, (tuple, list)):
analyses = (analyses,)
for analysis in analyses:
if not isinstance(analysis, (Forward, Backward)):
raise TypeError('not a valid forward analysis object')
for child_node in gast.walk(node):
if isinstance(child_node, gast.FunctionDef):
cfg_obj = CfgBuilder().build_cfg(child_node)
for analysis in analyses:
if isinstance(analysis, Backward):
analysis.visit(cfg_obj.exit)
elif isinstance(analysis, Forward):
analysis.visit(cfg_obj.entry)
for analysis in analyses:
PropagateAnalysis(analysis).visit(node)
return node
class Liveness(Backward):
"""Perform a liveness analysis.
Each statement is annotated with a set of variables that may be used
later in the program.
"""
def __init__(self, source_info):
super(Liveness, self).__init__('live', source_info)
def get_gen_kill(self, node, _):
# A variable's parents are live if it is live
# e.g. x is live if x.y is live. This means gen needs to return
# all parents of a variable (if it's an Attribute or Subscript).
# This doesn't apply to kill (e.g. del x.y doesn't affect liveness of x)
gen = activity.get_read(node.value, self.source_info)
gen = functools.reduce(lambda left, right: left | right.support_set, gen,
gen)
kill = activity.get_updated(node.value, self.source_info)
return gen, kill
class ReachingDefinitions(Forward):
"""Perform reaching definition analysis.
Each statement is annotated with a set of (variable, definition) pairs.
"""
def __init__(self, source_info):
super(ReachingDefinitions, self).__init__('definitions', source_info)
def get_gen_kill(self, node, incoming):
definitions = activity.get_updated(node.value, self.source_info)
gen = frozenset((id_, node.value) for id_ in definitions)
kill = frozenset(def_ for def_ in incoming if def_[0] in definitions)
return gen, kill
class Defined(Forward):
"""Perform defined variable analysis.
Each statement is annotated with a set of variables which are guaranteed to
be defined at that point.
"""
def __init__(self, source_info):
super(Defined, self).__init__(
'defined', source_info, transfer_fn=operator.and_)
def get_gen_kill(self, node, _):
gen = activity.get_updated(node.value, self.source_info)
return gen, frozenset()
| """Depth-first walking the CFG, applying dataflow info propagation."""
# node.value is None only for the exit CfgNode.
if not node.value:
return
if anno.hasanno(node.value, self.out_label):
before = hash(anno.getanno(node.value, self.out_label))
else:
before = None
preds = [
anno.getanno(pred.value, self.out_label)
for pred in node.prev
if anno.hasanno(pred.value, self.out_label)
]
if preds:
incoming = functools.reduce(self.transfer_fn, preds[1:], preds[0])
else:
incoming = frozenset()
anno.setanno(node.value, self.in_label, incoming)
gen, kill = self.get_gen_kill(node, incoming)
anno.setanno(node.value, self.gen_label, gen)
anno.setanno(node.value, self.kill_label, kill)
anno.setanno(node.value, self.out_label, (incoming - kill) | gen)
if hash(anno.getanno(node.value, self.out_label)) != before:
for succ in node.next:
self.visit(succ) | identifier_body |
cfg.py | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Control flow graph analysis.
Given a Python AST we construct a control flow graph, with edges both to the
next and previous statements (so it can easily walk the graph both ways). Its
nodes contain the AST of the statements. It can then perform forward or backward
analysis on this CFG.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from collections import namedtuple
import functools
import operator
import gast
from tensorflow.contrib.autograph.pyct import anno
from tensorflow.contrib.autograph.pyct.static_analysis import activity
class CfgNode(object):
"""A node in the CFG."""
__slots__ = ['next', 'value', 'prev']
def __init__(self, value):
self.next = set()
self.prev = set()
self.value = value
class Cfg(namedtuple('Cfg', ['entry', 'exit'])):
"""A Control Flow Graph.
Each statement is represented as a node. For control flow statements such
as conditionals and loops the conditional itself is a node which either
branches or cycles, respectively.
Attributes:
entry: The entry node, which contains the `gast.arguments` node of the
function definition.
exit: The exit node. This node is special because it has no value (i.e. no
corresponding AST node). This is because Python functions can have
multiple return statements.
"""
pass
class CfgBuilder(gast.NodeVisitor):
"""Construct a control flow graph.
Construct a CFG starting from a FunctionDef node.
Usage:
cfg_obj = CfgBuilder().build_cfg(fndef_node)
"""
def __init__(self):
# The current leaves of the CFG
self.current_leaves = []
# TODO(alexbw): generalize to break, return, continue, yield, etc.
# A stack of lists, tracking continue statements
self.continue_ = []
# A stack of lists tracking break nodes
self.break_ = []
def set_current_leaves(self, cfg_node):
"""Link this cfg_node to the current leaves.
This is the central function for building the CFG. It links the current
head cfg_nodes to the passed cfg_node. It then resets the head to the
passed cfg_node.
Args:
cfg_node: A CfgNode instance.
"""
for head in self.current_leaves:
head.next.add(cfg_node)
# While we're linking the CFG forward, add backlinks
cfg_node.prev.add(head)
self.current_leaves = [cfg_node]
def build_cfg(self, node):
"""Build a CFG for a function.
Implementation of building a CFG for dataflow analysis. See, e.g.:
https://www.seas.harvard.edu/courses/cs252/2011sp/slides/Lec02-Dataflow.pdf
Args:
node: A function definition the body of which to analyze.
Returns:
A CFG object.
Raises:
TypeError: If the input is not a function definition.
"""
if not isinstance(node, gast.FunctionDef):
raise TypeError('input must be a function definition')
entry_cfg_node = CfgNode(node.args)
self.current_leaves = [entry_cfg_node]
self.visit_statements(node.body)
exit_cfg_node = CfgNode(None)
self.set_current_leaves(exit_cfg_node)
return Cfg(entry_cfg_node, exit_cfg_node)
def visit_statements(self, nodes):
for node in nodes:
# Check for control flow
if isinstance(node, (gast.For, gast.While, gast.If, gast.Try, gast.Break,
gast.Continue, gast.With)):
self.visit(node)
else:
expr = CfgNode(node)
self.set_current_leaves(expr)
def generic_visit(self, node):
raise ValueError('unknown control flow')
def visit_If(self, node):
# TODO(alexbw): change this to use immutable tuples instead of lists
# The current head will hold the conditional
test = CfgNode(node.test)
self.set_current_leaves(test)
# Handle the body
self.visit_statements(node.body)
body_exit = self.current_leaves
self.current_leaves = [test]
# Handle the orelse
self.visit_statements(node.orelse)
self.current_leaves.extend(body_exit)
def visit_While(self, node):
test = CfgNode(node.test)
self.set_current_leaves(test)
# Start a new level of nesting
self.break_.append([])
self.continue_.append([])
# Handle the body
self.visit_statements(node.body)
body_exit = self.current_leaves
self.current_leaves.extend(self.continue_.pop())
self.set_current_leaves(test)
# Handle the orelse
self.visit_statements(node.orelse)
# The break statements and the test go to the next node
self.current_leaves.extend(self.break_.pop())
# Body and orelse statements can reach out of the loop
self.current_leaves.extend(body_exit)
def visit_For(self, node):
iter_ = CfgNode(node.iter)
self.set_current_leaves(iter_)
self.break_.append([])
self.continue_.append([])
self.visit_statements(node.body)
body_exit = self.current_leaves
self.current_leaves.extend(self.continue_.pop())
self.set_current_leaves(iter_)
# Handle the orelse
self.visit_statements(node.orelse)
# The break statements and the test go to the next node
self.current_leaves.extend(self.break_.pop())
# Body and orelse statements can reach out of the loop
self.current_leaves.extend(body_exit)
def visit_Break(self, node):
self.break_[-1].extend(self.current_leaves)
self.current_leaves[:] = []
def visit_Continue(self, node):
self.continue_[-1].extend(self.current_leaves)
self.current_leaves[:] = []
def visit_Try(self, node):
self.visit_statements(node.body)
body = self.current_leaves
handlers = []
for handler in node.handlers:
|
self.current_leaves = body
self.visit_statements(node.orelse)
self.current_leaves = handlers + self.current_leaves
self.visit_statements(node.finalbody)
def visit_With(self, node):
for item in node.items:
self.set_current_leaves(CfgNode(item))
self.visit_statements(node.body)
# TODO(alexbw): once CFG analysis occurs at a block level,
# this extra class will not be necessary
class PropagateAnalysis(gast.NodeVisitor):
"""Port analysis annotations from statements to their enclosing blocks."""
def __init__(self, analysis):
self.transfer_fn = analysis.transfer_fn
self.in_label = analysis.in_label
self.out_label = analysis.out_label
super(PropagateAnalysis, self).__init__()
def visit_If(self, node):
# Depth-first.
self.generic_visit(node)
incoming = anno.getanno(node.body[0], self.in_label)
incoming |= anno.getanno(node.test, self.in_label)
outgoing = anno.getanno(node.body[-1], self.out_label)
outgoing |= anno.getanno(node.test, self.out_label)
if node.orelse:
orelse_outgoing = anno.getanno(node.orelse[-1], self.out_label)
outgoing = self.transfer_fn(outgoing, orelse_outgoing)
anno.setanno(node, self.in_label, incoming)
anno.setanno(node, self.out_label, outgoing)
def visit_For(self, node):
self.generic_visit(node)
incoming = set(anno.getanno(node.body[0], self.in_label))
incoming -= set((anno.getanno(node.target, anno.Basic.QN),))
outgoing = anno.getanno(node.body[-1], self.out_label)
if node.orelse:
orelse_outgoing = anno.getanno(node.orelse[-1], self.out_label)
outgoing = self.transfer_fn(outgoing, orelse_outgoing)
anno.setanno(node, self.in_label, frozenset(incoming))
anno.setanno(node, self.out_label, outgoing)
def visit_While(self, node):
self.generic_visit(node)
incoming = anno.getanno(node.body[0], self.in_label)
incoming |= anno.getanno(node.test, self.in_label)
outgoing = anno.getanno(node.body[-1], self.out_label)
if node.orelse:
orelse_outgoing = anno.getanno(node.orelse[-1], self.out_label)
outgoing = self.transfer_fn(outgoing, orelse_outgoing)
anno.setanno(node, self.in_label, incoming)
anno.setanno(node, self.out_label, outgoing)
def visit_With(self, node):
self.generic_visit(node)
incoming = anno.getanno(node.body[0], self.in_label)
for item in node.items:
incoming |= anno.getanno(item, self.in_label)
outgoing = anno.getanno(node.body[-1], self.out_label)
anno.setanno(node, self.in_label, incoming)
anno.setanno(node, self.out_label, outgoing)
# TODO(alexbw): Abstract the CFG walking machinery into a superclass
# which is parameterized on which fields it selects when walking.
# TODO(alexbw): Abstract the application of dataflow analysis
class Forward(object):
"""Forward analysis on CFG.
Args:
label: A name for this analysis e.g. 'active' for activity analysis. The AST
nodes in the CFG will be given annotations 'name_in', 'name_out',
'name_gen' and 'name_kill' which contain the incoming values, outgoing
values, values generated by the statement, and values deleted by the
statement respectively.
transfer_fn: Either the AND or OR operator. If the AND operator is used it
turns into forward must analysis (i.e. a value will only be carried
forward if it appears on all incoming paths). The OR operator means that
forward may analysis is done (i.e. the union of incoming values will be
taken).
"""
def __init__(self, label, source_info, transfer_fn=operator.or_):
self.transfer_fn = transfer_fn
self.source_info = source_info
self.out_label = label + '_out'
self.in_label = label + '_in'
self.gen_label = label + '_gen'
self.kill_label = label + '_kill'
# TODO(alexbw): see if we can simplify by visiting breadth-first
def visit(self, node):
"""Depth-first walking the CFG, applying dataflow info propagation."""
# node.value is None only for the exit CfgNode.
if not node.value:
return
if anno.hasanno(node.value, self.out_label):
before = hash(anno.getanno(node.value, self.out_label))
else:
before = None
preds = [
anno.getanno(pred.value, self.out_label)
for pred in node.prev
if anno.hasanno(pred.value, self.out_label)
]
if preds:
incoming = functools.reduce(self.transfer_fn, preds[1:], preds[0])
else:
incoming = frozenset()
anno.setanno(node.value, self.in_label, incoming)
gen, kill = self.get_gen_kill(node, incoming)
anno.setanno(node.value, self.gen_label, gen)
anno.setanno(node.value, self.kill_label, kill)
anno.setanno(node.value, self.out_label, (incoming - kill) | gen)
if hash(anno.getanno(node.value, self.out_label)) != before:
for succ in node.next:
self.visit(succ)
def get_gen_kill(self, cfg_node, incoming):
"""Calculate Gen and Kill properties of a CFG node in dataflow analysis.
A function which takes the CFG node as well as a set of incoming
values. It must return a set of newly generated values by the statement as
well as a set of deleted (killed) values.
Args:
cfg_node: A CfgNode instance.
incoming:
"""
raise NotImplementedError()
class Backward(Forward):
"""Backward analysis on CFG."""
def visit(self, cfg_node):
# cfg_node.value is None for the exit node, which will be visited only once
if not cfg_node.value:
for pred in cfg_node.prev:
self.visit(pred)
return
if anno.hasanno(cfg_node.value, self.in_label):
before = hash(anno.getanno(cfg_node.value, self.in_label))
else:
before = None
succs = [
anno.getanno(succ.value, self.in_label)
for succ in cfg_node.next
if anno.hasanno(succ.value, self.in_label)
]
if succs:
incoming = functools.reduce(self.transfer_fn, succs[1:], succs[0])
else:
incoming = frozenset()
anno.setanno(cfg_node.value, self.out_label, incoming)
gen, kill = self.get_gen_kill(cfg_node, incoming)
anno.setanno(cfg_node.value, self.gen_label, gen)
anno.setanno(cfg_node.value, self.kill_label, kill)
anno.setanno(cfg_node.value, self.in_label, (incoming - kill) | gen)
if hash(anno.getanno(cfg_node.value, self.in_label)) != before:
for pred in cfg_node.prev:
self.visit(pred)
def run_analyses(node, analyses):
"""Perform dataflow analysis on all functions within an AST.
Args:
node: An AST node on which to run dataflow analysis.
analyses: Either an instance of the Forward or Backward dataflow analysis
class, or a list or tuple of them.
Returns:
node: The node, but now with annotations on the AST nodes containing the
results of the dataflow analyses.
"""
if not isinstance(analyses, (tuple, list)):
analyses = (analyses,)
for analysis in analyses:
if not isinstance(analysis, (Forward, Backward)):
raise TypeError('not a valid forward analysis object')
for child_node in gast.walk(node):
if isinstance(child_node, gast.FunctionDef):
cfg_obj = CfgBuilder().build_cfg(child_node)
for analysis in analyses:
if isinstance(analysis, Backward):
analysis.visit(cfg_obj.exit)
elif isinstance(analysis, Forward):
analysis.visit(cfg_obj.entry)
for analysis in analyses:
PropagateAnalysis(analysis).visit(node)
return node
class Liveness(Backward):
"""Perform a liveness analysis.
Each statement is annotated with a set of variables that may be used
later in the program.
"""
def __init__(self, source_info):
super(Liveness, self).__init__('live', source_info)
def get_gen_kill(self, node, _):
# A variable's parents are live if it is live
# e.g. x is live if x.y is live. This means gen needs to return
# all parents of a variable (if it's an Attribute or Subscript).
# This doesn't apply to kill (e.g. del x.y doesn't affect liveness of x)
gen = activity.get_read(node.value, self.source_info)
gen = functools.reduce(lambda left, right: left | right.support_set, gen,
gen)
kill = activity.get_updated(node.value, self.source_info)
return gen, kill
class ReachingDefinitions(Forward):
"""Perform reaching definition analysis.
Each statement is annotated with a set of (variable, definition) pairs.
"""
def __init__(self, source_info):
super(ReachingDefinitions, self).__init__('definitions', source_info)
def get_gen_kill(self, node, incoming):
definitions = activity.get_updated(node.value, self.source_info)
gen = frozenset((id_, node.value) for id_ in definitions)
kill = frozenset(def_ for def_ in incoming if def_[0] in definitions)
return gen, kill
class Defined(Forward):
"""Perform defined variable analysis.
Each statement is annotated with a set of variables which are guaranteed to
be defined at that point.
"""
def __init__(self, source_info):
super(Defined, self).__init__(
'defined', source_info, transfer_fn=operator.and_)
def get_gen_kill(self, node, _):
gen = activity.get_updated(node.value, self.source_info)
return gen, frozenset()
| self.current_leaves = body[:]
self.visit_statements(handler.body)
handlers.extend(self.current_leaves) | conditional_block |
cfg.py | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Control flow graph analysis.
Given a Python AST we construct a control flow graph, with edges both to the
next and previous statements (so it can easily walk the graph both ways). Its
nodes contain the AST of the statements. It can then perform forward or backward
analysis on this CFG.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from collections import namedtuple
import functools
import operator
import gast
from tensorflow.contrib.autograph.pyct import anno
from tensorflow.contrib.autograph.pyct.static_analysis import activity
class CfgNode(object):
"""A node in the CFG."""
__slots__ = ['next', 'value', 'prev']
def __init__(self, value):
self.next = set()
self.prev = set()
self.value = value
class Cfg(namedtuple('Cfg', ['entry', 'exit'])):
"""A Control Flow Graph.
Each statement is represented as a node. For control flow statements such
as conditionals and loops the conditional itself is a node which either
branches or cycles, respectively.
Attributes:
entry: The entry node, which contains the `gast.arguments` node of the
function definition.
exit: The exit node. This node is special because it has no value (i.e. no
corresponding AST node). This is because Python functions can have
multiple return statements.
"""
pass
class CfgBuilder(gast.NodeVisitor):
"""Construct a control flow graph.
Construct a CFG starting from a FunctionDef node.
Usage:
cfg_obj = CfgBuilder().build_cfg(fndef_node)
"""
def __init__(self):
# The current leaves of the CFG
self.current_leaves = []
# TODO(alexbw): generalize to break, return, continue, yield, etc.
# A stack of lists, tracking continue statements
self.continue_ = []
# A stack of lists tracking break nodes
self.break_ = []
def set_current_leaves(self, cfg_node):
"""Link this cfg_node to the current leaves.
This is the central function for building the CFG. It links the current
head cfg_nodes to the passed cfg_node. It then resets the head to the
passed cfg_node.
Args:
cfg_node: A CfgNode instance.
"""
for head in self.current_leaves:
head.next.add(cfg_node)
# While we're linking the CFG forward, add backlinks
cfg_node.prev.add(head)
self.current_leaves = [cfg_node]
def build_cfg(self, node):
"""Build a CFG for a function.
Implementation of building a CFG for dataflow analysis. See, e.g.:
https://www.seas.harvard.edu/courses/cs252/2011sp/slides/Lec02-Dataflow.pdf
Args:
node: A function definition the body of which to analyze.
Returns:
A CFG object.
Raises:
TypeError: If the input is not a function definition.
"""
if not isinstance(node, gast.FunctionDef):
raise TypeError('input must be a function definition')
entry_cfg_node = CfgNode(node.args)
self.current_leaves = [entry_cfg_node]
self.visit_statements(node.body)
exit_cfg_node = CfgNode(None)
self.set_current_leaves(exit_cfg_node)
return Cfg(entry_cfg_node, exit_cfg_node)
def visit_statements(self, nodes):
for node in nodes:
# Check for control flow
if isinstance(node, (gast.For, gast.While, gast.If, gast.Try, gast.Break,
gast.Continue, gast.With)):
self.visit(node)
else:
expr = CfgNode(node)
self.set_current_leaves(expr)
def generic_visit(self, node):
raise ValueError('unknown control flow')
def visit_If(self, node):
# TODO(alexbw): change this to use immutable tuples instead of lists
# The current head will hold the conditional
test = CfgNode(node.test)
self.set_current_leaves(test)
# Handle the body
self.visit_statements(node.body)
body_exit = self.current_leaves
self.current_leaves = [test]
# Handle the orelse
self.visit_statements(node.orelse)
self.current_leaves.extend(body_exit)
def visit_While(self, node):
test = CfgNode(node.test)
self.set_current_leaves(test)
# Start a new level of nesting
self.break_.append([])
self.continue_.append([])
# Handle the body
self.visit_statements(node.body)
body_exit = self.current_leaves
self.current_leaves.extend(self.continue_.pop())
self.set_current_leaves(test)
# Handle the orelse
self.visit_statements(node.orelse)
# The break statements and the test go to the next node
self.current_leaves.extend(self.break_.pop())
# Body and orelse statements can reach out of the loop
self.current_leaves.extend(body_exit)
def visit_For(self, node):
iter_ = CfgNode(node.iter)
self.set_current_leaves(iter_)
self.break_.append([])
self.continue_.append([])
self.visit_statements(node.body)
body_exit = self.current_leaves
self.current_leaves.extend(self.continue_.pop())
self.set_current_leaves(iter_)
# Handle the orelse
self.visit_statements(node.orelse)
# The break statements and the test go to the next node
self.current_leaves.extend(self.break_.pop())
# Body and orelse statements can reach out of the loop
self.current_leaves.extend(body_exit)
def visit_Break(self, node):
self.break_[-1].extend(self.current_leaves)
self.current_leaves[:] = []
def visit_Continue(self, node):
self.continue_[-1].extend(self.current_leaves)
self.current_leaves[:] = []
def visit_Try(self, node):
self.visit_statements(node.body)
body = self.current_leaves
handlers = []
for handler in node.handlers:
self.current_leaves = body[:]
self.visit_statements(handler.body)
handlers.extend(self.current_leaves)
self.current_leaves = body
self.visit_statements(node.orelse)
self.current_leaves = handlers + self.current_leaves
self.visit_statements(node.finalbody)
def visit_With(self, node):
for item in node.items:
self.set_current_leaves(CfgNode(item))
self.visit_statements(node.body)
# TODO(alexbw): once CFG analysis occurs at a block level,
# this extra class will not be necessary
class PropagateAnalysis(gast.NodeVisitor): | def __init__(self, analysis):
self.transfer_fn = analysis.transfer_fn
self.in_label = analysis.in_label
self.out_label = analysis.out_label
super(PropagateAnalysis, self).__init__()
def visit_If(self, node):
# Depth-first.
self.generic_visit(node)
incoming = anno.getanno(node.body[0], self.in_label)
incoming |= anno.getanno(node.test, self.in_label)
outgoing = anno.getanno(node.body[-1], self.out_label)
outgoing |= anno.getanno(node.test, self.out_label)
if node.orelse:
orelse_outgoing = anno.getanno(node.orelse[-1], self.out_label)
outgoing = self.transfer_fn(outgoing, orelse_outgoing)
anno.setanno(node, self.in_label, incoming)
anno.setanno(node, self.out_label, outgoing)
def visit_For(self, node):
self.generic_visit(node)
incoming = set(anno.getanno(node.body[0], self.in_label))
incoming -= set((anno.getanno(node.target, anno.Basic.QN),))
outgoing = anno.getanno(node.body[-1], self.out_label)
if node.orelse:
orelse_outgoing = anno.getanno(node.orelse[-1], self.out_label)
outgoing = self.transfer_fn(outgoing, orelse_outgoing)
anno.setanno(node, self.in_label, frozenset(incoming))
anno.setanno(node, self.out_label, outgoing)
def visit_While(self, node):
self.generic_visit(node)
incoming = anno.getanno(node.body[0], self.in_label)
incoming |= anno.getanno(node.test, self.in_label)
outgoing = anno.getanno(node.body[-1], self.out_label)
if node.orelse:
orelse_outgoing = anno.getanno(node.orelse[-1], self.out_label)
outgoing = self.transfer_fn(outgoing, orelse_outgoing)
anno.setanno(node, self.in_label, incoming)
anno.setanno(node, self.out_label, outgoing)
def visit_With(self, node):
self.generic_visit(node)
incoming = anno.getanno(node.body[0], self.in_label)
for item in node.items:
incoming |= anno.getanno(item, self.in_label)
outgoing = anno.getanno(node.body[-1], self.out_label)
anno.setanno(node, self.in_label, incoming)
anno.setanno(node, self.out_label, outgoing)
# TODO(alexbw): Abstract the CFG walking machinery into a superclass
# which is parameterized on which fields it selects when walking.
# TODO(alexbw): Abstract the application of dataflow analysis
class Forward(object):
"""Forward analysis on CFG.
Args:
label: A name for this analysis e.g. 'active' for activity analysis. The AST
nodes in the CFG will be given annotations 'name_in', 'name_out',
'name_gen' and 'name_kill' which contain the incoming values, outgoing
values, values generated by the statement, and values deleted by the
statement respectively.
transfer_fn: Either the AND or OR operator. If the AND operator is used it
turns into forward must analysis (i.e. a value will only be carried
forward if it appears on all incoming paths). The OR operator means that
forward may analysis is done (i.e. the union of incoming values will be
taken).
"""
def __init__(self, label, source_info, transfer_fn=operator.or_):
self.transfer_fn = transfer_fn
self.source_info = source_info
self.out_label = label + '_out'
self.in_label = label + '_in'
self.gen_label = label + '_gen'
self.kill_label = label + '_kill'
# TODO(alexbw): see if we can simplify by visiting breadth-first
def visit(self, node):
"""Depth-first walking the CFG, applying dataflow info propagation."""
# node.value is None only for the exit CfgNode.
if not node.value:
return
if anno.hasanno(node.value, self.out_label):
before = hash(anno.getanno(node.value, self.out_label))
else:
before = None
preds = [
anno.getanno(pred.value, self.out_label)
for pred in node.prev
if anno.hasanno(pred.value, self.out_label)
]
if preds:
incoming = functools.reduce(self.transfer_fn, preds[1:], preds[0])
else:
incoming = frozenset()
anno.setanno(node.value, self.in_label, incoming)
gen, kill = self.get_gen_kill(node, incoming)
anno.setanno(node.value, self.gen_label, gen)
anno.setanno(node.value, self.kill_label, kill)
anno.setanno(node.value, self.out_label, (incoming - kill) | gen)
if hash(anno.getanno(node.value, self.out_label)) != before:
for succ in node.next:
self.visit(succ)
def get_gen_kill(self, cfg_node, incoming):
"""Calculate Gen and Kill properties of a CFG node in dataflow analysis.
A function which takes the CFG node as well as a set of incoming
values. It must return a set of newly generated values by the statement as
well as a set of deleted (killed) values.
Args:
cfg_node: A CfgNode instance.
incoming:
"""
raise NotImplementedError()
class Backward(Forward):
"""Backward analysis on CFG."""
def visit(self, cfg_node):
# cfg_node.value is None for the exit node, which will be visited only once
if not cfg_node.value:
for pred in cfg_node.prev:
self.visit(pred)
return
if anno.hasanno(cfg_node.value, self.in_label):
before = hash(anno.getanno(cfg_node.value, self.in_label))
else:
before = None
succs = [
anno.getanno(succ.value, self.in_label)
for succ in cfg_node.next
if anno.hasanno(succ.value, self.in_label)
]
if succs:
incoming = functools.reduce(self.transfer_fn, succs[1:], succs[0])
else:
incoming = frozenset()
anno.setanno(cfg_node.value, self.out_label, incoming)
gen, kill = self.get_gen_kill(cfg_node, incoming)
anno.setanno(cfg_node.value, self.gen_label, gen)
anno.setanno(cfg_node.value, self.kill_label, kill)
anno.setanno(cfg_node.value, self.in_label, (incoming - kill) | gen)
if hash(anno.getanno(cfg_node.value, self.in_label)) != before:
for pred in cfg_node.prev:
self.visit(pred)
def run_analyses(node, analyses):
"""Perform dataflow analysis on all functions within an AST.
Args:
node: An AST node on which to run dataflow analysis.
analyses: Either an instance of the Forward or Backward dataflow analysis
class, or a list or tuple of them.
Returns:
node: The node, but now with annotations on the AST nodes containing the
results of the dataflow analyses.
"""
if not isinstance(analyses, (tuple, list)):
analyses = (analyses,)
for analysis in analyses:
if not isinstance(analysis, (Forward, Backward)):
raise TypeError('not a valid forward analysis object')
for child_node in gast.walk(node):
if isinstance(child_node, gast.FunctionDef):
cfg_obj = CfgBuilder().build_cfg(child_node)
for analysis in analyses:
if isinstance(analysis, Backward):
analysis.visit(cfg_obj.exit)
elif isinstance(analysis, Forward):
analysis.visit(cfg_obj.entry)
for analysis in analyses:
PropagateAnalysis(analysis).visit(node)
return node
class Liveness(Backward):
"""Perform a liveness analysis.
Each statement is annotated with a set of variables that may be used
later in the program.
"""
def __init__(self, source_info):
super(Liveness, self).__init__('live', source_info)
def get_gen_kill(self, node, _):
# A variable's parents are live if it is live
# e.g. x is live if x.y is live. This means gen needs to return
# all parents of a variable (if it's an Attribute or Subscript).
# This doesn't apply to kill (e.g. del x.y doesn't affect liveness of x)
gen = activity.get_read(node.value, self.source_info)
gen = functools.reduce(lambda left, right: left | right.support_set, gen,
gen)
kill = activity.get_updated(node.value, self.source_info)
return gen, kill
class ReachingDefinitions(Forward):
"""Perform reaching definition analysis.
Each statement is annotated with a set of (variable, definition) pairs.
"""
def __init__(self, source_info):
super(ReachingDefinitions, self).__init__('definitions', source_info)
def get_gen_kill(self, node, incoming):
definitions = activity.get_updated(node.value, self.source_info)
gen = frozenset((id_, node.value) for id_ in definitions)
kill = frozenset(def_ for def_ in incoming if def_[0] in definitions)
return gen, kill
class Defined(Forward):
"""Perform defined variable analysis.
Each statement is annotated with a set of variables which are guaranteed to
be defined at that point.
"""
def __init__(self, source_info):
super(Defined, self).__init__(
'defined', source_info, transfer_fn=operator.and_)
def get_gen_kill(self, node, _):
gen = activity.get_updated(node.value, self.source_info)
return gen, frozenset() | """Port analysis annotations from statements to their enclosing blocks."""
| random_line_split |
exp_entries.py | # coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
"""
This module defines Entry classes for containing experimental data.
"""
__author__ = "Shyue Ping Ong"
__copyright__ = "Copyright 2012, The Materials Project"
__version__ = "0.1"
__maintainer__ = "Shyue Ping Ong"
__email__ = "shyuep@gmail.com"
__date__ = "Jun 27, 2012"
from pymatgen.analysis.phase_diagram import PDEntry
from pymatgen.core.composition import Composition
from monty.json import MSONable
from pymatgen.analysis.thermochemistry import ThermoData
class ExpEntry(PDEntry, MSONable):
"""
An lightweight ExpEntry object containing experimental data for a
composition for many purposes. Extends a PDEntry so that it can be used for
phase diagram generation and reaction calculation.
Current version works only with solid phases and at 298K. Further
extensions for temperature dependence are planned.
"""
def __init__(self, composition, thermodata, temperature=298):
"""
Args:
composition: Composition of the entry. For flexibility, this can take
the form of all the typical input taken by a Composition, including
a {symbol: amt} dict, a string formula, and others.
thermodata: A sequence of ThermoData associated with the entry.
temperature: A temperature for the entry in Kelvin. Defaults to 298K.
"""
comp = Composition(composition)
self._thermodata = thermodata
found = False
enthalpy = float("inf")
for data in self._thermodata:
if data.type == "fH" and data.value < enthalpy and \
(data.phaseinfo != "gas" and data.phaseinfo != "liquid"):
enthalpy = data.value
found = True
if not found:
raise ValueError("List of Thermodata does not contain enthalpy "
"values.")
self.temperature = temperature
super().__init__(comp, enthalpy)
def __repr__(self):
return "ExpEntry {}, Energy = {:.4f}".format(self.composition.formula,
self.energy)
def __str__(self):
return self.__repr__()
@classmethod
def from_dict(cls, d):
|
def as_dict(self):
"""
:return: MSONable dict
"""
return {"@module": self.__class__.__module__,
"@class": self.__class__.__name__,
"thermodata": [td.as_dict() for td in self._thermodata],
"composition": self.composition.as_dict(),
"temperature": self.temperature}
| """
:param d: Dict representation.
:return: ExpEntry
"""
thermodata = [ThermoData.from_dict(td) for td in d["thermodata"]]
return cls(d["composition"], thermodata, d["temperature"]) | identifier_body |
exp_entries.py | # coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
"""
This module defines Entry classes for containing experimental data.
"""
__author__ = "Shyue Ping Ong"
__copyright__ = "Copyright 2012, The Materials Project"
__version__ = "0.1"
__maintainer__ = "Shyue Ping Ong"
__email__ = "shyuep@gmail.com"
__date__ = "Jun 27, 2012"
from pymatgen.analysis.phase_diagram import PDEntry
from pymatgen.core.composition import Composition
from monty.json import MSONable
from pymatgen.analysis.thermochemistry import ThermoData
class ExpEntry(PDEntry, MSONable):
"""
An lightweight ExpEntry object containing experimental data for a
composition for many purposes. Extends a PDEntry so that it can be used for
phase diagram generation and reaction calculation.
Current version works only with solid phases and at 298K. Further
extensions for temperature dependence are planned.
"""
def __init__(self, composition, thermodata, temperature=298):
"""
Args:
composition: Composition of the entry. For flexibility, this can take
the form of all the typical input taken by a Composition, including
a {symbol: amt} dict, a string formula, and others.
thermodata: A sequence of ThermoData associated with the entry.
temperature: A temperature for the entry in Kelvin. Defaults to 298K.
"""
comp = Composition(composition)
self._thermodata = thermodata
found = False
enthalpy = float("inf")
for data in self._thermodata:
if data.type == "fH" and data.value < enthalpy and \
(data.phaseinfo != "gas" and data.phaseinfo != "liquid"):
enthalpy = data.value
found = True
if not found:
raise ValueError("List of Thermodata does not contain enthalpy "
"values.")
self.temperature = temperature
super().__init__(comp, enthalpy)
def __repr__(self):
return "ExpEntry {}, Energy = {:.4f}".format(self.composition.formula,
self.energy)
def __str__(self):
return self.__repr__()
@classmethod
def | (cls, d):
"""
:param d: Dict representation.
:return: ExpEntry
"""
thermodata = [ThermoData.from_dict(td) for td in d["thermodata"]]
return cls(d["composition"], thermodata, d["temperature"])
def as_dict(self):
"""
:return: MSONable dict
"""
return {"@module": self.__class__.__module__,
"@class": self.__class__.__name__,
"thermodata": [td.as_dict() for td in self._thermodata],
"composition": self.composition.as_dict(),
"temperature": self.temperature}
| from_dict | identifier_name |
exp_entries.py | # coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
"""
This module defines Entry classes for containing experimental data.
"""
__author__ = "Shyue Ping Ong"
__copyright__ = "Copyright 2012, The Materials Project"
__version__ = "0.1"
__maintainer__ = "Shyue Ping Ong"
__email__ = "shyuep@gmail.com"
__date__ = "Jun 27, 2012"
from pymatgen.analysis.phase_diagram import PDEntry
from pymatgen.core.composition import Composition
from monty.json import MSONable
from pymatgen.analysis.thermochemistry import ThermoData
class ExpEntry(PDEntry, MSONable):
"""
An lightweight ExpEntry object containing experimental data for a
composition for many purposes. Extends a PDEntry so that it can be used for
phase diagram generation and reaction calculation.
Current version works only with solid phases and at 298K. Further
extensions for temperature dependence are planned.
"""
def __init__(self, composition, thermodata, temperature=298):
"""
Args:
composition: Composition of the entry. For flexibility, this can take
the form of all the typical input taken by a Composition, including
a {symbol: amt} dict, a string formula, and others.
thermodata: A sequence of ThermoData associated with the entry.
temperature: A temperature for the entry in Kelvin. Defaults to 298K.
"""
comp = Composition(composition)
self._thermodata = thermodata
found = False
enthalpy = float("inf")
for data in self._thermodata:
if data.type == "fH" and data.value < enthalpy and \
(data.phaseinfo != "gas" and data.phaseinfo != "liquid"):
enthalpy = data.value
found = True
if not found:
|
self.temperature = temperature
super().__init__(comp, enthalpy)
def __repr__(self):
return "ExpEntry {}, Energy = {:.4f}".format(self.composition.formula,
self.energy)
def __str__(self):
return self.__repr__()
@classmethod
def from_dict(cls, d):
"""
:param d: Dict representation.
:return: ExpEntry
"""
thermodata = [ThermoData.from_dict(td) for td in d["thermodata"]]
return cls(d["composition"], thermodata, d["temperature"])
def as_dict(self):
"""
:return: MSONable dict
"""
return {"@module": self.__class__.__module__,
"@class": self.__class__.__name__,
"thermodata": [td.as_dict() for td in self._thermodata],
"composition": self.composition.as_dict(),
"temperature": self.temperature}
| raise ValueError("List of Thermodata does not contain enthalpy "
"values.") | conditional_block |
exp_entries.py | # coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
"""
This module defines Entry classes for containing experimental data.
"""
__author__ = "Shyue Ping Ong"
__copyright__ = "Copyright 2012, The Materials Project"
__version__ = "0.1"
__maintainer__ = "Shyue Ping Ong"
__email__ = "shyuep@gmail.com"
__date__ = "Jun 27, 2012"
from pymatgen.analysis.phase_diagram import PDEntry
from pymatgen.core.composition import Composition
from monty.json import MSONable
from pymatgen.analysis.thermochemistry import ThermoData
class ExpEntry(PDEntry, MSONable):
"""
An lightweight ExpEntry object containing experimental data for a
composition for many purposes. Extends a PDEntry so that it can be used for
phase diagram generation and reaction calculation.
Current version works only with solid phases and at 298K. Further
extensions for temperature dependence are planned.
"""
def __init__(self, composition, thermodata, temperature=298):
""" | thermodata: A sequence of ThermoData associated with the entry.
temperature: A temperature for the entry in Kelvin. Defaults to 298K.
"""
comp = Composition(composition)
self._thermodata = thermodata
found = False
enthalpy = float("inf")
for data in self._thermodata:
if data.type == "fH" and data.value < enthalpy and \
(data.phaseinfo != "gas" and data.phaseinfo != "liquid"):
enthalpy = data.value
found = True
if not found:
raise ValueError("List of Thermodata does not contain enthalpy "
"values.")
self.temperature = temperature
super().__init__(comp, enthalpy)
def __repr__(self):
return "ExpEntry {}, Energy = {:.4f}".format(self.composition.formula,
self.energy)
def __str__(self):
return self.__repr__()
@classmethod
def from_dict(cls, d):
"""
:param d: Dict representation.
:return: ExpEntry
"""
thermodata = [ThermoData.from_dict(td) for td in d["thermodata"]]
return cls(d["composition"], thermodata, d["temperature"])
def as_dict(self):
"""
:return: MSONable dict
"""
return {"@module": self.__class__.__module__,
"@class": self.__class__.__name__,
"thermodata": [td.as_dict() for td in self._thermodata],
"composition": self.composition.as_dict(),
"temperature": self.temperature} | Args:
composition: Composition of the entry. For flexibility, this can take
the form of all the typical input taken by a Composition, including
a {symbol: amt} dict, a string formula, and others. | random_line_split |
test_hashes.py | #!/usr/bin/env python3
# Copyright (C) 2017-2021 The btclib developers
#
# This file is part of btclib. It is subject to the license terms in the
# LICENSE file found in the top-level directory of this distribution.
#
# No part of btclib including this file, may be copied, modified, propagated,
# or distributed except according to the terms contained in the LICENSE file.
"Tests for the `btclib.hashes` module."
from btclib.hashes import hash160, hash256
from tests.test_to_key import (
net_unaware_compressed_pub_keys,
net_unaware_uncompressed_pub_keys,
plain_prv_keys,
)
def test_hash160_hash256() -> None:
|
# def test_fingerprint() -> None:
#
# seed = "bfc4cbaad0ff131aa97fa30a48d09ae7df914bcc083af1e07793cd0a7c61a03f65d622848209ad3366a419f4718a80ec9037df107d8d12c19b83202de00a40ad"
# xprv = rootxprv_from_seed(seed)
# pf = fingerprint(xprv) # xprv is automatically converted to xpub
# child_key = derive(xprv, 0x80000000)
# pf2 = BIP32KeyData.b58decode(child_key).parent_fingerprint
# assert pf == pf2
| test_vectors = (
plain_prv_keys
+ net_unaware_compressed_pub_keys
+ net_unaware_uncompressed_pub_keys
)
for hexstring in test_vectors:
hash160(hexstring)
hash256(hexstring) | identifier_body |
test_hashes.py | #!/usr/bin/env python3
| # This file is part of btclib. It is subject to the license terms in the
# LICENSE file found in the top-level directory of this distribution.
#
# No part of btclib including this file, may be copied, modified, propagated,
# or distributed except according to the terms contained in the LICENSE file.
"Tests for the `btclib.hashes` module."
from btclib.hashes import hash160, hash256
from tests.test_to_key import (
net_unaware_compressed_pub_keys,
net_unaware_uncompressed_pub_keys,
plain_prv_keys,
)
def test_hash160_hash256() -> None:
test_vectors = (
plain_prv_keys
+ net_unaware_compressed_pub_keys
+ net_unaware_uncompressed_pub_keys
)
for hexstring in test_vectors:
hash160(hexstring)
hash256(hexstring)
# def test_fingerprint() -> None:
#
# seed = "bfc4cbaad0ff131aa97fa30a48d09ae7df914bcc083af1e07793cd0a7c61a03f65d622848209ad3366a419f4718a80ec9037df107d8d12c19b83202de00a40ad"
# xprv = rootxprv_from_seed(seed)
# pf = fingerprint(xprv) # xprv is automatically converted to xpub
# child_key = derive(xprv, 0x80000000)
# pf2 = BIP32KeyData.b58decode(child_key).parent_fingerprint
# assert pf == pf2 | # Copyright (C) 2017-2021 The btclib developers
# | random_line_split |
test_hashes.py | #!/usr/bin/env python3
# Copyright (C) 2017-2021 The btclib developers
#
# This file is part of btclib. It is subject to the license terms in the
# LICENSE file found in the top-level directory of this distribution.
#
# No part of btclib including this file, may be copied, modified, propagated,
# or distributed except according to the terms contained in the LICENSE file.
"Tests for the `btclib.hashes` module."
from btclib.hashes import hash160, hash256
from tests.test_to_key import (
net_unaware_compressed_pub_keys,
net_unaware_uncompressed_pub_keys,
plain_prv_keys,
)
def test_hash160_hash256() -> None:
test_vectors = (
plain_prv_keys
+ net_unaware_compressed_pub_keys
+ net_unaware_uncompressed_pub_keys
)
for hexstring in test_vectors:
|
# def test_fingerprint() -> None:
#
# seed = "bfc4cbaad0ff131aa97fa30a48d09ae7df914bcc083af1e07793cd0a7c61a03f65d622848209ad3366a419f4718a80ec9037df107d8d12c19b83202de00a40ad"
# xprv = rootxprv_from_seed(seed)
# pf = fingerprint(xprv) # xprv is automatically converted to xpub
# child_key = derive(xprv, 0x80000000)
# pf2 = BIP32KeyData.b58decode(child_key).parent_fingerprint
# assert pf == pf2
| hash160(hexstring)
hash256(hexstring) | conditional_block |
test_hashes.py | #!/usr/bin/env python3
# Copyright (C) 2017-2021 The btclib developers
#
# This file is part of btclib. It is subject to the license terms in the
# LICENSE file found in the top-level directory of this distribution.
#
# No part of btclib including this file, may be copied, modified, propagated,
# or distributed except according to the terms contained in the LICENSE file.
"Tests for the `btclib.hashes` module."
from btclib.hashes import hash160, hash256
from tests.test_to_key import (
net_unaware_compressed_pub_keys,
net_unaware_uncompressed_pub_keys,
plain_prv_keys,
)
def | () -> None:
test_vectors = (
plain_prv_keys
+ net_unaware_compressed_pub_keys
+ net_unaware_uncompressed_pub_keys
)
for hexstring in test_vectors:
hash160(hexstring)
hash256(hexstring)
# def test_fingerprint() -> None:
#
# seed = "bfc4cbaad0ff131aa97fa30a48d09ae7df914bcc083af1e07793cd0a7c61a03f65d622848209ad3366a419f4718a80ec9037df107d8d12c19b83202de00a40ad"
# xprv = rootxprv_from_seed(seed)
# pf = fingerprint(xprv) # xprv is automatically converted to xpub
# child_key = derive(xprv, 0x80000000)
# pf2 = BIP32KeyData.b58decode(child_key).parent_fingerprint
# assert pf == pf2
| test_hash160_hash256 | identifier_name |
crypter.rs | // Copyright 2020 TiKV Project Authors. Licensed under Apache-2.0.
use byteorder::{BigEndian, ByteOrder};
use derive_more::Deref;
use engine_traits::EncryptionMethod as DBEncryptionMethod;
use kvproto::encryptionpb::EncryptionMethod;
use openssl::symm::{self, Cipher as OCipher};
use rand::{rngs::OsRng, RngCore};
use tikv_util::{box_err, impl_display_as_debug};
use crate::{Error, Result};
#[cfg(not(feature = "prost-codec"))]
pub fn encryption_method_to_db_encryption_method(method: EncryptionMethod) -> DBEncryptionMethod {
match method {
EncryptionMethod::Plaintext => DBEncryptionMethod::Plaintext,
EncryptionMethod::Aes128Ctr => DBEncryptionMethod::Aes128Ctr,
EncryptionMethod::Aes192Ctr => DBEncryptionMethod::Aes192Ctr,
EncryptionMethod::Aes256Ctr => DBEncryptionMethod::Aes256Ctr,
EncryptionMethod::Unknown => DBEncryptionMethod::Unknown,
}
}
pub fn encryption_method_from_db_encryption_method(method: DBEncryptionMethod) -> EncryptionMethod {
match method {
DBEncryptionMethod::Plaintext => EncryptionMethod::Plaintext,
DBEncryptionMethod::Aes128Ctr => EncryptionMethod::Aes128Ctr,
DBEncryptionMethod::Aes192Ctr => EncryptionMethod::Aes192Ctr,
DBEncryptionMethod::Aes256Ctr => EncryptionMethod::Aes256Ctr,
DBEncryptionMethod::Unknown => EncryptionMethod::Unknown,
}
}
#[cfg(not(feature = "prost-codec"))]
pub fn compat(method: EncryptionMethod) -> EncryptionMethod {
method
}
#[cfg(feature = "prost-codec")]
pub fn encryption_method_to_db_encryption_method(
method: i32, /* EncryptionMethod */
) -> DBEncryptionMethod {
match method {
1/* EncryptionMethod::Plaintext */ => DBEncryptionMethod::Plaintext,
2/* EncryptionMethod::Aes128Ctr */ => DBEncryptionMethod::Aes128Ctr,
3/* EncryptionMethod::Aes192Ctr */ => DBEncryptionMethod::Aes192Ctr,
4/* EncryptionMethod::Aes256Ctr */ => DBEncryptionMethod::Aes256Ctr,
_/* EncryptionMethod::Unknown */ => DBEncryptionMethod::Unknown,
}
}
#[cfg(feature = "prost-codec")]
pub fn compat(method: EncryptionMethod) -> i32 {
match method {
EncryptionMethod::Unknown => 0,
EncryptionMethod::Plaintext => 1,
EncryptionMethod::Aes128Ctr => 2,
EncryptionMethod::Aes192Ctr => 3,
EncryptionMethod::Aes256Ctr => 4,
}
}
pub fn get_method_key_length(method: EncryptionMethod) -> usize {
match method {
EncryptionMethod::Plaintext => 0,
EncryptionMethod::Aes128Ctr => 16,
EncryptionMethod::Aes192Ctr => 24,
EncryptionMethod::Aes256Ctr => 32,
unknown => panic!("bad EncryptionMethod {:?}", unknown),
}
}
// IV's the length should be 12 btyes for GCM mode.
const GCM_IV_12: usize = 12;
// IV's the length should be 16 btyes for CTR mode.
const CTR_IV_16: usize = 16;
#[derive(Debug, Clone, Copy)]
pub enum Iv {
Gcm([u8; GCM_IV_12]),
Ctr([u8; CTR_IV_16]),
}
impl Iv {
/// Generate a random IV for AES-GCM.
pub fn new_gcm() -> Iv {
let mut iv = [0u8; GCM_IV_12];
OsRng.fill_bytes(&mut iv);
Iv::Gcm(iv)
}
/// Generate a random IV for AES-CTR.
pub fn new_ctr() -> Iv {
let mut iv = [0u8; CTR_IV_16];
OsRng.fill_bytes(&mut iv);
Iv::Ctr(iv)
}
pub fn from_slice(src: &[u8]) -> Result<Iv> {
if src.len() == CTR_IV_16 {
let mut iv = [0; CTR_IV_16];
iv.copy_from_slice(src);
Ok(Iv::Ctr(iv))
} else if src.len() == GCM_IV_12 {
let mut iv = [0; GCM_IV_12];
iv.copy_from_slice(src);
Ok(Iv::Gcm(iv))
} else {
Err(box_err!(
"Nonce + Counter must be 12/16 bytes, {}",
src.len()
))
}
}
pub fn as_slice(&self) -> &[u8] {
match self {
Iv::Ctr(iv) => iv,
Iv::Gcm(iv) => iv,
}
}
pub fn add_offset(&mut self, offset: u64) -> Result<()> {
match self {
Iv::Ctr(iv) => {
let v = BigEndian::read_u128(iv);
BigEndian::write_u128(iv, v.wrapping_add(offset as u128));
Ok(())
}
Iv::Gcm(_) => Err(box_err!("offset addition is not supported for GCM mode")),
}
}
}
// The length GCM tag must be 16 bytes.
const GCM_TAG_LEN: usize = 16;
pub struct AesGcmTag([u8; GCM_TAG_LEN]);
impl<'a> From<&'a [u8]> for AesGcmTag {
fn from(src: &'a [u8]) -> AesGcmTag {
assert!(src.len() >= GCM_TAG_LEN, "AES GCM tag must be 16 bytes");
let mut tag = [0; GCM_TAG_LEN];
tag.copy_from_slice(src);
AesGcmTag(tag)
}
}
impl AesGcmTag {
pub fn as_slice(&self) -> &[u8] {
&self.0
}
}
/// An Aes256-GCM crypter.
pub struct AesGcmCrypter<'k> {
iv: Iv,
key: &'k PlainKey,
}
impl<'k> AesGcmCrypter<'k> {
/// The key length of `AesGcmCrypter` is 32 bytes.
pub const KEY_LEN: usize = 32;
pub fn new(key: &'k PlainKey, iv: Iv) -> AesGcmCrypter<'k> {
AesGcmCrypter { iv, key }
}
pub fn encrypt(&self, pt: &[u8]) -> Result<(Vec<u8>, AesGcmTag)> {
let cipher = OCipher::aes_256_gcm();
let mut tag = AesGcmTag([0u8; GCM_TAG_LEN]);
let ciphertext = symm::encrypt_aead(
cipher,
&self.key.0,
Some(self.iv.as_slice()),
&[], /* AAD */
pt,
&mut tag.0,
)?;
Ok((ciphertext, tag))
}
pub fn decrypt(&self, ct: &[u8], tag: AesGcmTag) -> Result<Vec<u8>> {
let cipher = OCipher::aes_256_gcm();
let plaintext = symm::decrypt_aead(
cipher,
&self.key.0,
Some(self.iv.as_slice()),
&[], /* AAD */
ct,
&tag.0,
)?;
Ok(plaintext)
}
}
pub fn verify_encryption_config(method: EncryptionMethod, key: &[u8]) -> Result<()> {
if method == EncryptionMethod::Unknown {
return Err(Error::UnknownEncryption);
}
if method != EncryptionMethod::Plaintext {
let key_len = get_method_key_length(method);
if key.len() != key_len {
return Err(box_err!(
"unexpected key length, expected {} vs actual {}",
key_len,
key.len()
));
}
}
Ok(())
}
// PlainKey is a newtype used to mark a vector a plaintext key.
// It requires the vec to be a valid AesGcmCrypter key.
#[derive(Deref)]
pub struct PlainKey(Vec<u8>);
impl PlainKey {
pub fn new(key: Vec<u8>) -> Result<Self> {
if key.len() != AesGcmCrypter::KEY_LEN {
return Err(box_err!(
"encryption method and key length mismatch, expect {} get {}",
AesGcmCrypter::KEY_LEN,
key.len()
));
}
Ok(Self(key))
}
}
// Don't expose the key in a debug print
impl std::fmt::Debug for PlainKey {
fn | (&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_tuple("PlainKey")
.field(&"REDACTED".to_string())
.finish()
}
}
// Don't expose the key in a display print
impl_display_as_debug!(PlainKey);
#[cfg(test)]
mod tests {
use hex::FromHex;
use super::*;
#[test]
fn test_iv() {
let mut ivs = Vec::with_capacity(100);
for c in 0..100 {
if c % 2 == 0 {
ivs.push(Iv::new_ctr());
} else {
ivs.push(Iv::new_gcm());
}
}
ivs.dedup_by(|a, b| a.as_slice() == b.as_slice());
assert_eq!(ivs.len(), 100);
for iv in ivs {
let iv1 = Iv::from_slice(iv.as_slice()).unwrap();
assert_eq!(iv.as_slice(), iv1.as_slice());
}
}
#[test]
fn test_ase_256_gcm() {
// See more http://csrc.nist.gov/groups/STM/cavp/documents/mac/gcmtestvectors.zip
//
// [Keylen = 256]
// [IVlen = 96]
// [PTlen = 256]
// [AADlen = 0]
// [Taglen = 128]
//
// Count = 0
// Key = c3d99825f2181f4808acd2068eac7441a65bd428f14d2aab43fefc0129091139
// IV = cafabd9672ca6c79a2fbdc22
// CT = 84e5f23f95648fa247cb28eef53abec947dbf05ac953734618111583840bd980
// AAD =
// Tag = 79651c875f7941793d42bbd0af1cce7c
// PT = 25431587e9ecffc7c37f8d6d52a9bc3310651d46fb0e3bad2726c8f2db653749
let pt = "25431587e9ecffc7c37f8d6d52a9bc3310651d46fb0e3bad2726c8f2db653749";
let ct = "84e5f23f95648fa247cb28eef53abec947dbf05ac953734618111583840bd980";
let key = "c3d99825f2181f4808acd2068eac7441a65bd428f14d2aab43fefc0129091139";
let iv = "cafabd9672ca6c79a2fbdc22";
let tag = "79651c875f7941793d42bbd0af1cce7c";
let pt = Vec::from_hex(pt).unwrap();
let ct = Vec::from_hex(ct).unwrap();
let key = PlainKey::new(Vec::from_hex(key).unwrap()).unwrap();
let iv = Iv::from_slice(Vec::from_hex(iv).unwrap().as_slice()).unwrap();
let tag = Vec::from_hex(tag).unwrap();
let crypter = AesGcmCrypter::new(&key, iv);
let (ciphertext, gcm_tag) = crypter.encrypt(&pt).unwrap();
assert_eq!(ciphertext, ct, "{}", hex::encode(&ciphertext));
assert_eq!(gcm_tag.0.to_vec(), tag, "{}", hex::encode(&gcm_tag.0));
let plaintext = crypter.decrypt(&ct, gcm_tag).unwrap();
assert_eq!(plaintext, pt, "{}", hex::encode(&plaintext));
// Fail to decrypt with a wrong tag.
crypter
.decrypt(&ct, AesGcmTag([0u8; GCM_TAG_LEN]))
.unwrap_err();
}
}
| fmt | identifier_name |
crypter.rs | // Copyright 2020 TiKV Project Authors. Licensed under Apache-2.0.
use byteorder::{BigEndian, ByteOrder};
use derive_more::Deref;
use engine_traits::EncryptionMethod as DBEncryptionMethod;
use kvproto::encryptionpb::EncryptionMethod;
use openssl::symm::{self, Cipher as OCipher};
use rand::{rngs::OsRng, RngCore};
use tikv_util::{box_err, impl_display_as_debug};
use crate::{Error, Result};
#[cfg(not(feature = "prost-codec"))]
pub fn encryption_method_to_db_encryption_method(method: EncryptionMethod) -> DBEncryptionMethod {
match method {
EncryptionMethod::Plaintext => DBEncryptionMethod::Plaintext,
EncryptionMethod::Aes128Ctr => DBEncryptionMethod::Aes128Ctr,
EncryptionMethod::Aes192Ctr => DBEncryptionMethod::Aes192Ctr,
EncryptionMethod::Aes256Ctr => DBEncryptionMethod::Aes256Ctr,
EncryptionMethod::Unknown => DBEncryptionMethod::Unknown,
}
}
pub fn encryption_method_from_db_encryption_method(method: DBEncryptionMethod) -> EncryptionMethod {
match method {
DBEncryptionMethod::Plaintext => EncryptionMethod::Plaintext,
DBEncryptionMethod::Aes128Ctr => EncryptionMethod::Aes128Ctr,
DBEncryptionMethod::Aes192Ctr => EncryptionMethod::Aes192Ctr,
DBEncryptionMethod::Aes256Ctr => EncryptionMethod::Aes256Ctr,
DBEncryptionMethod::Unknown => EncryptionMethod::Unknown,
}
}
#[cfg(not(feature = "prost-codec"))]
pub fn compat(method: EncryptionMethod) -> EncryptionMethod {
method
}
#[cfg(feature = "prost-codec")]
pub fn encryption_method_to_db_encryption_method(
method: i32, /* EncryptionMethod */
) -> DBEncryptionMethod {
match method {
1/* EncryptionMethod::Plaintext */ => DBEncryptionMethod::Plaintext,
2/* EncryptionMethod::Aes128Ctr */ => DBEncryptionMethod::Aes128Ctr,
3/* EncryptionMethod::Aes192Ctr */ => DBEncryptionMethod::Aes192Ctr,
4/* EncryptionMethod::Aes256Ctr */ => DBEncryptionMethod::Aes256Ctr,
_/* EncryptionMethod::Unknown */ => DBEncryptionMethod::Unknown,
}
}
#[cfg(feature = "prost-codec")]
pub fn compat(method: EncryptionMethod) -> i32 {
match method {
EncryptionMethod::Unknown => 0,
EncryptionMethod::Plaintext => 1,
EncryptionMethod::Aes128Ctr => 2,
EncryptionMethod::Aes192Ctr => 3,
EncryptionMethod::Aes256Ctr => 4,
}
}
pub fn get_method_key_length(method: EncryptionMethod) -> usize {
match method {
EncryptionMethod::Plaintext => 0,
EncryptionMethod::Aes128Ctr => 16,
EncryptionMethod::Aes192Ctr => 24,
EncryptionMethod::Aes256Ctr => 32,
unknown => panic!("bad EncryptionMethod {:?}", unknown),
}
}
// IV's the length should be 12 btyes for GCM mode.
const GCM_IV_12: usize = 12;
// IV's the length should be 16 btyes for CTR mode.
const CTR_IV_16: usize = 16;
#[derive(Debug, Clone, Copy)]
pub enum Iv {
Gcm([u8; GCM_IV_12]),
Ctr([u8; CTR_IV_16]),
}
impl Iv {
/// Generate a random IV for AES-GCM.
pub fn new_gcm() -> Iv {
let mut iv = [0u8; GCM_IV_12];
OsRng.fill_bytes(&mut iv);
Iv::Gcm(iv)
}
/// Generate a random IV for AES-CTR.
pub fn new_ctr() -> Iv {
let mut iv = [0u8; CTR_IV_16];
OsRng.fill_bytes(&mut iv);
Iv::Ctr(iv)
}
pub fn from_slice(src: &[u8]) -> Result<Iv> {
if src.len() == CTR_IV_16 {
let mut iv = [0; CTR_IV_16];
iv.copy_from_slice(src);
Ok(Iv::Ctr(iv))
} else if src.len() == GCM_IV_12 {
let mut iv = [0; GCM_IV_12];
iv.copy_from_slice(src);
Ok(Iv::Gcm(iv))
} else {
Err(box_err!(
"Nonce + Counter must be 12/16 bytes, {}",
src.len()
))
}
}
pub fn as_slice(&self) -> &[u8] {
match self {
Iv::Ctr(iv) => iv,
Iv::Gcm(iv) => iv,
}
}
pub fn add_offset(&mut self, offset: u64) -> Result<()> {
match self {
Iv::Ctr(iv) => {
let v = BigEndian::read_u128(iv);
BigEndian::write_u128(iv, v.wrapping_add(offset as u128));
Ok(())
}
Iv::Gcm(_) => Err(box_err!("offset addition is not supported for GCM mode")),
}
}
}
// The length GCM tag must be 16 bytes.
const GCM_TAG_LEN: usize = 16;
pub struct AesGcmTag([u8; GCM_TAG_LEN]);
impl<'a> From<&'a [u8]> for AesGcmTag {
fn from(src: &'a [u8]) -> AesGcmTag {
assert!(src.len() >= GCM_TAG_LEN, "AES GCM tag must be 16 bytes");
let mut tag = [0; GCM_TAG_LEN];
tag.copy_from_slice(src);
AesGcmTag(tag)
}
}
impl AesGcmTag {
pub fn as_slice(&self) -> &[u8] {
&self.0
}
}
/// An Aes256-GCM crypter.
pub struct AesGcmCrypter<'k> {
iv: Iv,
key: &'k PlainKey,
}
impl<'k> AesGcmCrypter<'k> {
/// The key length of `AesGcmCrypter` is 32 bytes.
pub const KEY_LEN: usize = 32;
pub fn new(key: &'k PlainKey, iv: Iv) -> AesGcmCrypter<'k> {
AesGcmCrypter { iv, key }
}
pub fn encrypt(&self, pt: &[u8]) -> Result<(Vec<u8>, AesGcmTag)> {
let cipher = OCipher::aes_256_gcm();
let mut tag = AesGcmTag([0u8; GCM_TAG_LEN]);
let ciphertext = symm::encrypt_aead(
cipher,
&self.key.0, | &mut tag.0,
)?;
Ok((ciphertext, tag))
}
pub fn decrypt(&self, ct: &[u8], tag: AesGcmTag) -> Result<Vec<u8>> {
let cipher = OCipher::aes_256_gcm();
let plaintext = symm::decrypt_aead(
cipher,
&self.key.0,
Some(self.iv.as_slice()),
&[], /* AAD */
ct,
&tag.0,
)?;
Ok(plaintext)
}
}
pub fn verify_encryption_config(method: EncryptionMethod, key: &[u8]) -> Result<()> {
if method == EncryptionMethod::Unknown {
return Err(Error::UnknownEncryption);
}
if method != EncryptionMethod::Plaintext {
let key_len = get_method_key_length(method);
if key.len() != key_len {
return Err(box_err!(
"unexpected key length, expected {} vs actual {}",
key_len,
key.len()
));
}
}
Ok(())
}
// PlainKey is a newtype used to mark a vector a plaintext key.
// It requires the vec to be a valid AesGcmCrypter key.
#[derive(Deref)]
pub struct PlainKey(Vec<u8>);
impl PlainKey {
pub fn new(key: Vec<u8>) -> Result<Self> {
if key.len() != AesGcmCrypter::KEY_LEN {
return Err(box_err!(
"encryption method and key length mismatch, expect {} get {}",
AesGcmCrypter::KEY_LEN,
key.len()
));
}
Ok(Self(key))
}
}
// Don't expose the key in a debug print
impl std::fmt::Debug for PlainKey {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_tuple("PlainKey")
.field(&"REDACTED".to_string())
.finish()
}
}
// Don't expose the key in a display print
impl_display_as_debug!(PlainKey);
#[cfg(test)]
mod tests {
use hex::FromHex;
use super::*;
#[test]
fn test_iv() {
let mut ivs = Vec::with_capacity(100);
for c in 0..100 {
if c % 2 == 0 {
ivs.push(Iv::new_ctr());
} else {
ivs.push(Iv::new_gcm());
}
}
ivs.dedup_by(|a, b| a.as_slice() == b.as_slice());
assert_eq!(ivs.len(), 100);
for iv in ivs {
let iv1 = Iv::from_slice(iv.as_slice()).unwrap();
assert_eq!(iv.as_slice(), iv1.as_slice());
}
}
#[test]
fn test_ase_256_gcm() {
// See more http://csrc.nist.gov/groups/STM/cavp/documents/mac/gcmtestvectors.zip
//
// [Keylen = 256]
// [IVlen = 96]
// [PTlen = 256]
// [AADlen = 0]
// [Taglen = 128]
//
// Count = 0
// Key = c3d99825f2181f4808acd2068eac7441a65bd428f14d2aab43fefc0129091139
// IV = cafabd9672ca6c79a2fbdc22
// CT = 84e5f23f95648fa247cb28eef53abec947dbf05ac953734618111583840bd980
// AAD =
// Tag = 79651c875f7941793d42bbd0af1cce7c
// PT = 25431587e9ecffc7c37f8d6d52a9bc3310651d46fb0e3bad2726c8f2db653749
let pt = "25431587e9ecffc7c37f8d6d52a9bc3310651d46fb0e3bad2726c8f2db653749";
let ct = "84e5f23f95648fa247cb28eef53abec947dbf05ac953734618111583840bd980";
let key = "c3d99825f2181f4808acd2068eac7441a65bd428f14d2aab43fefc0129091139";
let iv = "cafabd9672ca6c79a2fbdc22";
let tag = "79651c875f7941793d42bbd0af1cce7c";
let pt = Vec::from_hex(pt).unwrap();
let ct = Vec::from_hex(ct).unwrap();
let key = PlainKey::new(Vec::from_hex(key).unwrap()).unwrap();
let iv = Iv::from_slice(Vec::from_hex(iv).unwrap().as_slice()).unwrap();
let tag = Vec::from_hex(tag).unwrap();
let crypter = AesGcmCrypter::new(&key, iv);
let (ciphertext, gcm_tag) = crypter.encrypt(&pt).unwrap();
assert_eq!(ciphertext, ct, "{}", hex::encode(&ciphertext));
assert_eq!(gcm_tag.0.to_vec(), tag, "{}", hex::encode(&gcm_tag.0));
let plaintext = crypter.decrypt(&ct, gcm_tag).unwrap();
assert_eq!(plaintext, pt, "{}", hex::encode(&plaintext));
// Fail to decrypt with a wrong tag.
crypter
.decrypt(&ct, AesGcmTag([0u8; GCM_TAG_LEN]))
.unwrap_err();
}
} | Some(self.iv.as_slice()),
&[], /* AAD */
pt, | random_line_split |
07f975f81f03_remove_team_domain.py | # -*- coding: utf-8 -*-
"""Remove team domain
Revision ID: 07f975f81f03
Revises: 4e206c5ddabd
Create Date: 2017-08-04 15:12:11.992856
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '07f975f81f03'
down_revision = '4e206c5ddabd'
branch_labels = None
depends_on = None
def upgrade():
op.drop_index('ix_team_domain', table_name='team')
op.drop_column('team', 'domain')
def downgrade():
| op.add_column(
'team',
sa.Column('domain', sa.VARCHAR(length=253), autoincrement=False, nullable=True),
)
op.create_index('ix_team_domain', 'team', ['domain'], unique=False) | identifier_body | |
07f975f81f03_remove_team_domain.py | # -*- coding: utf-8 -*-
"""Remove team domain
Revision ID: 07f975f81f03
Revises: 4e206c5ddabd
Create Date: 2017-08-04 15:12:11.992856
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '07f975f81f03'
down_revision = '4e206c5ddabd'
branch_labels = None
depends_on = None
def upgrade():
op.drop_index('ix_team_domain', table_name='team')
op.drop_column('team', 'domain')
def | ():
op.add_column(
'team',
sa.Column('domain', sa.VARCHAR(length=253), autoincrement=False, nullable=True),
)
op.create_index('ix_team_domain', 'team', ['domain'], unique=False)
| downgrade | identifier_name |
07f975f81f03_remove_team_domain.py | # -*- coding: utf-8 -*-
"""Remove team domain
Revision ID: 07f975f81f03
Revises: 4e206c5ddabd
Create Date: 2017-08-04 15:12:11.992856
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '07f975f81f03'
down_revision = '4e206c5ddabd'
branch_labels = None
depends_on = None
def upgrade():
op.drop_index('ix_team_domain', table_name='team')
op.drop_column('team', 'domain')
def downgrade(): | op.add_column(
'team',
sa.Column('domain', sa.VARCHAR(length=253), autoincrement=False, nullable=True),
)
op.create_index('ix_team_domain', 'team', ['domain'], unique=False) | random_line_split | |
ext.js |
var appName = csInterface.hostEnvironment.appName;
if(appName != "FLPR"){
loadJSX();
}
var appNames = ["PHXS"];
for (var i = 0; i < appNames.length; i++) {
var name = appNames[i];
if (appName.indexOf(name) >= 0) {
var btn = document.getElementById("btn_" + name);
if (btn)
btn.disabled = false;
}
}
updateThemeWithAppSkinInfo(csInterface.hostEnvironment.appSkinInfo);
// Update the color of the panel when the theme color of the product changed.
csInterface.addEventListener(CSInterface.THEME_COLOR_CHANGED_EVENT, onAppThemeColorChanged);
}
/**
* Update the theme with the AppSkinInfo retrieved from the host product.
*/
function updateThemeWithAppSkinInfo(appSkinInfo) {
//Update the background color of the panel
var panelBackgroundColor = appSkinInfo.panelBackgroundColor.color;
document.body.bgColor = toHex(panelBackgroundColor);
var styleId = "ppstyle";
var csInterface = new CSInterface();
var appName = csInterface.hostEnvironment.appName;
if(appName == "PHXS"){
addRule(styleId, "button, select, input[type=button], input[type=submit]", "border-radius:3px;");
}
if(appName == "PHXS" || appName == "PPRO" || appName == "PRLD") {
////////////////////////////////////////////////////////////////////////////////////////////////
// NOTE: Below theme related code are only suitable for Photoshop. //
// If you want to achieve same effect on other products please make your own changes here. //
////////////////////////////////////////////////////////////////////////////////////////////////
var gradientBg = "background-image: -webkit-linear-gradient(top, " + toHex(panelBackgroundColor, 40) + " , " + toHex(panelBackgroundColor, 10) + ");";
var gradientDisabledBg = "background-image: -webkit-linear-gradient(top, " + toHex(panelBackgroundColor, 15) + " , " + toHex(panelBackgroundColor, 5) + ");";
var boxShadow = "-webkit-box-shadow: inset 0 1px 0 rgba(255, 255, 255, 0.4), 0 1px 1px rgba(0, 0, 0, 0.2);";
var boxActiveShadow = "-webkit-box-shadow: inset 0 1px 4px rgba(0, 0, 0, 0.6);";
var isPanelThemeLight = panelBackgroundColor.red > 127;
var fontColor, disabledFontColor;
var borderColor;
var inputBackgroundColor;
var gradientHighlightBg;
if(isPanelThemeLight) {
fontColor = "#000000;";
disabledFontColor = "color:" + toHex(panelBackgroundColor, -70) + ";";
borderColor = "border-color: " + toHex(panelBackgroundColor, -90) + ";";
inputBackgroundColor = toHex(panelBackgroundColor, 54) + ";";
gradientHighlightBg = "background-image: -webkit-linear-gradient(top, " + toHex(panelBackgroundColor, -40) + " , " + toHex(panelBackgroundColor,-50) + ");";
} else {
fontColor = "#ffffff;";
disabledFontColor = "color:" + toHex(panelBackgroundColor, 100) + ";";
borderColor = "border-color: " + toHex(panelBackgroundColor, -45) + ";";
inputBackgroundColor = toHex(panelBackgroundColor, -20) + ";";
gradientHighlightBg = "background-image: -webkit-linear-gradient(top, " + toHex(panelBackgroundColor, -20) + " , " + toHex(panelBackgroundColor, -30) + ");";
}
//Update the default text style with pp values
addRule(styleId, ".default", "font-size:" + appSkinInfo.baseFontSize + "px" + "; color:" + fontColor + "; background-color:" + toHex(panelBackgroundColor) + ";");
addRule(styleId, "button, select, input[type=text], input[type=button], input[type=submit]", borderColor);
addRule(styleId, "button, select, input[type=button], input[type=submit]", gradientBg);
addRule(styleId, "button, select, input[type=button], input[type=submit]", boxShadow);
addRule(styleId, "button:enabled:active, input[type=button]:enabled:active, input[type=submit]:enabled:active", gradientHighlightBg);
addRule(styleId, "button:enabled:active, input[type=button]:enabled:active, input[type=submit]:enabled:active", boxActiveShadow);
addRule(styleId, "[disabled]", gradientDisabledBg);
addRule(styleId, "[disabled]", disabledFontColor);
addRule(styleId, "input[type=text]", "padding:1px 3px;");
addRule(styleId, "input[type=text]", "background-color: " + inputBackgroundColor) + ";";
addRule(styleId, "input[type=text]:focus", "background-color: #ffffff;");
addRule(styleId, "input[type=text]:focus", "color: #000000;");
} else {
// For AI, ID and FL use old implementation
addRule(styleId, ".default", "font-size:" + appSkinInfo.baseFontSize + "px" + "; color:" + reverseColor(panelBackgroundColor) + "; background-color:" + toHex(panelBackgroundColor, 20));
addRule(styleId, "button", "border-color: " + toHex(panelBgColor, -50));
}
}
function addRule(stylesheetId, selector, rule) {
var stylesheet = document.getElementById(stylesheetId);
if (stylesheet) {
stylesheet = stylesheet.sheet;
if( stylesheet.addRule ){
stylesheet.addRule(selector, rule);
} else if( stylesheet.insertRule ){
stylesheet.insertRule(selector + ' { ' + rule + ' }', stylesheet.cssRules.length);
}
}
}
function reverseColor(color, delta) {
return toHex({red:Math.abs(255-color.red), green:Math.abs(255-color.green), blue:Math.abs(255-color.blue)}, delta);
}
/**
* Convert the Color object to string in hexadecimal format;
*/
function toHex(color, delta) {
function computeValue(value, delta) {
var computedValue = !isNaN(delta) ? value + delta : value;
if (computedValue < 0) {
computedValue = 0;
} else if (computedValue > 255) {
computedValue = 255;
}
computedValue = computedValue.toString(16);
return computedValue.length == 1 ? "0" + computedValue : computedValue;
}
var hex = "";
if (color) {
with (color) {
hex = computeValue(red, delta) + computeValue(green, delta) + computeValue(blue, delta);
};
}
return "#" + hex;
}
function onAppThemeColorChanged(event) {
// Should get a latest HostEnvironment object from application.
var skinInfo = JSON.parse(window.__adobe_cep__.getHostEnvironment()).appSkinInfo;
// Gets the style information such as color info from the skinInfo,
// and redraw all UI controls of your extension according to the style info.
updateThemeWithAppSkinInfo(skinInfo);
}
/**
* Load JSX file into the scripting context of the product. All the jsx files in
* folder [ExtensionRoot]/jsx will be loaded.
*/
function loadJSX() {
var csInterface = new CSInterface();
var extensionRoot = csInterface.getSystemPath(SystemPath.EXTENSION) + "/jsx/";
csInterface.evalScript('$._ext.evalFiles("' + extensionRoot + '")');
}
function evalScript(script, callback) {
new CSInterface().evalScript(script, callback);
}
function onClickButton(ppid) {
if(ppid == "FLPR"){
var jsfl = 'fl.createDocument(); fl.getDocumentDOM().addNewText({left:100, top:100, right:300, bottom:300} , "Hello Flash!" ); ';
evalScript(jsfl);
} else {
var extScript = "$._ext_" + ppid + ".run()";
evalScript(extScript);
}
} | function onLoaded() {
var csInterface = new CSInterface();
| random_line_split | |
ext.js |
function onLoaded() {
var csInterface = new CSInterface();
var appName = csInterface.hostEnvironment.appName;
if(appName != "FLPR"){
loadJSX();
}
var appNames = ["PHXS"];
for (var i = 0; i < appNames.length; i++) {
var name = appNames[i];
if (appName.indexOf(name) >= 0) {
var btn = document.getElementById("btn_" + name);
if (btn)
btn.disabled = false;
}
}
updateThemeWithAppSkinInfo(csInterface.hostEnvironment.appSkinInfo);
// Update the color of the panel when the theme color of the product changed.
csInterface.addEventListener(CSInterface.THEME_COLOR_CHANGED_EVENT, onAppThemeColorChanged);
}
/**
* Update the theme with the AppSkinInfo retrieved from the host product.
*/
function updateThemeWithAppSkinInfo(appSkinInfo) {
//Update the background color of the panel
var panelBackgroundColor = appSkinInfo.panelBackgroundColor.color;
document.body.bgColor = toHex(panelBackgroundColor);
var styleId = "ppstyle";
var csInterface = new CSInterface();
var appName = csInterface.hostEnvironment.appName;
if(appName == "PHXS"){
addRule(styleId, "button, select, input[type=button], input[type=submit]", "border-radius:3px;");
}
if(appName == "PHXS" || appName == "PPRO" || appName == "PRLD") {
////////////////////////////////////////////////////////////////////////////////////////////////
// NOTE: Below theme related code are only suitable for Photoshop. //
// If you want to achieve same effect on other products please make your own changes here. //
////////////////////////////////////////////////////////////////////////////////////////////////
var gradientBg = "background-image: -webkit-linear-gradient(top, " + toHex(panelBackgroundColor, 40) + " , " + toHex(panelBackgroundColor, 10) + ");";
var gradientDisabledBg = "background-image: -webkit-linear-gradient(top, " + toHex(panelBackgroundColor, 15) + " , " + toHex(panelBackgroundColor, 5) + ");";
var boxShadow = "-webkit-box-shadow: inset 0 1px 0 rgba(255, 255, 255, 0.4), 0 1px 1px rgba(0, 0, 0, 0.2);";
var boxActiveShadow = "-webkit-box-shadow: inset 0 1px 4px rgba(0, 0, 0, 0.6);";
var isPanelThemeLight = panelBackgroundColor.red > 127;
var fontColor, disabledFontColor;
var borderColor;
var inputBackgroundColor;
var gradientHighlightBg;
if(isPanelThemeLight) {
fontColor = "#000000;";
disabledFontColor = "color:" + toHex(panelBackgroundColor, -70) + ";";
borderColor = "border-color: " + toHex(panelBackgroundColor, -90) + ";";
inputBackgroundColor = toHex(panelBackgroundColor, 54) + ";";
gradientHighlightBg = "background-image: -webkit-linear-gradient(top, " + toHex(panelBackgroundColor, -40) + " , " + toHex(panelBackgroundColor,-50) + ");";
} else {
fontColor = "#ffffff;";
disabledFontColor = "color:" + toHex(panelBackgroundColor, 100) + ";";
borderColor = "border-color: " + toHex(panelBackgroundColor, -45) + ";";
inputBackgroundColor = toHex(panelBackgroundColor, -20) + ";";
gradientHighlightBg = "background-image: -webkit-linear-gradient(top, " + toHex(panelBackgroundColor, -20) + " , " + toHex(panelBackgroundColor, -30) + ");";
}
//Update the default text style with pp values
addRule(styleId, ".default", "font-size:" + appSkinInfo.baseFontSize + "px" + "; color:" + fontColor + "; background-color:" + toHex(panelBackgroundColor) + ";");
addRule(styleId, "button, select, input[type=text], input[type=button], input[type=submit]", borderColor);
addRule(styleId, "button, select, input[type=button], input[type=submit]", gradientBg);
addRule(styleId, "button, select, input[type=button], input[type=submit]", boxShadow);
addRule(styleId, "button:enabled:active, input[type=button]:enabled:active, input[type=submit]:enabled:active", gradientHighlightBg);
addRule(styleId, "button:enabled:active, input[type=button]:enabled:active, input[type=submit]:enabled:active", boxActiveShadow);
addRule(styleId, "[disabled]", gradientDisabledBg);
addRule(styleId, "[disabled]", disabledFontColor);
addRule(styleId, "input[type=text]", "padding:1px 3px;");
addRule(styleId, "input[type=text]", "background-color: " + inputBackgroundColor) + ";";
addRule(styleId, "input[type=text]:focus", "background-color: #ffffff;");
addRule(styleId, "input[type=text]:focus", "color: #000000;");
} else {
// For AI, ID and FL use old implementation
addRule(styleId, ".default", "font-size:" + appSkinInfo.baseFontSize + "px" + "; color:" + reverseColor(panelBackgroundColor) + "; background-color:" + toHex(panelBackgroundColor, 20));
addRule(styleId, "button", "border-color: " + toHex(panelBgColor, -50));
}
}
function addRule(stylesheetId, selector, rule) {
var stylesheet = document.getElementById(stylesheetId);
if (stylesheet) {
stylesheet = stylesheet.sheet;
if( stylesheet.addRule ){
stylesheet.addRule(selector, rule);
} else if( stylesheet.insertRule ){
stylesheet.insertRule(selector + ' { ' + rule + ' }', stylesheet.cssRules.length);
}
}
}
function reverseColor(color, delta) {
return toHex({red:Math.abs(255-color.red), green:Math.abs(255-color.green), blue:Math.abs(255-color.blue)}, delta);
}
/**
* Convert the Color object to string in hexadecimal format;
*/
function toHex(color, delta) {
function | (value, delta) {
var computedValue = !isNaN(delta) ? value + delta : value;
if (computedValue < 0) {
computedValue = 0;
} else if (computedValue > 255) {
computedValue = 255;
}
computedValue = computedValue.toString(16);
return computedValue.length == 1 ? "0" + computedValue : computedValue;
}
var hex = "";
if (color) {
with (color) {
hex = computeValue(red, delta) + computeValue(green, delta) + computeValue(blue, delta);
};
}
return "#" + hex;
}
function onAppThemeColorChanged(event) {
// Should get a latest HostEnvironment object from application.
var skinInfo = JSON.parse(window.__adobe_cep__.getHostEnvironment()).appSkinInfo;
// Gets the style information such as color info from the skinInfo,
// and redraw all UI controls of your extension according to the style info.
updateThemeWithAppSkinInfo(skinInfo);
}
/**
* Load JSX file into the scripting context of the product. All the jsx files in
* folder [ExtensionRoot]/jsx will be loaded.
*/
function loadJSX() {
var csInterface = new CSInterface();
var extensionRoot = csInterface.getSystemPath(SystemPath.EXTENSION) + "/jsx/";
csInterface.evalScript('$._ext.evalFiles("' + extensionRoot + '")');
}
function evalScript(script, callback) {
new CSInterface().evalScript(script, callback);
}
function onClickButton(ppid) {
if(ppid == "FLPR"){
var jsfl = 'fl.createDocument(); fl.getDocumentDOM().addNewText({left:100, top:100, right:300, bottom:300} , "Hello Flash!" ); ';
evalScript(jsfl);
} else {
var extScript = "$._ext_" + ppid + ".run()";
evalScript(extScript);
}
}
| computeValue | identifier_name |
ext.js |
function onLoaded() {
var csInterface = new CSInterface();
var appName = csInterface.hostEnvironment.appName;
if(appName != "FLPR") |
var appNames = ["PHXS"];
for (var i = 0; i < appNames.length; i++) {
var name = appNames[i];
if (appName.indexOf(name) >= 0) {
var btn = document.getElementById("btn_" + name);
if (btn)
btn.disabled = false;
}
}
updateThemeWithAppSkinInfo(csInterface.hostEnvironment.appSkinInfo);
// Update the color of the panel when the theme color of the product changed.
csInterface.addEventListener(CSInterface.THEME_COLOR_CHANGED_EVENT, onAppThemeColorChanged);
}
/**
* Update the theme with the AppSkinInfo retrieved from the host product.
*/
function updateThemeWithAppSkinInfo(appSkinInfo) {
//Update the background color of the panel
var panelBackgroundColor = appSkinInfo.panelBackgroundColor.color;
document.body.bgColor = toHex(panelBackgroundColor);
var styleId = "ppstyle";
var csInterface = new CSInterface();
var appName = csInterface.hostEnvironment.appName;
if(appName == "PHXS"){
addRule(styleId, "button, select, input[type=button], input[type=submit]", "border-radius:3px;");
}
if(appName == "PHXS" || appName == "PPRO" || appName == "PRLD") {
////////////////////////////////////////////////////////////////////////////////////////////////
// NOTE: Below theme related code are only suitable for Photoshop. //
// If you want to achieve same effect on other products please make your own changes here. //
////////////////////////////////////////////////////////////////////////////////////////////////
var gradientBg = "background-image: -webkit-linear-gradient(top, " + toHex(panelBackgroundColor, 40) + " , " + toHex(panelBackgroundColor, 10) + ");";
var gradientDisabledBg = "background-image: -webkit-linear-gradient(top, " + toHex(panelBackgroundColor, 15) + " , " + toHex(panelBackgroundColor, 5) + ");";
var boxShadow = "-webkit-box-shadow: inset 0 1px 0 rgba(255, 255, 255, 0.4), 0 1px 1px rgba(0, 0, 0, 0.2);";
var boxActiveShadow = "-webkit-box-shadow: inset 0 1px 4px rgba(0, 0, 0, 0.6);";
var isPanelThemeLight = panelBackgroundColor.red > 127;
var fontColor, disabledFontColor;
var borderColor;
var inputBackgroundColor;
var gradientHighlightBg;
if(isPanelThemeLight) {
fontColor = "#000000;";
disabledFontColor = "color:" + toHex(panelBackgroundColor, -70) + ";";
borderColor = "border-color: " + toHex(panelBackgroundColor, -90) + ";";
inputBackgroundColor = toHex(panelBackgroundColor, 54) + ";";
gradientHighlightBg = "background-image: -webkit-linear-gradient(top, " + toHex(panelBackgroundColor, -40) + " , " + toHex(panelBackgroundColor,-50) + ");";
} else {
fontColor = "#ffffff;";
disabledFontColor = "color:" + toHex(panelBackgroundColor, 100) + ";";
borderColor = "border-color: " + toHex(panelBackgroundColor, -45) + ";";
inputBackgroundColor = toHex(panelBackgroundColor, -20) + ";";
gradientHighlightBg = "background-image: -webkit-linear-gradient(top, " + toHex(panelBackgroundColor, -20) + " , " + toHex(panelBackgroundColor, -30) + ");";
}
//Update the default text style with pp values
addRule(styleId, ".default", "font-size:" + appSkinInfo.baseFontSize + "px" + "; color:" + fontColor + "; background-color:" + toHex(panelBackgroundColor) + ";");
addRule(styleId, "button, select, input[type=text], input[type=button], input[type=submit]", borderColor);
addRule(styleId, "button, select, input[type=button], input[type=submit]", gradientBg);
addRule(styleId, "button, select, input[type=button], input[type=submit]", boxShadow);
addRule(styleId, "button:enabled:active, input[type=button]:enabled:active, input[type=submit]:enabled:active", gradientHighlightBg);
addRule(styleId, "button:enabled:active, input[type=button]:enabled:active, input[type=submit]:enabled:active", boxActiveShadow);
addRule(styleId, "[disabled]", gradientDisabledBg);
addRule(styleId, "[disabled]", disabledFontColor);
addRule(styleId, "input[type=text]", "padding:1px 3px;");
addRule(styleId, "input[type=text]", "background-color: " + inputBackgroundColor) + ";";
addRule(styleId, "input[type=text]:focus", "background-color: #ffffff;");
addRule(styleId, "input[type=text]:focus", "color: #000000;");
} else {
// For AI, ID and FL use old implementation
addRule(styleId, ".default", "font-size:" + appSkinInfo.baseFontSize + "px" + "; color:" + reverseColor(panelBackgroundColor) + "; background-color:" + toHex(panelBackgroundColor, 20));
addRule(styleId, "button", "border-color: " + toHex(panelBgColor, -50));
}
}
function addRule(stylesheetId, selector, rule) {
var stylesheet = document.getElementById(stylesheetId);
if (stylesheet) {
stylesheet = stylesheet.sheet;
if( stylesheet.addRule ){
stylesheet.addRule(selector, rule);
} else if( stylesheet.insertRule ){
stylesheet.insertRule(selector + ' { ' + rule + ' }', stylesheet.cssRules.length);
}
}
}
function reverseColor(color, delta) {
return toHex({red:Math.abs(255-color.red), green:Math.abs(255-color.green), blue:Math.abs(255-color.blue)}, delta);
}
/**
* Convert the Color object to string in hexadecimal format;
*/
function toHex(color, delta) {
function computeValue(value, delta) {
var computedValue = !isNaN(delta) ? value + delta : value;
if (computedValue < 0) {
computedValue = 0;
} else if (computedValue > 255) {
computedValue = 255;
}
computedValue = computedValue.toString(16);
return computedValue.length == 1 ? "0" + computedValue : computedValue;
}
var hex = "";
if (color) {
with (color) {
hex = computeValue(red, delta) + computeValue(green, delta) + computeValue(blue, delta);
};
}
return "#" + hex;
}
function onAppThemeColorChanged(event) {
// Should get a latest HostEnvironment object from application.
var skinInfo = JSON.parse(window.__adobe_cep__.getHostEnvironment()).appSkinInfo;
// Gets the style information such as color info from the skinInfo,
// and redraw all UI controls of your extension according to the style info.
updateThemeWithAppSkinInfo(skinInfo);
}
/**
* Load JSX file into the scripting context of the product. All the jsx files in
* folder [ExtensionRoot]/jsx will be loaded.
*/
function loadJSX() {
var csInterface = new CSInterface();
var extensionRoot = csInterface.getSystemPath(SystemPath.EXTENSION) + "/jsx/";
csInterface.evalScript('$._ext.evalFiles("' + extensionRoot + '")');
}
function evalScript(script, callback) {
new CSInterface().evalScript(script, callback);
}
function onClickButton(ppid) {
if(ppid == "FLPR"){
var jsfl = 'fl.createDocument(); fl.getDocumentDOM().addNewText({left:100, top:100, right:300, bottom:300} , "Hello Flash!" ); ';
evalScript(jsfl);
} else {
var extScript = "$._ext_" + ppid + ".run()";
evalScript(extScript);
}
}
| {
loadJSX();
} | conditional_block |
ext.js |
function onLoaded() {
var csInterface = new CSInterface();
var appName = csInterface.hostEnvironment.appName;
if(appName != "FLPR"){
loadJSX();
}
var appNames = ["PHXS"];
for (var i = 0; i < appNames.length; i++) {
var name = appNames[i];
if (appName.indexOf(name) >= 0) {
var btn = document.getElementById("btn_" + name);
if (btn)
btn.disabled = false;
}
}
updateThemeWithAppSkinInfo(csInterface.hostEnvironment.appSkinInfo);
// Update the color of the panel when the theme color of the product changed.
csInterface.addEventListener(CSInterface.THEME_COLOR_CHANGED_EVENT, onAppThemeColorChanged);
}
/**
* Update the theme with the AppSkinInfo retrieved from the host product.
*/
function updateThemeWithAppSkinInfo(appSkinInfo) {
//Update the background color of the panel
var panelBackgroundColor = appSkinInfo.panelBackgroundColor.color;
document.body.bgColor = toHex(panelBackgroundColor);
var styleId = "ppstyle";
var csInterface = new CSInterface();
var appName = csInterface.hostEnvironment.appName;
if(appName == "PHXS"){
addRule(styleId, "button, select, input[type=button], input[type=submit]", "border-radius:3px;");
}
if(appName == "PHXS" || appName == "PPRO" || appName == "PRLD") {
////////////////////////////////////////////////////////////////////////////////////////////////
// NOTE: Below theme related code are only suitable for Photoshop. //
// If you want to achieve same effect on other products please make your own changes here. //
////////////////////////////////////////////////////////////////////////////////////////////////
var gradientBg = "background-image: -webkit-linear-gradient(top, " + toHex(panelBackgroundColor, 40) + " , " + toHex(panelBackgroundColor, 10) + ");";
var gradientDisabledBg = "background-image: -webkit-linear-gradient(top, " + toHex(panelBackgroundColor, 15) + " , " + toHex(panelBackgroundColor, 5) + ");";
var boxShadow = "-webkit-box-shadow: inset 0 1px 0 rgba(255, 255, 255, 0.4), 0 1px 1px rgba(0, 0, 0, 0.2);";
var boxActiveShadow = "-webkit-box-shadow: inset 0 1px 4px rgba(0, 0, 0, 0.6);";
var isPanelThemeLight = panelBackgroundColor.red > 127;
var fontColor, disabledFontColor;
var borderColor;
var inputBackgroundColor;
var gradientHighlightBg;
if(isPanelThemeLight) {
fontColor = "#000000;";
disabledFontColor = "color:" + toHex(panelBackgroundColor, -70) + ";";
borderColor = "border-color: " + toHex(panelBackgroundColor, -90) + ";";
inputBackgroundColor = toHex(panelBackgroundColor, 54) + ";";
gradientHighlightBg = "background-image: -webkit-linear-gradient(top, " + toHex(panelBackgroundColor, -40) + " , " + toHex(panelBackgroundColor,-50) + ");";
} else {
fontColor = "#ffffff;";
disabledFontColor = "color:" + toHex(panelBackgroundColor, 100) + ";";
borderColor = "border-color: " + toHex(panelBackgroundColor, -45) + ";";
inputBackgroundColor = toHex(panelBackgroundColor, -20) + ";";
gradientHighlightBg = "background-image: -webkit-linear-gradient(top, " + toHex(panelBackgroundColor, -20) + " , " + toHex(panelBackgroundColor, -30) + ");";
}
//Update the default text style with pp values
addRule(styleId, ".default", "font-size:" + appSkinInfo.baseFontSize + "px" + "; color:" + fontColor + "; background-color:" + toHex(panelBackgroundColor) + ";");
addRule(styleId, "button, select, input[type=text], input[type=button], input[type=submit]", borderColor);
addRule(styleId, "button, select, input[type=button], input[type=submit]", gradientBg);
addRule(styleId, "button, select, input[type=button], input[type=submit]", boxShadow);
addRule(styleId, "button:enabled:active, input[type=button]:enabled:active, input[type=submit]:enabled:active", gradientHighlightBg);
addRule(styleId, "button:enabled:active, input[type=button]:enabled:active, input[type=submit]:enabled:active", boxActiveShadow);
addRule(styleId, "[disabled]", gradientDisabledBg);
addRule(styleId, "[disabled]", disabledFontColor);
addRule(styleId, "input[type=text]", "padding:1px 3px;");
addRule(styleId, "input[type=text]", "background-color: " + inputBackgroundColor) + ";";
addRule(styleId, "input[type=text]:focus", "background-color: #ffffff;");
addRule(styleId, "input[type=text]:focus", "color: #000000;");
} else {
// For AI, ID and FL use old implementation
addRule(styleId, ".default", "font-size:" + appSkinInfo.baseFontSize + "px" + "; color:" + reverseColor(panelBackgroundColor) + "; background-color:" + toHex(panelBackgroundColor, 20));
addRule(styleId, "button", "border-color: " + toHex(panelBgColor, -50));
}
}
function addRule(stylesheetId, selector, rule) {
var stylesheet = document.getElementById(stylesheetId);
if (stylesheet) {
stylesheet = stylesheet.sheet;
if( stylesheet.addRule ){
stylesheet.addRule(selector, rule);
} else if( stylesheet.insertRule ){
stylesheet.insertRule(selector + ' { ' + rule + ' }', stylesheet.cssRules.length);
}
}
}
function reverseColor(color, delta) {
return toHex({red:Math.abs(255-color.red), green:Math.abs(255-color.green), blue:Math.abs(255-color.blue)}, delta);
}
/**
* Convert the Color object to string in hexadecimal format;
*/
function toHex(color, delta) |
function onAppThemeColorChanged(event) {
// Should get a latest HostEnvironment object from application.
var skinInfo = JSON.parse(window.__adobe_cep__.getHostEnvironment()).appSkinInfo;
// Gets the style information such as color info from the skinInfo,
// and redraw all UI controls of your extension according to the style info.
updateThemeWithAppSkinInfo(skinInfo);
}
/**
* Load JSX file into the scripting context of the product. All the jsx files in
* folder [ExtensionRoot]/jsx will be loaded.
*/
function loadJSX() {
var csInterface = new CSInterface();
var extensionRoot = csInterface.getSystemPath(SystemPath.EXTENSION) + "/jsx/";
csInterface.evalScript('$._ext.evalFiles("' + extensionRoot + '")');
}
function evalScript(script, callback) {
new CSInterface().evalScript(script, callback);
}
function onClickButton(ppid) {
if(ppid == "FLPR"){
var jsfl = 'fl.createDocument(); fl.getDocumentDOM().addNewText({left:100, top:100, right:300, bottom:300} , "Hello Flash!" ); ';
evalScript(jsfl);
} else {
var extScript = "$._ext_" + ppid + ".run()";
evalScript(extScript);
}
}
| {
function computeValue(value, delta) {
var computedValue = !isNaN(delta) ? value + delta : value;
if (computedValue < 0) {
computedValue = 0;
} else if (computedValue > 255) {
computedValue = 255;
}
computedValue = computedValue.toString(16);
return computedValue.length == 1 ? "0" + computedValue : computedValue;
}
var hex = "";
if (color) {
with (color) {
hex = computeValue(red, delta) + computeValue(green, delta) + computeValue(blue, delta);
};
}
return "#" + hex;
} | identifier_body |
regions-fn-subtyping.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// run-pass
#![allow(dead_code)]
#![allow(unused_assignments)]
// Issue #2263.
// pretty-expanded FIXME #23616
#![allow(unused_variables)]
// Should pass region checking.
fn | (f: Box<FnMut(&usize)>) {
// Here, g is a function that can accept a usize pointer with
// lifetime r, and f is a function that can accept a usize pointer
// with any lifetime. The assignment g = f should be OK (i.e.,
// f's type should be a subtype of g's type), because f can be
// used in any context that expects g's type. But this currently
// fails.
let mut g: Box<for<'r> FnMut(&'r usize)> = Box::new(|x| { });
g = f;
}
// This version is the same as above, except that here, g's type is
// inferred.
fn ok_inferred(f: Box<FnMut(&usize)>) {
let mut g: Box<for<'r> FnMut(&'r usize)> = Box::new(|_| {});
g = f;
}
pub fn main() {
}
| ok | identifier_name |
regions-fn-subtyping.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// run-pass
#![allow(dead_code)]
#![allow(unused_assignments)]
// Issue #2263.
// pretty-expanded FIXME #23616
#![allow(unused_variables)]
// Should pass region checking.
fn ok(f: Box<FnMut(&usize)>) {
// Here, g is a function that can accept a usize pointer with
// lifetime r, and f is a function that can accept a usize pointer
// with any lifetime. The assignment g = f should be OK (i.e.,
// f's type should be a subtype of g's type), because f can be
// used in any context that expects g's type. But this currently
// fails.
let mut g: Box<for<'r> FnMut(&'r usize)> = Box::new(|x| { });
g = f;
}
// This version is the same as above, except that here, g's type is
// inferred.
fn ok_inferred(f: Box<FnMut(&usize)>) {
let mut g: Box<for<'r> FnMut(&'r usize)> = Box::new(|_| {});
g = f;
}
pub fn main() | {
} | identifier_body | |
regions-fn-subtyping.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// run-pass
#![allow(dead_code)]
#![allow(unused_assignments)]
// Issue #2263.
// pretty-expanded FIXME #23616
#![allow(unused_variables)]
// Should pass region checking.
fn ok(f: Box<FnMut(&usize)>) {
// Here, g is a function that can accept a usize pointer with
// lifetime r, and f is a function that can accept a usize pointer
// with any lifetime. The assignment g = f should be OK (i.e.,
// f's type should be a subtype of g's type), because f can be
// used in any context that expects g's type. But this currently
// fails.
let mut g: Box<for<'r> FnMut(&'r usize)> = Box::new(|x| { });
g = f;
}
// This version is the same as above, except that here, g's type is
// inferred.
fn ok_inferred(f: Box<FnMut(&usize)>) {
let mut g: Box<for<'r> FnMut(&'r usize)> = Box::new(|_| {});
g = f;
} | } |
pub fn main() { | random_line_split |
lib.rs | //! Asynchronous channels.
//!
//! This crate provides channels that can be used to communicate between
//! asynchronous tasks.
//!
//! All items of this library are only available when the `std` or `alloc` feature of this
//! library is activated, and it is activated by default.
#![cfg_attr(feature = "cfg-target-has-atomic", feature(cfg_target_has_atomic))]
#![cfg_attr(not(feature = "std"), no_std)]
#![warn(missing_docs, missing_debug_implementations, rust_2018_idioms, unreachable_pub)]
// It cannot be included in the published code because this lints have false positives in the minimum required version.
#![cfg_attr(test, warn(single_use_lifetimes))]
#![warn(clippy::all)]
#![doc(test(attr(deny(warnings), allow(dead_code, unused_assignments, unused_variables))))]
#![doc(html_root_url = "https://docs.rs/futures-channel/0.3.0")]
#[cfg(all(feature = "cfg-target-has-atomic", not(feature = "unstable")))]
compile_error!("The `cfg-target-has-atomic` feature requires the `unstable` feature as an explicit opt-in to unstable features");
| ($($item:item)*) => {$(
#[cfg_attr(feature = "cfg-target-has-atomic", cfg(target_has_atomic = "ptr"))]
$item
)*};
}
cfg_target_has_atomic! {
#[cfg(feature = "alloc")]
extern crate alloc;
#[cfg(feature = "alloc")]
mod lock;
#[cfg(feature = "std")]
pub mod mpsc;
#[cfg(feature = "alloc")]
pub mod oneshot;
} | macro_rules! cfg_target_has_atomic { | random_line_split |
process-spawn-with-unicode-params.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// no-prefer-dynamic
// The test copies itself into a subdirectory with a non-ASCII name and then
// runs it as a child process within the subdirectory. The parent process
// also adds an environment variable and an argument, both containing
// non-ASCII characters. The child process ensures all the strings are
// intact.
use std::old_io;
use std::old_io::fs;
use std::old_io::Command;
use std::os;
use std::old_path::Path;
fn main() {
let my_args = os::args();
let my_cwd = os::getcwd().unwrap();
let my_env = os::env();
let my_path = Path::new(os::self_exe_name().unwrap());
let my_dir = my_path.dir_path();
let my_ext = my_path.extension_str().unwrap_or("");
// some non-ASCII characters
let blah = "\u03c0\u042f\u97f3\u00e6\u221e";
let child_name = "child";
let child_dir = format!("process-spawn-with-unicode-params-{}", blah);
// parameters sent to child / expected to be received from parent
let arg = blah;
let cwd = my_dir.join(Path::new(child_dir.clone()));
let env = ("RUST_TEST_PROC_SPAWN_UNICODE".to_string(), blah.to_string());
// am I the parent or the child?
if my_args.len() == 1 { // parent
let child_filestem = Path::new(child_name);
let child_filename = child_filestem.with_extension(my_ext);
let child_path = cwd.join(child_filename);
// make a separate directory for the child
drop(fs::mkdir(&cwd, old_io::USER_RWX).is_ok()); | my_env.push(env);
// run child
let p = Command::new(&child_path)
.arg(arg)
.cwd(&cwd)
.env_set_all(&my_env)
.spawn().unwrap().wait_with_output().unwrap();
// display the output
assert!(old_io::stdout().write(&p.output).is_ok());
assert!(old_io::stderr().write(&p.error).is_ok());
// make sure the child succeeded
assert!(p.status.success());
} else { // child
// check working directory (don't try to compare with `cwd` here!)
assert!(my_cwd.ends_with_path(&Path::new(child_dir)));
// check arguments
assert_eq!(&*my_args[1], arg);
// check environment variable
assert!(my_env.contains(&env));
};
} | assert!(fs::copy(&my_path, &child_path).is_ok());
let mut my_env = my_env; | random_line_split |
process-spawn-with-unicode-params.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// no-prefer-dynamic
// The test copies itself into a subdirectory with a non-ASCII name and then
// runs it as a child process within the subdirectory. The parent process
// also adds an environment variable and an argument, both containing
// non-ASCII characters. The child process ensures all the strings are
// intact.
use std::old_io;
use std::old_io::fs;
use std::old_io::Command;
use std::os;
use std::old_path::Path;
fn main() {
let my_args = os::args();
let my_cwd = os::getcwd().unwrap();
let my_env = os::env();
let my_path = Path::new(os::self_exe_name().unwrap());
let my_dir = my_path.dir_path();
let my_ext = my_path.extension_str().unwrap_or("");
// some non-ASCII characters
let blah = "\u03c0\u042f\u97f3\u00e6\u221e";
let child_name = "child";
let child_dir = format!("process-spawn-with-unicode-params-{}", blah);
// parameters sent to child / expected to be received from parent
let arg = blah;
let cwd = my_dir.join(Path::new(child_dir.clone()));
let env = ("RUST_TEST_PROC_SPAWN_UNICODE".to_string(), blah.to_string());
// am I the parent or the child?
if my_args.len() == 1 | else { // child
// check working directory (don't try to compare with `cwd` here!)
assert!(my_cwd.ends_with_path(&Path::new(child_dir)));
// check arguments
assert_eq!(&*my_args[1], arg);
// check environment variable
assert!(my_env.contains(&env));
};
}
| { // parent
let child_filestem = Path::new(child_name);
let child_filename = child_filestem.with_extension(my_ext);
let child_path = cwd.join(child_filename);
// make a separate directory for the child
drop(fs::mkdir(&cwd, old_io::USER_RWX).is_ok());
assert!(fs::copy(&my_path, &child_path).is_ok());
let mut my_env = my_env;
my_env.push(env);
// run child
let p = Command::new(&child_path)
.arg(arg)
.cwd(&cwd)
.env_set_all(&my_env)
.spawn().unwrap().wait_with_output().unwrap();
// display the output
assert!(old_io::stdout().write(&p.output).is_ok());
assert!(old_io::stderr().write(&p.error).is_ok());
// make sure the child succeeded
assert!(p.status.success());
} | conditional_block |
process-spawn-with-unicode-params.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// no-prefer-dynamic
// The test copies itself into a subdirectory with a non-ASCII name and then
// runs it as a child process within the subdirectory. The parent process
// also adds an environment variable and an argument, both containing
// non-ASCII characters. The child process ensures all the strings are
// intact.
use std::old_io;
use std::old_io::fs;
use std::old_io::Command;
use std::os;
use std::old_path::Path;
fn | () {
let my_args = os::args();
let my_cwd = os::getcwd().unwrap();
let my_env = os::env();
let my_path = Path::new(os::self_exe_name().unwrap());
let my_dir = my_path.dir_path();
let my_ext = my_path.extension_str().unwrap_or("");
// some non-ASCII characters
let blah = "\u03c0\u042f\u97f3\u00e6\u221e";
let child_name = "child";
let child_dir = format!("process-spawn-with-unicode-params-{}", blah);
// parameters sent to child / expected to be received from parent
let arg = blah;
let cwd = my_dir.join(Path::new(child_dir.clone()));
let env = ("RUST_TEST_PROC_SPAWN_UNICODE".to_string(), blah.to_string());
// am I the parent or the child?
if my_args.len() == 1 { // parent
let child_filestem = Path::new(child_name);
let child_filename = child_filestem.with_extension(my_ext);
let child_path = cwd.join(child_filename);
// make a separate directory for the child
drop(fs::mkdir(&cwd, old_io::USER_RWX).is_ok());
assert!(fs::copy(&my_path, &child_path).is_ok());
let mut my_env = my_env;
my_env.push(env);
// run child
let p = Command::new(&child_path)
.arg(arg)
.cwd(&cwd)
.env_set_all(&my_env)
.spawn().unwrap().wait_with_output().unwrap();
// display the output
assert!(old_io::stdout().write(&p.output).is_ok());
assert!(old_io::stderr().write(&p.error).is_ok());
// make sure the child succeeded
assert!(p.status.success());
} else { // child
// check working directory (don't try to compare with `cwd` here!)
assert!(my_cwd.ends_with_path(&Path::new(child_dir)));
// check arguments
assert_eq!(&*my_args[1], arg);
// check environment variable
assert!(my_env.contains(&env));
};
}
| main | identifier_name |
process-spawn-with-unicode-params.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// no-prefer-dynamic
// The test copies itself into a subdirectory with a non-ASCII name and then
// runs it as a child process within the subdirectory. The parent process
// also adds an environment variable and an argument, both containing
// non-ASCII characters. The child process ensures all the strings are
// intact.
use std::old_io;
use std::old_io::fs;
use std::old_io::Command;
use std::os;
use std::old_path::Path;
fn main() | {
let my_args = os::args();
let my_cwd = os::getcwd().unwrap();
let my_env = os::env();
let my_path = Path::new(os::self_exe_name().unwrap());
let my_dir = my_path.dir_path();
let my_ext = my_path.extension_str().unwrap_or("");
// some non-ASCII characters
let blah = "\u03c0\u042f\u97f3\u00e6\u221e";
let child_name = "child";
let child_dir = format!("process-spawn-with-unicode-params-{}", blah);
// parameters sent to child / expected to be received from parent
let arg = blah;
let cwd = my_dir.join(Path::new(child_dir.clone()));
let env = ("RUST_TEST_PROC_SPAWN_UNICODE".to_string(), blah.to_string());
// am I the parent or the child?
if my_args.len() == 1 { // parent
let child_filestem = Path::new(child_name);
let child_filename = child_filestem.with_extension(my_ext);
let child_path = cwd.join(child_filename);
// make a separate directory for the child
drop(fs::mkdir(&cwd, old_io::USER_RWX).is_ok());
assert!(fs::copy(&my_path, &child_path).is_ok());
let mut my_env = my_env;
my_env.push(env);
// run child
let p = Command::new(&child_path)
.arg(arg)
.cwd(&cwd)
.env_set_all(&my_env)
.spawn().unwrap().wait_with_output().unwrap();
// display the output
assert!(old_io::stdout().write(&p.output).is_ok());
assert!(old_io::stderr().write(&p.error).is_ok());
// make sure the child succeeded
assert!(p.status.success());
} else { // child
// check working directory (don't try to compare with `cwd` here!)
assert!(my_cwd.ends_with_path(&Path::new(child_dir)));
// check arguments
assert_eq!(&*my_args[1], arg);
// check environment variable
assert!(my_env.contains(&env));
};
} | identifier_body | |
modules.js | var modules = {
"success" : [
{id: 1, name:"控制台", code:"console", protocol:"http", domain:"console.ecc.com", port:"18333", created:'2017-03-08 00:00:00', creator:'1', modified:'2017-03-08 00:00:00', modifier:'1'},
{id: 2, name:"服务中心", code:"service-center", protocol:"http", domain:"sc.ecc.com", port:"18222", created:'2017-03-08 00:00:00', creator:'1', modified:'2017-03-08 00:00:00', modifier:'1'}
],
"error" : {
code : "0200-ERROR", msg : "There are some errors occured."
}
}
module.exports = { | } | "modules": modules | random_line_split |
app.js | var App = angular.module("App", ["ionic"]);
App.service("ChileBitBook", ["$http","$log", ChileBitBook]);
App.controller("AppCtrl", ["$scope", "ChileBitBook", "$log", AppCtrl]);
function | ($scope, ChileBitBook, $log){
$scope.Bids = [];
$scope.Asks = [];
$scope.updateBook = function(){
ChileBitBook.getBook($scope);
};
}
function ChileBitBook($http, $log){
this.getBook = function($scope) {
$http.jsonp("https://api.blinktrade.com/api/v1/CLP/orderbook?callback=JSON_CALLBACK")
.success(function(result){
var Bid;
$.each(result.bids, function(index, stringBid){
Bid = {
Price: stringBid[0],
Volume: stringBid[1],
Id: stringBid[2],
};
$scope.Bids.push(Bid);
});
var Ask;
$.each(result.asks, function(index, stringAsk){
Ask = {
Price: stringAsk[0],
Volume: stringAsk[1],
Id: stringAsk[2],
};
$scope.Asks.push(Ask);
});
});
};
} | AppCtrl | identifier_name |
app.js | var App = angular.module("App", ["ionic"]);
App.service("ChileBitBook", ["$http","$log", ChileBitBook]);
App.controller("AppCtrl", ["$scope", "ChileBitBook", "$log", AppCtrl]);
function AppCtrl($scope, ChileBitBook, $log) |
function ChileBitBook($http, $log){
this.getBook = function($scope) {
$http.jsonp("https://api.blinktrade.com/api/v1/CLP/orderbook?callback=JSON_CALLBACK")
.success(function(result){
var Bid;
$.each(result.bids, function(index, stringBid){
Bid = {
Price: stringBid[0],
Volume: stringBid[1],
Id: stringBid[2],
};
$scope.Bids.push(Bid);
});
var Ask;
$.each(result.asks, function(index, stringAsk){
Ask = {
Price: stringAsk[0],
Volume: stringAsk[1],
Id: stringAsk[2],
};
$scope.Asks.push(Ask);
});
});
};
} | {
$scope.Bids = [];
$scope.Asks = [];
$scope.updateBook = function(){
ChileBitBook.getBook($scope);
};
} | identifier_body |
app.js | var App = angular.module("App", ["ionic"]);
App.service("ChileBitBook", ["$http","$log", ChileBitBook]);
App.controller("AppCtrl", ["$scope", "ChileBitBook", "$log", AppCtrl]);
function AppCtrl($scope, ChileBitBook, $log){
$scope.Bids = [];
$scope.Asks = [];
$scope.updateBook = function(){
ChileBitBook.getBook($scope);
};
}
function ChileBitBook($http, $log){
this.getBook = function($scope) {
$http.jsonp("https://api.blinktrade.com/api/v1/CLP/orderbook?callback=JSON_CALLBACK")
.success(function(result){
var Bid;
$.each(result.bids, function(index, stringBid){
Bid = {
Price: stringBid[0],
Volume: stringBid[1],
Id: stringBid[2],
};
$scope.Bids.push(Bid);
});
var Ask;
$.each(result.asks, function(index, stringAsk){ | Volume: stringAsk[1],
Id: stringAsk[2],
};
$scope.Asks.push(Ask);
});
});
};
} | Ask = {
Price: stringAsk[0], | random_line_split |
main.py | # -*- coding: utf-8 -*-
# This file is part of the Rocket Web Server
# Copyright (c) 2010 Timothy Farrell
# Import System Modules
import sys
import time
import socket
import logging
import traceback
from threading import Lock
try:
from queue import Queue
except ImportError:
from Queue import Queue
# Import Package Modules
from . import DEFAULTS, SERVER_SOFTWARE, NullHandler, THREAD_STOP_CHECK_INTERVAL
from .monitor import Monitor
from .threadpool import ThreadPool
from .worker import get_method
from .listener import Listener
# Setup Logging
log = logging.getLogger('Rocket')
log.addHandler(NullHandler())
class Rocket(object):
"""The Rocket class is responsible for handling threads and accepting and
dispatching connections."""
def | (self,
interfaces = ('127.0.0.1', 8000),
method = 'wsgi',
app_info = None,
min_threads = None,
max_threads = None,
queue_size = None,
timeout = 600,
handle_signals = True):
self.handle_signals = handle_signals
self.startstop_lock = Lock()
self.timeout = timeout
if not isinstance(interfaces, list):
self.interfaces = [interfaces]
else:
self.interfaces = interfaces
if min_threads is None:
min_threads = DEFAULTS['MIN_THREADS']
if max_threads is None:
max_threads = DEFAULTS['MAX_THREADS']
if not queue_size:
if hasattr(socket, 'SOMAXCONN'):
queue_size = socket.SOMAXCONN
else:
queue_size = DEFAULTS['LISTEN_QUEUE_SIZE']
if max_threads and queue_size > max_threads:
queue_size = max_threads
if isinstance(app_info, dict):
app_info['server_software'] = SERVER_SOFTWARE
self.monitor_queue = Queue()
self.active_queue = Queue()
self._threadpool = ThreadPool(get_method(method),
app_info = app_info,
active_queue = self.active_queue,
monitor_queue = self.monitor_queue,
min_threads = min_threads,
max_threads = max_threads)
# Build our socket listeners
self.listeners = [Listener(i, queue_size, self.active_queue) for i in self.interfaces]
for ndx in range(len(self.listeners)-1, 0, -1):
if not self.listeners[ndx].ready:
del self.listeners[ndx]
if not self.listeners:
log.critical("No interfaces to listen on...closing.")
sys.exit(1)
def _sigterm(self, signum, frame):
log.info('Received SIGTERM')
self.stop()
def _sighup(self, signum, frame):
log.info('Received SIGHUP')
self.restart()
def start(self, background=False):
log.info('Starting %s' % SERVER_SOFTWARE)
self.startstop_lock.acquire()
try:
# Set up our shutdown signals
if self.handle_signals:
try:
import signal
signal.signal(signal.SIGTERM, self._sigterm)
signal.signal(signal.SIGUSR1, self._sighup)
except:
log.debug('This platform does not support signals.')
# Start our worker threads
self._threadpool.start()
# Start our monitor thread
self._monitor = Monitor(self.monitor_queue,
self.active_queue,
self.timeout,
self._threadpool)
self._monitor.setDaemon(True)
self._monitor.start()
# I know that EXPR and A or B is bad but I'm keeping it for Py2.4
# compatibility.
str_extract = lambda l: (l.addr, l.port, l.secure and '*' or '')
msg = 'Listening on sockets: '
msg += ', '.join(['%s:%i%s' % str_extract(l) for l in self.listeners])
log.info(msg)
for l in self.listeners:
l.start()
finally:
self.startstop_lock.release()
if background:
return
while self._monitor.isAlive():
try:
time.sleep(THREAD_STOP_CHECK_INTERVAL)
except KeyboardInterrupt:
# Capture a keyboard interrupt when running from a console
break
except:
if self._monitor.isAlive():
log.error(str(traceback.format_exc()))
continue
return self.stop()
def stop(self, stoplogging = False):
log.info('Stopping %s' % SERVER_SOFTWARE)
self.startstop_lock.acquire()
try:
# Stop listeners
for l in self.listeners:
l.ready = False
# Encourage a context switch
time.sleep(0.01)
for l in self.listeners:
if l.isAlive():
l.join()
# Stop Monitor
self._monitor.stop()
if self._monitor.isAlive():
self._monitor.join()
# Stop Worker threads
self._threadpool.stop()
if stoplogging:
logging.shutdown()
msg = "Calling logging.shutdown() is now the responsibility of \
the application developer. Please update your \
applications to no longer call rocket.stop(True)"
try:
import warnings
raise warnings.DeprecationWarning(msg)
except ImportError:
raise RuntimeError(msg)
finally:
self.startstop_lock.release()
def restart(self):
self.stop()
self.start()
def CherryPyWSGIServer(bind_addr,
wsgi_app,
numthreads = 10,
server_name = None,
max = -1,
request_queue_size = 5,
timeout = 10,
shutdown_timeout = 5):
""" A Cherrypy wsgiserver-compatible wrapper. """
max_threads = max
if max_threads < 0:
max_threads = 0
return Rocket(bind_addr, 'wsgi', {'wsgi_app': wsgi_app},
min_threads = numthreads,
max_threads = max_threads,
queue_size = request_queue_size,
timeout = timeout)
| __init__ | identifier_name |
main.py | # -*- coding: utf-8 -*-
# This file is part of the Rocket Web Server
# Copyright (c) 2010 Timothy Farrell
# Import System Modules
import sys
import time
import socket
import logging
import traceback
from threading import Lock
try:
from queue import Queue
except ImportError:
from Queue import Queue
# Import Package Modules
from . import DEFAULTS, SERVER_SOFTWARE, NullHandler, THREAD_STOP_CHECK_INTERVAL
from .monitor import Monitor
from .threadpool import ThreadPool
from .worker import get_method
from .listener import Listener
# Setup Logging
log = logging.getLogger('Rocket')
log.addHandler(NullHandler())
class Rocket(object):
"""The Rocket class is responsible for handling threads and accepting and
dispatching connections."""
def __init__(self,
interfaces = ('127.0.0.1', 8000),
method = 'wsgi',
app_info = None,
min_threads = None,
max_threads = None,
queue_size = None,
timeout = 600,
handle_signals = True):
self.handle_signals = handle_signals
self.startstop_lock = Lock()
self.timeout = timeout
if not isinstance(interfaces, list):
self.interfaces = [interfaces]
else:
self.interfaces = interfaces
if min_threads is None:
min_threads = DEFAULTS['MIN_THREADS']
if max_threads is None:
max_threads = DEFAULTS['MAX_THREADS']
if not queue_size:
if hasattr(socket, 'SOMAXCONN'):
queue_size = socket.SOMAXCONN
else:
queue_size = DEFAULTS['LISTEN_QUEUE_SIZE']
if max_threads and queue_size > max_threads:
queue_size = max_threads
if isinstance(app_info, dict):
app_info['server_software'] = SERVER_SOFTWARE
self.monitor_queue = Queue()
self.active_queue = Queue()
self._threadpool = ThreadPool(get_method(method),
app_info = app_info,
active_queue = self.active_queue,
monitor_queue = self.monitor_queue,
min_threads = min_threads,
max_threads = max_threads)
# Build our socket listeners
self.listeners = [Listener(i, queue_size, self.active_queue) for i in self.interfaces]
for ndx in range(len(self.listeners)-1, 0, -1):
if not self.listeners[ndx].ready:
del self.listeners[ndx]
if not self.listeners:
log.critical("No interfaces to listen on...closing.")
sys.exit(1)
def _sigterm(self, signum, frame):
log.info('Received SIGTERM')
self.stop()
def _sighup(self, signum, frame):
log.info('Received SIGHUP')
self.restart()
def start(self, background=False):
log.info('Starting %s' % SERVER_SOFTWARE)
self.startstop_lock.acquire()
try:
# Set up our shutdown signals
if self.handle_signals:
try:
import signal
signal.signal(signal.SIGTERM, self._sigterm)
signal.signal(signal.SIGUSR1, self._sighup)
except:
log.debug('This platform does not support signals.')
# Start our worker threads
self._threadpool.start()
# Start our monitor thread
self._monitor = Monitor(self.monitor_queue,
self.active_queue,
self.timeout,
self._threadpool)
self._monitor.setDaemon(True)
self._monitor.start()
# I know that EXPR and A or B is bad but I'm keeping it for Py2.4
# compatibility.
str_extract = lambda l: (l.addr, l.port, l.secure and '*' or '')
msg = 'Listening on sockets: '
msg += ', '.join(['%s:%i%s' % str_extract(l) for l in self.listeners])
log.info(msg)
for l in self.listeners:
|
finally:
self.startstop_lock.release()
if background:
return
while self._monitor.isAlive():
try:
time.sleep(THREAD_STOP_CHECK_INTERVAL)
except KeyboardInterrupt:
# Capture a keyboard interrupt when running from a console
break
except:
if self._monitor.isAlive():
log.error(str(traceback.format_exc()))
continue
return self.stop()
def stop(self, stoplogging = False):
log.info('Stopping %s' % SERVER_SOFTWARE)
self.startstop_lock.acquire()
try:
# Stop listeners
for l in self.listeners:
l.ready = False
# Encourage a context switch
time.sleep(0.01)
for l in self.listeners:
if l.isAlive():
l.join()
# Stop Monitor
self._monitor.stop()
if self._monitor.isAlive():
self._monitor.join()
# Stop Worker threads
self._threadpool.stop()
if stoplogging:
logging.shutdown()
msg = "Calling logging.shutdown() is now the responsibility of \
the application developer. Please update your \
applications to no longer call rocket.stop(True)"
try:
import warnings
raise warnings.DeprecationWarning(msg)
except ImportError:
raise RuntimeError(msg)
finally:
self.startstop_lock.release()
def restart(self):
self.stop()
self.start()
def CherryPyWSGIServer(bind_addr,
wsgi_app,
numthreads = 10,
server_name = None,
max = -1,
request_queue_size = 5,
timeout = 10,
shutdown_timeout = 5):
""" A Cherrypy wsgiserver-compatible wrapper. """
max_threads = max
if max_threads < 0:
max_threads = 0
return Rocket(bind_addr, 'wsgi', {'wsgi_app': wsgi_app},
min_threads = numthreads,
max_threads = max_threads,
queue_size = request_queue_size,
timeout = timeout)
| l.start() | conditional_block |
main.py | # -*- coding: utf-8 -*-
# This file is part of the Rocket Web Server
# Copyright (c) 2010 Timothy Farrell
# Import System Modules
import sys
import time
import socket
import logging
import traceback
from threading import Lock
try:
from queue import Queue
except ImportError:
from Queue import Queue
# Import Package Modules
from . import DEFAULTS, SERVER_SOFTWARE, NullHandler, THREAD_STOP_CHECK_INTERVAL
from .monitor import Monitor
from .threadpool import ThreadPool
from .worker import get_method
from .listener import Listener
# Setup Logging
log = logging.getLogger('Rocket')
log.addHandler(NullHandler())
class Rocket(object):
"""The Rocket class is responsible for handling threads and accepting and
dispatching connections."""
def __init__(self,
interfaces = ('127.0.0.1', 8000),
method = 'wsgi',
app_info = None,
min_threads = None,
max_threads = None,
queue_size = None,
timeout = 600,
handle_signals = True):
self.handle_signals = handle_signals
self.startstop_lock = Lock()
self.timeout = timeout
if not isinstance(interfaces, list):
self.interfaces = [interfaces]
else:
self.interfaces = interfaces
if min_threads is None:
min_threads = DEFAULTS['MIN_THREADS']
if max_threads is None:
max_threads = DEFAULTS['MAX_THREADS']
if not queue_size:
if hasattr(socket, 'SOMAXCONN'):
queue_size = socket.SOMAXCONN
else:
queue_size = DEFAULTS['LISTEN_QUEUE_SIZE']
if max_threads and queue_size > max_threads:
queue_size = max_threads
if isinstance(app_info, dict):
app_info['server_software'] = SERVER_SOFTWARE
self.monitor_queue = Queue()
self.active_queue = Queue()
self._threadpool = ThreadPool(get_method(method),
app_info = app_info,
active_queue = self.active_queue,
monitor_queue = self.monitor_queue,
min_threads = min_threads,
max_threads = max_threads)
# Build our socket listeners
self.listeners = [Listener(i, queue_size, self.active_queue) for i in self.interfaces]
for ndx in range(len(self.listeners)-1, 0, -1):
if not self.listeners[ndx].ready:
del self.listeners[ndx]
if not self.listeners:
log.critical("No interfaces to listen on...closing.")
sys.exit(1)
def _sigterm(self, signum, frame):
log.info('Received SIGTERM')
self.stop()
def _sighup(self, signum, frame):
log.info('Received SIGHUP')
self.restart()
def start(self, background=False):
|
def stop(self, stoplogging = False):
log.info('Stopping %s' % SERVER_SOFTWARE)
self.startstop_lock.acquire()
try:
# Stop listeners
for l in self.listeners:
l.ready = False
# Encourage a context switch
time.sleep(0.01)
for l in self.listeners:
if l.isAlive():
l.join()
# Stop Monitor
self._monitor.stop()
if self._monitor.isAlive():
self._monitor.join()
# Stop Worker threads
self._threadpool.stop()
if stoplogging:
logging.shutdown()
msg = "Calling logging.shutdown() is now the responsibility of \
the application developer. Please update your \
applications to no longer call rocket.stop(True)"
try:
import warnings
raise warnings.DeprecationWarning(msg)
except ImportError:
raise RuntimeError(msg)
finally:
self.startstop_lock.release()
def restart(self):
self.stop()
self.start()
def CherryPyWSGIServer(bind_addr,
wsgi_app,
numthreads = 10,
server_name = None,
max = -1,
request_queue_size = 5,
timeout = 10,
shutdown_timeout = 5):
""" A Cherrypy wsgiserver-compatible wrapper. """
max_threads = max
if max_threads < 0:
max_threads = 0
return Rocket(bind_addr, 'wsgi', {'wsgi_app': wsgi_app},
min_threads = numthreads,
max_threads = max_threads,
queue_size = request_queue_size,
timeout = timeout)
| log.info('Starting %s' % SERVER_SOFTWARE)
self.startstop_lock.acquire()
try:
# Set up our shutdown signals
if self.handle_signals:
try:
import signal
signal.signal(signal.SIGTERM, self._sigterm)
signal.signal(signal.SIGUSR1, self._sighup)
except:
log.debug('This platform does not support signals.')
# Start our worker threads
self._threadpool.start()
# Start our monitor thread
self._monitor = Monitor(self.monitor_queue,
self.active_queue,
self.timeout,
self._threadpool)
self._monitor.setDaemon(True)
self._monitor.start()
# I know that EXPR and A or B is bad but I'm keeping it for Py2.4
# compatibility.
str_extract = lambda l: (l.addr, l.port, l.secure and '*' or '')
msg = 'Listening on sockets: '
msg += ', '.join(['%s:%i%s' % str_extract(l) for l in self.listeners])
log.info(msg)
for l in self.listeners:
l.start()
finally:
self.startstop_lock.release()
if background:
return
while self._monitor.isAlive():
try:
time.sleep(THREAD_STOP_CHECK_INTERVAL)
except KeyboardInterrupt:
# Capture a keyboard interrupt when running from a console
break
except:
if self._monitor.isAlive():
log.error(str(traceback.format_exc()))
continue
return self.stop() | identifier_body |
main.py | # -*- coding: utf-8 -*-
# This file is part of the Rocket Web Server
# Copyright (c) 2010 Timothy Farrell
# Import System Modules
import sys
import time
import socket
import logging
import traceback
from threading import Lock
try:
from queue import Queue
except ImportError:
from Queue import Queue
# Import Package Modules
from . import DEFAULTS, SERVER_SOFTWARE, NullHandler, THREAD_STOP_CHECK_INTERVAL
from .monitor import Monitor
from .threadpool import ThreadPool
from .worker import get_method
from .listener import Listener
# Setup Logging
log = logging.getLogger('Rocket')
log.addHandler(NullHandler())
class Rocket(object):
"""The Rocket class is responsible for handling threads and accepting and
dispatching connections."""
def __init__(self,
interfaces = ('127.0.0.1', 8000),
method = 'wsgi',
app_info = None,
min_threads = None,
max_threads = None,
queue_size = None,
timeout = 600,
handle_signals = True):
self.handle_signals = handle_signals
self.startstop_lock = Lock()
self.timeout = timeout
if not isinstance(interfaces, list):
self.interfaces = [interfaces]
else:
self.interfaces = interfaces
if min_threads is None:
min_threads = DEFAULTS['MIN_THREADS']
if max_threads is None:
max_threads = DEFAULTS['MAX_THREADS']
if not queue_size:
if hasattr(socket, 'SOMAXCONN'):
queue_size = socket.SOMAXCONN
else:
queue_size = DEFAULTS['LISTEN_QUEUE_SIZE']
if max_threads and queue_size > max_threads:
queue_size = max_threads
if isinstance(app_info, dict):
app_info['server_software'] = SERVER_SOFTWARE
self.monitor_queue = Queue()
self.active_queue = Queue()
self._threadpool = ThreadPool(get_method(method),
app_info = app_info,
active_queue = self.active_queue,
monitor_queue = self.monitor_queue,
min_threads = min_threads,
max_threads = max_threads)
# Build our socket listeners
self.listeners = [Listener(i, queue_size, self.active_queue) for i in self.interfaces]
for ndx in range(len(self.listeners)-1, 0, -1):
if not self.listeners[ndx].ready:
del self.listeners[ndx]
if not self.listeners:
log.critical("No interfaces to listen on...closing.")
sys.exit(1)
def _sigterm(self, signum, frame):
log.info('Received SIGTERM')
self.stop()
def _sighup(self, signum, frame):
log.info('Received SIGHUP')
self.restart()
def start(self, background=False):
log.info('Starting %s' % SERVER_SOFTWARE)
self.startstop_lock.acquire()
try:
# Set up our shutdown signals
if self.handle_signals:
try:
import signal |
# Start our worker threads
self._threadpool.start()
# Start our monitor thread
self._monitor = Monitor(self.monitor_queue,
self.active_queue,
self.timeout,
self._threadpool)
self._monitor.setDaemon(True)
self._monitor.start()
# I know that EXPR and A or B is bad but I'm keeping it for Py2.4
# compatibility.
str_extract = lambda l: (l.addr, l.port, l.secure and '*' or '')
msg = 'Listening on sockets: '
msg += ', '.join(['%s:%i%s' % str_extract(l) for l in self.listeners])
log.info(msg)
for l in self.listeners:
l.start()
finally:
self.startstop_lock.release()
if background:
return
while self._monitor.isAlive():
try:
time.sleep(THREAD_STOP_CHECK_INTERVAL)
except KeyboardInterrupt:
# Capture a keyboard interrupt when running from a console
break
except:
if self._monitor.isAlive():
log.error(str(traceback.format_exc()))
continue
return self.stop()
def stop(self, stoplogging = False):
log.info('Stopping %s' % SERVER_SOFTWARE)
self.startstop_lock.acquire()
try:
# Stop listeners
for l in self.listeners:
l.ready = False
# Encourage a context switch
time.sleep(0.01)
for l in self.listeners:
if l.isAlive():
l.join()
# Stop Monitor
self._monitor.stop()
if self._monitor.isAlive():
self._monitor.join()
# Stop Worker threads
self._threadpool.stop()
if stoplogging:
logging.shutdown()
msg = "Calling logging.shutdown() is now the responsibility of \
the application developer. Please update your \
applications to no longer call rocket.stop(True)"
try:
import warnings
raise warnings.DeprecationWarning(msg)
except ImportError:
raise RuntimeError(msg)
finally:
self.startstop_lock.release()
def restart(self):
self.stop()
self.start()
def CherryPyWSGIServer(bind_addr,
wsgi_app,
numthreads = 10,
server_name = None,
max = -1,
request_queue_size = 5,
timeout = 10,
shutdown_timeout = 5):
""" A Cherrypy wsgiserver-compatible wrapper. """
max_threads = max
if max_threads < 0:
max_threads = 0
return Rocket(bind_addr, 'wsgi', {'wsgi_app': wsgi_app},
min_threads = numthreads,
max_threads = max_threads,
queue_size = request_queue_size,
timeout = timeout) | signal.signal(signal.SIGTERM, self._sigterm)
signal.signal(signal.SIGUSR1, self._sighup)
except:
log.debug('This platform does not support signals.') | random_line_split |
slotNameValidator.ts | import { FunctionAppService } from 'app/shared/services/function-app.service';
import { PortalResources } from './../models/portal-resources';
import { Validations, Regex } from './../models/constants';
import { Injector } from '@angular/core/src/core';
import { ArmObj } from './../models/arm/arm-obj';
import { TranslateService } from '@ngx-translate/core';
import { Validator } from '@angular/forms/src/forms';
import { FormControl } from '@angular/forms/src/model';
import { Site } from 'app/shared/models/arm/site';
import { ArmSiteDescriptor } from '../resourceDescriptors';
export class SlotNameValidator implements Validator {
private _ts: TranslateService;
private _functionAppService: FunctionAppService;
constructor(injector: Injector, private _siteId: string) {
this._ts = injector.get(TranslateService);
this._functionAppService = injector.get(FunctionAppService);
} | }
const siteNamePlusHypenLength = this._getSiteNameLength() + 1;
if (control.value.length < Validations.websiteNameMinLength) {
return Promise.resolve({ invalidSiteName: this._ts.instant(PortalResources.validation_siteNameMinChars) });
} else if (control.value.length + siteNamePlusHypenLength > Validations.websiteNameMaxLength) {
return Promise.resolve({ invalidSiteName: this._ts.instant(PortalResources.validation_slotNameMaxChars) });
}
if (control.value.toLowerCase() === 'production') {
return Promise.resolve({ invalidSiteName: this._ts.instant(PortalResources.validation_slotNameReserved).format(control.value) });
}
const matchingChar = control.value.match(Regex.invalidEntityName);
if (matchingChar) {
return Promise.resolve({ invalidSiteName: this._ts.instant(PortalResources.validation_siteNameInvalidChar).format(matchingChar[0]) });
}
return new Promise(resolve => {
this._functionAppService.getSlotsList(this._siteId).subscribe(slots => {
if (slots.isSuccessful) {
const result = <ArmObj<Site>[]>slots.result;
let existingSlot = null;
const name = control.value;
if (name) {
if (result && name) {
existingSlot = result.find(s => {
// name is returned as FunctionName/SlotName
const parsedName = s.name.split('/');
const slotName = parsedName[parsedName.length - 1];
return slotName.toLowerCase() === name.toLowerCase();
});
}
if (!existingSlot) {
resolve(null);
} else {
resolve({
invalidSiteName: this._ts.instant(PortalResources.validation_slotNameNotAvailable).format(control.value),
});
}
}
} else {
resolve(null);
}
});
});
}
private _getSiteNameLength(): number {
try {
const siteDescriptor = new ArmSiteDescriptor(this._siteId);
return !!siteDescriptor.site ? siteDescriptor.site.length : 0;
} catch (e) {
return 0;
}
}
} |
validate(control: FormControl) {
if (!control.value) {
return Promise.resolve(null); | random_line_split |
slotNameValidator.ts | import { FunctionAppService } from 'app/shared/services/function-app.service';
import { PortalResources } from './../models/portal-resources';
import { Validations, Regex } from './../models/constants';
import { Injector } from '@angular/core/src/core';
import { ArmObj } from './../models/arm/arm-obj';
import { TranslateService } from '@ngx-translate/core';
import { Validator } from '@angular/forms/src/forms';
import { FormControl } from '@angular/forms/src/model';
import { Site } from 'app/shared/models/arm/site';
import { ArmSiteDescriptor } from '../resourceDescriptors';
export class | implements Validator {
private _ts: TranslateService;
private _functionAppService: FunctionAppService;
constructor(injector: Injector, private _siteId: string) {
this._ts = injector.get(TranslateService);
this._functionAppService = injector.get(FunctionAppService);
}
validate(control: FormControl) {
if (!control.value) {
return Promise.resolve(null);
}
const siteNamePlusHypenLength = this._getSiteNameLength() + 1;
if (control.value.length < Validations.websiteNameMinLength) {
return Promise.resolve({ invalidSiteName: this._ts.instant(PortalResources.validation_siteNameMinChars) });
} else if (control.value.length + siteNamePlusHypenLength > Validations.websiteNameMaxLength) {
return Promise.resolve({ invalidSiteName: this._ts.instant(PortalResources.validation_slotNameMaxChars) });
}
if (control.value.toLowerCase() === 'production') {
return Promise.resolve({ invalidSiteName: this._ts.instant(PortalResources.validation_slotNameReserved).format(control.value) });
}
const matchingChar = control.value.match(Regex.invalidEntityName);
if (matchingChar) {
return Promise.resolve({ invalidSiteName: this._ts.instant(PortalResources.validation_siteNameInvalidChar).format(matchingChar[0]) });
}
return new Promise(resolve => {
this._functionAppService.getSlotsList(this._siteId).subscribe(slots => {
if (slots.isSuccessful) {
const result = <ArmObj<Site>[]>slots.result;
let existingSlot = null;
const name = control.value;
if (name) {
if (result && name) {
existingSlot = result.find(s => {
// name is returned as FunctionName/SlotName
const parsedName = s.name.split('/');
const slotName = parsedName[parsedName.length - 1];
return slotName.toLowerCase() === name.toLowerCase();
});
}
if (!existingSlot) {
resolve(null);
} else {
resolve({
invalidSiteName: this._ts.instant(PortalResources.validation_slotNameNotAvailable).format(control.value),
});
}
}
} else {
resolve(null);
}
});
});
}
private _getSiteNameLength(): number {
try {
const siteDescriptor = new ArmSiteDescriptor(this._siteId);
return !!siteDescriptor.site ? siteDescriptor.site.length : 0;
} catch (e) {
return 0;
}
}
}
| SlotNameValidator | identifier_name |
section_table.rs | use crate::error::{self, Error};
use crate::pe::relocation;
use alloc::string::{String, ToString};
use scroll::{ctx, Pread, Pwrite};
#[repr(C)]
#[derive(Debug, PartialEq, Clone, Default)]
pub struct SectionTable {
pub name: [u8; 8],
pub real_name: Option<String>,
pub virtual_size: u32,
pub virtual_address: u32,
pub size_of_raw_data: u32,
pub pointer_to_raw_data: u32,
pub pointer_to_relocations: u32,
pub pointer_to_linenumbers: u32,
pub number_of_relocations: u16,
pub number_of_linenumbers: u16,
pub characteristics: u32,
}
pub const SIZEOF_SECTION_TABLE: usize = 8 * 5;
// Based on https://github.com/llvm-mirror/llvm/blob/af7b1832a03ab6486c42a40d21695b2c03b2d8a3/lib/Object/COFFObjectFile.cpp#L70
// Decodes a string table entry in base 64 (//AAAAAA). Expects string without
// prefixed slashes.
fn base64_decode_string_entry(s: &str) -> Result<usize, ()> {
assert!(s.len() <= 6, "String too long, possible overflow.");
let mut val = 0;
for c in s.bytes() {
let v = if b'A' <= c && c <= b'Z' {
// 00..=25
c - b'A'
} else if b'a' <= c && c <= b'z' {
// 26..=51
c - b'a' + 26
} else if b'0' <= c && c <= b'9' {
// 52..=61
c - b'0' + 52
} else if c == b'+' {
// 62
62
} else if c == b'/' {
// 63
63
} else {
return Err(());
};
val = val * 64 + v as usize;
}
Ok(val)
}
impl SectionTable {
pub fn parse(
bytes: &[u8],
offset: &mut usize,
string_table_offset: usize,
) -> error::Result<Self> {
let mut table = SectionTable::default();
let mut name = [0u8; 8];
name.copy_from_slice(bytes.gread_with(offset, 8)?);
table.name = name;
table.virtual_size = bytes.gread_with(offset, scroll::LE)?;
table.virtual_address = bytes.gread_with(offset, scroll::LE)?;
table.size_of_raw_data = bytes.gread_with(offset, scroll::LE)?;
table.pointer_to_raw_data = bytes.gread_with(offset, scroll::LE)?;
table.pointer_to_relocations = bytes.gread_with(offset, scroll::LE)?;
table.pointer_to_linenumbers = bytes.gread_with(offset, scroll::LE)?;
table.number_of_relocations = bytes.gread_with(offset, scroll::LE)?;
table.number_of_linenumbers = bytes.gread_with(offset, scroll::LE)?;
table.characteristics = bytes.gread_with(offset, scroll::LE)?;
if let Some(idx) = table.name_offset()? {
table.real_name = Some(bytes.pread::<&str>(string_table_offset + idx)?.to_string());
}
Ok(table)
}
pub fn name_offset(&self) -> error::Result<Option<usize>> {
// Based on https://github.com/llvm-mirror/llvm/blob/af7b1832a03ab6486c42a40d21695b2c03b2d8a3/lib/Object/COFFObjectFile.cpp#L1054
if self.name[0] == b'/' {
let idx: usize = if self.name[1] == b'/' {
let b64idx = self.name.pread::<&str>(2)?;
base64_decode_string_entry(b64idx).map_err(|_| {
Error::Malformed(format!(
"Invalid indirect section name //{}: base64 decoding failed",
b64idx
))
})?
} else {
let name = self.name.pread::<&str>(1)?;
name.parse().map_err(|err| {
Error::Malformed(format!("Invalid indirect section name /{}: {}", name, err))
})?
};
Ok(Some(idx))
} else {
Ok(None)
}
}
#[allow(clippy::useless_let_if_seq)]
pub fn set_name_offset(&mut self, mut idx: usize) -> error::Result<()> {
if idx <= 9_999_999 {
// 10^7 - 1
// write!(&mut self.name[1..], "{}", idx) without using io::Write.
// We write into a temporary since we calculate digits starting at the right.
let mut name = [0; 7];
let mut len = 0;
if idx == 0 {
name[6] = b'0';
len = 1;
} else {
while idx != 0 {
let rem = (idx % 10) as u8;
idx /= 10;
name[6 - len] = b'0' + rem;
len += 1;
}
}
self.name = [0; 8];
self.name[0] = b'/';
self.name[1..][..len].copy_from_slice(&name[7 - len..]);
Ok(())
} else if idx as u64 <= 0xfff_fff_fff {
// 64^6 - 1
self.name[0] = b'/';
self.name[1] = b'/';
for i in 0..6 {
let rem = (idx % 64) as u8;
idx /= 64;
let c = match rem {
0..=25 => b'A' + rem,
26..=51 => b'a' + rem - 26,
52..=61 => b'0' + rem - 52,
62 => b'+',
63 => b'/',
_ => unreachable!(),
};
self.name[7 - i] = c;
}
Ok(())
} else {
Err(Error::Malformed(format!(
"Invalid section name offset: {}",
idx
)))
}
}
pub fn name(&self) -> error::Result<&str> {
match self.real_name.as_ref() {
Some(s) => Ok(s),
None => Ok(self.name.pread(0)?),
}
}
pub fn relocations<'a>(&self, bytes: &'a [u8]) -> error::Result<relocation::Relocations<'a>> {
let offset = self.pointer_to_relocations as usize;
let number = self.number_of_relocations as usize;
relocation::Relocations::parse(bytes, offset, number)
}
}
impl ctx::SizeWith<scroll::Endian> for SectionTable {
fn size_with(_ctx: &scroll::Endian) -> usize {
SIZEOF_SECTION_TABLE
}
}
impl ctx::TryIntoCtx<scroll::Endian> for SectionTable {
type Error = error::Error;
fn try_into_ctx(self, bytes: &mut [u8], ctx: scroll::Endian) -> Result<usize, Self::Error> {
let offset = &mut 0;
bytes.gwrite(&self.name[..], offset)?;
bytes.gwrite_with(self.virtual_size, offset, ctx)?;
bytes.gwrite_with(self.virtual_address, offset, ctx)?;
bytes.gwrite_with(self.size_of_raw_data, offset, ctx)?;
bytes.gwrite_with(self.pointer_to_raw_data, offset, ctx)?;
bytes.gwrite_with(self.pointer_to_relocations, offset, ctx)?;
bytes.gwrite_with(self.pointer_to_linenumbers, offset, ctx)?;
bytes.gwrite_with(self.number_of_relocations, offset, ctx)?;
bytes.gwrite_with(self.number_of_linenumbers, offset, ctx)?;
bytes.gwrite_with(self.characteristics, offset, ctx)?;
Ok(SIZEOF_SECTION_TABLE)
}
}
impl ctx::IntoCtx<scroll::Endian> for SectionTable {
fn into_ctx(self, bytes: &mut [u8], ctx: scroll::Endian) {
bytes.pwrite_with(self, 0, ctx).unwrap();
}
}
/// The section should not be padded to the next boundary. This flag is obsolete and is replaced
/// by `IMAGE_SCN_ALIGN_1BYTES`. This is valid only for object files.
pub const IMAGE_SCN_TYPE_NO_PAD: u32 = 0x0000_0008;
/// The section contains executable code.
pub const IMAGE_SCN_CNT_CODE: u32 = 0x0000_0020;
/// The section contains initialized data.
pub const IMAGE_SCN_CNT_INITIALIZED_DATA: u32 = 0x0000_0040;
/// The section contains uninitialized data.
pub const IMAGE_SCN_CNT_UNINITIALIZED_DATA: u32 = 0x0000_0080;
pub const IMAGE_SCN_LNK_OTHER: u32 = 0x0000_0100;
/// The section contains comments or other information. The .drectve section has this type.
/// This is valid for object files only.
pub const IMAGE_SCN_LNK_INFO: u32 = 0x0000_0200;
/// The section will not become part of the image. This is valid only for object files.
pub const IMAGE_SCN_LNK_REMOVE: u32 = 0x0000_0800;
/// The section contains COMDAT data. This is valid only for object files.
pub const IMAGE_SCN_LNK_COMDAT: u32 = 0x0000_1000;
/// The section contains data referenced through the global pointer (GP).
pub const IMAGE_SCN_GPREL: u32 = 0x0000_8000;
pub const IMAGE_SCN_MEM_PURGEABLE: u32 = 0x0002_0000;
pub const IMAGE_SCN_MEM_16BIT: u32 = 0x0002_0000;
pub const IMAGE_SCN_MEM_LOCKED: u32 = 0x0004_0000;
pub const IMAGE_SCN_MEM_PRELOAD: u32 = 0x0008_0000;
pub const IMAGE_SCN_ALIGN_1BYTES: u32 = 0x0010_0000;
pub const IMAGE_SCN_ALIGN_2BYTES: u32 = 0x0020_0000;
pub const IMAGE_SCN_ALIGN_4BYTES: u32 = 0x0030_0000;
pub const IMAGE_SCN_ALIGN_8BYTES: u32 = 0x0040_0000;
pub const IMAGE_SCN_ALIGN_16BYTES: u32 = 0x0050_0000;
pub const IMAGE_SCN_ALIGN_32BYTES: u32 = 0x0060_0000;
pub const IMAGE_SCN_ALIGN_64BYTES: u32 = 0x0070_0000;
pub const IMAGE_SCN_ALIGN_128BYTES: u32 = 0x0080_0000;
pub const IMAGE_SCN_ALIGN_256BYTES: u32 = 0x0090_0000;
pub const IMAGE_SCN_ALIGN_512BYTES: u32 = 0x00A0_0000;
pub const IMAGE_SCN_ALIGN_1024BYTES: u32 = 0x00B0_0000;
pub const IMAGE_SCN_ALIGN_2048BYTES: u32 = 0x00C0_0000;
pub const IMAGE_SCN_ALIGN_4096BYTES: u32 = 0x00D0_0000;
pub const IMAGE_SCN_ALIGN_8192BYTES: u32 = 0x00E0_0000;
pub const IMAGE_SCN_ALIGN_MASK: u32 = 0x00F0_0000;
/// The section contains extended relocations.
pub const IMAGE_SCN_LNK_NRELOC_OVFL: u32 = 0x0100_0000;
/// The section can be discarded as needed.
pub const IMAGE_SCN_MEM_DISCARDABLE: u32 = 0x0200_0000;
/// The section cannot be cached.
pub const IMAGE_SCN_MEM_NOT_CACHED: u32 = 0x0400_0000;
/// The section is not pageable.
pub const IMAGE_SCN_MEM_NOT_PAGED: u32 = 0x0800_0000;
/// The section can be shared in memory.
pub const IMAGE_SCN_MEM_SHARED: u32 = 0x1000_0000;
/// The section can be executed as code.
pub const IMAGE_SCN_MEM_EXECUTE: u32 = 0x2000_0000;
/// The section can be read.
pub const IMAGE_SCN_MEM_READ: u32 = 0x4000_0000;
/// The section can be written to.
pub const IMAGE_SCN_MEM_WRITE: u32 = 0x8000_0000;
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn set_name_offset() |
}
| {
let mut section = SectionTable::default();
for &(offset, name) in [
(0usize, b"/0\0\0\0\0\0\0"),
(1, b"/1\0\0\0\0\0\0"),
(9_999_999, b"/9999999"),
(10_000_000, b"//AAmJaA"),
#[cfg(target_pointer_width = "64")]
(0xfff_fff_fff, b"////////"),
]
.iter()
{
section.set_name_offset(offset).unwrap();
assert_eq!(§ion.name, name);
assert_eq!(section.name_offset().unwrap(), Some(offset));
}
#[cfg(target_pointer_width = "64")]
assert!(section.set_name_offset(0x1_000_000_000).is_err());
} | identifier_body |
section_table.rs | use crate::error::{self, Error};
use crate::pe::relocation;
use alloc::string::{String, ToString};
use scroll::{ctx, Pread, Pwrite};
#[repr(C)]
#[derive(Debug, PartialEq, Clone, Default)]
pub struct SectionTable {
pub name: [u8; 8],
pub real_name: Option<String>,
pub virtual_size: u32,
pub virtual_address: u32,
pub size_of_raw_data: u32,
pub pointer_to_raw_data: u32,
pub pointer_to_relocations: u32,
pub pointer_to_linenumbers: u32,
pub number_of_relocations: u16,
pub number_of_linenumbers: u16,
pub characteristics: u32,
}
pub const SIZEOF_SECTION_TABLE: usize = 8 * 5;
// Based on https://github.com/llvm-mirror/llvm/blob/af7b1832a03ab6486c42a40d21695b2c03b2d8a3/lib/Object/COFFObjectFile.cpp#L70
// Decodes a string table entry in base 64 (//AAAAAA). Expects string without
// prefixed slashes.
fn base64_decode_string_entry(s: &str) -> Result<usize, ()> {
assert!(s.len() <= 6, "String too long, possible overflow.");
let mut val = 0;
for c in s.bytes() {
let v = if b'A' <= c && c <= b'Z' {
// 00..=25
c - b'A'
} else if b'a' <= c && c <= b'z' {
// 26..=51
c - b'a' + 26
} else if b'0' <= c && c <= b'9' {
// 52..=61
c - b'0' + 52
} else if c == b'+' {
// 62
62
} else if c == b'/' {
// 63
63
} else {
return Err(());
};
val = val * 64 + v as usize;
}
Ok(val)
}
impl SectionTable {
pub fn parse(
bytes: &[u8],
offset: &mut usize,
string_table_offset: usize,
) -> error::Result<Self> {
let mut table = SectionTable::default();
let mut name = [0u8; 8];
name.copy_from_slice(bytes.gread_with(offset, 8)?);
table.name = name;
table.virtual_size = bytes.gread_with(offset, scroll::LE)?;
table.virtual_address = bytes.gread_with(offset, scroll::LE)?;
table.size_of_raw_data = bytes.gread_with(offset, scroll::LE)?;
table.pointer_to_raw_data = bytes.gread_with(offset, scroll::LE)?;
table.pointer_to_relocations = bytes.gread_with(offset, scroll::LE)?;
table.pointer_to_linenumbers = bytes.gread_with(offset, scroll::LE)?;
table.number_of_relocations = bytes.gread_with(offset, scroll::LE)?;
table.number_of_linenumbers = bytes.gread_with(offset, scroll::LE)?;
table.characteristics = bytes.gread_with(offset, scroll::LE)?;
if let Some(idx) = table.name_offset()? {
table.real_name = Some(bytes.pread::<&str>(string_table_offset + idx)?.to_string());
}
Ok(table)
}
pub fn name_offset(&self) -> error::Result<Option<usize>> {
// Based on https://github.com/llvm-mirror/llvm/blob/af7b1832a03ab6486c42a40d21695b2c03b2d8a3/lib/Object/COFFObjectFile.cpp#L1054
if self.name[0] == b'/' {
let idx: usize = if self.name[1] == b'/' {
let b64idx = self.name.pread::<&str>(2)?;
base64_decode_string_entry(b64idx).map_err(|_| {
Error::Malformed(format!(
"Invalid indirect section name //{}: base64 decoding failed",
b64idx
))
})?
} else {
let name = self.name.pread::<&str>(1)?;
name.parse().map_err(|err| {
Error::Malformed(format!("Invalid indirect section name /{}: {}", name, err))
})?
};
Ok(Some(idx))
} else {
Ok(None)
}
}
#[allow(clippy::useless_let_if_seq)]
pub fn set_name_offset(&mut self, mut idx: usize) -> error::Result<()> {
if idx <= 9_999_999 | else if idx as u64 <= 0xfff_fff_fff {
// 64^6 - 1
self.name[0] = b'/';
self.name[1] = b'/';
for i in 0..6 {
let rem = (idx % 64) as u8;
idx /= 64;
let c = match rem {
0..=25 => b'A' + rem,
26..=51 => b'a' + rem - 26,
52..=61 => b'0' + rem - 52,
62 => b'+',
63 => b'/',
_ => unreachable!(),
};
self.name[7 - i] = c;
}
Ok(())
} else {
Err(Error::Malformed(format!(
"Invalid section name offset: {}",
idx
)))
}
}
pub fn name(&self) -> error::Result<&str> {
match self.real_name.as_ref() {
Some(s) => Ok(s),
None => Ok(self.name.pread(0)?),
}
}
pub fn relocations<'a>(&self, bytes: &'a [u8]) -> error::Result<relocation::Relocations<'a>> {
let offset = self.pointer_to_relocations as usize;
let number = self.number_of_relocations as usize;
relocation::Relocations::parse(bytes, offset, number)
}
}
impl ctx::SizeWith<scroll::Endian> for SectionTable {
fn size_with(_ctx: &scroll::Endian) -> usize {
SIZEOF_SECTION_TABLE
}
}
impl ctx::TryIntoCtx<scroll::Endian> for SectionTable {
type Error = error::Error;
fn try_into_ctx(self, bytes: &mut [u8], ctx: scroll::Endian) -> Result<usize, Self::Error> {
let offset = &mut 0;
bytes.gwrite(&self.name[..], offset)?;
bytes.gwrite_with(self.virtual_size, offset, ctx)?;
bytes.gwrite_with(self.virtual_address, offset, ctx)?;
bytes.gwrite_with(self.size_of_raw_data, offset, ctx)?;
bytes.gwrite_with(self.pointer_to_raw_data, offset, ctx)?;
bytes.gwrite_with(self.pointer_to_relocations, offset, ctx)?;
bytes.gwrite_with(self.pointer_to_linenumbers, offset, ctx)?;
bytes.gwrite_with(self.number_of_relocations, offset, ctx)?;
bytes.gwrite_with(self.number_of_linenumbers, offset, ctx)?;
bytes.gwrite_with(self.characteristics, offset, ctx)?;
Ok(SIZEOF_SECTION_TABLE)
}
}
impl ctx::IntoCtx<scroll::Endian> for SectionTable {
fn into_ctx(self, bytes: &mut [u8], ctx: scroll::Endian) {
bytes.pwrite_with(self, 0, ctx).unwrap();
}
}
/// The section should not be padded to the next boundary. This flag is obsolete and is replaced
/// by `IMAGE_SCN_ALIGN_1BYTES`. This is valid only for object files.
pub const IMAGE_SCN_TYPE_NO_PAD: u32 = 0x0000_0008;
/// The section contains executable code.
pub const IMAGE_SCN_CNT_CODE: u32 = 0x0000_0020;
/// The section contains initialized data.
pub const IMAGE_SCN_CNT_INITIALIZED_DATA: u32 = 0x0000_0040;
/// The section contains uninitialized data.
pub const IMAGE_SCN_CNT_UNINITIALIZED_DATA: u32 = 0x0000_0080;
pub const IMAGE_SCN_LNK_OTHER: u32 = 0x0000_0100;
/// The section contains comments or other information. The .drectve section has this type.
/// This is valid for object files only.
pub const IMAGE_SCN_LNK_INFO: u32 = 0x0000_0200;
/// The section will not become part of the image. This is valid only for object files.
pub const IMAGE_SCN_LNK_REMOVE: u32 = 0x0000_0800;
/// The section contains COMDAT data. This is valid only for object files.
pub const IMAGE_SCN_LNK_COMDAT: u32 = 0x0000_1000;
/// The section contains data referenced through the global pointer (GP).
pub const IMAGE_SCN_GPREL: u32 = 0x0000_8000;
pub const IMAGE_SCN_MEM_PURGEABLE: u32 = 0x0002_0000;
pub const IMAGE_SCN_MEM_16BIT: u32 = 0x0002_0000;
pub const IMAGE_SCN_MEM_LOCKED: u32 = 0x0004_0000;
pub const IMAGE_SCN_MEM_PRELOAD: u32 = 0x0008_0000;
pub const IMAGE_SCN_ALIGN_1BYTES: u32 = 0x0010_0000;
pub const IMAGE_SCN_ALIGN_2BYTES: u32 = 0x0020_0000;
pub const IMAGE_SCN_ALIGN_4BYTES: u32 = 0x0030_0000;
pub const IMAGE_SCN_ALIGN_8BYTES: u32 = 0x0040_0000;
pub const IMAGE_SCN_ALIGN_16BYTES: u32 = 0x0050_0000;
pub const IMAGE_SCN_ALIGN_32BYTES: u32 = 0x0060_0000;
pub const IMAGE_SCN_ALIGN_64BYTES: u32 = 0x0070_0000;
pub const IMAGE_SCN_ALIGN_128BYTES: u32 = 0x0080_0000;
pub const IMAGE_SCN_ALIGN_256BYTES: u32 = 0x0090_0000;
pub const IMAGE_SCN_ALIGN_512BYTES: u32 = 0x00A0_0000;
pub const IMAGE_SCN_ALIGN_1024BYTES: u32 = 0x00B0_0000;
pub const IMAGE_SCN_ALIGN_2048BYTES: u32 = 0x00C0_0000;
pub const IMAGE_SCN_ALIGN_4096BYTES: u32 = 0x00D0_0000;
pub const IMAGE_SCN_ALIGN_8192BYTES: u32 = 0x00E0_0000;
pub const IMAGE_SCN_ALIGN_MASK: u32 = 0x00F0_0000;
/// The section contains extended relocations.
pub const IMAGE_SCN_LNK_NRELOC_OVFL: u32 = 0x0100_0000;
/// The section can be discarded as needed.
pub const IMAGE_SCN_MEM_DISCARDABLE: u32 = 0x0200_0000;
/// The section cannot be cached.
pub const IMAGE_SCN_MEM_NOT_CACHED: u32 = 0x0400_0000;
/// The section is not pageable.
pub const IMAGE_SCN_MEM_NOT_PAGED: u32 = 0x0800_0000;
/// The section can be shared in memory.
pub const IMAGE_SCN_MEM_SHARED: u32 = 0x1000_0000;
/// The section can be executed as code.
pub const IMAGE_SCN_MEM_EXECUTE: u32 = 0x2000_0000;
/// The section can be read.
pub const IMAGE_SCN_MEM_READ: u32 = 0x4000_0000;
/// The section can be written to.
pub const IMAGE_SCN_MEM_WRITE: u32 = 0x8000_0000;
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn set_name_offset() {
let mut section = SectionTable::default();
for &(offset, name) in [
(0usize, b"/0\0\0\0\0\0\0"),
(1, b"/1\0\0\0\0\0\0"),
(9_999_999, b"/9999999"),
(10_000_000, b"//AAmJaA"),
#[cfg(target_pointer_width = "64")]
(0xfff_fff_fff, b"////////"),
]
.iter()
{
section.set_name_offset(offset).unwrap();
assert_eq!(§ion.name, name);
assert_eq!(section.name_offset().unwrap(), Some(offset));
}
#[cfg(target_pointer_width = "64")]
assert!(section.set_name_offset(0x1_000_000_000).is_err());
}
}
| {
// 10^7 - 1
// write!(&mut self.name[1..], "{}", idx) without using io::Write.
// We write into a temporary since we calculate digits starting at the right.
let mut name = [0; 7];
let mut len = 0;
if idx == 0 {
name[6] = b'0';
len = 1;
} else {
while idx != 0 {
let rem = (idx % 10) as u8;
idx /= 10;
name[6 - len] = b'0' + rem;
len += 1;
}
}
self.name = [0; 8];
self.name[0] = b'/';
self.name[1..][..len].copy_from_slice(&name[7 - len..]);
Ok(())
} | conditional_block |
section_table.rs | use crate::error::{self, Error};
use crate::pe::relocation;
use alloc::string::{String, ToString};
use scroll::{ctx, Pread, Pwrite};
#[repr(C)]
#[derive(Debug, PartialEq, Clone, Default)]
pub struct SectionTable {
pub name: [u8; 8],
pub real_name: Option<String>,
pub virtual_size: u32,
pub virtual_address: u32,
pub size_of_raw_data: u32,
pub pointer_to_raw_data: u32,
pub pointer_to_relocations: u32,
pub pointer_to_linenumbers: u32,
pub number_of_relocations: u16,
pub number_of_linenumbers: u16,
pub characteristics: u32,
}
pub const SIZEOF_SECTION_TABLE: usize = 8 * 5;
// Based on https://github.com/llvm-mirror/llvm/blob/af7b1832a03ab6486c42a40d21695b2c03b2d8a3/lib/Object/COFFObjectFile.cpp#L70
// Decodes a string table entry in base 64 (//AAAAAA). Expects string without
// prefixed slashes.
fn base64_decode_string_entry(s: &str) -> Result<usize, ()> {
assert!(s.len() <= 6, "String too long, possible overflow.");
let mut val = 0;
for c in s.bytes() {
let v = if b'A' <= c && c <= b'Z' {
// 00..=25
c - b'A'
} else if b'a' <= c && c <= b'z' {
// 26..=51
c - b'a' + 26
} else if b'0' <= c && c <= b'9' {
// 52..=61
c - b'0' + 52
} else if c == b'+' {
// 62 | } else if c == b'/' {
// 63
63
} else {
return Err(());
};
val = val * 64 + v as usize;
}
Ok(val)
}
impl SectionTable {
pub fn parse(
bytes: &[u8],
offset: &mut usize,
string_table_offset: usize,
) -> error::Result<Self> {
let mut table = SectionTable::default();
let mut name = [0u8; 8];
name.copy_from_slice(bytes.gread_with(offset, 8)?);
table.name = name;
table.virtual_size = bytes.gread_with(offset, scroll::LE)?;
table.virtual_address = bytes.gread_with(offset, scroll::LE)?;
table.size_of_raw_data = bytes.gread_with(offset, scroll::LE)?;
table.pointer_to_raw_data = bytes.gread_with(offset, scroll::LE)?;
table.pointer_to_relocations = bytes.gread_with(offset, scroll::LE)?;
table.pointer_to_linenumbers = bytes.gread_with(offset, scroll::LE)?;
table.number_of_relocations = bytes.gread_with(offset, scroll::LE)?;
table.number_of_linenumbers = bytes.gread_with(offset, scroll::LE)?;
table.characteristics = bytes.gread_with(offset, scroll::LE)?;
if let Some(idx) = table.name_offset()? {
table.real_name = Some(bytes.pread::<&str>(string_table_offset + idx)?.to_string());
}
Ok(table)
}
pub fn name_offset(&self) -> error::Result<Option<usize>> {
// Based on https://github.com/llvm-mirror/llvm/blob/af7b1832a03ab6486c42a40d21695b2c03b2d8a3/lib/Object/COFFObjectFile.cpp#L1054
if self.name[0] == b'/' {
let idx: usize = if self.name[1] == b'/' {
let b64idx = self.name.pread::<&str>(2)?;
base64_decode_string_entry(b64idx).map_err(|_| {
Error::Malformed(format!(
"Invalid indirect section name //{}: base64 decoding failed",
b64idx
))
})?
} else {
let name = self.name.pread::<&str>(1)?;
name.parse().map_err(|err| {
Error::Malformed(format!("Invalid indirect section name /{}: {}", name, err))
})?
};
Ok(Some(idx))
} else {
Ok(None)
}
}
#[allow(clippy::useless_let_if_seq)]
pub fn set_name_offset(&mut self, mut idx: usize) -> error::Result<()> {
if idx <= 9_999_999 {
// 10^7 - 1
// write!(&mut self.name[1..], "{}", idx) without using io::Write.
// We write into a temporary since we calculate digits starting at the right.
let mut name = [0; 7];
let mut len = 0;
if idx == 0 {
name[6] = b'0';
len = 1;
} else {
while idx != 0 {
let rem = (idx % 10) as u8;
idx /= 10;
name[6 - len] = b'0' + rem;
len += 1;
}
}
self.name = [0; 8];
self.name[0] = b'/';
self.name[1..][..len].copy_from_slice(&name[7 - len..]);
Ok(())
} else if idx as u64 <= 0xfff_fff_fff {
// 64^6 - 1
self.name[0] = b'/';
self.name[1] = b'/';
for i in 0..6 {
let rem = (idx % 64) as u8;
idx /= 64;
let c = match rem {
0..=25 => b'A' + rem,
26..=51 => b'a' + rem - 26,
52..=61 => b'0' + rem - 52,
62 => b'+',
63 => b'/',
_ => unreachable!(),
};
self.name[7 - i] = c;
}
Ok(())
} else {
Err(Error::Malformed(format!(
"Invalid section name offset: {}",
idx
)))
}
}
pub fn name(&self) -> error::Result<&str> {
match self.real_name.as_ref() {
Some(s) => Ok(s),
None => Ok(self.name.pread(0)?),
}
}
pub fn relocations<'a>(&self, bytes: &'a [u8]) -> error::Result<relocation::Relocations<'a>> {
let offset = self.pointer_to_relocations as usize;
let number = self.number_of_relocations as usize;
relocation::Relocations::parse(bytes, offset, number)
}
}
impl ctx::SizeWith<scroll::Endian> for SectionTable {
fn size_with(_ctx: &scroll::Endian) -> usize {
SIZEOF_SECTION_TABLE
}
}
impl ctx::TryIntoCtx<scroll::Endian> for SectionTable {
type Error = error::Error;
fn try_into_ctx(self, bytes: &mut [u8], ctx: scroll::Endian) -> Result<usize, Self::Error> {
let offset = &mut 0;
bytes.gwrite(&self.name[..], offset)?;
bytes.gwrite_with(self.virtual_size, offset, ctx)?;
bytes.gwrite_with(self.virtual_address, offset, ctx)?;
bytes.gwrite_with(self.size_of_raw_data, offset, ctx)?;
bytes.gwrite_with(self.pointer_to_raw_data, offset, ctx)?;
bytes.gwrite_with(self.pointer_to_relocations, offset, ctx)?;
bytes.gwrite_with(self.pointer_to_linenumbers, offset, ctx)?;
bytes.gwrite_with(self.number_of_relocations, offset, ctx)?;
bytes.gwrite_with(self.number_of_linenumbers, offset, ctx)?;
bytes.gwrite_with(self.characteristics, offset, ctx)?;
Ok(SIZEOF_SECTION_TABLE)
}
}
impl ctx::IntoCtx<scroll::Endian> for SectionTable {
fn into_ctx(self, bytes: &mut [u8], ctx: scroll::Endian) {
bytes.pwrite_with(self, 0, ctx).unwrap();
}
}
/// The section should not be padded to the next boundary. This flag is obsolete and is replaced
/// by `IMAGE_SCN_ALIGN_1BYTES`. This is valid only for object files.
pub const IMAGE_SCN_TYPE_NO_PAD: u32 = 0x0000_0008;
/// The section contains executable code.
pub const IMAGE_SCN_CNT_CODE: u32 = 0x0000_0020;
/// The section contains initialized data.
pub const IMAGE_SCN_CNT_INITIALIZED_DATA: u32 = 0x0000_0040;
/// The section contains uninitialized data.
pub const IMAGE_SCN_CNT_UNINITIALIZED_DATA: u32 = 0x0000_0080;
pub const IMAGE_SCN_LNK_OTHER: u32 = 0x0000_0100;
/// The section contains comments or other information. The .drectve section has this type.
/// This is valid for object files only.
pub const IMAGE_SCN_LNK_INFO: u32 = 0x0000_0200;
/// The section will not become part of the image. This is valid only for object files.
pub const IMAGE_SCN_LNK_REMOVE: u32 = 0x0000_0800;
/// The section contains COMDAT data. This is valid only for object files.
pub const IMAGE_SCN_LNK_COMDAT: u32 = 0x0000_1000;
/// The section contains data referenced through the global pointer (GP).
pub const IMAGE_SCN_GPREL: u32 = 0x0000_8000;
pub const IMAGE_SCN_MEM_PURGEABLE: u32 = 0x0002_0000;
pub const IMAGE_SCN_MEM_16BIT: u32 = 0x0002_0000;
pub const IMAGE_SCN_MEM_LOCKED: u32 = 0x0004_0000;
pub const IMAGE_SCN_MEM_PRELOAD: u32 = 0x0008_0000;
pub const IMAGE_SCN_ALIGN_1BYTES: u32 = 0x0010_0000;
pub const IMAGE_SCN_ALIGN_2BYTES: u32 = 0x0020_0000;
pub const IMAGE_SCN_ALIGN_4BYTES: u32 = 0x0030_0000;
pub const IMAGE_SCN_ALIGN_8BYTES: u32 = 0x0040_0000;
pub const IMAGE_SCN_ALIGN_16BYTES: u32 = 0x0050_0000;
pub const IMAGE_SCN_ALIGN_32BYTES: u32 = 0x0060_0000;
pub const IMAGE_SCN_ALIGN_64BYTES: u32 = 0x0070_0000;
pub const IMAGE_SCN_ALIGN_128BYTES: u32 = 0x0080_0000;
pub const IMAGE_SCN_ALIGN_256BYTES: u32 = 0x0090_0000;
pub const IMAGE_SCN_ALIGN_512BYTES: u32 = 0x00A0_0000;
pub const IMAGE_SCN_ALIGN_1024BYTES: u32 = 0x00B0_0000;
pub const IMAGE_SCN_ALIGN_2048BYTES: u32 = 0x00C0_0000;
pub const IMAGE_SCN_ALIGN_4096BYTES: u32 = 0x00D0_0000;
pub const IMAGE_SCN_ALIGN_8192BYTES: u32 = 0x00E0_0000;
pub const IMAGE_SCN_ALIGN_MASK: u32 = 0x00F0_0000;
/// The section contains extended relocations.
pub const IMAGE_SCN_LNK_NRELOC_OVFL: u32 = 0x0100_0000;
/// The section can be discarded as needed.
pub const IMAGE_SCN_MEM_DISCARDABLE: u32 = 0x0200_0000;
/// The section cannot be cached.
pub const IMAGE_SCN_MEM_NOT_CACHED: u32 = 0x0400_0000;
/// The section is not pageable.
pub const IMAGE_SCN_MEM_NOT_PAGED: u32 = 0x0800_0000;
/// The section can be shared in memory.
pub const IMAGE_SCN_MEM_SHARED: u32 = 0x1000_0000;
/// The section can be executed as code.
pub const IMAGE_SCN_MEM_EXECUTE: u32 = 0x2000_0000;
/// The section can be read.
pub const IMAGE_SCN_MEM_READ: u32 = 0x4000_0000;
/// The section can be written to.
pub const IMAGE_SCN_MEM_WRITE: u32 = 0x8000_0000;
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn set_name_offset() {
let mut section = SectionTable::default();
for &(offset, name) in [
(0usize, b"/0\0\0\0\0\0\0"),
(1, b"/1\0\0\0\0\0\0"),
(9_999_999, b"/9999999"),
(10_000_000, b"//AAmJaA"),
#[cfg(target_pointer_width = "64")]
(0xfff_fff_fff, b"////////"),
]
.iter()
{
section.set_name_offset(offset).unwrap();
assert_eq!(§ion.name, name);
assert_eq!(section.name_offset().unwrap(), Some(offset));
}
#[cfg(target_pointer_width = "64")]
assert!(section.set_name_offset(0x1_000_000_000).is_err());
}
} | 62 | random_line_split |
section_table.rs | use crate::error::{self, Error};
use crate::pe::relocation;
use alloc::string::{String, ToString};
use scroll::{ctx, Pread, Pwrite};
#[repr(C)]
#[derive(Debug, PartialEq, Clone, Default)]
pub struct SectionTable {
pub name: [u8; 8],
pub real_name: Option<String>,
pub virtual_size: u32,
pub virtual_address: u32,
pub size_of_raw_data: u32,
pub pointer_to_raw_data: u32,
pub pointer_to_relocations: u32,
pub pointer_to_linenumbers: u32,
pub number_of_relocations: u16,
pub number_of_linenumbers: u16,
pub characteristics: u32,
}
pub const SIZEOF_SECTION_TABLE: usize = 8 * 5;
// Based on https://github.com/llvm-mirror/llvm/blob/af7b1832a03ab6486c42a40d21695b2c03b2d8a3/lib/Object/COFFObjectFile.cpp#L70
// Decodes a string table entry in base 64 (//AAAAAA). Expects string without
// prefixed slashes.
fn base64_decode_string_entry(s: &str) -> Result<usize, ()> {
assert!(s.len() <= 6, "String too long, possible overflow.");
let mut val = 0;
for c in s.bytes() {
let v = if b'A' <= c && c <= b'Z' {
// 00..=25
c - b'A'
} else if b'a' <= c && c <= b'z' {
// 26..=51
c - b'a' + 26
} else if b'0' <= c && c <= b'9' {
// 52..=61
c - b'0' + 52
} else if c == b'+' {
// 62
62
} else if c == b'/' {
// 63
63
} else {
return Err(());
};
val = val * 64 + v as usize;
}
Ok(val)
}
impl SectionTable {
pub fn parse(
bytes: &[u8],
offset: &mut usize,
string_table_offset: usize,
) -> error::Result<Self> {
let mut table = SectionTable::default();
let mut name = [0u8; 8];
name.copy_from_slice(bytes.gread_with(offset, 8)?);
table.name = name;
table.virtual_size = bytes.gread_with(offset, scroll::LE)?;
table.virtual_address = bytes.gread_with(offset, scroll::LE)?;
table.size_of_raw_data = bytes.gread_with(offset, scroll::LE)?;
table.pointer_to_raw_data = bytes.gread_with(offset, scroll::LE)?;
table.pointer_to_relocations = bytes.gread_with(offset, scroll::LE)?;
table.pointer_to_linenumbers = bytes.gread_with(offset, scroll::LE)?;
table.number_of_relocations = bytes.gread_with(offset, scroll::LE)?;
table.number_of_linenumbers = bytes.gread_with(offset, scroll::LE)?;
table.characteristics = bytes.gread_with(offset, scroll::LE)?;
if let Some(idx) = table.name_offset()? {
table.real_name = Some(bytes.pread::<&str>(string_table_offset + idx)?.to_string());
}
Ok(table)
}
pub fn name_offset(&self) -> error::Result<Option<usize>> {
// Based on https://github.com/llvm-mirror/llvm/blob/af7b1832a03ab6486c42a40d21695b2c03b2d8a3/lib/Object/COFFObjectFile.cpp#L1054
if self.name[0] == b'/' {
let idx: usize = if self.name[1] == b'/' {
let b64idx = self.name.pread::<&str>(2)?;
base64_decode_string_entry(b64idx).map_err(|_| {
Error::Malformed(format!(
"Invalid indirect section name //{}: base64 decoding failed",
b64idx
))
})?
} else {
let name = self.name.pread::<&str>(1)?;
name.parse().map_err(|err| {
Error::Malformed(format!("Invalid indirect section name /{}: {}", name, err))
})?
};
Ok(Some(idx))
} else {
Ok(None)
}
}
#[allow(clippy::useless_let_if_seq)]
pub fn set_name_offset(&mut self, mut idx: usize) -> error::Result<()> {
if idx <= 9_999_999 {
// 10^7 - 1
// write!(&mut self.name[1..], "{}", idx) without using io::Write.
// We write into a temporary since we calculate digits starting at the right.
let mut name = [0; 7];
let mut len = 0;
if idx == 0 {
name[6] = b'0';
len = 1;
} else {
while idx != 0 {
let rem = (idx % 10) as u8;
idx /= 10;
name[6 - len] = b'0' + rem;
len += 1;
}
}
self.name = [0; 8];
self.name[0] = b'/';
self.name[1..][..len].copy_from_slice(&name[7 - len..]);
Ok(())
} else if idx as u64 <= 0xfff_fff_fff {
// 64^6 - 1
self.name[0] = b'/';
self.name[1] = b'/';
for i in 0..6 {
let rem = (idx % 64) as u8;
idx /= 64;
let c = match rem {
0..=25 => b'A' + rem,
26..=51 => b'a' + rem - 26,
52..=61 => b'0' + rem - 52,
62 => b'+',
63 => b'/',
_ => unreachable!(),
};
self.name[7 - i] = c;
}
Ok(())
} else {
Err(Error::Malformed(format!(
"Invalid section name offset: {}",
idx
)))
}
}
pub fn name(&self) -> error::Result<&str> {
match self.real_name.as_ref() {
Some(s) => Ok(s),
None => Ok(self.name.pread(0)?),
}
}
pub fn relocations<'a>(&self, bytes: &'a [u8]) -> error::Result<relocation::Relocations<'a>> {
let offset = self.pointer_to_relocations as usize;
let number = self.number_of_relocations as usize;
relocation::Relocations::parse(bytes, offset, number)
}
}
impl ctx::SizeWith<scroll::Endian> for SectionTable {
fn | (_ctx: &scroll::Endian) -> usize {
SIZEOF_SECTION_TABLE
}
}
impl ctx::TryIntoCtx<scroll::Endian> for SectionTable {
type Error = error::Error;
fn try_into_ctx(self, bytes: &mut [u8], ctx: scroll::Endian) -> Result<usize, Self::Error> {
let offset = &mut 0;
bytes.gwrite(&self.name[..], offset)?;
bytes.gwrite_with(self.virtual_size, offset, ctx)?;
bytes.gwrite_with(self.virtual_address, offset, ctx)?;
bytes.gwrite_with(self.size_of_raw_data, offset, ctx)?;
bytes.gwrite_with(self.pointer_to_raw_data, offset, ctx)?;
bytes.gwrite_with(self.pointer_to_relocations, offset, ctx)?;
bytes.gwrite_with(self.pointer_to_linenumbers, offset, ctx)?;
bytes.gwrite_with(self.number_of_relocations, offset, ctx)?;
bytes.gwrite_with(self.number_of_linenumbers, offset, ctx)?;
bytes.gwrite_with(self.characteristics, offset, ctx)?;
Ok(SIZEOF_SECTION_TABLE)
}
}
impl ctx::IntoCtx<scroll::Endian> for SectionTable {
fn into_ctx(self, bytes: &mut [u8], ctx: scroll::Endian) {
bytes.pwrite_with(self, 0, ctx).unwrap();
}
}
/// The section should not be padded to the next boundary. This flag is obsolete and is replaced
/// by `IMAGE_SCN_ALIGN_1BYTES`. This is valid only for object files.
pub const IMAGE_SCN_TYPE_NO_PAD: u32 = 0x0000_0008;
/// The section contains executable code.
pub const IMAGE_SCN_CNT_CODE: u32 = 0x0000_0020;
/// The section contains initialized data.
pub const IMAGE_SCN_CNT_INITIALIZED_DATA: u32 = 0x0000_0040;
/// The section contains uninitialized data.
pub const IMAGE_SCN_CNT_UNINITIALIZED_DATA: u32 = 0x0000_0080;
pub const IMAGE_SCN_LNK_OTHER: u32 = 0x0000_0100;
/// The section contains comments or other information. The .drectve section has this type.
/// This is valid for object files only.
pub const IMAGE_SCN_LNK_INFO: u32 = 0x0000_0200;
/// The section will not become part of the image. This is valid only for object files.
pub const IMAGE_SCN_LNK_REMOVE: u32 = 0x0000_0800;
/// The section contains COMDAT data. This is valid only for object files.
pub const IMAGE_SCN_LNK_COMDAT: u32 = 0x0000_1000;
/// The section contains data referenced through the global pointer (GP).
pub const IMAGE_SCN_GPREL: u32 = 0x0000_8000;
pub const IMAGE_SCN_MEM_PURGEABLE: u32 = 0x0002_0000;
pub const IMAGE_SCN_MEM_16BIT: u32 = 0x0002_0000;
pub const IMAGE_SCN_MEM_LOCKED: u32 = 0x0004_0000;
pub const IMAGE_SCN_MEM_PRELOAD: u32 = 0x0008_0000;
pub const IMAGE_SCN_ALIGN_1BYTES: u32 = 0x0010_0000;
pub const IMAGE_SCN_ALIGN_2BYTES: u32 = 0x0020_0000;
pub const IMAGE_SCN_ALIGN_4BYTES: u32 = 0x0030_0000;
pub const IMAGE_SCN_ALIGN_8BYTES: u32 = 0x0040_0000;
pub const IMAGE_SCN_ALIGN_16BYTES: u32 = 0x0050_0000;
pub const IMAGE_SCN_ALIGN_32BYTES: u32 = 0x0060_0000;
pub const IMAGE_SCN_ALIGN_64BYTES: u32 = 0x0070_0000;
pub const IMAGE_SCN_ALIGN_128BYTES: u32 = 0x0080_0000;
pub const IMAGE_SCN_ALIGN_256BYTES: u32 = 0x0090_0000;
pub const IMAGE_SCN_ALIGN_512BYTES: u32 = 0x00A0_0000;
pub const IMAGE_SCN_ALIGN_1024BYTES: u32 = 0x00B0_0000;
pub const IMAGE_SCN_ALIGN_2048BYTES: u32 = 0x00C0_0000;
pub const IMAGE_SCN_ALIGN_4096BYTES: u32 = 0x00D0_0000;
pub const IMAGE_SCN_ALIGN_8192BYTES: u32 = 0x00E0_0000;
pub const IMAGE_SCN_ALIGN_MASK: u32 = 0x00F0_0000;
/// The section contains extended relocations.
pub const IMAGE_SCN_LNK_NRELOC_OVFL: u32 = 0x0100_0000;
/// The section can be discarded as needed.
pub const IMAGE_SCN_MEM_DISCARDABLE: u32 = 0x0200_0000;
/// The section cannot be cached.
pub const IMAGE_SCN_MEM_NOT_CACHED: u32 = 0x0400_0000;
/// The section is not pageable.
pub const IMAGE_SCN_MEM_NOT_PAGED: u32 = 0x0800_0000;
/// The section can be shared in memory.
pub const IMAGE_SCN_MEM_SHARED: u32 = 0x1000_0000;
/// The section can be executed as code.
pub const IMAGE_SCN_MEM_EXECUTE: u32 = 0x2000_0000;
/// The section can be read.
pub const IMAGE_SCN_MEM_READ: u32 = 0x4000_0000;
/// The section can be written to.
pub const IMAGE_SCN_MEM_WRITE: u32 = 0x8000_0000;
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn set_name_offset() {
let mut section = SectionTable::default();
for &(offset, name) in [
(0usize, b"/0\0\0\0\0\0\0"),
(1, b"/1\0\0\0\0\0\0"),
(9_999_999, b"/9999999"),
(10_000_000, b"//AAmJaA"),
#[cfg(target_pointer_width = "64")]
(0xfff_fff_fff, b"////////"),
]
.iter()
{
section.set_name_offset(offset).unwrap();
assert_eq!(§ion.name, name);
assert_eq!(section.name_offset().unwrap(), Some(offset));
}
#[cfg(target_pointer_width = "64")]
assert!(section.set_name_offset(0x1_000_000_000).is_err());
}
}
| size_with | identifier_name |
mod.rs | #[doc = r" Value read from the register"]
pub struct R {
bits: u32,
}
#[doc = r" Value to write to the register"]
pub struct W {
bits: u32,
}
impl super::CNDTR2 {
#[doc = r" Modifies the contents of the register"]
#[inline(always)]
pub fn modify<F>(&self, f: F)
where
for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,
{
let bits = self.register.get();
let r = R { bits: bits };
let mut w = W { bits: bits };
f(&r, &mut w);
self.register.set(w.bits);
}
#[doc = r" Reads the contents of the register"]
#[inline(always)]
pub fn read(&self) -> R {
R {
bits: self.register.get(),
}
}
#[doc = r" Writes to the register"]
#[inline(always)]
pub fn write<F>(&self, f: F)
where
F: FnOnce(&mut W) -> &mut W,
{
let mut w = W::reset_value();
f(&mut w);
self.register.set(w.bits);
}
#[doc = r" Writes the reset value to the register"]
#[inline(always)]
pub fn reset(&self) {
self.write(|w| w)
}
} | #[doc = r" Value of the field"]
pub struct NDTR {
bits: u16,
}
impl NDTR {
#[doc = r" Value of the field as raw bits"]
#[inline(always)]
pub fn bits(&self) -> u16 {
self.bits
}
}
#[doc = r" Proxy"]
pub struct _NDTW<'a> {
w: &'a mut W,
}
impl<'a> _NDTW<'a> {
#[doc = r" Writes raw bits to the field"]
#[inline(always)]
pub fn bits(self, value: u16) -> &'a mut W {
const MASK: u16 = 65535;
const OFFSET: u8 = 0;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
impl R {
#[doc = r" Value of the register as raw bits"]
#[inline(always)]
pub fn bits(&self) -> u32 {
self.bits
}
#[doc = "Bits 0:15 - Number of data to transfer"]
#[inline(always)]
pub fn ndt(&self) -> NDTR {
let bits = {
const MASK: u16 = 65535;
const OFFSET: u8 = 0;
((self.bits >> OFFSET) & MASK as u32) as u16
};
NDTR { bits }
}
}
impl W {
#[doc = r" Reset value of the register"]
#[inline(always)]
pub fn reset_value() -> W {
W { bits: 0 }
}
#[doc = r" Writes raw bits to the register"]
#[inline(always)]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.bits = bits;
self
}
#[doc = "Bits 0:15 - Number of data to transfer"]
#[inline(always)]
pub fn ndt(&mut self) -> _NDTW {
_NDTW { w: self }
}
} | random_line_split | |
mod.rs | #[doc = r" Value read from the register"]
pub struct R {
bits: u32,
}
#[doc = r" Value to write to the register"]
pub struct W {
bits: u32,
}
impl super::CNDTR2 {
#[doc = r" Modifies the contents of the register"]
#[inline(always)]
pub fn modify<F>(&self, f: F)
where
for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,
{
let bits = self.register.get();
let r = R { bits: bits };
let mut w = W { bits: bits };
f(&r, &mut w);
self.register.set(w.bits);
}
#[doc = r" Reads the contents of the register"]
#[inline(always)]
pub fn read(&self) -> R {
R {
bits: self.register.get(),
}
}
#[doc = r" Writes to the register"]
#[inline(always)]
pub fn | <F>(&self, f: F)
where
F: FnOnce(&mut W) -> &mut W,
{
let mut w = W::reset_value();
f(&mut w);
self.register.set(w.bits);
}
#[doc = r" Writes the reset value to the register"]
#[inline(always)]
pub fn reset(&self) {
self.write(|w| w)
}
}
#[doc = r" Value of the field"]
pub struct NDTR {
bits: u16,
}
impl NDTR {
#[doc = r" Value of the field as raw bits"]
#[inline(always)]
pub fn bits(&self) -> u16 {
self.bits
}
}
#[doc = r" Proxy"]
pub struct _NDTW<'a> {
w: &'a mut W,
}
impl<'a> _NDTW<'a> {
#[doc = r" Writes raw bits to the field"]
#[inline(always)]
pub fn bits(self, value: u16) -> &'a mut W {
const MASK: u16 = 65535;
const OFFSET: u8 = 0;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
impl R {
#[doc = r" Value of the register as raw bits"]
#[inline(always)]
pub fn bits(&self) -> u32 {
self.bits
}
#[doc = "Bits 0:15 - Number of data to transfer"]
#[inline(always)]
pub fn ndt(&self) -> NDTR {
let bits = {
const MASK: u16 = 65535;
const OFFSET: u8 = 0;
((self.bits >> OFFSET) & MASK as u32) as u16
};
NDTR { bits }
}
}
impl W {
#[doc = r" Reset value of the register"]
#[inline(always)]
pub fn reset_value() -> W {
W { bits: 0 }
}
#[doc = r" Writes raw bits to the register"]
#[inline(always)]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.bits = bits;
self
}
#[doc = "Bits 0:15 - Number of data to transfer"]
#[inline(always)]
pub fn ndt(&mut self) -> _NDTW {
_NDTW { w: self }
}
}
| write | identifier_name |
mod.rs | #[doc = r" Value read from the register"]
pub struct R {
bits: u32,
}
#[doc = r" Value to write to the register"]
pub struct W {
bits: u32,
}
impl super::CNDTR2 {
#[doc = r" Modifies the contents of the register"]
#[inline(always)]
pub fn modify<F>(&self, f: F)
where
for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,
|
#[doc = r" Reads the contents of the register"]
#[inline(always)]
pub fn read(&self) -> R {
R {
bits: self.register.get(),
}
}
#[doc = r" Writes to the register"]
#[inline(always)]
pub fn write<F>(&self, f: F)
where
F: FnOnce(&mut W) -> &mut W,
{
let mut w = W::reset_value();
f(&mut w);
self.register.set(w.bits);
}
#[doc = r" Writes the reset value to the register"]
#[inline(always)]
pub fn reset(&self) {
self.write(|w| w)
}
}
#[doc = r" Value of the field"]
pub struct NDTR {
bits: u16,
}
impl NDTR {
#[doc = r" Value of the field as raw bits"]
#[inline(always)]
pub fn bits(&self) -> u16 {
self.bits
}
}
#[doc = r" Proxy"]
pub struct _NDTW<'a> {
w: &'a mut W,
}
impl<'a> _NDTW<'a> {
#[doc = r" Writes raw bits to the field"]
#[inline(always)]
pub fn bits(self, value: u16) -> &'a mut W {
const MASK: u16 = 65535;
const OFFSET: u8 = 0;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
impl R {
#[doc = r" Value of the register as raw bits"]
#[inline(always)]
pub fn bits(&self) -> u32 {
self.bits
}
#[doc = "Bits 0:15 - Number of data to transfer"]
#[inline(always)]
pub fn ndt(&self) -> NDTR {
let bits = {
const MASK: u16 = 65535;
const OFFSET: u8 = 0;
((self.bits >> OFFSET) & MASK as u32) as u16
};
NDTR { bits }
}
}
impl W {
#[doc = r" Reset value of the register"]
#[inline(always)]
pub fn reset_value() -> W {
W { bits: 0 }
}
#[doc = r" Writes raw bits to the register"]
#[inline(always)]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.bits = bits;
self
}
#[doc = "Bits 0:15 - Number of data to transfer"]
#[inline(always)]
pub fn ndt(&mut self) -> _NDTW {
_NDTW { w: self }
}
}
| {
let bits = self.register.get();
let r = R { bits: bits };
let mut w = W { bits: bits };
f(&r, &mut w);
self.register.set(w.bits);
} | identifier_body |
__init__.py | # Copyright 2001 by Tarjei Mikkelsen. All rights reserved.
# This code is part of the Biopython distribution and governed by its
# license. Please see the LICENSE file that should have been included
# as part of this package.
"""BioPython Pathway module.
Bio.Pathway is a lightweight class library designed to support the following tasks:
- Data interchange and preprocessing between pathway databases and analysis software.
- Quick prototyping of pathway analysis algorithms
The basic object in the Bio.Pathway model is Interaction, which represents an arbitrary
interaction between any number of biochemical species.
Network objects are used to represent the connectivity between species in pathways
and reaction networks.
For applications where it is not necessary to explicitly represent network connectivity,
the specialized classes Reaction and System should be used in place of Interacton and
Network.
The Bio.Pathway classes, especially Interaction, are intentionally
designed to be very flexible. Their intended use are as wrappers around database
specific records, such as BIND objects. The value-added in this module is a
framework for representing collections of reactions in a way that supports
graph theoretic and numeric analysis.
Note: This module should be regarded as a prototype only. API changes are likely.
Comments and feature requests are most welcome.
"""
import sys
# Add path to Bio
sys.path.append('../..')
from functools import reduce
from Bio.Pathway.Rep.MultiGraph import *
__docformat__ = "restructuredtext en"
class Reaction(object):
"""Abstraction for a biochemical transformation.
This class represents a (potentially reversible) biochemical
transformation of the type:
a S1 + b S2 + ... --> c P1 + d P2 + ...
where
- a, b, c, d ... are positive numeric stochiometric coefficients,
- S1, S2, ... are substrates
- P1, P2, ... are products
A Reaction should be viewed as the net result of one or more individual
reaction steps, where each step is potentially facilitated by a different
catalyst. Support for 'Reaction algebra' will be added at some point in
the future.
Attributes:
- reactants -- map of involved species to their stochiometric coefficients:
reactants[S] = stochiometric constant for S
- catalysts -- list of tuples of catalysts required for this reaction
- reversible -- true iff reaction is reversible
- data -- reference to arbitrary additional data
Invariants:
- for all S in reactants: reactants[S] != 0
- for all C in catalysts: catalysts[C] != 0
"""
def __init__(self, reactants={}, catalysts=[],
reversible=0, data=None):
"""Initializes a new Reaction object."""
# enforce invariants on reactants:
self.reactants = reactants.copy()
# loop over original, edit the copy
for r, value in reactants.items():
if value == 0:
del self.reactants[r]
self.catalysts = sorted(set(catalysts))
self.data = data
self.reversible = reversible
def __eq__(self, r):
"""Returns true iff self is equal to r."""
return isinstance(r, Reaction) and \
self.reactants == r.reactants and \
self.catalysts == r.catalysts and \
self.data == r.data and \
self.reversible == r.reversible
def __ne__(self, r):
"""Returns true iff self is not equal to r."""
return not self.__eq__(r)
def __hash__(self):
"""Returns a hashcode for self."""
t = tuple(self.species())
return hash(t)
def __repr__(self):
"""Returns a debugging string representation of self."""
return "Reaction(" + \
",".join(map(repr, [self.reactants,
self.catalysts,
self.data,
self.reversible])) + ")"
def __str__(self):
"""Returns a string representation of self."""
substrates = ""
products = ""
all_species = sorted(self.reactants)
for species in all_species:
stoch = self.reactants[species]
if stoch < 0:
# species is a substrate:
if substrates != "":
substrates = substrates + " + "
if stoch != -1:
substrates = substrates + str(abs(stoch)) + " "
substrates = substrates + str(species)
elif stoch > 0:
# species is a product:
if products != "":
products = products + " + "
if stoch != 1:
products = products + str(stoch) + " "
products = products + str(species)
else:
raise AttributeError("Invalid 0 coefficient in Reaction.reactants")
if self.reversible:
return substrates + " <=> " + products
else:
|
def reverse(self):
"""Returns a new Reaction that is the reverse of self."""
reactants = {}
for r in self.reactants:
reactants[r] = - self.reactants[r]
return Reaction(reactants, self.catalysts,
self.reversible, self.data)
def species(self):
"""Returns a list of all Species involved in self."""
return list(self.reactants)
class System(object):
"""Abstraction for a collection of reactions.
This class is used in the Bio.Pathway framework to represent an arbitrary
collection of reactions without explicitly defined links.
Attributes:
None
"""
def __init__(self, reactions=[]):
"""Initializes a new System object."""
self.__reactions = set(reactions)
def __repr__(self):
"""Returns a debugging string representation of self."""
return "System(" + ",".join(map(repr, self.__reactions)) + ")"
def __str__(self):
"""Returns a string representation of self."""
return "System of " + str(len(self.__reactions)) + \
" reactions involving " + str(len(self.species())) + \
" species"
def add_reaction(self, reaction):
"""Adds reaction to self."""
self.__reactions.add(reaction)
def remove_reaction(self, reaction):
"""Removes reaction from self."""
self.__reactions.remove(reaction)
def reactions(self):
"""Returns a list of the reactions in this system.
Note the order is arbitrary!
"""
# TODO - Define __lt__ so that Reactions can be sorted on Python?
return list(self.__reactions)
def species(self):
"""Returns a list of the species in this system."""
return sorted(set(reduce(lambda s, x: s + x,
[x.species() for x in self.reactions()], [])))
def stochiometry(self):
"""Computes the stoichiometry matrix for self.
Returns (species, reactions, stoch) where
- species = ordered list of species in this system
- reactions = ordered list of reactions in this system
- stoch = 2D array where stoch[i][j] is coef of the
jth species in the ith reaction, as defined
by species and reactions above
"""
# Note: This an inefficient and ugly temporary implementation.
# To be practical, stochiometric matrices should probably
# be implemented by sparse matrices, which would require
# NumPy dependencies.
#
# PS: We should implement automatic checking for NumPy here.
species = self.species()
reactions = self.reactions()
stoch = [] * len(reactions)
for i in range(len(reactions)):
stoch[i] = 0 * len(species)
for s in reactions[i].species():
stoch[species.index(s)] = reactions[i].reactants[s]
return (species, reactions, stoch)
class Interaction(object):
"""An arbitrary interaction between any number of species.
This class definition is intended solely as a minimal wrapper interface that should
be implemented and extended by more specific abstractions.
Attributes:
- data -- reference to arbitrary additional data
"""
def __init_(self, data):
self.data = data
def __hash__(self):
"""Returns a hashcode for self."""
return hash(self.data)
def __repr__(self):
"""Returns a debugging string representation of self."""
return "Interaction(" + repr(self.data) + ")"
def __str__(self):
"""Returns a string representation of self."""
return "<" + str(self.data) + ">"
class Network(object):
"""A set of species that are explicitly linked by interactions.
The network is a directed multigraph with labeled edges. The nodes in the graph
are the biochemical species involved. The edges represent an interaction between
two species, and the edge label is a reference to the associated Interaction
object.
Attributes:
None
"""
def __init__(self, species=[]):
"""Initializes a new Network object."""
self.__graph = MultiGraph(species)
def __repr__(self):
"""Returns a debugging string representation of this network."""
return "<Network: __graph: " + repr(self.__graph) + ">"
def __str__(self):
"""Returns a string representation of this network."""
return "Network of " + str(len(self.species())) + " species and " + \
str(len(self.interactions())) + " interactions."
def add_species(self, species):
"""Adds species to this network."""
self.__graph.add_node(species)
def add_interaction(self, source, sink, interaction):
"""Adds interaction to this network."""
self.__graph.add_edge(source, sink, interaction)
def source(self, species):
"""Returns list of unique sources for species."""
return self.__graph.parents(species)
def source_interactions(self, species):
"""Returns list of (source, interaction) pairs for species."""
return self.__graph.parent_edges(species)
def sink(self, species):
"""Returns list of unique sinks for species."""
return self.__graph.children(species)
def sink_interactions(self, species):
"""Returns list of (sink, interaction) pairs for species."""
return self.__graph.child_edges(species)
def species(self):
"""Returns list of the species in this network."""
return self.__graph.nodes()
def interactions(self):
"""Returns list of the unique interactions in this network."""
return self.__graph.labels()
| return substrates + " --> " + products | conditional_block |
__init__.py | # Copyright 2001 by Tarjei Mikkelsen. All rights reserved.
# This code is part of the Biopython distribution and governed by its
# license. Please see the LICENSE file that should have been included
# as part of this package.
"""BioPython Pathway module.
Bio.Pathway is a lightweight class library designed to support the following tasks:
- Data interchange and preprocessing between pathway databases and analysis software.
- Quick prototyping of pathway analysis algorithms
The basic object in the Bio.Pathway model is Interaction, which represents an arbitrary
interaction between any number of biochemical species.
Network objects are used to represent the connectivity between species in pathways
and reaction networks.
For applications where it is not necessary to explicitly represent network connectivity,
the specialized classes Reaction and System should be used in place of Interacton and
Network.
The Bio.Pathway classes, especially Interaction, are intentionally
designed to be very flexible. Their intended use are as wrappers around database
specific records, such as BIND objects. The value-added in this module is a
framework for representing collections of reactions in a way that supports
graph theoretic and numeric analysis.
Note: This module should be regarded as a prototype only. API changes are likely.
Comments and feature requests are most welcome.
"""
import sys
# Add path to Bio
sys.path.append('../..')
from functools import reduce
from Bio.Pathway.Rep.MultiGraph import *
__docformat__ = "restructuredtext en"
class Reaction(object):
"""Abstraction for a biochemical transformation.
This class represents a (potentially reversible) biochemical
transformation of the type:
a S1 + b S2 + ... --> c P1 + d P2 + ...
where
- a, b, c, d ... are positive numeric stochiometric coefficients,
- S1, S2, ... are substrates
- P1, P2, ... are products
A Reaction should be viewed as the net result of one or more individual
reaction steps, where each step is potentially facilitated by a different
catalyst. Support for 'Reaction algebra' will be added at some point in
the future.
Attributes:
- reactants -- map of involved species to their stochiometric coefficients:
reactants[S] = stochiometric constant for S
- catalysts -- list of tuples of catalysts required for this reaction
- reversible -- true iff reaction is reversible
- data -- reference to arbitrary additional data
Invariants:
- for all S in reactants: reactants[S] != 0
- for all C in catalysts: catalysts[C] != 0
"""
def | (self, reactants={}, catalysts=[],
reversible=0, data=None):
"""Initializes a new Reaction object."""
# enforce invariants on reactants:
self.reactants = reactants.copy()
# loop over original, edit the copy
for r, value in reactants.items():
if value == 0:
del self.reactants[r]
self.catalysts = sorted(set(catalysts))
self.data = data
self.reversible = reversible
def __eq__(self, r):
"""Returns true iff self is equal to r."""
return isinstance(r, Reaction) and \
self.reactants == r.reactants and \
self.catalysts == r.catalysts and \
self.data == r.data and \
self.reversible == r.reversible
def __ne__(self, r):
"""Returns true iff self is not equal to r."""
return not self.__eq__(r)
def __hash__(self):
"""Returns a hashcode for self."""
t = tuple(self.species())
return hash(t)
def __repr__(self):
"""Returns a debugging string representation of self."""
return "Reaction(" + \
",".join(map(repr, [self.reactants,
self.catalysts,
self.data,
self.reversible])) + ")"
def __str__(self):
"""Returns a string representation of self."""
substrates = ""
products = ""
all_species = sorted(self.reactants)
for species in all_species:
stoch = self.reactants[species]
if stoch < 0:
# species is a substrate:
if substrates != "":
substrates = substrates + " + "
if stoch != -1:
substrates = substrates + str(abs(stoch)) + " "
substrates = substrates + str(species)
elif stoch > 0:
# species is a product:
if products != "":
products = products + " + "
if stoch != 1:
products = products + str(stoch) + " "
products = products + str(species)
else:
raise AttributeError("Invalid 0 coefficient in Reaction.reactants")
if self.reversible:
return substrates + " <=> " + products
else:
return substrates + " --> " + products
def reverse(self):
"""Returns a new Reaction that is the reverse of self."""
reactants = {}
for r in self.reactants:
reactants[r] = - self.reactants[r]
return Reaction(reactants, self.catalysts,
self.reversible, self.data)
def species(self):
"""Returns a list of all Species involved in self."""
return list(self.reactants)
class System(object):
"""Abstraction for a collection of reactions.
This class is used in the Bio.Pathway framework to represent an arbitrary
collection of reactions without explicitly defined links.
Attributes:
None
"""
def __init__(self, reactions=[]):
"""Initializes a new System object."""
self.__reactions = set(reactions)
def __repr__(self):
"""Returns a debugging string representation of self."""
return "System(" + ",".join(map(repr, self.__reactions)) + ")"
def __str__(self):
"""Returns a string representation of self."""
return "System of " + str(len(self.__reactions)) + \
" reactions involving " + str(len(self.species())) + \
" species"
def add_reaction(self, reaction):
"""Adds reaction to self."""
self.__reactions.add(reaction)
def remove_reaction(self, reaction):
"""Removes reaction from self."""
self.__reactions.remove(reaction)
def reactions(self):
"""Returns a list of the reactions in this system.
Note the order is arbitrary!
"""
# TODO - Define __lt__ so that Reactions can be sorted on Python?
return list(self.__reactions)
def species(self):
"""Returns a list of the species in this system."""
return sorted(set(reduce(lambda s, x: s + x,
[x.species() for x in self.reactions()], [])))
def stochiometry(self):
"""Computes the stoichiometry matrix for self.
Returns (species, reactions, stoch) where
- species = ordered list of species in this system
- reactions = ordered list of reactions in this system
- stoch = 2D array where stoch[i][j] is coef of the
jth species in the ith reaction, as defined
by species and reactions above
"""
# Note: This an inefficient and ugly temporary implementation.
# To be practical, stochiometric matrices should probably
# be implemented by sparse matrices, which would require
# NumPy dependencies.
#
# PS: We should implement automatic checking for NumPy here.
species = self.species()
reactions = self.reactions()
stoch = [] * len(reactions)
for i in range(len(reactions)):
stoch[i] = 0 * len(species)
for s in reactions[i].species():
stoch[species.index(s)] = reactions[i].reactants[s]
return (species, reactions, stoch)
class Interaction(object):
"""An arbitrary interaction between any number of species.
This class definition is intended solely as a minimal wrapper interface that should
be implemented and extended by more specific abstractions.
Attributes:
- data -- reference to arbitrary additional data
"""
def __init_(self, data):
self.data = data
def __hash__(self):
"""Returns a hashcode for self."""
return hash(self.data)
def __repr__(self):
"""Returns a debugging string representation of self."""
return "Interaction(" + repr(self.data) + ")"
def __str__(self):
"""Returns a string representation of self."""
return "<" + str(self.data) + ">"
class Network(object):
"""A set of species that are explicitly linked by interactions.
The network is a directed multigraph with labeled edges. The nodes in the graph
are the biochemical species involved. The edges represent an interaction between
two species, and the edge label is a reference to the associated Interaction
object.
Attributes:
None
"""
def __init__(self, species=[]):
"""Initializes a new Network object."""
self.__graph = MultiGraph(species)
def __repr__(self):
"""Returns a debugging string representation of this network."""
return "<Network: __graph: " + repr(self.__graph) + ">"
def __str__(self):
"""Returns a string representation of this network."""
return "Network of " + str(len(self.species())) + " species and " + \
str(len(self.interactions())) + " interactions."
def add_species(self, species):
"""Adds species to this network."""
self.__graph.add_node(species)
def add_interaction(self, source, sink, interaction):
"""Adds interaction to this network."""
self.__graph.add_edge(source, sink, interaction)
def source(self, species):
"""Returns list of unique sources for species."""
return self.__graph.parents(species)
def source_interactions(self, species):
"""Returns list of (source, interaction) pairs for species."""
return self.__graph.parent_edges(species)
def sink(self, species):
"""Returns list of unique sinks for species."""
return self.__graph.children(species)
def sink_interactions(self, species):
"""Returns list of (sink, interaction) pairs for species."""
return self.__graph.child_edges(species)
def species(self):
"""Returns list of the species in this network."""
return self.__graph.nodes()
def interactions(self):
"""Returns list of the unique interactions in this network."""
return self.__graph.labels()
| __init__ | identifier_name |
__init__.py | # Copyright 2001 by Tarjei Mikkelsen. All rights reserved.
# This code is part of the Biopython distribution and governed by its
# license. Please see the LICENSE file that should have been included
# as part of this package.
"""BioPython Pathway module.
Bio.Pathway is a lightweight class library designed to support the following tasks:
- Data interchange and preprocessing between pathway databases and analysis software.
- Quick prototyping of pathway analysis algorithms
The basic object in the Bio.Pathway model is Interaction, which represents an arbitrary
interaction between any number of biochemical species.
Network objects are used to represent the connectivity between species in pathways
and reaction networks.
For applications where it is not necessary to explicitly represent network connectivity,
the specialized classes Reaction and System should be used in place of Interacton and
Network.
The Bio.Pathway classes, especially Interaction, are intentionally
designed to be very flexible. Their intended use are as wrappers around database
specific records, such as BIND objects. The value-added in this module is a
framework for representing collections of reactions in a way that supports
graph theoretic and numeric analysis.
Note: This module should be regarded as a prototype only. API changes are likely.
Comments and feature requests are most welcome.
"""
import sys
# Add path to Bio
sys.path.append('../..')
from functools import reduce
from Bio.Pathway.Rep.MultiGraph import *
__docformat__ = "restructuredtext en"
class Reaction(object):
"""Abstraction for a biochemical transformation.
This class represents a (potentially reversible) biochemical
transformation of the type:
a S1 + b S2 + ... --> c P1 + d P2 + ...
where
- a, b, c, d ... are positive numeric stochiometric coefficients,
- S1, S2, ... are substrates
- P1, P2, ... are products
A Reaction should be viewed as the net result of one or more individual
reaction steps, where each step is potentially facilitated by a different
catalyst. Support for 'Reaction algebra' will be added at some point in
the future.
Attributes:
- reactants -- map of involved species to their stochiometric coefficients:
reactants[S] = stochiometric constant for S
- catalysts -- list of tuples of catalysts required for this reaction
- reversible -- true iff reaction is reversible
- data -- reference to arbitrary additional data
Invariants:
- for all S in reactants: reactants[S] != 0
- for all C in catalysts: catalysts[C] != 0
"""
def __init__(self, reactants={}, catalysts=[],
reversible=0, data=None):
"""Initializes a new Reaction object."""
# enforce invariants on reactants:
self.reactants = reactants.copy()
# loop over original, edit the copy
for r, value in reactants.items():
if value == 0:
del self.reactants[r]
self.catalysts = sorted(set(catalysts))
self.data = data
self.reversible = reversible
def __eq__(self, r):
"""Returns true iff self is equal to r."""
return isinstance(r, Reaction) and \
self.reactants == r.reactants and \
self.catalysts == r.catalysts and \
self.data == r.data and \
self.reversible == r.reversible
def __ne__(self, r):
"""Returns true iff self is not equal to r."""
return not self.__eq__(r)
def __hash__(self):
"""Returns a hashcode for self."""
t = tuple(self.species())
return hash(t)
def __repr__(self):
"""Returns a debugging string representation of self."""
return "Reaction(" + \ |
def __str__(self):
"""Returns a string representation of self."""
substrates = ""
products = ""
all_species = sorted(self.reactants)
for species in all_species:
stoch = self.reactants[species]
if stoch < 0:
# species is a substrate:
if substrates != "":
substrates = substrates + " + "
if stoch != -1:
substrates = substrates + str(abs(stoch)) + " "
substrates = substrates + str(species)
elif stoch > 0:
# species is a product:
if products != "":
products = products + " + "
if stoch != 1:
products = products + str(stoch) + " "
products = products + str(species)
else:
raise AttributeError("Invalid 0 coefficient in Reaction.reactants")
if self.reversible:
return substrates + " <=> " + products
else:
return substrates + " --> " + products
def reverse(self):
"""Returns a new Reaction that is the reverse of self."""
reactants = {}
for r in self.reactants:
reactants[r] = - self.reactants[r]
return Reaction(reactants, self.catalysts,
self.reversible, self.data)
def species(self):
"""Returns a list of all Species involved in self."""
return list(self.reactants)
class System(object):
"""Abstraction for a collection of reactions.
This class is used in the Bio.Pathway framework to represent an arbitrary
collection of reactions without explicitly defined links.
Attributes:
None
"""
def __init__(self, reactions=[]):
"""Initializes a new System object."""
self.__reactions = set(reactions)
def __repr__(self):
"""Returns a debugging string representation of self."""
return "System(" + ",".join(map(repr, self.__reactions)) + ")"
def __str__(self):
"""Returns a string representation of self."""
return "System of " + str(len(self.__reactions)) + \
" reactions involving " + str(len(self.species())) + \
" species"
def add_reaction(self, reaction):
"""Adds reaction to self."""
self.__reactions.add(reaction)
def remove_reaction(self, reaction):
"""Removes reaction from self."""
self.__reactions.remove(reaction)
def reactions(self):
"""Returns a list of the reactions in this system.
Note the order is arbitrary!
"""
# TODO - Define __lt__ so that Reactions can be sorted on Python?
return list(self.__reactions)
def species(self):
"""Returns a list of the species in this system."""
return sorted(set(reduce(lambda s, x: s + x,
[x.species() for x in self.reactions()], [])))
def stochiometry(self):
"""Computes the stoichiometry matrix for self.
Returns (species, reactions, stoch) where
- species = ordered list of species in this system
- reactions = ordered list of reactions in this system
- stoch = 2D array where stoch[i][j] is coef of the
jth species in the ith reaction, as defined
by species and reactions above
"""
# Note: This an inefficient and ugly temporary implementation.
# To be practical, stochiometric matrices should probably
# be implemented by sparse matrices, which would require
# NumPy dependencies.
#
# PS: We should implement automatic checking for NumPy here.
species = self.species()
reactions = self.reactions()
stoch = [] * len(reactions)
for i in range(len(reactions)):
stoch[i] = 0 * len(species)
for s in reactions[i].species():
stoch[species.index(s)] = reactions[i].reactants[s]
return (species, reactions, stoch)
class Interaction(object):
"""An arbitrary interaction between any number of species.
This class definition is intended solely as a minimal wrapper interface that should
be implemented and extended by more specific abstractions.
Attributes:
- data -- reference to arbitrary additional data
"""
def __init_(self, data):
self.data = data
def __hash__(self):
"""Returns a hashcode for self."""
return hash(self.data)
def __repr__(self):
"""Returns a debugging string representation of self."""
return "Interaction(" + repr(self.data) + ")"
def __str__(self):
"""Returns a string representation of self."""
return "<" + str(self.data) + ">"
class Network(object):
"""A set of species that are explicitly linked by interactions.
The network is a directed multigraph with labeled edges. The nodes in the graph
are the biochemical species involved. The edges represent an interaction between
two species, and the edge label is a reference to the associated Interaction
object.
Attributes:
None
"""
def __init__(self, species=[]):
"""Initializes a new Network object."""
self.__graph = MultiGraph(species)
def __repr__(self):
"""Returns a debugging string representation of this network."""
return "<Network: __graph: " + repr(self.__graph) + ">"
def __str__(self):
"""Returns a string representation of this network."""
return "Network of " + str(len(self.species())) + " species and " + \
str(len(self.interactions())) + " interactions."
def add_species(self, species):
"""Adds species to this network."""
self.__graph.add_node(species)
def add_interaction(self, source, sink, interaction):
"""Adds interaction to this network."""
self.__graph.add_edge(source, sink, interaction)
def source(self, species):
"""Returns list of unique sources for species."""
return self.__graph.parents(species)
def source_interactions(self, species):
"""Returns list of (source, interaction) pairs for species."""
return self.__graph.parent_edges(species)
def sink(self, species):
"""Returns list of unique sinks for species."""
return self.__graph.children(species)
def sink_interactions(self, species):
"""Returns list of (sink, interaction) pairs for species."""
return self.__graph.child_edges(species)
def species(self):
"""Returns list of the species in this network."""
return self.__graph.nodes()
def interactions(self):
"""Returns list of the unique interactions in this network."""
return self.__graph.labels() | ",".join(map(repr, [self.reactants,
self.catalysts,
self.data,
self.reversible])) + ")" | random_line_split |
__init__.py | # Copyright 2001 by Tarjei Mikkelsen. All rights reserved.
# This code is part of the Biopython distribution and governed by its
# license. Please see the LICENSE file that should have been included
# as part of this package.
"""BioPython Pathway module.
Bio.Pathway is a lightweight class library designed to support the following tasks:
- Data interchange and preprocessing between pathway databases and analysis software.
- Quick prototyping of pathway analysis algorithms
The basic object in the Bio.Pathway model is Interaction, which represents an arbitrary
interaction between any number of biochemical species.
Network objects are used to represent the connectivity between species in pathways
and reaction networks.
For applications where it is not necessary to explicitly represent network connectivity,
the specialized classes Reaction and System should be used in place of Interacton and
Network.
The Bio.Pathway classes, especially Interaction, are intentionally
designed to be very flexible. Their intended use are as wrappers around database
specific records, such as BIND objects. The value-added in this module is a
framework for representing collections of reactions in a way that supports
graph theoretic and numeric analysis.
Note: This module should be regarded as a prototype only. API changes are likely.
Comments and feature requests are most welcome.
"""
import sys
# Add path to Bio
sys.path.append('../..')
from functools import reduce
from Bio.Pathway.Rep.MultiGraph import *
__docformat__ = "restructuredtext en"
class Reaction(object):
"""Abstraction for a biochemical transformation.
This class represents a (potentially reversible) biochemical
transformation of the type:
a S1 + b S2 + ... --> c P1 + d P2 + ...
where
- a, b, c, d ... are positive numeric stochiometric coefficients,
- S1, S2, ... are substrates
- P1, P2, ... are products
A Reaction should be viewed as the net result of one or more individual
reaction steps, where each step is potentially facilitated by a different
catalyst. Support for 'Reaction algebra' will be added at some point in
the future.
Attributes:
- reactants -- map of involved species to their stochiometric coefficients:
reactants[S] = stochiometric constant for S
- catalysts -- list of tuples of catalysts required for this reaction
- reversible -- true iff reaction is reversible
- data -- reference to arbitrary additional data
Invariants:
- for all S in reactants: reactants[S] != 0
- for all C in catalysts: catalysts[C] != 0
"""
def __init__(self, reactants={}, catalysts=[],
reversible=0, data=None):
"""Initializes a new Reaction object."""
# enforce invariants on reactants:
self.reactants = reactants.copy()
# loop over original, edit the copy
for r, value in reactants.items():
if value == 0:
del self.reactants[r]
self.catalysts = sorted(set(catalysts))
self.data = data
self.reversible = reversible
def __eq__(self, r):
"""Returns true iff self is equal to r."""
return isinstance(r, Reaction) and \
self.reactants == r.reactants and \
self.catalysts == r.catalysts and \
self.data == r.data and \
self.reversible == r.reversible
def __ne__(self, r):
"""Returns true iff self is not equal to r."""
return not self.__eq__(r)
def __hash__(self):
"""Returns a hashcode for self."""
t = tuple(self.species())
return hash(t)
def __repr__(self):
"""Returns a debugging string representation of self."""
return "Reaction(" + \
",".join(map(repr, [self.reactants,
self.catalysts,
self.data,
self.reversible])) + ")"
def __str__(self):
"""Returns a string representation of self."""
substrates = ""
products = ""
all_species = sorted(self.reactants)
for species in all_species:
stoch = self.reactants[species]
if stoch < 0:
# species is a substrate:
if substrates != "":
substrates = substrates + " + "
if stoch != -1:
substrates = substrates + str(abs(stoch)) + " "
substrates = substrates + str(species)
elif stoch > 0:
# species is a product:
if products != "":
products = products + " + "
if stoch != 1:
products = products + str(stoch) + " "
products = products + str(species)
else:
raise AttributeError("Invalid 0 coefficient in Reaction.reactants")
if self.reversible:
return substrates + " <=> " + products
else:
return substrates + " --> " + products
def reverse(self):
"""Returns a new Reaction that is the reverse of self."""
reactants = {}
for r in self.reactants:
reactants[r] = - self.reactants[r]
return Reaction(reactants, self.catalysts,
self.reversible, self.data)
def species(self):
"""Returns a list of all Species involved in self."""
return list(self.reactants)
class System(object):
"""Abstraction for a collection of reactions.
This class is used in the Bio.Pathway framework to represent an arbitrary
collection of reactions without explicitly defined links.
Attributes:
None
"""
def __init__(self, reactions=[]):
"""Initializes a new System object."""
self.__reactions = set(reactions)
def __repr__(self):
"""Returns a debugging string representation of self."""
return "System(" + ",".join(map(repr, self.__reactions)) + ")"
def __str__(self):
"""Returns a string representation of self."""
return "System of " + str(len(self.__reactions)) + \
" reactions involving " + str(len(self.species())) + \
" species"
def add_reaction(self, reaction):
|
def remove_reaction(self, reaction):
"""Removes reaction from self."""
self.__reactions.remove(reaction)
def reactions(self):
"""Returns a list of the reactions in this system.
Note the order is arbitrary!
"""
# TODO - Define __lt__ so that Reactions can be sorted on Python?
return list(self.__reactions)
def species(self):
"""Returns a list of the species in this system."""
return sorted(set(reduce(lambda s, x: s + x,
[x.species() for x in self.reactions()], [])))
def stochiometry(self):
"""Computes the stoichiometry matrix for self.
Returns (species, reactions, stoch) where
- species = ordered list of species in this system
- reactions = ordered list of reactions in this system
- stoch = 2D array where stoch[i][j] is coef of the
jth species in the ith reaction, as defined
by species and reactions above
"""
# Note: This an inefficient and ugly temporary implementation.
# To be practical, stochiometric matrices should probably
# be implemented by sparse matrices, which would require
# NumPy dependencies.
#
# PS: We should implement automatic checking for NumPy here.
species = self.species()
reactions = self.reactions()
stoch = [] * len(reactions)
for i in range(len(reactions)):
stoch[i] = 0 * len(species)
for s in reactions[i].species():
stoch[species.index(s)] = reactions[i].reactants[s]
return (species, reactions, stoch)
class Interaction(object):
"""An arbitrary interaction between any number of species.
This class definition is intended solely as a minimal wrapper interface that should
be implemented and extended by more specific abstractions.
Attributes:
- data -- reference to arbitrary additional data
"""
def __init_(self, data):
self.data = data
def __hash__(self):
"""Returns a hashcode for self."""
return hash(self.data)
def __repr__(self):
"""Returns a debugging string representation of self."""
return "Interaction(" + repr(self.data) + ")"
def __str__(self):
"""Returns a string representation of self."""
return "<" + str(self.data) + ">"
class Network(object):
"""A set of species that are explicitly linked by interactions.
The network is a directed multigraph with labeled edges. The nodes in the graph
are the biochemical species involved. The edges represent an interaction between
two species, and the edge label is a reference to the associated Interaction
object.
Attributes:
None
"""
def __init__(self, species=[]):
"""Initializes a new Network object."""
self.__graph = MultiGraph(species)
def __repr__(self):
"""Returns a debugging string representation of this network."""
return "<Network: __graph: " + repr(self.__graph) + ">"
def __str__(self):
"""Returns a string representation of this network."""
return "Network of " + str(len(self.species())) + " species and " + \
str(len(self.interactions())) + " interactions."
def add_species(self, species):
"""Adds species to this network."""
self.__graph.add_node(species)
def add_interaction(self, source, sink, interaction):
"""Adds interaction to this network."""
self.__graph.add_edge(source, sink, interaction)
def source(self, species):
"""Returns list of unique sources for species."""
return self.__graph.parents(species)
def source_interactions(self, species):
"""Returns list of (source, interaction) pairs for species."""
return self.__graph.parent_edges(species)
def sink(self, species):
"""Returns list of unique sinks for species."""
return self.__graph.children(species)
def sink_interactions(self, species):
"""Returns list of (sink, interaction) pairs for species."""
return self.__graph.child_edges(species)
def species(self):
"""Returns list of the species in this network."""
return self.__graph.nodes()
def interactions(self):
"""Returns list of the unique interactions in this network."""
return self.__graph.labels()
| """Adds reaction to self."""
self.__reactions.add(reaction) | identifier_body |
ReactBrowserEventEmitter.js | /**
* Copyright 2013-2014 Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* @providesModule ReactBrowserEventEmitter
* @typechecks static-only
*/
"use strict";
var EventConstants = require("./EventConstants");
var EventPluginHub = require("./EventPluginHub");
var EventPluginRegistry = require("./EventPluginRegistry");
var ReactEventEmitterMixin = require("./ReactEventEmitterMixin");
var ViewportMetrics = require("./ViewportMetrics");
var isEventSupported = require("./isEventSupported");
var merge = require("./merge");
/**
* Summary of `ReactBrowserEventEmitter` event handling:
*
* - Top-level delegation is used to trap most native browser events. This
* may only occur in the main thread and is the responsibility of
* ReactEventListener, which is injected and can therefore support pluggable
* event sources. This is the only work that occurs in the main thread.
*
* - We normalize and de-duplicate events to account for browser quirks. This
* may be done in the worker thread.
*
* - Forward these native events (with the associated top-level type used to
* trap it) to `EventPluginHub`, which in turn will ask plugins if they want
* to extract any synthetic events.
*
* - The `EventPluginHub` will then process each event by annotating them with
* "dispatches", a sequence of listeners and IDs that care about that event.
*
* - The `EventPluginHub` then dispatches the events.
*
* Overview of React and the event system:
*
* +------------+ .
* | DOM | .
* +------------+ .
* | .
* v .
* +------------+ .
* | ReactEvent | .
* | Listener | .
* +------------+ . +-----------+
* | . +--------+|SimpleEvent|
* | . | |Plugin |
* +-----|------+ . v +-----------+
* | | | . +--------------+ +------------+
* | +-----------.--->|EventPluginHub| | Event |
* | | . | | +-----------+ | Propagators|
* | ReactEvent | . | | |TapEvent | |------------|
* | Emitter | . | |<---+|Plugin | |other plugin|
* | | . | | +-----------+ | utilities |
* | +-----------.--->| | +------------+
* | | | . +--------------+
* +-----|------+ . ^ +-----------+
* | . | |Enter/Leave|
* + . +-------+|Plugin |
* +-------------+ . +-----------+
* | application | .
* |-------------| .
* | | .
* | | .
* +-------------+ .
* .
* React Core . General Purpose Event Plugin System
*/
var alreadyListeningTo = {};
var isMonitoringScrollValue = false;
var reactTopListenersCounter = 0;
// For events like 'submit' which don't consistently bubble (which we trap at a
// lower node than `document`), binding at `document` would cause duplicate
// events so we don't include them here
var topEventMapping = {
topBlur: 'blur',
topChange: 'change',
topClick: 'click',
topCompositionEnd: 'compositionend',
topCompositionStart: 'compositionstart',
topCompositionUpdate: 'compositionupdate',
topContextMenu: 'contextmenu',
topCopy: 'copy',
topCut: 'cut',
topDoubleClick: 'dblclick',
topDrag: 'drag',
topDragEnd: 'dragend',
topDragEnter: 'dragenter',
topDragExit: 'dragexit',
topDragLeave: 'dragleave',
topDragOver: 'dragover',
topDragStart: 'dragstart',
topDrop: 'drop',
topFocus: 'focus',
topInput: 'input',
topKeyDown: 'keydown',
topKeyPress: 'keypress',
topKeyUp: 'keyup',
topMouseDown: 'mousedown',
topMouseMove: 'mousemove',
topMouseOut: 'mouseout',
topMouseOver: 'mouseover',
topMouseUp: 'mouseup',
topPaste: 'paste',
topScroll: 'scroll',
topSelectionChange: 'selectionchange',
topTextInput: 'textInput',
topTouchCancel: 'touchcancel',
topTouchEnd: 'touchend',
topTouchMove: 'touchmove',
topTouchStart: 'touchstart',
topWheel: 'wheel'
};
/**
* To ensure no conflicts with other potential React instances on the page
*/
var topListenersIDKey = "_reactListenersID" + String(Math.random()).slice(2);
function getListeningForDocument(mountAt) |
/**
* `ReactBrowserEventEmitter` is used to attach top-level event listeners. For
* example:
*
* ReactBrowserEventEmitter.putListener('myID', 'onClick', myFunction);
*
* This would allocate a "registration" of `('onClick', myFunction)` on 'myID'.
*
* @internal
*/
var ReactBrowserEventEmitter = merge(ReactEventEmitterMixin, {
/**
* Injectable event backend
*/
ReactEventListener: null,
injection: {
/**
* @param {object} ReactEventListener
*/
injectReactEventListener: function(ReactEventListener) {
ReactEventListener.setHandleTopLevel(
ReactBrowserEventEmitter.handleTopLevel
);
ReactBrowserEventEmitter.ReactEventListener = ReactEventListener;
}
},
/**
* Sets whether or not any created callbacks should be enabled.
*
* @param {boolean} enabled True if callbacks should be enabled.
*/
setEnabled: function(enabled) {
if (ReactBrowserEventEmitter.ReactEventListener) {
ReactBrowserEventEmitter.ReactEventListener.setEnabled(enabled);
}
},
/**
* @return {boolean} True if callbacks are enabled.
*/
isEnabled: function() {
return !!(
ReactBrowserEventEmitter.ReactEventListener &&
ReactBrowserEventEmitter.ReactEventListener.isEnabled()
);
},
/**
* We listen for bubbled touch events on the document object.
*
* Firefox v8.01 (and possibly others) exhibited strange behavior when
* mounting `onmousemove` events at some node that was not the document
* element. The symptoms were that if your mouse is not moving over something
* contained within that mount point (for example on the background) the
* top-level listeners for `onmousemove` won't be called. However, if you
* register the `mousemove` on the document object, then it will of course
* catch all `mousemove`s. This along with iOS quirks, justifies restricting
* top-level listeners to the document object only, at least for these
* movement types of events and possibly all events.
*
* @see http://www.quirksmode.org/blog/archives/2010/09/click_event_del.html
*
* Also, `keyup`/`keypress`/`keydown` do not bubble to the window on IE, but
* they bubble to document.
*
* @param {string} registrationName Name of listener (e.g. `onClick`).
* @param {object} contentDocumentHandle Document which owns the container
*/
listenTo: function(registrationName, contentDocumentHandle) {
var mountAt = contentDocumentHandle;
var isListening = getListeningForDocument(mountAt);
var dependencies = EventPluginRegistry.
registrationNameDependencies[registrationName];
var topLevelTypes = EventConstants.topLevelTypes;
for (var i = 0, l = dependencies.length; i < l; i++) {
var dependency = dependencies[i];
if (!(
isListening.hasOwnProperty(dependency) &&
isListening[dependency]
)) {
if (dependency === topLevelTypes.topWheel) {
if (isEventSupported('wheel')) {
ReactBrowserEventEmitter.ReactEventListener.trapBubbledEvent(
topLevelTypes.topWheel,
'wheel',
mountAt
);
} else if (isEventSupported('mousewheel')) {
ReactBrowserEventEmitter.ReactEventListener.trapBubbledEvent(
topLevelTypes.topWheel,
'mousewheel',
mountAt
);
} else {
// Firefox needs to capture a different mouse scroll event.
// @see http://www.quirksmode.org/dom/events/tests/scroll.html
ReactBrowserEventEmitter.ReactEventListener.trapBubbledEvent(
topLevelTypes.topWheel,
'DOMMouseScroll',
mountAt
);
}
} else if (dependency === topLevelTypes.topScroll) {
if (isEventSupported('scroll', true)) {
ReactBrowserEventEmitter.ReactEventListener.trapCapturedEvent(
topLevelTypes.topScroll,
'scroll',
mountAt
);
} else {
ReactBrowserEventEmitter.ReactEventListener.trapBubbledEvent(
topLevelTypes.topScroll,
'scroll',
ReactBrowserEventEmitter.ReactEventListener.WINDOW_HANDLE
);
}
} else if (dependency === topLevelTypes.topFocus ||
dependency === topLevelTypes.topBlur) {
if (isEventSupported('focus', true)) {
ReactBrowserEventEmitter.ReactEventListener.trapCapturedEvent(
topLevelTypes.topFocus,
'focus',
mountAt
);
ReactBrowserEventEmitter.ReactEventListener.trapCapturedEvent(
topLevelTypes.topBlur,
'blur',
mountAt
);
} else if (isEventSupported('focusin')) {
// IE has `focusin` and `focusout` events which bubble.
// @see http://www.quirksmode.org/blog/archives/2008/04/delegating_the.html
ReactBrowserEventEmitter.ReactEventListener.trapBubbledEvent(
topLevelTypes.topFocus,
'focusin',
mountAt
);
ReactBrowserEventEmitter.ReactEventListener.trapBubbledEvent(
topLevelTypes.topBlur,
'focusout',
mountAt
);
}
// to make sure blur and focus event listeners are only attached once
isListening[topLevelTypes.topBlur] = true;
isListening[topLevelTypes.topFocus] = true;
} else if (topEventMapping.hasOwnProperty(dependency)) {
ReactBrowserEventEmitter.ReactEventListener.trapBubbledEvent(
dependency,
topEventMapping[dependency],
mountAt
);
}
isListening[dependency] = true;
}
}
},
trapBubbledEvent: function(topLevelType, handlerBaseName, handle) {
return ReactBrowserEventEmitter.ReactEventListener.trapBubbledEvent(
topLevelType,
handlerBaseName,
handle
);
},
trapCapturedEvent: function(topLevelType, handlerBaseName, handle) {
return ReactBrowserEventEmitter.ReactEventListener.trapCapturedEvent(
topLevelType,
handlerBaseName,
handle
);
},
/**
* Listens to window scroll and resize events. We cache scroll values so that
* application code can access them without triggering reflows.
*
* NOTE: Scroll events do not bubble.
*
* @see http://www.quirksmode.org/dom/events/scroll.html
*/
ensureScrollValueMonitoring: function(){
if (!isMonitoringScrollValue) {
var refresh = ViewportMetrics.refreshScrollValues;
ReactBrowserEventEmitter.ReactEventListener.monitorScrollValue(refresh);
isMonitoringScrollValue = true;
}
},
eventNameDispatchConfigs: EventPluginHub.eventNameDispatchConfigs,
registrationNameModules: EventPluginHub.registrationNameModules,
putListener: EventPluginHub.putListener,
getListener: EventPluginHub.getListener,
deleteListener: EventPluginHub.deleteListener,
deleteAllListeners: EventPluginHub.deleteAllListeners
});
module.exports = ReactBrowserEventEmitter;
| {
// In IE8, `mountAt` is a host object and doesn't have `hasOwnProperty`
// directly.
if (!Object.prototype.hasOwnProperty.call(mountAt, topListenersIDKey)) {
mountAt[topListenersIDKey] = reactTopListenersCounter++;
alreadyListeningTo[mountAt[topListenersIDKey]] = {};
}
return alreadyListeningTo[mountAt[topListenersIDKey]];
} | identifier_body |
ReactBrowserEventEmitter.js | /**
* Copyright 2013-2014 Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* @providesModule ReactBrowserEventEmitter
* @typechecks static-only
*/
"use strict";
var EventConstants = require("./EventConstants");
var EventPluginHub = require("./EventPluginHub");
var EventPluginRegistry = require("./EventPluginRegistry");
var ReactEventEmitterMixin = require("./ReactEventEmitterMixin");
var ViewportMetrics = require("./ViewportMetrics");
var isEventSupported = require("./isEventSupported");
var merge = require("./merge");
/**
* Summary of `ReactBrowserEventEmitter` event handling:
*
* - Top-level delegation is used to trap most native browser events. This
* may only occur in the main thread and is the responsibility of
* ReactEventListener, which is injected and can therefore support pluggable
* event sources. This is the only work that occurs in the main thread.
*
* - We normalize and de-duplicate events to account for browser quirks. This
* may be done in the worker thread.
*
* - Forward these native events (with the associated top-level type used to
* trap it) to `EventPluginHub`, which in turn will ask plugins if they want
* to extract any synthetic events.
*
* - The `EventPluginHub` will then process each event by annotating them with
* "dispatches", a sequence of listeners and IDs that care about that event.
*
* - The `EventPluginHub` then dispatches the events.
*
* Overview of React and the event system:
*
* +------------+ .
* | DOM | .
* +------------+ .
* | .
* v .
* +------------+ .
* | ReactEvent | .
* | Listener | .
* +------------+ . +-----------+
* | . +--------+|SimpleEvent|
* | . | |Plugin |
* +-----|------+ . v +-----------+
* | | | . +--------------+ +------------+
* | +-----------.--->|EventPluginHub| | Event |
* | | . | | +-----------+ | Propagators|
* | ReactEvent | . | | |TapEvent | |------------|
* | Emitter | . | |<---+|Plugin | |other plugin|
* | | . | | +-----------+ | utilities |
* | +-----------.--->| | +------------+
* | | | . +--------------+
* +-----|------+ . ^ +-----------+
* | . | |Enter/Leave|
* + . +-------+|Plugin |
* +-------------+ . +-----------+
* | application | .
* |-------------| .
* | | .
* | | .
* +-------------+ .
* .
* React Core . General Purpose Event Plugin System
*/
var alreadyListeningTo = {};
var isMonitoringScrollValue = false;
var reactTopListenersCounter = 0;
// For events like 'submit' which don't consistently bubble (which we trap at a
// lower node than `document`), binding at `document` would cause duplicate
// events so we don't include them here
var topEventMapping = {
topBlur: 'blur',
topChange: 'change',
topClick: 'click',
topCompositionEnd: 'compositionend',
topCompositionStart: 'compositionstart',
topCompositionUpdate: 'compositionupdate',
topContextMenu: 'contextmenu',
topCopy: 'copy',
topCut: 'cut',
topDoubleClick: 'dblclick',
topDrag: 'drag',
topDragEnd: 'dragend',
topDragEnter: 'dragenter',
topDragExit: 'dragexit',
topDragLeave: 'dragleave',
topDragOver: 'dragover',
topDragStart: 'dragstart',
topDrop: 'drop',
topFocus: 'focus',
topInput: 'input',
topKeyDown: 'keydown', | topKeyPress: 'keypress',
topKeyUp: 'keyup',
topMouseDown: 'mousedown',
topMouseMove: 'mousemove',
topMouseOut: 'mouseout',
topMouseOver: 'mouseover',
topMouseUp: 'mouseup',
topPaste: 'paste',
topScroll: 'scroll',
topSelectionChange: 'selectionchange',
topTextInput: 'textInput',
topTouchCancel: 'touchcancel',
topTouchEnd: 'touchend',
topTouchMove: 'touchmove',
topTouchStart: 'touchstart',
topWheel: 'wheel'
};
/**
* To ensure no conflicts with other potential React instances on the page
*/
var topListenersIDKey = "_reactListenersID" + String(Math.random()).slice(2);
function getListeningForDocument(mountAt) {
// In IE8, `mountAt` is a host object and doesn't have `hasOwnProperty`
// directly.
if (!Object.prototype.hasOwnProperty.call(mountAt, topListenersIDKey)) {
mountAt[topListenersIDKey] = reactTopListenersCounter++;
alreadyListeningTo[mountAt[topListenersIDKey]] = {};
}
return alreadyListeningTo[mountAt[topListenersIDKey]];
}
/**
* `ReactBrowserEventEmitter` is used to attach top-level event listeners. For
* example:
*
* ReactBrowserEventEmitter.putListener('myID', 'onClick', myFunction);
*
* This would allocate a "registration" of `('onClick', myFunction)` on 'myID'.
*
* @internal
*/
var ReactBrowserEventEmitter = merge(ReactEventEmitterMixin, {
/**
* Injectable event backend
*/
ReactEventListener: null,
injection: {
/**
* @param {object} ReactEventListener
*/
injectReactEventListener: function(ReactEventListener) {
ReactEventListener.setHandleTopLevel(
ReactBrowserEventEmitter.handleTopLevel
);
ReactBrowserEventEmitter.ReactEventListener = ReactEventListener;
}
},
/**
* Sets whether or not any created callbacks should be enabled.
*
* @param {boolean} enabled True if callbacks should be enabled.
*/
setEnabled: function(enabled) {
if (ReactBrowserEventEmitter.ReactEventListener) {
ReactBrowserEventEmitter.ReactEventListener.setEnabled(enabled);
}
},
/**
* @return {boolean} True if callbacks are enabled.
*/
isEnabled: function() {
return !!(
ReactBrowserEventEmitter.ReactEventListener &&
ReactBrowserEventEmitter.ReactEventListener.isEnabled()
);
},
/**
* We listen for bubbled touch events on the document object.
*
* Firefox v8.01 (and possibly others) exhibited strange behavior when
* mounting `onmousemove` events at some node that was not the document
* element. The symptoms were that if your mouse is not moving over something
* contained within that mount point (for example on the background) the
* top-level listeners for `onmousemove` won't be called. However, if you
* register the `mousemove` on the document object, then it will of course
* catch all `mousemove`s. This along with iOS quirks, justifies restricting
* top-level listeners to the document object only, at least for these
* movement types of events and possibly all events.
*
* @see http://www.quirksmode.org/blog/archives/2010/09/click_event_del.html
*
* Also, `keyup`/`keypress`/`keydown` do not bubble to the window on IE, but
* they bubble to document.
*
* @param {string} registrationName Name of listener (e.g. `onClick`).
* @param {object} contentDocumentHandle Document which owns the container
*/
listenTo: function(registrationName, contentDocumentHandle) {
var mountAt = contentDocumentHandle;
var isListening = getListeningForDocument(mountAt);
var dependencies = EventPluginRegistry.
registrationNameDependencies[registrationName];
var topLevelTypes = EventConstants.topLevelTypes;
for (var i = 0, l = dependencies.length; i < l; i++) {
var dependency = dependencies[i];
if (!(
isListening.hasOwnProperty(dependency) &&
isListening[dependency]
)) {
if (dependency === topLevelTypes.topWheel) {
if (isEventSupported('wheel')) {
ReactBrowserEventEmitter.ReactEventListener.trapBubbledEvent(
topLevelTypes.topWheel,
'wheel',
mountAt
);
} else if (isEventSupported('mousewheel')) {
ReactBrowserEventEmitter.ReactEventListener.trapBubbledEvent(
topLevelTypes.topWheel,
'mousewheel',
mountAt
);
} else {
// Firefox needs to capture a different mouse scroll event.
// @see http://www.quirksmode.org/dom/events/tests/scroll.html
ReactBrowserEventEmitter.ReactEventListener.trapBubbledEvent(
topLevelTypes.topWheel,
'DOMMouseScroll',
mountAt
);
}
} else if (dependency === topLevelTypes.topScroll) {
if (isEventSupported('scroll', true)) {
ReactBrowserEventEmitter.ReactEventListener.trapCapturedEvent(
topLevelTypes.topScroll,
'scroll',
mountAt
);
} else {
ReactBrowserEventEmitter.ReactEventListener.trapBubbledEvent(
topLevelTypes.topScroll,
'scroll',
ReactBrowserEventEmitter.ReactEventListener.WINDOW_HANDLE
);
}
} else if (dependency === topLevelTypes.topFocus ||
dependency === topLevelTypes.topBlur) {
if (isEventSupported('focus', true)) {
ReactBrowserEventEmitter.ReactEventListener.trapCapturedEvent(
topLevelTypes.topFocus,
'focus',
mountAt
);
ReactBrowserEventEmitter.ReactEventListener.trapCapturedEvent(
topLevelTypes.topBlur,
'blur',
mountAt
);
} else if (isEventSupported('focusin')) {
// IE has `focusin` and `focusout` events which bubble.
// @see http://www.quirksmode.org/blog/archives/2008/04/delegating_the.html
ReactBrowserEventEmitter.ReactEventListener.trapBubbledEvent(
topLevelTypes.topFocus,
'focusin',
mountAt
);
ReactBrowserEventEmitter.ReactEventListener.trapBubbledEvent(
topLevelTypes.topBlur,
'focusout',
mountAt
);
}
// to make sure blur and focus event listeners are only attached once
isListening[topLevelTypes.topBlur] = true;
isListening[topLevelTypes.topFocus] = true;
} else if (topEventMapping.hasOwnProperty(dependency)) {
ReactBrowserEventEmitter.ReactEventListener.trapBubbledEvent(
dependency,
topEventMapping[dependency],
mountAt
);
}
isListening[dependency] = true;
}
}
},
trapBubbledEvent: function(topLevelType, handlerBaseName, handle) {
return ReactBrowserEventEmitter.ReactEventListener.trapBubbledEvent(
topLevelType,
handlerBaseName,
handle
);
},
trapCapturedEvent: function(topLevelType, handlerBaseName, handle) {
return ReactBrowserEventEmitter.ReactEventListener.trapCapturedEvent(
topLevelType,
handlerBaseName,
handle
);
},
/**
* Listens to window scroll and resize events. We cache scroll values so that
* application code can access them without triggering reflows.
*
* NOTE: Scroll events do not bubble.
*
* @see http://www.quirksmode.org/dom/events/scroll.html
*/
ensureScrollValueMonitoring: function(){
if (!isMonitoringScrollValue) {
var refresh = ViewportMetrics.refreshScrollValues;
ReactBrowserEventEmitter.ReactEventListener.monitorScrollValue(refresh);
isMonitoringScrollValue = true;
}
},
eventNameDispatchConfigs: EventPluginHub.eventNameDispatchConfigs,
registrationNameModules: EventPluginHub.registrationNameModules,
putListener: EventPluginHub.putListener,
getListener: EventPluginHub.getListener,
deleteListener: EventPluginHub.deleteListener,
deleteAllListeners: EventPluginHub.deleteAllListeners
});
module.exports = ReactBrowserEventEmitter; | random_line_split | |
ReactBrowserEventEmitter.js | /**
* Copyright 2013-2014 Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* @providesModule ReactBrowserEventEmitter
* @typechecks static-only
*/
"use strict";
var EventConstants = require("./EventConstants");
var EventPluginHub = require("./EventPluginHub");
var EventPluginRegistry = require("./EventPluginRegistry");
var ReactEventEmitterMixin = require("./ReactEventEmitterMixin");
var ViewportMetrics = require("./ViewportMetrics");
var isEventSupported = require("./isEventSupported");
var merge = require("./merge");
/**
* Summary of `ReactBrowserEventEmitter` event handling:
*
* - Top-level delegation is used to trap most native browser events. This
* may only occur in the main thread and is the responsibility of
* ReactEventListener, which is injected and can therefore support pluggable
* event sources. This is the only work that occurs in the main thread.
*
* - We normalize and de-duplicate events to account for browser quirks. This
* may be done in the worker thread.
*
* - Forward these native events (with the associated top-level type used to
* trap it) to `EventPluginHub`, which in turn will ask plugins if they want
* to extract any synthetic events.
*
* - The `EventPluginHub` will then process each event by annotating them with
* "dispatches", a sequence of listeners and IDs that care about that event.
*
* - The `EventPluginHub` then dispatches the events.
*
* Overview of React and the event system:
*
* +------------+ .
* | DOM | .
* +------------+ .
* | .
* v .
* +------------+ .
* | ReactEvent | .
* | Listener | .
* +------------+ . +-----------+
* | . +--------+|SimpleEvent|
* | . | |Plugin |
* +-----|------+ . v +-----------+
* | | | . +--------------+ +------------+
* | +-----------.--->|EventPluginHub| | Event |
* | | . | | +-----------+ | Propagators|
* | ReactEvent | . | | |TapEvent | |------------|
* | Emitter | . | |<---+|Plugin | |other plugin|
* | | . | | +-----------+ | utilities |
* | +-----------.--->| | +------------+
* | | | . +--------------+
* +-----|------+ . ^ +-----------+
* | . | |Enter/Leave|
* + . +-------+|Plugin |
* +-------------+ . +-----------+
* | application | .
* |-------------| .
* | | .
* | | .
* +-------------+ .
* .
* React Core . General Purpose Event Plugin System
*/
var alreadyListeningTo = {};
var isMonitoringScrollValue = false;
var reactTopListenersCounter = 0;
// For events like 'submit' which don't consistently bubble (which we trap at a
// lower node than `document`), binding at `document` would cause duplicate
// events so we don't include them here
var topEventMapping = {
topBlur: 'blur',
topChange: 'change',
topClick: 'click',
topCompositionEnd: 'compositionend',
topCompositionStart: 'compositionstart',
topCompositionUpdate: 'compositionupdate',
topContextMenu: 'contextmenu',
topCopy: 'copy',
topCut: 'cut',
topDoubleClick: 'dblclick',
topDrag: 'drag',
topDragEnd: 'dragend',
topDragEnter: 'dragenter',
topDragExit: 'dragexit',
topDragLeave: 'dragleave',
topDragOver: 'dragover',
topDragStart: 'dragstart',
topDrop: 'drop',
topFocus: 'focus',
topInput: 'input',
topKeyDown: 'keydown',
topKeyPress: 'keypress',
topKeyUp: 'keyup',
topMouseDown: 'mousedown',
topMouseMove: 'mousemove',
topMouseOut: 'mouseout',
topMouseOver: 'mouseover',
topMouseUp: 'mouseup',
topPaste: 'paste',
topScroll: 'scroll',
topSelectionChange: 'selectionchange',
topTextInput: 'textInput',
topTouchCancel: 'touchcancel',
topTouchEnd: 'touchend',
topTouchMove: 'touchmove',
topTouchStart: 'touchstart',
topWheel: 'wheel'
};
/**
* To ensure no conflicts with other potential React instances on the page
*/
var topListenersIDKey = "_reactListenersID" + String(Math.random()).slice(2);
function | (mountAt) {
// In IE8, `mountAt` is a host object and doesn't have `hasOwnProperty`
// directly.
if (!Object.prototype.hasOwnProperty.call(mountAt, topListenersIDKey)) {
mountAt[topListenersIDKey] = reactTopListenersCounter++;
alreadyListeningTo[mountAt[topListenersIDKey]] = {};
}
return alreadyListeningTo[mountAt[topListenersIDKey]];
}
/**
* `ReactBrowserEventEmitter` is used to attach top-level event listeners. For
* example:
*
* ReactBrowserEventEmitter.putListener('myID', 'onClick', myFunction);
*
* This would allocate a "registration" of `('onClick', myFunction)` on 'myID'.
*
* @internal
*/
var ReactBrowserEventEmitter = merge(ReactEventEmitterMixin, {
/**
* Injectable event backend
*/
ReactEventListener: null,
injection: {
/**
* @param {object} ReactEventListener
*/
injectReactEventListener: function(ReactEventListener) {
ReactEventListener.setHandleTopLevel(
ReactBrowserEventEmitter.handleTopLevel
);
ReactBrowserEventEmitter.ReactEventListener = ReactEventListener;
}
},
/**
* Sets whether or not any created callbacks should be enabled.
*
* @param {boolean} enabled True if callbacks should be enabled.
*/
setEnabled: function(enabled) {
if (ReactBrowserEventEmitter.ReactEventListener) {
ReactBrowserEventEmitter.ReactEventListener.setEnabled(enabled);
}
},
/**
* @return {boolean} True if callbacks are enabled.
*/
isEnabled: function() {
return !!(
ReactBrowserEventEmitter.ReactEventListener &&
ReactBrowserEventEmitter.ReactEventListener.isEnabled()
);
},
/**
* We listen for bubbled touch events on the document object.
*
* Firefox v8.01 (and possibly others) exhibited strange behavior when
* mounting `onmousemove` events at some node that was not the document
* element. The symptoms were that if your mouse is not moving over something
* contained within that mount point (for example on the background) the
* top-level listeners for `onmousemove` won't be called. However, if you
* register the `mousemove` on the document object, then it will of course
* catch all `mousemove`s. This along with iOS quirks, justifies restricting
* top-level listeners to the document object only, at least for these
* movement types of events and possibly all events.
*
* @see http://www.quirksmode.org/blog/archives/2010/09/click_event_del.html
*
* Also, `keyup`/`keypress`/`keydown` do not bubble to the window on IE, but
* they bubble to document.
*
* @param {string} registrationName Name of listener (e.g. `onClick`).
* @param {object} contentDocumentHandle Document which owns the container
*/
listenTo: function(registrationName, contentDocumentHandle) {
var mountAt = contentDocumentHandle;
var isListening = getListeningForDocument(mountAt);
var dependencies = EventPluginRegistry.
registrationNameDependencies[registrationName];
var topLevelTypes = EventConstants.topLevelTypes;
for (var i = 0, l = dependencies.length; i < l; i++) {
var dependency = dependencies[i];
if (!(
isListening.hasOwnProperty(dependency) &&
isListening[dependency]
)) {
if (dependency === topLevelTypes.topWheel) {
if (isEventSupported('wheel')) {
ReactBrowserEventEmitter.ReactEventListener.trapBubbledEvent(
topLevelTypes.topWheel,
'wheel',
mountAt
);
} else if (isEventSupported('mousewheel')) {
ReactBrowserEventEmitter.ReactEventListener.trapBubbledEvent(
topLevelTypes.topWheel,
'mousewheel',
mountAt
);
} else {
// Firefox needs to capture a different mouse scroll event.
// @see http://www.quirksmode.org/dom/events/tests/scroll.html
ReactBrowserEventEmitter.ReactEventListener.trapBubbledEvent(
topLevelTypes.topWheel,
'DOMMouseScroll',
mountAt
);
}
} else if (dependency === topLevelTypes.topScroll) {
if (isEventSupported('scroll', true)) {
ReactBrowserEventEmitter.ReactEventListener.trapCapturedEvent(
topLevelTypes.topScroll,
'scroll',
mountAt
);
} else {
ReactBrowserEventEmitter.ReactEventListener.trapBubbledEvent(
topLevelTypes.topScroll,
'scroll',
ReactBrowserEventEmitter.ReactEventListener.WINDOW_HANDLE
);
}
} else if (dependency === topLevelTypes.topFocus ||
dependency === topLevelTypes.topBlur) {
if (isEventSupported('focus', true)) {
ReactBrowserEventEmitter.ReactEventListener.trapCapturedEvent(
topLevelTypes.topFocus,
'focus',
mountAt
);
ReactBrowserEventEmitter.ReactEventListener.trapCapturedEvent(
topLevelTypes.topBlur,
'blur',
mountAt
);
} else if (isEventSupported('focusin')) {
// IE has `focusin` and `focusout` events which bubble.
// @see http://www.quirksmode.org/blog/archives/2008/04/delegating_the.html
ReactBrowserEventEmitter.ReactEventListener.trapBubbledEvent(
topLevelTypes.topFocus,
'focusin',
mountAt
);
ReactBrowserEventEmitter.ReactEventListener.trapBubbledEvent(
topLevelTypes.topBlur,
'focusout',
mountAt
);
}
// to make sure blur and focus event listeners are only attached once
isListening[topLevelTypes.topBlur] = true;
isListening[topLevelTypes.topFocus] = true;
} else if (topEventMapping.hasOwnProperty(dependency)) {
ReactBrowserEventEmitter.ReactEventListener.trapBubbledEvent(
dependency,
topEventMapping[dependency],
mountAt
);
}
isListening[dependency] = true;
}
}
},
trapBubbledEvent: function(topLevelType, handlerBaseName, handle) {
return ReactBrowserEventEmitter.ReactEventListener.trapBubbledEvent(
topLevelType,
handlerBaseName,
handle
);
},
trapCapturedEvent: function(topLevelType, handlerBaseName, handle) {
return ReactBrowserEventEmitter.ReactEventListener.trapCapturedEvent(
topLevelType,
handlerBaseName,
handle
);
},
/**
* Listens to window scroll and resize events. We cache scroll values so that
* application code can access them without triggering reflows.
*
* NOTE: Scroll events do not bubble.
*
* @see http://www.quirksmode.org/dom/events/scroll.html
*/
ensureScrollValueMonitoring: function(){
if (!isMonitoringScrollValue) {
var refresh = ViewportMetrics.refreshScrollValues;
ReactBrowserEventEmitter.ReactEventListener.monitorScrollValue(refresh);
isMonitoringScrollValue = true;
}
},
eventNameDispatchConfigs: EventPluginHub.eventNameDispatchConfigs,
registrationNameModules: EventPluginHub.registrationNameModules,
putListener: EventPluginHub.putListener,
getListener: EventPluginHub.getListener,
deleteListener: EventPluginHub.deleteListener,
deleteAllListeners: EventPluginHub.deleteAllListeners
});
module.exports = ReactBrowserEventEmitter;
| getListeningForDocument | identifier_name |
lint-ctypes.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![deny(ctypes)]
extern crate libc;
extern {
pub fn bare_type1(size: int); //~ ERROR: found rust type
pub fn bare_type2(size: uint); //~ ERROR: found rust type
pub fn ptr_type1(size: *int); //~ ERROR: found rust type
pub fn ptr_type2(size: *uint); //~ ERROR: found rust type
pub fn good1(size: *libc::c_int);
pub fn good2(size: *libc::c_uint);
}
fn | () {
}
| main | identifier_name |
lint-ctypes.rs | // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![deny(ctypes)]
extern crate libc;
extern {
pub fn bare_type1(size: int); //~ ERROR: found rust type
pub fn bare_type2(size: uint); //~ ERROR: found rust type
pub fn ptr_type1(size: *int); //~ ERROR: found rust type
pub fn ptr_type2(size: *uint); //~ ERROR: found rust type
pub fn good1(size: *libc::c_int);
pub fn good2(size: *libc::c_uint);
}
fn main() {
} | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
// | random_line_split | |
lint-ctypes.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![deny(ctypes)]
extern crate libc;
extern {
pub fn bare_type1(size: int); //~ ERROR: found rust type
pub fn bare_type2(size: uint); //~ ERROR: found rust type
pub fn ptr_type1(size: *int); //~ ERROR: found rust type
pub fn ptr_type2(size: *uint); //~ ERROR: found rust type
pub fn good1(size: *libc::c_int);
pub fn good2(size: *libc::c_uint);
}
fn main() | {
} | identifier_body | |
utils.js | 'use strict';
// copied from http://www.broofa.com/Tools/Math.uuid.js
var CHARS = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz'
.split('');
exports.uuid = function () {
var chars = CHARS, uuid = new Array(36), rnd=0, r;
for (var i = 0; i < 36; i++) {
if (i===8 || i===13 || i===18 || i===23) {
uuid[i] = '-';
}
else if (i===14) {
uuid[i] = '4';
}
else {
if (rnd <= 0x02) rnd = 0x2000000 + (Math.random()*0x1000000)|0;
r = rnd & 0xf;
rnd = rnd >> 4;
uuid[i] = chars[(i === 19) ? (r & 0x3) | 0x8 : r];
}
}
return uuid.join('');
};
exports.in_array = function (item, array) {
if (!array) return false;
if (!Array.isArray(array)) return false;
return (array.indexOf(item) !== -1);
};
exports.to_object = function (array) {
if (typeof array === 'string') {
array = array.split(/[\s,;]+/);
}
if (!Array.isArray(array)) {
throw "arguments to to_object must be a string or array";
}
var rv = {};
for (var i = 0; i < array.length; i++) {
if (array[i] === undefined) { continue; }
rv[array[i]] = true;
}
return rv;
};
exports.sort_keys = function (obj) {
return Object.keys(obj).sort();
};
exports.uniq = function (arr) {
var out = [];
var o = 0;
for (var i=0,l=arr.length; i < l; i++) {
if (out.length === 0) {
out.push(arr[i]);
}
else if (out[o] !== arr[i]) {
out.push(arr[i]);
o++;
}
}
return out;
};
exports.extend = function (target) {
// http://stackoverflow.com/questions/14974864/
var sources = [].slice.call(arguments, 1);
sources.forEach(function (source) {
for (var prop in source) {
target[prop] = source[prop];
}
});
return target;
};
exports.ISODate = function (d) {
function pad(n) |
return d.getUTCFullYear()+'-' +
pad(d.getUTCMonth()+1)+'-' +
pad(d.getUTCDate())+'T' +
pad(d.getUTCHours())+':' +
pad(d.getUTCMinutes())+':' +
pad(d.getUTCSeconds())+'Z' ;
};
var _daynames = ['Sun', 'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat'];
var _monnames = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'];
function _pad (num, n, p) {
var s = '' + num;
p = p || '0';
while (s.length < n) s = p + s;
return s;
}
exports.pad = _pad;
exports.date_to_str = function (d) {
return _daynames[d.getDay()] + ', ' + _pad(d.getDate(),2) + ' ' +
_monnames[d.getMonth()] + ' ' + d.getFullYear() + ' ' +
_pad(d.getHours(),2) + ':' + _pad(d.getMinutes(),2) + ':' +
_pad(d.getSeconds(),2) +
' ' + d.toString().match(/\sGMT([+-]\d+)/)[1];
};
exports.decode_qp = function (line) {
line = line.replace(/\r\n/g,"\n").replace(/[ \t]+\r?\n/g,"\n");
if (! /=/.test(line)) {
// this may be a pointless optimisation...
return new Buffer(line);
}
line = line.replace(/=\n/mg, '');
var buf = new Buffer(line.length);
var pos = 0;
for (var i=0,l=line.length; i < l; i++) {
if (line[i] === '=' &&
/=[0-9a-fA-F]{2}/.test(line[i] + line[i+1] + line[i+2])) {
i++;
buf[pos] = parseInt(line[i] + line[i+1], 16);
i++;
}
else {
buf[pos] = line.charCodeAt(i);
}
pos++;
}
return buf.slice(0, pos);
};
function _char_to_qp (ch) {
return "=" + _pad(ch.charCodeAt(0).toString(16).toUpperCase(), 2);
}
// Shameless attempt to copy from Perl's MIME::QuotedPrint::Perl code.
exports.encode_qp = function (str) {
str = str.replace(
/([^\ \t\n!"#\$%&'()*+,\-.\/0-9:;<>?\@A-Z\[\\\]^_`a-z{|}~])/g,
function (orig, p1) {
return _char_to_qp(p1);
}
).replace(/([ \t]+)$/gm, function (orig, p1) {
return p1.split('').map(_char_to_qp).join('');
});
// Now shorten lines to 76 chars, but don't break =XX encodes.
// Method: iterate over to char 73.
// If char 74, 75 or 76 is = we need to break before the =.
// Otherwise break at 76.
var cur_length = 0;
var out = '';
for (var i=0; i<str.length; i++) {
if (str[i] === '\n') {
out += '\n';
cur_length = 0;
continue;
}
cur_length++;
if (cur_length <= 73) {
out += str[i];
}
else if (cur_length > 73 && cur_length < 76) {
if (str[i] === '=') {
out += '=\n=';
cur_length = 1;
}
else {
out += str[i];
}
}
else {
// Otherwise got to char 76
// Don't insert '=\n' if end of string or next char is already \n:
if ((i === (str.length - 1)) || (str[i+1] === '\n')) {
out += str[i];
}
else {
out += '=\n' + str[i];
cur_length = 1;
}
}
}
return out;
};
exports.node_min = function (min, cur) {
var wants = min.split('.');
var has = (cur || process.version.substring(1)).split('.');
for (var i=0; i<=3; i++) {
// note use of unary + for fast type conversion to num
if (+has[i] > +wants[i]) { return true; }
if (+has[i] < +wants[i]) { return false; }
}
// they're identical
return true;
};
exports.existsSync =
require(exports.node_min('0.8') ? 'fs' : 'path').existsSync;
exports.indexOfLF = function (buf, maxlength) {
for (var i=0; i<buf.length; i++) {
if (maxlength && (i === maxlength)) break;
if (buf[i] === 0x0a) return i;
}
return -1;
};
exports.prettySize = function (size) {
if (size === 0 || !size) return 0;
var i = Math.floor(Math.log(size)/Math.log(1024));
var units = ['B', 'kB', 'MB', 'GB', 'TB'];
return (size/Math.pow(1024,i)).toFixed(2) * 1 + '' + units[i];
};
exports.valid_regexes = function (list, file) {
// list: an array of regexes. file: the file name containing the regex list
var valid = [];
for (var i=0; i<list.length; i++) {
try {
new RegExp(list[i]);
}
catch (e) {
require('./logger')
.logerror("invalid regex in " + file + ", " + list[i]);
continue;
}
valid.push(list[i]);
}
return valid; // returns a list of valid regexes
};
exports.regexp_escape = function(text) {
return text.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, "\\$&");
};
exports.base64 = function (str) {
return new Buffer(str, "UTF-8").toString("base64");
};
exports.unbase64 = function (str) {
return new Buffer(str, "base64").toString("UTF-8");
};
// Fisher-Yates shuffle
// http://bost.ocks.org/mike/shuffle/
exports.shuffle = function(array) {
var m = array.length, t, i;
// While there remain elements to shuffle…
while (m) {
// Pick a remaining element…
i = Math.floor(Math.random() * m--);
// And swap it with the current element.
t = array[m];
array[m] = array[i];
array[i] = t;
}
return array;
};
exports.elapsed = function (start, decimal_places) {
var diff = (Date.now() - start) / 1000; // in seconds
if (decimal_places === undefined) {
decimal_places = diff > 5 ? 0 : diff > 2 ? 1 : 2;
}
else {
decimal_places = parseInt(decimal_places);
if (isNaN(decimal_places)) {
decimal_places = 2;
}
}
return diff.toFixed(decimal_places);
};
exports.wildcard_to_regexp = function (str) {
return str
.replace(/[-\[\]\/{}()*+?.,\\^$|#\s]/g, "\\$&")
.replace('\\*', '.*')
.replace('\\?', '.') + '$';
};
| { return n<10 ? '0'+n : n; } | identifier_body |
utils.js | 'use strict';
// copied from http://www.broofa.com/Tools/Math.uuid.js
var CHARS = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz'
.split('');
exports.uuid = function () {
var chars = CHARS, uuid = new Array(36), rnd=0, r;
for (var i = 0; i < 36; i++) {
if (i===8 || i===13 || i===18 || i===23) {
uuid[i] = '-';
}
else if (i===14) {
uuid[i] = '4';
}
else {
if (rnd <= 0x02) rnd = 0x2000000 + (Math.random()*0x1000000)|0;
r = rnd & 0xf;
rnd = rnd >> 4;
uuid[i] = chars[(i === 19) ? (r & 0x3) | 0x8 : r];
}
}
return uuid.join('');
};
exports.in_array = function (item, array) {
if (!array) return false;
if (!Array.isArray(array)) return false;
return (array.indexOf(item) !== -1);
};
exports.to_object = function (array) {
if (typeof array === 'string') {
array = array.split(/[\s,;]+/);
}
if (!Array.isArray(array)) |
var rv = {};
for (var i = 0; i < array.length; i++) {
if (array[i] === undefined) { continue; }
rv[array[i]] = true;
}
return rv;
};
exports.sort_keys = function (obj) {
return Object.keys(obj).sort();
};
exports.uniq = function (arr) {
var out = [];
var o = 0;
for (var i=0,l=arr.length; i < l; i++) {
if (out.length === 0) {
out.push(arr[i]);
}
else if (out[o] !== arr[i]) {
out.push(arr[i]);
o++;
}
}
return out;
};
exports.extend = function (target) {
// http://stackoverflow.com/questions/14974864/
var sources = [].slice.call(arguments, 1);
sources.forEach(function (source) {
for (var prop in source) {
target[prop] = source[prop];
}
});
return target;
};
exports.ISODate = function (d) {
function pad(n) { return n<10 ? '0'+n : n; }
return d.getUTCFullYear()+'-' +
pad(d.getUTCMonth()+1)+'-' +
pad(d.getUTCDate())+'T' +
pad(d.getUTCHours())+':' +
pad(d.getUTCMinutes())+':' +
pad(d.getUTCSeconds())+'Z' ;
};
var _daynames = ['Sun', 'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat'];
var _monnames = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'];
function _pad (num, n, p) {
var s = '' + num;
p = p || '0';
while (s.length < n) s = p + s;
return s;
}
exports.pad = _pad;
exports.date_to_str = function (d) {
return _daynames[d.getDay()] + ', ' + _pad(d.getDate(),2) + ' ' +
_monnames[d.getMonth()] + ' ' + d.getFullYear() + ' ' +
_pad(d.getHours(),2) + ':' + _pad(d.getMinutes(),2) + ':' +
_pad(d.getSeconds(),2) +
' ' + d.toString().match(/\sGMT([+-]\d+)/)[1];
};
exports.decode_qp = function (line) {
line = line.replace(/\r\n/g,"\n").replace(/[ \t]+\r?\n/g,"\n");
if (! /=/.test(line)) {
// this may be a pointless optimisation...
return new Buffer(line);
}
line = line.replace(/=\n/mg, '');
var buf = new Buffer(line.length);
var pos = 0;
for (var i=0,l=line.length; i < l; i++) {
if (line[i] === '=' &&
/=[0-9a-fA-F]{2}/.test(line[i] + line[i+1] + line[i+2])) {
i++;
buf[pos] = parseInt(line[i] + line[i+1], 16);
i++;
}
else {
buf[pos] = line.charCodeAt(i);
}
pos++;
}
return buf.slice(0, pos);
};
function _char_to_qp (ch) {
return "=" + _pad(ch.charCodeAt(0).toString(16).toUpperCase(), 2);
}
// Shameless attempt to copy from Perl's MIME::QuotedPrint::Perl code.
exports.encode_qp = function (str) {
str = str.replace(
/([^\ \t\n!"#\$%&'()*+,\-.\/0-9:;<>?\@A-Z\[\\\]^_`a-z{|}~])/g,
function (orig, p1) {
return _char_to_qp(p1);
}
).replace(/([ \t]+)$/gm, function (orig, p1) {
return p1.split('').map(_char_to_qp).join('');
});
// Now shorten lines to 76 chars, but don't break =XX encodes.
// Method: iterate over to char 73.
// If char 74, 75 or 76 is = we need to break before the =.
// Otherwise break at 76.
var cur_length = 0;
var out = '';
for (var i=0; i<str.length; i++) {
if (str[i] === '\n') {
out += '\n';
cur_length = 0;
continue;
}
cur_length++;
if (cur_length <= 73) {
out += str[i];
}
else if (cur_length > 73 && cur_length < 76) {
if (str[i] === '=') {
out += '=\n=';
cur_length = 1;
}
else {
out += str[i];
}
}
else {
// Otherwise got to char 76
// Don't insert '=\n' if end of string or next char is already \n:
if ((i === (str.length - 1)) || (str[i+1] === '\n')) {
out += str[i];
}
else {
out += '=\n' + str[i];
cur_length = 1;
}
}
}
return out;
};
exports.node_min = function (min, cur) {
var wants = min.split('.');
var has = (cur || process.version.substring(1)).split('.');
for (var i=0; i<=3; i++) {
// note use of unary + for fast type conversion to num
if (+has[i] > +wants[i]) { return true; }
if (+has[i] < +wants[i]) { return false; }
}
// they're identical
return true;
};
exports.existsSync =
require(exports.node_min('0.8') ? 'fs' : 'path').existsSync;
exports.indexOfLF = function (buf, maxlength) {
for (var i=0; i<buf.length; i++) {
if (maxlength && (i === maxlength)) break;
if (buf[i] === 0x0a) return i;
}
return -1;
};
exports.prettySize = function (size) {
if (size === 0 || !size) return 0;
var i = Math.floor(Math.log(size)/Math.log(1024));
var units = ['B', 'kB', 'MB', 'GB', 'TB'];
return (size/Math.pow(1024,i)).toFixed(2) * 1 + '' + units[i];
};
exports.valid_regexes = function (list, file) {
// list: an array of regexes. file: the file name containing the regex list
var valid = [];
for (var i=0; i<list.length; i++) {
try {
new RegExp(list[i]);
}
catch (e) {
require('./logger')
.logerror("invalid regex in " + file + ", " + list[i]);
continue;
}
valid.push(list[i]);
}
return valid; // returns a list of valid regexes
};
exports.regexp_escape = function(text) {
return text.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, "\\$&");
};
exports.base64 = function (str) {
return new Buffer(str, "UTF-8").toString("base64");
};
exports.unbase64 = function (str) {
return new Buffer(str, "base64").toString("UTF-8");
};
// Fisher-Yates shuffle
// http://bost.ocks.org/mike/shuffle/
exports.shuffle = function(array) {
var m = array.length, t, i;
// While there remain elements to shuffle…
while (m) {
// Pick a remaining element…
i = Math.floor(Math.random() * m--);
// And swap it with the current element.
t = array[m];
array[m] = array[i];
array[i] = t;
}
return array;
};
exports.elapsed = function (start, decimal_places) {
var diff = (Date.now() - start) / 1000; // in seconds
if (decimal_places === undefined) {
decimal_places = diff > 5 ? 0 : diff > 2 ? 1 : 2;
}
else {
decimal_places = parseInt(decimal_places);
if (isNaN(decimal_places)) {
decimal_places = 2;
}
}
return diff.toFixed(decimal_places);
};
exports.wildcard_to_regexp = function (str) {
return str
.replace(/[-\[\]\/{}()*+?.,\\^$|#\s]/g, "\\$&")
.replace('\\*', '.*')
.replace('\\?', '.') + '$';
};
| {
throw "arguments to to_object must be a string or array";
} | conditional_block |
utils.js | 'use strict';
// copied from http://www.broofa.com/Tools/Math.uuid.js
var CHARS = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz'
.split('');
exports.uuid = function () {
var chars = CHARS, uuid = new Array(36), rnd=0, r;
for (var i = 0; i < 36; i++) {
if (i===8 || i===13 || i===18 || i===23) {
uuid[i] = '-';
}
else if (i===14) {
uuid[i] = '4';
}
else {
if (rnd <= 0x02) rnd = 0x2000000 + (Math.random()*0x1000000)|0;
r = rnd & 0xf;
rnd = rnd >> 4;
uuid[i] = chars[(i === 19) ? (r & 0x3) | 0x8 : r];
}
}
return uuid.join('');
};
exports.in_array = function (item, array) {
if (!array) return false;
if (!Array.isArray(array)) return false;
return (array.indexOf(item) !== -1);
};
exports.to_object = function (array) {
if (typeof array === 'string') {
array = array.split(/[\s,;]+/);
}
if (!Array.isArray(array)) {
throw "arguments to to_object must be a string or array";
}
var rv = {};
for (var i = 0; i < array.length; i++) {
if (array[i] === undefined) { continue; }
rv[array[i]] = true;
}
return rv;
};
exports.sort_keys = function (obj) {
return Object.keys(obj).sort();
};
exports.uniq = function (arr) {
var out = [];
var o = 0;
for (var i=0,l=arr.length; i < l; i++) {
if (out.length === 0) {
out.push(arr[i]);
}
else if (out[o] !== arr[i]) {
out.push(arr[i]);
o++;
}
}
return out;
};
exports.extend = function (target) {
// http://stackoverflow.com/questions/14974864/
var sources = [].slice.call(arguments, 1);
sources.forEach(function (source) {
for (var prop in source) {
target[prop] = source[prop];
}
});
return target;
};
exports.ISODate = function (d) {
function pad(n) { return n<10 ? '0'+n : n; }
return d.getUTCFullYear()+'-' +
pad(d.getUTCMonth()+1)+'-' +
pad(d.getUTCDate())+'T' +
pad(d.getUTCHours())+':' +
pad(d.getUTCMinutes())+':' +
pad(d.getUTCSeconds())+'Z' ;
};
var _daynames = ['Sun', 'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat'];
var _monnames = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'];
function _pad (num, n, p) {
var s = '' + num;
p = p || '0';
while (s.length < n) s = p + s;
return s;
}
exports.pad = _pad;
exports.date_to_str = function (d) {
return _daynames[d.getDay()] + ', ' + _pad(d.getDate(),2) + ' ' +
_monnames[d.getMonth()] + ' ' + d.getFullYear() + ' ' +
_pad(d.getHours(),2) + ':' + _pad(d.getMinutes(),2) + ':' +
_pad(d.getSeconds(),2) +
' ' + d.toString().match(/\sGMT([+-]\d+)/)[1];
};
exports.decode_qp = function (line) {
line = line.replace(/\r\n/g,"\n").replace(/[ \t]+\r?\n/g,"\n");
if (! /=/.test(line)) {
// this may be a pointless optimisation...
return new Buffer(line);
}
line = line.replace(/=\n/mg, '');
var buf = new Buffer(line.length);
var pos = 0;
for (var i=0,l=line.length; i < l; i++) {
if (line[i] === '=' &&
/=[0-9a-fA-F]{2}/.test(line[i] + line[i+1] + line[i+2])) {
i++;
buf[pos] = parseInt(line[i] + line[i+1], 16);
i++;
}
else {
buf[pos] = line.charCodeAt(i);
}
pos++;
}
return buf.slice(0, pos);
};
function _char_to_qp (ch) {
return "=" + _pad(ch.charCodeAt(0).toString(16).toUpperCase(), 2);
}
// Shameless attempt to copy from Perl's MIME::QuotedPrint::Perl code.
exports.encode_qp = function (str) {
str = str.replace(
/([^\ \t\n!"#\$%&'()*+,\-.\/0-9:;<>?\@A-Z\[\\\]^_`a-z{|}~])/g,
function (orig, p1) {
return _char_to_qp(p1);
}
).replace(/([ \t]+)$/gm, function (orig, p1) {
return p1.split('').map(_char_to_qp).join('');
});
// Now shorten lines to 76 chars, but don't break =XX encodes.
// Method: iterate over to char 73.
// If char 74, 75 or 76 is = we need to break before the =.
// Otherwise break at 76.
var cur_length = 0;
var out = '';
for (var i=0; i<str.length; i++) {
if (str[i] === '\n') {
out += '\n';
cur_length = 0;
continue;
}
cur_length++;
if (cur_length <= 73) {
out += str[i];
}
else if (cur_length > 73 && cur_length < 76) {
if (str[i] === '=') {
out += '=\n=';
cur_length = 1;
}
else {
out += str[i];
}
}
else {
// Otherwise got to char 76
// Don't insert '=\n' if end of string or next char is already \n: | out += '=\n' + str[i];
cur_length = 1;
}
}
}
return out;
};
exports.node_min = function (min, cur) {
var wants = min.split('.');
var has = (cur || process.version.substring(1)).split('.');
for (var i=0; i<=3; i++) {
// note use of unary + for fast type conversion to num
if (+has[i] > +wants[i]) { return true; }
if (+has[i] < +wants[i]) { return false; }
}
// they're identical
return true;
};
exports.existsSync =
require(exports.node_min('0.8') ? 'fs' : 'path').existsSync;
exports.indexOfLF = function (buf, maxlength) {
for (var i=0; i<buf.length; i++) {
if (maxlength && (i === maxlength)) break;
if (buf[i] === 0x0a) return i;
}
return -1;
};
exports.prettySize = function (size) {
if (size === 0 || !size) return 0;
var i = Math.floor(Math.log(size)/Math.log(1024));
var units = ['B', 'kB', 'MB', 'GB', 'TB'];
return (size/Math.pow(1024,i)).toFixed(2) * 1 + '' + units[i];
};
exports.valid_regexes = function (list, file) {
// list: an array of regexes. file: the file name containing the regex list
var valid = [];
for (var i=0; i<list.length; i++) {
try {
new RegExp(list[i]);
}
catch (e) {
require('./logger')
.logerror("invalid regex in " + file + ", " + list[i]);
continue;
}
valid.push(list[i]);
}
return valid; // returns a list of valid regexes
};
exports.regexp_escape = function(text) {
return text.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, "\\$&");
};
exports.base64 = function (str) {
return new Buffer(str, "UTF-8").toString("base64");
};
exports.unbase64 = function (str) {
return new Buffer(str, "base64").toString("UTF-8");
};
// Fisher-Yates shuffle
// http://bost.ocks.org/mike/shuffle/
exports.shuffle = function(array) {
var m = array.length, t, i;
// While there remain elements to shuffle…
while (m) {
// Pick a remaining element…
i = Math.floor(Math.random() * m--);
// And swap it with the current element.
t = array[m];
array[m] = array[i];
array[i] = t;
}
return array;
};
exports.elapsed = function (start, decimal_places) {
var diff = (Date.now() - start) / 1000; // in seconds
if (decimal_places === undefined) {
decimal_places = diff > 5 ? 0 : diff > 2 ? 1 : 2;
}
else {
decimal_places = parseInt(decimal_places);
if (isNaN(decimal_places)) {
decimal_places = 2;
}
}
return diff.toFixed(decimal_places);
};
exports.wildcard_to_regexp = function (str) {
return str
.replace(/[-\[\]\/{}()*+?.,\\^$|#\s]/g, "\\$&")
.replace('\\*', '.*')
.replace('\\?', '.') + '$';
}; | if ((i === (str.length - 1)) || (str[i+1] === '\n')) {
out += str[i];
}
else { | random_line_split |
utils.js | 'use strict';
// copied from http://www.broofa.com/Tools/Math.uuid.js
var CHARS = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz'
.split('');
exports.uuid = function () {
    // Generate a random RFC-4122 version-4 UUID string.
    var chars = CHARS, uuid = new Array(36), rnd=0, r;
    for (var i = 0; i < 36; i++) {
        if (i===8 || i===13 || i===18 || i===23) {
            // Fixed hyphen positions of the 8-4-4-4-12 layout.
            uuid[i] = '-';
        }
        else if (i===14) {
            // Version nibble: always '4' for a v4 UUID.
            uuid[i] = '4';
        }
        else {
            // Refill the random pool six hex digits at a time.
            if (rnd <= 0x02) rnd = 0x2000000 + (Math.random()*0x1000000)|0;
            r = rnd & 0xf;
            rnd = rnd >> 4;
            // Position 19 is the variant nibble: force binary 10xx (8..B).
            uuid[i] = chars[(i === 19) ? (r & 0x3) | 0x8 : r];
        }
    }
    return uuid.join('');
};
exports.in_array = function (item, array) {
if (!array) return false;
if (!Array.isArray(array)) return false;
return (array.indexOf(item) !== -1);
};
exports.to_object = function (array) {
if (typeof array === 'string') {
array = array.split(/[\s,;]+/);
}
if (!Array.isArray(array)) {
throw "arguments to to_object must be a string or array";
}
var rv = {};
for (var i = 0; i < array.length; i++) {
if (array[i] === undefined) { continue; }
rv[array[i]] = true;
}
return rv;
};
exports.sort_keys = function (obj) {
return Object.keys(obj).sort();
};
exports.uniq = function (arr) {
    // Collapse *consecutive* duplicate elements (like Unix `uniq`);
    // non-adjacent duplicates are preserved. Sort first for a full dedupe.
    var out = [];
    // `o` tracks the index of the most recently emitted element.
    var o = 0;
    for (var i=0,l=arr.length; i < l; i++) {
        if (out.length === 0) {
            out.push(arr[i]);
        }
        else if (out[o] !== arr[i]) {
            out.push(arr[i]);
            o++;
        }
    }
    return out;
};
exports.extend = function (target) {
// http://stackoverflow.com/questions/14974864/
var sources = [].slice.call(arguments, 1);
sources.forEach(function (source) {
for (var prop in source) {
target[prop] = source[prop];
}
});
return target;
};
exports.ISODate = function (d) {
function | (n) { return n<10 ? '0'+n : n; }
return d.getUTCFullYear()+'-' +
pad(d.getUTCMonth()+1)+'-' +
pad(d.getUTCDate())+'T' +
pad(d.getUTCHours())+':' +
pad(d.getUTCMinutes())+':' +
pad(d.getUTCSeconds())+'Z' ;
};
var _daynames = ['Sun', 'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat'];
var _monnames = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'];
function _pad (num, n, p) {
var s = '' + num;
p = p || '0';
while (s.length < n) s = p + s;
return s;
}
exports.pad = _pad;
exports.date_to_str = function (d) {
    // Format a Date in RFC-2822 style using LOCAL time, e.g.
    // "Mon, 02 Jan 2006 15:04:05 -0700". The numeric timezone offset is
    // scraped out of Date.prototype.toString's "GMT+hhmm" segment.
    return _daynames[d.getDay()] + ', ' + _pad(d.getDate(),2) + ' ' +
        _monnames[d.getMonth()] + ' ' + d.getFullYear() + ' ' +
        _pad(d.getHours(),2) + ':' + _pad(d.getMinutes(),2) + ':' +
        _pad(d.getSeconds(),2) +
        ' ' + d.toString().match(/\sGMT([+-]\d+)/)[1];
};
exports.decode_qp = function (line) {
    // Decode quoted-printable text into a Buffer of raw bytes.
    // First normalise CRLF to LF and drop whitespace trailing a line.
    line = line.replace(/\r\n/g,"\n").replace(/[ \t]+\r?\n/g,"\n");
    if (! /=/.test(line)) {
        // this may be a pointless optimisation...
        return new Buffer(line);
    }
    // Remove soft line breaks ('=' at end of line joins the next line).
    line = line.replace(/=\n/mg, '');
    // Output can only shrink, so line.length is a safe upper bound.
    var buf = new Buffer(line.length);
    var pos = 0;
    for (var i=0,l=line.length; i < l; i++) {
        if (line[i] === '=' &&
            /=[0-9a-fA-F]{2}/.test(line[i] + line[i+1] + line[i+2])) {
            // "=XX" hex escape: decode the two hex digits into one byte.
            i++;
            buf[pos] = parseInt(line[i] + line[i+1], 16);
            i++;
        }
        else {
            // Literal character: copy its (low) char code through.
            buf[pos] = line.charCodeAt(i);
        }
        pos++;
    }
    // Only `pos` bytes were written; trim the over-allocated buffer.
    return buf.slice(0, pos);
};
// Encode one character as a quoted-printable escape, e.g. '=' -> '=3D'.
function _char_to_qp (ch) {
    var hex = ch.charCodeAt(0).toString(16).toUpperCase();
    return '=' + _pad(hex, 2);
}
// Shameless attempt to copy from Perl's MIME::QuotedPrint::Perl code.
exports.encode_qp = function (str) {
    // Pass 1: escape every byte that is not printable-ASCII-safe for QP
    // (and is not '='), then escape any whitespace left at line ends.
    str = str.replace(
        /([^\ \t\n!"#\$%&'()*+,\-.\/0-9:;<>?\@A-Z\[\\\]^_`a-z{|}~])/g,
        function (orig, p1) {
            return _char_to_qp(p1);
        }
    ).replace(/([ \t]+)$/gm, function (orig, p1) {
        return p1.split('').map(_char_to_qp).join('');
    });
    // Now shorten lines to 76 chars, but don't break =XX encodes.
    // Method: iterate over to char 73.
    // If char 74, 75 or 76 is = we need to break before the =.
    // Otherwise break at 76.
    var cur_length = 0;
    var out = '';
    for (var i=0; i<str.length; i++) {
        if (str[i] === '\n') {
            // Hard newline: emit it and start counting a fresh line.
            out += '\n';
            cur_length = 0;
            continue;
        }
        cur_length++;
        if (cur_length <= 73) {
            out += str[i];
        }
        else if (cur_length > 73 && cur_length < 76) {
            if (str[i] === '=') {
                // An escape starting here would straddle the 76 limit;
                // soft-break before it ('=\n'), then re-emit the '='.
                out += '=\n=';
                cur_length = 1;
            }
            else {
                out += str[i];
            }
        }
        else {
            // Otherwise got to char 76
            // Don't insert '=\n' if end of string or next char is already \n:
            if ((i === (str.length - 1)) || (str[i+1] === '\n')) {
                out += str[i];
            }
            else {
                // Soft line break, then continue on a new output line.
                out += '=\n' + str[i];
                cur_length = 1;
            }
        }
    }
    return out;
};
exports.node_min = function (min, cur) {
var wants = min.split('.');
var has = (cur || process.version.substring(1)).split('.');
for (var i=0; i<=3; i++) {
// note use of unary + for fast type conversion to num
if (+has[i] > +wants[i]) { return true; }
if (+has[i] < +wants[i]) { return false; }
}
// they're identical
return true;
};
// existsSync moved from the `path` module to `fs` in node 0.8;
// pick whichever module provides it on the running version.
exports.existsSync =
    require(exports.node_min('0.8') ? 'fs' : 'path').existsSync;
exports.indexOfLF = function (buf, maxlength) {
for (var i=0; i<buf.length; i++) {
if (maxlength && (i === maxlength)) break;
if (buf[i] === 0x0a) return i;
}
return -1;
};
exports.prettySize = function (size) {
    // Human-readable byte count, e.g. 1536 -> "1.5kB".
    // Zero / falsy sizes short-circuit to the *number* 0 (not a string).
    if (size === 0 || !size) return 0;
    // Largest unit that keeps the scaled value >= 1.
    var i = Math.floor(Math.log(size)/Math.log(1024));
    var units = ['B', 'kB', 'MB', 'GB', 'TB'];
    // `* 1` strips trailing zeros ("1.50" -> 1.5) before the unit is appended.
    // NOTE(review): sizes >= 1024^5 index past the units table and sizes < 1
    // give a negative index — confirm inputs stay in the byte..TB range.
    return (size/Math.pow(1024,i)).toFixed(2) * 1 + '' + units[i];
};
exports.valid_regexes = function (list, file) {
// list: an array of regexes. file: the file name containing the regex list
var valid = [];
for (var i=0; i<list.length; i++) {
try {
new RegExp(list[i]);
}
catch (e) {
require('./logger')
.logerror("invalid regex in " + file + ", " + list[i]);
continue;
}
valid.push(list[i]);
}
return valid; // returns a list of valid regexes
};
exports.regexp_escape = function(text) {
return text.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, "\\$&");
};
exports.base64 = function (str) {
return new Buffer(str, "UTF-8").toString("base64");
};
exports.unbase64 = function (str) {
return new Buffer(str, "base64").toString("UTF-8");
};
// Fisher-Yates shuffle
// http://bost.ocks.org/mike/shuffle/
exports.shuffle = function(array) {
    // Shuffle `array` IN PLACE (uniform permutation) and return it.
    var m = array.length, t, i;
    // While there remain elements to shuffle…
    while (m) {
        // Pick a remaining element…
        i = Math.floor(Math.random() * m--);
        // And swap it with the current element.
        t = array[m];
        array[m] = array[i];
        array[i] = t;
    }
    return array;
};
exports.elapsed = function (start, decimal_places) {
    // Seconds elapsed since `start` (a Date.now()-style millisecond
    // timestamp), returned as a string formatted by toFixed.
    var diff = (Date.now() - start) / 1000; // in seconds
    if (decimal_places === undefined) {
        // Longer durations get less precision: >5s none, >2s one, else two.
        decimal_places = diff > 5 ? 0 : diff > 2 ? 1 : 2;
    }
    else {
        decimal_places = parseInt(decimal_places);
        if (isNaN(decimal_places)) {
            // Non-numeric argument: fall back to two decimal places.
            decimal_places = 2;
        }
    }
    return diff.toFixed(decimal_places);
};
exports.wildcard_to_regexp = function (str) {
return str
.replace(/[-\[\]\/{}()*+?.,\\^$|#\s]/g, "\\$&")
.replace('\\*', '.*')
.replace('\\?', '.') + '$';
};
| pad | identifier_name |
assets.py | """
Asset compilation and collection.
"""
from __future__ import print_function
import argparse
from paver.easy import sh, path, task, cmdopts, needs, consume_args, call_task
from watchdog.observers import Observer
from watchdog.events import PatternMatchingEventHandler
import glob
import traceback
from .utils.envs import Env
from .utils.cmd import cmd, django_cmd
COFFEE_DIRS = ['lms', 'cms', 'common']
SASS_LOAD_PATHS = ['./common/static/sass']
SASS_UPDATE_DIRS = ['*/static']
SASS_CACHE_PATH = '/tmp/sass-cache'
class CoffeeScriptWatcher(PatternMatchingEventHandler):
"""
Watches for coffeescript changes
"""
ignore_directories = True
patterns = ['*.coffee']
def register(self, observer):
"""
register files with observer
"""
dirnames = set()
for filename in sh(coffeescript_files(), capture=True).splitlines():
dirnames.add(path(filename).dirname())
for dirname in dirnames:
observer.schedule(self, dirname)
def on_modified(self, event):
print('\tCHANGED:', event.src_path)
try:
compile_coffeescript(event.src_path)
except Exception: # pylint: disable=W0703
traceback.print_exc()
class SassWatcher(PatternMatchingEventHandler):
"""
Watches for sass file changes
"""
ignore_directories = True
patterns = ['*.scss']
ignore_patterns = ['common/static/xmodule/*']
def register(self, observer):
"""
register files with observer
"""
for dirname in SASS_LOAD_PATHS + SASS_UPDATE_DIRS + theme_sass_paths():
paths = []
if '*' in dirname:
paths.extend(glob.glob(dirname))
else:
paths.append(dirname)
for dirname in paths:
observer.schedule(self, dirname, recursive=True)
def on_modified(self, event):
print('\tCHANGED:', event.src_path)
try:
compile_sass()
except Exception: # pylint: disable=W0703
traceback.print_exc()
class XModuleSassWatcher(SassWatcher):
"""
Watches for sass file changes
"""
ignore_directories = True
ignore_patterns = []
def register(self, observer):
|
def on_modified(self, event):
print('\tCHANGED:', event.src_path)
try:
process_xmodule_assets()
except Exception: # pylint: disable=W0703
traceback.print_exc()
def theme_sass_paths():
"""
Return the a list of paths to the theme's sass assets,
or an empty list if no theme is configured.
"""
edxapp_env = Env()
if edxapp_env.feature_flags.get('USE_CUSTOM_THEME', False):
theme_name = edxapp_env.env_tokens.get('THEME_NAME', '')
parent_dir = path(edxapp_env.REPO_ROOT).abspath().parent
theme_root = parent_dir / "themes" / theme_name
return [theme_root / "static" / "sass"]
else:
return []
def coffeescript_files():
"""
return find command for paths containing coffee files
"""
dirs = " ".join([Env.REPO_ROOT / coffee_dir for coffee_dir in COFFEE_DIRS])
return cmd('find', dirs, '-type f', '-name \"*.coffee\"')
def compile_coffeescript(*files):
"""
Compile CoffeeScript to JavaScript.
"""
if not files:
files = ["`{}`".format(coffeescript_files())]
sh(cmd(
"node_modules/.bin/coffee", "--compile", *files
))
def compile_sass(debug=False):
"""
Compile Sass to CSS.
"""
theme_paths = theme_sass_paths()
sh(cmd(
'sass', '' if debug else '--style compressed',
"--cache-location {cache}".format(cache=SASS_CACHE_PATH),
"--load-path", " ".join(SASS_LOAD_PATHS + theme_paths),
"--update", "-E", "utf-8", " ".join(SASS_UPDATE_DIRS + theme_paths)
))
def compile_templated_sass(systems, settings):
"""
Render Mako templates for Sass files.
`systems` is a list of systems (e.g. 'lms' or 'studio' or both)
`settings` is the Django settings module to use.
"""
for sys in systems:
sh(django_cmd(sys, settings, 'preprocess_assets'))
def process_xmodule_assets():
"""
Process XModule static assets.
"""
sh('xmodule_assets common/static/xmodule')
def collect_assets(systems, settings):
"""
Collect static assets, including Django pipeline processing.
`systems` is a list of systems (e.g. 'lms' or 'studio' or both)
`settings` is the Django settings module to use.
"""
for sys in systems:
sh(django_cmd(sys, settings, "collectstatic --noinput > /dev/null"))
@task
@cmdopts([('background', 'b', 'Background mode')])
def watch_assets(options):
"""
Watch for changes to asset files, and regenerate js/css
"""
observer = Observer()
CoffeeScriptWatcher().register(observer)
SassWatcher().register(observer)
XModuleSassWatcher().register(observer)
print("Starting asset watcher...")
observer.start()
if not getattr(options, 'background', False):
# when running as a separate process, the main thread needs to loop
# in order to allow for shutdown by contrl-c
try:
while True:
observer.join(2)
except KeyboardInterrupt:
observer.stop()
print("\nStopped asset watcher.")
@task
@needs('pavelib.prereqs.install_prereqs')
@consume_args
def update_assets(args):
    """
    Compile CoffeeScript and Sass, then collect static assets.

    Command-line arguments (parsed from `args`):
        system: one or more of 'lms' / 'studio' (default: both)
        --settings: Django settings module (default 'dev')
        --debug: disable Sass compression
        --skip-collect: skip the Django collectstatic step
        --watch: keep watching for changes after the initial build
    """
    parser = argparse.ArgumentParser(prog='paver update_assets')
    parser.add_argument(
        'system', type=str, nargs='*', default=['lms', 'studio'],
        help="lms or studio",
    )
    parser.add_argument(
        '--settings', type=str, default="dev",
        help="Django settings module",
    )
    parser.add_argument(
        '--debug', action='store_true', default=False,
        help="Disable Sass compression",
    )
    parser.add_argument(
        '--skip-collect', dest='collect', action='store_false', default=True,
        help="Skip collection of static assets",
    )
    parser.add_argument(
        '--watch', action='store_true', default=False,
        help="Watch files for changes",
    )
    args = parser.parse_args(args)
    compile_templated_sass(args.system, args.settings)
    process_xmodule_assets()
    compile_coffeescript()
    compile_sass(args.debug)
    if args.collect:
        collect_assets(args.system, args.settings)
    if args.watch:
        # In debug mode the watcher runs in the foreground so Ctrl-C works.
        call_task('watch_assets', options={'background': not args.debug})
| """
register files with observer
"""
observer.schedule(self, 'common/lib/xmodule/', recursive=True) | identifier_body |
assets.py | """
Asset compilation and collection.
"""
from __future__ import print_function
import argparse
from paver.easy import sh, path, task, cmdopts, needs, consume_args, call_task
from watchdog.observers import Observer
from watchdog.events import PatternMatchingEventHandler
import glob
import traceback
from .utils.envs import Env
from .utils.cmd import cmd, django_cmd
COFFEE_DIRS = ['lms', 'cms', 'common']
SASS_LOAD_PATHS = ['./common/static/sass']
SASS_UPDATE_DIRS = ['*/static']
SASS_CACHE_PATH = '/tmp/sass-cache'
class CoffeeScriptWatcher(PatternMatchingEventHandler):
    """
    Watches for coffeescript changes

    Recompiles a .coffee file to JavaScript as soon as watchdog reports
    that it was modified.
    """
    ignore_directories = True
    patterns = ['*.coffee']
    def register(self, observer):
        """
        register files with observer

        Runs the shell `find` command from coffeescript_files(), collects
        the set of directories containing .coffee files, and schedules
        this handler on each one.
        """
        dirnames = set()
        for filename in sh(coffeescript_files(), capture=True).splitlines():
            dirnames.add(path(filename).dirname())
        for dirname in dirnames:
            observer.schedule(self, dirname)
    def on_modified(self, event):
        print('\tCHANGED:', event.src_path)
        try:
            # Recompile only the file that changed, not the whole tree.
            compile_coffeescript(event.src_path)
        except Exception:  # pylint: disable=W0703
            # A broken source file must not kill the watcher loop;
            # report the error and keep watching.
            traceback.print_exc()
class SassWatcher(PatternMatchingEventHandler):
    """
    Watches for sass file changes

    Triggers a full sass recompilation whenever any .scss file changes
    (sass imports make per-file recompilation unreliable).
    """
    ignore_directories = True
    patterns = ['*.scss']
    # XModule sass under common/static/xmodule is owned by
    # XModuleSassWatcher instead.
    ignore_patterns = ['common/static/xmodule/*']
    def register(self, observer):
        """
        register files with observer

        Expands any glob patterns in the sass load/update paths and
        schedules this handler, recursively, on every resulting directory.
        """
        for pattern in SASS_LOAD_PATHS + SASS_UPDATE_DIRS + theme_sass_paths():
            # Fix: the original reused the name `dirname` for both the raw
            # pattern and the expanded directory, shadowing its own loop
            # variable; use distinct names for clarity.
            if '*' in pattern:
                dirnames = glob.glob(pattern)
            else:
                dirnames = [pattern]
            for dirname in dirnames:
                observer.schedule(self, dirname, recursive=True)
    def on_modified(self, event):
        print('\tCHANGED:', event.src_path)
        try:
            compile_sass()
        except Exception:  # pylint: disable=W0703
            # Keep the watcher alive even when compilation fails.
            traceback.print_exc()
class XModuleSassWatcher(SassWatcher):
    """
    Watches for sass file changes

    Unlike the parent SassWatcher, this watches the XModule package
    source tree and re-runs the xmodule asset pipeline instead of plain
    sass compilation.
    """
    ignore_directories = True
    # Override the parent's ignore list: xmodule sass must NOT be
    # ignored by this watcher.
    ignore_patterns = []
    def register(self, observer):
        """
        register files with observer
        """
        observer.schedule(self, 'common/lib/xmodule/', recursive=True)
    def on_modified(self, event):
        print('\tCHANGED:', event.src_path)
        try:
            process_xmodule_assets()
        except Exception:  # pylint: disable=W0703
            # Keep the watcher alive even when asset processing fails.
            traceback.print_exc()
traceback.print_exc()
def theme_sass_paths():
    """
    Return a list containing the path to the configured theme's sass
    assets, or an empty list when no custom theme is enabled.
    """
    env = Env()
    if not env.feature_flags.get('USE_CUSTOM_THEME', False):
        return []
    theme_name = env.env_tokens.get('THEME_NAME', '')
    # Themes live in a "themes" directory that is a sibling of the repo.
    themes_dir = path(env.REPO_ROOT).abspath().parent / "themes"
    return [themes_dir / theme_name / "static" / "sass"]
def coffeescript_files():
    """
    Build the shell `find` command string that lists every .coffee file
    under the known coffeescript directories.
    """
    search_dirs = " ".join(Env.REPO_ROOT / d for d in COFFEE_DIRS)
    return cmd('find', search_dirs, '-type f', '-name \"*.coffee\"')
def compile_coffeescript(*files):
    """
    Compile CoffeeScript sources to JavaScript.

    With no arguments, compiles every .coffee file found by
    coffeescript_files(); otherwise compiles only the given files.
    """
    targets = files if files else ["`{}`".format(coffeescript_files())]
    sh(cmd("node_modules/.bin/coffee", "--compile", *targets))
def compile_sass(debug=False):
    """
    Compile Sass to CSS.

    Arguments:
        debug (bool): when True, omit the `--style compressed` flag so
            the generated CSS stays readable.
    """
    theme_paths = theme_sass_paths()
    sh(cmd(
        'sass', '' if debug else '--style compressed',
        "--cache-location {cache}".format(cache=SASS_CACHE_PATH),
        # Theme sass directories are both importable (--load-path) and
        # compiled in place (--update).
        "--load-path", " ".join(SASS_LOAD_PATHS + theme_paths),
        "--update", "-E", "utf-8", " ".join(SASS_UPDATE_DIRS + theme_paths)
    ))
def compile_templated_sass(systems, settings):
    """
    Render Mako templates for Sass files.

    Arguments:
        systems: a list of systems (e.g. 'lms' or 'studio' or both)
        settings: the Django settings module to use.
    """
    # Renamed the loop variable from `sys`, which shadowed the name of
    # the stdlib `sys` module.
    for system in systems:
        sh(django_cmd(system, settings, 'preprocess_assets'))
sh(django_cmd(sys, settings, 'preprocess_assets'))
def process_xmodule_assets():
"""
Process XModule static assets.
"""
sh('xmodule_assets common/static/xmodule')
def collect_assets(systems, settings):
    """
    Collect static assets, including Django pipeline processing.

    Arguments:
        systems: a list of systems (e.g. 'lms' or 'studio' or both)
        settings: the Django settings module to use.
    """
    # Renamed the loop variable from `sys`, which shadowed the name of
    # the stdlib `sys` module.
    for system in systems:
        # collectstatic output is noisy; discard it.
        sh(django_cmd(system, settings, "collectstatic --noinput > /dev/null"))
@task
@cmdopts([('background', 'b', 'Background mode')])
def watch_assets(options):
    """
    Watch for changes to asset files, and regenerate js/css

    With `--background`, returns right after starting the observer (the
    caller owns the process lifetime); otherwise blocks until Ctrl-C.
    """
    observer = Observer()
    CoffeeScriptWatcher().register(observer)
    SassWatcher().register(observer)
    XModuleSassWatcher().register(observer)
    print("Starting asset watcher...")
    observer.start()
    if not getattr(options, 'background', False):
        # when running as a separate process, the main thread needs to loop
        # in order to allow for shutdown by Ctrl-C
        try:
            while True:
                observer.join(2)
        except KeyboardInterrupt:
            observer.stop()
        print("\nStopped asset watcher.")
@task
@needs('pavelib.prereqs.install_prereqs')
@consume_args
def | (args):
"""
Compile CoffeeScript and Sass, then collect static assets.
"""
parser = argparse.ArgumentParser(prog='paver update_assets')
parser.add_argument(
'system', type=str, nargs='*', default=['lms', 'studio'],
help="lms or studio",
)
parser.add_argument(
'--settings', type=str, default="dev",
help="Django settings module",
)
parser.add_argument(
'--debug', action='store_true', default=False,
help="Disable Sass compression",
)
parser.add_argument(
'--skip-collect', dest='collect', action='store_false', default=True,
help="Skip collection of static assets",
)
parser.add_argument(
'--watch', action='store_true', default=False,
help="Watch files for changes",
)
args = parser.parse_args(args)
compile_templated_sass(args.system, args.settings)
process_xmodule_assets()
compile_coffeescript()
compile_sass(args.debug)
if args.collect:
collect_assets(args.system, args.settings)
if args.watch:
call_task('watch_assets', options={'background': not args.debug})
| update_assets | identifier_name |
assets.py | """
Asset compilation and collection.
"""
from __future__ import print_function
import argparse
from paver.easy import sh, path, task, cmdopts, needs, consume_args, call_task
from watchdog.observers import Observer
from watchdog.events import PatternMatchingEventHandler
import glob
import traceback
from .utils.envs import Env
from .utils.cmd import cmd, django_cmd
COFFEE_DIRS = ['lms', 'cms', 'common']
SASS_LOAD_PATHS = ['./common/static/sass']
SASS_UPDATE_DIRS = ['*/static']
SASS_CACHE_PATH = '/tmp/sass-cache'
class CoffeeScriptWatcher(PatternMatchingEventHandler):
"""
Watches for coffeescript changes
"""
ignore_directories = True
patterns = ['*.coffee']
def register(self, observer):
"""
register files with observer
"""
dirnames = set()
for filename in sh(coffeescript_files(), capture=True).splitlines():
dirnames.add(path(filename).dirname())
for dirname in dirnames:
observer.schedule(self, dirname)
def on_modified(self, event):
print('\tCHANGED:', event.src_path)
try:
compile_coffeescript(event.src_path)
except Exception: # pylint: disable=W0703
traceback.print_exc()
class SassWatcher(PatternMatchingEventHandler):
"""
Watches for sass file changes
"""
ignore_directories = True
patterns = ['*.scss']
ignore_patterns = ['common/static/xmodule/*']
def register(self, observer):
"""
register files with observer
"""
for dirname in SASS_LOAD_PATHS + SASS_UPDATE_DIRS + theme_sass_paths():
paths = []
if '*' in dirname:
paths.extend(glob.glob(dirname))
else:
paths.append(dirname)
for dirname in paths:
observer.schedule(self, dirname, recursive=True)
def on_modified(self, event):
print('\tCHANGED:', event.src_path)
try:
compile_sass()
except Exception: # pylint: disable=W0703
traceback.print_exc()
class XModuleSassWatcher(SassWatcher):
"""
Watches for sass file changes
"""
ignore_directories = True
ignore_patterns = []
def register(self, observer):
"""
register files with observer
"""
observer.schedule(self, 'common/lib/xmodule/', recursive=True)
def on_modified(self, event):
print('\tCHANGED:', event.src_path)
try:
process_xmodule_assets()
except Exception: # pylint: disable=W0703
traceback.print_exc()
def theme_sass_paths():
"""
Return the a list of paths to the theme's sass assets,
or an empty list if no theme is configured.
"""
edxapp_env = Env()
if edxapp_env.feature_flags.get('USE_CUSTOM_THEME', False):
theme_name = edxapp_env.env_tokens.get('THEME_NAME', '')
parent_dir = path(edxapp_env.REPO_ROOT).abspath().parent
theme_root = parent_dir / "themes" / theme_name
return [theme_root / "static" / "sass"]
else:
return []
def coffeescript_files():
"""
return find command for paths containing coffee files
"""
dirs = " ".join([Env.REPO_ROOT / coffee_dir for coffee_dir in COFFEE_DIRS])
return cmd('find', dirs, '-type f', '-name \"*.coffee\"')
def compile_coffeescript(*files):
"""
Compile CoffeeScript to JavaScript.
"""
if not files:
files = ["`{}`".format(coffeescript_files())]
sh(cmd(
"node_modules/.bin/coffee", "--compile", *files
))
def compile_sass(debug=False):
"""
Compile Sass to CSS.
"""
theme_paths = theme_sass_paths()
sh(cmd(
'sass', '' if debug else '--style compressed',
"--cache-location {cache}".format(cache=SASS_CACHE_PATH),
"--load-path", " ".join(SASS_LOAD_PATHS + theme_paths),
"--update", "-E", "utf-8", " ".join(SASS_UPDATE_DIRS + theme_paths)
))
def compile_templated_sass(systems, settings):
"""
Render Mako templates for Sass files.
`systems` is a list of systems (e.g. 'lms' or 'studio' or both)
`settings` is the Django settings module to use.
"""
for sys in systems:
sh(django_cmd(sys, settings, 'preprocess_assets'))
def process_xmodule_assets():
"""
Process XModule static assets.
"""
sh('xmodule_assets common/static/xmodule')
def collect_assets(systems, settings):
"""
Collect static assets, including Django pipeline processing.
`systems` is a list of systems (e.g. 'lms' or 'studio' or both)
`settings` is the Django settings module to use.
"""
for sys in systems:
sh(django_cmd(sys, settings, "collectstatic --noinput > /dev/null"))
@task
@cmdopts([('background', 'b', 'Background mode')])
def watch_assets(options):
"""
Watch for changes to asset files, and regenerate js/css
"""
observer = Observer()
CoffeeScriptWatcher().register(observer)
SassWatcher().register(observer)
XModuleSassWatcher().register(observer)
print("Starting asset watcher...")
observer.start()
if not getattr(options, 'background', False):
# when running as a separate process, the main thread needs to loop
# in order to allow for shutdown by contrl-c
try:
while True:
observer.join(2)
except KeyboardInterrupt:
observer.stop()
print("\nStopped asset watcher.")
@task
@needs('pavelib.prereqs.install_prereqs')
@consume_args
def update_assets(args):
"""
Compile CoffeeScript and Sass, then collect static assets.
"""
parser = argparse.ArgumentParser(prog='paver update_assets')
parser.add_argument(
'system', type=str, nargs='*', default=['lms', 'studio'],
help="lms or studio",
)
parser.add_argument(
'--settings', type=str, default="dev",
help="Django settings module",
)
parser.add_argument(
'--debug', action='store_true', default=False,
help="Disable Sass compression",
)
parser.add_argument(
'--skip-collect', dest='collect', action='store_false', default=True,
help="Skip collection of static assets",
)
parser.add_argument(
'--watch', action='store_true', default=False,
help="Watch files for changes",
)
args = parser.parse_args(args)
compile_templated_sass(args.system, args.settings)
process_xmodule_assets()
compile_coffeescript() |
if args.watch:
call_task('watch_assets', options={'background': not args.debug}) | compile_sass(args.debug)
if args.collect:
collect_assets(args.system, args.settings) | random_line_split |
assets.py | """
Asset compilation and collection.
"""
from __future__ import print_function
import argparse
from paver.easy import sh, path, task, cmdopts, needs, consume_args, call_task
from watchdog.observers import Observer
from watchdog.events import PatternMatchingEventHandler
import glob
import traceback
from .utils.envs import Env
from .utils.cmd import cmd, django_cmd
COFFEE_DIRS = ['lms', 'cms', 'common']
SASS_LOAD_PATHS = ['./common/static/sass']
SASS_UPDATE_DIRS = ['*/static']
SASS_CACHE_PATH = '/tmp/sass-cache'
class CoffeeScriptWatcher(PatternMatchingEventHandler):
"""
Watches for coffeescript changes
"""
ignore_directories = True
patterns = ['*.coffee']
def register(self, observer):
"""
register files with observer
"""
dirnames = set()
for filename in sh(coffeescript_files(), capture=True).splitlines():
dirnames.add(path(filename).dirname())
for dirname in dirnames:
observer.schedule(self, dirname)
def on_modified(self, event):
print('\tCHANGED:', event.src_path)
try:
compile_coffeescript(event.src_path)
except Exception: # pylint: disable=W0703
traceback.print_exc()
class SassWatcher(PatternMatchingEventHandler):
"""
Watches for sass file changes
"""
ignore_directories = True
patterns = ['*.scss']
ignore_patterns = ['common/static/xmodule/*']
def register(self, observer):
"""
register files with observer
"""
for dirname in SASS_LOAD_PATHS + SASS_UPDATE_DIRS + theme_sass_paths():
paths = []
if '*' in dirname:
paths.extend(glob.glob(dirname))
else:
paths.append(dirname)
for dirname in paths:
|
def on_modified(self, event):
print('\tCHANGED:', event.src_path)
try:
compile_sass()
except Exception: # pylint: disable=W0703
traceback.print_exc()
class XModuleSassWatcher(SassWatcher):
"""
Watches for sass file changes
"""
ignore_directories = True
ignore_patterns = []
def register(self, observer):
"""
register files with observer
"""
observer.schedule(self, 'common/lib/xmodule/', recursive=True)
def on_modified(self, event):
print('\tCHANGED:', event.src_path)
try:
process_xmodule_assets()
except Exception: # pylint: disable=W0703
traceback.print_exc()
def theme_sass_paths():
"""
Return the a list of paths to the theme's sass assets,
or an empty list if no theme is configured.
"""
edxapp_env = Env()
if edxapp_env.feature_flags.get('USE_CUSTOM_THEME', False):
theme_name = edxapp_env.env_tokens.get('THEME_NAME', '')
parent_dir = path(edxapp_env.REPO_ROOT).abspath().parent
theme_root = parent_dir / "themes" / theme_name
return [theme_root / "static" / "sass"]
else:
return []
def coffeescript_files():
"""
return find command for paths containing coffee files
"""
dirs = " ".join([Env.REPO_ROOT / coffee_dir for coffee_dir in COFFEE_DIRS])
return cmd('find', dirs, '-type f', '-name \"*.coffee\"')
def compile_coffeescript(*files):
"""
Compile CoffeeScript to JavaScript.
"""
if not files:
files = ["`{}`".format(coffeescript_files())]
sh(cmd(
"node_modules/.bin/coffee", "--compile", *files
))
def compile_sass(debug=False):
"""
Compile Sass to CSS.
"""
theme_paths = theme_sass_paths()
sh(cmd(
'sass', '' if debug else '--style compressed',
"--cache-location {cache}".format(cache=SASS_CACHE_PATH),
"--load-path", " ".join(SASS_LOAD_PATHS + theme_paths),
"--update", "-E", "utf-8", " ".join(SASS_UPDATE_DIRS + theme_paths)
))
def compile_templated_sass(systems, settings):
"""
Render Mako templates for Sass files.
`systems` is a list of systems (e.g. 'lms' or 'studio' or both)
`settings` is the Django settings module to use.
"""
for sys in systems:
sh(django_cmd(sys, settings, 'preprocess_assets'))
def process_xmodule_assets():
"""
Process XModule static assets.
"""
sh('xmodule_assets common/static/xmodule')
def collect_assets(systems, settings):
"""
Collect static assets, including Django pipeline processing.
`systems` is a list of systems (e.g. 'lms' or 'studio' or both)
`settings` is the Django settings module to use.
"""
for sys in systems:
sh(django_cmd(sys, settings, "collectstatic --noinput > /dev/null"))
@task
@cmdopts([('background', 'b', 'Background mode')])
def watch_assets(options):
"""
Watch for changes to asset files, and regenerate js/css
"""
observer = Observer()
CoffeeScriptWatcher().register(observer)
SassWatcher().register(observer)
XModuleSassWatcher().register(observer)
print("Starting asset watcher...")
observer.start()
if not getattr(options, 'background', False):
# when running as a separate process, the main thread needs to loop
# in order to allow for shutdown by contrl-c
try:
while True:
observer.join(2)
except KeyboardInterrupt:
observer.stop()
print("\nStopped asset watcher.")
@task
@needs('pavelib.prereqs.install_prereqs')
@consume_args
def update_assets(args):
"""
Compile CoffeeScript and Sass, then collect static assets.
"""
parser = argparse.ArgumentParser(prog='paver update_assets')
parser.add_argument(
'system', type=str, nargs='*', default=['lms', 'studio'],
help="lms or studio",
)
parser.add_argument(
'--settings', type=str, default="dev",
help="Django settings module",
)
parser.add_argument(
'--debug', action='store_true', default=False,
help="Disable Sass compression",
)
parser.add_argument(
'--skip-collect', dest='collect', action='store_false', default=True,
help="Skip collection of static assets",
)
parser.add_argument(
'--watch', action='store_true', default=False,
help="Watch files for changes",
)
args = parser.parse_args(args)
compile_templated_sass(args.system, args.settings)
process_xmodule_assets()
compile_coffeescript()
compile_sass(args.debug)
if args.collect:
collect_assets(args.system, args.settings)
if args.watch:
call_task('watch_assets', options={'background': not args.debug})
| observer.schedule(self, dirname, recursive=True) | conditional_block |
IconButton.tsx | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/ |
interface IconButtonProps {
icon: JSX.Element;
label?: string;
onClick: MouseEventHandler<HTMLDivElement>;
}
const StyledDiv = styled.div`
display: flex;
align-items: center;
color: ${({ theme }) => theme.colors.grayscale.base};
&:hover {
color: ${({ theme }) => theme.colors.primary.base};
}
`;
const StyledSpan = styled.span`
margin-left: ${({ theme }) => theme.gridUnit * 2}px;
`;
const IconButton = ({ icon, label, onClick }: IconButtonProps) => (
<StyledDiv
tabIndex={0}
role="button"
onClick={e => {
e.preventDefault();
onClick(e);
}}
>
{icon}
{label && <StyledSpan>{label}</StyledSpan>}
</StyledDiv>
);
export default IconButton; | import React, { MouseEventHandler } from 'react';
import { styled } from '@superset-ui/core'; | random_line_split |
oauth_app.py | """
Copyright (c) 2012-2013 RockStor, Inc. <http://rockstor.com>
This file is part of RockStor.
RockStor is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published
by the Free Software Foundation; either version 2 of the License,
or (at your option) any later version.
RockStor is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from rest_framework.response import Response
from django.db import transaction
from oauth2_provider.models import Application as OauthApplication
from storageadmin.models import (OauthApp, User)
from storageadmin.serializers import OauthAppSerializer
import rest_framework_custom as rfc
from storageadmin.exceptions import RockStorAPIException
from storageadmin.util import handle_exception
class OauthAppView(rfc.GenericView):
serializer_class = OauthAppSerializer
def get_queryset(self, *args, **kwargs):
|
@transaction.atomic
def post(self, request):
with self._handle_exception(request):
name = request.data['name']
username = request.user.username
if (OauthApp.objects.filter(name=name).exists()):
e_msg = ('application with name: %s already exists.' % name)
handle_exception(Exception(e_msg), request)
try:
user = User.objects.get(username=username)
except:
e_msg = ('User with name: %s does not exist' % username)
handle_exception(Exception(e_msg), request)
client_type = OauthApplication.CLIENT_CONFIDENTIAL
auth_grant_type = OauthApplication.GRANT_CLIENT_CREDENTIALS
app = OauthApplication(name=name, client_type=client_type,
authorization_grant_type=auth_grant_type,
user=user.user)
app.save()
oauth_app = OauthApp(name=name, application=app, user=user)
oauth_app.save()
return Response(OauthAppSerializer(oauth_app).data)
@transaction.atomic
def delete(self, request, name):
with self._handle_exception(request):
try:
app = OauthApp.objects.get(name=name)
except:
e_msg = ('application with name: %s does not exist' % name)
handle_exception(Exception(e_msg), request)
app.application.delete()
app.delete()
return Response()
| if ('name' in self.kwargs):
self.paginate_by = 0
try:
return OauthApp.objects.get(name=self.kwargs['name'])
except:
return []
return OauthApp.objects.all() | identifier_body |
oauth_app.py | """
Copyright (c) 2012-2013 RockStor, Inc. <http://rockstor.com>
This file is part of RockStor.
RockStor is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published
by the Free Software Foundation; either version 2 of the License,
or (at your option) any later version.
RockStor is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from rest_framework.response import Response
from django.db import transaction
from oauth2_provider.models import Application as OauthApplication
from storageadmin.models import (OauthApp, User)
from storageadmin.serializers import OauthAppSerializer
import rest_framework_custom as rfc
from storageadmin.exceptions import RockStorAPIException
from storageadmin.util import handle_exception
class OauthAppView(rfc.GenericView):
serializer_class = OauthAppSerializer
def get_queryset(self, *args, **kwargs):
if ('name' in self.kwargs):
|
return OauthApp.objects.all()
@transaction.atomic
def post(self, request):
with self._handle_exception(request):
name = request.data['name']
username = request.user.username
if (OauthApp.objects.filter(name=name).exists()):
e_msg = ('application with name: %s already exists.' % name)
handle_exception(Exception(e_msg), request)
try:
user = User.objects.get(username=username)
except:
e_msg = ('User with name: %s does not exist' % username)
handle_exception(Exception(e_msg), request)
client_type = OauthApplication.CLIENT_CONFIDENTIAL
auth_grant_type = OauthApplication.GRANT_CLIENT_CREDENTIALS
app = OauthApplication(name=name, client_type=client_type,
authorization_grant_type=auth_grant_type,
user=user.user)
app.save()
oauth_app = OauthApp(name=name, application=app, user=user)
oauth_app.save()
return Response(OauthAppSerializer(oauth_app).data)
@transaction.atomic
def delete(self, request, name):
with self._handle_exception(request):
try:
app = OauthApp.objects.get(name=name)
except:
e_msg = ('application with name: %s does not exist' % name)
handle_exception(Exception(e_msg), request)
app.application.delete()
app.delete()
return Response()
| self.paginate_by = 0
try:
return OauthApp.objects.get(name=self.kwargs['name'])
except:
return [] | conditional_block |
oauth_app.py | """
Copyright (c) 2012-2013 RockStor, Inc. <http://rockstor.com>
This file is part of RockStor.
RockStor is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published
by the Free Software Foundation; either version 2 of the License,
or (at your option) any later version.
RockStor is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from rest_framework.response import Response | from storageadmin.exceptions import RockStorAPIException
from storageadmin.util import handle_exception
class OauthAppView(rfc.GenericView):
serializer_class = OauthAppSerializer
def get_queryset(self, *args, **kwargs):
if ('name' in self.kwargs):
self.paginate_by = 0
try:
return OauthApp.objects.get(name=self.kwargs['name'])
except:
return []
return OauthApp.objects.all()
@transaction.atomic
def post(self, request):
with self._handle_exception(request):
name = request.data['name']
username = request.user.username
if (OauthApp.objects.filter(name=name).exists()):
e_msg = ('application with name: %s already exists.' % name)
handle_exception(Exception(e_msg), request)
try:
user = User.objects.get(username=username)
except:
e_msg = ('User with name: %s does not exist' % username)
handle_exception(Exception(e_msg), request)
client_type = OauthApplication.CLIENT_CONFIDENTIAL
auth_grant_type = OauthApplication.GRANT_CLIENT_CREDENTIALS
app = OauthApplication(name=name, client_type=client_type,
authorization_grant_type=auth_grant_type,
user=user.user)
app.save()
oauth_app = OauthApp(name=name, application=app, user=user)
oauth_app.save()
return Response(OauthAppSerializer(oauth_app).data)
@transaction.atomic
def delete(self, request, name):
with self._handle_exception(request):
try:
app = OauthApp.objects.get(name=name)
except:
e_msg = ('application with name: %s does not exist' % name)
handle_exception(Exception(e_msg), request)
app.application.delete()
app.delete()
return Response() | from django.db import transaction
from oauth2_provider.models import Application as OauthApplication
from storageadmin.models import (OauthApp, User)
from storageadmin.serializers import OauthAppSerializer
import rest_framework_custom as rfc | random_line_split |
oauth_app.py | """
Copyright (c) 2012-2013 RockStor, Inc. <http://rockstor.com>
This file is part of RockStor.
RockStor is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published
by the Free Software Foundation; either version 2 of the License,
or (at your option) any later version.
RockStor is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from rest_framework.response import Response
from django.db import transaction
from oauth2_provider.models import Application as OauthApplication
from storageadmin.models import (OauthApp, User)
from storageadmin.serializers import OauthAppSerializer
import rest_framework_custom as rfc
from storageadmin.exceptions import RockStorAPIException
from storageadmin.util import handle_exception
class | (rfc.GenericView):
serializer_class = OauthAppSerializer
def get_queryset(self, *args, **kwargs):
if ('name' in self.kwargs):
self.paginate_by = 0
try:
return OauthApp.objects.get(name=self.kwargs['name'])
except:
return []
return OauthApp.objects.all()
@transaction.atomic
def post(self, request):
with self._handle_exception(request):
name = request.data['name']
username = request.user.username
if (OauthApp.objects.filter(name=name).exists()):
e_msg = ('application with name: %s already exists.' % name)
handle_exception(Exception(e_msg), request)
try:
user = User.objects.get(username=username)
except:
e_msg = ('User with name: %s does not exist' % username)
handle_exception(Exception(e_msg), request)
client_type = OauthApplication.CLIENT_CONFIDENTIAL
auth_grant_type = OauthApplication.GRANT_CLIENT_CREDENTIALS
app = OauthApplication(name=name, client_type=client_type,
authorization_grant_type=auth_grant_type,
user=user.user)
app.save()
oauth_app = OauthApp(name=name, application=app, user=user)
oauth_app.save()
return Response(OauthAppSerializer(oauth_app).data)
@transaction.atomic
def delete(self, request, name):
with self._handle_exception(request):
try:
app = OauthApp.objects.get(name=name)
except:
e_msg = ('application with name: %s does not exist' % name)
handle_exception(Exception(e_msg), request)
app.application.delete()
app.delete()
return Response()
| OauthAppView | identifier_name |
api.ts | import { Conversation, Message, MessagingClient } from '@botpress/messaging-client'
import apicache from 'apicache'
import aws from 'aws-sdk'
import axios from 'axios'
import * as sdk from 'botpress/sdk'
import { asyncMiddleware as asyncMw, BPRequest } from 'common/http'
import { Request, Response, NextFunction } from 'express'
import FormData from 'form-data'
import _ from 'lodash'
import moment from 'moment'
import multer from 'multer'
import multers3 from 'multer-s3'
import path from 'path'
import { Config } from '../config'
import Database from './db'
const ERR_USER_ID_INVALID = 'user id associated with this session must be valid'
const ERR_MSG_TYPE = '`type` is required and must be valid'
const ERR_CONV_ID_REQ = '`conversationId` is required and must be valid'
const ERR_BAD_LANGUAGE = '`language` is required and must be valid'
const ERR_BAD_CONV_ID = "The conversation ID doesn't belong to that user"
const ERR_BAD_USER_SESSION_ID = 'session id is invalid'
const USER_ID_MAX_LENGTH = 40
const MAX_MESSAGE_HISTORY = 100
const SUPPORTED_MESSAGES = [
'text',
'quick_reply',
'form',
'login_prompt',
'visit',
'request_start_conversation',
'postback',
'voice'
]
const WEBCHAT_CUSTOM_ID_KEY = 'webchatCustomId'
type ChatRequest = BPRequest & {
visitorId: string
userId: string
botId: string
conversationId: string
messaging: MessagingClient
}
const userIdIsValid = (userId: string): boolean => {
const hasBreakingConstraints = userId.length > USER_ID_MAX_LENGTH || userId.toLowerCase() === 'undefined'
return !hasBreakingConstraints && /[a-z0-9-_]+/i.test(userId)
}
export default async (bp: typeof sdk, db: Database) => {
const asyncMiddleware = asyncMw(bp.logger)
const globalConfig = (await bp.config.getModuleConfig('channel-web')) as Config
const diskStorage = multer.diskStorage({
destination: globalConfig.fileUploadPath,
// @ts-ignore typing indicates that limits isn't supported
limits: {
files: 1,
fileSize: 5242880 // 5MB
},
| (req, file, cb) {
const userId = _.get(req, 'params.userId') || 'anonymous'
const ext = path.extname(file.originalname)
cb(undefined, `${userId}_${new Date().getTime()}${ext}`)
}
})
let upload = multer({ storage: diskStorage })
if (globalConfig.uploadsUseS3) {
/*
You can override AWS's default settings here. Example:
{ region: 'us-east-1', apiVersion: '2014-10-01', credentials: {...} }
*/
const awsConfig = {
region: globalConfig.uploadsS3Region,
credentials: {
accessKeyId: globalConfig.uploadsS3AWSAccessKey,
secretAccessKey: globalConfig.uploadsS3AWSAccessSecret
}
}
if (!awsConfig.credentials.accessKeyId && !awsConfig.credentials.secretAccessKey) {
delete awsConfig.credentials
}
if (!awsConfig.region) {
delete awsConfig.region
}
// TODO use media service with a 's3' backend
const s3 = new aws.S3(awsConfig)
const s3Storage = multers3({
s3,
bucket: globalConfig.uploadsS3Bucket || 'uploads',
contentType: multers3.AUTO_CONTENT_TYPE,
cacheControl: 'max-age=31536000', // one year caching
acl: 'public-read',
key(req, file, cb) {
const userId = _.get(req, 'params.userId') || 'anonymous'
const ext = path.extname(file.originalname)
cb(undefined, `${userId}_${new Date().getTime()}${ext}`)
}
})
upload = multer({ storage: s3Storage })
}
const router = bp.http.createRouterForBot('channel-web', { checkAuthentication: false, enableJsonBodyParser: true })
const perBotCache = apicache.options({
appendKey: (req: Request, _res: Response) => `${req.method} for bot ${req.params?.boId}`,
statusCodes: { include: [200] }
}).middleware
const assertUserInfo = (options: { convoIdRequired?: boolean } = {}) => async (
req: ChatRequest,
_res: Response,
next: NextFunction
) => {
const { botId } = req.params
const { conversationId, webSessionId } = req.body || {}
req.visitorId = await bp.realtime.getVisitorIdFromGuestSocketId(webSessionId)
if (!req.visitorId) {
return next(ERR_BAD_USER_SESSION_ID)
}
if (!userIdIsValid(req.visitorId)) {
return next(ERR_USER_ID_INVALID)
}
req.messaging = await db.getMessagingClient(botId)
const userId = await db.mapVisitor(botId, req.visitorId, req.messaging)
if (conversationId) {
let conversation: Conversation
try {
conversation = await req.messaging.getConversation(conversationId)
} catch {}
if (!conversation || !userId || conversation.userId !== userId) {
return next(ERR_BAD_CONV_ID)
}
req.conversationId = conversationId
}
if (options.convoIdRequired && req.conversationId === undefined) {
return next(ERR_CONV_ID_REQ)
}
req.botId = botId
req.userId = userId
next()
}
const getRecent = async (messaging: MessagingClient, userId: string) => {
const convs = await messaging.listConversations(userId, 1)
if (convs?.length) {
return convs[0]
}
return messaging.createConversation(userId)
}
router.get(
'/botInfo',
perBotCache('1 minute'),
asyncMiddleware(async (req: BPRequest, res: Response) => {
const { botId } = req.params
const security = ((await bp.config.getModuleConfig('channel-web')) as Config).security // usage of global because a user could overwrite bot scoped configs
const config = (await bp.config.getModuleConfigForBot('channel-web', botId)) as Config
const botInfo = await bp.bots.getBotById(botId)
if (!botInfo) {
return res.sendStatus(404)
}
res.send({
showBotInfoPage: (config.infoPage && config.infoPage.enabled) || config.showBotInfoPage,
name: botInfo.name,
description: (config.infoPage && config.infoPage.description) || botInfo.description,
details: botInfo.details,
languages: botInfo.languages,
extraStylesheet: config.extraStylesheet,
disableNotificationSound: config.disableNotificationSound,
security,
lazySocket: config.lazySocket,
maxMessageLength: config.maxMessageLength
})
})
)
router.post(
'/users/customId',
bp.http.extractExternalToken,
assertUserInfo(),
asyncMiddleware(async (req: ChatRequest, res: Response) => {
const { botId, userId } = req
const { customId } = req.body
if (!customId) {
return res.sendStatus(400)
}
await bp.users.getOrCreateUser('web', userId, botId)
await bp.users.updateAttributes('web', userId, { [WEBCHAT_CUSTOM_ID_KEY]: customId })
res.sendStatus(200)
})
)
router.post(
'/messages',
bp.http.extractExternalToken,
assertUserInfo(),
asyncMiddleware(async (req: ChatRequest, res: Response) => {
const { botId, userId } = req
const user = await bp.users.getOrCreateUser('web', userId, botId)
const payload = req.body.payload || {}
if (!SUPPORTED_MESSAGES.includes(payload.type)) {
return res.status(400).send(ERR_MSG_TYPE)
}
if (payload.type === 'visit') {
const { timezone, language } = payload
const isValidTimezone = _.isNumber(timezone) && timezone >= -12 && timezone <= 14 && timezone % 0.5 === 0
const isValidLanguage = language.length < 4 && !_.get(user, 'result.attributes.language')
const newAttributes = {
...(isValidTimezone && { timezone }),
...(isValidLanguage && { language })
}
if (Object.getOwnPropertyNames(newAttributes).length) {
await bp.users.updateAttributes('web', userId, newAttributes)
}
}
if (!req.conversationId) {
req.conversationId = (await getRecent(req.messaging, userId)).id
}
await sendNewMessage(req, payload, !!req.headers.authorization)
res.sendStatus(200)
})
)
router.post(
'/messages/files',
upload.single('file'),
bp.http.extractExternalToken,
assertUserInfo({ convoIdRequired: true }),
asyncMiddleware(async (req: ChatRequest & any, res: Response) => {
const { botId, userId } = req
const payloadValue = req.body.payload || {}
await bp.users.getOrCreateUser('web', userId, botId) // Just to create the user if it doesn't exist
const payload = {
text: `Uploaded a file **${req.file.originalname}**`,
type: 'file',
storage: req.file.location ? 's3' : 'local',
url: req.file.location || req.file.path || undefined,
name: req.file.filename,
originalName: req.file.originalname,
mime: req.file.contentType || req.file.mimetype,
size: req.file.size,
payload: payloadValue
}
await sendNewMessage(req, payload, false)
return res.sendStatus(200)
})
)
router.post(
'/messages/voice',
bp.http.extractExternalToken,
assertUserInfo({ convoIdRequired: true }),
asyncMiddleware(async (req: ChatRequest, res: Response) => {
const { botId, userId } = req
const { audio } = req.body
if (!audio?.buffer || !audio?.title) {
throw new Error('Voices messages must contain an audio buffer and title')
}
await bp.users.getOrCreateUser('web', userId, botId) // Just to create the user if it doesn't exist
const buffer = Buffer.from(audio!.buffer, 'base64')
const formData = new FormData()
formData.append('file', buffer, audio!.title)
const axiosConfig = await bp.http.getAxiosConfigForBot(botId, { studioUrl: true })
axiosConfig.headers['Content-Type'] = `multipart/form-data; boundary=${formData.getBoundary()}`
// Upload the audio buffer to the Media Service
const {
data: { url }
} = await axios.post<{ url: string }>('/media', formData, {
...axiosConfig
})
const payload = {
type: 'voice',
audio: url
}
await sendNewMessage(req, payload, false)
return res.sendStatus(200)
})
)
router.post(
'/conversations/get',
assertUserInfo({ convoIdRequired: true }),
asyncMiddleware(async (req: ChatRequest, res: Response) => {
const { conversationId, botId } = req
const config = (await bp.config.getModuleConfigForBot('channel-web', botId)) as Config
const conversation = await req.messaging.getConversation(conversationId)
const messages = await req.messaging.listMessages(conversationId, config.maxMessagesHistory)
// this function scope can (and probably will) expand to other types as well with a switch case on the type
// we do something similar in the cms to determine weather there are translated fields or not
const notEmptyPayload = payload => (payload.type === 'text' ? !!payload.text : true)
const displayableMessages = messages.filter(({ payload }) => payload.type !== 'visit' && notEmptyPayload(payload))
return res.send({ ...conversation, messages: displayableMessages })
})
)
router.post(
'/conversations/list',
assertUserInfo(),
asyncMiddleware(async (req: ChatRequest, res: Response) => {
const { userId, botId } = req
await bp.users.getOrCreateUser('web', userId, botId)
const conversations = await req.messaging.listConversations(userId, MAX_MESSAGE_HISTORY)
const config = await bp.config.getModuleConfigForBot('channel-web', botId)
const convsWithLastMessage: (Conversation & { lastMessage?: Message })[] = []
for (const conversation of conversations) {
convsWithLastMessage.push({
...conversation,
lastMessage: (await req.messaging.listMessages(conversation.id, 1))[0]
})
}
return res.send({
conversations: convsWithLastMessage,
startNewConvoOnTimeout: config.startNewConvoOnTimeout,
recentConversationLifetime: config.recentConversationLifetime
})
})
)
async function sendNewMessage(req: ChatRequest, payload: any, useDebugger: boolean) {
const config = await bp.config.getModuleConfigForBot('channel-web', req.botId)
if (payload.type === 'voice') {
if (_.isEmpty(payload.audio)) {
throw new Error('Voices messages must contain an audio buffer')
}
} else if (
(!payload.text || !_.isString(payload.text) || payload.text.length > config.maxMessageLength) &&
payload.type !== 'postback'
) {
throw new Error(`Text must be a valid string of less than ${config.maxMessageLength} chars`)
}
let sanitizedPayload = payload
if (payload.sensitive) {
const sensitive = Array.isArray(payload.sensitive) ? payload.sensitive : [payload.sensitive]
sanitizedPayload = _.omit(payload, [...sensitive, 'sensitive'])
}
const message = await req.messaging.createMessage(req.conversationId, req.userId, sanitizedPayload)
const event = bp.IO.Event({
messageId: message.id,
botId: req.botId,
channel: 'web',
direction: 'incoming',
payload,
target: req.userId,
threadId: req.conversationId,
type: payload.type,
credentials: req.credentials
})
if (useDebugger) {
event.debugger = true
}
bp.realtime.sendPayload(bp.RealTimePayload.forVisitor(req.visitorId, 'webchat.message', message))
await bp.events.sendEvent(event)
}
router.post(
'/events',
bp.http.extractExternalToken,
assertUserInfo(),
asyncMiddleware(async (req: ChatRequest, res: Response) => {
const { userId, botId } = req
let { conversationId } = req
const payload = req.body.payload || {}
await bp.users.getOrCreateUser('web', userId, botId)
if (!conversationId) {
conversationId = (await getRecent(req.messaging, userId)).id
}
const event = bp.IO.Event({
botId,
channel: 'web',
direction: 'incoming',
target: userId,
threadId: conversationId.toString(),
type: payload.type,
payload,
credentials: req.credentials
})
await bp.events.sendEvent(event)
res.sendStatus(200)
})
)
router.post(
'/saveFeedback',
bp.http.extractExternalToken,
asyncMiddleware(async (req: BPRequest, res: Response) => {
const { botId } = req.params
const { messageId, target, feedback } = req.body
if (!target || !messageId || !feedback) {
return res.status(400).send('Missing required fields')
}
const [event] = await bp.events.findEvents({ botId, messageId })
const { userId } = await db.getMappingFromVisitor(botId, target)
try {
await bp.events.saveUserFeedback(event.incomingEventId, userId, feedback, 'qna')
res.sendStatus(200)
} catch (err) {
res.status(400).send(err)
}
})
)
router.post(
'/feedbackInfo',
bp.http.extractExternalToken,
asyncMiddleware(async (req: BPRequest, res: Response) => {
const { botId } = req.params
const { target, messageIds } = req.body
if (!target || !messageIds) {
return res.status(400).send('Missing required fields')
}
const { userId } = await db.getMappingFromVisitor(botId, target)
res.send(await db.getFeedbackInfoForMessageIds(userId, messageIds))
})
)
router.post(
'/conversations/reset',
bp.http.extractExternalToken,
assertUserInfo({ convoIdRequired: true }),
asyncMiddleware(async (req: ChatRequest, res: Response) => {
const { botId, userId, conversationId } = req
await bp.users.getOrCreateUser('web', userId, botId)
const payload = {
text: 'Reset the conversation',
type: 'session_reset'
}
await sendNewMessage(req, payload, false)
const sessionId = bp.dialog.createId({
botId,
target: userId,
threadId: conversationId.toString(),
channel: 'web'
})
await bp.dialog.deleteSession(sessionId, botId)
res.sendStatus(200)
})
)
router.post(
'/conversations/new',
assertUserInfo(),
asyncMiddleware(async (req: ChatRequest, res: Response) => {
const { userId } = req
const conversation = await req.messaging.createConversation(userId)
res.send({ convoId: conversation.id })
})
)
router.post(
'/conversations/reference',
assertUserInfo(),
asyncMiddleware(async (req: ChatRequest, res: Response) => {
try {
const { botId, userId } = req
const { reference } = req.body
let { conversationId } = req
await bp.users.getOrCreateUser('web', userId, botId)
if (typeof reference !== 'string' || !reference.length || reference.indexOf('=') === -1) {
throw new Error('Invalid reference')
}
if (!conversationId) {
conversationId = (await getRecent(req.messaging, userId)).id
}
const message = reference.slice(0, reference.lastIndexOf('='))
const signature = reference.slice(reference.lastIndexOf('=') + 1)
const verifySignature = await bp.security.getMessageSignature(message)
if (verifySignature !== signature) {
throw new Error('Bad reference signature')
}
const payload = {
text: message,
signature,
type: 'session_reference'
}
const event = bp.IO.Event({
botId,
channel: 'web',
direction: 'incoming',
target: userId,
threadId: conversationId.toString(),
type: payload.type,
payload,
credentials: req['credentials']
})
await bp.events.sendEvent(event)
res.sendStatus(200)
} catch (error) {
res.status(500).send({ message: error.message })
}
})
)
router.post(
'/preferences/get',
assertUserInfo(),
asyncMiddleware(async (req: ChatRequest, res: Response) => {
const { userId, botId } = req
const { result } = await bp.users.getOrCreateUser('web', userId, botId)
return res.send({ language: result.attributes.language })
})
)
router.post(
'/preferences',
assertUserInfo(),
asyncMiddleware(async (req: ChatRequest, res: Response) => {
const { userId, botId } = req
const payload = req.body || {}
const preferredLanguage = payload.language
const bot = await bp.bots.getBotById(botId)
const validLanguage = bot.languages.includes(preferredLanguage)
if (!validLanguage) {
return res.status(400).send(ERR_BAD_LANGUAGE)
}
await bp.users.updateAttributes('web', userId, {
language: preferredLanguage
})
return res.sendStatus(200)
})
)
const getMessageContent = (message, type) => {
const { payload } = message
if (type === 'file') {
return (payload && payload.url) || message.message_data.url
}
const wrappedText = _.get(payload, 'wrapped.text')
return (payload && payload.text) || message.message_text || wrappedText || `Event (${type})`
}
const convertToTxtFile = async (botId: string, conversation: Conversation & { messages: Message[] }) => {
const { messages } = conversation
const { result: user } = await bp.users.getOrCreateUser('web', conversation.userId)
const timeFormat = 'MM/DD/YY HH:mm'
const fullName = `${user.attributes['first_name'] || ''} ${user.attributes['last_name'] || ''}`
const metadata = `Conversation Id: ${conversation.id}\r\nCreated on: ${moment(conversation.createdOn).format(
timeFormat
)}\r\nUser: ${fullName}\r\n-----------------\r\n`
const messagesAsTxt = messages.map(message => {
const type = message.payload?.type
if (type === 'session_reset') {
return ''
}
return `[${moment(message.sentOn).format(timeFormat)}] ${message.authorId ? 'User' : botId}: ${getMessageContent(
message,
type
)}\r\n`
})
return [metadata, ...messagesAsTxt].join('')
}
router.post(
'/conversations/download/txt',
assertUserInfo({ convoIdRequired: true }),
asyncMiddleware(async (req: ChatRequest, res: Response) => {
const { conversationId, botId } = req
const config = (await bp.config.getModuleConfigForBot('channel-web', botId)) as Config
const conversation = await req.messaging.getConversation(conversationId)
const messages = await req.messaging.listMessages(conversationId, config.maxMessagesHistory)
const txt = await convertToTxtFile(botId, { ...conversation, messages })
res.send({ txt, name: `Conversation ${conversation.id}.txt` })
})
)
router.post(
'/conversations/messages/delete',
assertUserInfo({ convoIdRequired: true }),
asyncMiddleware(async (req: ChatRequest, res: Response) => {
const { visitorId, conversationId } = req
bp.realtime.sendPayload(bp.RealTimePayload.forVisitor(visitorId, 'webchat.clear', { conversationId }))
await req.messaging.deleteMessagesByConversation(conversationId)
res.sendStatus(204)
})
)
// NOTE: this is a temporary route and allows an agent to delete a channel web user's conversation messages
// until today this was completed by calling channel web api directly but it's api has been secured with a temporary sessionId
// soon enough, once channel-web's implementation moves to messaging api we'll be able to remove this and use messaging directly
// usage of a private router because authentication is handled for us
const privateRouter = bp.http.createRouterForBot('channel-web-private')
// NOTE : this uses duplicated code taken from public route (ln#624 - ln#636) so it's easy to remove once we can (see prev note)
privateRouter.post(
'/conversations/:id/messages/delete',
asyncMiddleware(async (req: ChatRequest, res: Response) => {
const { botId } = req.params
const conversationId = req.params.id
const { userId } = req.body
const messaging = await db.getMessagingClient(botId)
const conversation = await messaging.getConversation(conversationId)
if (!userId || conversation?.userId !== userId) {
return res.status(400).send(ERR_BAD_CONV_ID)
}
const { visitorId } = await db.getMappingFromUser(userId)
bp.realtime.sendPayload(bp.RealTimePayload.forVisitor(visitorId, 'webchat.clear', { conversationId }))
await messaging.deleteMessagesByConversation(conversationId)
res.sendStatus(204)
})
)
}
| filename | identifier_name |
api.ts | import { Conversation, Message, MessagingClient } from '@botpress/messaging-client'
import apicache from 'apicache'
import aws from 'aws-sdk'
import axios from 'axios'
import * as sdk from 'botpress/sdk'
import { asyncMiddleware as asyncMw, BPRequest } from 'common/http'
import { Request, Response, NextFunction } from 'express'
import FormData from 'form-data'
import _ from 'lodash'
import moment from 'moment'
import multer from 'multer'
import multers3 from 'multer-s3'
import path from 'path'
import { Config } from '../config'
import Database from './db'
const ERR_USER_ID_INVALID = 'user id associated with this session must be valid' |
const USER_ID_MAX_LENGTH = 40
const MAX_MESSAGE_HISTORY = 100
const SUPPORTED_MESSAGES = [
'text',
'quick_reply',
'form',
'login_prompt',
'visit',
'request_start_conversation',
'postback',
'voice'
]
const WEBCHAT_CUSTOM_ID_KEY = 'webchatCustomId'
type ChatRequest = BPRequest & {
visitorId: string
userId: string
botId: string
conversationId: string
messaging: MessagingClient
}
const userIdIsValid = (userId: string): boolean => {
const hasBreakingConstraints = userId.length > USER_ID_MAX_LENGTH || userId.toLowerCase() === 'undefined'
return !hasBreakingConstraints && /[a-z0-9-_]+/i.test(userId)
}
export default async (bp: typeof sdk, db: Database) => {
const asyncMiddleware = asyncMw(bp.logger)
const globalConfig = (await bp.config.getModuleConfig('channel-web')) as Config
const diskStorage = multer.diskStorage({
destination: globalConfig.fileUploadPath,
// @ts-ignore typing indicates that limits isn't supported
limits: {
files: 1,
fileSize: 5242880 // 5MB
},
filename(req, file, cb) {
const userId = _.get(req, 'params.userId') || 'anonymous'
const ext = path.extname(file.originalname)
cb(undefined, `${userId}_${new Date().getTime()}${ext}`)
}
})
let upload = multer({ storage: diskStorage })
if (globalConfig.uploadsUseS3) {
/*
You can override AWS's default settings here. Example:
{ region: 'us-east-1', apiVersion: '2014-10-01', credentials: {...} }
*/
const awsConfig = {
region: globalConfig.uploadsS3Region,
credentials: {
accessKeyId: globalConfig.uploadsS3AWSAccessKey,
secretAccessKey: globalConfig.uploadsS3AWSAccessSecret
}
}
if (!awsConfig.credentials.accessKeyId && !awsConfig.credentials.secretAccessKey) {
delete awsConfig.credentials
}
if (!awsConfig.region) {
delete awsConfig.region
}
// TODO use media service with a 's3' backend
const s3 = new aws.S3(awsConfig)
const s3Storage = multers3({
s3,
bucket: globalConfig.uploadsS3Bucket || 'uploads',
contentType: multers3.AUTO_CONTENT_TYPE,
cacheControl: 'max-age=31536000', // one year caching
acl: 'public-read',
key(req, file, cb) {
const userId = _.get(req, 'params.userId') || 'anonymous'
const ext = path.extname(file.originalname)
cb(undefined, `${userId}_${new Date().getTime()}${ext}`)
}
})
upload = multer({ storage: s3Storage })
}
const router = bp.http.createRouterForBot('channel-web', { checkAuthentication: false, enableJsonBodyParser: true })
const perBotCache = apicache.options({
appendKey: (req: Request, _res: Response) => `${req.method} for bot ${req.params?.boId}`,
statusCodes: { include: [200] }
}).middleware
const assertUserInfo = (options: { convoIdRequired?: boolean } = {}) => async (
req: ChatRequest,
_res: Response,
next: NextFunction
) => {
const { botId } = req.params
const { conversationId, webSessionId } = req.body || {}
req.visitorId = await bp.realtime.getVisitorIdFromGuestSocketId(webSessionId)
if (!req.visitorId) {
return next(ERR_BAD_USER_SESSION_ID)
}
if (!userIdIsValid(req.visitorId)) {
return next(ERR_USER_ID_INVALID)
}
req.messaging = await db.getMessagingClient(botId)
const userId = await db.mapVisitor(botId, req.visitorId, req.messaging)
if (conversationId) {
let conversation: Conversation
try {
conversation = await req.messaging.getConversation(conversationId)
} catch {}
if (!conversation || !userId || conversation.userId !== userId) {
return next(ERR_BAD_CONV_ID)
}
req.conversationId = conversationId
}
if (options.convoIdRequired && req.conversationId === undefined) {
return next(ERR_CONV_ID_REQ)
}
req.botId = botId
req.userId = userId
next()
}
const getRecent = async (messaging: MessagingClient, userId: string) => {
const convs = await messaging.listConversations(userId, 1)
if (convs?.length) {
return convs[0]
}
return messaging.createConversation(userId)
}
router.get(
'/botInfo',
perBotCache('1 minute'),
asyncMiddleware(async (req: BPRequest, res: Response) => {
const { botId } = req.params
const security = ((await bp.config.getModuleConfig('channel-web')) as Config).security // usage of global because a user could overwrite bot scoped configs
const config = (await bp.config.getModuleConfigForBot('channel-web', botId)) as Config
const botInfo = await bp.bots.getBotById(botId)
if (!botInfo) {
return res.sendStatus(404)
}
res.send({
showBotInfoPage: (config.infoPage && config.infoPage.enabled) || config.showBotInfoPage,
name: botInfo.name,
description: (config.infoPage && config.infoPage.description) || botInfo.description,
details: botInfo.details,
languages: botInfo.languages,
extraStylesheet: config.extraStylesheet,
disableNotificationSound: config.disableNotificationSound,
security,
lazySocket: config.lazySocket,
maxMessageLength: config.maxMessageLength
})
})
)
router.post(
'/users/customId',
bp.http.extractExternalToken,
assertUserInfo(),
asyncMiddleware(async (req: ChatRequest, res: Response) => {
const { botId, userId } = req
const { customId } = req.body
if (!customId) {
return res.sendStatus(400)
}
await bp.users.getOrCreateUser('web', userId, botId)
await bp.users.updateAttributes('web', userId, { [WEBCHAT_CUSTOM_ID_KEY]: customId })
res.sendStatus(200)
})
)
router.post(
'/messages',
bp.http.extractExternalToken,
assertUserInfo(),
asyncMiddleware(async (req: ChatRequest, res: Response) => {
const { botId, userId } = req
const user = await bp.users.getOrCreateUser('web', userId, botId)
const payload = req.body.payload || {}
if (!SUPPORTED_MESSAGES.includes(payload.type)) {
return res.status(400).send(ERR_MSG_TYPE)
}
if (payload.type === 'visit') {
const { timezone, language } = payload
const isValidTimezone = _.isNumber(timezone) && timezone >= -12 && timezone <= 14 && timezone % 0.5 === 0
const isValidLanguage = language.length < 4 && !_.get(user, 'result.attributes.language')
const newAttributes = {
...(isValidTimezone && { timezone }),
...(isValidLanguage && { language })
}
if (Object.getOwnPropertyNames(newAttributes).length) {
await bp.users.updateAttributes('web', userId, newAttributes)
}
}
if (!req.conversationId) {
req.conversationId = (await getRecent(req.messaging, userId)).id
}
await sendNewMessage(req, payload, !!req.headers.authorization)
res.sendStatus(200)
})
)
router.post(
'/messages/files',
upload.single('file'),
bp.http.extractExternalToken,
assertUserInfo({ convoIdRequired: true }),
asyncMiddleware(async (req: ChatRequest & any, res: Response) => {
const { botId, userId } = req
const payloadValue = req.body.payload || {}
await bp.users.getOrCreateUser('web', userId, botId) // Just to create the user if it doesn't exist
const payload = {
text: `Uploaded a file **${req.file.originalname}**`,
type: 'file',
storage: req.file.location ? 's3' : 'local',
url: req.file.location || req.file.path || undefined,
name: req.file.filename,
originalName: req.file.originalname,
mime: req.file.contentType || req.file.mimetype,
size: req.file.size,
payload: payloadValue
}
await sendNewMessage(req, payload, false)
return res.sendStatus(200)
})
)
router.post(
'/messages/voice',
bp.http.extractExternalToken,
assertUserInfo({ convoIdRequired: true }),
asyncMiddleware(async (req: ChatRequest, res: Response) => {
const { botId, userId } = req
const { audio } = req.body
if (!audio?.buffer || !audio?.title) {
throw new Error('Voices messages must contain an audio buffer and title')
}
await bp.users.getOrCreateUser('web', userId, botId) // Just to create the user if it doesn't exist
const buffer = Buffer.from(audio!.buffer, 'base64')
const formData = new FormData()
formData.append('file', buffer, audio!.title)
const axiosConfig = await bp.http.getAxiosConfigForBot(botId, { studioUrl: true })
axiosConfig.headers['Content-Type'] = `multipart/form-data; boundary=${formData.getBoundary()}`
// Upload the audio buffer to the Media Service
const {
data: { url }
} = await axios.post<{ url: string }>('/media', formData, {
...axiosConfig
})
const payload = {
type: 'voice',
audio: url
}
await sendNewMessage(req, payload, false)
return res.sendStatus(200)
})
)
router.post(
'/conversations/get',
assertUserInfo({ convoIdRequired: true }),
asyncMiddleware(async (req: ChatRequest, res: Response) => {
const { conversationId, botId } = req
const config = (await bp.config.getModuleConfigForBot('channel-web', botId)) as Config
const conversation = await req.messaging.getConversation(conversationId)
const messages = await req.messaging.listMessages(conversationId, config.maxMessagesHistory)
// this function scope can (and probably will) expand to other types as well with a switch case on the type
// we do something similar in the cms to determine weather there are translated fields or not
const notEmptyPayload = payload => (payload.type === 'text' ? !!payload.text : true)
const displayableMessages = messages.filter(({ payload }) => payload.type !== 'visit' && notEmptyPayload(payload))
return res.send({ ...conversation, messages: displayableMessages })
})
)
router.post(
'/conversations/list',
assertUserInfo(),
asyncMiddleware(async (req: ChatRequest, res: Response) => {
const { userId, botId } = req
await bp.users.getOrCreateUser('web', userId, botId)
const conversations = await req.messaging.listConversations(userId, MAX_MESSAGE_HISTORY)
const config = await bp.config.getModuleConfigForBot('channel-web', botId)
const convsWithLastMessage: (Conversation & { lastMessage?: Message })[] = []
for (const conversation of conversations) {
convsWithLastMessage.push({
...conversation,
lastMessage: (await req.messaging.listMessages(conversation.id, 1))[0]
})
}
return res.send({
conversations: convsWithLastMessage,
startNewConvoOnTimeout: config.startNewConvoOnTimeout,
recentConversationLifetime: config.recentConversationLifetime
})
})
)
async function sendNewMessage(req: ChatRequest, payload: any, useDebugger: boolean) {
const config = await bp.config.getModuleConfigForBot('channel-web', req.botId)
if (payload.type === 'voice') {
if (_.isEmpty(payload.audio)) {
throw new Error('Voices messages must contain an audio buffer')
}
} else if (
(!payload.text || !_.isString(payload.text) || payload.text.length > config.maxMessageLength) &&
payload.type !== 'postback'
) {
throw new Error(`Text must be a valid string of less than ${config.maxMessageLength} chars`)
}
let sanitizedPayload = payload
if (payload.sensitive) {
const sensitive = Array.isArray(payload.sensitive) ? payload.sensitive : [payload.sensitive]
sanitizedPayload = _.omit(payload, [...sensitive, 'sensitive'])
}
const message = await req.messaging.createMessage(req.conversationId, req.userId, sanitizedPayload)
const event = bp.IO.Event({
messageId: message.id,
botId: req.botId,
channel: 'web',
direction: 'incoming',
payload,
target: req.userId,
threadId: req.conversationId,
type: payload.type,
credentials: req.credentials
})
if (useDebugger) {
event.debugger = true
}
bp.realtime.sendPayload(bp.RealTimePayload.forVisitor(req.visitorId, 'webchat.message', message))
await bp.events.sendEvent(event)
}
router.post(
'/events',
bp.http.extractExternalToken,
assertUserInfo(),
asyncMiddleware(async (req: ChatRequest, res: Response) => {
const { userId, botId } = req
let { conversationId } = req
const payload = req.body.payload || {}
await bp.users.getOrCreateUser('web', userId, botId)
if (!conversationId) {
conversationId = (await getRecent(req.messaging, userId)).id
}
const event = bp.IO.Event({
botId,
channel: 'web',
direction: 'incoming',
target: userId,
threadId: conversationId.toString(),
type: payload.type,
payload,
credentials: req.credentials
})
await bp.events.sendEvent(event)
res.sendStatus(200)
})
)
router.post(
'/saveFeedback',
bp.http.extractExternalToken,
asyncMiddleware(async (req: BPRequest, res: Response) => {
const { botId } = req.params
const { messageId, target, feedback } = req.body
if (!target || !messageId || !feedback) {
return res.status(400).send('Missing required fields')
}
const [event] = await bp.events.findEvents({ botId, messageId })
const { userId } = await db.getMappingFromVisitor(botId, target)
try {
await bp.events.saveUserFeedback(event.incomingEventId, userId, feedback, 'qna')
res.sendStatus(200)
} catch (err) {
res.status(400).send(err)
}
})
)
router.post(
'/feedbackInfo',
bp.http.extractExternalToken,
asyncMiddleware(async (req: BPRequest, res: Response) => {
const { botId } = req.params
const { target, messageIds } = req.body
if (!target || !messageIds) {
return res.status(400).send('Missing required fields')
}
const { userId } = await db.getMappingFromVisitor(botId, target)
res.send(await db.getFeedbackInfoForMessageIds(userId, messageIds))
})
)
router.post(
'/conversations/reset',
bp.http.extractExternalToken,
assertUserInfo({ convoIdRequired: true }),
asyncMiddleware(async (req: ChatRequest, res: Response) => {
const { botId, userId, conversationId } = req
await bp.users.getOrCreateUser('web', userId, botId)
const payload = {
text: 'Reset the conversation',
type: 'session_reset'
}
await sendNewMessage(req, payload, false)
const sessionId = bp.dialog.createId({
botId,
target: userId,
threadId: conversationId.toString(),
channel: 'web'
})
await bp.dialog.deleteSession(sessionId, botId)
res.sendStatus(200)
})
)
router.post(
'/conversations/new',
assertUserInfo(),
asyncMiddleware(async (req: ChatRequest, res: Response) => {
const { userId } = req
const conversation = await req.messaging.createConversation(userId)
res.send({ convoId: conversation.id })
})
)
router.post(
'/conversations/reference',
assertUserInfo(),
asyncMiddleware(async (req: ChatRequest, res: Response) => {
try {
const { botId, userId } = req
const { reference } = req.body
let { conversationId } = req
await bp.users.getOrCreateUser('web', userId, botId)
if (typeof reference !== 'string' || !reference.length || reference.indexOf('=') === -1) {
throw new Error('Invalid reference')
}
if (!conversationId) {
conversationId = (await getRecent(req.messaging, userId)).id
}
const message = reference.slice(0, reference.lastIndexOf('='))
const signature = reference.slice(reference.lastIndexOf('=') + 1)
const verifySignature = await bp.security.getMessageSignature(message)
if (verifySignature !== signature) {
throw new Error('Bad reference signature')
}
const payload = {
text: message,
signature,
type: 'session_reference'
}
const event = bp.IO.Event({
botId,
channel: 'web',
direction: 'incoming',
target: userId,
threadId: conversationId.toString(),
type: payload.type,
payload,
credentials: req['credentials']
})
await bp.events.sendEvent(event)
res.sendStatus(200)
} catch (error) {
res.status(500).send({ message: error.message })
}
})
)
router.post(
'/preferences/get',
assertUserInfo(),
asyncMiddleware(async (req: ChatRequest, res: Response) => {
const { userId, botId } = req
const { result } = await bp.users.getOrCreateUser('web', userId, botId)
return res.send({ language: result.attributes.language })
})
)
router.post(
'/preferences',
assertUserInfo(),
asyncMiddleware(async (req: ChatRequest, res: Response) => {
const { userId, botId } = req
const payload = req.body || {}
const preferredLanguage = payload.language
const bot = await bp.bots.getBotById(botId)
const validLanguage = bot.languages.includes(preferredLanguage)
if (!validLanguage) {
return res.status(400).send(ERR_BAD_LANGUAGE)
}
await bp.users.updateAttributes('web', userId, {
language: preferredLanguage
})
return res.sendStatus(200)
})
)
const getMessageContent = (message, type) => {
const { payload } = message
if (type === 'file') {
return (payload && payload.url) || message.message_data.url
}
const wrappedText = _.get(payload, 'wrapped.text')
return (payload && payload.text) || message.message_text || wrappedText || `Event (${type})`
}
const convertToTxtFile = async (botId: string, conversation: Conversation & { messages: Message[] }) => {
const { messages } = conversation
const { result: user } = await bp.users.getOrCreateUser('web', conversation.userId)
const timeFormat = 'MM/DD/YY HH:mm'
const fullName = `${user.attributes['first_name'] || ''} ${user.attributes['last_name'] || ''}`
const metadata = `Conversation Id: ${conversation.id}\r\nCreated on: ${moment(conversation.createdOn).format(
timeFormat
)}\r\nUser: ${fullName}\r\n-----------------\r\n`
const messagesAsTxt = messages.map(message => {
const type = message.payload?.type
if (type === 'session_reset') {
return ''
}
return `[${moment(message.sentOn).format(timeFormat)}] ${message.authorId ? 'User' : botId}: ${getMessageContent(
message,
type
)}\r\n`
})
return [metadata, ...messagesAsTxt].join('')
}
router.post(
'/conversations/download/txt',
assertUserInfo({ convoIdRequired: true }),
asyncMiddleware(async (req: ChatRequest, res: Response) => {
const { conversationId, botId } = req
const config = (await bp.config.getModuleConfigForBot('channel-web', botId)) as Config
const conversation = await req.messaging.getConversation(conversationId)
const messages = await req.messaging.listMessages(conversationId, config.maxMessagesHistory)
const txt = await convertToTxtFile(botId, { ...conversation, messages })
res.send({ txt, name: `Conversation ${conversation.id}.txt` })
})
)
router.post(
'/conversations/messages/delete',
assertUserInfo({ convoIdRequired: true }),
asyncMiddleware(async (req: ChatRequest, res: Response) => {
const { visitorId, conversationId } = req
bp.realtime.sendPayload(bp.RealTimePayload.forVisitor(visitorId, 'webchat.clear', { conversationId }))
await req.messaging.deleteMessagesByConversation(conversationId)
res.sendStatus(204)
})
)
// NOTE: this is a temporary route and allows an agent to delete a channel web user's conversation messages
// until today this was completed by calling channel web api directly but it's api has been secured with a temporary sessionId
// soon enough, once channel-web's implementation moves to messaging api we'll be able to remove this and use messaging directly
// usage of a private router because authentication is handled for us
const privateRouter = bp.http.createRouterForBot('channel-web-private')
// NOTE : this uses duplicated code taken from public route (ln#624 - ln#636) so it's easy to remove once we can (see prev note)
privateRouter.post(
'/conversations/:id/messages/delete',
asyncMiddleware(async (req: ChatRequest, res: Response) => {
const { botId } = req.params
const conversationId = req.params.id
const { userId } = req.body
const messaging = await db.getMessagingClient(botId)
const conversation = await messaging.getConversation(conversationId)
if (!userId || conversation?.userId !== userId) {
return res.status(400).send(ERR_BAD_CONV_ID)
}
const { visitorId } = await db.getMappingFromUser(userId)
bp.realtime.sendPayload(bp.RealTimePayload.forVisitor(visitorId, 'webchat.clear', { conversationId }))
await messaging.deleteMessagesByConversation(conversationId)
res.sendStatus(204)
})
)
} | const ERR_MSG_TYPE = '`type` is required and must be valid'
const ERR_CONV_ID_REQ = '`conversationId` is required and must be valid'
const ERR_BAD_LANGUAGE = '`language` is required and must be valid'
const ERR_BAD_CONV_ID = "The conversation ID doesn't belong to that user"
const ERR_BAD_USER_SESSION_ID = 'session id is invalid' | random_line_split |
api.ts | import { Conversation, Message, MessagingClient } from '@botpress/messaging-client'
import apicache from 'apicache'
import aws from 'aws-sdk'
import axios from 'axios'
import * as sdk from 'botpress/sdk'
import { asyncMiddleware as asyncMw, BPRequest } from 'common/http'
import { Request, Response, NextFunction } from 'express'
import FormData from 'form-data'
import _ from 'lodash'
import moment from 'moment'
import multer from 'multer'
import multers3 from 'multer-s3'
import path from 'path'
import { Config } from '../config'
import Database from './db'
const ERR_USER_ID_INVALID = 'user id associated with this session must be valid'
const ERR_MSG_TYPE = '`type` is required and must be valid'
const ERR_CONV_ID_REQ = '`conversationId` is required and must be valid'
const ERR_BAD_LANGUAGE = '`language` is required and must be valid'
const ERR_BAD_CONV_ID = "The conversation ID doesn't belong to that user"
const ERR_BAD_USER_SESSION_ID = 'session id is invalid'
const USER_ID_MAX_LENGTH = 40
const MAX_MESSAGE_HISTORY = 100
const SUPPORTED_MESSAGES = [
'text',
'quick_reply',
'form',
'login_prompt',
'visit',
'request_start_conversation',
'postback',
'voice'
]
const WEBCHAT_CUSTOM_ID_KEY = 'webchatCustomId'
type ChatRequest = BPRequest & {
visitorId: string
userId: string
botId: string
conversationId: string
messaging: MessagingClient
}
const userIdIsValid = (userId: string): boolean => {
const hasBreakingConstraints = userId.length > USER_ID_MAX_LENGTH || userId.toLowerCase() === 'undefined'
return !hasBreakingConstraints && /[a-z0-9-_]+/i.test(userId)
}
export default async (bp: typeof sdk, db: Database) => {
const asyncMiddleware = asyncMw(bp.logger)
const globalConfig = (await bp.config.getModuleConfig('channel-web')) as Config
const diskStorage = multer.diskStorage({
destination: globalConfig.fileUploadPath,
// @ts-ignore typing indicates that limits isn't supported
limits: {
files: 1,
fileSize: 5242880 // 5MB
},
filename(req, file, cb) {
const userId = _.get(req, 'params.userId') || 'anonymous'
const ext = path.extname(file.originalname)
cb(undefined, `${userId}_${new Date().getTime()}${ext}`)
}
})
let upload = multer({ storage: diskStorage })
if (globalConfig.uploadsUseS3) {
/*
You can override AWS's default settings here. Example:
{ region: 'us-east-1', apiVersion: '2014-10-01', credentials: {...} }
*/
const awsConfig = {
region: globalConfig.uploadsS3Region,
credentials: {
accessKeyId: globalConfig.uploadsS3AWSAccessKey,
secretAccessKey: globalConfig.uploadsS3AWSAccessSecret
}
}
if (!awsConfig.credentials.accessKeyId && !awsConfig.credentials.secretAccessKey) {
delete awsConfig.credentials
}
if (!awsConfig.region) {
delete awsConfig.region
}
// TODO use media service with a 's3' backend
const s3 = new aws.S3(awsConfig)
const s3Storage = multers3({
s3,
bucket: globalConfig.uploadsS3Bucket || 'uploads',
contentType: multers3.AUTO_CONTENT_TYPE,
cacheControl: 'max-age=31536000', // one year caching
acl: 'public-read',
key(req, file, cb) {
const userId = _.get(req, 'params.userId') || 'anonymous'
const ext = path.extname(file.originalname)
cb(undefined, `${userId}_${new Date().getTime()}${ext}`)
}
})
upload = multer({ storage: s3Storage })
}
const router = bp.http.createRouterForBot('channel-web', { checkAuthentication: false, enableJsonBodyParser: true })
const perBotCache = apicache.options({
appendKey: (req: Request, _res: Response) => `${req.method} for bot ${req.params?.boId}`,
statusCodes: { include: [200] }
}).middleware
const assertUserInfo = (options: { convoIdRequired?: boolean } = {}) => async (
req: ChatRequest,
_res: Response,
next: NextFunction
) => {
const { botId } = req.params
const { conversationId, webSessionId } = req.body || {}
req.visitorId = await bp.realtime.getVisitorIdFromGuestSocketId(webSessionId)
if (!req.visitorId) {
return next(ERR_BAD_USER_SESSION_ID)
}
if (!userIdIsValid(req.visitorId)) {
return next(ERR_USER_ID_INVALID)
}
req.messaging = await db.getMessagingClient(botId)
const userId = await db.mapVisitor(botId, req.visitorId, req.messaging)
if (conversationId) {
let conversation: Conversation
try {
conversation = await req.messaging.getConversation(conversationId)
} catch {}
if (!conversation || !userId || conversation.userId !== userId) {
return next(ERR_BAD_CONV_ID)
}
req.conversationId = conversationId
}
if (options.convoIdRequired && req.conversationId === undefined) {
return next(ERR_CONV_ID_REQ)
}
req.botId = botId
req.userId = userId
next()
}
const getRecent = async (messaging: MessagingClient, userId: string) => {
const convs = await messaging.listConversations(userId, 1)
if (convs?.length) {
return convs[0]
}
return messaging.createConversation(userId)
}
router.get(
'/botInfo',
perBotCache('1 minute'),
asyncMiddleware(async (req: BPRequest, res: Response) => {
const { botId } = req.params
const security = ((await bp.config.getModuleConfig('channel-web')) as Config).security // usage of global because a user could overwrite bot scoped configs
const config = (await bp.config.getModuleConfigForBot('channel-web', botId)) as Config
const botInfo = await bp.bots.getBotById(botId)
if (!botInfo) {
return res.sendStatus(404)
}
res.send({
showBotInfoPage: (config.infoPage && config.infoPage.enabled) || config.showBotInfoPage,
name: botInfo.name,
description: (config.infoPage && config.infoPage.description) || botInfo.description,
details: botInfo.details,
languages: botInfo.languages,
extraStylesheet: config.extraStylesheet,
disableNotificationSound: config.disableNotificationSound,
security,
lazySocket: config.lazySocket,
maxMessageLength: config.maxMessageLength
})
})
)
router.post(
'/users/customId',
bp.http.extractExternalToken,
assertUserInfo(),
asyncMiddleware(async (req: ChatRequest, res: Response) => {
const { botId, userId } = req
const { customId } = req.body
if (!customId) {
return res.sendStatus(400)
}
await bp.users.getOrCreateUser('web', userId, botId)
await bp.users.updateAttributes('web', userId, { [WEBCHAT_CUSTOM_ID_KEY]: customId })
res.sendStatus(200)
})
)
router.post(
'/messages',
bp.http.extractExternalToken,
assertUserInfo(),
asyncMiddleware(async (req: ChatRequest, res: Response) => {
const { botId, userId } = req
const user = await bp.users.getOrCreateUser('web', userId, botId)
const payload = req.body.payload || {}
if (!SUPPORTED_MESSAGES.includes(payload.type)) {
return res.status(400).send(ERR_MSG_TYPE)
}
if (payload.type === 'visit') {
const { timezone, language } = payload
const isValidTimezone = _.isNumber(timezone) && timezone >= -12 && timezone <= 14 && timezone % 0.5 === 0
const isValidLanguage = language.length < 4 && !_.get(user, 'result.attributes.language')
const newAttributes = {
...(isValidTimezone && { timezone }),
...(isValidLanguage && { language })
}
if (Object.getOwnPropertyNames(newAttributes).length) {
await bp.users.updateAttributes('web', userId, newAttributes)
}
}
if (!req.conversationId) {
req.conversationId = (await getRecent(req.messaging, userId)).id
}
await sendNewMessage(req, payload, !!req.headers.authorization)
res.sendStatus(200)
})
)
router.post(
'/messages/files',
upload.single('file'),
bp.http.extractExternalToken,
assertUserInfo({ convoIdRequired: true }),
asyncMiddleware(async (req: ChatRequest & any, res: Response) => {
const { botId, userId } = req
const payloadValue = req.body.payload || {}
await bp.users.getOrCreateUser('web', userId, botId) // Just to create the user if it doesn't exist
const payload = {
text: `Uploaded a file **${req.file.originalname}**`,
type: 'file',
storage: req.file.location ? 's3' : 'local',
url: req.file.location || req.file.path || undefined,
name: req.file.filename,
originalName: req.file.originalname,
mime: req.file.contentType || req.file.mimetype,
size: req.file.size,
payload: payloadValue
}
await sendNewMessage(req, payload, false)
return res.sendStatus(200)
})
)
router.post(
'/messages/voice',
bp.http.extractExternalToken,
assertUserInfo({ convoIdRequired: true }),
asyncMiddleware(async (req: ChatRequest, res: Response) => {
const { botId, userId } = req
const { audio } = req.body
if (!audio?.buffer || !audio?.title) {
throw new Error('Voices messages must contain an audio buffer and title')
}
await bp.users.getOrCreateUser('web', userId, botId) // Just to create the user if it doesn't exist
const buffer = Buffer.from(audio!.buffer, 'base64')
const formData = new FormData()
formData.append('file', buffer, audio!.title)
const axiosConfig = await bp.http.getAxiosConfigForBot(botId, { studioUrl: true })
axiosConfig.headers['Content-Type'] = `multipart/form-data; boundary=${formData.getBoundary()}`
// Upload the audio buffer to the Media Service
const {
data: { url }
} = await axios.post<{ url: string }>('/media', formData, {
...axiosConfig
})
const payload = {
type: 'voice',
audio: url
}
await sendNewMessage(req, payload, false)
return res.sendStatus(200)
})
)
router.post(
'/conversations/get',
assertUserInfo({ convoIdRequired: true }),
asyncMiddleware(async (req: ChatRequest, res: Response) => {
const { conversationId, botId } = req
const config = (await bp.config.getModuleConfigForBot('channel-web', botId)) as Config
const conversation = await req.messaging.getConversation(conversationId)
const messages = await req.messaging.listMessages(conversationId, config.maxMessagesHistory)
// this function scope can (and probably will) expand to other types as well with a switch case on the type
// we do something similar in the cms to determine weather there are translated fields or not
const notEmptyPayload = payload => (payload.type === 'text' ? !!payload.text : true)
const displayableMessages = messages.filter(({ payload }) => payload.type !== 'visit' && notEmptyPayload(payload))
return res.send({ ...conversation, messages: displayableMessages })
})
)
router.post(
'/conversations/list',
assertUserInfo(),
asyncMiddleware(async (req: ChatRequest, res: Response) => {
const { userId, botId } = req
await bp.users.getOrCreateUser('web', userId, botId)
const conversations = await req.messaging.listConversations(userId, MAX_MESSAGE_HISTORY)
const config = await bp.config.getModuleConfigForBot('channel-web', botId)
const convsWithLastMessage: (Conversation & { lastMessage?: Message })[] = []
for (const conversation of conversations) {
convsWithLastMessage.push({
...conversation,
lastMessage: (await req.messaging.listMessages(conversation.id, 1))[0]
})
}
return res.send({
conversations: convsWithLastMessage,
startNewConvoOnTimeout: config.startNewConvoOnTimeout,
recentConversationLifetime: config.recentConversationLifetime
})
})
)
async function sendNewMessage(req: ChatRequest, payload: any, useDebugger: boolean) |
router.post(
'/events',
bp.http.extractExternalToken,
assertUserInfo(),
asyncMiddleware(async (req: ChatRequest, res: Response) => {
const { userId, botId } = req
let { conversationId } = req
const payload = req.body.payload || {}
await bp.users.getOrCreateUser('web', userId, botId)
if (!conversationId) {
conversationId = (await getRecent(req.messaging, userId)).id
}
const event = bp.IO.Event({
botId,
channel: 'web',
direction: 'incoming',
target: userId,
threadId: conversationId.toString(),
type: payload.type,
payload,
credentials: req.credentials
})
await bp.events.sendEvent(event)
res.sendStatus(200)
})
)
router.post(
'/saveFeedback',
bp.http.extractExternalToken,
asyncMiddleware(async (req: BPRequest, res: Response) => {
const { botId } = req.params
const { messageId, target, feedback } = req.body
if (!target || !messageId || !feedback) {
return res.status(400).send('Missing required fields')
}
const [event] = await bp.events.findEvents({ botId, messageId })
const { userId } = await db.getMappingFromVisitor(botId, target)
try {
await bp.events.saveUserFeedback(event.incomingEventId, userId, feedback, 'qna')
res.sendStatus(200)
} catch (err) {
res.status(400).send(err)
}
})
)
router.post(
'/feedbackInfo',
bp.http.extractExternalToken,
asyncMiddleware(async (req: BPRequest, res: Response) => {
const { botId } = req.params
const { target, messageIds } = req.body
if (!target || !messageIds) {
return res.status(400).send('Missing required fields')
}
const { userId } = await db.getMappingFromVisitor(botId, target)
res.send(await db.getFeedbackInfoForMessageIds(userId, messageIds))
})
)
router.post(
'/conversations/reset',
bp.http.extractExternalToken,
assertUserInfo({ convoIdRequired: true }),
asyncMiddleware(async (req: ChatRequest, res: Response) => {
const { botId, userId, conversationId } = req
await bp.users.getOrCreateUser('web', userId, botId)
const payload = {
text: 'Reset the conversation',
type: 'session_reset'
}
await sendNewMessage(req, payload, false)
const sessionId = bp.dialog.createId({
botId,
target: userId,
threadId: conversationId.toString(),
channel: 'web'
})
await bp.dialog.deleteSession(sessionId, botId)
res.sendStatus(200)
})
)
router.post(
'/conversations/new',
assertUserInfo(),
asyncMiddleware(async (req: ChatRequest, res: Response) => {
const { userId } = req
const conversation = await req.messaging.createConversation(userId)
res.send({ convoId: conversation.id })
})
)
router.post(
'/conversations/reference',
assertUserInfo(),
asyncMiddleware(async (req: ChatRequest, res: Response) => {
try {
const { botId, userId } = req
const { reference } = req.body
let { conversationId } = req
await bp.users.getOrCreateUser('web', userId, botId)
if (typeof reference !== 'string' || !reference.length || reference.indexOf('=') === -1) {
throw new Error('Invalid reference')
}
if (!conversationId) {
conversationId = (await getRecent(req.messaging, userId)).id
}
const message = reference.slice(0, reference.lastIndexOf('='))
const signature = reference.slice(reference.lastIndexOf('=') + 1)
const verifySignature = await bp.security.getMessageSignature(message)
if (verifySignature !== signature) {
throw new Error('Bad reference signature')
}
const payload = {
text: message,
signature,
type: 'session_reference'
}
const event = bp.IO.Event({
botId,
channel: 'web',
direction: 'incoming',
target: userId,
threadId: conversationId.toString(),
type: payload.type,
payload,
credentials: req['credentials']
})
await bp.events.sendEvent(event)
res.sendStatus(200)
} catch (error) {
res.status(500).send({ message: error.message })
}
})
)
router.post(
'/preferences/get',
assertUserInfo(),
asyncMiddleware(async (req: ChatRequest, res: Response) => {
const { userId, botId } = req
const { result } = await bp.users.getOrCreateUser('web', userId, botId)
return res.send({ language: result.attributes.language })
})
)
router.post(
'/preferences',
assertUserInfo(),
asyncMiddleware(async (req: ChatRequest, res: Response) => {
const { userId, botId } = req
const payload = req.body || {}
const preferredLanguage = payload.language
const bot = await bp.bots.getBotById(botId)
const validLanguage = bot.languages.includes(preferredLanguage)
if (!validLanguage) {
return res.status(400).send(ERR_BAD_LANGUAGE)
}
await bp.users.updateAttributes('web', userId, {
language: preferredLanguage
})
return res.sendStatus(200)
})
)
const getMessageContent = (message, type) => {
const { payload } = message
if (type === 'file') {
return (payload && payload.url) || message.message_data.url
}
const wrappedText = _.get(payload, 'wrapped.text')
return (payload && payload.text) || message.message_text || wrappedText || `Event (${type})`
}
const convertToTxtFile = async (botId: string, conversation: Conversation & { messages: Message[] }) => {
const { messages } = conversation
const { result: user } = await bp.users.getOrCreateUser('web', conversation.userId)
const timeFormat = 'MM/DD/YY HH:mm'
const fullName = `${user.attributes['first_name'] || ''} ${user.attributes['last_name'] || ''}`
const metadata = `Conversation Id: ${conversation.id}\r\nCreated on: ${moment(conversation.createdOn).format(
timeFormat
)}\r\nUser: ${fullName}\r\n-----------------\r\n`
const messagesAsTxt = messages.map(message => {
const type = message.payload?.type
if (type === 'session_reset') {
return ''
}
return `[${moment(message.sentOn).format(timeFormat)}] ${message.authorId ? 'User' : botId}: ${getMessageContent(
message,
type
)}\r\n`
})
return [metadata, ...messagesAsTxt].join('')
}
router.post(
'/conversations/download/txt',
assertUserInfo({ convoIdRequired: true }),
asyncMiddleware(async (req: ChatRequest, res: Response) => {
const { conversationId, botId } = req
const config = (await bp.config.getModuleConfigForBot('channel-web', botId)) as Config
const conversation = await req.messaging.getConversation(conversationId)
const messages = await req.messaging.listMessages(conversationId, config.maxMessagesHistory)
const txt = await convertToTxtFile(botId, { ...conversation, messages })
res.send({ txt, name: `Conversation ${conversation.id}.txt` })
})
)
router.post(
'/conversations/messages/delete',
assertUserInfo({ convoIdRequired: true }),
asyncMiddleware(async (req: ChatRequest, res: Response) => {
const { visitorId, conversationId } = req
bp.realtime.sendPayload(bp.RealTimePayload.forVisitor(visitorId, 'webchat.clear', { conversationId }))
await req.messaging.deleteMessagesByConversation(conversationId)
res.sendStatus(204)
})
)
// NOTE: this is a temporary route and allows an agent to delete a channel web user's conversation messages
// until today this was completed by calling channel web api directly but it's api has been secured with a temporary sessionId
// soon enough, once channel-web's implementation moves to messaging api we'll be able to remove this and use messaging directly
// usage of a private router because authentication is handled for us
const privateRouter = bp.http.createRouterForBot('channel-web-private')
// NOTE : this uses duplicated code taken from public route (ln#624 - ln#636) so it's easy to remove once we can (see prev note)
privateRouter.post(
'/conversations/:id/messages/delete',
asyncMiddleware(async (req: ChatRequest, res: Response) => {
const { botId } = req.params
const conversationId = req.params.id
const { userId } = req.body
const messaging = await db.getMessagingClient(botId)
const conversation = await messaging.getConversation(conversationId)
if (!userId || conversation?.userId !== userId) {
return res.status(400).send(ERR_BAD_CONV_ID)
}
const { visitorId } = await db.getMappingFromUser(userId)
bp.realtime.sendPayload(bp.RealTimePayload.forVisitor(visitorId, 'webchat.clear', { conversationId }))
await messaging.deleteMessagesByConversation(conversationId)
res.sendStatus(204)
})
)
}
| {
const config = await bp.config.getModuleConfigForBot('channel-web', req.botId)
if (payload.type === 'voice') {
if (_.isEmpty(payload.audio)) {
throw new Error('Voices messages must contain an audio buffer')
}
} else if (
(!payload.text || !_.isString(payload.text) || payload.text.length > config.maxMessageLength) &&
payload.type !== 'postback'
) {
throw new Error(`Text must be a valid string of less than ${config.maxMessageLength} chars`)
}
let sanitizedPayload = payload
if (payload.sensitive) {
const sensitive = Array.isArray(payload.sensitive) ? payload.sensitive : [payload.sensitive]
sanitizedPayload = _.omit(payload, [...sensitive, 'sensitive'])
}
const message = await req.messaging.createMessage(req.conversationId, req.userId, sanitizedPayload)
const event = bp.IO.Event({
messageId: message.id,
botId: req.botId,
channel: 'web',
direction: 'incoming',
payload,
target: req.userId,
threadId: req.conversationId,
type: payload.type,
credentials: req.credentials
})
if (useDebugger) {
event.debugger = true
}
bp.realtime.sendPayload(bp.RealTimePayload.forVisitor(req.visitorId, 'webchat.message', message))
await bp.events.sendEvent(event)
} | identifier_body |
api.ts | import { Conversation, Message, MessagingClient } from '@botpress/messaging-client'
import apicache from 'apicache'
import aws from 'aws-sdk'
import axios from 'axios'
import * as sdk from 'botpress/sdk'
import { asyncMiddleware as asyncMw, BPRequest } from 'common/http'
import { Request, Response, NextFunction } from 'express'
import FormData from 'form-data'
import _ from 'lodash'
import moment from 'moment'
import multer from 'multer'
import multers3 from 'multer-s3'
import path from 'path'
import { Config } from '../config'
import Database from './db'
const ERR_USER_ID_INVALID = 'user id associated with this session must be valid'
const ERR_MSG_TYPE = '`type` is required and must be valid'
const ERR_CONV_ID_REQ = '`conversationId` is required and must be valid'
const ERR_BAD_LANGUAGE = '`language` is required and must be valid'
const ERR_BAD_CONV_ID = "The conversation ID doesn't belong to that user"
const ERR_BAD_USER_SESSION_ID = 'session id is invalid'
const USER_ID_MAX_LENGTH = 40
const MAX_MESSAGE_HISTORY = 100
const SUPPORTED_MESSAGES = [
'text',
'quick_reply',
'form',
'login_prompt',
'visit',
'request_start_conversation',
'postback',
'voice'
]
const WEBCHAT_CUSTOM_ID_KEY = 'webchatCustomId'
type ChatRequest = BPRequest & {
visitorId: string
userId: string
botId: string
conversationId: string
messaging: MessagingClient
}
const userIdIsValid = (userId: string): boolean => {
const hasBreakingConstraints = userId.length > USER_ID_MAX_LENGTH || userId.toLowerCase() === 'undefined'
return !hasBreakingConstraints && /[a-z0-9-_]+/i.test(userId)
}
export default async (bp: typeof sdk, db: Database) => {
const asyncMiddleware = asyncMw(bp.logger)
const globalConfig = (await bp.config.getModuleConfig('channel-web')) as Config
const diskStorage = multer.diskStorage({
destination: globalConfig.fileUploadPath,
// @ts-ignore typing indicates that limits isn't supported
limits: {
files: 1,
fileSize: 5242880 // 5MB
},
filename(req, file, cb) {
const userId = _.get(req, 'params.userId') || 'anonymous'
const ext = path.extname(file.originalname)
cb(undefined, `${userId}_${new Date().getTime()}${ext}`)
}
})
let upload = multer({ storage: diskStorage })
if (globalConfig.uploadsUseS3) {
/*
You can override AWS's default settings here. Example:
{ region: 'us-east-1', apiVersion: '2014-10-01', credentials: {...} }
*/
const awsConfig = {
region: globalConfig.uploadsS3Region,
credentials: {
accessKeyId: globalConfig.uploadsS3AWSAccessKey,
secretAccessKey: globalConfig.uploadsS3AWSAccessSecret
}
}
if (!awsConfig.credentials.accessKeyId && !awsConfig.credentials.secretAccessKey) {
delete awsConfig.credentials
}
if (!awsConfig.region) {
delete awsConfig.region
}
// TODO use media service with a 's3' backend
const s3 = new aws.S3(awsConfig)
const s3Storage = multers3({
s3,
bucket: globalConfig.uploadsS3Bucket || 'uploads',
contentType: multers3.AUTO_CONTENT_TYPE,
cacheControl: 'max-age=31536000', // one year caching
acl: 'public-read',
key(req, file, cb) {
const userId = _.get(req, 'params.userId') || 'anonymous'
const ext = path.extname(file.originalname)
cb(undefined, `${userId}_${new Date().getTime()}${ext}`)
}
})
upload = multer({ storage: s3Storage })
}
const router = bp.http.createRouterForBot('channel-web', { checkAuthentication: false, enableJsonBodyParser: true })
const perBotCache = apicache.options({
appendKey: (req: Request, _res: Response) => `${req.method} for bot ${req.params?.boId}`,
statusCodes: { include: [200] }
}).middleware
const assertUserInfo = (options: { convoIdRequired?: boolean } = {}) => async (
req: ChatRequest,
_res: Response,
next: NextFunction
) => {
const { botId } = req.params
const { conversationId, webSessionId } = req.body || {}
req.visitorId = await bp.realtime.getVisitorIdFromGuestSocketId(webSessionId)
if (!req.visitorId) {
return next(ERR_BAD_USER_SESSION_ID)
}
if (!userIdIsValid(req.visitorId)) {
return next(ERR_USER_ID_INVALID)
}
req.messaging = await db.getMessagingClient(botId)
const userId = await db.mapVisitor(botId, req.visitorId, req.messaging)
if (conversationId) |
if (options.convoIdRequired && req.conversationId === undefined) {
return next(ERR_CONV_ID_REQ)
}
req.botId = botId
req.userId = userId
next()
}
const getRecent = async (messaging: MessagingClient, userId: string) => {
const convs = await messaging.listConversations(userId, 1)
if (convs?.length) {
return convs[0]
}
return messaging.createConversation(userId)
}
router.get(
'/botInfo',
perBotCache('1 minute'),
asyncMiddleware(async (req: BPRequest, res: Response) => {
const { botId } = req.params
const security = ((await bp.config.getModuleConfig('channel-web')) as Config).security // usage of global because a user could overwrite bot scoped configs
const config = (await bp.config.getModuleConfigForBot('channel-web', botId)) as Config
const botInfo = await bp.bots.getBotById(botId)
if (!botInfo) {
return res.sendStatus(404)
}
res.send({
showBotInfoPage: (config.infoPage && config.infoPage.enabled) || config.showBotInfoPage,
name: botInfo.name,
description: (config.infoPage && config.infoPage.description) || botInfo.description,
details: botInfo.details,
languages: botInfo.languages,
extraStylesheet: config.extraStylesheet,
disableNotificationSound: config.disableNotificationSound,
security,
lazySocket: config.lazySocket,
maxMessageLength: config.maxMessageLength
})
})
)
router.post(
'/users/customId',
bp.http.extractExternalToken,
assertUserInfo(),
asyncMiddleware(async (req: ChatRequest, res: Response) => {
const { botId, userId } = req
const { customId } = req.body
if (!customId) {
return res.sendStatus(400)
}
await bp.users.getOrCreateUser('web', userId, botId)
await bp.users.updateAttributes('web', userId, { [WEBCHAT_CUSTOM_ID_KEY]: customId })
res.sendStatus(200)
})
)
router.post(
'/messages',
bp.http.extractExternalToken,
assertUserInfo(),
asyncMiddleware(async (req: ChatRequest, res: Response) => {
const { botId, userId } = req
const user = await bp.users.getOrCreateUser('web', userId, botId)
const payload = req.body.payload || {}
if (!SUPPORTED_MESSAGES.includes(payload.type)) {
return res.status(400).send(ERR_MSG_TYPE)
}
if (payload.type === 'visit') {
const { timezone, language } = payload
const isValidTimezone = _.isNumber(timezone) && timezone >= -12 && timezone <= 14 && timezone % 0.5 === 0
const isValidLanguage = language.length < 4 && !_.get(user, 'result.attributes.language')
const newAttributes = {
...(isValidTimezone && { timezone }),
...(isValidLanguage && { language })
}
if (Object.getOwnPropertyNames(newAttributes).length) {
await bp.users.updateAttributes('web', userId, newAttributes)
}
}
if (!req.conversationId) {
req.conversationId = (await getRecent(req.messaging, userId)).id
}
await sendNewMessage(req, payload, !!req.headers.authorization)
res.sendStatus(200)
})
)
router.post(
'/messages/files',
upload.single('file'),
bp.http.extractExternalToken,
assertUserInfo({ convoIdRequired: true }),
asyncMiddleware(async (req: ChatRequest & any, res: Response) => {
const { botId, userId } = req
const payloadValue = req.body.payload || {}
await bp.users.getOrCreateUser('web', userId, botId) // Just to create the user if it doesn't exist
const payload = {
text: `Uploaded a file **${req.file.originalname}**`,
type: 'file',
storage: req.file.location ? 's3' : 'local',
url: req.file.location || req.file.path || undefined,
name: req.file.filename,
originalName: req.file.originalname,
mime: req.file.contentType || req.file.mimetype,
size: req.file.size,
payload: payloadValue
}
await sendNewMessage(req, payload, false)
return res.sendStatus(200)
})
)
router.post(
'/messages/voice',
bp.http.extractExternalToken,
assertUserInfo({ convoIdRequired: true }),
asyncMiddleware(async (req: ChatRequest, res: Response) => {
const { botId, userId } = req
const { audio } = req.body
if (!audio?.buffer || !audio?.title) {
throw new Error('Voices messages must contain an audio buffer and title')
}
await bp.users.getOrCreateUser('web', userId, botId) // Just to create the user if it doesn't exist
const buffer = Buffer.from(audio!.buffer, 'base64')
const formData = new FormData()
formData.append('file', buffer, audio!.title)
const axiosConfig = await bp.http.getAxiosConfigForBot(botId, { studioUrl: true })
axiosConfig.headers['Content-Type'] = `multipart/form-data; boundary=${formData.getBoundary()}`
// Upload the audio buffer to the Media Service
const {
data: { url }
} = await axios.post<{ url: string }>('/media', formData, {
...axiosConfig
})
const payload = {
type: 'voice',
audio: url
}
await sendNewMessage(req, payload, false)
return res.sendStatus(200)
})
)
router.post(
'/conversations/get',
assertUserInfo({ convoIdRequired: true }),
asyncMiddleware(async (req: ChatRequest, res: Response) => {
const { conversationId, botId } = req
const config = (await bp.config.getModuleConfigForBot('channel-web', botId)) as Config
const conversation = await req.messaging.getConversation(conversationId)
const messages = await req.messaging.listMessages(conversationId, config.maxMessagesHistory)
// this function scope can (and probably will) expand to other types as well with a switch case on the type
// we do something similar in the cms to determine weather there are translated fields or not
const notEmptyPayload = payload => (payload.type === 'text' ? !!payload.text : true)
const displayableMessages = messages.filter(({ payload }) => payload.type !== 'visit' && notEmptyPayload(payload))
return res.send({ ...conversation, messages: displayableMessages })
})
)
router.post(
'/conversations/list',
assertUserInfo(),
asyncMiddleware(async (req: ChatRequest, res: Response) => {
const { userId, botId } = req
await bp.users.getOrCreateUser('web', userId, botId)
const conversations = await req.messaging.listConversations(userId, MAX_MESSAGE_HISTORY)
const config = await bp.config.getModuleConfigForBot('channel-web', botId)
const convsWithLastMessage: (Conversation & { lastMessage?: Message })[] = []
for (const conversation of conversations) {
convsWithLastMessage.push({
...conversation,
lastMessage: (await req.messaging.listMessages(conversation.id, 1))[0]
})
}
return res.send({
conversations: convsWithLastMessage,
startNewConvoOnTimeout: config.startNewConvoOnTimeout,
recentConversationLifetime: config.recentConversationLifetime
})
})
)
async function sendNewMessage(req: ChatRequest, payload: any, useDebugger: boolean) {
const config = await bp.config.getModuleConfigForBot('channel-web', req.botId)
if (payload.type === 'voice') {
if (_.isEmpty(payload.audio)) {
throw new Error('Voices messages must contain an audio buffer')
}
} else if (
(!payload.text || !_.isString(payload.text) || payload.text.length > config.maxMessageLength) &&
payload.type !== 'postback'
) {
throw new Error(`Text must be a valid string of less than ${config.maxMessageLength} chars`)
}
let sanitizedPayload = payload
if (payload.sensitive) {
const sensitive = Array.isArray(payload.sensitive) ? payload.sensitive : [payload.sensitive]
sanitizedPayload = _.omit(payload, [...sensitive, 'sensitive'])
}
const message = await req.messaging.createMessage(req.conversationId, req.userId, sanitizedPayload)
const event = bp.IO.Event({
messageId: message.id,
botId: req.botId,
channel: 'web',
direction: 'incoming',
payload,
target: req.userId,
threadId: req.conversationId,
type: payload.type,
credentials: req.credentials
})
if (useDebugger) {
event.debugger = true
}
bp.realtime.sendPayload(bp.RealTimePayload.forVisitor(req.visitorId, 'webchat.message', message))
await bp.events.sendEvent(event)
}
router.post(
'/events',
bp.http.extractExternalToken,
assertUserInfo(),
asyncMiddleware(async (req: ChatRequest, res: Response) => {
const { userId, botId } = req
let { conversationId } = req
const payload = req.body.payload || {}
await bp.users.getOrCreateUser('web', userId, botId)
if (!conversationId) {
conversationId = (await getRecent(req.messaging, userId)).id
}
const event = bp.IO.Event({
botId,
channel: 'web',
direction: 'incoming',
target: userId,
threadId: conversationId.toString(),
type: payload.type,
payload,
credentials: req.credentials
})
await bp.events.sendEvent(event)
res.sendStatus(200)
})
)
router.post(
'/saveFeedback',
bp.http.extractExternalToken,
asyncMiddleware(async (req: BPRequest, res: Response) => {
const { botId } = req.params
const { messageId, target, feedback } = req.body
if (!target || !messageId || !feedback) {
return res.status(400).send('Missing required fields')
}
const [event] = await bp.events.findEvents({ botId, messageId })
const { userId } = await db.getMappingFromVisitor(botId, target)
try {
await bp.events.saveUserFeedback(event.incomingEventId, userId, feedback, 'qna')
res.sendStatus(200)
} catch (err) {
res.status(400).send(err)
}
})
)
router.post(
'/feedbackInfo',
bp.http.extractExternalToken,
asyncMiddleware(async (req: BPRequest, res: Response) => {
const { botId } = req.params
const { target, messageIds } = req.body
if (!target || !messageIds) {
return res.status(400).send('Missing required fields')
}
const { userId } = await db.getMappingFromVisitor(botId, target)
res.send(await db.getFeedbackInfoForMessageIds(userId, messageIds))
})
)
router.post(
'/conversations/reset',
bp.http.extractExternalToken,
assertUserInfo({ convoIdRequired: true }),
asyncMiddleware(async (req: ChatRequest, res: Response) => {
const { botId, userId, conversationId } = req
await bp.users.getOrCreateUser('web', userId, botId)
const payload = {
text: 'Reset the conversation',
type: 'session_reset'
}
await sendNewMessage(req, payload, false)
const sessionId = bp.dialog.createId({
botId,
target: userId,
threadId: conversationId.toString(),
channel: 'web'
})
await bp.dialog.deleteSession(sessionId, botId)
res.sendStatus(200)
})
)
router.post(
'/conversations/new',
assertUserInfo(),
asyncMiddleware(async (req: ChatRequest, res: Response) => {
const { userId } = req
const conversation = await req.messaging.createConversation(userId)
res.send({ convoId: conversation.id })
})
)
router.post(
'/conversations/reference',
assertUserInfo(),
asyncMiddleware(async (req: ChatRequest, res: Response) => {
try {
const { botId, userId } = req
const { reference } = req.body
let { conversationId } = req
await bp.users.getOrCreateUser('web', userId, botId)
if (typeof reference !== 'string' || !reference.length || reference.indexOf('=') === -1) {
throw new Error('Invalid reference')
}
if (!conversationId) {
conversationId = (await getRecent(req.messaging, userId)).id
}
const message = reference.slice(0, reference.lastIndexOf('='))
const signature = reference.slice(reference.lastIndexOf('=') + 1)
const verifySignature = await bp.security.getMessageSignature(message)
if (verifySignature !== signature) {
throw new Error('Bad reference signature')
}
const payload = {
text: message,
signature,
type: 'session_reference'
}
const event = bp.IO.Event({
botId,
channel: 'web',
direction: 'incoming',
target: userId,
threadId: conversationId.toString(),
type: payload.type,
payload,
credentials: req['credentials']
})
await bp.events.sendEvent(event)
res.sendStatus(200)
} catch (error) {
res.status(500).send({ message: error.message })
}
})
)
router.post(
'/preferences/get',
assertUserInfo(),
asyncMiddleware(async (req: ChatRequest, res: Response) => {
const { userId, botId } = req
const { result } = await bp.users.getOrCreateUser('web', userId, botId)
return res.send({ language: result.attributes.language })
})
)
router.post(
'/preferences',
assertUserInfo(),
asyncMiddleware(async (req: ChatRequest, res: Response) => {
const { userId, botId } = req
const payload = req.body || {}
const preferredLanguage = payload.language
const bot = await bp.bots.getBotById(botId)
const validLanguage = bot.languages.includes(preferredLanguage)
if (!validLanguage) {
return res.status(400).send(ERR_BAD_LANGUAGE)
}
await bp.users.updateAttributes('web', userId, {
language: preferredLanguage
})
return res.sendStatus(200)
})
)
const getMessageContent = (message, type) => {
const { payload } = message
if (type === 'file') {
return (payload && payload.url) || message.message_data.url
}
const wrappedText = _.get(payload, 'wrapped.text')
return (payload && payload.text) || message.message_text || wrappedText || `Event (${type})`
}
const convertToTxtFile = async (botId: string, conversation: Conversation & { messages: Message[] }) => {
const { messages } = conversation
const { result: user } = await bp.users.getOrCreateUser('web', conversation.userId)
const timeFormat = 'MM/DD/YY HH:mm'
const fullName = `${user.attributes['first_name'] || ''} ${user.attributes['last_name'] || ''}`
const metadata = `Conversation Id: ${conversation.id}\r\nCreated on: ${moment(conversation.createdOn).format(
timeFormat
)}\r\nUser: ${fullName}\r\n-----------------\r\n`
const messagesAsTxt = messages.map(message => {
const type = message.payload?.type
if (type === 'session_reset') {
return ''
}
return `[${moment(message.sentOn).format(timeFormat)}] ${message.authorId ? 'User' : botId}: ${getMessageContent(
message,
type
)}\r\n`
})
return [metadata, ...messagesAsTxt].join('')
}
router.post(
'/conversations/download/txt',
assertUserInfo({ convoIdRequired: true }),
asyncMiddleware(async (req: ChatRequest, res: Response) => {
const { conversationId, botId } = req
const config = (await bp.config.getModuleConfigForBot('channel-web', botId)) as Config
const conversation = await req.messaging.getConversation(conversationId)
const messages = await req.messaging.listMessages(conversationId, config.maxMessagesHistory)
const txt = await convertToTxtFile(botId, { ...conversation, messages })
res.send({ txt, name: `Conversation ${conversation.id}.txt` })
})
)
router.post(
'/conversations/messages/delete',
assertUserInfo({ convoIdRequired: true }),
asyncMiddleware(async (req: ChatRequest, res: Response) => {
const { visitorId, conversationId } = req
bp.realtime.sendPayload(bp.RealTimePayload.forVisitor(visitorId, 'webchat.clear', { conversationId }))
await req.messaging.deleteMessagesByConversation(conversationId)
res.sendStatus(204)
})
)
// NOTE: this is a temporary route and allows an agent to delete a channel web user's conversation messages
// until today this was completed by calling channel web api directly but it's api has been secured with a temporary sessionId
// soon enough, once channel-web's implementation moves to messaging api we'll be able to remove this and use messaging directly
// usage of a private router because authentication is handled for us
const privateRouter = bp.http.createRouterForBot('channel-web-private')
// NOTE : this uses duplicated code taken from public route (ln#624 - ln#636) so it's easy to remove once we can (see prev note)
privateRouter.post(
'/conversations/:id/messages/delete',
asyncMiddleware(async (req: ChatRequest, res: Response) => {
const { botId } = req.params
const conversationId = req.params.id
const { userId } = req.body
const messaging = await db.getMessagingClient(botId)
const conversation = await messaging.getConversation(conversationId)
if (!userId || conversation?.userId !== userId) {
return res.status(400).send(ERR_BAD_CONV_ID)
}
const { visitorId } = await db.getMappingFromUser(userId)
bp.realtime.sendPayload(bp.RealTimePayload.forVisitor(visitorId, 'webchat.clear', { conversationId }))
await messaging.deleteMessagesByConversation(conversationId)
res.sendStatus(204)
})
)
}
| {
let conversation: Conversation
try {
conversation = await req.messaging.getConversation(conversationId)
} catch {}
if (!conversation || !userId || conversation.userId !== userId) {
return next(ERR_BAD_CONV_ID)
}
req.conversationId = conversationId
} | conditional_block |
4_traitsbounds_mistake1.rs | /* Writing a function which adds 2 to every element of vector and a function to multiply 2
to every element of the vector */
/* NOTES: OWNERSHIP AND BORROW RULES
1. Only one owner at a time
2. Only 1 active mutable borrow at a time
3. Every other borrow after a shared borrow should be a shared borrow
*/
trait Arith:Copy{
fn add(self, b: Self) -> Self;
fn mult(self, b: Self) -> Self;
fn print(self);
}
impl Arith for i32{
fn add(self, b: i32) -> i32{
self + b
}
fn mult(self, b: Self) -> Self{
self * b
}
fn print(self) {
println!("Val = {}", self);
} | }
fn vec_add<T: Arith>(vec: &mut Vec<T>){
for e in vec.iter_mut(){
/* e is of type &mut i32. But you can give it to print() which
expects i32 because rust derefs it implicitly */
e.print();
e.add(5);
}
}
fn main(){
println!("Hello World");
let mut vec: Vec<i32> = vec![1,2,3,4,5];
vec_add(&mut vec);
}
/*
What's the mistake with e.add(5) which is throwing below error. Isn't 'b:Self' of type i32 for the current example
<anon>:35:15: 35:16 error: mismatched types:
expected `T`,
found `_`
(expected type parameter,
found integral variable) [E0308]
<anon>:35 e.add(5);
ANS: Rust won't know the type of 'e' during compile time. It just knows that 'e' is of type 'T' and type 'T'
is implementing trait 'Arithmatic'.
It'll just compare : T.add(5) <--> fn add(val: T, b: T) because Self is implementors type
so you are comparing 5 and T here which is wrong
Practically, lets say you have implemented 'Arith' for 'f32'. e.add(5) cannot be correct for both i32 and f32
at the same time because 'f32' expects 'f32' as argument.
*/ | random_line_split | |
4_traitsbounds_mistake1.rs | /* Writing a function which adds 2 to every element of vector and a function to multiply 2
to every element of the vector */
/* NOTES: OWNERSHIP AND BORROW RULES
1. Only one owner at a time
2. Only 1 active mutable borrow at a time
3. Every other borrow after a shared borrow should be a shared borrow
*/
trait Arith:Copy{
fn add(self, b: Self) -> Self;
fn mult(self, b: Self) -> Self;
fn print(self);
}
impl Arith for i32{
fn | (self, b: i32) -> i32{
self + b
}
fn mult(self, b: Self) -> Self{
self * b
}
fn print(self) {
println!("Val = {}", self);
}
}
fn vec_add<T: Arith>(vec: &mut Vec<T>){
for e in vec.iter_mut(){
/* e is of type &mut i32. But you can give it to print() which
expects i32 because rust derefs it implicitly */
e.print();
e.add(5);
}
}
fn main(){
println!("Hello World");
let mut vec: Vec<i32> = vec![1,2,3,4,5];
vec_add(&mut vec);
}
/*
What's the mistake with e.add(5) which is throwing below error. Isn't 'b:Self' of type i32 for the current example
<anon>:35:15: 35:16 error: mismatched types:
expected `T`,
found `_`
(expected type parameter,
found integral variable) [E0308]
<anon>:35 e.add(5);
ANS: Rust won't know the type of 'e' during compile time. It just knows that 'e' is of type 'T' and type 'T'
is implementing trait 'Arithmatic'.
It'll just compare : T.add(5) <--> fn add(val: T, b: T) because Self is implementors type
so you are comparing 5 and T here which is wrong
Practically, lets say you have implemented 'Arith' for 'f32'. e.add(5) cannot be correct for both i32 and f32
at the same time because 'f32' expects 'f32' as argument.
*/
| add | identifier_name |
4_traitsbounds_mistake1.rs | /* Writing a function which adds 2 to every element of vector and a function to multiply 2
to every element of the vector */
/* NOTES: OWNERSHIP AND BORROW RULES
1. Only one owner at a time
2. Only 1 active mutable borrow at a time
3. Every other borrow after a shared borrow should be a shared borrow
*/
trait Arith:Copy{
fn add(self, b: Self) -> Self;
fn mult(self, b: Self) -> Self;
fn print(self);
}
impl Arith for i32{
fn add(self, b: i32) -> i32{
self + b
}
fn mult(self, b: Self) -> Self{
self * b
}
fn print(self) {
println!("Val = {}", self);
}
}
fn vec_add<T: Arith>(vec: &mut Vec<T>) |
fn main(){
println!("Hello World");
let mut vec: Vec<i32> = vec![1,2,3,4,5];
vec_add(&mut vec);
}
/*
What's the mistake with e.add(5) which is throwing below error. Isn't 'b:Self' of type i32 for the current example
<anon>:35:15: 35:16 error: mismatched types:
expected `T`,
found `_`
(expected type parameter,
found integral variable) [E0308]
<anon>:35 e.add(5);
ANS: Rust won't know the type of 'e' during compile time. It just knows that 'e' is of type 'T' and type 'T'
is implementing trait 'Arithmatic'.
It'll just compare : T.add(5) <--> fn add(val: T, b: T) because Self is implementors type
so you are comparing 5 and T here which is wrong
Practically, lets say you have implemented 'Arith' for 'f32'. e.add(5) cannot be correct for both i32 and f32
at the same time because 'f32' expects 'f32' as argument.
*/
| {
for e in vec.iter_mut(){
/* e is of type &mut i32. But you can give it to print() which
expects i32 because rust derefs it implicitly */
e.print();
e.add(5);
}
} | identifier_body |
001.js | {
"number": "001",
"name": "Al Fatihah",
"taud": "اعوذ بالله من الشيطان الرجيم",
"ayat": [
{
"arabic": "١. \tبِسْمِ اللَّهِ الرَّحْمَٰنِ الرَّحِيمِ",
"english": "1. \tIn the name of Allah, the Beneficent, the Merciful.",
"indonesian": "1. Dengan menyebut nama Allah Yang Maha Pemurah lagi Maha Penyayang.",
"japanese": "1. \u0026#24904;\u0026#24754;\u0026#12354;\u0026#12414;\u0026#12397;\u0026#12367;\u0026#24904;\u0026#24859;\u0026#28145;\u0026#12365;\u0026#12450;\u0026#12483;\u0026#12521;\u0026#12540;\u0026#12398;\u0026#24481;\u0026#21517;\u0026#12395;\u0026#12362;\u0026#12356;\u0026#12390;\u0026#12290;",
"korean": "1. 자비로우시고 자애로우신 하나님의 이름으로",
"chinese": "1. 奉至仁至慈的真主之名",
"tafsir": "١. لا يوجد",
"russian": "1. Во имя Аллаха, Милостивого, Милосердного!",
"spanish": "1. ¡En el nombre de Alá, el Compasivo, el Misericordioso!",
"french": "1. Au nom d\u0027Allah, le Tout Miséricordieux, le Très Miséricordieux."
},
{
"arabic": "٢. \tالْحَمْدُ لِلَّهِ رَبِّ الْعَالَمِينَ",
"english": "2. \tAll the praises and thanks be to Allah, the Lord of the \u0027Âlamîn (mankind, jinn and all that exists).",
"indonesian": "2. Segala puji bagi Allah, Tuhan semesta alam.",
"japanese": "2. \u0026#19975;\u0026#26377;\u0026#12398;\u0026#20027;\u0026#65292;\u0026#12450;\u0026#12483;\u0026#12521;\u0026#12540;\u0026#12395;\u0026#12371;\u0026#12381;\u0026#20961;\u0026#12390;\u0026#12398;\u0026#31216;\u0026#35715;\u0026#12354;\u0026#12428;\u0026#65292;",
"korean": "2. 온 우주의 주님이신 하나님께찬미를 드리나이다",
"chinese": "2. 一切贊頌,全歸真主,全世界的主,",
"tafsir": "٢. { الحمد لله } جملة خبرية قصد بها الثناء على الله بمضمونها من أنه تعالى : مالك لجميع الحمد من الخلق أو مستحق لأن يحمدوه، والله علم على المعبود بحق { ربِّ العالمين } أي مالك جميع الخلق من الإنس والجن والملائكة والدواب وغيرهم، وكل منها يُطلق عليه عالم، يقال عالم الإنس وعالم الجن إلى غير ذلك، وغلب في جمعه بالياء والنون أولي العلم على غيرهم، وهو من العلامة لأنه علامة على موجده.",
"russian": "2. Хвала Аллаху, Господу миров,",
"spanish": "2. Alabado sea Alá, Señor del universo,",
"french": "2. Louange à Allah, Seigneur de l\u0027univers."
},
{
"arabic": "٣. \tالرَّحْمَٰنِ الرَّحِيمِ",
"english": "3. \tThe Most Gracious, the Most Merciful",
"indonesian": "3. Maha Pemurah lagi Maha Penyayang.",
"japanese": "3. \u0026#24904;\u0026#24754;\u0026#12354;\u0026#12414;\u0026#12397;\u0026#12367;\u0026#24904;\u0026#24859;\u0026#28145;\u0026#12365;\u0026#24481;\u0026#26041;\u0026#65292;",
"korean": "3. 그분은 자애로우시고 자비로 우시며",
"chinese": "3. 至仁至慈的主,",
"tafsir": "٣. { الرحمن الرحيم } أي ذي الرحمة وهي إرادة الخير لأهله.",
"russian": "3. Милостивому, Милосердному,",
"spanish": "3. el Compasivo, el Misericordioso,",
"french": "3. Le Tout Miséricordieux, le Très Miséricordieux,"
},
{
"arabic": "٤. \tمَالِكِ يَوْمِ الدِّينِ",
"english": "4. \tThe Only Owner (and the Only Ruling Judge) of the Day of Recompense (i.e. the Day of Resurrection)",
"indonesian": "4. Yang menguasai di Hari Pembalasan.",
| "korean": "4. 심판의 날을 주관하시도다",
"chinese": "4. 報應日的主。",
"tafsir": "٤. أي الجزاء وهو يوم القيامة ، وخص بالذكر لأنه لا ملك ظاهرًا فيه لأحد إلا الله تعالى بدليل {لمن الملك اليوم؟ لله} ومن قرأ مالك فمعناه الأمر كله في يوم القيامة أو هو موصوف بذلك دائمًا {كغافر الذنب} فصح وقوعه صفة لمعرفة.",
"russian": "4. Властелину Дня воздаяния!",
"spanish": "4. Dueño del día del Juicio,",
"french": "4. Maître du Jour de la rétribution."
},
{
"arabic": "٥. \tإِيَّاكَ نَعْبُدُ وَإِيَّاكَ نَسْتَعِينُ",
"english": "5. \tYou (Alone) we worship, and You (Alone) we ask for help (for each and everything).",
"indonesian": "5. Hanya Engkaulah yang kami sembah, dan hanya kepada Engkaulah kami meminta pertolongan.",
"japanese": "5. \u0026#12431;\u0026#12383;\u0026#12375;\u0026#12383;\u0026#12385;\u0026#12399;\u0026#12354;\u0026#12394;\u0026#12383;\u0026#12395;\u0026#12398;\u0026#12415;\u0026#23815;\u0026#12417;\u0026#20181;\u0026#12360;\u0026#65292;\u0026#12354;\u0026#12394;\u0026#12383;\u0026#12395;\u0026#12398;\u0026#12415;\u0026#24481;\u0026#21161;\u0026#12369;\u0026#12434;\u0026#35531;\u0026#12356;\u0026#39000;\u0026#12358;\u0026#12290;",
"korean": "5. 우리는 당신만을 경배하오며 당신에게만 구원을 비노니",
"chinese": "5. 我們只崇拜你,只求你祐助,",
"tafsir": "٥. { إيَّاك نعبد وإياك نستعين } أي نخصك بالعبادة من توحيد وغيره ونطلب المعونة على العبادة وغيرها.",
"russian": "5. Тебе одному мы поклоняемся и Тебя одного молим о помощи.",
"spanish": "5. A Ti solo servimos y a Ti solo imploramos ayuda.",
"french": "5. C\u0027est Toi [Seul] que nous adorons, et c\u0027est Toi [Seul] dont nous implorons secours."
},
{
"arabic": "٦. \tاهْدِنَا الصِّرَاطَ الْمُسْتَقِيمَ",
"english": "6. \tGuide us to the Straight Way.",
"indonesian": "6. Tunjukilah kami jalan yang lurus,",
"japanese": "6. \u0026#12431;\u0026#12383;\u0026#12375;\u0026#12383;\u0026#12385;\u0026#12434;\u0026#27491;\u0026#12375;\u0026#12356;\u0026#36947;\u0026#12395;\u0026#23566;\u0026#12365;\u0026#12383;\u0026#12414;\u0026#12360;\u0026#65292;",
"korean": "6. 저희들을 올바른 길로 인도하여 주시옵소서",
"chinese": "6. 求你引導我們上正路,",
"tafsir": "٦. { اهدنا الصراط المستقيم } أي أرشدنا إليه، ويبدَل منه:",
"russian": "6. Веди нас прямым путем,",
"spanish": "6. Dirígenos por la vía recta,",
"french": "6. Guide-nous dans le droit chemin,"
},
{
"arabic": "٧. \tصِرَاطَ الَّذِينَ أَنْعَمْتَ عَلَيْهِمْ غَيْرِ الْمَغْضُوبِ عَلَيْهِمْ وَلَا الضَّالِّينَ",
"english": "7. \tThe Way of those on whom You have bestowed Your Grace, not (the way) of those who earned Your Anger (such as the Jews), nor of those who went astray (such as the Christians).",
"indonesian": "7. (yaitu) Jalan orang-orang yang telah Engkau beri nikmat kepada mereka; bukan (jalan) mereka yang dimurkai dan bukan (pula jalan) mereka yang sesat.",
"japanese": "7. \u0026#12354;\u0026#12394;\u0026#12383;\u0026#12364;\u0026#24481;\u0026#24693;\u0026#12415;\u0026#12434;\u0026#19979;\u0026#12373;\u0026#12428;\u0026#12383;\u0026#20154;\u0026#12293;\u0026#12398;\u0026#36947;\u0026#12395;\u0026#65292;\u0026#12354;\u0026#12394;\u0026#12383;\u0026#12398;\u0026#24594;\u0026#12426;\u0026#12434;\u0026#21463;\u0026#12369;\u0026#12375;\u0026#32773;\u0026#65292;\u0026#12414;\u0026#12383;\u0026#36367;\u0026#12415;\u0026#36855;\u0026#12360;\u0026#12427;\u0026#20154;\u0026#12293;\u0026#12398;\u0026#36947;\u0026#12391;\u0026#12399;\u0026#12394;\u0026#12367;\u0026#12290;",
"korean": "7. 그 길은 당신께서 축복을 내리신 길이며 노여움을 받은 자나방황하는 자들이 걷지않는 가장 올바른 길이옵니다",
"chinese": "7. 你所祐助者的路,不是受譴怒者的路,也不是迷誤者的路。",
"tafsir": "٧. { صراط الَّذين أنعمت عليهم } بالهداية ويبدل من الذين بصلته {غير المغضوب عليهم} وهم اليهود { ولا } وغير { الضالِّين } وهم النصارى ونكتة البدل إفادة أن المهتدين ليسوا يهوداً ولا نصارى . والله أعلم بالصواب، وإليه المرجع والمآب، وصلى الله على سيدنا محمد وعلى اَله وصحبه وسلم تسليما كثيراً دائما أبداً، وحسبنا الله ونعم الوكيل، ولا حول ولا قوة إلا بالله العلي العظيم.",
"russian": "7. путем тех, кого Ты облагодетельствовал, не тех, на кого пал гнев, и не заблудших.",
"spanish": "7. la vía de los que Tú has agraciado, no de los que han incurrido en la ira, ni de los extraviados.",
"french": "7. le chemin de ceux que Tu as comblés de faveurs, non pas de ceux qui ont encouru Ta colère, ni des égarés."
}
]
} | "japanese": "4. \u0026#26368;\u0026#24460;\u0026#12398;\u0026#23529;\u0026#12365;\u0026#12398;\u0026#26085;\u0026#12398;\u0026#20027;\u0026#23472;\u0026#32773;\u0026#12395;\u0026#12290;",
| random_line_split |
fn.rs | // Function that returns a boolean value
fn is_divisible_by(lhs: uint, rhs: uint) -> bool {
// Corner case, early return
if rhs == 0 {
return false; |
// This is an expression, the `return` keyword is not necessary here
lhs % rhs == 0
}
// Functions that "don't" return a value, actually return the unit type `()`
fn fizzbuzz(n: uint) -> () {
if is_divisible_by(n, 15) {
println!("fizzbuzz");
} else if is_divisible_by(n, 3) {
println!("fizz");
} else if is_divisible_by(n, 5) {
println!("buzz");
} else {
println!("{}", n);
}
}
// When a function returns `()`, the return type can be omitted from the
// signature
fn fizzbuzz_to(n: uint) {
for n in range(1, n + 1) {
fizzbuzz(n);
}
}
fn main() {
fizzbuzz_to(100);
} | } | random_line_split |
fn.rs | // Function that returns a boolean value
fn is_divisible_by(lhs: uint, rhs: uint) -> bool {
// Corner case, early return
if rhs == 0 {
return false;
}
// This is an expression, the `return` keyword is not necessary here
lhs % rhs == 0
}
// Functions that "don't" return a value, actually return the unit type `()`
fn fizzbuzz(n: uint) -> () {
if is_divisible_by(n, 15) {
println!("fizzbuzz");
} else if is_divisible_by(n, 3) {
println!("fizz");
} else if is_divisible_by(n, 5) {
println!("buzz");
} else {
println!("{}", n);
}
}
// When a function returns `()`, the return type can be omitted from the
// signature
fn | (n: uint) {
for n in range(1, n + 1) {
fizzbuzz(n);
}
}
fn main() {
fizzbuzz_to(100);
}
| fizzbuzz_to | identifier_name |
fn.rs | // Function that returns a boolean value
fn is_divisible_by(lhs: uint, rhs: uint) -> bool {
// Corner case, early return
if rhs == 0 {
return false;
}
// This is an expression, the `return` keyword is not necessary here
lhs % rhs == 0
}
// Functions that "don't" return a value, actually return the unit type `()`
fn fizzbuzz(n: uint) -> () {
if is_divisible_by(n, 15) {
println!("fizzbuzz");
} else if is_divisible_by(n, 3) {
println!("fizz");
} else if is_divisible_by(n, 5) {
println!("buzz");
} else |
}
// When a function returns `()`, the return type can be omitted from the
// signature
fn fizzbuzz_to(n: uint) {
for n in range(1, n + 1) {
fizzbuzz(n);
}
}
fn main() {
fizzbuzz_to(100);
}
| {
println!("{}", n);
} | conditional_block |
fn.rs | // Function that returns a boolean value
fn is_divisible_by(lhs: uint, rhs: uint) -> bool {
// Corner case, early return
if rhs == 0 {
return false;
}
// This is an expression, the `return` keyword is not necessary here
lhs % rhs == 0
}
// Functions that "don't" return a value, actually return the unit type `()`
fn fizzbuzz(n: uint) -> () {
if is_divisible_by(n, 15) {
println!("fizzbuzz");
} else if is_divisible_by(n, 3) {
println!("fizz");
} else if is_divisible_by(n, 5) {
println!("buzz");
} else {
println!("{}", n);
}
}
// When a function returns `()`, the return type can be omitted from the
// signature
fn fizzbuzz_to(n: uint) |
fn main() {
fizzbuzz_to(100);
}
| {
for n in range(1, n + 1) {
fizzbuzz(n);
}
} | identifier_body |
thisInInnerFunctions.js | //// [thisInInnerFunctions.ts]
class Foo {
x = "hello";
bar() {
function inner() {
this.y = "hi"; // 'this' should be not type to 'Foo' either
var f = () => this.y; // 'this' should be not type to 'Foo' either
}
}
}
function test() {
var x = () => {
(() => this)();
this;
};
}
//// [thisInInnerFunctions.js]
var Foo = (function () {
function Foo() {
this.x = "hello";
}
Foo.prototype.bar = function () {
function inner() {
var _this = this;
this.y = "hi"; // 'this' should be not type to 'Foo' either
var f = function () { return _this.y; }; // 'this' should be not type to 'Foo' either
}
};
return Foo;
}());
function test() {
var _this = this;
| } | var x = function () {
(function () { return _this; })();
_this;
};
| random_line_split |
thisInInnerFunctions.js | //// [thisInInnerFunctions.ts]
class | {
x = "hello";
bar() {
function inner() {
this.y = "hi"; // 'this' should be not type to 'Foo' either
var f = () => this.y; // 'this' should be not type to 'Foo' either
}
}
}
function test() {
var x = () => {
(() => this)();
this;
};
}
//// [thisInInnerFunctions.js]
var Foo = (function () {
function Foo() {
this.x = "hello";
}
Foo.prototype.bar = function () {
function inner() {
var _this = this;
this.y = "hi"; // 'this' should be not type to 'Foo' either
var f = function () { return _this.y; }; // 'this' should be not type to 'Foo' either
}
};
return Foo;
}());
function test() {
var _this = this;
var x = function () {
(function () { return _this; })();
_this;
};
}
| Foo | identifier_name |
thisInInnerFunctions.js | //// [thisInInnerFunctions.ts]
class Foo {
x = "hello";
bar() {
function inner() {
this.y = "hi"; // 'this' should be not type to 'Foo' either
var f = () => this.y; // 'this' should be not type to 'Foo' either
}
}
}
function test() {
var x = () => {
(() => this)();
this;
};
}
//// [thisInInnerFunctions.js]
var Foo = (function () {
function Foo() |
Foo.prototype.bar = function () {
function inner() {
var _this = this;
this.y = "hi"; // 'this' should be not type to 'Foo' either
var f = function () { return _this.y; }; // 'this' should be not type to 'Foo' either
}
};
return Foo;
}());
function test() {
var _this = this;
var x = function () {
(function () { return _this; })();
_this;
};
}
| {
this.x = "hello";
} | identifier_body |
routes.js | /**
* Created by andyf on 4/14/2017.
*/
//------------------------------------------//
//------------------ROUTES------------------//
//------------------------------------------//
var multer = require("multer");
var express = require("express");
var app = express();
var multer = require("multer");
var mongoose = require("mongoose");
var createFile = require("create-file");
var jsonfile = require("jsonfile");
var fs = require("fs");
var db = mongoose.connect('mongodb://localhost/it410database');
// Upload Photos
var create = function(file){
createFile('app/image-info/' + file.originalname + "id.json",
'{"name": "' + file.originalname + '", "imageUrl":"img/' + file.originalname + '", "imageId":"' + file.originalname + 'id"}',
function(err) {
console.log("error");
}
);
var filePlace = __dirname + '/' + 'images.json';
var object = ',{"name": "' + file.originalname + '", "imageUrl":"img/' + file.originalname + '", "imageId":"' + file.originalname + 'id"}]'
var obj = jsonfile.readFileSync(filePlace);
console.log(obj);
var newObj = JSON.stringify(obj);
fs.unlinkSync(filePlace);
var brandNew = newObj.replace(']', object);
console.log(brandNew);
jsonfile.writeFileSync(filePlace, brandNew);
};
var storage = multer.diskStorage({
destination: function (req, file, callback) {
console.log(file, req.body);
callback(null, 'app/img');
},
filename: function (req, file, callback) {
create(file);
callback(null, file.originalname);
}
});
var upload = multer({ storage : storage}).single('userPhoto');
var User = require('./models/user');
module.exports = function(app, passport){
app.get('/authenticate', function(req, res){
res.render('authenticate.ejs');
});
app.post('/api/photo',function(req,res) {
console.log("got here", req.body);
upload(req, res, function(err) {
if (err) {
console.log(err);
return res.end("Error uploading file");
}
res.end("File has uploaded");
});
});
app.get('/', function(req, res){
res.sendfile('app/index.html');
});
app.get('/admin', function(req, res){
if(req.user) {
if(req.user.local.admin || req.user.google.admin) {
res.sendfile('app/admin.html');
}
else
res.send('You are not authorized to view this page </br> <a href="/authenticate">Return Home</a>');
}
else
res.render('authenticate.ejs');
});
app.get('/checkAdmin', function(req, res) {
if (req.user.admin){
res.sendfile('app/admin.html');
}
else {
res.sendfile('app/index.html');
}
});
app.get('/admin', function(req,res) {
res.sendfile('app/admin.html');
});
app.get('/users', function(req,res){
res.json(req.user);
// Find some documents
// User.find(function(err, docs) {
// console.log("Found the following records");
// console.dir(docs);
// res.json(req.user);
// });
});
app.get('/login', function(req, res){
res.render('login.ejs', { message: req.flash('loginMessage') });
});
app.post('/login', passport.authenticate('local-login', {
successRedirect: '/checkAdmin',
failureRedirect: '/login',
failureFlash: true
}));
app.get('/signup', function(req, res){
res.render('signup.ejs', { message: req.flash('signupMessage') });
});
app.post('/signup', passport.authenticate('local-signup', {
successRedirect: '/',
failureRedirect: '/signup',
failureFlash: true
}));
app.get('/profile', isLoggedIn, function(req, res){
res.render('profile.ejs', { user: req.user });
});
app.get('/auth/facebook', passport.authenticate('facebook', {scope: ['email']}));
app.get('/auth/facebook/callback',
passport.authenticate('facebook', { successRedirect: '/checkAdmin',
failureRedirect: '/' }));
app.get('/auth/google', passport.authenticate('google', {scope: ['profile', 'email']}));
app.get('/auth/google/callback',
passport.authenticate('google', { successRedirect: '/checkAdmin',
failureRedirect: '/' }));
app.get('/logout', function(req, res){
req.logout();
res.redirect('/authenticate');
})
};
function | (req, res, next) {
if(req.isAuthenticated()){
return next();
}
res.redirect('/login');
}
| isLoggedIn | identifier_name |
routes.js | /**
* Created by andyf on 4/14/2017.
*/
//------------------------------------------//
//------------------ROUTES------------------//
//------------------------------------------//
var multer = require("multer");
var express = require("express");
var app = express();
var multer = require("multer");
var mongoose = require("mongoose");
var createFile = require("create-file");
var jsonfile = require("jsonfile");
var fs = require("fs");
var db = mongoose.connect('mongodb://localhost/it410database');
// Upload Photos
var create = function(file){
createFile('app/image-info/' + file.originalname + "id.json",
'{"name": "' + file.originalname + '", "imageUrl":"img/' + file.originalname + '", "imageId":"' + file.originalname + 'id"}',
function(err) {
console.log("error");
}
);
var filePlace = __dirname + '/' + 'images.json';
var object = ',{"name": "' + file.originalname + '", "imageUrl":"img/' + file.originalname + '", "imageId":"' + file.originalname + 'id"}]'
var obj = jsonfile.readFileSync(filePlace);
console.log(obj);
var newObj = JSON.stringify(obj);
fs.unlinkSync(filePlace);
var brandNew = newObj.replace(']', object);
console.log(brandNew);
jsonfile.writeFileSync(filePlace, brandNew);
};
var storage = multer.diskStorage({
destination: function (req, file, callback) {
console.log(file, req.body);
callback(null, 'app/img');
},
filename: function (req, file, callback) {
create(file);
callback(null, file.originalname);
}
});
var upload = multer({ storage : storage}).single('userPhoto');
var User = require('./models/user');
module.exports = function(app, passport){
app.get('/authenticate', function(req, res){
res.render('authenticate.ejs');
});
app.post('/api/photo',function(req,res) {
console.log("got here", req.body);
upload(req, res, function(err) {
if (err) {
console.log(err);
return res.end("Error uploading file");
}
res.end("File has uploaded");
});
});
app.get('/', function(req, res){
res.sendfile('app/index.html');
});
app.get('/admin', function(req, res){
if(req.user) {
if(req.user.local.admin || req.user.google.admin) {
res.sendfile('app/admin.html');
}
else
res.send('You are not authorized to view this page </br> <a href="/authenticate">Return Home</a>');
}
else
res.render('authenticate.ejs');
});
app.get('/checkAdmin', function(req, res) {
if (req.user.admin){
res.sendfile('app/admin.html');
}
else |
});
app.get('/admin', function(req,res) {
res.sendfile('app/admin.html');
});
app.get('/users', function(req,res){
res.json(req.user);
// Find some documents
// User.find(function(err, docs) {
// console.log("Found the following records");
// console.dir(docs);
// res.json(req.user);
// });
});
app.get('/login', function(req, res){
res.render('login.ejs', { message: req.flash('loginMessage') });
});
app.post('/login', passport.authenticate('local-login', {
successRedirect: '/checkAdmin',
failureRedirect: '/login',
failureFlash: true
}));
app.get('/signup', function(req, res){
res.render('signup.ejs', { message: req.flash('signupMessage') });
});
app.post('/signup', passport.authenticate('local-signup', {
successRedirect: '/',
failureRedirect: '/signup',
failureFlash: true
}));
app.get('/profile', isLoggedIn, function(req, res){
res.render('profile.ejs', { user: req.user });
});
app.get('/auth/facebook', passport.authenticate('facebook', {scope: ['email']}));
app.get('/auth/facebook/callback',
passport.authenticate('facebook', { successRedirect: '/checkAdmin',
failureRedirect: '/' }));
app.get('/auth/google', passport.authenticate('google', {scope: ['profile', 'email']}));
app.get('/auth/google/callback',
passport.authenticate('google', { successRedirect: '/checkAdmin',
failureRedirect: '/' }));
app.get('/logout', function(req, res){
req.logout();
res.redirect('/authenticate');
})
};
function isLoggedIn(req, res, next) {
if(req.isAuthenticated()){
return next();
}
res.redirect('/login');
}
| {
res.sendfile('app/index.html');
} | conditional_block |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.