hexsha stringlengths 40 40 | size int64 2 1.02M | ext stringclasses 10
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 4 245 | max_stars_repo_name stringlengths 6 130 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 245 | max_issues_repo_name stringlengths 6 130 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 245 | max_forks_repo_name stringlengths 6 130 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 2 1.02M | avg_line_length float64 1 417k | max_line_length int64 1 987k | alphanum_fraction float64 0 1 | content_no_comment stringlengths 0 1.01M | is_comment_constant_removed bool 1
class | is_sharp_comment_removed bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
f71c29769d72428dea8c648a9ffeb523dd34dd62 | 6,207 | py | Python | examples/mountaincar.py | knollsen/agent57 | 7d3d7890f8eb3a1420945e0b7bcb85ad87459167 | [
"MIT"
] | 1 | 2021-02-24T08:22:35.000Z | 2021-02-24T08:22:35.000Z | examples/mountaincar.py | knollsen/agent57 | 7d3d7890f8eb3a1420945e0b7bcb85ad87459167 | [
"MIT"
] | null | null | null | examples/mountaincar.py | knollsen/agent57 | 7d3d7890f8eb3a1420945e0b7bcb85ad87459167 | [
"MIT"
] | null | null | null | import gym
from keras.optimizers import Adam
import traceback
import os
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), '../'))
from agent.agent57 import ActorUser
from agent.policy import EpsilonGreedy, AnnealingEpsilonGreedy
from agent.memory import PERRankBaseMemory, PERProportionalMemory
from agent.model import InputType, LstmType, UvfaType
from agent.model import ValueModel
from agent.common import seed_everything
from agent.callbacks import LoggerType
from agent.main_runner import run_gym_dqn, run_play, run_replay, run_gym_agent57
MINUTES_OF_TRAINING = 60
seed_everything(42)
ENV_NAME = "MountainCar-v0"
episode_save_dir = "tmp_{}.".format(ENV_NAME)
def create_parameter(env, nb_steps):
kwargs = {
"input_shape": env.observation_space.shape,
"input_type": InputType.VALUES,
"input_model": None,
"nb_actions": env.action_space.n,
"memory": "PERRankBaseMemory",
"memory_kwargs": {
"capacity": 100_000,
"alpha": 1.0, # PERの確率反映率
"beta_initial": 0.0, # IS反映率の初期値(1.0が最大)
"beta_steps": nb_steps, # IS反映率の上昇step数
"enable_is": True, # ISを有効にするかどうか
},
"optimizer_ext": Adam(lr=0.001),
"optimizer_int": Adam(lr=0.001),
"optimizer_rnd": Adam(lr=0.005),
"optimizer_emb": Adam(lr=0.005),
# NN
"batch_size": 16, # batch_size
"input_sequence": 4, # 入力フレーム数
"dense_units_num": 64, # dense層のユニット数
"enable_dueling_network": True,
"lstm_type": LstmType.STATELESS, # 使用するLSTMアルゴリズム
"lstm_units_num": 64, # LSTMのユニット数
"lstmful_input_length": 2, # ステートフルLSTMの入力数
# train
"memory_warmup_size": 1000, # 初期のメモリー確保用step数(学習しない)
"target_model_update_interval": 2000, # target networkのupdate間隔
"enable_double_dqn": True,
"enable_rescaling": False, # rescalingを有効にするか
"burnin_length": 0, # burn-in期間
"reward_multisteps": 3, # multistep reward
"demo_memory": "PERProportionalMemory",
"demo_memory_kwargs": {
"capacity": 100_000,
"alpha": 0.8,
},
"demo_episode_dir": episode_save_dir,
"demo_ratio_initial": 1.0,
"demo_ratio_final": 1.0/512.0,
"demo_ratio_steps": nb_steps,
"episode_memory": "PERProportionalMemory",
"episode_memory_kwargs": {
"capacity": 1000,
"alpha": 0.8,
},
"episode_ratio": 1.0/32.0,
# intrinsic_reward
"policy_num": 8,
"ucb_epsilon": 0.5,
"ucb_window_size": 50,
"gamma0": 0.999,
"gamma1": 0.99,
"gamma2": 0.9,
"enable_intrinsic_actval_model": False,
"beta_max": 0.3,
"uvfa_ext": [
#UvfaType.ACTION,
#UvfaType.REWARD_EXT,
#UvfaType.REWARD_INT,
#UvfaType.POLICY,
],
"uvfa_int": [
UvfaType.ACTION,
UvfaType.REWARD_EXT,
UvfaType.REWARD_INT,
UvfaType.POLICY,
],
# other
"step_interval": 1,
"enable_add_episode_end_frame": False,
}
return kwargs
#---------------------------------------------------------
def run_dqn(enable_train):
env = gym.make(ENV_NAME)
# ゲーム情報
print("action_space : " + str(env.action_space))
print("observation_space : " + str(env.observation_space))
print("reward_range : " + str(env.reward_range))
nb_steps = 100_000
kwargs = create_parameter(env, nb_steps)
kwargs["action_policy"] = AnnealingEpsilonGreedy(
initial_epsilon=0.5, # 初期ε
final_epsilon=0.01, # 最終状態でのε
exploration_steps=10_000 # 初期→最終状態になるまでのステップ数
)
#kwargs["action_policy"] = EpsilonGreedy(0.1)
run_gym_dqn(
enable_train,
env,
ENV_NAME,
kwargs,
nb_steps=nb_steps,
nb_time=MINUTES_OF_TRAINING*60,
logger_type=LoggerType.STEP,
log_interval=1000,
test_env=env,
movie_save=False,
)
env.close()
#---------------------------------------------------------
class MyActor(ActorUser):
@staticmethod
def allocate(actor_index, actor_num):
return "/device:CPU:0"
def getPolicy(self, actor_index, actor_num):
return EpsilonGreedy(0.1)
def fit(self, index, agent):
env = gym.make(ENV_NAME)
agent.fit(env, visualize=False, verbose=0)
env.close()
class MyActor1(MyActor):
def getPolicy(self, actor_index, actor_num):
return EpsilonGreedy(0.01)
class MyActor2(MyActor):
def getPolicy(self, actor_index, actor_num):
return EpsilonGreedy(0.1)
def create_env():
return gym.make(ENV_NAME)
def run_agent57(enable_train):
env = gym.make(ENV_NAME)
# ゲーム情報
print("action_space : " + str(env.action_space))
print("observation_space : " + str(env.observation_space))
print("reward_range : " + str(env.reward_range))
nb_trains = 100_000
kwargs = create_parameter(env, nb_trains)
kwargs["actors"] = [MyActor1]
kwargs["sync_actor_model_interval"] = 50 # learner から model を同期する間隔
run_gym_agent57(
enable_train,
env,
ENV_NAME,
kwargs,
nb_trains=nb_trains,
nb_time=MINUTES_OF_TRAINING*60,
logger_type=LoggerType.STEP,
log_interval=1000,
test_env=create_env,
is_load_weights=False,
movie_save=False,
)
env.close()
#----------------------
if __name__ == '__main__':
# エピソードを作成、保存
if False:
env = gym.make(ENV_NAME)
kwargs = create_parameter(env, 0)
run_play(env, episode_save_dir, kwargs["processor"])
# エピソードを再生(確認用)
if False:
run_replay(episode_save_dir)
# SingleActorレーニング
if False:
run_dqn(enable_train=True)
#run_dqn(enable_train=False) # test only
# 複数Actorレーニング
if True:
run_agent57(enable_train=True)
#run_agent57(enable_train=False) # test only
| 27.343612 | 80 | 0.595779 | import gym
from keras.optimizers import Adam
import traceback
import os
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), '../'))
from agent.agent57 import ActorUser
from agent.policy import EpsilonGreedy, AnnealingEpsilonGreedy
from agent.memory import PERRankBaseMemory, PERProportionalMemory
from agent.model import InputType, LstmType, UvfaType
from agent.model import ValueModel
from agent.common import seed_everything
from agent.callbacks import LoggerType
from agent.main_runner import run_gym_dqn, run_play, run_replay, run_gym_agent57
MINUTES_OF_TRAINING = 60
seed_everything(42)
ENV_NAME = "MountainCar-v0"
episode_save_dir = "tmp_{}.".format(ENV_NAME)
def create_parameter(env, nb_steps):
kwargs = {
"input_shape": env.observation_space.shape,
"input_type": InputType.VALUES,
"input_model": None,
"nb_actions": env.action_space.n,
"memory": "PERRankBaseMemory",
"memory_kwargs": {
"capacity": 100_000,
"alpha": 1.0,
"beta_initial": 0.0,
"beta_steps": nb_steps,
"enable_is": True,
},
"optimizer_ext": Adam(lr=0.001),
"optimizer_int": Adam(lr=0.001),
"optimizer_rnd": Adam(lr=0.005),
"optimizer_emb": Adam(lr=0.005),
"batch_size": 16,
"input_sequence": 4,
"dense_units_num": 64,
"enable_dueling_network": True,
"lstm_type": LstmType.STATELESS,
"lstm_units_num": 64,
"lstmful_input_length": 2,
"memory_warmup_size": 1000,
"target_model_update_interval": 2000,
"enable_double_dqn": True,
"enable_rescaling": False,
"burnin_length": 0,
"reward_multisteps": 3,
"demo_memory": "PERProportionalMemory",
"demo_memory_kwargs": {
"capacity": 100_000,
"alpha": 0.8,
},
"demo_episode_dir": episode_save_dir,
"demo_ratio_initial": 1.0,
"demo_ratio_final": 1.0/512.0,
"demo_ratio_steps": nb_steps,
"episode_memory": "PERProportionalMemory",
"episode_memory_kwargs": {
"capacity": 1000,
"alpha": 0.8,
},
"episode_ratio": 1.0/32.0,
"policy_num": 8,
"ucb_epsilon": 0.5,
"ucb_window_size": 50,
"gamma0": 0.999,
"gamma1": 0.99,
"gamma2": 0.9,
"enable_intrinsic_actval_model": False,
"beta_max": 0.3,
"uvfa_ext": [
],
"uvfa_int": [
UvfaType.ACTION,
UvfaType.REWARD_EXT,
UvfaType.REWARD_INT,
UvfaType.POLICY,
],
"step_interval": 1,
"enable_add_episode_end_frame": False,
}
return kwargs
def run_dqn(enable_train):
env = gym.make(ENV_NAME)
print("action_space : " + str(env.action_space))
print("observation_space : " + str(env.observation_space))
print("reward_range : " + str(env.reward_range))
nb_steps = 100_000
kwargs = create_parameter(env, nb_steps)
kwargs["action_policy"] = AnnealingEpsilonGreedy(
initial_epsilon=0.5,
final_epsilon=0.01,
exploration_steps=10_000
)
run_gym_dqn(
enable_train,
env,
ENV_NAME,
kwargs,
nb_steps=nb_steps,
nb_time=MINUTES_OF_TRAINING*60,
logger_type=LoggerType.STEP,
log_interval=1000,
test_env=env,
movie_save=False,
)
env.close()
class MyActor(ActorUser):
@staticmethod
def allocate(actor_index, actor_num):
return "/device:CPU:0"
def getPolicy(self, actor_index, actor_num):
return EpsilonGreedy(0.1)
def fit(self, index, agent):
env = gym.make(ENV_NAME)
agent.fit(env, visualize=False, verbose=0)
env.close()
class MyActor1(MyActor):
def getPolicy(self, actor_index, actor_num):
return EpsilonGreedy(0.01)
class MyActor2(MyActor):
def getPolicy(self, actor_index, actor_num):
return EpsilonGreedy(0.1)
def create_env():
return gym.make(ENV_NAME)
def run_agent57(enable_train):
env = gym.make(ENV_NAME)
print("action_space : " + str(env.action_space))
print("observation_space : " + str(env.observation_space))
print("reward_range : " + str(env.reward_range))
nb_trains = 100_000
kwargs = create_parameter(env, nb_trains)
kwargs["actors"] = [MyActor1]
kwargs["sync_actor_model_interval"] = 50
run_gym_agent57(
enable_train,
env,
ENV_NAME,
kwargs,
nb_trains=nb_trains,
nb_time=MINUTES_OF_TRAINING*60,
logger_type=LoggerType.STEP,
log_interval=1000,
test_env=create_env,
is_load_weights=False,
movie_save=False,
)
env.close()
if __name__ == '__main__':
if False:
env = gym.make(ENV_NAME)
kwargs = create_parameter(env, 0)
run_play(env, episode_save_dir, kwargs["processor"])
if False:
run_replay(episode_save_dir)
if False:
run_dqn(enable_train=True)
if True:
run_agent57(enable_train=True)
| true | true |
f71c2a25b344a3cfa098ae966121ffc6edd9ccd3 | 4,074 | py | Python | train.py | artemkurylev/Context-Aware_Crowd_Counting-pytorch | d68ddd87b99f2afc512357cb8fcb0ca41ea22865 | [
"MIT"
] | 71 | 2019-05-21T02:10:46.000Z | 2022-03-17T05:19:35.000Z | train.py | GuoleiSun/Context-Aware_Crowd_Counting-pytorch | d68ddd87b99f2afc512357cb8fcb0ca41ea22865 | [
"MIT"
] | 28 | 2019-05-14T01:32:21.000Z | 2021-07-06T08:39:10.000Z | train.py | GuoleiSun/Context-Aware_Crowd_Counting-pytorch | d68ddd87b99f2afc512357cb8fcb0ca41ea22865 | [
"MIT"
] | 22 | 2019-05-22T09:38:51.000Z | 2022-03-22T23:31:25.000Z | import numpy as np
import time
import torch
import torch.nn as nn
import os
import visdom
import random
from tqdm import tqdm as tqdm
from cannet import CANNet
from my_dataset import CrowdDataset
if __name__=="__main__":
# configuration
train_image_root='./data/Shanghai_part_A/train_data/images'
train_dmap_root='./data/Shanghai_part_A/train_data/ground_truth'
test_image_root='./data/Shanghai_part_A/test_data/images'
test_dmap_root='./data/Shanghai_part_A/test_data/ground_truth'
gpu_or_cpu='cuda' # use cuda or cpu
lr = 1e-7
batch_size = 1
momentum = 0.95
epochs = 20000
steps = [-1,1,100,150]
scales = [1,1,1,1]
workers = 4
seed = time.time()
print_freq = 30
vis=visdom.Visdom()
device=torch.device(gpu_or_cpu)
torch.cuda.manual_seed(seed)
model=CANNet().to(device)
criterion=nn.MSELoss(size_average=False).to(device)
optimizer=torch.optim.SGD(model.parameters(),lr,
momentum=momentum,
weight_decay=0)
# optimizer=torch.optim.Adam(model.parameters(),lr)
train_dataset=CrowdDataset(train_image_root,train_dmap_root,gt_downsample=8,phase='train')
train_loader=torch.utils.data.DataLoader(train_dataset,batch_size=1,shuffle=True)
test_dataset=CrowdDataset(test_image_root,test_dmap_root,gt_downsample=8,phase='test')
test_loader=torch.utils.data.DataLoader(test_dataset,batch_size=1,shuffle=False)
if not os.path.exists('./checkpoints'):
os.mkdir('./checkpoints')
min_mae=10000
min_epoch=0
train_loss_list=[]
epoch_list=[]
test_error_list=[]
for epoch in range(0,epochs):
# training phase
model.train()
epoch_loss=0
for i,(img,gt_dmap) in enumerate(tqdm(train_loader)):
img=img.to(device)
gt_dmap=gt_dmap.to(device)
# forward propagation
et_dmap=model(img)
# calculate loss
loss=criterion(et_dmap,gt_dmap)
epoch_loss+=loss.item()
optimizer.zero_grad()
loss.backward()
optimizer.step()
# print("epoch:",epoch,"loss:",epoch_loss/len(dataloader))
epoch_list.append(epoch)
train_loss_list.append(epoch_loss/len(train_loader))
torch.save(model.state_dict(),'./checkpoints/epoch_'+str(epoch)+".pth")
# testing phase
model.eval()
mae=0
for i,(img,gt_dmap) in enumerate(tqdm(test_loader)):
img=img.to(device)
gt_dmap=gt_dmap.to(device)
# forward propagation
et_dmap=model(img)
mae+=abs(et_dmap.data.sum()-gt_dmap.data.sum()).item()
del img,gt_dmap,et_dmap
if mae/len(test_loader)<min_mae:
min_mae=mae/len(test_loader)
min_epoch=epoch
test_error_list.append(mae/len(test_loader))
print("epoch:"+str(epoch)+" error:"+str(mae/len(test_loader))+" min_mae:"+str(min_mae)+" min_epoch:"+str(min_epoch))
vis.line(win=1,X=epoch_list, Y=train_loss_list, opts=dict(title='train_loss'))
vis.line(win=2,X=epoch_list, Y=test_error_list, opts=dict(title='test_error'))
# show an image
index=random.randint(0,len(test_loader)-1)
img,gt_dmap=test_dataset[index]
vis.image(win=3,img=img,opts=dict(title='img'))
vis.image(win=4,img=gt_dmap/(gt_dmap.max())*255,opts=dict(title='gt_dmap('+str(gt_dmap.sum())+')'))
img=img.unsqueeze(0).to(device)
gt_dmap=gt_dmap.unsqueeze(0)
et_dmap=model(img)
et_dmap=et_dmap.squeeze(0).detach().cpu().numpy()
vis.image(win=5,img=et_dmap/(et_dmap.max())*255,opts=dict(title='et_dmap('+str(et_dmap.sum())+')'))
import time
print(time.strftime('%Y.%m.%d %H:%M:%S',time.localtime(time.time())))
| 32.854839 | 124 | 0.60972 | import numpy as np
import time
import torch
import torch.nn as nn
import os
import visdom
import random
from tqdm import tqdm as tqdm
from cannet import CANNet
from my_dataset import CrowdDataset
if __name__=="__main__":
train_image_root='./data/Shanghai_part_A/train_data/images'
train_dmap_root='./data/Shanghai_part_A/train_data/ground_truth'
test_image_root='./data/Shanghai_part_A/test_data/images'
test_dmap_root='./data/Shanghai_part_A/test_data/ground_truth'
gpu_or_cpu='cuda'
lr = 1e-7
batch_size = 1
momentum = 0.95
epochs = 20000
steps = [-1,1,100,150]
scales = [1,1,1,1]
workers = 4
seed = time.time()
print_freq = 30
vis=visdom.Visdom()
device=torch.device(gpu_or_cpu)
torch.cuda.manual_seed(seed)
model=CANNet().to(device)
criterion=nn.MSELoss(size_average=False).to(device)
optimizer=torch.optim.SGD(model.parameters(),lr,
momentum=momentum,
weight_decay=0)
train_dataset=CrowdDataset(train_image_root,train_dmap_root,gt_downsample=8,phase='train')
train_loader=torch.utils.data.DataLoader(train_dataset,batch_size=1,shuffle=True)
test_dataset=CrowdDataset(test_image_root,test_dmap_root,gt_downsample=8,phase='test')
test_loader=torch.utils.data.DataLoader(test_dataset,batch_size=1,shuffle=False)
if not os.path.exists('./checkpoints'):
os.mkdir('./checkpoints')
min_mae=10000
min_epoch=0
train_loss_list=[]
epoch_list=[]
test_error_list=[]
for epoch in range(0,epochs):
model.train()
epoch_loss=0
for i,(img,gt_dmap) in enumerate(tqdm(train_loader)):
img=img.to(device)
gt_dmap=gt_dmap.to(device)
et_dmap=model(img)
loss=criterion(et_dmap,gt_dmap)
epoch_loss+=loss.item()
optimizer.zero_grad()
loss.backward()
optimizer.step()
epoch_list.append(epoch)
train_loss_list.append(epoch_loss/len(train_loader))
torch.save(model.state_dict(),'./checkpoints/epoch_'+str(epoch)+".pth")
model.eval()
mae=0
for i,(img,gt_dmap) in enumerate(tqdm(test_loader)):
img=img.to(device)
gt_dmap=gt_dmap.to(device)
et_dmap=model(img)
mae+=abs(et_dmap.data.sum()-gt_dmap.data.sum()).item()
del img,gt_dmap,et_dmap
if mae/len(test_loader)<min_mae:
min_mae=mae/len(test_loader)
min_epoch=epoch
test_error_list.append(mae/len(test_loader))
print("epoch:"+str(epoch)+" error:"+str(mae/len(test_loader))+" min_mae:"+str(min_mae)+" min_epoch:"+str(min_epoch))
vis.line(win=1,X=epoch_list, Y=train_loss_list, opts=dict(title='train_loss'))
vis.line(win=2,X=epoch_list, Y=test_error_list, opts=dict(title='test_error'))
index=random.randint(0,len(test_loader)-1)
img,gt_dmap=test_dataset[index]
vis.image(win=3,img=img,opts=dict(title='img'))
vis.image(win=4,img=gt_dmap/(gt_dmap.max())*255,opts=dict(title='gt_dmap('+str(gt_dmap.sum())+')'))
img=img.unsqueeze(0).to(device)
gt_dmap=gt_dmap.unsqueeze(0)
et_dmap=model(img)
et_dmap=et_dmap.squeeze(0).detach().cpu().numpy()
vis.image(win=5,img=et_dmap/(et_dmap.max())*255,opts=dict(title='et_dmap('+str(et_dmap.sum())+')'))
import time
print(time.strftime('%Y.%m.%d %H:%M:%S',time.localtime(time.time())))
| true | true |
f71c2aa0bf024d0c142df184c4a0782128c38601 | 579 | py | Python | tests/test_ext_indicator_cycles.py | ryanrussell/pandas-ta | 720bbcf0196d363d621beeced8fba711990d075d | [
"MIT"
] | 2,298 | 2019-02-20T18:38:18.000Z | 2022-03-31T07:45:50.000Z | tests/test_ext_indicator_cycles.py | ryanrussell/pandas-ta | 720bbcf0196d363d621beeced8fba711990d075d | [
"MIT"
] | 451 | 2019-02-26T00:50:02.000Z | 2022-03-31T03:17:39.000Z | tests/test_ext_indicator_cycles.py | ryanrussell/pandas-ta | 720bbcf0196d363d621beeced8fba711990d075d | [
"MIT"
] | 579 | 2019-03-19T01:53:03.000Z | 2022-03-31T11:13:50.000Z | from pandas.core.series import Series
from .config import sample_data
from .context import pandas_ta
from unittest import TestCase
from pandas import DataFrame
class TestCylesExtension(TestCase):
@classmethod
def setUpClass(cls):
cls.data = sample_data
@classmethod
def tearDownClass(cls):
del cls.data
def setUp(self): pass
def tearDown(self): pass
def test_ebsw_ext(self):
self.data.ta.ebsw(append=True)
self.assertIsInstance(self.data, DataFrame)
self.assertEqual(self.data.columns[-1], "EBSW_40_10")
| 22.269231 | 61 | 0.70639 | from pandas.core.series import Series
from .config import sample_data
from .context import pandas_ta
from unittest import TestCase
from pandas import DataFrame
class TestCylesExtension(TestCase):
@classmethod
def setUpClass(cls):
cls.data = sample_data
@classmethod
def tearDownClass(cls):
del cls.data
def setUp(self): pass
def tearDown(self): pass
def test_ebsw_ext(self):
self.data.ta.ebsw(append=True)
self.assertIsInstance(self.data, DataFrame)
self.assertEqual(self.data.columns[-1], "EBSW_40_10")
| true | true |
f71c2bcacd1a2766209c84a64e9d8e489810169f | 274 | py | Python | buildscripts/condarecipe.hsa/run_test.py | ehsantn/numba | 4749ef7ccc630b7f649ec972497bc5b7fca79303 | [
"BSD-2-Clause",
"MIT"
] | 4 | 2017-06-30T14:22:30.000Z | 2021-01-11T16:47:23.000Z | buildscripts/condarecipe.hsa/run_test.py | ehsantn/numba | 4749ef7ccc630b7f649ec972497bc5b7fca79303 | [
"BSD-2-Clause",
"MIT"
] | 1 | 2018-04-03T22:37:40.000Z | 2018-04-03T23:53:43.000Z | buildscripts/condarecipe.hsa/run_test.py | ehsantn/numba | 4749ef7ccc630b7f649ec972497bc5b7fca79303 | [
"BSD-2-Clause",
"MIT"
] | null | null | null | import sys
import numba
args = []
if sys.platform.startswith('win32'):
args += ['-b']
else:
args += ['-m', '-b']
args += ['numba.tests']
if not numba.runtests.main(*args):
print("Test failed")
sys.exit(1)
print('numba.__version__: %s' % numba.__version__)
| 18.266667 | 50 | 0.613139 | import sys
import numba
args = []
if sys.platform.startswith('win32'):
args += ['-b']
else:
args += ['-m', '-b']
args += ['numba.tests']
if not numba.runtests.main(*args):
print("Test failed")
sys.exit(1)
print('numba.__version__: %s' % numba.__version__)
| true | true |
f71c2c1d589bfaed486a8c50f486a53a358a0d09 | 59,359 | py | Python | python/ccxt/async_support/timex.py | sandutsar/ccxt | f27c187fa1626a6c261c6fa5caaae89cb657461d | [
"MIT"
] | null | null | null | python/ccxt/async_support/timex.py | sandutsar/ccxt | f27c187fa1626a6c261c6fa5caaae89cb657461d | [
"MIT"
] | null | null | null | python/ccxt/async_support/timex.py | sandutsar/ccxt | f27c187fa1626a6c261c6fa5caaae89cb657461d | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# PLEASE DO NOT EDIT THIS FILE, IT IS GENERATED AND WILL BE OVERWRITTEN:
# https://github.com/ccxt/ccxt/blob/master/CONTRIBUTING.md#how-to-contribute-code
from ccxt.async_support.base.exchange import Exchange
from ccxt.base.errors import ExchangeError
from ccxt.base.errors import AuthenticationError
from ccxt.base.errors import PermissionDenied
from ccxt.base.errors import BadRequest
from ccxt.base.errors import InsufficientFunds
from ccxt.base.errors import InvalidOrder
from ccxt.base.errors import OrderNotFound
from ccxt.base.errors import NotSupported
from ccxt.base.errors import RateLimitExceeded
from ccxt.base.errors import ExchangeNotAvailable
from ccxt.base.decimal_to_precision import TICK_SIZE
from ccxt.base.precise import Precise
class timex(Exchange):
def describe(self):
return self.deep_extend(super(timex, self).describe(), {
'id': 'timex',
'name': 'TimeX',
'countries': ['AU'],
'version': 'v1',
'rateLimit': 1500,
'has': {
'CORS': None,
'spot': True,
'margin': False,
'swap': False,
'future': False,
'option': False,
'addMargin': False,
'cancelOrder': True,
'cancelOrders': True,
'createOrder': True,
'createReduceOnlyOrder': False,
'createStopLimitOrder': False,
'createStopMarketOrder': False,
'createStopOrder': False,
'editOrder': True,
'fetchBalance': True,
'fetchBorrowRate': False,
'fetchBorrowRateHistories': False,
'fetchBorrowRateHistory': False,
'fetchBorrowRates': False,
'fetchBorrowRatesPerSymbol': False,
'fetchClosedOrders': True,
'fetchCurrencies': True,
'fetchFundingHistory': False,
'fetchFundingRate': False,
'fetchFundingRateHistory': False,
'fetchFundingRates': False,
'fetchIndexOHLCV': False,
'fetchLeverage': False,
'fetchLeverageTiers': False,
'fetchMarkets': True,
'fetchMarkOHLCV': False,
'fetchMyTrades': True,
'fetchOHLCV': True,
'fetchOpenInterestHistory': False,
'fetchOpenOrders': True,
'fetchOrder': True,
'fetchOrderBook': True,
'fetchPosition': False,
'fetchPositions': False,
'fetchPositionsRisk': False,
'fetchPremiumIndexOHLCV': False,
'fetchTicker': True,
'fetchTickers': True,
'fetchTrades': True,
'fetchTradingFee': True, # maker fee only
'reduceMargin': False,
'setLeverage': False,
'setMarginMode': False,
'setPositionMode': False,
},
'timeframes': {
'1m': 'I1',
'5m': 'I5',
'15m': 'I15',
'30m': 'I30',
'1h': 'H1',
'2h': 'H2',
'4h': 'H4',
'6h': 'H6',
'12h': 'H12',
'1d': 'D1',
'1w': 'W1',
},
'urls': {
'logo': 'https://user-images.githubusercontent.com/1294454/70423869-6839ab00-1a7f-11ea-8f94-13ae72c31115.jpg',
'api': 'https://plasma-relay-backend.timex.io',
'www': 'https://timex.io',
'doc': 'https://docs.timex.io',
'referral': 'https://timex.io/?refcode=1x27vNkTbP1uwkCck',
},
'api': {
'custody': {
'get': [
'credentials', # Get api key for address
'credentials/h/{hash}', # Get api key by hash
'credentials/k/{key}', # Get api key by key
'credentials/me/address', # Get api key by hash
'deposit-addresses', # Get deposit addresses list
'deposit-addresses/h/{hash}', # Get deposit address by hash
],
},
'history': {
'get': [
'orders', # Gets historical orders
'orders/details', # Gets order details
'orders/export/csv', # Export orders to csv
'trades', # Gets historical trades
'trades/export/csv', # Export trades to csv
],
},
'currencies': {
'get': [
'a/{address}', # Gets currency by address
'i/{id}', # Gets currency by id
's/{symbol}', # Gets currency by symbol
],
'post': [
'perform', # Creates new currency
'prepare', # Prepare creates new currency
'remove/perform', # Removes currency by symbol
's/{symbol}/remove/prepare', # Prepare remove currency by symbol
's/{symbol}/update/perform', # Prepare update currency by symbol
's/{symbol}/update/prepare', # Prepare update currency by symbol
],
},
'markets': {
'get': [
'i/{id}', # Gets market by id
's/{symbol}', # Gets market by symbol
],
'post': [
'perform', # Creates new market
'prepare', # Prepare creates new market
'remove/perform', # Removes market by symbol
's/{symbol}/remove/prepare', # Prepare remove market by symbol
's/{symbol}/update/perform', # Prepare update market by symbol
's/{symbol}/update/prepare', # Prepare update market by symbol
],
},
'public': {
'get': [
'candles', # Gets candles
'currencies', # Gets all the currencies
'markets', # Gets all the markets
'orderbook', # Gets orderbook
'orderbook/raw', # Gets raw orderbook
'orderbook/v2', # Gets orderbook v2
'tickers', # Gets all the tickers
'trades', # Gets trades
],
},
'statistics': {
'get': [
'address', # calculateAddressStatistics
],
},
'trading': {
'get': [
'balances', # Get trading balances for all(or selected) currencies
'fees', # Get trading fee rates for all(or selected) markets
'orders', # Gets open orders
],
'post': [
'orders', # Create new order
'orders/json', # Create orders
],
'put': [
'orders', # Cancel or update orders
'orders/json', # Update orders
],
'delete': [
'orders', # Delete orders
'orders/json', # Delete orders
],
},
'tradingview': {
'get': [
'config', # Gets config
'history', # Gets history
'symbol_info', # Gets symbol info
'time', # Gets time
],
},
},
'precisionMode': TICK_SIZE,
'exceptions': {
'exact': {
'0': ExchangeError,
'1': NotSupported,
'4000': BadRequest,
'4001': BadRequest,
'4002': InsufficientFunds,
'4003': AuthenticationError,
'4004': AuthenticationError,
'4005': BadRequest,
'4006': BadRequest,
'4007': BadRequest,
'4300': PermissionDenied,
'4100': AuthenticationError,
'4400': OrderNotFound,
'5001': InvalidOrder,
'5002': ExchangeError,
'400': BadRequest,
'401': AuthenticationError,
'403': PermissionDenied,
'404': OrderNotFound,
'429': RateLimitExceeded,
'500': ExchangeError,
'503': ExchangeNotAvailable,
},
'broad': {
'Insufficient': InsufficientFunds,
},
},
'options': {
'expireIn': 31536000, # 365 × 24 × 60 × 60
'fetchTickers': {
'period': '1d',
},
'fetchTrades': {
'sort': 'timestamp,asc',
},
'fetchMyTrades': {
'sort': 'timestamp,asc',
},
'fetchOpenOrders': {
'sort': 'createdAt,asc',
},
'fetchClosedOrders': {
'sort': 'createdAt,asc',
},
'defaultSort': 'timestamp,asc',
'defaultSortOrders': 'createdAt,asc',
},
})
async def fetch_markets(self, params={}):
"""
retrieves data on all markets for timex
:param dict params: extra parameters specific to the exchange api endpoint
:returns [dict]: an array of objects representing market data
"""
response = await self.publicGetMarkets(params)
#
# [
# {
# "symbol": "ETHBTC",
# "name": "ETH/BTC",
# "baseCurrency": "ETH",
# "baseTokenAddress": "0x45932db54b38af1f5a57136302eeba66a5975c15",
# "quoteCurrency": "BTC",
# "quoteTokenAddress": "0x8370fbc6ddec1e18b4e41e72ed943e238458487c",
# "feeCurrency": "BTC",
# "feeTokenAddress": "0x8370fbc6ddec1e18b4e41e72ed943e238458487c",
# "quantityIncrement": "0.0000001",
# "takerFee": "0.005",
# "makerFee": "0.0025",
# "tickSize": "0.00000001",
# "baseMinSize": "0.0001",
# "quoteMinSize": "0.00001",
# "locked": False
# }
# ]
#
result = []
for i in range(0, len(response)):
result.append(self.parse_market(response[i]))
return result
async def fetch_currencies(self, params={}):
"""
fetches all available currencies on an exchange
:param dict params: extra parameters specific to the timex api endpoint
:returns dict: an associative dictionary of currencies
"""
response = await self.publicGetCurrencies(params)
#
# [
# {
# "symbol": "BTC",
# "name": "Bitcoin",
# "address": "0x8370fbc6ddec1e18b4e41e72ed943e238458487c",
# "icon": "data:image/svg+xml;base64,PHN2ZyB3aWR0aD0iNjAiIGhlaWdodD0iNjAiIHZpZXdCb3g9IjAgMCA2MCA2MCIgZmlsbD0ibm9uZSIgeG1sbnM9Imh0dHA6Ly93d3cudzMub3JnLzIwMDAvc3ZnIj4KPHBhdGggb3BhY2l0eT0iMC41IiBmaWxsLXJ1bGU9ImV2ZW5vZGQiIGNsaXAtcnVsZT0iZXZlbm9kZCIgZD0iTTMwIDUzQzQyLjcwMjUgNTMgNTMgNDIuNzAyNSA1MyAzMEM1MyAxNy4yOTc1IDQyLjcwMjUgNyAzMCA3QzE3LjI5NzUgNyA3IDE3LjI5NzUgNyAzMEM3IDQyLjcwMjUgMTcuMjk3NSA1MyAzMCA1M1pNMzAgNTVDNDMuODA3MSA1NSA1NSA0My44MDcxIDU1IDMwQzU1IDE2LjE5MjkgNDMuODA3MSA1IDMwIDVDMTYuMTkyOSA1IDUgMTYuMTkyOSA1IDMwQzUgNDMuODA3MSAxNi4xOTI5IDU1IDMwIDU1WiIvPgo8cGF0aCBkPSJNNDAuOTQyNSAyNi42NTg1QzQxLjQwMDMgMjMuNjExMyAzOS4wNzA1IDIxLjk3MzIgMzUuODg0OCAyMC44ODA0TDM2LjkxODIgMTYuNzUyNkwzNC4zOTUxIDE2LjEyNjRMMzMuMzg5IDIwLjE0NTVDMzIuNzI1OCAxOS45ODA5IDMyLjA0NDUgMTkuODI1NiAzMS4zNjc1IDE5LjY3MTdMMzIuMzgwOCAxNS42MjYyTDI5Ljg1OTEgMTVMMjguODI1IDE5LjEyNjRDMjguMjc2IDE5LjAwMTkgMjcuNzM3IDE4Ljg3ODggMjcuMjEzOSAxOC43NDkzTDI3LjIxNjggMTguNzM2NEwyMy43MzcyIDE3Ljg3MTJMMjMuMDY2IDIwLjU1NDhDMjMuMDY2IDIwLjU1NDggMjQuOTM4IDIwLjk4MjEgMjQuODk4NSAyMS4wMDg1QzI1LjkyMDQgMjEuMjYyNiAyNi4xMDUgMjEuOTM2IDI2LjA3NDEgMjIuNDY5OUwyNC44OTcgMjcuMTcyNEMyNC45Njc1IDI3LjE5MDMgMjUuMDU4NyAyNy4yMTYgMjUuMTU5MyAyNy4yNTYxQzI1LjA3NTMgMjcuMjM1NCAyNC45ODU0IDI3LjIxMjQgMjQuODkyNyAyNy4xOTAzTDIzLjI0MjggMzMuNzc3OEMyMy4xMTc3IDM0LjA4NjkgMjIuODAwOCAzNC41NTA2IDIyLjA4NjUgMzQuMzc0NkMyMi4xMTE3IDM0LjQxMTEgMjAuMjUyNiAzMy45MTg3IDIwLjI1MjYgMzMuOTE4N0wxOSAzNi43OTQ5TDIyLjI4MzQgMzcuNjFDMjIuODk0MiAzNy43NjI0IDIzLjQ5MjggMzcuOTIyIDI0LjA4MjEgMzguMDcyM0wyMy4wMzggNDIuMjQ3NEwyNS41NTgyIDQyLjg3MzZMMjYuNTkyMyAzOC43NDI5QzI3LjI4MDcgMzguOTI5IDI3Ljk0OSAzOS4xMDA3IDI4LjYwMyAzOS4yNjI0TDI3LjU3MjUgNDMuMzczOEwzMC4wOTU2IDQ0TDMxLjEzOTcgMzkuODMyOEMzNS40NDIyIDQwLjY0MzYgMzguNjc3NCA0MC4zMTY2IDQwLjAzOTIgMzYuNDQxNEM0MS4xMzY1IDMzLjMyMTIgMzkuOTg0NiAzMS41MjEzIDM3LjcyMDkgMzAuMzQ3N0MzOS4zNjk0IDI5Ljk2OTEgNDAuNjExMiAyOC44ODkyIDQwLjk0MjUgMjYuNjU4NVYyNi42NTg1Wk0zNS4xNzc3IDM0LjcwODhDMzQuMzk4IDM3LjgyOSAyOS4xMjI2IDM2LjE0MjIgMjcuNDEyMiAzNS43MTkzTDI4Ljc5NzcgMzAuMTg4MUMzMC41MDgxIDMwLjYxMzIgMzUuOTkyNiAzMS4
0NTQ4IDM1LjE3NzcgMzQuNzA4OFpNMzUuOTU4MSAyNi42MTM0QzM1LjI0NjcgMjkuNDUxNyAzMC44NTU5IDI4LjAwOTcgMjkuNDMxNiAyNy42NTYxTDMwLjY4NzcgMjIuNjM5NUMzMi4xMTIgMjIuOTkzIDM2LjY5OSAyMy42NTI4IDM1Ljk1ODEgMjYuNjEzNFoiLz4KPC9zdmc+Cg==",
# "background": "transparent",
# "fiatSymbol": "BTC",
# "decimals": 8,
# "tradeDecimals": 20,
# "displayDecimals": 4,
# "crypto": True,
# "depositEnabled": True,
# "withdrawalEnabled": True,
# "transferEnabled": True,
# "buyEnabled": False,
# "purchaseEnabled": False,
# "redeemEnabled": False,
# "active": True,
# "withdrawalFee": "50000000000000000",
# "purchaseCommissions": []
# },
# ]
#
result = []
for i in range(0, len(response)):
currency = response[i]
result.append(self.parse_currency(currency))
return self.index_by(result, 'code')
async def fetch_tickers(self, symbols=None, params={}):
"""
fetches price tickers for multiple markets, statistical calculations with the information calculated over the past 24 hours each market
:param [str]|None symbols: unified symbols of the markets to fetch the ticker for, all market tickers are returned if not assigned
:param dict params: extra parameters specific to the timex api endpoint
:returns dict: an array of `ticker structures <https://docs.ccxt.com/en/latest/manual.html#ticker-structure>`
"""
await self.load_markets()
period = self.safe_string(self.options['fetchTickers'], 'period', '1d')
request = {
'period': self.timeframes[period], # I1, I5, I15, I30, H1, H2, H4, H6, H12, D1, W1
}
response = await self.publicGetTickers(self.extend(request, params))
#
# [
# {
# "ask": 0.017,
# "bid": 0.016,
# "high": 0.019,
# "last": 0.017,
# "low": 0.015,
# "market": "TIME/ETH",
# "open": 0.016,
# "period": "H1",
# "timestamp": "2018-12-14T20:50:36.134Z",
# "volume": 4.57,
# "volumeQuote": 0.07312
# }
# ]
#
return self.parse_tickers(response, symbols)
async def fetch_ticker(self, symbol, params={}):
"""
fetches a price ticker, a statistical calculation with the information calculated over the past 24 hours for a specific market
:param str symbol: unified symbol of the market to fetch the ticker for
:param dict params: extra parameters specific to the timex api endpoint
:returns dict: a `ticker structure <https://docs.ccxt.com/en/latest/manual.html#ticker-structure>`
"""
await self.load_markets()
market = self.market(symbol)
period = self.safe_string(self.options['fetchTickers'], 'period', '1d')
request = {
'market': market['id'],
'period': self.timeframes[period], # I1, I5, I15, I30, H1, H2, H4, H6, H12, D1, W1
}
response = await self.publicGetTickers(self.extend(request, params))
#
# [
# {
# "ask": 0.017,
# "bid": 0.016,
# "high": 0.019,
# "last": 0.017,
# "low": 0.015,
# "market": "TIME/ETH",
# "open": 0.016,
# "period": "H1",
# "timestamp": "2018-12-14T20:50:36.134Z",
# "volume": 4.57,
# "volumeQuote": 0.07312
# }
# ]
#
ticker = self.safe_value(response, 0)
return self.parse_ticker(ticker, market)
async def fetch_order_book(self, symbol, limit=None, params={}):
"""
fetches information on open orders with bid(buy) and ask(sell) prices, volumes and other data
:param str symbol: unified symbol of the market to fetch the order book for
:param int|None limit: the maximum amount of order book entries to return
:param dict params: extra parameters specific to the timex api endpoint
:returns dict: A dictionary of `order book structures <https://docs.ccxt.com/en/latest/manual.html#order-book-structure>` indexed by market symbols
"""
await self.load_markets()
market = self.market(symbol)
request = {
'market': market['id'],
}
if limit is not None:
request['limit'] = limit
response = await self.publicGetOrderbookV2(self.extend(request, params))
#
# {
# "timestamp":"2019-12-05T00:21:09.538",
# "bid":[
# {
# "index":"2",
# "price":"0.02024007",
# "baseTokenAmount":"0.0096894",
# "baseTokenCumulativeAmount":"0.0096894",
# "quoteTokenAmount":"0.000196114134258",
# "quoteTokenCumulativeAmount":"0.000196114134258"
# },
# "ask":[
# {
# "index":"-3",
# "price":"0.02024012",
# "baseTokenAmount":"0.005",
# "baseTokenCumulativeAmount":"0.005",
# "quoteTokenAmount":"0.0001012006",
# "quoteTokenCumulativeAmount":"0.0001012006"
# },
# ]
# }
#
timestamp = self.parse8601(self.safe_string(response, 'timestamp'))
return self.parse_order_book(response, symbol, timestamp, 'bid', 'ask', 'price', 'baseTokenAmount')
async def fetch_trades(self, symbol, since=None, limit=None, params={}):
"""
get the list of most recent trades for a particular symbol
:param str symbol: unified symbol of the market to fetch trades for
:param int|None since: timestamp in ms of the earliest trade to fetch
:param int|None limit: the maximum amount of trades to fetch
:param dict params: extra parameters specific to the timex api endpoint
:returns [dict]: a list of `trade structures <https://docs.ccxt.com/en/latest/manual.html?#public-trades>`
"""
await self.load_markets()
market = self.market(symbol)
options = self.safe_value(self.options, 'fetchTrades', {})
defaultSort = self.safe_value(options, 'sort', 'timestamp,asc')
sort = self.safe_string(params, 'sort', defaultSort)
query = self.omit(params, 'sort')
request = {
# 'address': 'string', # trade’s member account(?)
# 'cursor': 1234, # int64(?)
# 'from': self.iso8601(since),
'market': market['id'],
# 'page': 0, # results page you want to retrieve 0 .. N
# 'size': limit, # number of records per page, 100 by default
'sort': sort, # array[string], sorting criteria in the format "property,asc" or "property,desc", default is ascending
# 'till': self.iso8601(self.milliseconds()),
}
if since is not None:
request['from'] = self.iso8601(since)
if limit is not None:
request['size'] = limit # default is 100
response = await self.publicGetTrades(self.extend(request, query))
#
# [
# {
# "id":1,
# "timestamp":"2019-06-25T17:01:50.309",
# "direction":"BUY",
# "price":"0.027",
# "quantity":"0.001"
# }
# ]
#
return self.parse_trades(response, market, since, limit)
async def fetch_ohlcv(self, symbol, timeframe='1m', since=None, limit=None, params={}):
"""
fetches historical candlestick data containing the open, high, low, and close price, and the volume of a market
:param str symbol: unified symbol of the market to fetch OHLCV data for
:param str timeframe: the length of time each candle represents
:param int|None since: timestamp in ms of the earliest candle to fetch
:param int|None limit: the maximum amount of candles to fetch
:param dict params: extra parameters specific to the timex api endpoint
:returns [[int]]: A list of candles ordered as timestamp, open, high, low, close, volume
"""
await self.load_markets()
market = self.market(symbol)
request = {
'market': market['id'],
'period': self.timeframes[timeframe],
}
# if since and limit are not specified
duration = self.parse_timeframe(timeframe)
if since is not None:
request['from'] = self.iso8601(since)
if limit is not None:
request['till'] = self.iso8601(self.sum(since, self.sum(limit, 1) * duration * 1000))
elif limit is not None:
now = self.milliseconds()
request['till'] = self.iso8601(now)
request['from'] = self.iso8601(now - limit * duration * 1000 - 1)
else:
request['till'] = self.iso8601(self.milliseconds())
response = await self.publicGetCandles(self.extend(request, params))
#
# [
# {
# "timestamp":"2019-12-04T23:00:00",
# "open":"0.02024009",
# "high":"0.02024009",
# "low":"0.02024009",
# "close":"0.02024009",
# "volume":"0.00008096036",
# "volumeQuote":"0.004",
# },
# ]
#
return self.parse_ohlcvs(response, market, timeframe, since, limit)
def parse_balance(self, response):
result = {
'info': response,
'timestamp': None,
'datetime': None,
}
for i in range(0, len(response)):
balance = response[i]
currencyId = self.safe_string(balance, 'currency')
code = self.safe_currency_code(currencyId)
account = self.account()
account['total'] = self.safe_string(balance, 'totalBalance')
account['used'] = self.safe_string(balance, 'lockedBalance')
result[code] = account
return self.safe_balance(result)
async def fetch_balance(self, params={}):
"""
query for balance and get the amount of funds available for trading or funds locked in orders
:param dict params: extra parameters specific to the timex api endpoint
:returns dict: a `balance structure <https://docs.ccxt.com/en/latest/manual.html?#balance-structure>`
"""
await self.load_markets()
response = await self.tradingGetBalances(params)
#
# [
# {"currency":"BTC","totalBalance":"0","lockedBalance":"0"},
# {"currency":"AUDT","totalBalance":"0","lockedBalance":"0"},
# {"currency":"ETH","totalBalance":"0","lockedBalance":"0"},
# {"currency":"TIME","totalBalance":"0","lockedBalance":"0"},
# {"currency":"USDT","totalBalance":"0","lockedBalance":"0"}
# ]
#
return self.parse_balance(response)
    async def create_order(self, symbol, type, side, amount, price=None, params={}):
        """
        create a trade order
        :param str symbol: unified symbol of the market to create an order in
        :param str type: 'market' or 'limit'
        :param str side: 'buy' or 'sell'
        :param float amount: how much of currency you want to trade in units of base currency
        :param float|None price: the price at which the order is to be fullfilled, in units of the quote currency, ignored in market orders
        :param dict params: extra parameters specific to the timex api endpoint
        :returns dict: an `order structure <https://docs.ccxt.com/en/latest/manual.html#order-structure>`
        """
        await self.load_markets()
        market = self.market(symbol)
        uppercaseSide = side.upper()
        uppercaseType = type.upper()
        # post-only is a distinct order type on timex, not a flag on a limit order
        postOnly = self.safe_value(params, 'postOnly', False)
        if postOnly:
            uppercaseType = 'POST_ONLY'
            params = self.omit(params, ['postOnly'])
        request = {
            'symbol': market['id'],
            'quantity': self.amount_to_precision(symbol, amount),
            'side': uppercaseSide,
            'orderTypes': uppercaseType,  # note: the API key is plural 'orderTypes'
            # 'clientOrderId': '123',
            # 'expireIn': 1575523308,  # in seconds
            # 'expireTime': 1575523308,  # unix timestamp
        }
        query = params
        if (uppercaseType == 'LIMIT') or (uppercaseType == 'POST_ONLY'):
            request['price'] = self.price_to_precision(symbol, price)
            # limit/post-only orders must carry an expiration, either an absolute
            # expireTime or a relative expireIn(seconds); the latter may also be
            # configured exchange-wide via options['expireIn']
            defaultExpireIn = self.safe_integer(self.options, 'expireIn')
            expireTime = self.safe_value(params, 'expireTime')
            expireIn = self.safe_value(params, 'expireIn', defaultExpireIn)
            if expireTime is not None:
                request['expireTime'] = expireTime
            elif expireIn is not None:
                request['expireIn'] = expireIn
            else:
                raise InvalidOrder(self.id + ' createOrder() method requires a expireTime or expireIn param for a ' + type + ' order, you can also set the expireIn exchange-wide option')
            query = self.omit(params, ['expireTime', 'expireIn'])
        else:
            # market orders are submitted with a zero price
            request['price'] = 0
        response = await self.tradingPostOrders(self.extend(request, query))
        #
        # {
        #     "orders": [
        #         {
        #             "cancelledQuantity": "0.3",
        #             "clientOrderId": "my-order-1",
        #             "createdAt": "1970-01-01T00:00:00",
        #             "cursorId": 50,
        #             "expireTime": "1970-01-01T00:00:00",
        #             "filledQuantity": "0.3",
        #             "id": "string",
        #             "price": "0.017",
        #             "quantity": "0.3",
        #             "side": "BUY",
        #             "symbol": "TIMEETH",
        #             "type": "LIMIT",
        #             "updatedAt": "1970-01-01T00:00:00"
        #         }
        #     ]
        # }
        #
        # a single created order comes back as the first element of 'orders'
        orders = self.safe_value(response, 'orders', [])
        order = self.safe_value(orders, 0, {})
        return self.parse_order(order, market)
async def edit_order(self, id, symbol, type, side, amount=None, price=None, params={}):
await self.load_markets()
market = self.market(symbol)
request = {
'id': id,
}
if amount is not None:
request['quantity'] = self.amount_to_precision(symbol, amount)
if price is not None:
request['price'] = self.price_to_precision(symbol, price)
response = await self.tradingPutOrders(self.extend(request, params))
#
# {
# "changedOrders": [
# {
# "newOrder": {
# "cancelledQuantity": "0.3",
# "clientOrderId": "my-order-1",
# "createdAt": "1970-01-01T00:00:00",
# "cursorId": 50,
# "expireTime": "1970-01-01T00:00:00",
# "filledQuantity": "0.3",
# "id": "string",
# "price": "0.017",
# "quantity": "0.3",
# "side": "BUY",
# "symbol": "TIMEETH",
# "type": "LIMIT",
# "updatedAt": "1970-01-01T00:00:00"
# },
# "oldId": "string",
# },
# ],
# "unchangedOrders": ["string"],
# }
#
if 'unchangedOrders' in response:
orderIds = self.safe_value(response, 'unchangedOrders', [])
orderId = self.safe_string(orderIds, 0)
return {
'id': orderId,
'info': response,
}
orders = self.safe_value(response, 'changedOrders', [])
firstOrder = self.safe_value(orders, 0, {})
order = self.safe_value(firstOrder, 'newOrder', {})
return self.parse_order(order, market)
async def cancel_order(self, id, symbol=None, params={}):
"""
cancels an open order
:param str id: order id
:param str|None symbol: not used by timex cancelOrder()
:param dict params: extra parameters specific to the timex api endpoint
:returns dict: An `order structure <https://docs.ccxt.com/en/latest/manual.html#order-structure>`
"""
await self.load_markets()
return await self.cancel_orders([id], symbol, params)
async def cancel_orders(self, ids, symbol=None, params={}):
"""
cancel multiple orders
:param [str] ids: order ids
:param str|None symbol: unified market symbol, default is None
:param dict params: extra parameters specific to the timex api endpoint
:returns dict: an list of `order structures <https://docs.ccxt.com/en/latest/manual.html#order-structure>`
"""
await self.load_markets()
request = {
'id': ids,
}
response = await self.tradingDeleteOrders(self.extend(request, params))
#
# {
# "changedOrders": [
# {
# "newOrder": {
# "cancelledQuantity": "0.3",
# "clientOrderId": "my-order-1",
# "createdAt": "1970-01-01T00:00:00",
# "cursorId": 50,
# "expireTime": "1970-01-01T00:00:00",
# "filledQuantity": "0.3",
# "id": "string",
# "price": "0.017",
# "quantity": "0.3",
# "side": "BUY",
# "symbol": "TIMEETH",
# "type": "LIMIT",
# "updatedAt": "1970-01-01T00:00:00"
# },
# "oldId": "string",
# },
# ],
# "unchangedOrders": ["string"],
# }
return response
async def fetch_order(self, id, symbol=None, params={}):
"""
fetches information on an order made by the user
:param str|None symbol: not used by timex fetchOrder
:param dict params: extra parameters specific to the timex api endpoint
:returns dict: An `order structure <https://docs.ccxt.com/en/latest/manual.html#order-structure>`
"""
await self.load_markets()
request = {
'orderHash': id,
}
response = await self.historyGetOrdersDetails(request)
#
# {
# "order": {
# "cancelledQuantity": "0.3",
# "clientOrderId": "my-order-1",
# "createdAt": "1970-01-01T00:00:00",
# "cursorId": 50,
# "expireTime": "1970-01-01T00:00:00",
# "filledQuantity": "0.3",
# "id": "string",
# "price": "0.017",
# "quantity": "0.3",
# "side": "BUY",
# "symbol": "TIMEETH",
# "type": "LIMIT",
# "updatedAt": "1970-01-01T00:00:00"
# },
# "trades": [
# {
# "fee": "0.3",
# "id": 100,
# "makerOrTaker": "MAKER",
# "makerOrderId": "string",
# "price": "0.017",
# "quantity": "0.3",
# "side": "BUY",
# "symbol": "TIMEETH",
# "takerOrderId": "string",
# "timestamp": "2019-12-05T07:48:26.310Z"
# }
# ]
# }
#
order = self.safe_value(response, 'order', {})
trades = self.safe_value(response, 'trades', [])
return self.parse_order(self.extend(order, {'trades': trades}))
async def fetch_open_orders(self, symbol=None, since=None, limit=None, params={}):
"""
fetch all unfilled currently open orders
:param str|None symbol: unified market symbol
:param int|None since: the earliest time in ms to fetch open orders for
:param int|None limit: the maximum number of open orders structures to retrieve
:param dict params: extra parameters specific to the timex api endpoint
:returns [dict]: a list of `order structures <https://docs.ccxt.com/en/latest/manual.html#order-structure>`
"""
await self.load_markets()
options = self.safe_value(self.options, 'fetchOpenOrders', {})
defaultSort = self.safe_value(options, 'sort', 'createdAt,asc')
sort = self.safe_string(params, 'sort', defaultSort)
query = self.omit(params, 'sort')
request = {
# 'clientOrderId': '123', # order’s client id list for filter
# page: 0, # results page you want to retrieve(0 .. N)
'sort': sort, # sorting criteria in the format "property,asc" or "property,desc", default order is ascending, multiple sort criteria are supported
}
market = None
if symbol is not None:
market = self.market(symbol)
request['symbol'] = market['id']
if limit is not None:
request['size'] = limit
response = await self.tradingGetOrders(self.extend(request, query))
#
# {
# "orders": [
# {
# "cancelledQuantity": "0.3",
# "clientOrderId": "my-order-1",
# "createdAt": "1970-01-01T00:00:00",
# "cursorId": 50,
# "expireTime": "1970-01-01T00:00:00",
# "filledQuantity": "0.3",
# "id": "string",
# "price": "0.017",
# "quantity": "0.3",
# "side": "BUY",
# "symbol": "TIMEETH",
# "type": "LIMIT",
# "updatedAt": "1970-01-01T00:00:00"
# }
# ]
# }
#
orders = self.safe_value(response, 'orders', [])
return self.parse_orders(orders, market, since, limit)
    async def fetch_closed_orders(self, symbol=None, since=None, limit=None, params={}):
        """
        fetches information on multiple closed orders made by the user
        :param str|None symbol: unified market symbol of the market orders were made in
        :param int|None since: the earliest time in ms to fetch orders for
        :param int|None limit: the maximum number of orde structures to retrieve
        :param dict params: extra parameters specific to the timex api endpoint
        :returns [dict]: a list of `order structures <https://docs.ccxt.com/en/latest/manual.html#order-structure>`
        """
        await self.load_markets()
        options = self.safe_value(self.options, 'fetchClosedOrders', {})
        defaultSort = self.safe_value(options, 'sort', 'createdAt,asc')
        sort = self.safe_string(params, 'sort', defaultSort)
        query = self.omit(params, 'sort')
        request = {
            # 'clientOrderId': '123', # order’s client id list for filter
            # page: 0, # results page you want to retrieve(0 .. N)
            'sort': sort, # sorting criteria in the format "property,asc" or "property,desc", default order is ascending, multiple sort criteria are supported
            # NOTE(review): 'side' is hardcoded to 'BUY', so by default only buy
            # orders are returned; callers can override it via params['side'],
            # which is merged into the request below - confirm this is intended
            'side': 'BUY', # or 'SELL'
            # 'till': self.iso8601(self.milliseconds()),
        }
        market = None
        if symbol is not None:
            market = self.market(symbol)
            request['symbol'] = market['id']
        if since is not None:
            request['from'] = self.iso8601(since)
        if limit is not None:
            request['size'] = limit
        response = await self.historyGetOrders(self.extend(request, query))
        #
        # {
        #     "orders": [
        #         {
        #             "cancelledQuantity": "0.3",
        #             "clientOrderId": "my-order-1",
        #             "createdAt": "1970-01-01T00:00:00",
        #             "cursorId": 50,
        #             "expireTime": "1970-01-01T00:00:00",
        #             "filledQuantity": "0.3",
        #             "id": "string",
        #             "price": "0.017",
        #             "quantity": "0.3",
        #             "side": "BUY",
        #             "symbol": "TIMEETH",
        #             "type": "LIMIT",
        #             "updatedAt": "1970-01-01T00:00:00"
        #         }
        #     ]
        # }
        #
        orders = self.safe_value(response, 'orders', [])
        return self.parse_orders(orders, market, since, limit)
async def fetch_my_trades(self, symbol=None, since=None, limit=None, params={}):
"""
fetch all trades made by the user
:param str|None symbol: unified market symbol
:param int|None since: the earliest time in ms to fetch trades for
:param int|None limit: the maximum number of trades structures to retrieve
:param dict params: extra parameters specific to the timex api endpoint
:returns [dict]: a list of `trade structures <https://docs.ccxt.com/en/latest/manual.html#trade-structure>`
"""
await self.load_markets()
options = self.safe_value(self.options, 'fetchMyTrades', {})
defaultSort = self.safe_value(options, 'sort', 'timestamp,asc')
sort = self.safe_string(params, 'sort', defaultSort)
query = self.omit(params, 'sort')
request = {
# 'cursorId': 123, # int64(?)
# 'from': self.iso8601(since),
# 'makerOrderId': '1234', # maker order hash
# 'owner': '...', # owner address(?)
# 'page': 0, # results page you want to retrieve(0 .. N)
# 'side': 'BUY', # or 'SELL'
# 'size': limit,
'sort': sort, # sorting criteria in the format "property,asc" or "property,desc", default order is ascending, multiple sort criteria are supported
# 'symbol': market['id'],
# 'takerOrderId': '1234',
# 'till': self.iso8601(self.milliseconds()),
}
market = None
if symbol is not None:
market = self.market(symbol)
request['symbol'] = market['id']
if since is not None:
request['from'] = self.iso8601(since)
if limit is not None:
request['size'] = limit
response = await self.historyGetTrades(self.extend(request, query))
#
# {
# "trades": [
# {
# "fee": "0.3",
# "id": 100,
# "makerOrTaker": "MAKER",
# "makerOrderId": "string",
# "price": "0.017",
# "quantity": "0.3",
# "side": "BUY",
# "symbol": "TIMEETH",
# "takerOrderId": "string",
# "timestamp": "2019-12-08T04:54:11.171Z"
# }
# ]
# }
#
trades = self.safe_value(response, 'trades', [])
return self.parse_trades(trades, market, since, limit)
def parse_trading_fee(self, fee, market=None):
#
# {
# "fee": 0.0075,
# "market": "ETHBTC"
# }
#
marketId = self.safe_string(fee, 'market')
rate = self.safe_number(fee, 'fee')
return {
'info': fee,
'symbol': self.safe_symbol(marketId, market),
'maker': rate,
'taker': rate,
}
async def fetch_trading_fee(self, symbol, params={}):
"""
fetch the trading fees for a market
:param str symbol: unified market symbol
:param dict params: extra parameters specific to the timex api endpoint
:returns dict: a `fee structure <https://docs.ccxt.com/en/latest/manual.html#fee-structure>`
"""
await self.load_markets()
market = self.market(symbol)
request = {
'markets': market['id'],
}
response = await self.tradingGetFees(self.extend(request, params))
#
# [
# {
# "fee": 0.0075,
# "market": "ETHBTC"
# }
# ]
#
result = self.safe_value(response, 0, {})
return self.parse_trading_fee(result, market)
def parse_market(self, market):
#
# {
# "symbol": "ETHBTC",
# "name": "ETH/BTC",
# "baseCurrency": "ETH",
# "baseTokenAddress": "0x45932db54b38af1f5a57136302eeba66a5975c15",
# "quoteCurrency": "BTC",
# "quoteTokenAddress": "0x8370fbc6ddec1e18b4e41e72ed943e238458487c",
# "feeCurrency": "BTC",
# "feeTokenAddress": "0x8370fbc6ddec1e18b4e41e72ed943e238458487c",
# "quantityIncrement": "0.0000001",
# "takerFee": "0.005",
# "makerFee": "0.0025",
# "tickSize": "0.00000001",
# "baseMinSize": "0.0001",
# "quoteMinSize": "0.00001",
# "locked": False
# }
#
locked = self.safe_value(market, 'locked')
id = self.safe_string(market, 'symbol')
baseId = self.safe_string(market, 'baseCurrency')
quoteId = self.safe_string(market, 'quoteCurrency')
base = self.safe_currency_code(baseId)
quote = self.safe_currency_code(quoteId)
amountIncrement = self.safe_string(market, 'quantityIncrement')
minBase = self.safe_string(market, 'baseMinSize')
minAmount = Precise.string_max(amountIncrement, minBase)
priceIncrement = self.safe_string(market, 'tickSize')
minCost = self.safe_string(market, 'quoteMinSize')
return {
'id': id,
'symbol': base + '/' + quote,
'base': base,
'quote': quote,
'settle': None,
'baseId': baseId,
'quoteId': quoteId,
'settleId': None,
'type': 'spot',
'spot': True,
'margin': False,
'swap': False,
'future': False,
'option': False,
'active': not locked,
'contract': False,
'linear': None,
'inverse': None,
'taker': self.safe_number(market, 'takerFee'),
'maker': self.safe_number(market, 'makerFee'),
'contractSize': None,
'expiry': None,
'expiryDatetime': None,
'strike': None,
'optionType': None,
'precision': {
'amount': self.safe_number(market, 'quantityIncrement'),
'price': self.safe_number(market, 'tickSize'),
},
'limits': {
'leverage': {
'min': None,
'max': None,
},
'amount': {
'min': self.parse_number(minAmount),
'max': None,
},
'price': {
'min': self.parse_number(priceIncrement),
'max': None,
},
'cost': {
'min': minCost,
'max': None,
},
},
'info': market,
}
def parse_currency(self, currency):
#
# {
# "symbol": "BTC",
# "name": "Bitcoin",
# "address": "0x8370fbc6ddec1e18b4e41e72ed943e238458487c",
# "icon": "data:image/svg+xml;base64,PHN2ZyB3aWR...mc+Cg==",
# "background": "transparent",
# "fiatSymbol": "BTC",
# "decimals": 8,
# "tradeDecimals": 20,
# "displayDecimals": 4,
# "crypto": True,
# "depositEnabled": True,
# "withdrawalEnabled": True,
# "transferEnabled": True,
# "buyEnabled": False,
# "purchaseEnabled": False,
# "redeemEnabled": False,
# "active": True,
# "withdrawalFee": "50000000000000000",
# "purchaseCommissions": []
# }
#
# https://github.com/ccxt/ccxt/issues/6878
#
# {
# "symbol":"XRP",
# "name":"Ripple",
# "address":"0x0dc8882914f3ddeebf4cec6dc20edb99df3def6c",
# "decimals":6,
# "tradeDecimals":16,
# "depositEnabled":true,
# "withdrawalEnabled":true,
# "transferEnabled":true,
# "active":true
# }
#
id = self.safe_string(currency, 'symbol')
code = self.safe_currency_code(id)
name = self.safe_string(currency, 'name')
depositEnabled = self.safe_value(currency, 'depositEnabled')
withdrawEnabled = self.safe_value(currency, 'withdrawalEnabled')
isActive = self.safe_value(currency, 'active')
active = depositEnabled and withdrawEnabled and isActive
# fee = self.safe_number(currency, 'withdrawalFee')
feeString = self.safe_string(currency, 'withdrawalFee')
tradeDecimals = self.safe_integer(currency, 'tradeDecimals')
fee = None
if (feeString is not None) and (tradeDecimals is not None):
feeStringLen = len(feeString)
dotIndex = feeStringLen - tradeDecimals
if dotIndex > 0:
whole = feeString[0:dotIndex]
fraction = feeString[-dotIndex:]
fee = self.parse_number(whole + '.' + fraction)
else:
fraction = '.'
for i in range(0, -dotIndex):
fraction += '0'
fee = self.parse_number(fraction + feeString)
return {
'id': code,
'code': code,
'info': currency,
'type': None,
'name': name,
'active': active,
'deposit': depositEnabled,
'withdraw': withdrawEnabled,
'fee': fee,
'precision': self.parse_number(self.parse_precision(self.safe_string(currency, 'decimals'))),
'limits': {
'withdraw': {'min': fee, 'max': None},
'amount': {'min': None, 'max': None},
},
}
def parse_ticker(self, ticker, market=None):
#
# {
# "ask": 0.017,
# "bid": 0.016,
# "high": 0.019,
# "last": 0.017,
# "low": 0.015,
# "market": "TIME/ETH",
# "open": 0.016,
# "period": "H1",
# "timestamp": "2018-12-14T20:50:36.134Z",
# "volume": 4.57,
# "volumeQuote": 0.07312
# }
#
marketId = self.safe_string(ticker, 'market')
symbol = self.safe_symbol(marketId, market, '/')
timestamp = self.parse8601(self.safe_string(ticker, 'timestamp'))
last = self.safe_string(ticker, 'last')
open = self.safe_string(ticker, 'open')
return self.safe_ticker({
'symbol': symbol,
'info': ticker,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'high': self.safe_string(ticker, 'high'),
'low': self.safe_string(ticker, 'low'),
'bid': self.safe_string(ticker, 'bid'),
'bidVolume': None,
'ask': self.safe_string(ticker, 'ask'),
'askVolume': None,
'vwap': None,
'open': open,
'close': last,
'last': last,
'previousClose': None,
'change': None,
'percentage': None,
'average': None,
'baseVolume': self.safe_string(ticker, 'volume'),
'quoteVolume': self.safe_string(ticker, 'volumeQuote'),
}, market)
def parse_trade(self, trade, market=None):
#
# fetchTrades(public)
#
# {
# "id":1,
# "timestamp":"2019-06-25T17:01:50.309",
# "direction":"BUY",
# "price":"0.027",
# "quantity":"0.001"
# }
#
# fetchMyTrades, fetchOrder(private)
#
# {
# "id": "7613414",
# "makerOrderId": "0x8420af060722f560098f786a2894d4358079b6ea5d14b395969ed77bc87a623a",
# "takerOrderId": "0x1235ef158a361815b54c9988b6241c85aedcbc1fe81caf8df8587d5ab0373d1a",
# "symbol": "LTCUSDT",
# "side": "BUY",
# "quantity": "0.2",
# "fee": "0.22685",
# "feeToken": "USDT",
# "price": "226.85",
# "makerOrTaker": "TAKER",
# "timestamp": "2021-04-09T15:39:45.608"
# }
#
marketId = self.safe_string(trade, 'symbol')
symbol = self.safe_symbol(marketId, market)
timestamp = self.parse8601(self.safe_string(trade, 'timestamp'))
priceString = self.safe_string(trade, 'price')
amountString = self.safe_string(trade, 'quantity')
price = self.parse_number(priceString)
amount = self.parse_number(amountString)
cost = self.parse_number(Precise.string_mul(priceString, amountString))
id = self.safe_string(trade, 'id')
side = self.safe_string_lower_2(trade, 'direction', 'side')
takerOrMaker = self.safe_string_lower(trade, 'makerOrTaker')
orderId = None
if takerOrMaker is not None:
orderId = self.safe_string(trade, takerOrMaker + 'OrderId')
fee = None
feeCost = self.safe_number(trade, 'fee')
feeCurrency = self.safe_currency_code(self.safe_string(trade, 'feeToken'))
if feeCost is not None:
fee = {
'cost': feeCost,
'currency': feeCurrency,
}
return {
'info': trade,
'id': id,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'symbol': symbol,
'order': orderId,
'type': None,
'side': side,
'price': price,
'amount': amount,
'cost': cost,
'takerOrMaker': takerOrMaker,
'fee': fee,
}
    def parse_ohlcv(self, ohlcv, market=None):
        """Convert one raw candle dict into the unified [timestamp, open, high, low, close, volume] list."""
        #
        # {
        #     "timestamp":"2019-12-04T23:00:00",
        #     "open":"0.02024009",
        #     "high":"0.02024009",
        #     "low":"0.02024009",
        #     "close":"0.02024009",
        #     "volume":"0.00008096036",
        #     "volumeQuote":"0.004",
        # }
        #
        return [
            self.parse8601(self.safe_string(ohlcv, 'timestamp')),
            self.safe_number(ohlcv, 'open'),
            self.safe_number(ohlcv, 'high'),
            self.safe_number(ohlcv, 'low'),
            self.safe_number(ohlcv, 'close'),
            self.safe_number(ohlcv, 'volume'),
        ]
    def parse_order(self, order, market=None):
        """Parse a raw timex order into a unified order structure."""
        #
        # fetchOrder, createOrder, cancelOrder, cancelOrders, fetchOpenOrders, fetchClosedOrders
        #
        # {
        #     "cancelledQuantity": "0.3",
        #     "clientOrderId": "my-order-1",
        #     "createdAt": "1970-01-01T00:00:00",
        #     "cursorId": 50,
        #     "expireTime": "1970-01-01T00:00:00",
        #     "filledQuantity": "0.3",
        #     "id": "string",
        #     "price": "0.017",
        #     "quantity": "0.3",
        #     "side": "BUY",
        #     "symbol": "TIMEETH",
        #     "type": "LIMIT",
        #     "updatedAt": "1970-01-01T00:00:00"
        #     "trades": [], # injected from the outside
        # }
        #
        id = self.safe_string(order, 'id')
        type = self.safe_string_lower(order, 'type')
        side = self.safe_string_lower(order, 'side')
        marketId = self.safe_string(order, 'symbol')
        symbol = self.safe_symbol(marketId, market)
        timestamp = self.parse8601(self.safe_string(order, 'createdAt'))
        price = self.safe_string(order, 'price')
        amount = self.safe_string(order, 'quantity')
        filled = self.safe_string(order, 'filledQuantity')
        # omit_zero maps "0" to None, so canceledQuantity is non-None only when
        # some quantity was actually cancelled
        canceledQuantity = self.omit_zero(self.safe_string(order, 'cancelledQuantity'))
        # status derivation: fully filled -> closed; any cancelled quantity -> canceled
        # (even if partially filled); otherwise the order is still open
        status = None
        if Precise.string_equals(filled, amount):
            status = 'closed'
        elif canceledQuantity is not None:
            status = 'canceled'
        else:
            status = 'open'
        # 'trades' is only present when injected by fetch_order
        rawTrades = self.safe_value(order, 'trades', [])
        clientOrderId = self.safe_string(order, 'clientOrderId')
        # safe_order fills in cost/remaining/average from price/amount/filled
        return self.safe_order({
            'info': order,
            'id': id,
            'clientOrderId': clientOrderId,
            'timestamp': timestamp,
            'datetime': self.iso8601(timestamp),
            'lastTradeTimestamp': None,
            'symbol': symbol,
            'type': type,
            'timeInForce': None,
            'postOnly': None,
            'side': side,
            'price': price,
            'stopPrice': None,
            'amount': amount,
            'cost': None,
            'average': None,
            'filled': filled,
            'remaining': None,
            'status': status,
            'fee': None,
            'trades': rawTrades,
        }, market)
def sign(self, path, api='public', method='GET', params={}, headers=None, body=None):
url = self.urls['api'] + '/' + api + '/' + path
if params:
url += '?' + self.urlencode_with_array_repeat(params)
if api != 'public':
self.check_required_credentials()
auth = self.string_to_base64(self.apiKey + ':' + self.secret)
secret = 'Basic ' + self.decode(auth)
headers = {'authorization': secret}
return {'url': url, 'method': method, 'body': body, 'headers': headers}
def handle_errors(self, statusCode, statusText, url, method, responseHeaders, responseBody, response, requestHeaders, requestBody):
if response is None:
return
if statusCode >= 400:
#
# {"error":{"timestamp":"05.12.2019T05:25:43.584+0000","status":"BAD_REQUEST","message":"Insufficient ETH balance. Required: 1, actual: 0.","code":4001}}
# {"error":{"timestamp":"05.12.2019T04:03:25.419+0000","status":"FORBIDDEN","message":"Access denied","code":4300}}
#
feedback = self.id + ' ' + responseBody
error = self.safe_value(response, 'error')
if error is None:
error = response
code = self.safe_string_2(error, 'code', 'status')
message = self.safe_string_2(error, 'message', 'debugMessage')
self.throw_broadly_matched_exception(self.exceptions['broad'], message, feedback)
self.throw_exactly_matched_exception(self.exceptions['exact'], code, feedback)
self.throw_exactly_matched_exception(self.exceptions['exact'], message, feedback)
raise ExchangeError(feedback)
| 43.904586 | 2,235 | 0.497953 |
rt.base.exchange import Exchange
from ccxt.base.errors import ExchangeError
from ccxt.base.errors import AuthenticationError
from ccxt.base.errors import PermissionDenied
from ccxt.base.errors import BadRequest
from ccxt.base.errors import InsufficientFunds
from ccxt.base.errors import InvalidOrder
from ccxt.base.errors import OrderNotFound
from ccxt.base.errors import NotSupported
from ccxt.base.errors import RateLimitExceeded
from ccxt.base.errors import ExchangeNotAvailable
from ccxt.base.decimal_to_precision import TICK_SIZE
from ccxt.base.precise import Precise
class timex(Exchange):
def describe(self):
return self.deep_extend(super(timex, self).describe(), {
'id': 'timex',
'name': 'TimeX',
'countries': ['AU'],
'version': 'v1',
'rateLimit': 1500,
'has': {
'CORS': None,
'spot': True,
'margin': False,
'swap': False,
'future': False,
'option': False,
'addMargin': False,
'cancelOrder': True,
'cancelOrders': True,
'createOrder': True,
'createReduceOnlyOrder': False,
'createStopLimitOrder': False,
'createStopMarketOrder': False,
'createStopOrder': False,
'editOrder': True,
'fetchBalance': True,
'fetchBorrowRate': False,
'fetchBorrowRateHistories': False,
'fetchBorrowRateHistory': False,
'fetchBorrowRates': False,
'fetchBorrowRatesPerSymbol': False,
'fetchClosedOrders': True,
'fetchCurrencies': True,
'fetchFundingHistory': False,
'fetchFundingRate': False,
'fetchFundingRateHistory': False,
'fetchFundingRates': False,
'fetchIndexOHLCV': False,
'fetchLeverage': False,
'fetchLeverageTiers': False,
'fetchMarkets': True,
'fetchMarkOHLCV': False,
'fetchMyTrades': True,
'fetchOHLCV': True,
'fetchOpenInterestHistory': False,
'fetchOpenOrders': True,
'fetchOrder': True,
'fetchOrderBook': True,
'fetchPosition': False,
'fetchPositions': False,
'fetchPositionsRisk': False,
'fetchPremiumIndexOHLCV': False,
'fetchTicker': True,
'fetchTickers': True,
'fetchTrades': True,
'fetchTradingFee': True,
'reduceMargin': False,
'setLeverage': False,
'setMarginMode': False,
'setPositionMode': False,
},
'timeframes': {
'1m': 'I1',
'5m': 'I5',
'15m': 'I15',
'30m': 'I30',
'1h': 'H1',
'2h': 'H2',
'4h': 'H4',
'6h': 'H6',
'12h': 'H12',
'1d': 'D1',
'1w': 'W1',
},
'urls': {
'logo': 'https://user-images.githubusercontent.com/1294454/70423869-6839ab00-1a7f-11ea-8f94-13ae72c31115.jpg',
'api': 'https://plasma-relay-backend.timex.io',
'www': 'https://timex.io',
'doc': 'https://docs.timex.io',
'referral': 'https://timex.io/?refcode=1x27vNkTbP1uwkCck',
},
'api': {
'custody': {
'get': [
'credentials',
'credentials/h/{hash}',
'credentials/k/{key}',
'credentials/me/address',
'deposit-addresses',
'deposit-addresses/h/{hash}',
],
},
'history': {
'get': [
'orders',
'orders/details',
'orders/export/csv',
'trades',
'trades/export/csv',
],
},
'currencies': {
'get': [
'a/{address}',
'i/{id}',
's/{symbol}',
],
'post': [
'perform',
'prepare',
'remove/perform',
's/{symbol}/remove/prepare',
's/{symbol}/update/perform',
's/{symbol}/update/prepare',
],
},
'markets': {
'get': [
'i/{id}',
's/{symbol}',
],
'post': [
'perform',
'prepare',
'remove/perform',
's/{symbol}/remove/prepare',
's/{symbol}/update/perform',
's/{symbol}/update/prepare',
],
},
'public': {
'get': [
'candles',
'currencies',
'markets',
'orderbook',
'orderbook/raw',
'orderbook/v2',
'tickers',
'trades',
],
},
'statistics': {
'get': [
'address',
],
},
'trading': {
'get': [
'balances',
'fees',
'orders',
],
'post': [
'orders',
'orders/json',
],
'put': [
'orders',
'orders/json',
],
'delete': [
'orders',
'orders/json',
],
},
'tradingview': {
'get': [
'config',
'history',
'symbol_info',
'time',
],
},
},
'precisionMode': TICK_SIZE,
'exceptions': {
'exact': {
'0': ExchangeError,
'1': NotSupported,
'4000': BadRequest,
'4001': BadRequest,
'4002': InsufficientFunds,
'4003': AuthenticationError,
'4004': AuthenticationError,
'4005': BadRequest,
'4006': BadRequest,
'4007': BadRequest,
'4300': PermissionDenied,
'4100': AuthenticationError,
'4400': OrderNotFound,
'5001': InvalidOrder,
'5002': ExchangeError,
'400': BadRequest,
'401': AuthenticationError,
'403': PermissionDenied,
'404': OrderNotFound,
'429': RateLimitExceeded,
'500': ExchangeError,
'503': ExchangeNotAvailable,
},
'broad': {
'Insufficient': InsufficientFunds,
},
},
'options': {
'expireIn': 31536000,
'fetchTickers': {
'period': '1d',
},
'fetchTrades': {
'sort': 'timestamp,asc',
},
'fetchMyTrades': {
'sort': 'timestamp,asc',
},
'fetchOpenOrders': {
'sort': 'createdAt,asc',
},
'fetchClosedOrders': {
'sort': 'createdAt,asc',
},
'defaultSort': 'timestamp,asc',
'defaultSortOrders': 'createdAt,asc',
},
})
async def fetch_markets(self, params={}):
response = await self.publicGetMarkets(params)
result = []
for i in range(0, len(response)):
result.append(self.parse_market(response[i]))
return result
async def fetch_currencies(self, params={}):
response = await self.publicGetCurrencies(params)
result = []
for i in range(0, len(response)):
currency = response[i]
result.append(self.parse_currency(currency))
return self.index_by(result, 'code')
async def fetch_tickers(self, symbols=None, params={}):
await self.load_markets()
period = self.safe_string(self.options['fetchTickers'], 'period', '1d')
request = {
'period': self.timeframes[period],
}
response = await self.publicGetTickers(self.extend(request, params))
return self.parse_tickers(response, symbols)
async def fetch_ticker(self, symbol, params={}):
await self.load_markets()
market = self.market(symbol)
period = self.safe_string(self.options['fetchTickers'], 'period', '1d')
request = {
'market': market['id'],
'period': self.timeframes[period],
}
response = await self.publicGetTickers(self.extend(request, params))
ticker = self.safe_value(response, 0)
return self.parse_ticker(ticker, market)
async def fetch_order_book(self, symbol, limit=None, params={}):
await self.load_markets()
market = self.market(symbol)
request = {
'market': market['id'],
}
if limit is not None:
request['limit'] = limit
response = await self.publicGetOrderbookV2(self.extend(request, params))
timestamp = self.parse8601(self.safe_string(response, 'timestamp'))
return self.parse_order_book(response, symbol, timestamp, 'bid', 'ask', 'price', 'baseTokenAmount')
async def fetch_trades(self, symbol, since=None, limit=None, params={}):
await self.load_markets()
market = self.market(symbol)
options = self.safe_value(self.options, 'fetchTrades', {})
defaultSort = self.safe_value(options, 'sort', 'timestamp,asc')
sort = self.safe_string(params, 'sort', defaultSort)
query = self.omit(params, 'sort')
request = {
'market': market['id'],
ot None:
request['from'] = self.iso8601(since)
if limit is not None:
request['size'] = limit
response = await self.publicGetTrades(self.extend(request, query))
return self.parse_trades(response, market, since, limit)
async def fetch_ohlcv(self, symbol, timeframe='1m', since=None, limit=None, params={}):
await self.load_markets()
market = self.market(symbol)
request = {
'market': market['id'],
'period': self.timeframes[timeframe],
}
duration = self.parse_timeframe(timeframe)
if since is not None:
request['from'] = self.iso8601(since)
if limit is not None:
request['till'] = self.iso8601(self.sum(since, self.sum(limit, 1) * duration * 1000))
elif limit is not None:
now = self.milliseconds()
request['till'] = self.iso8601(now)
request['from'] = self.iso8601(now - limit * duration * 1000 - 1)
else:
request['till'] = self.iso8601(self.milliseconds())
response = await self.publicGetCandles(self.extend(request, params))
return self.parse_ohlcvs(response, market, timeframe, since, limit)
def parse_balance(self, response):
result = {
'info': response,
'timestamp': None,
'datetime': None,
}
for i in range(0, len(response)):
balance = response[i]
currencyId = self.safe_string(balance, 'currency')
code = self.safe_currency_code(currencyId)
account = self.account()
account['total'] = self.safe_string(balance, 'totalBalance')
account['used'] = self.safe_string(balance, 'lockedBalance')
result[code] = account
return self.safe_balance(result)
async def fetch_balance(self, params={}):
await self.load_markets()
response = await self.tradingGetBalances(params)
return self.parse_balance(response)
async def create_order(self, symbol, type, side, amount, price=None, params={}):
await self.load_markets()
market = self.market(symbol)
uppercaseSide = side.upper()
uppercaseType = type.upper()
postOnly = self.safe_value(params, 'postOnly', False)
if postOnly:
uppercaseType = 'POST_ONLY'
params = self.omit(params, ['postOnly'])
request = {
'symbol': market['id'],
'quantity': self.amount_to_precision(symbol, amount),
'side': uppercaseSide,
'orderTypes': uppercaseType,
query = params
if (uppercaseType == 'LIMIT') or (uppercaseType == 'POST_ONLY'):
request['price'] = self.price_to_precision(symbol, price)
defaultExpireIn = self.safe_integer(self.options, 'expireIn')
expireTime = self.safe_value(params, 'expireTime')
expireIn = self.safe_value(params, 'expireIn', defaultExpireIn)
if expireTime is not None:
request['expireTime'] = expireTime
elif expireIn is not None:
request['expireIn'] = expireIn
else:
raise InvalidOrder(self.id + ' createOrder() method requires a expireTime or expireIn param for a ' + type + ' order, you can also set the expireIn exchange-wide option')
query = self.omit(params, ['expireTime', 'expireIn'])
else:
request['price'] = 0
response = await self.tradingPostOrders(self.extend(request, query))
orders = self.safe_value(response, 'orders', [])
order = self.safe_value(orders, 0, {})
return self.parse_order(order, market)
async def edit_order(self, id, symbol, type, side, amount=None, price=None, params={}):
await self.load_markets()
market = self.market(symbol)
request = {
'id': id,
}
if amount is not None:
request['quantity'] = self.amount_to_precision(symbol, amount)
if price is not None:
request['price'] = self.price_to_precision(symbol, price)
response = await self.tradingPutOrders(self.extend(request, params))
if 'unchangedOrders' in response:
orderIds = self.safe_value(response, 'unchangedOrders', [])
orderId = self.safe_string(orderIds, 0)
return {
'id': orderId,
'info': response,
}
orders = self.safe_value(response, 'changedOrders', [])
firstOrder = self.safe_value(orders, 0, {})
order = self.safe_value(firstOrder, 'newOrder', {})
return self.parse_order(order, market)
async def cancel_order(self, id, symbol=None, params={}):
await self.load_markets()
return await self.cancel_orders([id], symbol, params)
async def cancel_orders(self, ids, symbol=None, params={}):
await self.load_markets()
request = {
'id': ids,
}
response = await self.tradingDeleteOrders(self.extend(request, params))
return response
async def fetch_order(self, id, symbol=None, params={}):
await self.load_markets()
request = {
'orderHash': id,
}
response = await self.historyGetOrdersDetails(request)
order = self.safe_value(response, 'order', {})
trades = self.safe_value(response, 'trades', [])
return self.parse_order(self.extend(order, {'trades': trades}))
async def fetch_open_orders(self, symbol=None, since=None, limit=None, params={}):
await self.load_markets()
options = self.safe_value(self.options, 'fetchOpenOrders', {})
defaultSort = self.safe_value(options, 'sort', 'createdAt,asc')
sort = self.safe_string(params, 'sort', defaultSort)
query = self.omit(params, 'sort')
request = {
if symbol is not None:
market = self.market(symbol)
request['symbol'] = market['id']
if limit is not None:
request['size'] = limit
response = await self.tradingGetOrders(self.extend(request, query))
orders = self.safe_value(response, 'orders', [])
return self.parse_orders(orders, market, since, limit)
async def fetch_closed_orders(self, symbol=None, since=None, limit=None, params={}):
await self.load_markets()
options = self.safe_value(self.options, 'fetchClosedOrders', {})
defaultSort = self.safe_value(options, 'sort', 'createdAt,asc')
sort = self.safe_string(params, 'sort', defaultSort)
query = self.omit(params, 'sort')
request = {
}
market = None
if symbol is not None:
market = self.market(symbol)
request['symbol'] = market['id']
if since is not None:
request['from'] = self.iso8601(since)
if limit is not None:
request['size'] = limit
response = await self.historyGetOrders(self.extend(request, query))
orders = self.safe_value(response, 'orders', [])
return self.parse_orders(orders, market, since, limit)
async def fetch_my_trades(self, symbol=None, since=None, limit=None, params={}):
await self.load_markets()
options = self.safe_value(self.options, 'fetchMyTrades', {})
defaultSort = self.safe_value(options, 'sort', 'timestamp,asc')
sort = self.safe_string(params, 'sort', defaultSort)
query = self.omit(params, 'sort')
request = {
}
market = None
if symbol is not None:
market = self.market(symbol)
request['symbol'] = market['id']
if since is not None:
request['from'] = self.iso8601(since)
if limit is not None:
request['size'] = limit
response = await self.historyGetTrades(self.extend(request, query))
trades = self.safe_value(response, 'trades', [])
return self.parse_trades(trades, market, since, limit)
def parse_trading_fee(self, fee, market=None):
marketId = self.safe_string(fee, 'market')
rate = self.safe_number(fee, 'fee')
return {
'info': fee,
'symbol': self.safe_symbol(marketId, market),
'maker': rate,
'taker': rate,
}
async def fetch_trading_fee(self, symbol, params={}):
await self.load_markets()
market = self.market(symbol)
request = {
'markets': market['id'],
}
response = await self.tradingGetFees(self.extend(request, params))
result = self.safe_value(response, 0, {})
return self.parse_trading_fee(result, market)
def parse_market(self, market):
locked = self.safe_value(market, 'locked')
id = self.safe_string(market, 'symbol')
baseId = self.safe_string(market, 'baseCurrency')
quoteId = self.safe_string(market, 'quoteCurrency')
base = self.safe_currency_code(baseId)
quote = self.safe_currency_code(quoteId)
amountIncrement = self.safe_string(market, 'quantityIncrement')
minBase = self.safe_string(market, 'baseMinSize')
minAmount = Precise.string_max(amountIncrement, minBase)
priceIncrement = self.safe_string(market, 'tickSize')
minCost = self.safe_string(market, 'quoteMinSize')
return {
'id': id,
'symbol': base + '/' + quote,
'base': base,
'quote': quote,
'settle': None,
'baseId': baseId,
'quoteId': quoteId,
'settleId': None,
'type': 'spot',
'spot': True,
'margin': False,
'swap': False,
'future': False,
'option': False,
'active': not locked,
'contract': False,
'linear': None,
'inverse': None,
'taker': self.safe_number(market, 'takerFee'),
'maker': self.safe_number(market, 'makerFee'),
'contractSize': None,
'expiry': None,
'expiryDatetime': None,
'strike': None,
'optionType': None,
'precision': {
'amount': self.safe_number(market, 'quantityIncrement'),
'price': self.safe_number(market, 'tickSize'),
},
'limits': {
'leverage': {
'min': None,
'max': None,
},
'amount': {
'min': self.parse_number(minAmount),
'max': None,
},
'price': {
'min': self.parse_number(priceIncrement),
'max': None,
},
'cost': {
'min': minCost,
'max': None,
},
},
'info': market,
}
def parse_currency(self, currency):
id = self.safe_string(currency, 'symbol')
code = self.safe_currency_code(id)
name = self.safe_string(currency, 'name')
depositEnabled = self.safe_value(currency, 'depositEnabled')
withdrawEnabled = self.safe_value(currency, 'withdrawalEnabled')
isActive = self.safe_value(currency, 'active')
active = depositEnabled and withdrawEnabled and isActive
feeString = self.safe_string(currency, 'withdrawalFee')
tradeDecimals = self.safe_integer(currency, 'tradeDecimals')
fee = None
if (feeString is not None) and (tradeDecimals is not None):
feeStringLen = len(feeString)
dotIndex = feeStringLen - tradeDecimals
if dotIndex > 0:
whole = feeString[0:dotIndex]
fraction = feeString[-dotIndex:]
fee = self.parse_number(whole + '.' + fraction)
else:
fraction = '.'
for i in range(0, -dotIndex):
fraction += '0'
fee = self.parse_number(fraction + feeString)
return {
'id': code,
'code': code,
'info': currency,
'type': None,
'name': name,
'active': active,
'deposit': depositEnabled,
'withdraw': withdrawEnabled,
'fee': fee,
'precision': self.parse_number(self.parse_precision(self.safe_string(currency, 'decimals'))),
'limits': {
'withdraw': {'min': fee, 'max': None},
'amount': {'min': None, 'max': None},
},
}
def parse_ticker(self, ticker, market=None):
marketId = self.safe_string(ticker, 'market')
symbol = self.safe_symbol(marketId, market, '/')
timestamp = self.parse8601(self.safe_string(ticker, 'timestamp'))
last = self.safe_string(ticker, 'last')
open = self.safe_string(ticker, 'open')
return self.safe_ticker({
'symbol': symbol,
'info': ticker,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'high': self.safe_string(ticker, 'high'),
'low': self.safe_string(ticker, 'low'),
'bid': self.safe_string(ticker, 'bid'),
'bidVolume': None,
'ask': self.safe_string(ticker, 'ask'),
'askVolume': None,
'vwap': None,
'open': open,
'close': last,
'last': last,
'previousClose': None,
'change': None,
'percentage': None,
'average': None,
'baseVolume': self.safe_string(ticker, 'volume'),
'quoteVolume': self.safe_string(ticker, 'volumeQuote'),
}, market)
def parse_trade(self, trade, market=None):
marketId = self.safe_string(trade, 'symbol')
symbol = self.safe_symbol(marketId, market)
timestamp = self.parse8601(self.safe_string(trade, 'timestamp'))
priceString = self.safe_string(trade, 'price')
amountString = self.safe_string(trade, 'quantity')
price = self.parse_number(priceString)
amount = self.parse_number(amountString)
cost = self.parse_number(Precise.string_mul(priceString, amountString))
id = self.safe_string(trade, 'id')
side = self.safe_string_lower_2(trade, 'direction', 'side')
takerOrMaker = self.safe_string_lower(trade, 'makerOrTaker')
orderId = None
if takerOrMaker is not None:
orderId = self.safe_string(trade, takerOrMaker + 'OrderId')
fee = None
feeCost = self.safe_number(trade, 'fee')
feeCurrency = self.safe_currency_code(self.safe_string(trade, 'feeToken'))
if feeCost is not None:
fee = {
'cost': feeCost,
'currency': feeCurrency,
}
return {
'info': trade,
'id': id,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'symbol': symbol,
'order': orderId,
'type': None,
'side': side,
'price': price,
'amount': amount,
'cost': cost,
'takerOrMaker': takerOrMaker,
'fee': fee,
}
def parse_ohlcv(self, ohlcv, market=None):
return [
self.parse8601(self.safe_string(ohlcv, 'timestamp')),
self.safe_number(ohlcv, 'open'),
self.safe_number(ohlcv, 'high'),
self.safe_number(ohlcv, 'low'),
self.safe_number(ohlcv, 'close'),
self.safe_number(ohlcv, 'volume'),
]
def parse_order(self, order, market=None):
id = self.safe_string(order, 'id')
type = self.safe_string_lower(order, 'type')
side = self.safe_string_lower(order, 'side')
marketId = self.safe_string(order, 'symbol')
symbol = self.safe_symbol(marketId, market)
timestamp = self.parse8601(self.safe_string(order, 'createdAt'))
price = self.safe_string(order, 'price')
amount = self.safe_string(order, 'quantity')
filled = self.safe_string(order, 'filledQuantity')
canceledQuantity = self.omit_zero(self.safe_string(order, 'cancelledQuantity'))
status = None
if Precise.string_equals(filled, amount):
status = 'closed'
elif canceledQuantity is not None:
status = 'canceled'
else:
status = 'open'
rawTrades = self.safe_value(order, 'trades', [])
clientOrderId = self.safe_string(order, 'clientOrderId')
return self.safe_order({
'info': order,
'id': id,
'clientOrderId': clientOrderId,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'lastTradeTimestamp': None,
'symbol': symbol,
'type': type,
'timeInForce': None,
'postOnly': None,
'side': side,
'price': price,
'stopPrice': None,
'amount': amount,
'cost': None,
'average': None,
'filled': filled,
'remaining': None,
'status': status,
'fee': None,
'trades': rawTrades,
}, market)
def sign(self, path, api='public', method='GET', params={}, headers=None, body=None):
url = self.urls['api'] + '/' + api + '/' + path
if params:
url += '?' + self.urlencode_with_array_repeat(params)
if api != 'public':
self.check_required_credentials()
auth = self.string_to_base64(self.apiKey + ':' + self.secret)
secret = 'Basic ' + self.decode(auth)
headers = {'authorization': secret}
return {'url': url, 'method': method, 'body': body, 'headers': headers}
def handle_errors(self, statusCode, statusText, url, method, responseHeaders, responseBody, response, requestHeaders, requestBody):
if response is None:
return
if statusCode >= 400:
feedback = self.id + ' ' + responseBody
error = self.safe_value(response, 'error')
if error is None:
error = response
code = self.safe_string_2(error, 'code', 'status')
message = self.safe_string_2(error, 'message', 'debugMessage')
self.throw_broadly_matched_exception(self.exceptions['broad'], message, feedback)
self.throw_exactly_matched_exception(self.exceptions['exact'], code, feedback)
self.throw_exactly_matched_exception(self.exceptions['exact'], message, feedback)
raise ExchangeError(feedback)
| true | true |
f71c2d2de4124b6e764541c80fea3098c5dcf2c2 | 231,565 | py | Python | zerver/tests/test_messages.py | myii/zulip | 915d8013271f1823954dd8d4441842842857ab9f | [
"Apache-2.0"
] | null | null | null | zerver/tests/test_messages.py | myii/zulip | 915d8013271f1823954dd8d4441842842857ab9f | [
"Apache-2.0"
] | null | null | null | zerver/tests/test_messages.py | myii/zulip | 915d8013271f1823954dd8d4441842842857ab9f | [
"Apache-2.0"
] | null | null | null | from django.db import IntegrityError
from django.db.models import Q
from django.conf import settings
from django.http import HttpResponse
from django.test import TestCase, override_settings
from django.utils.timezone import now as timezone_now
from zerver.lib import bugdown
from zerver.decorator import JsonableError
from zerver.lib.test_runner import slow
from zerver.lib.addressee import Addressee
from zerver.lib.actions import (
check_message,
check_send_stream_message,
create_mirror_user_if_needed,
do_add_alert_words,
do_change_is_admin,
do_change_stream_invite_only,
do_change_stream_post_policy,
do_claim_attachments,
do_create_user,
do_deactivate_user,
do_send_messages,
do_update_message,
do_set_realm_property,
extract_private_recipients,
extract_stream_indicator,
gather_subscriptions_helper,
get_active_presence_idle_user_ids,
get_client,
get_last_message_id,
get_topic_messages,
get_user_info_for_message_updates,
internal_prep_private_message,
internal_prep_stream_message_by_name,
internal_send_huddle_message,
internal_send_private_message,
internal_send_stream_message,
internal_send_stream_message_by_name,
send_rate_limited_pm_notification_to_bot_owner,
)
from zerver.lib.cache import (
cache_delete,
get_stream_cache_key,
to_dict_cache_key_id,
)
from zerver.lib.create_user import (
create_user_profile,
)
from zerver.lib.message import (
MessageDict,
bulk_access_messages,
get_first_visible_message_id,
get_raw_unread_data,
get_recent_private_conversations,
maybe_update_first_visible_message_id,
messages_for_ids,
render_markdown,
sew_messages_and_reactions,
update_first_visible_message_id,
)
from zerver.lib.test_helpers import (
get_subscription,
get_user_messages,
make_client,
message_stream_count,
most_recent_message,
most_recent_usermessage,
queries_captured,
reset_emails_in_zulip_realm,
)
from zerver.lib.test_classes import (
ZulipTestCase,
)
from zerver.lib.topic import (
LEGACY_PREV_TOPIC,
DB_TOPIC_NAME,
TOPIC_LINKS,
TOPIC_NAME,
)
from zerver.lib.types import DisplayRecipientT, UserDisplayRecipient
from zerver.lib.soft_deactivation import (
add_missing_messages,
do_soft_activate_users,
do_soft_deactivate_users,
reactivate_user_if_soft_deactivated,
)
from zerver.models import (
MAX_MESSAGE_LENGTH, MAX_TOPIC_NAME_LENGTH,
Message, Realm, Recipient, Stream, UserMessage, UserProfile, Attachment,
RealmAuditLog, RealmDomain, get_realm, UserPresence, Subscription,
get_stream, get_system_bot, get_user, Reaction,
flush_per_request_caches, ScheduledMessage, get_huddle_recipient,
bulk_get_huddle_user_ids, get_huddle_user_ids,
get_display_recipient, RealmFilter
)
from zerver.lib.timestamp import convert_to_UTC, datetime_to_timestamp
from zerver.lib.timezone import get_timezone
from zerver.lib.upload import create_attachment
from zerver.lib.url_encoding import near_message_url
from zerver.views.messages import create_mirrored_message_users, InvalidMirrorInput
from analytics.lib.counts import COUNT_STATS
from analytics.models import RealmCount
import datetime
import mock
from operator import itemgetter
import time
import ujson
from typing import Any, Dict, List, Set, Union, Tuple
from collections import namedtuple
class MiscMessageTest(ZulipTestCase):
def test_get_last_message_id(self) -> None:
self.assertEqual(
get_last_message_id(),
Message.objects.latest('id').id
)
Message.objects.all().delete()
self.assertEqual(get_last_message_id(), -1)
class TopicHistoryTest(ZulipTestCase):
def test_topics_history_zephyr_mirror(self) -> None:
user_profile = self.mit_user('sipbtest')
stream_name = 'new_stream'
# Send a message to this new stream from another user
self.subscribe(self.mit_user("starnine"), stream_name)
stream = get_stream(stream_name, user_profile.realm)
self.send_stream_message(self.mit_user("starnine"), stream_name,
topic_name="secret topic")
# Now subscribe this MIT user to the new stream and verify
# that the new topic is not accessible
self.login_user(user_profile)
self.subscribe(user_profile, stream_name)
endpoint = '/json/users/me/%d/topics' % (stream.id,)
result = self.client_get(endpoint, dict(), subdomain="zephyr")
self.assert_json_success(result)
history = result.json()['topics']
self.assertEqual(history, [])
def test_topics_history(self) -> None:
# verified: int(UserMessage.flags.read) == 1
user_profile = self.example_user('iago')
self.login_user(user_profile)
stream_name = 'Verona'
stream = get_stream(stream_name, user_profile.realm)
recipient = stream.recipient
def create_test_message(topic: str) -> int:
# TODO: Clean this up to send messages the normal way.
hamlet = self.example_user('hamlet')
message = Message(
sender=hamlet,
recipient=recipient,
content='whatever',
date_sent=timezone_now(),
sending_client=get_client('whatever'),
)
message.set_topic_name(topic)
message.save()
UserMessage.objects.create(
user_profile=user_profile,
message=message,
flags=0,
)
return message.id
# our most recent topics are topic0, topic1, topic2
# Create old messages with strange spellings.
create_test_message('topic2')
create_test_message('toPIc1')
create_test_message('toPIc0')
create_test_message('topic2')
create_test_message('topic2')
create_test_message('Topic2')
# Create new messages
topic2_msg_id = create_test_message('topic2')
create_test_message('topic1')
create_test_message('topic1')
topic1_msg_id = create_test_message('topic1')
topic0_msg_id = create_test_message('topic0')
endpoint = '/json/users/me/%d/topics' % (stream.id,)
result = self.client_get(endpoint, dict())
self.assert_json_success(result)
history = result.json()['topics']
# We only look at the most recent three topics, because
# the prior fixture data may be unreliable.
history = history[:3]
self.assertEqual([topic['name'] for topic in history], [
'topic0',
'topic1',
'topic2',
])
self.assertEqual([topic['max_id'] for topic in history], [
topic0_msg_id,
topic1_msg_id,
topic2_msg_id,
])
# Now try as cordelia, who we imagine as a totally new user in
# that she doesn't have UserMessage rows. We should see the
# same results for a public stream.
self.login('cordelia')
result = self.client_get(endpoint, dict())
self.assert_json_success(result)
history = result.json()['topics']
# We only look at the most recent three topics, because
# the prior fixture data may be unreliable.
history = history[:3]
self.assertEqual([topic['name'] for topic in history], [
'topic0',
'topic1',
'topic2',
])
self.assertIn('topic0', [topic['name'] for topic in history])
self.assertEqual([topic['max_id'] for topic in history], [
topic0_msg_id,
topic1_msg_id,
topic2_msg_id,
])
# Now make stream private, but subscribe cordelia
do_change_stream_invite_only(stream, True)
self.subscribe(self.example_user("cordelia"), stream.name)
result = self.client_get(endpoint, dict())
self.assert_json_success(result)
history = result.json()['topics']
history = history[:3]
# Cordelia doesn't have these recent history items when we
# wasn't subscribed in her results.
self.assertNotIn('topic0', [topic['name'] for topic in history])
self.assertNotIn('topic1', [topic['name'] for topic in history])
self.assertNotIn('topic2', [topic['name'] for topic in history])
def test_bad_stream_id(self) -> None:
self.login('iago')
# non-sensible stream id
endpoint = '/json/users/me/9999999999/topics'
result = self.client_get(endpoint, dict())
self.assert_json_error(result, 'Invalid stream id')
# out of realm
bad_stream = self.make_stream(
'mit_stream',
realm=get_realm('zephyr')
)
endpoint = '/json/users/me/%s/topics' % (bad_stream.id,)
result = self.client_get(endpoint, dict())
self.assert_json_error(result, 'Invalid stream id')
# private stream to which I am not subscribed
private_stream = self.make_stream(
'private_stream',
invite_only=True
)
endpoint = '/json/users/me/%s/topics' % (private_stream.id,)
result = self.client_get(endpoint, dict())
self.assert_json_error(result, 'Invalid stream id')
class TopicDeleteTest(ZulipTestCase):
def test_topic_delete(self) -> None:
initial_last_msg_id = self.get_last_message().id
stream_name = 'new_stream'
topic_name = 'new topic 2'
# NON-ADMIN USER
user_profile = self.example_user('hamlet')
self.subscribe(user_profile, stream_name)
# Send message
stream = get_stream(stream_name, user_profile.realm)
self.send_stream_message(user_profile, stream_name, topic_name=topic_name)
last_msg_id = self.send_stream_message(user_profile, stream_name, topic_name=topic_name)
# Deleting the topic
self.login_user(user_profile)
endpoint = '/json/streams/' + str(stream.id) + '/delete_topic'
result = self.client_post(endpoint, {
"topic_name": topic_name
})
self.assert_json_error(result, "Must be an organization administrator")
self.assertEqual(self.get_last_message().id, last_msg_id)
# Make stream private with limited history
do_change_stream_invite_only(stream, invite_only=True,
history_public_to_subscribers=False)
# ADMIN USER subscribed now
user_profile = self.example_user('iago')
self.subscribe(user_profile, stream_name)
self.login_user(user_profile)
new_last_msg_id = self.send_stream_message(user_profile, stream_name, topic_name=topic_name)
# Now admin deletes all messages in topic -- which should only
# delete new_last_msg_id, i.e. the one sent since they joined.
self.assertEqual(self.get_last_message().id, new_last_msg_id)
result = self.client_post(endpoint, {
"topic_name": topic_name
})
self.assert_json_success(result)
self.assertEqual(self.get_last_message().id, last_msg_id)
# Try to delete all messages in the topic again. There are no messages accessible
# to the administrator, so this should do nothing.
result = self.client_post(endpoint, {
"topic_name": topic_name
})
self.assert_json_success(result)
self.assertEqual(self.get_last_message().id, last_msg_id)
# Make the stream's history public to subscribers
do_change_stream_invite_only(stream, invite_only=True,
history_public_to_subscribers=True)
# Delete the topic should now remove all messages
result = self.client_post(endpoint, {
"topic_name": topic_name
})
self.assert_json_success(result)
self.assertEqual(self.get_last_message().id, initial_last_msg_id)
# Delete again, to test the edge case of deleting an empty topic.
result = self.client_post(endpoint, {
"topic_name": topic_name
})
self.assert_json_success(result)
self.assertEqual(self.get_last_message().id, initial_last_msg_id)
class TestCrossRealmPMs(ZulipTestCase):
    """Tests for which users may exchange private messages across realms.

    Ordinary users may only PM within their own realm; the designated
    cross-realm bots (see CROSS_REALM_BOT_EMAILS) are the exception.
    """

    def make_realm(self, domain: str) -> Realm:
        # Create an open realm whose string_id doubles as its email domain.
        realm = Realm.objects.create(string_id=domain, invite_required=False)
        RealmDomain.objects.create(realm=realm, domain=domain)
        return realm

    def create_user(self, email: str) -> UserProfile:
        # Register a user in the realm implied by the email's domain part.
        subdomain = email.split("@")[1]
        self.register(email, 'test', subdomain=subdomain)
        return get_user(email, get_realm(subdomain))

    @slow("Sends a large number of messages")
    @override_settings(CROSS_REALM_BOT_EMAILS=['notification-bot@zulip.com',
                                               'welcome-bot@zulip.com',
                                               'support@3.example.com'])
    def test_realm_scenarios(self) -> None:
        """Exercise the matrix of same-realm, cross-realm, and bot PM scenarios."""
        self.make_realm('1.example.com')
        r2 = self.make_realm('2.example.com')
        self.make_realm('3.example.com')

        def assert_message_received(to_user: UserProfile, from_user: UserProfile) -> None:
            # The most recent message received by to_user came from from_user.
            messages = get_user_messages(to_user)
            self.assertEqual(messages[-1].sender.id, from_user.id)

        def assert_invalid_user() -> Any:
            # Context manager asserting the send is rejected with the
            # generic invalid-user error (note trailing space in regex).
            return self.assertRaisesRegex(
                JsonableError,
                'Invalid user ID ')

        user1_email = 'user1@1.example.com'
        user1a_email = 'user1a@1.example.com'
        user2_email = 'user2@2.example.com'
        user3_email = 'user3@3.example.com'
        notification_bot_email = 'notification-bot@zulip.com'
        support_email = 'support@3.example.com'  # note: not zulip.com

        user1 = self.create_user(user1_email)
        user1a = self.create_user(user1a_email)
        user2 = self.create_user(user2_email)
        user3 = self.create_user(user3_email)
        notification_bot = get_system_bot(notification_bot_email)
        with self.settings(CROSS_REALM_BOT_EMAILS=['notification-bot@zulip.com', 'welcome-bot@zulip.com']):
            # HACK: We should probably be creating this "bot" user another
            # way, but since you can't register a user with a
            # cross-realm email, we need to hide this for now.
            support_bot = self.create_user(support_email)

        # Users can PM themselves
        self.send_personal_message(user1, user1)
        assert_message_received(user1, user1)

        # Users on the same realm can PM each other
        self.send_personal_message(user1, user1a)
        assert_message_received(user1a, user1)

        # Cross-realm bots in the zulip.com realm can PM any realm
        # (They need lower level APIs to do this.)
        internal_send_private_message(
            realm=r2,
            sender=get_system_bot(notification_bot_email),
            recipient_user=get_user(user2_email, r2),
            content='bla',
        )
        assert_message_received(user2, notification_bot)

        # All users can PM cross-realm bots in the zulip.com realm
        self.send_personal_message(user1, notification_bot)
        assert_message_received(notification_bot, user1)

        # Users can PM cross-realm bots on non-zulip realms.
        # (The support bot represents some theoretical bot that we may
        # create in the future that does not have zulip.com as its realm.)
        self.send_personal_message(user1, support_bot)
        assert_message_received(support_bot, user1)

        # Allow sending PMs to two different cross-realm bots simultaneously.
        # (We don't particularly need this feature, but since users can
        # already individually send PMs to cross-realm bots, we shouldn't
        # prevent them from sending multiple bots at once.  We may revisit
        # this if it's a nuisance for huddles.)
        self.send_huddle_message(user1, [notification_bot, support_bot])
        assert_message_received(notification_bot, user1)
        assert_message_received(support_bot, user1)

        # Prevent old loophole where I could send PMs to other users as long
        # as I copied a cross-realm bot from the same realm.
        with assert_invalid_user():
            self.send_huddle_message(user1, [user3, support_bot])

        # Users on three different realms can't PM each other,
        # even if one of the users is a cross-realm bot.
        with assert_invalid_user():
            self.send_huddle_message(user1, [user2, notification_bot])

        with assert_invalid_user():
            self.send_huddle_message(notification_bot, [user1, user2])

        # Users on the different realms cannot PM each other
        with assert_invalid_user():
            self.send_personal_message(user1, user2)

        # Users on non-zulip realms can't PM "ordinary" Zulip users
        with assert_invalid_user():
            self.send_personal_message(user1, self.example_user('hamlet'))

        # Users on three different realms cannot PM each other
        with assert_invalid_user():
            self.send_huddle_message(user1, [user2, user3])
class TestAddressee(ZulipTestCase):
    """Tests for the Addressee abstraction that resolves message recipients."""

    def test_addressee_for_user_ids(self) -> None:
        """for_user_ids() resolves exactly the requested user IDs."""
        realm = get_realm('zulip')
        requested_ids = [
            self.example_user(name).id
            for name in ('cordelia', 'hamlet', 'othello')
        ]

        addressee = Addressee.for_user_ids(user_ids=requested_ids, realm=realm)
        resolved_ids = [profile.id for profile in addressee.user_profiles()]

        self.assertEqual(set(resolved_ids), set(requested_ids))

    def test_addressee_for_user_ids_nonexistent_id(self) -> None:
        """for_user_ids() rejects an ID that matches no user."""
        with self.assertRaisesRegex(JsonableError, 'Invalid user ID '):
            Addressee.for_user_ids(user_ids=[779], realm=get_realm('zulip'))

    def test_addressee_legacy_build_for_user_ids(self) -> None:
        """legacy_build() with type 'private' resolves the given user IDs."""
        realm = get_realm('zulip')
        self.login('hamlet')
        requested_ids = [
            self.example_user(name).id
            for name in ('cordelia', 'othello')
        ]

        addressee = Addressee.legacy_build(
            sender=self.example_user('hamlet'), message_type_name='private',
            message_to=requested_ids, topic_name='random_topic',
            realm=realm,
        )
        resolved_ids = [profile.id for profile in addressee.user_profiles()]

        self.assertEqual(set(resolved_ids), set(requested_ids))

    def test_addressee_legacy_build_for_stream_id(self) -> None:
        """legacy_build() with type 'stream' resolves the given stream ID."""
        realm = get_realm('zulip')
        self.login('iago')
        sender = self.example_user('iago')
        self.subscribe(sender, "Denmark")
        denmark = get_stream('Denmark', realm)

        addressee = Addressee.legacy_build(
            sender=sender, message_type_name='stream',
            message_to=[denmark.id], topic_name='random_topic',
            realm=realm,
        )
        self.assertEqual(denmark.id, addressee.stream_id())
class InternalPrepTest(ZulipTestCase):
    """Tests for the internal_send_* / internal_prep_* message helpers."""

    def test_returns_for_internal_sends(self) -> None:
        """Bad content makes the internal_send_* helpers log and return, not raise."""
        # For our internal_send_* functions we return
        # if the prep stages fail.  This is mostly defensive
        # code, since we are generally creating the messages
        # ourselves, but we want to make sure that the functions
        # won't actually explode if we give them bad content.
        bad_content = ''
        realm = get_realm('zulip')
        cordelia = self.example_user('cordelia')
        hamlet = self.example_user('hamlet')
        othello = self.example_user('othello')
        stream = get_stream('Verona', realm)

        with mock.patch('logging.exception') as m:
            internal_send_private_message(
                realm=realm,
                sender=cordelia,
                recipient_user=hamlet,
                content=bad_content,
            )

        # The failure is reported via logging.exception; check its message.
        arg = m.call_args_list[0][0][0]
        self.assertIn('Message must not be empty', arg)

        with mock.patch('logging.exception') as m:
            internal_send_huddle_message(
                realm=realm,
                sender=cordelia,
                emails=[hamlet.email, othello.email],
                content=bad_content,
            )

        arg = m.call_args_list[0][0][0]
        self.assertIn('Message must not be empty', arg)

        with mock.patch('logging.exception') as m:
            internal_send_stream_message(
                realm=realm,
                sender=cordelia,
                topic='whatever',
                content=bad_content,
                stream=stream
            )

        arg = m.call_args_list[0][0][0]
        self.assertIn('Message must not be empty', arg)

        with mock.patch('logging.exception') as m:
            internal_send_stream_message_by_name(
                realm=realm,
                sender=cordelia,
                stream_name=stream.name,
                topic='whatever',
                content=bad_content
            )

        arg = m.call_args_list[0][0][0]
        self.assertIn('Message must not be empty', arg)

    def test_error_handling(self) -> None:
        """Overlong content is truncated; cross-realm recipients are logged, not raised."""
        realm = get_realm('zulip')
        sender = self.example_user('cordelia')
        recipient_user = self.example_user('hamlet')
        content = 'x' * 15000

        result = internal_prep_private_message(
            realm=realm,
            sender=sender,
            recipient_user=recipient_user,
            content=content)
        message = result['message']
        self.assertIn('message was too long', message.content)

        with self.assertRaises(RuntimeError):
            internal_prep_private_message(
                realm=None,  # should cause error
                sender=sender,
                recipient_user=recipient_user,
                content=content)

        # Simulate sending a message to somebody not in the
        # realm of the sender.
        recipient_user = self.mit_user('starnine')
        with mock.patch('logging.exception') as logging_mock:
            result = internal_prep_private_message(
                realm=realm,
                sender=sender,
                recipient_user=recipient_user,
                content=content)

        arg = logging_mock.call_args_list[0][0][0]
        prefix = "Error queueing internal message by cordelia@zulip.com: You can't send private messages outside of your organization."
        self.assertTrue(arg.startswith(prefix))

    def test_ensure_stream_gets_called(self) -> None:
        """Prepping a message to a nonexistent stream auto-creates the stream."""
        realm = get_realm('zulip')
        sender = self.example_user('cordelia')
        stream_name = 'test_stream'
        topic = 'whatever'
        content = 'hello'

        internal_prep_stream_message_by_name(
            realm=realm,
            sender=sender,
            stream_name=stream_name,
            topic=topic,
            content=content)

        # This would throw an error if the stream
        # wasn't automatically created.
        Stream.objects.get(name=stream_name, realm_id=realm.id)
class ExtractTest(TestCase):
    """Unit tests for the stream/recipient extraction helpers."""

    def test_extract_stream_indicator(self) -> None:
        """extract_stream_indicator() accepts names, IDs, and legacy JSON forms."""
        valid_cases = [
            # A plain stream name passes through unchanged.
            ('development', "development"),
            # Commas inside a stream name are not treated as separators.
            ('commas,are,fine', "commas,are,fine"),
            # A JSON-encoded string is decoded.
            ('"Who hasn\'t done this?"', "Who hasn't done this?"),
            # A numeric string becomes an integer stream ID.
            ("999", 999),
            # For legacy reasons it's plausible that users will
            # put a single stream into an array and then encode it
            # as JSON.  We can probably eliminate this support
            # by mid 2020 at the latest.
            ('["social"]', 'social'),
            ("[123]", 123),
        ]
        for raw, expected in valid_cases:
            self.assertEqual(extract_stream_indicator(raw), expected)

        for bad in ('{}', '[{}]'):
            with self.assertRaisesRegex(JsonableError, 'Invalid data type for stream'):
                extract_stream_indicator(bad)

        with self.assertRaisesRegex(JsonableError, 'Expected exactly one stream'):
            extract_stream_indicator('[1,2,"general"]')

    def test_extract_private_recipients_emails(self) -> None:
        """extract_private_recipients() handles email lists in several wire formats."""
        # JSON list w/dups, empties, and trailing whitespace
        raw = ujson.dumps([' alice@zulip.com ', ' bob@zulip.com ', ' ', 'bob@zulip.com'])
        # sorted() gets confused by extract_private_recipients' return type
        # For testing, ignorance here is better than manual casting
        self.assertEqual(sorted(extract_private_recipients(raw)),
                         ['alice@zulip.com', 'bob@zulip.com'])

        # simple string with one name
        self.assertEqual(extract_private_recipients('alice@zulip.com '),
                         ['alice@zulip.com'])

        # JSON-encoded string
        self.assertEqual(extract_private_recipients('"alice@zulip.com"'),
                         ['alice@zulip.com'])

        # bare comma-delimited string
        self.assertEqual(sorted(extract_private_recipients('bob@zulip.com, alice@zulip.com')),
                         ['alice@zulip.com', 'bob@zulip.com'])

        # JSON-encoded, comma-delimited string
        self.assertEqual(sorted(extract_private_recipients('"bob@zulip.com,alice@zulip.com"')),
                         ['alice@zulip.com', 'bob@zulip.com'])

        # Invalid data
        for invalid in (ujson.dumps(dict(color='red')), ujson.dumps([{}])):
            with self.assertRaisesRegex(JsonableError, 'Invalid data type for recipients'):
                extract_private_recipients(invalid)

        # Empty list
        self.assertEqual(extract_private_recipients('[]'), [])

        # Heterogeneous lists are not supported
        with self.assertRaisesRegex(JsonableError, 'Recipient lists may contain emails or user IDs, but not both.'):
            extract_private_recipients(ujson.dumps(['eeshan@example.com', 3, 4]))

    def test_extract_recipient_ids(self) -> None:
        """extract_private_recipients() also accepts lists of user IDs."""
        # JSON list w/dups
        self.assertEqual(sorted(extract_private_recipients(ujson.dumps([3, 3, 12]))),
                         [3, 12])

        # Invalid data
        with self.assertRaisesRegex(JsonableError, 'Invalid data type for recipients'):
            extract_private_recipients(ujson.dumps(dict(recipient=12)))

        # Heterogeneous lists are not supported
        with self.assertRaisesRegex(JsonableError, 'Recipient lists may contain emails or user IDs, but not both.'):
            extract_private_recipients(ujson.dumps([3, 4, 'eeshan@example.com']))
class PersonalMessagesTest(ZulipTestCase):
    """Tests for sending and receiving one-on-one private messages."""

    def test_near_pm_message_url(self) -> None:
        """near_message_url() builds a /#narrow/pm-with/.../near/ link for PMs."""
        realm = get_realm('zulip')
        message = dict(
            type='personal',
            id=555,
            display_recipient=[
                dict(id=77),
                dict(id=80),
            ],
        )
        url = near_message_url(
            realm=realm,
            message=message,
        )
        self.assertEqual(url, 'http://zulip.testserver/#narrow/pm-with/77,80-pm/near/555')

    def test_is_private_flag_not_leaked(self) -> None:
        """
        Make sure `is_private` flag is not leaked to the API.
        """
        self.login('hamlet')
        self.send_personal_message(self.example_user("hamlet"),
                                   self.example_user("cordelia"),
                                   "test")

        for msg in self.get_messages():
            self.assertNotIn('is_private', msg['flags'])

    def test_auto_subbed_to_personals(self) -> None:
        """
        Newly created users are auto-subbed to the ability to receive
        personals.
        """
        test_email = self.nonreg_email('test')
        self.register(test_email, "test")
        user_profile = self.nonreg_user('test')
        old_messages_count = message_stream_count(user_profile)
        self.send_personal_message(user_profile, user_profile)
        new_messages_count = message_stream_count(user_profile)
        self.assertEqual(new_messages_count, old_messages_count + 1)

        recipient = Recipient.objects.get(type_id=user_profile.id,
                                          type=Recipient.PERSONAL)
        message = most_recent_message(user_profile)
        self.assertEqual(message.recipient, recipient)

        # Patch get_display_recipient so the repr is deterministic.
        with mock.patch('zerver.models.get_display_recipient', return_value='recip'):
            self.assertEqual(
                str(message),
                '<Message: recip / / '
                '<UserProfile: {} {}>>'.format(user_profile.email, user_profile.realm))

            user_message = most_recent_usermessage(user_profile)
            self.assertEqual(
                str(user_message),
                '<UserMessage: recip / {} ([])>'.format(user_profile.email)
            )

    @slow("checks several profiles")
    def test_personal_to_self(self) -> None:
        """
        If you send a personal to yourself, only you see it.
        """
        old_user_profiles = list(UserProfile.objects.all())
        test_email = self.nonreg_email('test1')
        self.register(test_email, "test1")

        old_messages = []
        for user_profile in old_user_profiles:
            old_messages.append(message_stream_count(user_profile))

        user_profile = self.nonreg_user('test1')
        self.send_personal_message(user_profile, user_profile)

        # No pre-existing user's message count changed.
        new_messages = []
        for user_profile in old_user_profiles:
            new_messages.append(message_stream_count(user_profile))

        self.assertEqual(old_messages, new_messages)

        user_profile = self.nonreg_user('test1')
        recipient = Recipient.objects.get(type_id=user_profile.id, type=Recipient.PERSONAL)
        self.assertEqual(most_recent_message(user_profile).recipient, recipient)

    def assert_personal(self, sender: UserProfile, receiver: UserProfile, content: str="testcontent") -> None:
        """
        Send a private message from `sender_email` to `receiver_email` and check
        that only those two parties actually received the message.
        """
        sender_messages = message_stream_count(sender)
        receiver_messages = message_stream_count(receiver)

        other_user_profiles = UserProfile.objects.filter(~Q(id=sender.id) &
                                                         ~Q(id=receiver.id))
        old_other_messages = []
        for user_profile in other_user_profiles:
            old_other_messages.append(message_stream_count(user_profile))

        self.send_personal_message(sender, receiver, content)

        # Users outside the conversation don't get the message.
        new_other_messages = []
        for user_profile in other_user_profiles:
            new_other_messages.append(message_stream_count(user_profile))

        self.assertEqual(old_other_messages, new_other_messages)

        # The personal message is in the streams of both the sender and receiver.
        self.assertEqual(message_stream_count(sender),
                         sender_messages + 1)
        self.assertEqual(message_stream_count(receiver),
                         receiver_messages + 1)

        recipient = Recipient.objects.get(type_id=receiver.id, type=Recipient.PERSONAL)
        self.assertEqual(most_recent_message(sender).recipient, recipient)
        self.assertEqual(most_recent_message(receiver).recipient, recipient)

    def test_personal(self) -> None:
        """
        If you send a personal, only you and the recipient see it.
        """
        self.login('hamlet')
        self.assert_personal(
            sender=self.example_user("hamlet"),
            receiver=self.example_user("othello")
        )

    def test_private_message_policy(self) -> None:
        """
        Tests that PRIVATE_MESSAGE_POLICY_DISABLED works correctly.
        """
        user_profile = self.example_user("hamlet")
        self.login_user(user_profile)
        do_set_realm_property(user_profile.realm, "private_message_policy",
                              Realm.PRIVATE_MESSAGE_POLICY_DISABLED)
        with self.assertRaises(JsonableError):
            self.send_personal_message(user_profile, self.example_user("cordelia"))

        # PMs involving bots are still allowed even with the policy disabled.
        bot_profile = self.create_test_bot("testbot", user_profile)
        self.send_personal_message(user_profile, get_system_bot(settings.NOTIFICATION_BOT))
        self.send_personal_message(user_profile, bot_profile)
        self.send_personal_message(bot_profile, user_profile)

    def test_non_ascii_personal(self) -> None:
        """
        Sending a PM containing non-ASCII characters succeeds.
        """
        self.login('hamlet')
        self.assert_personal(
            sender=self.example_user("hamlet"),
            receiver=self.example_user("othello"),
            content="hümbüǵ"
        )
class StreamMessagesTest(ZulipTestCase):
    """Tests for sending stream messages and their fanout to subscribers."""

    def assert_stream_message(self, stream_name: str, topic_name: str="test topic",
                              content: str="test content") -> None:
        """
        Check that messages sent to a stream reach all subscribers to that stream.
        """
        realm = get_realm('zulip')
        subscribers = self.users_subscribed_to_stream(stream_name, realm)

        # Outgoing webhook bots don't store UserMessage rows; they will be processed later.
        subscribers = [subscriber for subscriber in subscribers
                       if subscriber.bot_type != UserProfile.OUTGOING_WEBHOOK_BOT]

        old_subscriber_messages = []
        for subscriber in subscribers:
            old_subscriber_messages.append(message_stream_count(subscriber))

        non_subscribers = [user_profile for user_profile in UserProfile.objects.all()
                           if user_profile not in subscribers]
        old_non_subscriber_messages = []
        for non_subscriber in non_subscribers:
            old_non_subscriber_messages.append(message_stream_count(non_subscriber))

        non_bot_subscribers = [user_profile for user_profile in subscribers
                               if not user_profile.is_bot]
        a_subscriber = non_bot_subscribers[0]
        self.login_user(a_subscriber)
        self.send_stream_message(a_subscriber, stream_name,
                                 content=content, topic_name=topic_name)

        # Did all of the subscribers get the message?
        new_subscriber_messages = []
        for subscriber in subscribers:
            new_subscriber_messages.append(message_stream_count(subscriber))

        # Did non-subscribers not get the message?
        new_non_subscriber_messages = []
        for non_subscriber in non_subscribers:
            new_non_subscriber_messages.append(message_stream_count(non_subscriber))

        self.assertEqual(old_non_subscriber_messages, new_non_subscriber_messages)
        self.assertEqual(new_subscriber_messages, [elt + 1 for elt in old_subscriber_messages])

    def test_performance(self) -> None:
        '''
        This test is part of the automated test suite, but
        it is more intended as an aid to measuring the
        performance of do_send_messages() with consistent
        data setup across different commits.  You can modify
        the values below and run just this test, and then
        comment out the print statement toward the bottom.
        '''
        num_messages = 2
        num_extra_users = 10

        sender = self.example_user('cordelia')
        realm = sender.realm
        message_content = 'whatever'
        stream = get_stream('Denmark', realm)
        topic_name = 'lunch'
        recipient = stream.recipient
        sending_client = make_client(name="test suite")

        for i in range(num_extra_users):
            # Make every other user be idle.
            long_term_idle = i % 2 > 0

            email = 'foo%d@example.com' % (i,)
            user = UserProfile.objects.create(
                realm=realm,
                email=email,
                pointer=0,
                long_term_idle=long_term_idle,
            )
            Subscription.objects.create(
                user_profile=user,
                recipient=recipient
            )

        def send_test_message() -> None:
            # Build and send one message through the real send pipeline.
            message = Message(
                sender=sender,
                recipient=recipient,
                content=message_content,
                date_sent=timezone_now(),
                sending_client=sending_client,
            )
            message.set_topic_name(topic_name)
            do_send_messages([dict(message=message)])

        before_um_count = UserMessage.objects.count()

        t = time.time()
        for i in range(num_messages):
            send_test_message()

        delay = time.time() - t
        assert(delay)  # quiet down lint
        # print(delay)

        after_um_count = UserMessage.objects.count()
        ums_created = after_um_count - before_um_count

        # Only the non-idle half of the extra users get UserMessage rows
        # eagerly, so we expect strictly more rows than active * messages.
        num_active_users = num_extra_users / 2
        self.assertTrue(ums_created > (num_active_users * num_messages))

    def test_not_too_many_queries(self) -> None:
        """Sending one stream message stays within the expected query budget."""
        recipient_list = [self.example_user("hamlet"), self.example_user("iago"),
                          self.example_user("cordelia"), self.example_user("othello")]
        for user_profile in recipient_list:
            self.subscribe(user_profile, "Denmark")

        sender = self.example_user('hamlet')
        sending_client = make_client(name="test suite")
        stream_name = 'Denmark'
        topic_name = 'foo'
        content = 'whatever'
        realm = sender.realm

        # To get accurate count of the queries, we should make sure that
        # caches don't come into play.  If we count queries while caches are
        # filled, we will get a lower count.  Caches are not supposed to be
        # persistent, so our test can also fail if cache is invalidated
        # during the course of the unit test.
        flush_per_request_caches()
        cache_delete(get_stream_cache_key(stream_name, realm.id))
        with queries_captured() as queries:
            check_send_stream_message(
                sender=sender,
                client=sending_client,
                stream_name=stream_name,
                topic=topic_name,
                body=content,
            )

        self.assert_length(queries, 15)

    def test_stream_message_dict(self) -> None:
        """MessageDict for a stream message exposes display_recipient and stream_id."""
        user_profile = self.example_user('iago')
        self.subscribe(user_profile, "Denmark")
        self.send_stream_message(self.example_user("hamlet"), "Denmark",
                                 content="whatever", topic_name="my topic")
        message = most_recent_message(user_profile)
        row = MessageDict.get_raw_db_rows([message.id])[0]
        dct = MessageDict.build_dict_from_raw_db_row(row)
        MessageDict.post_process_dicts([dct], apply_markdown=True, client_gravatar=False)
        self.assertEqual(dct['display_recipient'], 'Denmark')

        stream = get_stream('Denmark', user_profile.realm)
        self.assertEqual(dct['stream_id'], stream.id)

    def test_stream_message_unicode(self) -> None:
        """str(Message) renders stream name, topic, and sender repr."""
        receiving_user_profile = self.example_user('iago')
        sender = self.example_user('hamlet')
        self.subscribe(receiving_user_profile, "Denmark")
        self.send_stream_message(sender, "Denmark",
                                 content="whatever", topic_name="my topic")
        message = most_recent_message(receiving_user_profile)
        self.assertEqual(str(message),
                         '<Message: Denmark / my topic / '
                         '<UserProfile: {} {}>>'.format(sender.email, sender.realm))

    def test_message_mentions(self) -> None:
        """A @**mention** sets the mentioned flag on the recipient's UserMessage."""
        user_profile = self.example_user('iago')
        self.subscribe(user_profile, "Denmark")
        self.send_stream_message(self.example_user("hamlet"), "Denmark",
                                 content="test @**Iago** rules")
        message = most_recent_message(user_profile)
        assert(UserMessage.objects.get(user_profile=user_profile, message=message).flags.mentioned.is_set)

    def test_is_private_flag(self) -> None:
        """The is_private flag is set for PMs and unset for stream messages."""
        user_profile = self.example_user('iago')
        self.subscribe(user_profile, "Denmark")

        self.send_stream_message(self.example_user("hamlet"), "Denmark",
                                 content="test")
        message = most_recent_message(user_profile)
        self.assertFalse(UserMessage.objects.get(user_profile=user_profile, message=message).flags.is_private.is_set)

        self.send_personal_message(self.example_user("hamlet"), user_profile,
                                   content="test")
        message = most_recent_message(user_profile)
        self.assertTrue(UserMessage.objects.get(user_profile=user_profile, message=message).flags.is_private.is_set)

    def _send_stream_message(self, user: UserProfile, stream_name: str, content: str) -> Set[int]:
        # Send a stream message and return the set of user IDs that the
        # single resulting send_event call notified.
        with mock.patch('zerver.lib.actions.send_event') as m:
            self.send_stream_message(
                user,
                stream_name,
                content=content
            )
        self.assertEqual(m.call_count, 1)
        users = m.call_args[0][2]
        user_ids = {u['id'] for u in users}
        return user_ids

    def test_unsub_mention(self) -> None:
        """Mentioning an unsubscribed user does not deliver them the message."""
        cordelia = self.example_user('cordelia')
        hamlet = self.example_user('hamlet')

        stream_name = 'Test Stream'

        self.subscribe(hamlet, stream_name)

        UserMessage.objects.filter(
            user_profile=cordelia
        ).delete()

        def mention_cordelia() -> Set[int]:
            content = 'test @**Cordelia Lear** rules'

            user_ids = self._send_stream_message(
                user=hamlet,
                stream_name=stream_name,
                content=content
            )
            return user_ids

        def num_cordelia_messages() -> int:
            return UserMessage.objects.filter(
                user_profile=cordelia
            ).count()

        user_ids = mention_cordelia()
        self.assertEqual(0, num_cordelia_messages())
        self.assertNotIn(cordelia.id, user_ids)

        # Make sure test isn't too brittle-subscribing
        # Cordelia and mentioning her should give her a
        # message.
        self.subscribe(cordelia, stream_name)
        user_ids = mention_cordelia()
        self.assertIn(cordelia.id, user_ids)
        self.assertEqual(1, num_cordelia_messages())

    def test_message_bot_mentions(self) -> None:
        """Mentioning a bot delivers the message and sets its mentioned flag."""
        cordelia = self.example_user('cordelia')
        hamlet = self.example_user('hamlet')
        realm = hamlet.realm

        stream_name = 'Test Stream'

        self.subscribe(hamlet, stream_name)

        normal_bot = do_create_user(
            email='normal-bot@zulip.com',
            password='',
            realm=realm,
            full_name='Normal Bot',
            short_name='',
            bot_type=UserProfile.DEFAULT_BOT,
            bot_owner=cordelia,
        )

        content = 'test @**Normal Bot** rules'

        user_ids = self._send_stream_message(
            user=hamlet,
            stream_name=stream_name,
            content=content
        )

        self.assertIn(normal_bot.id, user_ids)
        user_message = most_recent_usermessage(normal_bot)
        self.assertEqual(user_message.message.content, content)
        self.assertTrue(user_message.flags.mentioned)

    def test_stream_message_mirroring(self) -> None:
        """Only API super users may send forged zephyr_mirror messages."""
        user = self.mit_user('starnine')
        self.subscribe(user, 'Verona')

        do_change_is_admin(user, True, 'api_super_user')
        result = self.api_post(user, "/api/v1/messages", {"type": "stream",
                                                          "to": "Verona",
                                                          "sender": self.mit_email("sipbtest"),
                                                          "client": "zephyr_mirror",
                                                          "topic": "announcement",
                                                          "content": "Everyone knows Iago rules",
                                                          "forged": "true"},
                               subdomain="zephyr")
        self.assert_json_success(result)

        do_change_is_admin(user, False, 'api_super_user')
        result = self.api_post(user, "/api/v1/messages", {"type": "stream",
                                                          "to": "Verona",
                                                          "sender": self.mit_email("sipbtest"),
                                                          "client": "zephyr_mirror",
                                                          "topic": "announcement",
                                                          "content": "Everyone knows Iago rules",
                                                          "forged": "true"},
                               subdomain="zephyr")
        self.assert_json_error(result, "User not authorized for this query")

    def test_message_to_stream(self) -> None:
        """
        If you send a message to a stream, everyone subscribed to the stream
        receives the messages.
        """
        self.assert_stream_message("Scotland")

    def test_non_ascii_stream_message(self) -> None:
        """
        Sending a stream message containing non-ASCII characters in the stream
        name, topic, or message body succeeds.
        """
        self.login('hamlet')

        # Subscribe everyone to a stream with non-ASCII characters.
        non_ascii_stream_name = "hümbüǵ"
        realm = get_realm("zulip")
        stream = self.make_stream(non_ascii_stream_name)
        for user_profile in UserProfile.objects.filter(is_active=True, is_bot=False,
                                                       realm=realm)[0:3]:
            self.subscribe(user_profile, stream.name)

        self.assert_stream_message(non_ascii_stream_name, topic_name="hümbüǵ",
                                   content="hümbüǵ")

    def test_get_raw_unread_data_for_huddle_messages(self) -> None:
        """Unread data and recent conversations track huddle messages correctly."""
        users = [
            self.example_user('hamlet'),
            self.example_user('cordelia'),
            self.example_user('iago'),
            self.example_user('prospero'),
            self.example_user('othello'),
        ]

        message1_id = self.send_huddle_message(users[0], users, "test content 1")
        message2_id = self.send_huddle_message(users[0], users, "test content 2")

        msg_data = get_raw_unread_data(users[1])

        # both the messages are present in msg_data
        self.assertIn(message1_id, msg_data["huddle_dict"].keys())
        self.assertIn(message2_id, msg_data["huddle_dict"].keys())

        # only these two messages are present in msg_data
        self.assertEqual(len(msg_data["huddle_dict"].keys()), 2)

        recent_conversations = get_recent_private_conversations(users[1])
        self.assertEqual(len(recent_conversations), 1)
        recent_conversation = list(recent_conversations.values())[0]
        self.assertEqual(set(recent_conversation['user_ids']), {user.id for user in users if
                                                                user != users[1]})
        self.assertEqual(recent_conversation['max_message_id'], message2_id)
class MessageDictTest(ZulipTestCase):
    def test_both_codepaths(self) -> None:
        '''
        We have two different codepaths that
        extract a particular shape of dictionary
        for messages to send to clients:

            events:

                These are the events we send to MANY
                clients when a message is originally
                sent.

            fetch:

                These are the messages we send to ONE
                client when they fetch messages via
                some narrow/search in the UI.

        Different clients have different needs
        when it comes to things like generating avatar
        hashes or including both rendered and unrendered
        markdown, so that explains the different shapes.

        And then the two codepaths have different
        performance needs. In the events codepath, we
        have the Django view generate a single "wide"
        dictionary that gets put on the event queue,
        and then we send events to multiple clients,
        finalizing the payload for each of them depending
        on the "shape" they want. (We also avoid
        doing extra work for any two clients who want
        the same shape dictionary, but that's out of the
        scope of this particular test).

        In the fetch scenario, the single client only needs
        a dictionary of one shape, but we need to re-hydrate
        the sender information, since the sender details
        may have changed since the message was originally
        sent.

        This test simply verifies that the two codepaths
        ultimately provide the same result.
        '''

        def reload_message(msg_id: int) -> Message:
            # Get a clean copy of the message, and
            # clear the cache.
            cache_delete(to_dict_cache_key_id(msg_id))
            msg = Message.objects.get(id=msg_id)
            return msg

        def get_send_message_payload(
                msg_id: int,
                apply_markdown: bool,
                client_gravatar: bool) -> Dict[str, Any]:
            # Simulate the "events" codepath: build the wide dict,
            # then finalize it into a client-specific shape.
            msg = reload_message(msg_id)
            wide_dict = MessageDict.wide_dict(msg)

            narrow_dict = MessageDict.finalize_payload(
                wide_dict,
                apply_markdown=apply_markdown,
                client_gravatar=client_gravatar,
            )
            return narrow_dict

        def get_fetch_payload(
                msg_id: int,
                apply_markdown: bool,
                client_gravatar: bool) -> Dict[str, Any]:
            # Simulate the "fetch" codepath used by narrows/searches.
            msg = reload_message(msg_id)
            unhydrated_dict = MessageDict.to_dict_uncached_helper([msg])[0]
            # The next step mutates the dict in place
            # for performance reasons.
            MessageDict.post_process_dicts(
                [unhydrated_dict],
                apply_markdown=apply_markdown,
                client_gravatar=client_gravatar,
            )
            final_dict = unhydrated_dict
            return final_dict

        def test_message_id() -> int:
            # Send a fresh stream message and return its ID.
            hamlet = self.example_user('hamlet')
            self.login_user(hamlet)
            msg_id = self.send_stream_message(
                hamlet,
                "Scotland",
                topic_name="editing",
                content="before edit"
            )
            return msg_id

        flag_setups = [
            [False, False],
            [False, True],
            [True, False],
            [True, True],
        ]

        msg_id = test_message_id()

        # Both codepaths must agree for every combination of flags.
        for (apply_markdown, client_gravatar) in flag_setups:
            send_message_payload = get_send_message_payload(
                msg_id,
                apply_markdown=apply_markdown,
                client_gravatar=client_gravatar,
            )

            fetch_payload = get_fetch_payload(
                msg_id,
                apply_markdown=apply_markdown,
                client_gravatar=client_gravatar,
            )

            self.assertEqual(send_message_payload, fetch_payload)
    @slow('builds lots of messages')
    def test_bulk_message_fetching(self) -> None:
        """Bulk-hydrating many messages stays within time and query budgets."""
        sender = self.example_user('othello')
        receiver = self.example_user('hamlet')
        pm_recipient = Recipient.objects.get(type_id=receiver.id, type=Recipient.PERSONAL)
        stream_name = 'Çiğdem'
        stream = self.make_stream(stream_name)
        stream_recipient = Recipient.objects.get(type_id=stream.id, type=Recipient.STREAM)
        sending_client = make_client(name="test suite")

        # Create 300 PM + 300 stream messages, each with a reaction, so the
        # bulk-fetch path has to join against reactions too.
        ids = []
        for i in range(300):
            for recipient in [pm_recipient, stream_recipient]:
                message = Message(
                    sender=sender,
                    recipient=recipient,
                    content='whatever %d' % (i,),
                    rendered_content='DOES NOT MATTER',
                    rendered_content_version=bugdown.version,
                    date_sent=timezone_now(),
                    sending_client=sending_client,
                    last_edit_time=timezone_now(),
                    edit_history='[]'
                )
                message.set_topic_name('whatever')
                message.save()
                ids.append(message.id)

                Reaction.objects.create(user_profile=sender, message=message,
                                        emoji_name='simple_smile')

        num_ids = len(ids)
        self.assertTrue(num_ids >= 600)

        flush_per_request_caches()
        t = time.time()
        with queries_captured() as queries:
            rows = list(MessageDict.get_raw_db_rows(ids))

            objs = [
                MessageDict.build_dict_from_raw_db_row(row)
                for row in rows
            ]
            MessageDict.post_process_dicts(objs, apply_markdown=False, client_gravatar=False)

        delay = time.time() - t
        # Make sure we don't take longer than 1.5ms per message to
        # extract messages.  Note that we increased this from 1ms to
        # 1.5ms to handle tests running in parallel being a bit
        # slower.
        error_msg = "Number of ids: {}. Time delay: {}".format(num_ids, delay)
        self.assertTrue(delay < 0.0015 * num_ids, error_msg)
        self.assert_length(queries, 7)
        self.assertEqual(len(rows), num_ids)
def test_applying_markdown(self) -> None:
sender = self.example_user('othello')
receiver = self.example_user('hamlet')
recipient = Recipient.objects.get(type_id=receiver.id, type=Recipient.PERSONAL)
sending_client = make_client(name="test suite")
message = Message(
sender=sender,
recipient=recipient,
content='hello **world**',
date_sent=timezone_now(),
sending_client=sending_client,
last_edit_time=timezone_now(),
edit_history='[]'
)
message.set_topic_name('whatever')
message.save()
# An important part of this test is to get the message through this exact code path,
# because there is an ugly hack we need to cover. So don't just say "row = message".
row = MessageDict.get_raw_db_rows([message.id])[0]
dct = MessageDict.build_dict_from_raw_db_row(row)
expected_content = '<p>hello <strong>world</strong></p>'
self.assertEqual(dct['rendered_content'], expected_content)
message = Message.objects.get(id=message.id)
self.assertEqual(message.rendered_content, expected_content)
self.assertEqual(message.rendered_content_version, bugdown.version)
    @mock.patch("zerver.lib.message.bugdown.convert")
    def test_applying_markdown_invalid_format(self, convert_mock: Any) -> None:
        """If the markdown converter returns None (invalid) without raising,
        the user gets a canned 'could not understand formatting' message."""
        # pretend the converter returned an invalid message without raising an exception
        convert_mock.return_value = None
        sender = self.example_user('othello')
        receiver = self.example_user('hamlet')
        recipient = Recipient.objects.get(type_id=receiver.id, type=Recipient.PERSONAL)
        sending_client = make_client(name="test suite")
        message = Message(
            sender=sender,
            recipient=recipient,
            content='hello **world**',
            date_sent=timezone_now(),
            sending_client=sending_client,
            last_edit_time=timezone_now(),
            edit_history='[]'
        )
        message.set_topic_name('whatever')
        message.save()
        # An important part of this test is to get the message through this exact code path,
        # because there is an ugly hack we need to cover.  So don't just say "row = message".
        row = MessageDict.get_raw_db_rows([message.id])[0]
        dct = MessageDict.build_dict_from_raw_db_row(row)
        error_content = '<p>[Zulip note: Sorry, we could not understand the formatting of your message]</p>'
        self.assertEqual(dct['rendered_content'], error_content)
    def test_topic_links_use_stream_realm(self) -> None:
        """Topic linkification should use the realm of the stream the
        message was sent to, not the realm of the sender."""
        # Set up a realm filter on 'zulip' and assert that messages
        # sent to a stream on 'zulip' have the topic linkified from
        # senders in both the 'zulip' and 'lear' realms as well as
        # the notification bot.
        zulip_realm = get_realm('zulip')
        url_format_string = r"https://trac.zulip.net/ticket/%(id)s"
        url = 'https://trac.zulip.net/ticket/123'
        topic_name = 'test #123'
        realm_filter = RealmFilter(realm=zulip_realm,
                                   pattern=r"#(?P<id>[0-9]{2,8})",
                                   url_format_string=url_format_string)
        self.assertEqual(
            realm_filter.__str__(),
            '<RealmFilter(zulip): #(?P<id>[0-9]{2,8})'
            ' https://trac.zulip.net/ticket/%(id)s>')
        def get_message(sender: UserProfile) -> Message:
            # Always send to Denmark on the zulip realm, whatever the sender's realm.
            msg_id = self.send_stream_message(sender, 'Denmark', 'hello world', topic_name,
                                              zulip_realm)
            return Message.objects.get(id=msg_id)
        def assert_topic_links(links: List[str], msg: Message) -> None:
            dct = MessageDict.to_dict_uncached_helper([msg])[0]
            self.assertEqual(dct[TOPIC_LINKS], links)
        # Send messages before and after saving the realm filter from each user.
        assert_topic_links([], get_message(self.example_user('othello')))
        assert_topic_links([], get_message(self.lear_user('cordelia')))
        assert_topic_links([], get_message(self.notification_bot()))
        realm_filter.save()
        assert_topic_links([url], get_message(self.example_user('othello')))
        assert_topic_links([url], get_message(self.lear_user('cordelia')))
        assert_topic_links([url], get_message(self.notification_bot()))
    def test_reaction(self) -> None:
        """A reaction on a message appears in the built message dict with
        the emoji name and the reacting user's id, email, and full name."""
        sender = self.example_user('othello')
        receiver = self.example_user('hamlet')
        recipient = Recipient.objects.get(type_id=receiver.id, type=Recipient.PERSONAL)
        sending_client = make_client(name="test suite")
        message = Message(
            sender=sender,
            recipient=recipient,
            content='hello **world**',
            date_sent=timezone_now(),
            sending_client=sending_client,
            last_edit_time=timezone_now(),
            edit_history='[]'
        )
        message.set_topic_name('whatever')
        message.save()
        reaction = Reaction.objects.create(
            message=message, user_profile=sender,
            emoji_name='simple_smile')
        row = MessageDict.get_raw_db_rows([message.id])[0]
        msg_dict = MessageDict.build_dict_from_raw_db_row(row)
        self.assertEqual(msg_dict['reactions'][0]['emoji_name'],
                         reaction.emoji_name)
        self.assertEqual(msg_dict['reactions'][0]['user_id'], sender.id)
        # The legacy 'user' sub-dict carries the same sender details.
        self.assertEqual(msg_dict['reactions'][0]['user']['id'],
                         sender.id)
        self.assertEqual(msg_dict['reactions'][0]['user']['email'],
                         sender.email)
        self.assertEqual(msg_dict['reactions'][0]['user']['full_name'],
                         sender.full_name)
def test_missing_anchor(self) -> None:
self.login('hamlet')
result = self.client_get(
'/json/messages?use_first_unread_anchor=false&num_before=1&num_after=1')
self.assert_json_error(
result, "Missing 'anchor' argument.")
def test_invalid_anchor(self) -> None:
self.login('hamlet')
result = self.client_get(
'/json/messages?use_first_unread_anchor=false&num_before=1&num_after=1&anchor=chocolate')
self.assert_json_error(
result, "Invalid anchor")
class SewMessageAndReactionTest(ZulipTestCase):
    """Tests for sew_messages_and_reactions(), which attaches raw reaction
    rows to their corresponding raw message rows."""
    def test_sew_messages_and_reaction(self) -> None:
        sender = self.example_user('othello')
        receiver = self.example_user('hamlet')
        pm_recipient = Recipient.objects.get(type_id=receiver.id, type=Recipient.PERSONAL)
        stream_name = 'Çiğdem'  # non-ASCII stream name to exercise encoding handling
        stream = self.make_stream(stream_name)
        stream_recipient = Recipient.objects.get(type_id=stream.id, type=Recipient.STREAM)
        sending_client = make_client(name="test suite")
        needed_ids = []
        # Create five PMs and five stream messages, each with one reaction.
        for i in range(5):
            for recipient in [pm_recipient, stream_recipient]:
                message = Message(
                    sender=sender,
                    recipient=recipient,
                    content='whatever %d' % (i,),
                    date_sent=timezone_now(),
                    sending_client=sending_client,
                    last_edit_time=timezone_now(),
                    edit_history='[]'
                )
                message.set_topic_name('whatever')
                message.save()
                needed_ids.append(message.id)
                reaction = Reaction(user_profile=sender, message=message,
                                    emoji_name='simple_smile')
                reaction.save()
        messages = Message.objects.filter(id__in=needed_ids).values(
            *['id', 'content'])
        reactions = Reaction.get_raw_db_rows(needed_ids)
        tied_data = sew_messages_and_reactions(messages, reactions)
        # Each sewn row should carry exactly its own single reaction.
        for data in tied_data:
            self.assertEqual(len(data['reactions']), 1)
            self.assertEqual(data['reactions'][0]['emoji_name'],
                             'simple_smile')
            self.assertTrue(data['id'])
            self.assertTrue(data['content'])
class MessagePOSTTest(ZulipTestCase):
    def _send_and_verify_message(self, user: UserProfile, stream_name: str, error_msg: str=None) -> None:
        # Helper: send a stream message as `user`.  If error_msg is None,
        # expect success and verify the message is fetchable via the API;
        # otherwise expect send_stream_message to raise a JsonableError
        # whose text matches error_msg.
        # NOTE(review): error_msg is de-facto Optional[str]; the annotation
        # should be Optional[str] -- confirm Optional is imported at file top.
        if error_msg is None:
            msg_id = self.send_stream_message(user, stream_name)
            result = self.api_get(user, '/json/messages/' + str(msg_id))
            self.assert_json_success(result)
        else:
            with self.assertRaisesRegex(JsonableError, error_msg):
                self.send_stream_message(user, stream_name)
def test_message_to_self(self) -> None:
"""
Sending a message to a stream to which you are subscribed is
successful.
"""
self.login('hamlet')
result = self.client_post("/json/messages", {"type": "stream",
"to": "Verona",
"client": "test suite",
"content": "Test message",
"topic": "Test topic"})
self.assert_json_success(result)
def test_api_message_to_self(self) -> None:
"""
Same as above, but for the API view
"""
user = self.example_user('hamlet')
result = self.api_post(user, "/api/v1/messages", {"type": "stream",
"to": "Verona",
"client": "test suite",
"content": "Test message",
"topic": "Test topic"})
self.assert_json_success(result)
    def test_message_to_stream_with_nonexistent_id(self) -> None:
        """Sending to a bogus stream ID fails, and the bot's owner is
        notified about the failed delivery."""
        cordelia = self.example_user('cordelia')
        bot = self.create_test_bot(
            short_name='whatever',
            user_profile=cordelia,
        )
        result = self.api_post(
            bot, "/api/v1/messages",
            {
                "type": "stream",
                "to": ujson.dumps([99999]),
                "client": "test suite",
                "content": "Stream message by ID.",
                "topic": "Test topic for stream ID message"
            }
        )
        self.assert_json_error(result, "Stream with ID '99999' does not exist")
        # The failure should generate a notification PM to the bot's owner.
        msg = self.get_last_message()
        expected = ("Your bot `whatever-bot@zulip.testserver` tried to send a message to "
                    "stream ID 99999, but there is no stream with that ID.")
        self.assertEqual(msg.content, expected)
def test_message_to_stream_by_id(self) -> None:
"""
Sending a message to a stream (by stream ID) to which you are
subscribed is successful.
"""
self.login('hamlet')
realm = get_realm('zulip')
stream = get_stream('Verona', realm)
result = self.client_post("/json/messages", {"type": "stream",
"to": ujson.dumps([stream.id]),
"client": "test suite",
"content": "Stream message by ID.",
"topic": "Test topic for stream ID message"})
self.assert_json_success(result)
sent_message = self.get_last_message()
self.assertEqual(sent_message.content, "Stream message by ID.")
    def test_sending_message_as_stream_post_policy_admins(self) -> None:
        """
        Sending messages to streams which only the admins can create and post to.
        Posting rights for a bot follow its owner's rights.
        """
        admin_profile = self.example_user("iago")
        self.login_user(admin_profile)
        stream_name = "Verona"
        stream = get_stream(stream_name, admin_profile.realm)
        do_change_stream_post_policy(stream, Stream.STREAM_POST_POLICY_ADMINS)
        # Admins and their owned bots can send to STREAM_POST_POLICY_ADMINS streams
        self._send_and_verify_message(admin_profile, stream_name)
        admin_owned_bot = self.create_test_bot(
            short_name='whatever1',
            full_name='whatever1',
            user_profile=admin_profile,
        )
        self._send_and_verify_message(admin_owned_bot, stream_name)
        non_admin_profile = self.example_user("hamlet")
        self.login_user(non_admin_profile)
        # Non admins and their owned bots cannot send to STREAM_POST_POLICY_ADMINS streams
        self._send_and_verify_message(non_admin_profile, stream_name,
                                      "Only organization administrators can send to this stream.")
        non_admin_owned_bot = self.create_test_bot(
            short_name='whatever2',
            full_name='whatever2',
            user_profile=non_admin_profile,
        )
        self._send_and_verify_message(non_admin_owned_bot, stream_name,
                                      "Only organization administrators can send to this stream.")
        # Bots without owner (except cross realm bot) cannot send to announcement only streams
        bot_without_owner = do_create_user(
            email='free-bot@zulip.testserver',
            password='',
            realm=non_admin_profile.realm,
            full_name='freebot',
            short_name='freebot',
            bot_type=UserProfile.DEFAULT_BOT,
        )
        self._send_and_verify_message(bot_without_owner, stream_name,
                                      "Only organization administrators can send to this stream.")
        # Cross realm bots should be allowed
        notification_bot = get_system_bot("notification-bot@zulip.com")
        internal_send_stream_message(stream.realm, notification_bot, stream,
                                     'Test topic', 'Test message by notification bot')
        self.assertEqual(self.get_last_message().content, 'Test message by notification bot')
    def test_sending_message_as_stream_post_policy_restrict_new_members(self) -> None:
        """
        Sending messages to streams which new members cannot create and post to.
        """
        admin_profile = self.example_user("iago")
        self.login_user(admin_profile)
        do_set_realm_property(admin_profile.realm, 'waiting_period_threshold', 10)
        # Make Iago a "new member" (joined more recently than the threshold).
        admin_profile.date_joined = timezone_now() - datetime.timedelta(days=9)
        admin_profile.save()
        self.assertTrue(admin_profile.is_new_member)
        self.assertTrue(admin_profile.is_realm_admin)
        stream_name = "Verona"
        stream = get_stream(stream_name, admin_profile.realm)
        do_change_stream_post_policy(stream, Stream.STREAM_POST_POLICY_RESTRICT_NEW_MEMBERS)
        # Admins and their owned bots can send to STREAM_POST_POLICY_RESTRICT_NEW_MEMBERS streams,
        # even if the admin is a new user
        self._send_and_verify_message(admin_profile, stream_name)
        admin_owned_bot = self.create_test_bot(
            short_name='whatever1',
            full_name='whatever1',
            user_profile=admin_profile,
        )
        self._send_and_verify_message(admin_owned_bot, stream_name)
        non_admin_profile = self.example_user("hamlet")
        self.login_user(non_admin_profile)
        non_admin_profile.date_joined = timezone_now() - datetime.timedelta(days=9)
        non_admin_profile.save()
        self.assertTrue(non_admin_profile.is_new_member)
        self.assertFalse(non_admin_profile.is_realm_admin)
        # Non-admin new members and their owned bots are blocked from
        # STREAM_POST_POLICY_RESTRICT_NEW_MEMBERS streams
        self._send_and_verify_message(non_admin_profile, stream_name,
                                      "New members cannot send to this stream.")
        non_admin_owned_bot = self.create_test_bot(
            short_name='whatever2',
            full_name='whatever2',
            user_profile=non_admin_profile,
        )
        self._send_and_verify_message(non_admin_owned_bot, stream_name,
                                      "New members cannot send to this stream.")
        # Bots without owner (except cross realm bot) cannot send to announcement only stream
        bot_without_owner = do_create_user(
            email='free-bot@zulip.testserver',
            password='',
            realm=non_admin_profile.realm,
            full_name='freebot',
            short_name='freebot',
            bot_type=UserProfile.DEFAULT_BOT,
        )
        self._send_and_verify_message(bot_without_owner, stream_name,
                                      "New members cannot send to this stream.")
        # Cross realm bots should be allowed
        notification_bot = get_system_bot("notification-bot@zulip.com")
        internal_send_stream_message(stream.realm, notification_bot, stream,
                                     'Test topic', 'Test message by notification bot')
        self.assertEqual(self.get_last_message().content, 'Test message by notification bot')
def test_api_message_with_default_to(self) -> None:
"""
Sending messages without a to field should be sent to the default
stream for the user_profile.
"""
user = self.example_user('hamlet')
user.default_sending_stream_id = get_stream('Verona', user.realm).id
user.save()
result = self.api_post(user, "/api/v1/messages", {"type": "stream",
"client": "test suite",
"content": "Test message no to",
"topic": "Test topic"})
self.assert_json_success(result)
sent_message = self.get_last_message()
self.assertEqual(sent_message.content, "Test message no to")
def test_message_to_nonexistent_stream(self) -> None:
"""
Sending a message to a nonexistent stream fails.
"""
self.login('hamlet')
self.assertFalse(Stream.objects.filter(name="nonexistent_stream"))
result = self.client_post("/json/messages", {"type": "stream",
"to": "nonexistent_stream",
"client": "test suite",
"content": "Test message",
"topic": "Test topic"})
self.assert_json_error(result, "Stream 'nonexistent_stream' does not exist")
def test_message_to_nonexistent_stream_with_bad_characters(self) -> None:
"""
Nonexistent stream name with bad characters should be escaped properly.
"""
self.login('hamlet')
self.assertFalse(Stream.objects.filter(name="""&<"'><non-existent>"""))
result = self.client_post("/json/messages", {"type": "stream",
"to": """&<"'><non-existent>""",
"client": "test suite",
"content": "Test message",
"topic": "Test topic"})
self.assert_json_error(result, "Stream '&<"'><non-existent>' does not exist")
    def test_personal_message(self) -> None:
        """
        Sending a personal message to a valid username is successful.
        """
        user_profile = self.example_user("hamlet")
        self.login_user(user_profile)
        othello = self.example_user('othello')
        result = self.client_post("/json/messages", {"type": "private",
                                                     "content": "Test message",
                                                     "client": "test suite",
                                                     "to": othello.email})
        self.assert_json_success(result)
        message_id = ujson.loads(result.content.decode())['id']
        # The 1:1 PM should show up as a recent conversation with othello.
        recent_conversations = get_recent_private_conversations(user_profile)
        self.assertEqual(len(recent_conversations), 1)
        recent_conversation = list(recent_conversations.values())[0]
        recipient_id = list(recent_conversations.keys())[0]
        self.assertEqual(set(recent_conversation['user_ids']), {othello.id})
        self.assertEqual(recent_conversation['max_message_id'], message_id)
        # Now send a message to yourself and see how that interacts with the data structure
        result = self.client_post("/json/messages", {"type": "private",
                                                     "content": "Test message",
                                                     "client": "test suite",
                                                     "to": user_profile.email})
        self.assert_json_success(result)
        self_message_id = ujson.loads(result.content.decode())['id']
        recent_conversations = get_recent_private_conversations(user_profile)
        self.assertEqual(len(recent_conversations), 2)
        recent_conversation = recent_conversations[recipient_id]
        self.assertEqual(set(recent_conversation['user_ids']), {othello.id})
        self.assertEqual(recent_conversation['max_message_id'], message_id)
        # Now verify we have the appropriate self-pm data structure
        del recent_conversations[recipient_id]
        recent_conversation = list(recent_conversations.values())[0]
        recipient_id = list(recent_conversations.keys())[0]
        # A self-PM excludes the sender, so user_ids is empty.
        self.assertEqual(set(recent_conversation['user_ids']), set())
        self.assertEqual(recent_conversation['max_message_id'], self_message_id)
def test_personal_message_by_id(self) -> None:
"""
Sending a personal message to a valid user ID is successful.
"""
self.login('hamlet')
result = self.client_post(
"/json/messages",
{
"type": "private",
"content": "Test message",
"client": "test suite",
"to": ujson.dumps([self.example_user("othello").id])
}
)
self.assert_json_success(result)
msg = self.get_last_message()
self.assertEqual("Test message", msg.content)
self.assertEqual(msg.recipient_id, self.example_user("othello").id)
    def test_group_personal_message_by_id(self) -> None:
        """
        Sending a group personal message to multiple valid user IDs is
        successful, and is addressed to the huddle of all participants
        (including the sender).
        """
        self.login('hamlet')
        result = self.client_post(
            "/json/messages",
            {
                "type": "private",
                "content": "Test message",
                "client": "test suite",
                "to": ujson.dumps([self.example_user("othello").id,
                                   self.example_user("cordelia").id])
            }
        )
        self.assert_json_success(result)
        msg = self.get_last_message()
        self.assertEqual("Test message", msg.content)
        self.assertEqual(msg.recipient_id, get_huddle_recipient(
            {self.example_user("hamlet").id,
             self.example_user("othello").id,
             self.example_user("cordelia").id}).id
        )
def test_personal_message_copying_self(self) -> None:
"""
Sending a personal message to yourself plus another user is successful,
and counts as a message just to that user.
"""
hamlet = self.example_user('hamlet')
othello = self.example_user('othello')
self.login_user(hamlet)
result = self.client_post("/json/messages", {
"type": "private",
"content": "Test message",
"client": "test suite",
"to": ujson.dumps([hamlet.id, othello.id])})
self.assert_json_success(result)
msg = self.get_last_message()
# Verify that we're not actually on the "recipient list"
self.assertNotIn("Hamlet", str(msg.recipient))
def test_personal_message_to_nonexistent_user(self) -> None:
"""
Sending a personal message to an invalid email returns error JSON.
"""
self.login('hamlet')
result = self.client_post("/json/messages", {"type": "private",
"content": "Test message",
"client": "test suite",
"to": "nonexistent"})
self.assert_json_error(result, "Invalid email 'nonexistent'")
    def test_personal_message_to_deactivated_user(self) -> None:
        """
        Sending a personal message to a deactivated user returns error JSON.
        """
        othello = self.example_user('othello')
        cordelia = self.example_user('cordelia')
        do_deactivate_user(othello)
        self.login('hamlet')
        # A 1:1 PM to the deactivated user fails.
        result = self.client_post("/json/messages", {
            "type": "private",
            "content": "Test message",
            "client": "test suite",
            "to": ujson.dumps([othello.id])})
        self.assert_json_error(result, "'{}' is no longer using Zulip.".format(othello.email))
        # A group PM that merely includes the deactivated user also fails.
        result = self.client_post("/json/messages", {
            "type": "private",
            "content": "Test message",
            "client": "test suite",
            "to": ujson.dumps([othello.id, cordelia.id])})
        self.assert_json_error(result, "'{}' is no longer using Zulip.".format(othello.email))
def test_invalid_type(self) -> None:
"""
Sending a message of unknown type returns error JSON.
"""
self.login('hamlet')
othello = self.example_user('othello')
result = self.client_post("/json/messages", {"type": "invalid type",
"content": "Test message",
"client": "test suite",
"to": othello.email})
self.assert_json_error(result, "Invalid message type")
def test_empty_message(self) -> None:
"""
Sending a message that is empty or only whitespace should fail
"""
self.login('hamlet')
othello = self.example_user('othello')
result = self.client_post("/json/messages", {"type": "private",
"content": " ",
"client": "test suite",
"to": othello.email})
self.assert_json_error(result, "Message must not be empty")
def test_empty_string_topic(self) -> None:
"""
Sending a message that has empty string topic should fail
"""
self.login('hamlet')
result = self.client_post("/json/messages", {"type": "stream",
"to": "Verona",
"client": "test suite",
"content": "Test message",
"topic": ""})
self.assert_json_error(result, "Topic can't be empty")
def test_missing_topic(self) -> None:
"""
Sending a message without topic should fail
"""
self.login('hamlet')
result = self.client_post("/json/messages", {"type": "stream",
"to": "Verona",
"client": "test suite",
"content": "Test message"})
self.assert_json_error(result, "Missing topic")
def test_invalid_message_type(self) -> None:
"""
Messages other than the type of "private" or "stream" are considered as invalid
"""
self.login('hamlet')
result = self.client_post("/json/messages", {"type": "invalid",
"to": "Verona",
"client": "test suite",
"content": "Test message",
"topic": "Test topic"})
self.assert_json_error(result, "Invalid message type")
def test_private_message_without_recipients(self) -> None:
"""
Sending private message without recipients should fail
"""
self.login('hamlet')
result = self.client_post("/json/messages", {"type": "private",
"content": "Test content",
"client": "test suite",
"to": ""})
self.assert_json_error(result, "Message must have recipients")
    def test_mirrored_huddle(self) -> None:
        """
        Sending a mirrored huddle message works
        """
        # The zephyr_mirror client may forge 'sender' on the zephyr subdomain.
        result = self.api_post(self.mit_user("starnine"),
                               "/json/messages", {"type": "private",
                                                  "sender": self.mit_email("sipbtest"),
                                                  "content": "Test message",
                                                  "client": "zephyr_mirror",
                                                  "to": ujson.dumps([self.mit_email("starnine"),
                                                                     self.mit_email("espuser")])},
                               subdomain="zephyr")
        self.assert_json_success(result)
    def test_mirrored_personal(self) -> None:
        """
        Sending a mirrored personal message works
        """
        # Mirroring a personal message addressed to oneself is allowed.
        result = self.api_post(self.mit_user("starnine"),
                               "/json/messages", {"type": "private",
                                                  "sender": self.mit_email("sipbtest"),
                                                  "content": "Test message",
                                                  "client": "zephyr_mirror",
                                                  "to": self.mit_email("starnine")},
                               subdomain="zephyr")
        self.assert_json_success(result)
    def test_mirrored_personal_browser(self) -> None:
        """
        Sending a mirrored personal message via the browser should not work.
        """
        user = self.mit_user('starnine')
        # Use a logged-in browser session (client_post), not API auth.
        self.login_user(user)
        result = self.client_post("/json/messages",
                                  {"type": "private",
                                   "sender": self.mit_email("sipbtest"),
                                   "content": "Test message",
                                   "client": "zephyr_mirror",
                                   "to": self.mit_email("starnine")},
                                  subdomain="zephyr")
        self.assert_json_error(result, "Invalid mirrored message")
    def test_mirrored_personal_to_someone_else(self) -> None:
        """
        Sending a mirrored personal message to someone else is not allowed.
        """
        # starnine may only mirror personal messages addressed to himself.
        result = self.api_post(self.mit_user("starnine"), "/api/v1/messages",
                               {"type": "private",
                                "sender": self.mit_email("sipbtest"),
                                "content": "Test message",
                                "client": "zephyr_mirror",
                                "to": self.mit_email("espuser")},
                               subdomain="zephyr")
        self.assert_json_error(result, "User not authorized for this query")
    def test_duplicated_mirrored_huddle(self) -> None:
        """
        Sending two mirrored huddles in the row return the same ID
        """
        msg = {"type": "private",
               "sender": self.mit_email("sipbtest"),
               "content": "Test message",
               "client": "zephyr_mirror",
               "to": ujson.dumps([self.mit_email("espuser"),
                                  self.mit_email("starnine")])}
        # Mock the Hesiod DNS lookups used to validate mirrored MIT users.
        with mock.patch('DNS.dnslookup', return_value=[['starnine:*:84233:101:Athena Consulting Exchange User,,,:/mit/starnine:/bin/bash']]):
            result1 = self.api_post(self.mit_user("starnine"), "/api/v1/messages", msg,
                                    subdomain="zephyr")
            self.assert_json_success(result1)
        with mock.patch('DNS.dnslookup', return_value=[['espuser:*:95494:101:Esp Classroom,,,:/mit/espuser:/bin/athena/bash']]):
            result2 = self.api_post(self.mit_user("espuser"), "/api/v1/messages", msg,
                                    subdomain="zephyr")
            self.assert_json_success(result2)
        # De-duplication: both submissions resolve to the same message ID.
        self.assertEqual(ujson.loads(result1.content)['id'],
                         ujson.loads(result2.content)['id'])
def test_message_with_null_bytes(self) -> None:
"""
A message with null bytes in it is handled.
"""
self.login('hamlet')
post_data = {"type": "stream", "to": "Verona", "client": "test suite",
"content": " I like null bytes \x00 in my content", "topic": "Test topic"}
result = self.client_post("/json/messages", post_data)
self.assert_json_error(result, "Message must not contain null bytes")
def test_strip_message(self) -> None:
"""
A message with mixed whitespace at the end is cleaned up.
"""
self.login('hamlet')
post_data = {"type": "stream", "to": "Verona", "client": "test suite",
"content": " I like whitespace at the end! \n\n \n", "topic": "Test topic"}
result = self.client_post("/json/messages", post_data)
self.assert_json_success(result)
sent_message = self.get_last_message()
self.assertEqual(sent_message.content, " I like whitespace at the end!")
def test_long_message(self) -> None:
"""
Sending a message longer than the maximum message length succeeds but is
truncated.
"""
self.login('hamlet')
long_message = "A" * (MAX_MESSAGE_LENGTH + 1)
post_data = {"type": "stream", "to": "Verona", "client": "test suite",
"content": long_message, "topic": "Test topic"}
result = self.client_post("/json/messages", post_data)
self.assert_json_success(result)
sent_message = self.get_last_message()
self.assertEqual(sent_message.content,
"A" * (MAX_MESSAGE_LENGTH - 20) + "\n[message truncated]")
def test_long_topic(self) -> None:
"""
Sending a message with a topic longer than the maximum topic length
succeeds, but the topic is truncated.
"""
self.login('hamlet')
long_topic = "A" * (MAX_TOPIC_NAME_LENGTH + 1)
post_data = {"type": "stream", "to": "Verona", "client": "test suite",
"content": "test content", "topic": long_topic}
result = self.client_post("/json/messages", post_data)
self.assert_json_success(result)
sent_message = self.get_last_message()
self.assertEqual(sent_message.topic_name(),
"A" * (MAX_TOPIC_NAME_LENGTH - 3) + "...")
def test_send_forged_message_as_not_superuser(self) -> None:
self.login('hamlet')
result = self.client_post("/json/messages", {"type": "stream",
"to": "Verona",
"client": "test suite",
"content": "Test message",
"topic": "Test topic",
"forged": "true"})
self.assert_json_error(result, "User not authorized for this query")
def test_send_message_as_not_superuser_to_different_domain(self) -> None:
self.login('hamlet')
result = self.client_post("/json/messages", {"type": "stream",
"to": "Verona",
"client": "test suite",
"content": "Test message",
"topic": "Test topic",
"realm_str": "mit"})
self.assert_json_error(result, "User not authorized for this query")
    def test_send_message_as_superuser_to_domain_that_dont_exist(self) -> None:
        """Even an API super user cannot send to a realm_str that does not
        match an existing realm."""
        user = self.example_user("default_bot")
        password = "test_password"
        user.set_password(password)
        user.is_api_super_user = True
        user.save()
        result = self.api_post(user,
                               "/api/v1/messages", {"type": "stream",
                                                    "to": "Verona",
                                                    "client": "test suite",
                                                    "content": "Test message",
                                                    "topic": "Test topic",
                                                    "realm_str": "non-existing"})
        # Revoke the super-user grant so other tests see the default state.
        user.is_api_super_user = False
        user.save()
        self.assert_json_error(result, "Unknown organization 'non-existing'")
def test_send_message_when_sender_is_not_set(self) -> None:
result = self.api_post(self.mit_user("starnine"), "/api/v1/messages",
{"type": "private",
"content": "Test message",
"client": "zephyr_mirror",
"to": self.mit_email("starnine")},
subdomain="zephyr")
self.assert_json_error(result, "Missing sender")
def test_send_message_as_not_superuser_when_type_is_not_private(self) -> None:
result = self.api_post(self.mit_user("starnine"), "/api/v1/messages",
{"type": "not-private",
"sender": self.mit_email("sipbtest"),
"content": "Test message",
"client": "zephyr_mirror",
"to": self.mit_email("starnine")},
subdomain="zephyr")
self.assert_json_error(result, "User not authorized for this query")
    @mock.patch("zerver.views.messages.create_mirrored_message_users")
    def test_send_message_create_mirrored_message_user_returns_invalid_input(
            self, create_mirrored_message_users_mock: Any) -> None:
        """If resolving the mirrored users raises InvalidMirrorInput, the
        request fails with an 'Invalid mirrored message' error."""
        create_mirrored_message_users_mock.side_effect = InvalidMirrorInput()
        result = self.api_post(self.mit_user("starnine"), "/api/v1/messages",
                               {"type": "private",
                                "sender": self.mit_email("sipbtest"),
                                "content": "Test message",
                                "client": "zephyr_mirror",
                                "to": self.mit_email("starnine")},
                               subdomain="zephyr")
        self.assert_json_error(result, "Invalid mirrored message")
    @mock.patch("zerver.views.messages.create_mirrored_message_users")
    def test_send_message_when_client_is_zephyr_mirror_but_string_id_is_not_zephyr(
            self, create_mirrored_message_users_mock: Any) -> None:
        """Zephyr mirroring is only permitted in the realm whose string_id
        is 'zephyr'."""
        create_mirrored_message_users_mock.return_value = mock.Mock()
        user = self.mit_user("starnine")
        # Move the user's realm off the 'zephyr' subdomain.
        user.realm.string_id = 'notzephyr'
        user.realm.save()
        result = self.api_post(user, "/api/v1/messages",
                               {"type": "private",
                                "sender": self.mit_email("sipbtest"),
                                "content": "Test message",
                                "client": "zephyr_mirror",
                                "to": user.email},
                               subdomain="notzephyr")
        self.assert_json_error(result, "Zephyr mirroring is not allowed in this organization")
    @mock.patch("zerver.views.messages.create_mirrored_message_users")
    def test_send_message_when_client_is_zephyr_mirror_but_recipient_is_user_id(
            self, create_mirrored_message_users_mock: Any) -> None:
        """Mirrored messages must address recipients by email, not user ID."""
        create_mirrored_message_users_mock.return_value = mock.Mock()
        user = self.mit_user("starnine")
        self.login_user(user)
        result = self.api_post(user, "/api/v1/messages",
                               {"type": "private",
                                "sender": self.mit_email("sipbtest"),
                                "content": "Test message",
                                "client": "zephyr_mirror",
                                "to": ujson.dumps([user.id])},
                               subdomain="zephyr")
        self.assert_json_error(result, "Mirroring not allowed with recipient user IDs")
    def test_send_message_irc_mirror(self) -> None:
        """An API super user bot can send forged IRC-mirrored messages; both
        "true" and "yes" are accepted for the forged flag, and the supplied
        timestamp is preserved on the resulting message."""
        reset_emails_in_zulip_realm()
        self.login('hamlet')
        bot_info = {
            'full_name': 'IRC bot',
            'short_name': 'irc',
        }
        result = self.client_post("/json/bots", bot_info)
        self.assert_json_success(result)
        email = "irc-bot@zulip.testserver"
        user = get_user(email, get_realm('zulip'))
        user.is_api_super_user = True
        user.save()
        user = get_user(email, get_realm('zulip'))
        self.subscribe(user, "IRCland")
        # Simulate a mirrored message with a slightly old timestamp.
        fake_date_sent = timezone_now() - datetime.timedelta(minutes=37)
        fake_timestamp = datetime_to_timestamp(fake_date_sent)
        result = self.api_post(user, "/api/v1/messages", {"type": "stream",
                                                          "forged": "true",
                                                          "time": fake_timestamp,
                                                          "sender": "irc-user@irc.zulip.com",
                                                          "content": "Test message",
                                                          "client": "irc_mirror",
                                                          "topic": "from irc",
                                                          "to": "IRCLand"})
        self.assert_json_success(result)
        msg = self.get_last_message()
        self.assertEqual(int(datetime_to_timestamp(msg.date_sent)), int(fake_timestamp))
        # Now test again using forged=yes
        fake_date_sent = timezone_now() - datetime.timedelta(minutes=22)
        fake_timestamp = datetime_to_timestamp(fake_date_sent)
        result = self.api_post(user, "/api/v1/messages", {"type": "stream",
                                                          "forged": "yes",
                                                          "time": fake_timestamp,
                                                          "sender": "irc-user@irc.zulip.com",
                                                          "content": "Test message",
                                                          "client": "irc_mirror",
                                                          "topic": "from irc",
                                                          "to": "IRCLand"})
        self.assert_json_success(result)
        msg = self.get_last_message()
        self.assertEqual(int(datetime_to_timestamp(msg.date_sent)), int(fake_timestamp))
    def test_unsubscribed_api_super_user(self) -> None:
        """An API super user may post to any stream -- even a private one
        she is not subscribed to -- both as herself and (forged) on behalf
        of a mirrored user."""
        reset_emails_in_zulip_realm()
        cordelia = self.example_user('cordelia')
        stream_name = 'private_stream'
        self.make_stream(stream_name, invite_only=True)
        self.unsubscribe(cordelia, stream_name)
        # As long as Cordelia is a super_user, she can send messages
        # to ANY stream, even one she is not unsubscribed to, and
        # she can do it for herself or on behalf of a mirrored user.
        def test_with(sender_email: str, client: str, forged: bool) -> None:
            payload = dict(
                type="stream",
                to=stream_name,
                client=client,
                topic='whatever',
                content='whatever',
                forged=ujson.dumps(forged),
            )
            # Only pass the 'sender' property when doing mirroring behavior.
            if forged:
                payload['sender'] = sender_email
            # Without super-user rights the send is rejected ...
            cordelia.is_api_super_user = False
            cordelia.save()
            result = self.api_post(cordelia, "/api/v1/messages", payload)
            self.assert_json_error_contains(result, 'authorized')
            # ... and with them it succeeds.
            cordelia.is_api_super_user = True
            cordelia.save()
            result = self.api_post(cordelia, "/api/v1/messages", payload)
            self.assert_json_success(result)
        test_with(
            sender_email=cordelia.email,
            client='test suite',
            forged=False,
        )
        test_with(
            sender_email='irc_person@zulip.com',
            client='irc_mirror',
            forged=True,
        )
def test_bot_can_send_to_owner_stream(self) -> None:
cordelia = self.example_user('cordelia')
bot = self.create_test_bot(
short_name='whatever',
user_profile=cordelia,
)
stream_name = 'private_stream'
self.make_stream(stream_name, invite_only=True)
payload = dict(
type="stream",
to=stream_name,
client='test suite',
topic='whatever',
content='whatever',
)
result = self.api_post(bot, "/api/v1/messages", payload)
self.assert_json_error_contains(result, 'Not authorized to send')
# We subscribe the bot owner! (aka cordelia)
self.subscribe(bot.bot_owner, stream_name)
result = self.api_post(bot, "/api/v1/messages", payload)
self.assert_json_success(result)
def test_cross_realm_bots_can_use_api_on_own_subdomain(self) -> None:
    """Cross-realm bots may use the HTTP API on their own realm's
    subdomain (for other realms they should use internal_send_*_message
    instead)."""
    # Cross realm bots should use internal_send_*_message, not the API:
    notification_bot = self.notification_bot()
    stream = self.make_stream("notify_channel", get_realm("zulipinternal"))
    result = self.api_post(notification_bot,
                           "/api/v1/messages",
                           {"type": "stream",
                            "to": "notify_channel",
                            "client": "test suite",
                            "content": "Test message",
                            "topic": "Test topic"},
                           subdomain='zulipinternal')
    self.assert_json_success(result)
    message = self.get_last_message()
    # Verify the message landed with the expected sender and stream.
    self.assertEqual(message.content, "Test message")
    self.assertEqual(message.sender, notification_bot)
    self.assertEqual(message.recipient.type_id, stream.id)
def test_create_mirror_user_despite_race(self) -> None:
    """create_mirror_user_if_needed must tolerate a concurrent creation.

    The mocked create_user first creates the user for real (simulating
    another process winning the race) and then raises IntegrityError;
    create_mirror_user_if_needed should recover and return the
    already-created user.
    """
    realm = get_realm('zulip')
    email = 'fred@example.com'

    # PEP 8 (E731): use a def instead of assigning a lambda to a name.
    def email_to_full_name(email: str) -> str:
        return 'fred'

    def create_user(**kwargs: Any) -> UserProfile:
        self.assertEqual(kwargs['full_name'], 'fred')
        self.assertEqual(kwargs['email'], email)
        self.assertEqual(kwargs['active'], False)
        self.assertEqual(kwargs['is_mirror_dummy'], True)
        # We create an actual user here to simulate a race.
        # We use the minimal, un-mocked function.
        kwargs['bot_type'] = None
        kwargs['bot_owner'] = None
        kwargs['tos_version'] = None
        kwargs['timezone'] = timezone_now()
        create_user_profile(**kwargs).save()
        raise IntegrityError()

    with mock.patch('zerver.lib.actions.create_user',
                    side_effect=create_user) as m:
        mirror_fred_user = create_mirror_user_if_needed(
            realm,
            email,
            email_to_full_name,
        )
    # Despite the IntegrityError, we got back the already-created user.
    self.assertEqual(mirror_fred_user.delivery_email, email)
    m.assert_called()
def test_guest_user(self) -> None:
    """A guest may only post to public streams they are subscribed to."""
    sender = self.example_user('polonius')
    stream_name = 'public stream'
    self.make_stream(stream_name, invite_only=False)

    payload = {
        'type': 'stream',
        'to': stream_name,
        'client': 'test suite',
        'topic': 'whatever',
        'content': 'whatever',
    }

    # Unsubscribed: the guest is denied even on a public stream.
    denied = self.api_post(sender, "/api/v1/messages", payload)
    self.assert_json_error(denied, "Not authorized to send to stream 'public stream'")

    # Subscribed: the identical request now succeeds.
    self.subscribe(sender, stream_name)
    allowed = self.api_post(sender, "/api/v1/messages", payload)
    self.assert_json_success(allowed)
class ScheduledMessageTest(ZulipTestCase):
    """Tests for the /json/messages delayed-delivery (send_later/remind)
    code path, including timezone handling and validation errors."""

    def last_scheduled_message(self) -> ScheduledMessage:
        # Most recently created ScheduledMessage row.
        return ScheduledMessage.objects.all().order_by('-id')[0]

    def do_schedule_message(self, msg_type: str, to: str, msg: str,
                            defer_until: str='', tz_guess: str='',
                            delivery_type: str='send_later',
                            realm_str: str='zulip') -> HttpResponse:
        """POST a message as hamlet with delayed-delivery parameters."""
        self.login('hamlet')
        topic_name = ''
        if msg_type == 'stream':
            topic_name = 'Test topic'
        payload = {"type": msg_type,
                   "to": to,
                   "client": "test suite",
                   "content": msg,
                   "topic": topic_name,
                   "realm_str": realm_str,
                   "delivery_type": delivery_type,
                   "tz_guess": tz_guess}
        # deliver_at is only included when the caller supplies one, so
        # its absence can be tested too (test_missing_deliver_at).
        if defer_until:
            payload["deliver_at"] = defer_until
        result = self.client_post("/json/messages", payload)
        return result

    def test_schedule_message(self) -> None:
        """Happy-path scheduling: streams, PMs, reminders, timezones."""
        content = "Test message"
        # Naive timestamp one day in the future.
        defer_until = timezone_now().replace(tzinfo=None) + datetime.timedelta(days=1)
        defer_until_str = str(defer_until)
        # Scheduling a message to a stream you are subscribed is successful.
        result = self.do_schedule_message('stream', 'Verona',
                                          content + ' 1', defer_until_str)
        message = self.last_scheduled_message()
        self.assert_json_success(result)
        self.assertEqual(message.content, 'Test message 1')
        self.assertEqual(message.topic_name(), 'Test topic')
        self.assertEqual(message.scheduled_timestamp, convert_to_UTC(defer_until))
        self.assertEqual(message.delivery_type, ScheduledMessage.SEND_LATER)
        # Scheduling a message for reminders.
        result = self.do_schedule_message('stream', 'Verona',
                                          content + ' 2', defer_until_str,
                                          delivery_type='remind')
        message = self.last_scheduled_message()
        self.assert_json_success(result)
        self.assertEqual(message.delivery_type, ScheduledMessage.REMIND)
        # Scheduling a private message is successful.
        othello = self.example_user('othello')
        hamlet = self.example_user('hamlet')
        result = self.do_schedule_message('private', othello.email,
                                          content + ' 3', defer_until_str)
        message = self.last_scheduled_message()
        self.assert_json_success(result)
        self.assertEqual(message.content, 'Test message 3')
        self.assertEqual(message.scheduled_timestamp, convert_to_UTC(defer_until))
        self.assertEqual(message.delivery_type, ScheduledMessage.SEND_LATER)
        # Setting a reminder in PM's to other users causes a error.
        result = self.do_schedule_message('private', othello.email,
                                          content + ' 4', defer_until_str,
                                          delivery_type='remind')
        self.assert_json_error(result, 'Reminders can only be set for streams.')
        # Setting a reminder in PM's to ourself is successful.
        # Required by reminders from message actions popover caret feature.
        result = self.do_schedule_message('private', hamlet.email,
                                          content + ' 5', defer_until_str,
                                          delivery_type='remind')
        message = self.last_scheduled_message()
        self.assert_json_success(result)
        self.assertEqual(message.content, 'Test message 5')
        self.assertEqual(message.delivery_type, ScheduledMessage.REMIND)
        # Scheduling a message while guessing timezone.
        tz_guess = 'Asia/Kolkata'
        result = self.do_schedule_message('stream', 'Verona', content + ' 6',
                                          defer_until_str, tz_guess=tz_guess)
        message = self.last_scheduled_message()
        self.assert_json_success(result)
        self.assertEqual(message.content, 'Test message 6')
        local_tz = get_timezone(tz_guess)
        # Since mypy is not able to recognize localize and normalize as attributes of tzinfo we use ignore.
        utz_defer_until = local_tz.normalize(local_tz.localize(defer_until))  # type: ignore[attr-defined] # Reason in comment on previous line.
        self.assertEqual(message.scheduled_timestamp,
                         convert_to_UTC(utz_defer_until))
        self.assertEqual(message.delivery_type, ScheduledMessage.SEND_LATER)
        # Test with users timezone setting as set to some timezone rather than
        # empty. This will help interpret timestamp in users local timezone.
        user = self.example_user("hamlet")
        user.timezone = 'US/Pacific'
        user.save(update_fields=['timezone'])
        result = self.do_schedule_message('stream', 'Verona',
                                          content + ' 7', defer_until_str)
        message = self.last_scheduled_message()
        self.assert_json_success(result)
        self.assertEqual(message.content, 'Test message 7')
        local_tz = get_timezone(user.timezone)
        # Since mypy is not able to recognize localize and normalize as attributes of tzinfo we use ignore.
        utz_defer_until = local_tz.normalize(local_tz.localize(defer_until))  # type: ignore[attr-defined] # Reason in comment on previous line.
        self.assertEqual(message.scheduled_timestamp,
                         convert_to_UTC(utz_defer_until))
        self.assertEqual(message.delivery_type, ScheduledMessage.SEND_LATER)

    def test_scheduling_in_past(self) -> None:
        # Scheduling a message in past should fail.
        content = "Test message"
        defer_until = timezone_now()
        defer_until_str = str(defer_until)
        result = self.do_schedule_message('stream', 'Verona',
                                          content + ' 1', defer_until_str)
        self.assert_json_error(result, 'Time must be in the future.')

    def test_invalid_timestamp(self) -> None:
        # Scheduling a message from which timestamp couldn't be parsed
        # successfully should fail.
        content = "Test message"
        defer_until = 'Missed the timestamp'
        result = self.do_schedule_message('stream', 'Verona',
                                          content + ' 1', defer_until)
        self.assert_json_error(result, 'Invalid time format')

    def test_missing_deliver_at(self) -> None:
        # Omitting deliver_at entirely is a distinct error case.
        content = "Test message"
        result = self.do_schedule_message('stream', 'Verona',
                                          content + ' 1')
        self.assert_json_error(result, 'Missing deliver_at in a request for delayed message delivery')
class EditMessageTest(ZulipTestCase):
def check_topic(self, msg_id: int, topic_name: str) -> None:
    """Assert that the stored message's topic equals topic_name."""
    stored = Message.objects.get(id=msg_id)
    self.assertEqual(stored.topic_name(), topic_name)
def check_message(self,
                  msg_id: int,
                  topic_name: str,
                  content: str) -> None:
    """Assert the message's topic/content in the DB, and that the cached
    message dict can be hydrated without touching zerver_message."""
    # Make sure we saved the message correctly to the DB.
    msg = Message.objects.get(id=msg_id)
    self.assertEqual(msg.topic_name(), topic_name)
    self.assertEqual(msg.content, content)
    '''
    Next, we will make sure we properly cached the
    messages. We still have to do 2 queries to
    hydrate sender/recipient info, but we won't need
    to hit the zerver_message table.
    '''
    with queries_captured() as queries:
        (fetch_message_dict,) = messages_for_ids(
            message_ids = [msg.id],
            user_message_flags={msg_id: []},
            search_fields=dict(),
            apply_markdown=False,
            client_gravatar=False,
            allow_edit_history=True,
        )
    # Exactly the 2 hydration queries, and none touching the message table.
    self.assertEqual(len(queries), 2)
    for query in queries:
        self.assertNotIn('message', query['sql'])
    self.assertEqual(
        fetch_message_dict[TOPIC_NAME],
        msg.topic_name()
    )
    self.assertEqual(
        fetch_message_dict['content'],
        msg.content
    )
    self.assertEqual(
        fetch_message_dict['sender_id'],
        msg.sender_id
    )
    # Edit history (when present) round-trips through the cache too.
    if msg.edit_history:
        self.assertEqual(
            fetch_message_dict['edit_history'],
            ujson.loads(msg.edit_history)
        )
def test_query_count_on_to_dict_uncached(self) -> None:
    """Pin the number of DB queries MessageDict.to_dict_uncached makes."""
    # `to_dict_uncached` method is used by the mechanisms
    # tested in this class. Hence, its performance is tested here.
    # Generate 2 messages
    user = self.example_user("hamlet")
    self.login_user(user)
    stream_name = "public_stream"
    self.subscribe(user, stream_name)
    message_one_id = self.send_stream_message(user,
                                              stream_name, "Message one")
    # Second subscriber joins between the two sends.
    later_subscribed_user = self.example_user("cordelia")
    self.subscribe(later_subscribed_user, stream_name)
    message_two_id = self.send_stream_message(user,
                                              stream_name, "Message two")
    message_ids = [message_one_id, message_two_id]
    messages = [Message.objects.select_related().get(id=message_id)
                for message_id in message_ids]
    # Check number of queries performed
    with queries_captured() as queries:
        MessageDict.to_dict_uncached(messages)
    # 1 query for realm_id per message = 2
    # 1 query each for reactions & submessage for all messages = 2
    self.assertEqual(len(queries), 4)
def test_save_message(self) -> None:
    """Edit a message's content and then its topic, verifying the DB and
    cache after each change.  (A client test also covers this.)"""
    self.login('hamlet')
    msg_id = self.send_stream_message(self.example_user("hamlet"), "Scotland",
                                      topic_name="editing", content="before edit")
    edit_url = "/json/messages/" + str(msg_id)

    # First edit: content only.
    content_result = self.client_patch(edit_url, {
        'message_id': msg_id,
        'content': 'after edit',
    })
    self.assert_json_success(content_result)
    self.check_message(msg_id, topic_name="editing", content="after edit")

    # Second edit: topic only.
    topic_result = self.client_patch(edit_url, {
        'message_id': msg_id,
        'topic': 'edited',
    })
    self.assert_json_success(topic_result)
    self.check_topic(msg_id, topic_name="edited")
def test_fetch_raw_message(self) -> None:
    """GET /json/messages/<id> returns raw Markdown, but only to users
    with access to the message."""
    self.login('hamlet')
    msg_id = self.send_personal_message(
        from_user=self.example_user("hamlet"),
        to_user=self.example_user("cordelia"),
        content="**before** edit",
    )
    result = self.client_get('/json/messages/' + str(msg_id))
    self.assert_json_success(result)
    # raw_content is the unrendered Markdown source.
    self.assertEqual(result.json()['raw_content'], '**before** edit')
    # Test error cases
    result = self.client_get('/json/messages/999999')
    self.assert_json_error(result, 'Invalid message(s)')
    # The PM's recipient can fetch it...
    self.login('cordelia')
    result = self.client_get('/json/messages/' + str(msg_id))
    self.assert_json_success(result)
    # ...but an uninvolved third party cannot.
    self.login('othello')
    result = self.client_get('/json/messages/' + str(msg_id))
    self.assert_json_error(result, 'Invalid message(s)')
def test_fetch_raw_message_stream_wrong_realm(self) -> None:
    """A user from a different realm cannot fetch a stream message."""
    user_profile = self.example_user("hamlet")
    self.login_user(user_profile)

    stream = self.make_stream('public_stream')
    self.subscribe(user_profile, stream.name)
    msg_id = self.send_stream_message(user_profile, stream.name,
                                      topic_name="test", content="test")
    url = '/json/messages/' + str(msg_id)

    # A member of the sending realm can fetch the message...
    self.assert_json_success(self.client_get(url))

    # ...but a zephyr-realm user gets an access error.
    mit_user = self.mit_user('sipbtest')
    self.login_user(mit_user)
    denied = self.client_get(url, subdomain="zephyr")
    self.assert_json_error(denied, 'Invalid message(s)')
def test_fetch_raw_message_private_stream(self) -> None:
    """Private-stream messages are invisible to non-subscribers."""
    user_profile = self.example_user("hamlet")
    self.login_user(user_profile)

    stream = self.make_stream('private_stream', invite_only=True)
    self.subscribe(user_profile, stream.name)
    msg_id = self.send_stream_message(user_profile, stream.name,
                                      topic_name="test", content="test")
    url = '/json/messages/' + str(msg_id)

    # A subscriber can fetch the message...
    self.assert_json_success(self.client_get(url))

    # ...but an unsubscribed user cannot even tell that it exists.
    self.login('othello')
    self.assert_json_error(self.client_get(url), 'Invalid message(s)')
def test_edit_message_no_permission(self) -> None:
    """An ordinary user may not edit somebody else's message."""
    self.login('hamlet')
    msg_id = self.send_stream_message(self.example_user("iago"), "Scotland",
                                      topic_name="editing", content="before edit")
    payload = {
        'message_id': msg_id,
        'content': 'content after edit',
    }
    response = self.client_patch("/json/messages/" + str(msg_id), payload)
    self.assert_json_error(response, "You don't have permission to edit this message")
def test_edit_message_no_changes(self) -> None:
    """A PATCH that supplies neither content nor topic is rejected."""
    self.login('hamlet')
    msg_id = self.send_stream_message(self.example_user("hamlet"), "Scotland",
                                      topic_name="editing", content="before edit")
    payload = {'message_id': msg_id}
    response = self.client_patch("/json/messages/" + str(msg_id), payload)
    self.assert_json_error(response, "Nothing to change")
def test_edit_message_no_topic(self) -> None:
    """Editing the topic to whitespace only is rejected."""
    self.login('hamlet')
    msg_id = self.send_stream_message(self.example_user("hamlet"), "Scotland",
                                      topic_name="editing", content="before edit")
    payload = {'message_id': msg_id, 'topic': ' '}
    response = self.client_patch("/json/messages/" + str(msg_id), payload)
    self.assert_json_error(response, "Topic can't be empty")
def test_edit_message_no_content(self) -> None:
    """Editing a message to whitespace-only content succeeds; the server
    replaces the body with the "(deleted)" placeholder."""
    self.login('hamlet')
    msg_id = self.send_stream_message(self.example_user("hamlet"), "Scotland",
                                      topic_name="editing", content="before edit")
    result = self.client_patch("/json/messages/" + str(msg_id), {
        'message_id': msg_id,
        'content': ' '
    })
    self.assert_json_success(result)
    # Fetch the single row directly instead of the indirect
    # values_list(...)[0] (which also violated PEP 8 kwarg spacing).
    content = Message.objects.get(id=msg_id).content
    self.assertEqual(content, "(deleted)")
def test_edit_message_history_disabled(self) -> None:
    """With allow_edit_history off, /history errors and fetched messages
    carry no edit_history field."""
    user_profile = self.example_user("hamlet")
    do_set_realm_property(user_profile.realm, "allow_edit_history", False)
    self.login('hamlet')
    # Single-line edit
    msg_id_1 = self.send_stream_message(self.example_user("hamlet"),
                                        "Denmark",
                                        topic_name="editing",
                                        content="content before edit")
    new_content_1 = 'content after edit'
    result_1 = self.client_patch("/json/messages/" + str(msg_id_1), {
        'message_id': msg_id_1, 'content': new_content_1
    })
    self.assert_json_success(result_1)
    # The history endpoint itself is disabled realm-wide.
    result = self.client_get(
        "/json/messages/" + str(msg_id_1) + "/history")
    self.assert_json_error(result, "Message edit history is disabled in this organization")
    # Now verify that if we fetch the message directly, there's no
    # edit history data attached.
    messages_result = self.client_get("/json/messages",
                                      {"anchor": msg_id_1, "num_before": 0, "num_after": 10})
    self.assert_json_success(messages_result)
    json_messages = ujson.loads(
        messages_result.content.decode('utf-8'))
    for msg in json_messages['messages']:
        self.assertNotIn("edit_history", msg)
def test_edit_message_history(self) -> None:
    """Verify rendered content, HTML diffs, and prev_* fields in the
    /history payload for single-line and multi-line edits."""
    self.login('hamlet')
    # Single-line edit
    msg_id_1 = self.send_stream_message(
        self.example_user("hamlet"),
        "Scotland",
        topic_name="editing",
        content="content before edit")
    new_content_1 = 'content after edit'
    result_1 = self.client_patch("/json/messages/" + str(msg_id_1), {
        'message_id': msg_id_1, 'content': new_content_1
    })
    self.assert_json_success(result_1)
    message_edit_history_1 = self.client_get(
        "/json/messages/" + str(msg_id_1) + "/history")
    json_response_1 = ujson.loads(
        message_edit_history_1.content.decode('utf-8'))
    message_history_1 = json_response_1['message_history']
    # Check content of message after edit.
    self.assertEqual(message_history_1[0]['rendered_content'],
                     '<p>content before edit</p>')
    self.assertEqual(message_history_1[1]['rendered_content'],
                     '<p>content after edit</p>')
    # The diff highlights the inserted and deleted words inline.
    self.assertEqual(message_history_1[1]['content_html_diff'],
                     ('<p>content '
                      '<span class="highlight_text_inserted">after</span> '
                      '<span class="highlight_text_deleted">before</span>'
                      ' edit</p>'))
    # Check content of message before edit.
    self.assertEqual(message_history_1[1]['prev_rendered_content'],
                     '<p>content before edit</p>')
    # Edits on new lines
    msg_id_2 = self.send_stream_message(
        self.example_user("hamlet"),
        "Scotland",
        topic_name="editing",
        content=('content before edit, line 1\n'
                 '\n'
                 'content before edit, line 3'))
    new_content_2 = ('content before edit, line 1\n'
                     'content after edit, line 2\n'
                     'content before edit, line 3')
    result_2 = self.client_patch("/json/messages/" + str(msg_id_2), {
        'message_id': msg_id_2, 'content': new_content_2
    })
    self.assert_json_success(result_2)
    message_edit_history_2 = self.client_get(
        "/json/messages/" + str(msg_id_2) + "/history")
    json_response_2 = ujson.loads(
        message_edit_history_2.content.decode('utf-8'))
    message_history_2 = json_response_2['message_history']
    self.assertEqual(message_history_2[0]['rendered_content'],
                     ('<p>content before edit, line 1</p>\n'
                      '<p>content before edit, line 3</p>'))
    self.assertEqual(message_history_2[1]['rendered_content'],
                     ('<p>content before edit, line 1<br>\n'
                      'content after edit, line 2<br>\n'
                      'content before edit, line 3</p>'))
    self.assertEqual(message_history_2[1]['content_html_diff'],
                     ('<p>content before edit, line 1<br> '
                      'content <span class="highlight_text_inserted">after edit, line 2<br> '
                      'content</span> before edit, line 3</p>'))
    self.assertEqual(message_history_2[1]['prev_rendered_content'],
                     ('<p>content before edit, line 1</p>\n'
                      '<p>content before edit, line 3</p>'))
def test_edit_link(self) -> None:
    """Editing a Markdown link renders and diffs correctly in history."""
    # Link editing
    self.login('hamlet')
    msg_id_1 = self.send_stream_message(
        self.example_user("hamlet"),
        "Scotland",
        topic_name="editing",
        content="Here is a link to [zulip](www.zulip.org).")
    new_content_1 = 'Here is a link to [zulip](www.zulipchat.com).'
    result_1 = self.client_patch("/json/messages/" + str(msg_id_1), {
        'message_id': msg_id_1, 'content': new_content_1
    })
    self.assert_json_success(result_1)
    message_edit_history_1 = self.client_get(
        "/json/messages/" + str(msg_id_1) + "/history")
    json_response_1 = ujson.loads(
        message_edit_history_1.content.decode('utf-8'))
    message_history_1 = json_response_1['message_history']
    # Check content of message after edit.
    self.assertEqual(message_history_1[0]['rendered_content'],
                     '<p>Here is a link to '
                     '<a href="http://www.zulip.org">zulip</a>.</p>')
    self.assertEqual(message_history_1[1]['rendered_content'],
                     '<p>Here is a link to '
                     '<a href="http://www.zulipchat.com">zulip</a>.</p>')
    # The diff spells out both URLs so the change is visible even
    # though the link text ("zulip") is unchanged.
    self.assertEqual(message_history_1[1]['content_html_diff'],
                     ('<p>Here is a link to <a href="http://www.zulipchat.com"'
                      '>zulip '
                      '<span class="highlight_text_inserted"> Link: http://www.zulipchat.com .'
                      '</span> <span class="highlight_text_deleted"> Link: http://www.zulip.org .'
                      '</span> </a></p>'))
def test_edit_history_unedited(self) -> None:
    """A never-edited message has exactly one history entry."""
    self.login('hamlet')

    msg_id = self.send_stream_message(
        self.example_user('hamlet'),
        'Scotland',
        topic_name='editing',
        content='This message has not been edited.')

    response = self.client_get('/json/messages/{}/history'.format(msg_id))
    self.assert_json_success(response)

    history = response.json()['message_history']
    self.assert_length(history, 1)
def test_user_info_for_updates(self) -> None:
    """get_user_info_for_message_updates reports recipients and mentions."""
    hamlet = self.example_user('hamlet')
    cordelia = self.example_user('cordelia')

    self.login_user(hamlet)
    for user in (hamlet, cordelia):
        self.subscribe(user, 'Scotland')

    msg_id = self.send_stream_message(hamlet, 'Scotland',
                                      content='@**Cordelia Lear**')
    user_info = get_user_info_for_message_updates(msg_id)

    # Both subscribers received the message...
    recipient_ids = user_info['message_user_ids']
    self.assertIn(hamlet.id, recipient_ids)
    self.assertIn(cordelia.id, recipient_ids)

    # ...but only Cordelia was mentioned.
    self.assertEqual(user_info['mention_user_ids'], {cordelia.id})
def test_edit_cases(self) -> None:
    """This test verifies the accuracy of construction of Zulip's edit
    history data structures."""
    self.login('hamlet')
    hamlet = self.example_user('hamlet')
    msg_id = self.send_stream_message(self.example_user("hamlet"), "Scotland",
                                      topic_name="topic 1", content="content 1")
    # Edit 1: content only.
    result = self.client_patch("/json/messages/" + str(msg_id), {
        'message_id': msg_id,
        'content': 'content 2',
    })
    self.assert_json_success(result)
    history = ujson.loads(Message.objects.get(id=msg_id).edit_history)
    self.assertEqual(history[0]['prev_content'], 'content 1')
    self.assertEqual(history[0]['user_id'], hamlet.id)
    self.assertEqual(set(history[0].keys()),
                     {'timestamp', 'prev_content', 'user_id',
                      'prev_rendered_content', 'prev_rendered_content_version'})
    # Edit 2: topic only — the entry has no content-related keys.
    result = self.client_patch("/json/messages/" + str(msg_id), {
        'message_id': msg_id,
        'topic': 'topic 2',
    })
    self.assert_json_success(result)
    history = ujson.loads(Message.objects.get(id=msg_id).edit_history)
    self.assertEqual(history[0][LEGACY_PREV_TOPIC], 'topic 1')
    self.assertEqual(history[0]['user_id'], hamlet.id)
    self.assertEqual(set(history[0].keys()), {'timestamp', LEGACY_PREV_TOPIC, 'user_id'})
    # Edit 3: content and topic together in a single entry.
    result = self.client_patch("/json/messages/" + str(msg_id), {
        'message_id': msg_id,
        'content': 'content 3',
        'topic': 'topic 3',
    })
    self.assert_json_success(result)
    history = ujson.loads(Message.objects.get(id=msg_id).edit_history)
    self.assertEqual(history[0]['prev_content'], 'content 2')
    self.assertEqual(history[0][LEGACY_PREV_TOPIC], 'topic 2')
    self.assertEqual(history[0]['user_id'], hamlet.id)
    self.assertEqual(set(history[0].keys()),
                     {'timestamp', LEGACY_PREV_TOPIC, 'prev_content', 'user_id',
                      'prev_rendered_content', 'prev_rendered_content_version'})
    # Edit 4: content only, again.
    result = self.client_patch("/json/messages/" + str(msg_id), {
        'message_id': msg_id,
        'content': 'content 4',
    })
    self.assert_json_success(result)
    history = ujson.loads(Message.objects.get(id=msg_id).edit_history)
    self.assertEqual(history[0]['prev_content'], 'content 3')
    self.assertEqual(history[0]['user_id'], hamlet.id)
    # Edit 5: topic only, performed by a different user (iago, an admin).
    self.login('iago')
    result = self.client_patch("/json/messages/" + str(msg_id), {
        'message_id': msg_id,
        'topic': 'topic 4',
    })
    self.assert_json_success(result)
    history = ujson.loads(Message.objects.get(id=msg_id).edit_history)
    self.assertEqual(history[0][LEGACY_PREV_TOPIC], 'topic 3')
    self.assertEqual(history[0]['user_id'], self.example_user('iago').id)
    # edit_history is stored newest-first; spot-check older entries.
    history = ujson.loads(Message.objects.get(id=msg_id).edit_history)
    self.assertEqual(history[0][LEGACY_PREV_TOPIC], 'topic 3')
    self.assertEqual(history[2][LEGACY_PREV_TOPIC], 'topic 2')
    self.assertEqual(history[3][LEGACY_PREV_TOPIC], 'topic 1')
    self.assertEqual(history[1]['prev_content'], 'content 3')
    self.assertEqual(history[2]['prev_content'], 'content 2')
    self.assertEqual(history[4]['prev_content'], 'content 1')
    # Now, we verify that the edit history data sent back has the
    # correct filled-out fields
    message_edit_history = self.client_get("/json/messages/" + str(msg_id) + "/history")
    json_response = ujson.loads(message_edit_history.content.decode('utf-8'))
    # We reverse the message history view output so that the IDs line up with the above.
    message_history = list(reversed(json_response['message_history']))
    i = 0
    for entry in message_history:
        expected_entries = {'content', 'rendered_content', 'topic', 'timestamp', 'user_id'}
        # Entries 0, 2, 3 were topic edits; 1, 2, 4 were content edits
        # (entry 2 was the combined edit; entry 5 is the original send).
        if i in {0, 2, 3}:
            expected_entries.add('prev_topic')
        if i in {1, 2, 4}:
            expected_entries.add('prev_content')
            expected_entries.add('prev_rendered_content')
            expected_entries.add('content_html_diff')
        i += 1
        self.assertEqual(expected_entries, set(entry.keys()))
    self.assertEqual(len(message_history), 6)
    self.assertEqual(message_history[0]['prev_topic'], 'topic 3')
    self.assertEqual(message_history[0]['topic'], 'topic 4')
    self.assertEqual(message_history[1]['topic'], 'topic 3')
    self.assertEqual(message_history[2]['topic'], 'topic 3')
    self.assertEqual(message_history[2]['prev_topic'], 'topic 2')
    self.assertEqual(message_history[3]['topic'], 'topic 2')
    self.assertEqual(message_history[3]['prev_topic'], 'topic 1')
    self.assertEqual(message_history[4]['topic'], 'topic 1')
    self.assertEqual(message_history[0]['content'], 'content 4')
    self.assertEqual(message_history[1]['content'], 'content 4')
    self.assertEqual(message_history[1]['prev_content'], 'content 3')
    self.assertEqual(message_history[2]['content'], 'content 3')
    self.assertEqual(message_history[2]['prev_content'], 'content 2')
    self.assertEqual(message_history[3]['content'], 'content 2')
    self.assertEqual(message_history[4]['content'], 'content 2')
    self.assertEqual(message_history[4]['prev_content'], 'content 1')
    self.assertEqual(message_history[5]['content'], 'content 1')
    self.assertEqual(message_history[5]['topic'], 'topic 1')
def test_edit_message_content_limit(self) -> None:
    """Exercise allow_message_editing / message_content_edit_limit_seconds:
    content edits respect the time limit, while topic edits do not."""
    def set_message_editing_params(allow_message_editing: bool,
                                   message_content_edit_limit_seconds: int,
                                   allow_community_topic_editing: bool) -> None:
        # Update the realm's message-editing policy via the API.
        result = self.client_patch("/json/realm", {
            'allow_message_editing': ujson.dumps(allow_message_editing),
            'message_content_edit_limit_seconds': message_content_edit_limit_seconds,
            'allow_community_topic_editing': ujson.dumps(allow_community_topic_editing),
        })
        self.assert_json_success(result)

    def do_edit_message_assert_success(id_: int, unique_str: str, topic_only: bool=False) -> None:
        # Edit the message (topic and, unless topic_only, content) and
        # verify that the new values were saved.
        new_topic = 'topic' + unique_str
        new_content = 'content' + unique_str
        params_dict = {'message_id': id_, 'topic': new_topic}
        if not topic_only:
            params_dict['content'] = new_content
        result = self.client_patch("/json/messages/" + str(id_), params_dict)
        self.assert_json_success(result)
        if topic_only:
            self.check_topic(id_, topic_name=new_topic)
        else:
            self.check_message(id_, topic_name=new_topic, content=new_content)

    def do_edit_message_assert_error(id_: int, unique_str: str, error: str,
                                     topic_only: bool=False) -> None:
        # Attempt the edit, assert it fails with `error`, and verify the
        # message was left untouched.
        message = Message.objects.get(id=id_)
        old_topic = message.topic_name()
        old_content = message.content
        new_topic = 'topic' + unique_str
        new_content = 'content' + unique_str
        params_dict = {'message_id': id_, 'topic': new_topic}
        if not topic_only:
            params_dict['content'] = new_content
        result = self.client_patch("/json/messages/" + str(id_), params_dict)
        self.assert_json_error(result, error)
        # (Removed a dead re-fetch of the message that shadowed `message`
        # without ever being used.)
        msg = Message.objects.get(id=id_)
        self.assertEqual(msg.topic_name(), old_topic)
        self.assertEqual(msg.content, old_content)

    self.login('iago')
    # send a message in the past
    id_ = self.send_stream_message(self.example_user("iago"), "Scotland",
                                   content="content", topic_name="topic")
    message = Message.objects.get(id=id_)
    message.date_sent = message.date_sent - datetime.timedelta(seconds=180)
    message.save()

    # test the various possible message editing settings
    # high enough time limit, all edits allowed
    set_message_editing_params(True, 240, False)
    do_edit_message_assert_success(id_, 'A')
    # out of time, only topic editing allowed
    set_message_editing_params(True, 120, False)
    do_edit_message_assert_success(id_, 'B', True)
    do_edit_message_assert_error(id_, 'C', "The time limit for editing this message has passed")
    # infinite time, all edits allowed
    set_message_editing_params(True, 0, False)
    do_edit_message_assert_success(id_, 'D')
    # without allow_message_editing, nothing is allowed
    set_message_editing_params(False, 240, False)
    do_edit_message_assert_error(id_, 'E', "Your organization has turned off message editing", True)
    set_message_editing_params(False, 120, False)
    do_edit_message_assert_error(id_, 'F', "Your organization has turned off message editing", True)
    set_message_editing_params(False, 0, False)
    do_edit_message_assert_error(id_, 'G', "Your organization has turned off message editing", True)
def test_allow_community_topic_editing(self) -> None:
    """Exercise the allow_community_topic_editing realm setting: who may
    edit topics, the 24-hour limit for non-admins, and the "(no topic)"
    exemption."""
    def set_message_editing_params(allow_message_editing: bool,
                                   message_content_edit_limit_seconds: int,
                                   allow_community_topic_editing: bool) -> None:
        # Update the realm's message-editing policy via the API.
        result = self.client_patch("/json/realm", {
            'allow_message_editing': ujson.dumps(allow_message_editing),
            'message_content_edit_limit_seconds': message_content_edit_limit_seconds,
            'allow_community_topic_editing': ujson.dumps(allow_community_topic_editing),
        })
        self.assert_json_success(result)

    def do_edit_message_assert_success(id_: int, unique_str: str) -> None:
        # Edit only the topic and verify it was saved.
        new_topic = 'topic' + unique_str
        params_dict = {'message_id': id_, 'topic': new_topic}
        result = self.client_patch("/json/messages/" + str(id_), params_dict)
        self.assert_json_success(result)
        self.check_topic(id_, topic_name=new_topic)

    def do_edit_message_assert_error(id_: int, unique_str: str, error: str) -> None:
        # Attempt the topic edit, assert it fails with `error`, and
        # verify the message was left untouched.
        message = Message.objects.get(id=id_)
        old_topic = message.topic_name()
        old_content = message.content
        new_topic = 'topic' + unique_str
        params_dict = {'message_id': id_, 'topic': new_topic}
        result = self.client_patch("/json/messages/" + str(id_), params_dict)
        self.assert_json_error(result, error)
        # (Removed a dead re-fetch of the message that shadowed `message`
        # without ever being used.)
        msg = Message.objects.get(id=id_)
        self.assertEqual(msg.topic_name(), old_topic)
        self.assertEqual(msg.content, old_content)

    self.login('iago')
    # send a message in the past
    id_ = self.send_stream_message(self.example_user("hamlet"), "Scotland",
                                   content="content", topic_name="topic")
    message = Message.objects.get(id=id_)
    message.date_sent = message.date_sent - datetime.timedelta(seconds=180)
    message.save()

    # any user can edit the topic of a message
    set_message_editing_params(True, 0, True)
    # log in as a new user
    self.login('cordelia')
    do_edit_message_assert_success(id_, 'A')
    # only admins can edit the topics of messages
    self.login('iago')
    set_message_editing_params(True, 0, False)
    do_edit_message_assert_success(id_, 'B')
    self.login('cordelia')
    do_edit_message_assert_error(id_, 'C', "You don't have permission to edit this message")
    # users cannot edit topics if allow_message_editing is False
    self.login('iago')
    set_message_editing_params(False, 0, True)
    self.login('cordelia')
    do_edit_message_assert_error(id_, 'D', "Your organization has turned off message editing")
    # non-admin users cannot edit topics sent > 24 hrs ago
    message.date_sent = message.date_sent - datetime.timedelta(seconds=90000)
    message.save()
    self.login('iago')
    set_message_editing_params(True, 0, True)
    do_edit_message_assert_success(id_, 'E')
    self.login('cordelia')
    do_edit_message_assert_error(id_, 'F', "The time limit for editing this message has passed")
    # anyone should be able to edit "no topic" indefinitely
    message.set_topic_name("(no topic)")
    message.save()
    self.login('cordelia')
    do_edit_message_assert_success(id_, 'D')
@mock.patch("zerver.lib.actions.send_event")
def test_edit_topic_public_history_stream(self, mock_send_event: mock.MagicMock) -> None:
    """Topic edits on a history-public stream notify exactly the right
    users: subscribers plus UserMessage holders, minus long-term-idle
    users who have no UserMessage row."""
    stream_name = "Macbeth"
    hamlet = self.example_user("hamlet")
    cordelia = self.example_user("cordelia")
    self.make_stream(stream_name, history_public_to_subscribers=True)
    self.subscribe(hamlet, stream_name)
    self.login_user(hamlet)
    message_id = self.send_stream_message(hamlet, stream_name, "Where am I?")
    # Cordelia subscribes only after the message was sent, so she has no
    # UserMessage row for it.
    self.login_user(cordelia)
    self.subscribe(cordelia, stream_name)
    message = Message.objects.get(id=message_id)
    # Perform the topic edit and assert send_event was called with
    # exactly users_to_be_notified.
    def do_update_message_topic_success(user_profile: UserProfile, message: Message,
                                        topic_name: str, users_to_be_notified: List[Dict[str, Any]]) -> None:
        do_update_message(
            user_profile=user_profile,
            message=message,
            new_stream=None,
            topic_name=topic_name,
            propagate_mode="change_later",
            content=None,
            rendered_content=None,
            prior_mention_user_ids=set(),
            mention_user_ids=set(),
            mention_data=None,
        )
        mock_send_event.assert_called_with(mock.ANY, mock.ANY, users_to_be_notified)
    # Returns the users that need to be notified when a message topic is changed
    def notify(user_id: int) -> Dict[str, Any]:
        um = UserMessage.objects.get(message=message_id)
        if um.user_profile_id == user_id:
            return {
                "id": user_id,
                "flags": um.flags_list()
            }
        else:
            return {
                "id": user_id,
                "flags": ["read"]
            }
    users_to_be_notified = list(map(notify, [hamlet.id, cordelia.id]))
    # Edit topic of a message sent before Cordelia subscribed the stream
    do_update_message_topic_success(cordelia, message, "Othello eats apple", users_to_be_notified)
    # If Cordelia is long-term idle, she doesn't get a notification.
    cordelia.long_term_idle = True
    cordelia.save()
    users_to_be_notified = list(map(notify, [hamlet.id]))
    do_update_message_topic_success(cordelia, message, "Another topic idle", users_to_be_notified)
    cordelia.long_term_idle = False
    cordelia.save()
    # Even if Hamlet unsubscribes the stream, he should be notified when the topic is changed
    # because he has a UserMessage row.
    self.unsubscribe(hamlet, stream_name)
    users_to_be_notified = list(map(notify, [hamlet.id, cordelia.id]))
    do_update_message_topic_success(cordelia, message, "Another topic", users_to_be_notified)
    # Hamlet subscribes to the stream again and Cordelia unsubscribes, then Hamlet changes
    # the message topic. Cordelia won't receive any updates when a message on that stream is
    # changed because she is not a subscriber and doesn't have a UserMessage row.
    self.subscribe(hamlet, stream_name)
    self.unsubscribe(cordelia, stream_name)
    self.login_user(hamlet)
    users_to_be_notified = list(map(notify, [hamlet.id]))
    do_update_message_topic_success(hamlet, message, "Change again", users_to_be_notified)
@mock.patch("zerver.lib.actions.send_event")
def test_wildcard_mention(self, mock_send_event: mock.MagicMock) -> None:
stream_name = "Macbeth"
hamlet = self.example_user("hamlet")
cordelia = self.example_user("cordelia")
self.make_stream(stream_name, history_public_to_subscribers=True)
self.subscribe(hamlet, stream_name)
self.subscribe(cordelia, stream_name)
self.login_user(hamlet)
message_id = self.send_stream_message(hamlet, stream_name, "Hello everyone")
def notify(user_id: int) -> Dict[str, Any]:
return {
"id": user_id,
"flags": ["wildcard_mentioned"]
}
users_to_be_notified = sorted(map(notify, [cordelia.id, hamlet.id]), key=itemgetter("id"))
result = self.client_patch("/json/messages/" + str(message_id), {
'message_id': message_id,
'content': 'Hello @**everyone**',
})
self.assert_json_success(result)
# Extract the send_event call where event type is 'update_message'.
# Here we assert wildcard_mention_user_ids has been set properly.
called = False
for call_args in mock_send_event.call_args_list:
(arg_realm, arg_event, arg_notified_users) = call_args[0]
if arg_event['type'] == 'update_message':
self.assertEqual(arg_event['type'], 'update_message')
self.assertEqual(arg_event['wildcard_mention_user_ids'], [cordelia.id, hamlet.id])
self.assertEqual(sorted(arg_notified_users, key=itemgetter("id")), users_to_be_notified)
called = True
self.assertTrue(called)
def test_propagate_topic_forward(self) -> None:
self.login('hamlet')
id1 = self.send_stream_message(self.example_user("hamlet"), "Scotland",
topic_name="topic1")
id2 = self.send_stream_message(self.example_user("iago"), "Scotland",
topic_name="topic1")
id3 = self.send_stream_message(self.example_user("iago"), "Rome",
topic_name="topic1")
id4 = self.send_stream_message(self.example_user("hamlet"), "Scotland",
topic_name="topic2")
id5 = self.send_stream_message(self.example_user("iago"), "Scotland",
topic_name="topic1")
result = self.client_patch("/json/messages/" + str(id1), {
'message_id': id1,
'topic': 'edited',
'propagate_mode': 'change_later'
})
self.assert_json_success(result)
self.check_topic(id1, topic_name="edited")
self.check_topic(id2, topic_name="edited")
self.check_topic(id3, topic_name="topic1")
self.check_topic(id4, topic_name="topic2")
self.check_topic(id5, topic_name="edited")
def test_propagate_all_topics(self) -> None:
self.login('hamlet')
id1 = self.send_stream_message(self.example_user("hamlet"), "Scotland",
topic_name="topic1")
id2 = self.send_stream_message(self.example_user("hamlet"), "Scotland",
topic_name="topic1")
id3 = self.send_stream_message(self.example_user("iago"), "Rome",
topic_name="topic1")
id4 = self.send_stream_message(self.example_user("hamlet"), "Scotland",
topic_name="topic2")
id5 = self.send_stream_message(self.example_user("iago"), "Scotland",
topic_name="topic1")
id6 = self.send_stream_message(self.example_user("iago"), "Scotland",
topic_name="topic3")
result = self.client_patch("/json/messages/" + str(id2), {
'message_id': id2,
'topic': 'edited',
'propagate_mode': 'change_all'
})
self.assert_json_success(result)
self.check_topic(id1, topic_name="edited")
self.check_topic(id2, topic_name="edited")
self.check_topic(id3, topic_name="topic1")
self.check_topic(id4, topic_name="topic2")
self.check_topic(id5, topic_name="edited")
self.check_topic(id6, topic_name="topic3")
def test_propagate_invalid(self) -> None:
self.login('hamlet')
id1 = self.send_stream_message(self.example_user("hamlet"), "Scotland",
topic_name="topic1")
result = self.client_patch("/json/messages/" + str(id1), {
'topic': 'edited',
'propagate_mode': 'invalid',
})
self.assert_json_error(result, 'Invalid propagate_mode')
self.check_topic(id1, topic_name="topic1")
result = self.client_patch("/json/messages/" + str(id1), {
'content': 'edited',
'propagate_mode': 'change_all',
})
self.assert_json_error(result, 'Invalid propagate_mode without topic edit')
self.check_topic(id1, topic_name="topic1")
def prepare_move_topics(self, user_email: str, old_stream: str, new_stream: str, topic: str) -> Tuple[UserProfile, Stream, Stream, int, int]:
user_profile = self.example_user(user_email)
self.login(user_email)
stream = self.make_stream(old_stream)
new_stream = self.make_stream(new_stream)
self.subscribe(user_profile, stream.name)
self.subscribe(user_profile, new_stream.name)
msg_id = self.send_stream_message(user_profile, stream.name,
topic_name=topic, content="First")
msg_id_lt = self.send_stream_message(user_profile, stream.name,
topic_name=topic, content="Second")
self.send_stream_message(user_profile, stream.name,
topic_name=topic, content="third")
return (user_profile, stream, new_stream, msg_id, msg_id_lt)
    def test_move_message_to_stream(self) -> None:
        """Moving a topic with 'change_all' moves every message to the new
        stream and leaves breadcrumb notifications in both streams."""
        (user_profile, old_stream, new_stream, msg_id, msg_id_lt) = self.prepare_move_topics(
            "iago", "test move stream", "new stream", "test")
        result = self.client_patch("/json/messages/" + str(msg_id), {
            'message_id': msg_id,
            'stream_id': new_stream.id,
            'propagate_mode': 'change_all'
        })
        self.assert_json_success(result)
        # Only the "moved" breadcrumb message remains in the old stream.
        messages = get_topic_messages(user_profile, old_stream, "test")
        self.assertEqual(len(messages), 1)
        self.assertEqual(messages[0].content, "This topic was moved by @_**Iago|%s** to #**new stream>test**" % (user_profile.id,))
        # The three original messages plus the "moved here" breadcrumb.
        messages = get_topic_messages(user_profile, new_stream, "test")
        self.assertEqual(len(messages), 4)
        self.assertEqual(messages[3].content, "This topic was moved here from #**test move stream>test** by @_**Iago|%s**" % (user_profile.id,))
    def test_move_message_to_stream_change_later(self) -> None:
        """Moving with 'change_later' moves only the edited message and the
        messages after it; earlier messages stay in the old stream."""
        (user_profile, old_stream, new_stream, msg_id, msg_id_later) = self.prepare_move_topics(
            "iago", "test move stream", "new stream", "test")
        result = self.client_patch("/json/messages/" + str(msg_id_later), {
            'message_id': msg_id_later,
            'stream_id': new_stream.id,
            'propagate_mode': 'change_later'
        })
        self.assert_json_success(result)
        # The first message plus the "moved" breadcrumb stay behind.
        messages = get_topic_messages(user_profile, old_stream, "test")
        self.assertEqual(len(messages), 2)
        self.assertEqual(messages[0].id, msg_id)
        self.assertEqual(messages[1].content, "This topic was moved by @_**Iago|%s** to #**new stream>test**" % (user_profile.id,))
        # The two moved messages plus the "moved here" breadcrumb.
        # (Uses %d where the sibling tests use %s; identical output for ints.)
        messages = get_topic_messages(user_profile, new_stream, "test")
        self.assertEqual(len(messages), 3)
        self.assertEqual(messages[0].id, msg_id_later)
        self.assertEqual(messages[2].content, "This topic was moved here from #**test move stream>test** by @_**Iago|%d**" % (user_profile.id,))
    def test_move_message_to_stream_no_allowed(self) -> None:
        """A user without move permission ("aaron", a plain member) gets an
        error and no messages are moved."""
        (user_profile, old_stream, new_stream, msg_id, msg_id_later) = self.prepare_move_topics(
            "aaron", "test move stream", "new stream", "test")
        result = self.client_patch("/json/messages/" + str(msg_id), {
            'message_id': msg_id,
            'stream_id': new_stream.id,
            'propagate_mode': 'change_all'
        })
        self.assert_json_error(result, "You don't have permission to move this message")
        # All three messages remain in the old stream; nothing was moved.
        messages = get_topic_messages(user_profile, old_stream, "test")
        self.assertEqual(len(messages), 3)
        messages = get_topic_messages(user_profile, new_stream, "test")
        self.assertEqual(len(messages), 0)
    def test_move_message_to_stream_with_content(self) -> None:
        """A content edit cannot be combined with a stream move in a single
        request; the request fails and nothing is moved."""
        (user_profile, old_stream, new_stream, msg_id, msg_id_later) = self.prepare_move_topics(
            "iago", "test move stream", "new stream", "test")
        result = self.client_patch("/json/messages/" + str(msg_id), {
            'message_id': msg_id,
            'stream_id': new_stream.id,
            'propagate_mode': 'change_all',
            'content': 'Not allowed'
        })
        self.assert_json_error(result, "Cannot change message content while changing stream")
        # All three messages remain in the old stream; nothing was moved.
        messages = get_topic_messages(user_profile, old_stream, "test")
        self.assertEqual(len(messages), 3)
        messages = get_topic_messages(user_profile, new_stream, "test")
        self.assertEqual(len(messages), 0)
def test_move_message_to_stream_and_topic(self) -> None:
(user_profile, old_stream, new_stream, msg_id, msg_id_later) = self.prepare_move_topics(
"iago", "test move stream", "new stream", "test")
result = self.client_patch("/json/messages/" + str(msg_id), {
'message_id': msg_id,
'stream_id': new_stream.id,
'propagate_mode': 'change_all',
'topic': 'new topic'
})
messages = get_topic_messages(user_profile, old_stream, "test")
self.assertEqual(len(messages), 1)
self.assertEqual(messages[0].content, "This topic was moved by @_**Iago|%s** to #**new stream>new topic**" % (user_profile.id,))
messages = get_topic_messages(user_profile, new_stream, "new topic")
self.assertEqual(len(messages), 4)
self.assertEqual(messages[3].content, "This topic was moved here from #**test move stream>test** by @_**Iago|%s**" % (user_profile.id,))
self.assert_json_success(result)
    def test_move_message_to_stream_to_private_stream(self) -> None:
        """Messages cannot be moved into a private (invite-only) stream."""
        user_profile = self.example_user("iago")
        self.login("iago")
        stream = self.make_stream("test move stream")
        # Positional args: (name, realm=None, invite_only=True) — the target
        # stream is private.
        new_stream = self.make_stream("new stream", None, True)
        self.subscribe(user_profile, stream.name)
        self.subscribe(user_profile, new_stream.name)
        msg_id = self.send_stream_message(user_profile, stream.name,
                                          topic_name="test", content="First")
        self.send_stream_message(user_profile, stream.name,
                                 topic_name="test", content="Second")
        result = self.client_patch("/json/messages/" + str(msg_id), {
            'message_id': msg_id,
            'stream_id': new_stream.id,
            'propagate_mode': 'change_all',
        })
        self.assert_json_error(result, "Streams must be public")
        # We expect the messages to remain in the original stream/topic.
        messages = get_topic_messages(user_profile, stream, "test")
        self.assertEqual(len(messages), 2)
        messages = get_topic_messages(user_profile, new_stream, "test")
        self.assertEqual(len(messages), 0)
class MirroredMessageUsersTest(ZulipTestCase):
    """Tests for create_mirrored_message_users(), which validates message
    mirroring requests (zephyr/IRC/jabber mirrors) and creates "mirror
    dummy" user accounts for senders/recipients that don't exist yet."""
    def test_invalid_sender(self) -> None:
        """A request whose POST dict has no sender raises InvalidMirrorInput."""
        user = self.example_user('hamlet')
        recipients: List[str] = []
        Request = namedtuple('Request', ['POST'])
        request = Request(POST=dict())  # no sender
        with self.assertRaises(InvalidMirrorInput):
            create_mirrored_message_users(request, user, recipients)
    def test_invalid_client(self) -> None:
        """Only recognized mirroring clients are allowed to mirror messages."""
        client = get_client(name='banned_mirror')  # not a valid mirroring client
        user = self.example_user('hamlet')
        sender = user
        recipients: List[str] = []
        Request = namedtuple('Request', ['POST', 'client'])
        request = Request(POST = dict(sender=sender.email, type='private'),
                          client = client)
        with self.assertRaises(InvalidMirrorInput):
            create_mirrored_message_users(request, user, recipients)
    def test_invalid_email(self) -> None:
        """A syntactically invalid recipient email is rejected for every
        supported mirroring client."""
        invalid_email = 'alice AT example.com'
        recipients = [invalid_email]
        # We use an MIT user here to maximize code coverage
        user = self.mit_user('starnine')
        sender = user
        Request = namedtuple('Request', ['POST', 'client'])
        for client_name in ['zephyr_mirror', 'irc_mirror', 'jabber_mirror']:
            client = get_client(name=client_name)
            request = Request(POST = dict(sender=sender.email, type='private'),
                              client = client)
            with self.assertRaises(InvalidMirrorInput):
                create_mirrored_message_users(request, user, recipients)
    # The DNS mock makes the MIT Hesiod lookup for the sender succeed offline.
    @mock.patch('DNS.dnslookup', return_value=[['sipbtest:*:20922:101:Fred Sipb,,,:/mit/sipbtest:/bin/athena/tcsh']])
    def test_zephyr_mirror_new_recipient(self, ignored: object) -> None:
        """Test mirror dummy user creation for PM recipients"""
        client = get_client(name='zephyr_mirror')
        user = self.mit_user('starnine')
        sender = self.mit_user('sipbtest')
        new_user_email = 'bob_the_new_user@mit.edu'
        new_user_realm = get_realm("zephyr")
        recipients = [user.email, new_user_email]
        # Now make the request.
        Request = namedtuple('Request', ['POST', 'client'])
        request = Request(POST = dict(sender=sender.email, type='private'),
                          client = client)
        mirror_sender = create_mirrored_message_users(request, user, recipients)
        self.assertEqual(mirror_sender, sender)
        realm_users = UserProfile.objects.filter(realm=sender.realm)
        realm_emails = {user.email for user in realm_users}
        self.assertIn(user.email, realm_emails)
        self.assertIn(new_user_email, realm_emails)
        # The previously nonexistent recipient was created as a mirror dummy.
        bob = get_user(new_user_email, new_user_realm)
        self.assertTrue(bob.is_mirror_dummy)
    @mock.patch('DNS.dnslookup', return_value=[['sipbtest:*:20922:101:Fred Sipb,,,:/mit/sipbtest:/bin/athena/tcsh']])
    def test_zephyr_mirror_new_sender(self, ignored: object) -> None:
        """Test mirror dummy user creation for sender when sending to stream"""
        client = get_client(name='zephyr_mirror')
        user = self.mit_user('starnine')
        sender_email = 'new_sender@mit.edu'
        recipients = ['stream_name']
        # Now make the request.
        Request = namedtuple('Request', ['POST', 'client'])
        request = Request(POST = dict(sender=sender_email, type='stream'),
                          client = client)
        mirror_sender = create_mirrored_message_users(request, user, recipients)
        assert(mirror_sender is not None)
        self.assertEqual(mirror_sender.email, sender_email)
        self.assertTrue(mirror_sender.is_mirror_dummy)
    def test_irc_mirror(self) -> None:
        """IRC mirroring creates mirror dummy accounts for unknown recipients."""
        reset_emails_in_zulip_realm()
        client = get_client(name='irc_mirror')
        sender = self.example_user('hamlet')
        recipients = [self.nonreg_email('alice'), 'bob@irc.zulip.com', self.nonreg_email('cordelia')]
        # Now make the request.
        Request = namedtuple('Request', ['POST', 'client'])
        request = Request(POST = dict(sender=sender.email, type='private'),
                          client = client)
        mirror_sender = create_mirrored_message_users(request, sender, recipients)
        self.assertEqual(mirror_sender, sender)
        realm_users = UserProfile.objects.filter(realm=sender.realm)
        realm_emails = {user.email for user in realm_users}
        self.assertIn(self.nonreg_email('alice'), realm_emails)
        self.assertIn('bob@irc.zulip.com', realm_emails)
        bob = get_user('bob@irc.zulip.com', sender.realm)
        self.assertTrue(bob.is_mirror_dummy)
    def test_jabber_mirror(self) -> None:
        """Jabber mirroring creates mirror dummy accounts for unknown recipients."""
        reset_emails_in_zulip_realm()
        client = get_client(name='jabber_mirror')
        sender = self.example_user('hamlet')
        user = sender
        recipients = [self.nonreg_email('alice'), self.nonreg_email('bob'), self.nonreg_email('cordelia')]
        # Now make the request.
        Request = namedtuple('Request', ['POST', 'client'])
        request = Request(POST = dict(sender=sender.email, type='private'),
                          client = client)
        mirror_sender = create_mirrored_message_users(request, user, recipients)
        self.assertEqual(mirror_sender, sender)
        realm_users = UserProfile.objects.filter(realm=sender.realm)
        realm_emails = {user.email for user in realm_users}
        self.assertIn(self.nonreg_email('alice'), realm_emails)
        self.assertIn(self.nonreg_email('bob'), realm_emails)
        bob = get_user(self.nonreg_email('bob'), sender.realm)
        self.assertTrue(bob.is_mirror_dummy)
class MessageAccessTests(ZulipTestCase):
    """Tests for message-flag updates (/json/messages/flags) and for which
    messages a user may access/star: public vs. private streams,
    historical messages, protected history, and guest users."""
    def test_update_invalid_flags(self) -> None:
        """Unknown flags and non-client-editable flags are rejected."""
        message = self.send_personal_message(
            self.example_user("cordelia"),
            self.example_user("hamlet"),
            "hello",
        )
        self.login('hamlet')
        # Completely unknown flag name.
        result = self.client_post("/json/messages/flags",
                                  {"messages": ujson.dumps([message]),
                                   "op": "add",
                                   "flag": "invalid"})
        self.assert_json_error(result, "Invalid flag: 'invalid'")
        result = self.client_post("/json/messages/flags",
                                  {"messages": ujson.dumps([message]),
                                   "op": "add",
                                   "flag": "is_private"})
        self.assert_json_error(result, "Invalid flag: 'is_private'")
        result = self.client_post("/json/messages/flags",
                                  {"messages": ujson.dumps([message]),
                                   "op": "add",
                                   "flag": "active_mobile_push_notification"})
        self.assert_json_error(result, "Invalid flag: 'active_mobile_push_notification'")
        # 'mentioned' exists but is server-managed, so clients may not edit it.
        result = self.client_post("/json/messages/flags",
                                  {"messages": ujson.dumps([message]),
                                   "op": "add",
                                   "flag": "mentioned"})
        self.assert_json_error(result, "Flag not editable: 'mentioned'")
    def change_star(self, messages: List[int], add: bool=True, **kwargs: Any) -> HttpResponse:
        """Star (or, with add=False, unstar) the given message ids via the
        flags API; extra kwargs (e.g. subdomain=) pass through to client_post."""
        return self.client_post("/json/messages/flags",
                                {"messages": ujson.dumps(messages),
                                 "op": "add" if add else "remove",
                                 "flag": "starred"},
                                **kwargs)
    def test_change_star(self) -> None:
        """
        You can set a message as starred/un-starred through
        POST /json/messages/flags.
        """
        self.login('hamlet')
        message_ids = [self.send_personal_message(self.example_user("hamlet"),
                                                  self.example_user("hamlet"),
                                                  "test")]
        # Star a message.
        result = self.change_star(message_ids)
        self.assert_json_success(result)
        for msg in self.get_messages():
            if msg['id'] in message_ids:
                self.assertEqual(msg['flags'], ['starred'])
            else:
                self.assertEqual(msg['flags'], ['read'])
        # Remove the star again.
        result = self.change_star(message_ids, False)
        self.assert_json_success(result)
        # Verify the star was removed.
        for msg in self.get_messages():
            if msg['id'] in message_ids:
                self.assertEqual(msg['flags'], [])
    def test_change_star_public_stream_historical(self) -> None:
        """
        Starring a public-stream message you don't have a UserMessage row
        for ("historical" access) works, but only the 'starred' flag may be
        changed on such messages, and only one at a time.
        """
        stream_name = "new_stream"
        self.subscribe(self.example_user("hamlet"), stream_name)
        self.login('hamlet')
        message_ids = [
            self.send_stream_message(self.example_user("hamlet"), stream_name, "test"),
        ]
        # Send a second message so we can verify it isn't modified
        other_message_ids = [
            self.send_stream_message(self.example_user("hamlet"), stream_name, "test_unused"),
        ]
        received_message_ids = [
            self.send_personal_message(
                self.example_user("hamlet"),
                self.example_user("cordelia"),
                "test_received"
            ),
        ]
        # Now login as another user who wasn't on that stream
        self.login('cordelia')
        # Send a message to yourself to make sure we have at least one with the read flag
        sent_message_ids = [
            self.send_personal_message(
                self.example_user("cordelia"),
                self.example_user("cordelia"),
                "test_read_message",
            ),
        ]
        # NOTE(review): this result is intentionally(?) not asserted —
        # consider adding assert_json_success here.
        result = self.client_post("/json/messages/flags",
                                  {"messages": ujson.dumps(sent_message_ids),
                                   "op": "add",
                                   "flag": "read"})
        # We can't change flags other than "starred" on historical messages:
        result = self.client_post("/json/messages/flags",
                                  {"messages": ujson.dumps(message_ids),
                                   "op": "add",
                                   "flag": "read"})
        self.assert_json_error(result, 'Invalid message(s)')
        # Trying to change a list of more than one historical message fails
        result = self.change_star(message_ids * 2)
        self.assert_json_error(result, 'Invalid message(s)')
        # Confirm that one can change the historical flag now
        result = self.change_star(message_ids)
        self.assert_json_success(result)
        for msg in self.get_messages():
            if msg['id'] in message_ids:
                self.assertEqual(set(msg['flags']), {'starred', 'historical', 'read'})
            elif msg['id'] in received_message_ids:
                self.assertEqual(msg['flags'], [])
            else:
                self.assertEqual(msg['flags'], ['read'])
            self.assertNotIn(msg['id'], other_message_ids)
        result = self.change_star(message_ids, False)
        self.assert_json_success(result)
        # But it still doesn't work if you're in another realm
        user = self.mit_user('sipbtest')
        self.login_user(user)
        result = self.change_star(message_ids, subdomain="zephyr")
        self.assert_json_error(result, 'Invalid message(s)')
    def test_change_star_private_message_security(self) -> None:
        """
        You cannot star a private message that wasn't sent to you.
        """
        self.login('hamlet')
        message_ids = [
            self.send_personal_message(
                self.example_user("hamlet"),
                self.example_user("hamlet"),
                "test",
            ),
        ]
        # Starring private messages you didn't receive fails.
        self.login('cordelia')
        result = self.change_star(message_ids)
        self.assert_json_error(result, 'Invalid message(s)')
    def test_change_star_private_stream_security(self) -> None:
        """Access to private-stream messages for starring depends on
        subscription and on history_public_to_subscribers."""
        stream_name = "private_stream"
        self.make_stream(stream_name, invite_only=True)
        self.subscribe(self.example_user("hamlet"), stream_name)
        self.login('hamlet')
        message_ids = [
            self.send_stream_message(self.example_user("hamlet"), stream_name, "test"),
        ]
        # Starring private stream messages you received works
        result = self.change_star(message_ids)
        self.assert_json_success(result)
        # Starring private stream messages you didn't receive fails.
        self.login('cordelia')
        result = self.change_star(message_ids)
        self.assert_json_error(result, 'Invalid message(s)')
        stream_name = "private_stream_2"
        self.make_stream(stream_name, invite_only=True,
                         history_public_to_subscribers=True)
        self.subscribe(self.example_user("hamlet"), stream_name)
        self.login('hamlet')
        message_ids = [
            self.send_stream_message(self.example_user("hamlet"), stream_name, "test"),
        ]
        # With stream.history_public_to_subscribers = True, you still
        # can't see it if you didn't receive the message and are
        # not subscribed.
        self.login('cordelia')
        result = self.change_star(message_ids)
        self.assert_json_error(result, 'Invalid message(s)')
        # But if you subscribe, then you can star the message
        self.subscribe(self.example_user("cordelia"), stream_name)
        result = self.change_star(message_ids)
        self.assert_json_success(result)
    def test_new_message(self) -> None:
        """
        New messages aren't starred.
        """
        sender = self.example_user('hamlet')
        self.login_user(sender)
        content = "Test message for star"
        self.send_stream_message(sender, "Verona",
                                 content=content)
        # Fetch the sender's most recent UserMessage row.
        sent_message = UserMessage.objects.filter(
            user_profile=self.example_user('hamlet')
        ).order_by("id").reverse()[0]
        self.assertEqual(sent_message.message.content, content)
        self.assertFalse(sent_message.flags.starred)
    def test_change_star_public_stream_security_for_guest_user(self) -> None:
        """Guest users can only star public-stream messages of streams they
        are subscribed to."""
        # Guest user can't access(star) unsubscribed public stream messages
        normal_user = self.example_user("hamlet")
        stream_name = "public_stream"
        self.make_stream(stream_name)
        self.subscribe(normal_user, stream_name)
        self.login_user(normal_user)
        message_id = [
            self.send_stream_message(normal_user, stream_name, "test 1")
        ]
        guest_user = self.example_user('polonius')
        self.login_user(guest_user)
        result = self.change_star(message_id)
        self.assert_json_error(result, 'Invalid message(s)')
        # Subscribed guest users can access public stream messages sent before they join
        self.subscribe(guest_user, stream_name)
        result = self.change_star(message_id)
        self.assert_json_success(result)
        # And messages sent after they join
        self.login_user(normal_user)
        message_id = [
            self.send_stream_message(normal_user, stream_name, "test 2")
        ]
        self.login_user(guest_user)
        result = self.change_star(message_id)
        self.assert_json_success(result)
    def test_change_star_private_stream_security_for_guest_user(self) -> None:
        """For guests, starring private-stream messages additionally depends
        on history_public_to_subscribers."""
        # Guest users can't access(star) unsubscribed private stream messages
        normal_user = self.example_user("hamlet")
        stream_name = "private_stream"
        stream = self.make_stream(stream_name, invite_only=True)
        self.subscribe(normal_user, stream_name)
        self.login_user(normal_user)
        message_id = [
            self.send_stream_message(normal_user, stream_name, "test 1")
        ]
        guest_user = self.example_user('polonius')
        self.login_user(guest_user)
        result = self.change_star(message_id)
        self.assert_json_error(result, 'Invalid message(s)')
        # Guest user can't access messages of subscribed private streams if
        # history is not public to subscribers
        self.subscribe(guest_user, stream_name)
        result = self.change_star(message_id)
        self.assert_json_error(result, 'Invalid message(s)')
        # Guest user can access messages of subscribed private streams if
        # history is public to subscribers
        do_change_stream_invite_only(stream, True, history_public_to_subscribers=True)
        result = self.change_star(message_id)
        self.assert_json_success(result)
        # With history not public to subscribers, they can still see new messages
        do_change_stream_invite_only(stream, True, history_public_to_subscribers=False)
        self.login_user(normal_user)
        message_id = [
            self.send_stream_message(normal_user, stream_name, "test 2")
        ]
        self.login_user(guest_user)
        result = self.change_star(message_id)
        self.assert_json_success(result)
    def test_bulk_access_messages_private_stream(self) -> None:
        """bulk_access_messages filters private-stream messages by
        subscription time when the stream has protected history."""
        user = self.example_user("hamlet")
        self.login_user(user)
        stream_name = "private_stream"
        stream = self.make_stream(stream_name, invite_only=True,
                                  history_public_to_subscribers=False)
        self.subscribe(user, stream_name)
        # Send a message before subscribing a new user to stream
        message_one_id = self.send_stream_message(user,
                                                  stream_name, "Message one")
        later_subscribed_user = self.example_user("cordelia")
        # Subscribe a user to private-protected history stream
        self.subscribe(later_subscribed_user, stream_name)
        # Send a message after subscribing a new user to stream
        message_two_id = self.send_stream_message(user,
                                                  stream_name, "Message two")
        message_ids = [message_one_id, message_two_id]
        messages = [Message.objects.select_related().get(id=message_id)
                    for message_id in message_ids]
        filtered_messages = bulk_access_messages(later_subscribed_user, messages)
        # Message sent before subscribing wouldn't be accessible by the later
        # subscribed user, as the stream has protected history
        self.assertEqual(len(filtered_messages), 1)
        self.assertEqual(filtered_messages[0].id, message_two_id)
        do_change_stream_invite_only(stream, True, history_public_to_subscribers=True)
        filtered_messages = bulk_access_messages(later_subscribed_user, messages)
        # Messages sent before subscribing are accessible by the user, as the
        # stream no longer has protected history
        self.assertEqual(len(filtered_messages), 2)
        # Testing message accessibility for an unsubscribed user
        unsubscribed_user = self.example_user("ZOE")
        filtered_messages = bulk_access_messages(unsubscribed_user, messages)
        self.assertEqual(len(filtered_messages), 0)
    def test_bulk_access_messages_public_stream(self) -> None:
        """bulk_access_messages allows everyone to access public-stream
        messages, regardless of subscription."""
        user = self.example_user("hamlet")
        self.login_user(user)
        # Testing message accessibility including a public stream message
        stream_name = "public_stream"
        self.subscribe(user, stream_name)
        message_one_id = self.send_stream_message(user,
                                                  stream_name, "Message one")
        later_subscribed_user = self.example_user("cordelia")
        self.subscribe(later_subscribed_user, stream_name)
        # Send a message after subscribing a new user to stream
        message_two_id = self.send_stream_message(user,
                                                  stream_name, "Message two")
        message_ids = [message_one_id, message_two_id]
        messages = [Message.objects.select_related().get(id=message_id)
                    for message_id in message_ids]
        # All public stream messages are always accessible
        filtered_messages = bulk_access_messages(later_subscribed_user, messages)
        self.assertEqual(len(filtered_messages), 2)
        unsubscribed_user = self.example_user("ZOE")
        filtered_messages = bulk_access_messages(unsubscribed_user, messages)
        self.assertEqual(len(filtered_messages), 2)
class MessageHasKeywordsTest(ZulipTestCase):
'''Test for keywords like has_link, has_image, has_attachment.'''
def setup_dummy_attachments(self, user_profile: UserProfile) -> List[str]:
sample_size = 10
realm_id = user_profile.realm_id
dummy_files = [
('zulip.txt', '%s/31/4CBjtTLYZhk66pZrF8hnYGwc/zulip.txt' % (realm_id,), sample_size),
('temp_file.py', '%s/31/4CBjtTLYZhk66pZrF8hnYGwc/temp_file.py' % (realm_id,), sample_size),
('abc.py', '%s/31/4CBjtTLYZhk66pZrF8hnYGwc/abc.py' % (realm_id,), sample_size)
]
for file_name, path_id, size in dummy_files:
create_attachment(file_name, path_id, user_profile, size)
# return path ids
return [x[1] for x in dummy_files]
def test_claim_attachment(self) -> None:
user_profile = self.example_user('hamlet')
dummy_path_ids = self.setup_dummy_attachments(user_profile)
dummy_urls = ["http://zulip.testserver/user_uploads/{}".format(x) for x in dummy_path_ids]
# Send message referring the attachment
self.subscribe(user_profile, "Denmark")
def assert_attachment_claimed(path_id: str, claimed: bool) -> None:
attachment = Attachment.objects.get(path_id=path_id)
self.assertEqual(attachment.is_claimed(), claimed)
# This message should claim attachments 1 only because attachment 2
# is not being parsed as a link by Bugdown.
body = ("Some files here ...[zulip.txt]({})" +
"{}.... Some more...." +
"{}").format(dummy_urls[0], dummy_urls[1], dummy_urls[1])
self.send_stream_message(user_profile, "Denmark", body, "test")
assert_attachment_claimed(dummy_path_ids[0], True)
assert_attachment_claimed(dummy_path_ids[1], False)
# This message tries to claim the third attachment but fails because
# Bugdown would not set has_attachments = True here.
body = "Link in code: `{}`".format(dummy_urls[2])
self.send_stream_message(user_profile, "Denmark", body, "test")
assert_attachment_claimed(dummy_path_ids[2], False)
# Another scenario where we wouldn't parse the link.
body = "Link to not parse: .{}.`".format(dummy_urls[2])
self.send_stream_message(user_profile, "Denmark", body, "test")
assert_attachment_claimed(dummy_path_ids[2], False)
# Finally, claim attachment 3.
body = "Link: {}".format(dummy_urls[2])
self.send_stream_message(user_profile, "Denmark", body, "test")
assert_attachment_claimed(dummy_path_ids[2], True)
assert_attachment_claimed(dummy_path_ids[1], False)
def test_finds_all_links(self) -> None:
msg_ids = []
msg_contents = ["foo.org", "[bar](baz.gov)", "http://quux.ca"]
for msg_content in msg_contents:
msg_ids.append(self.send_stream_message(self.example_user('hamlet'),
'Denmark', content=msg_content))
msgs = [Message.objects.get(id=id) for id in msg_ids]
self.assertTrue(all([msg.has_link for msg in msgs]))
def test_finds_only_links(self) -> None:
msg_ids = []
msg_contents = ["`example.org`", '``example.org```', '$$https://example.org$$', "foo"]
for msg_content in msg_contents:
msg_ids.append(self.send_stream_message(self.example_user('hamlet'),
'Denmark', content=msg_content))
msgs = [Message.objects.get(id=id) for id in msg_ids]
self.assertFalse(all([msg.has_link for msg in msgs]))
def update_message(self, msg: Message, content: str) -> None:
hamlet = self.example_user('hamlet')
realm_id = hamlet.realm.id
rendered_content = render_markdown(msg, content)
mention_data = bugdown.MentionData(realm_id, content)
do_update_message(hamlet, msg, None, None, "change_one", content,
rendered_content, set(), set(), mention_data=mention_data)
def test_finds_link_after_edit(self) -> None:
hamlet = self.example_user('hamlet')
msg_id = self.send_stream_message(hamlet, 'Denmark', content='a')
msg = Message.objects.get(id=msg_id)
self.assertFalse(msg.has_link)
self.update_message(msg, 'a http://foo.com')
self.assertTrue(msg.has_link)
self.update_message(msg, 'a')
self.assertFalse(msg.has_link)
# Check in blockquotes work
self.update_message(msg, '> http://bar.com')
self.assertTrue(msg.has_link)
self.update_message(msg, 'a `http://foo.com`')
self.assertFalse(msg.has_link)
def test_has_image(self) -> None:
msg_ids = []
msg_contents = ["Link: foo.org",
"Image: https://www.google.com/images/srpr/logo4w.png",
"Image: https://www.google.com/images/srpr/logo4w.pdf",
"[Google Link](https://www.google.com/images/srpr/logo4w.png)"]
for msg_content in msg_contents:
msg_ids.append(self.send_stream_message(self.example_user('hamlet'),
'Denmark', content=msg_content))
msgs = [Message.objects.get(id=id) for id in msg_ids]
self.assertEqual([False, True, False, True], [msg.has_image for msg in msgs])
self.update_message(msgs[0], 'https://www.google.com/images/srpr/logo4w.png')
self.assertTrue(msgs[0].has_image)
self.update_message(msgs[0], 'No Image Again')
self.assertFalse(msgs[0].has_image)
def test_has_attachment(self) -> None:
hamlet = self.example_user('hamlet')
dummy_path_ids = self.setup_dummy_attachments(hamlet)
dummy_urls = ["http://zulip.testserver/user_uploads/{}".format(x) for x in dummy_path_ids]
self.subscribe(hamlet, "Denmark")
body = ("Files ...[zulip.txt]({}) {} {}").format(dummy_urls[0], dummy_urls[1], dummy_urls[2])
msg_id = self.send_stream_message(hamlet, "Denmark", body, "test")
msg = Message.objects.get(id=msg_id)
self.assertTrue(msg.has_attachment)
self.update_message(msg, 'No Attachments')
self.assertFalse(msg.has_attachment)
self.update_message(msg, body)
self.assertTrue(msg.has_attachment)
self.update_message(msg, 'Link in code: `{}`'.format(dummy_urls[1]))
self.assertFalse(msg.has_attachment)
# Test blockquotes
self.update_message(msg, '> {}'.format(dummy_urls[1]))
self.assertTrue(msg.has_attachment)
# Additional test to check has_attachment is being set is due to the correct attachment.
self.update_message(msg, 'Outside: {}. In code: `{}`.'.format(dummy_urls[0], dummy_urls[1]))
self.assertTrue(msg.has_attachment)
self.assertTrue(msg.attachment_set.filter(path_id=dummy_path_ids[0]))
self.assertEqual(msg.attachment_set.count(), 1)
self.update_message(msg, 'Outside: {}. In code: `{}`.'.format(dummy_urls[1], dummy_urls[0]))
self.assertTrue(msg.has_attachment)
self.assertTrue(msg.attachment_set.filter(path_id=dummy_path_ids[1]))
self.assertEqual(msg.attachment_set.count(), 1)
self.update_message(msg, 'Both in code: `{} {}`.'.format(dummy_urls[1], dummy_urls[0]))
self.assertFalse(msg.has_attachment)
self.assertEqual(msg.attachment_set.count(), 0)
def test_potential_attachment_path_ids(self) -> None:
    """Editing a message should trigger do_claim_attachments only for
    links that point at this server's /user_uploads/ paths; we observe
    this by wrapping the real function with a mock.
    """
    hamlet = self.example_user('hamlet')
    self.subscribe(hamlet, "Denmark")
    dummy_path_ids = self.setup_dummy_attachments(hamlet)

    body = "Hello"
    msg_id = self.send_stream_message(hamlet, "Denmark", body, "test")
    msg = Message.objects.get(id=msg_id)

    with mock.patch("zerver.lib.actions.do_claim_attachments",
                    wraps=do_claim_attachments) as m:
        # Absolute link to this realm's own host: claimed.
        self.update_message(msg, '[link](http://{}/user_uploads/{})'.format(
            hamlet.realm.host, dummy_path_ids[0]))
        self.assertTrue(m.called)
        m.reset_mock()

        # Relative /user_uploads/ link: claimed.
        self.update_message(msg, '[link](/user_uploads/{})'.format(dummy_path_ids[1]))
        self.assertTrue(m.called)
        m.reset_mock()

        # Same path id again: no new claim happens — presumably because
        # the previous edit already claimed this attachment (verify).
        self.update_message(msg, '[new text link](/user_uploads/{})'.format(dummy_path_ids[1]))
        self.assertFalse(m.called)
        m.reset_mock()

        # It's not clear this is correct behavior
        self.update_message(msg, '[link](user_uploads/{})'.format(dummy_path_ids[2]))
        self.assertFalse(m.called)
        m.reset_mock()

        # Link to a foreign host: not an upload on this server, so not claimed.
        self.update_message(msg, '[link](https://github.com/user_uploads/{})'.format(
            dummy_path_ids[0]))
        self.assertFalse(m.called)
        m.reset_mock()
class MissedMessageTest(ZulipTestCase):
    def test_presence_idle_user_ids(self) -> None:
        """Exercise get_active_presence_idle_user_ids(): which of a
        message's recipients count as "presence idle" (and so may need
        missed-message notifications), depending on message type,
        mention flags, and recency of presence data.
        """
        UserPresence.objects.all().delete()

        sender = self.example_user('cordelia')
        realm = sender.realm
        hamlet = self.example_user('hamlet')
        othello = self.example_user('othello')
        recipient_ids = {hamlet.id, othello.id}
        message_type = 'stream'
        user_flags: Dict[int, List[str]] = {}

        def assert_missing(user_ids: List[int]) -> None:
            # NOTE: this closure reads message_type/user_flags from the
            # enclosing scope at call time, so rebinding them below
            # changes what each call checks.
            presence_idle_user_ids = get_active_presence_idle_user_ids(
                realm=realm,
                sender_id=sender.id,
                message_type=message_type,
                active_user_ids=recipient_ids,
                user_flags=user_flags,
            )
            self.assertEqual(sorted(user_ids), sorted(presence_idle_user_ids))

        def set_presence(user: UserProfile, client_name: str, ago: int) -> None:
            # Record a presence datapoint `ago` seconds in the past.
            when = timezone_now() - datetime.timedelta(seconds=ago)
            UserPresence.objects.create(
                user_profile_id=user.id,
                realm_id=user.realm_id,
                client=get_client(client_name),
                timestamp=when,
            )

        # For PMs, every recipient with no recent presence is idle.
        message_type = 'private'
        assert_missing([hamlet.id, othello.id])

        # For stream messages, only mentioned users are considered.
        message_type = 'stream'
        user_flags[hamlet.id] = ['mentioned']
        assert_missing([hamlet.id])

        # Old presence data still counts as idle ...
        set_presence(hamlet, 'iPhone', ago=5000)
        assert_missing([hamlet.id])

        # ... but recent presence does not.
        set_presence(hamlet, 'webapp', ago=15)
        assert_missing([])

        # Back to PMs: hamlet is now active, othello is still idle.
        message_type = 'private'
        assert_missing([othello.id])
class LogDictTest(ZulipTestCase):
    def test_to_log_dict(self) -> None:
        """Check that Message.to_log_dict() serializes a stream message's
        fields (sender, recipient, topic, content, ...) as expected.
        """
        sender = self.example_user('hamlet')
        message_id = self.send_stream_message(sender, 'Denmark',
                                              topic_name='Copenhagen',
                                              content='find me some good coffee shops')
        dct = Message.objects.get(id=message_id).to_log_dict()

        # The timestamp value itself varies, so just check presence.
        self.assertTrue('timestamp' in dct)

        expected_fields = {
            'content': 'find me some good coffee shops',
            'id': message_id,
            'recipient': 'Denmark',
            'sender_realm_str': 'zulip',
            'sender_email': sender.email,
            'sender_full_name': 'King Hamlet',
            'sender_id': sender.id,
            'sender_short_name': 'hamlet',
            'sending_client': 'test suite',
            DB_TOPIC_NAME: 'Copenhagen',
            'type': 'stream',
        }
        for field, expected_value in expected_fields.items():
            self.assertEqual(dct[field], expected_value)
class CheckMessageTest(ZulipTestCase):
    def test_basic_check_message_call(self) -> None:
        """check_message() should accept a valid stream message and report
        the correct sender on the returned message object."""
        sender = self.example_user('othello')
        client = make_client(name="test suite")
        stream_name = 'España y Francia'
        self.make_stream(stream_name)
        topic_name = 'issue'
        message_content = 'whatever'
        addressee = Addressee.for_stream_name(stream_name, topic_name)
        ret = check_message(sender, client, addressee, message_content)
        self.assertEqual(ret['message'].sender.id, sender.id)

    def test_bot_pm_feature(self) -> None:
        """We send a PM to a bot's owner if their bot sends a message to
        an unsubscribed stream"""
        parent = self.example_user('othello')
        bot = do_create_user(
            email='othello-bot@zulip.com',
            password='',
            realm=parent.realm,
            full_name='',
            short_name='',
            bot_type=UserProfile.DEFAULT_BOT,
            bot_owner=parent
        )
        bot.last_reminder = None

        sender = bot
        client = make_client(name="test suite")
        stream_name = 'Россия'
        topic_name = 'issue'
        addressee = Addressee.for_stream_name(stream_name, topic_name)
        message_content = 'whatever'
        old_count = message_stream_count(parent)

        # Try sending to stream that doesn't exist; this sends a reminder
        # PM to the bot's owner.
        with self.assertRaises(JsonableError):
            check_message(sender, client, addressee, message_content)
        new_count = message_stream_count(parent)
        self.assertEqual(new_count, old_count + 1)
        self.assertIn("that stream does not exist.", most_recent_message(parent).content)

        # Try sending to stream that exists with no subscribers soon
        # after; due to rate-limiting, this should send nothing.
        self.make_stream(stream_name)
        # Return value is intentionally unused here; we only care about
        # the rate-limited owner-notification side effect.
        check_message(sender, client, addressee, message_content)
        new_count = message_stream_count(parent)
        self.assertEqual(new_count, old_count + 1)

        # Try sending to stream that exists with no subscribers longer
        # after (simulated by backdating last_reminder past the rate
        # limit); this should send an error to the bot owner that the
        # stream doesn't have any subscribers.
        assert(sender.last_reminder is not None)
        sender.last_reminder = sender.last_reminder - datetime.timedelta(hours=1)
        sender.save(update_fields=["last_reminder"])
        ret = check_message(sender, client, addressee, message_content)
        new_count = message_stream_count(parent)
        self.assertEqual(new_count, old_count + 2)
        self.assertEqual(ret['message'].sender.email, 'othello-bot@zulip.com')
        self.assertIn("does not have any subscribers", most_recent_message(parent).content)

    def test_bot_pm_error_handling(self) -> None:
        # This just tests some defensive code: the owner-notification
        # helper should be a silent no-op (last_reminder stays None) when
        # called with a mismatched realm/sender or a deactivated realm.
        cordelia = self.example_user('cordelia')
        test_bot = self.create_test_bot(
            short_name='test',
            user_profile=cordelia,
        )
        content = 'whatever'
        good_realm = test_bot.realm
        wrong_realm = get_realm("zephyr")
        wrong_sender = cordelia

        send_rate_limited_pm_notification_to_bot_owner(test_bot, wrong_realm, content)
        self.assertEqual(test_bot.last_reminder, None)

        send_rate_limited_pm_notification_to_bot_owner(wrong_sender, good_realm, content)
        self.assertEqual(test_bot.last_reminder, None)

        test_bot.realm.deactivated = True
        send_rate_limited_pm_notification_to_bot_owner(test_bot, good_realm, content)
        self.assertEqual(test_bot.last_reminder, None)
class DeleteMessageTest(ZulipTestCase):
    def test_delete_message_invalid_request_format(self) -> None:
        """The DELETE endpoint takes the message id from the URL path; a
        (valid) id supplied in the request payload must not be honored.
        """
        self.login('iago')
        hamlet = self.example_user('hamlet')
        msg_id = self.send_stream_message(hamlet, "Scotland")
        # URL names a nonexistent id; the valid id in the payload is ignored.
        result = self.client_delete('/json/messages/{msg_id}'.format(msg_id=msg_id + 1),
                                    {'message_id': msg_id})
        self.assert_json_error(result, "Invalid message(s)")
        result = self.client_delete('/json/messages/{msg_id}'.format(msg_id=msg_id))
        self.assert_json_success(result)

    def test_delete_message_by_user(self) -> None:
        """Cover the realm settings governing who may delete a message
        and the time limit for doing so (admins are exempt from both).
        """
        def set_message_deleting_params(allow_message_deleting: bool,
                                        message_content_delete_limit_seconds: int) -> None:
            # Update the realm's deletion policy via the API (as an admin).
            self.login('iago')
            result = self.client_patch("/json/realm", {
                'allow_message_deleting': ujson.dumps(allow_message_deleting),
                'message_content_delete_limit_seconds': message_content_delete_limit_seconds
            })
            self.assert_json_success(result)

        def test_delete_message_by_admin(msg_id: int) -> HttpResponse:
            self.login('iago')
            result = self.client_delete('/json/messages/{msg_id}'.format(msg_id=msg_id))
            return result

        def test_delete_message_by_owner(msg_id: int) -> HttpResponse:
            self.login('hamlet')
            result = self.client_delete('/json/messages/{msg_id}'.format(msg_id=msg_id))
            return result

        def test_delete_message_by_other_user(msg_id: int) -> HttpResponse:
            self.login('cordelia')
            result = self.client_delete('/json/messages/{msg_id}'.format(msg_id=msg_id))
            return result

        # Test if message deleting is not allowed (the default).
        set_message_deleting_params(False, 0)
        hamlet = self.example_user('hamlet')
        self.login_user(hamlet)
        msg_id = self.send_stream_message(hamlet, "Scotland")

        result = test_delete_message_by_owner(msg_id=msg_id)
        self.assert_json_error(result, "You don't have permission to delete this message")

        result = test_delete_message_by_other_user(msg_id=msg_id)
        self.assert_json_error(result, "You don't have permission to delete this message")

        result = test_delete_message_by_admin(msg_id=msg_id)
        self.assert_json_success(result)

        # Test if message deleting is allowed.
        # Test if time limit is zero (no limit).
        set_message_deleting_params(True, 0)
        msg_id = self.send_stream_message(hamlet, "Scotland")
        message = Message.objects.get(id=msg_id)
        # Backdate the message well past any plausible limit.
        message.date_sent = message.date_sent - datetime.timedelta(seconds=600)
        message.save()

        result = test_delete_message_by_other_user(msg_id=msg_id)
        self.assert_json_error(result, "You don't have permission to delete this message")

        result = test_delete_message_by_owner(msg_id=msg_id)
        self.assert_json_success(result)

        # Test if time limit is non-zero: msg_id_1 is within the 240s
        # limit, msg_id_2 is past it.
        set_message_deleting_params(True, 240)
        msg_id_1 = self.send_stream_message(hamlet, "Scotland")
        message = Message.objects.get(id=msg_id_1)
        message.date_sent = message.date_sent - datetime.timedelta(seconds=120)
        message.save()

        msg_id_2 = self.send_stream_message(hamlet, "Scotland")
        message = Message.objects.get(id=msg_id_2)
        message.date_sent = message.date_sent - datetime.timedelta(seconds=360)
        message.save()

        result = test_delete_message_by_other_user(msg_id=msg_id_1)
        self.assert_json_error(result, "You don't have permission to delete this message")

        result = test_delete_message_by_owner(msg_id=msg_id_1)
        self.assert_json_success(result)
        result = test_delete_message_by_owner(msg_id=msg_id_2)
        self.assert_json_error(result, "The time limit for deleting this message has passed")

        # No limit for admin.
        result = test_delete_message_by_admin(msg_id=msg_id_2)
        self.assert_json_success(result)

        # Test multiple delete requests with no latency issues
        msg_id = self.send_stream_message(hamlet, "Scotland")
        result = test_delete_message_by_owner(msg_id=msg_id)
        self.assert_json_success(result)
        result = test_delete_message_by_owner(msg_id=msg_id)
        self.assert_json_error(result, "Invalid message(s)")

        # Test handling of 500 error caused by multiple delete requests due to latency.
        # see issue #11219.
        with mock.patch("zerver.views.messages.do_delete_messages") as m, \
                mock.patch("zerver.views.messages.validate_can_delete_message", return_value=None), \
                mock.patch("zerver.views.messages.access_message", return_value=(None, None)):
            m.side_effect = IntegrityError()
            result = test_delete_message_by_owner(msg_id=msg_id)
            self.assert_json_error(result, "Message already deleted")
            m.side_effect = Message.DoesNotExist()
            result = test_delete_message_by_owner(msg_id=msg_id)
            self.assert_json_error(result, "Message already deleted")
class SoftDeactivationMessageTest(ZulipTestCase):
    """Tests for the soft-deactivation machinery: long-term-idle users
    stop getting UserMessage rows on every message; missing rows are
    backfilled by add_missing_messages() when they come back.
    """

    def test_reactivate_user_if_soft_deactivated(self) -> None:
        """Reactivating a soft-deactivated user should backfill the
        UserMessage rows for messages sent while they were idle, log a
        RealmAuditLog entry, and clear long_term_idle."""
        recipient_list = [self.example_user("hamlet"), self.example_user("iago")]
        for user_profile in recipient_list:
            self.subscribe(user_profile, "Denmark")

        sender = self.example_user('iago')
        stream_name = 'Denmark'
        topic_name = 'foo'

        def last_realm_audit_log_entry(event_type: int) -> RealmAuditLog:
            return RealmAuditLog.objects.filter(
                event_type=event_type
            ).order_by('-event_time')[0]

        long_term_idle_user = self.example_user('hamlet')
        # We are sending this message to ensure that long_term_idle_user has
        # at least one UserMessage row.
        self.send_stream_message(long_term_idle_user, stream_name)
        do_soft_deactivate_users([long_term_idle_user])

        message = 'Test Message 1'
        message_id = self.send_stream_message(sender, stream_name,
                                              message, topic_name)
        idle_user_msg_list = get_user_messages(long_term_idle_user)
        idle_user_msg_count = len(idle_user_msg_list)
        # While soft-deactivated, no UserMessage row was created.
        self.assertNotEqual(idle_user_msg_list[-1].content, message)
        with queries_captured() as queries:
            reactivate_user_if_soft_deactivated(long_term_idle_user)
        self.assert_length(queries, 8)
        self.assertFalse(long_term_idle_user.long_term_idle)
        self.assertEqual(last_realm_audit_log_entry(
            RealmAuditLog.USER_SOFT_ACTIVATED).modified_user, long_term_idle_user)
        idle_user_msg_list = get_user_messages(long_term_idle_user)
        self.assertEqual(len(idle_user_msg_list), idle_user_msg_count + 1)
        self.assertEqual(idle_user_msg_list[-1].content, message)
        long_term_idle_user.refresh_from_db()
        self.assertEqual(long_term_idle_user.last_active_message_id, message_id)

    def test_add_missing_messages(self) -> None:
        """Exercise add_missing_messages() across subscription churn:
        only messages sent to streams while the user was subscribed
        should get backfilled UserMessage rows."""
        recipient_list = [self.example_user("hamlet"), self.example_user("iago")]
        for user_profile in recipient_list:
            self.subscribe(user_profile, "Denmark")

        sender = self.example_user('iago')
        realm = sender.realm
        sending_client = make_client(name="test suite")
        stream_name = 'Denmark'
        stream = get_stream(stream_name, realm)
        topic_name = 'foo'

        def send_fake_message(message_content: str, stream: Stream) -> Message:
            # Create a Message row directly, bypassing do_send_messages,
            # so no UserMessage rows are created for anyone.
            recipient = stream.recipient
            message = Message(sender = sender,
                              recipient = recipient,
                              content = message_content,
                              date_sent = timezone_now(),
                              sending_client = sending_client)
            message.set_topic_name(topic_name)
            message.save()
            return message

        long_term_idle_user = self.example_user('hamlet')
        self.send_stream_message(long_term_idle_user, stream_name)
        do_soft_deactivate_users([long_term_idle_user])

        # Test that add_missing_messages() in simplest case of adding a
        # message for which UserMessage row doesn't exist for this user.
        sent_message = send_fake_message('Test Message 1', stream)
        idle_user_msg_list = get_user_messages(long_term_idle_user)
        idle_user_msg_count = len(idle_user_msg_list)
        self.assertNotEqual(idle_user_msg_list[-1], sent_message)
        with queries_captured() as queries:
            add_missing_messages(long_term_idle_user)
        self.assert_length(queries, 6)
        idle_user_msg_list = get_user_messages(long_term_idle_user)
        self.assertEqual(len(idle_user_msg_list), idle_user_msg_count + 1)
        self.assertEqual(idle_user_msg_list[-1], sent_message)
        long_term_idle_user.refresh_from_db()
        self.assertEqual(long_term_idle_user.last_active_message_id, sent_message.id)

        # Test that add_missing_messages() only adds messages that aren't
        # already present in the UserMessage table. This test works on the
        # fact that previous test just above this added a message but didn't
        # update the last_active_message_id field for the user.
        sent_message = send_fake_message('Test Message 2', stream)
        idle_user_msg_list = get_user_messages(long_term_idle_user)
        idle_user_msg_count = len(idle_user_msg_list)
        self.assertNotEqual(idle_user_msg_list[-1], sent_message)
        with queries_captured() as queries:
            add_missing_messages(long_term_idle_user)
        self.assert_length(queries, 7)
        idle_user_msg_list = get_user_messages(long_term_idle_user)
        self.assertEqual(len(idle_user_msg_list), idle_user_msg_count + 1)
        self.assertEqual(idle_user_msg_list[-1], sent_message)
        long_term_idle_user.refresh_from_db()
        self.assertEqual(long_term_idle_user.last_active_message_id, sent_message.id)

        # Test UserMessage rows are created correctly in case of stream
        # Subscription was altered by admin while user was away.

        # Test for a public stream.
        sent_message_list = []
        sent_message_list.append(send_fake_message('Test Message 3', stream))
        # Alter subscription to stream.
        self.unsubscribe(long_term_idle_user, stream_name)
        send_fake_message('Test Message 4', stream)
        self.subscribe(long_term_idle_user, stream_name)
        sent_message_list.append(send_fake_message('Test Message 5', stream))
        sent_message_list.reverse()
        idle_user_msg_list = get_user_messages(long_term_idle_user)
        idle_user_msg_count = len(idle_user_msg_list)
        for sent_message in sent_message_list:
            self.assertNotEqual(idle_user_msg_list.pop(), sent_message)
        with queries_captured() as queries:
            add_missing_messages(long_term_idle_user)
        self.assert_length(queries, 6)
        idle_user_msg_list = get_user_messages(long_term_idle_user)
        # Messages 3 and 5 backfilled; message 4 (sent while
        # unsubscribed) correctly skipped.
        self.assertEqual(len(idle_user_msg_list), idle_user_msg_count + 2)
        for sent_message in sent_message_list:
            self.assertEqual(idle_user_msg_list.pop(), sent_message)
        long_term_idle_user.refresh_from_db()
        self.assertEqual(long_term_idle_user.last_active_message_id, sent_message_list[0].id)

        # Test consecutive subscribe/unsubscribe in a public stream
        sent_message_list = []

        sent_message_list.append(send_fake_message('Test Message 6', stream))
        # Unsubscribe from stream and then immediately subscribe back again.
        self.unsubscribe(long_term_idle_user, stream_name)
        self.subscribe(long_term_idle_user, stream_name)
        sent_message_list.append(send_fake_message('Test Message 7', stream))
        # Again unsubscribe from stream and send a message.
        # This will make sure that if initially in an unsubscribed state
        # a consecutive subscribe/unsubscribe doesn't misbehave.
        self.unsubscribe(long_term_idle_user, stream_name)
        send_fake_message('Test Message 8', stream)
        # Do a subscribe and unsubscribe immediately.
        self.subscribe(long_term_idle_user, stream_name)
        self.unsubscribe(long_term_idle_user, stream_name)

        sent_message_list.reverse()
        idle_user_msg_list = get_user_messages(long_term_idle_user)
        idle_user_msg_count = len(idle_user_msg_list)
        for sent_message in sent_message_list:
            self.assertNotEqual(idle_user_msg_list.pop(), sent_message)
        with queries_captured() as queries:
            add_missing_messages(long_term_idle_user)
        self.assert_length(queries, 6)
        idle_user_msg_list = get_user_messages(long_term_idle_user)
        self.assertEqual(len(idle_user_msg_list), idle_user_msg_count + 2)
        for sent_message in sent_message_list:
            self.assertEqual(idle_user_msg_list.pop(), sent_message)
        long_term_idle_user.refresh_from_db()
        self.assertEqual(long_term_idle_user.last_active_message_id, sent_message_list[0].id)

        # Test for when user unsubscribes before soft deactivation
        # (must reactivate them in order to do this).

        do_soft_activate_users([long_term_idle_user])
        self.subscribe(long_term_idle_user, stream_name)
        # Send a real message to update last_active_message_id
        sent_message_id = self.send_stream_message(
            sender, stream_name, 'Test Message 9')
        self.unsubscribe(long_term_idle_user, stream_name)
        # Soft deactivate and send another message to the unsubscribed stream.
        do_soft_deactivate_users([long_term_idle_user])
        send_fake_message('Test Message 10', stream)

        idle_user_msg_list = get_user_messages(long_term_idle_user)
        idle_user_msg_count = len(idle_user_msg_list)
        self.assertEqual(idle_user_msg_list[-1].id, sent_message_id)
        with queries_captured() as queries:
            add_missing_messages(long_term_idle_user)
        # There are no streams to fetch missing messages from, so
        # the Message.objects query will be avoided.
        self.assert_length(queries, 4)
        idle_user_msg_list = get_user_messages(long_term_idle_user)
        # No new UserMessage rows should have been created.
        self.assertEqual(len(idle_user_msg_list), idle_user_msg_count)

        # Note: At this point in this test we have long_term_idle_user
        # unsubscribed from the 'Denmark' stream.

        # Test for a Private Stream.
        stream_name = "Core"
        private_stream = self.make_stream('Core', invite_only=True)
        self.subscribe(self.example_user("iago"), stream_name)
        sent_message_list = []
        # Messages sent before the user ever had access to the private
        # stream must not be backfilled.
        send_fake_message('Test Message 11', private_stream)
        self.subscribe(self.example_user("hamlet"), stream_name)
        sent_message_list.append(send_fake_message('Test Message 12', private_stream))
        self.unsubscribe(long_term_idle_user, stream_name)
        send_fake_message('Test Message 13', private_stream)
        self.subscribe(long_term_idle_user, stream_name)
        sent_message_list.append(send_fake_message('Test Message 14', private_stream))
        sent_message_list.reverse()
        idle_user_msg_list = get_user_messages(long_term_idle_user)
        idle_user_msg_count = len(idle_user_msg_list)
        for sent_message in sent_message_list:
            self.assertNotEqual(idle_user_msg_list.pop(), sent_message)
        with queries_captured() as queries:
            add_missing_messages(long_term_idle_user)
        self.assert_length(queries, 6)
        idle_user_msg_list = get_user_messages(long_term_idle_user)
        self.assertEqual(len(idle_user_msg_list), idle_user_msg_count + 2)
        for sent_message in sent_message_list:
            self.assertEqual(idle_user_msg_list.pop(), sent_message)
        long_term_idle_user.refresh_from_db()
        self.assertEqual(long_term_idle_user.last_active_message_id, sent_message_list[0].id)

    @mock.patch('zerver.lib.soft_deactivation.BULK_CREATE_BATCH_SIZE', 2)
    def test_add_missing_messages_pagination(self) -> None:
        """With the bulk-create batch size patched down to 2, backfilling
        5 messages must work across multiple batches."""
        recipient_list = [self.example_user("hamlet"), self.example_user("iago")]
        stream_name = 'Denmark'
        for user_profile in recipient_list:
            self.subscribe(user_profile, stream_name)

        sender = self.example_user('iago')
        long_term_idle_user = self.example_user('hamlet')
        self.send_stream_message(long_term_idle_user, stream_name)
        do_soft_deactivate_users([long_term_idle_user])

        num_new_messages = 5
        message_ids = []
        for _ in range(num_new_messages):
            message_id = self.send_stream_message(sender, stream_name)
            message_ids.append(message_id)

        idle_user_msg_list = get_user_messages(long_term_idle_user)
        idle_user_msg_count = len(idle_user_msg_list)
        with queries_captured() as queries:
            add_missing_messages(long_term_idle_user)
        self.assert_length(queries, 10)
        idle_user_msg_list = get_user_messages(long_term_idle_user)
        self.assertEqual(len(idle_user_msg_list), idle_user_msg_count + num_new_messages)
        long_term_idle_user.refresh_from_db()
        self.assertEqual(long_term_idle_user.last_active_message_id, message_ids[-1])

    def test_user_message_filter(self) -> None:
        # In this test we are basically testing out the logic used out in
        # do_send_messages() in action.py for filtering the messages for which
        # UserMessage rows should be created for a soft-deactivated user.
        recipient_list = [
            self.example_user("hamlet"),
            self.example_user("iago"),
            self.example_user('cordelia')
        ]
        for user_profile in recipient_list:
            self.subscribe(user_profile, "Denmark")

        cordelia = self.example_user('cordelia')
        sender = self.example_user('iago')
        stream_name = 'Denmark'
        topic_name = 'foo'

        def send_stream_message(content: str) -> None:
            self.send_stream_message(sender, stream_name,
                                     content, topic_name)

        def send_personal_message(content: str) -> None:
            self.send_personal_message(sender, self.example_user("hamlet"), content)

        long_term_idle_user = self.example_user('hamlet')
        self.send_stream_message(long_term_idle_user, stream_name)
        do_soft_deactivate_users([long_term_idle_user])

        def assert_um_count(user: UserProfile, count: int) -> None:
            user_messages = get_user_messages(user)
            self.assertEqual(len(user_messages), count)

        def assert_last_um_content(user: UserProfile, content: str, negate: bool=False) -> None:
            user_messages = get_user_messages(user)
            if negate:
                self.assertNotEqual(user_messages[-1].content, content)
            else:
                self.assertEqual(user_messages[-1].content, content)

        # Test that sending a message to a stream with soft deactivated user
        # doesn't end up creating UserMessage row for deactivated user.
        general_user_msg_count = len(get_user_messages(cordelia))
        soft_deactivated_user_msg_count = len(get_user_messages(long_term_idle_user))
        message = 'Test Message 1'
        send_stream_message(message)
        assert_last_um_content(long_term_idle_user, message, negate=True)
        assert_um_count(long_term_idle_user, soft_deactivated_user_msg_count)
        assert_um_count(cordelia, general_user_msg_count + 1)
        assert_last_um_content(cordelia, message)

        # Test that sending a message to a stream with soft deactivated user
        # and push/email notifications on creates a UserMessage row for the
        # deactivated user.
        sub = get_subscription(stream_name, long_term_idle_user)
        sub.push_notifications = True
        sub.save()
        general_user_msg_count = len(get_user_messages(cordelia))
        soft_deactivated_user_msg_count = len(get_user_messages(long_term_idle_user))
        message = 'Test private stream message'
        send_stream_message(message)
        assert_um_count(long_term_idle_user, soft_deactivated_user_msg_count + 1)
        assert_last_um_content(long_term_idle_user, message)
        sub.push_notifications = False
        sub.save()

        # Test sending a private message to soft deactivated user creates
        # UserMessage row.
        soft_deactivated_user_msg_count = len(get_user_messages(long_term_idle_user))
        message = 'Test PM'
        send_personal_message(message)
        assert_um_count(long_term_idle_user, soft_deactivated_user_msg_count + 1)
        assert_last_um_content(long_term_idle_user, message)

        # Test UserMessage row is created while user is deactivated if
        # user itself is mentioned.
        general_user_msg_count = len(get_user_messages(cordelia))
        soft_deactivated_user_msg_count = len(get_user_messages(long_term_idle_user))
        message = 'Test @**King Hamlet** mention'
        send_stream_message(message)
        assert_last_um_content(long_term_idle_user, message)
        assert_um_count(long_term_idle_user, soft_deactivated_user_msg_count + 1)
        assert_um_count(cordelia, general_user_msg_count + 1)
        assert_last_um_content(cordelia, message)

        # Test UserMessage row is not created while user is deactivated if
        # anyone is mentioned but the user.
        general_user_msg_count = len(get_user_messages(cordelia))
        soft_deactivated_user_msg_count = len(get_user_messages(long_term_idle_user))
        message = 'Test @**Cordelia Lear**  mention'
        send_stream_message(message)
        assert_last_um_content(long_term_idle_user, message, negate=True)
        assert_um_count(long_term_idle_user, soft_deactivated_user_msg_count)
        assert_um_count(cordelia, general_user_msg_count + 1)
        assert_last_um_content(cordelia, message)

        # Test UserMessage row is created while user is deactivated if
        # there is a wildcard mention such as @all or @everyone
        general_user_msg_count = len(get_user_messages(cordelia))
        soft_deactivated_user_msg_count = len(get_user_messages(long_term_idle_user))
        message = 'Test @**all** mention'
        send_stream_message(message)
        assert_last_um_content(long_term_idle_user, message)
        assert_um_count(long_term_idle_user, soft_deactivated_user_msg_count + 1)
        assert_um_count(cordelia, general_user_msg_count + 1)
        assert_last_um_content(cordelia, message)

        general_user_msg_count = len(get_user_messages(cordelia))
        soft_deactivated_user_msg_count = len(get_user_messages(long_term_idle_user))
        message = 'Test @**everyone** mention'
        send_stream_message(message)
        assert_last_um_content(long_term_idle_user, message)
        assert_um_count(long_term_idle_user, soft_deactivated_user_msg_count + 1)
        assert_um_count(cordelia, general_user_msg_count + 1)
        assert_last_um_content(cordelia, message)

        general_user_msg_count = len(get_user_messages(cordelia))
        soft_deactivated_user_msg_count = len(get_user_messages(long_term_idle_user))
        message = 'Test @**stream** mention'
        send_stream_message(message)
        assert_last_um_content(long_term_idle_user, message)
        assert_um_count(long_term_idle_user, soft_deactivated_user_msg_count + 1)
        assert_um_count(cordelia, general_user_msg_count + 1)
        assert_last_um_content(cordelia, message)

        # Test UserMessage row IS created while user is deactivated if
        # there is an alert word in the message.  (The original comment
        # here said "not created", contradicting the assertions below.)
        do_add_alert_words(long_term_idle_user, ['test_alert_word'])
        general_user_msg_count = len(get_user_messages(cordelia))
        soft_deactivated_user_msg_count = len(get_user_messages(long_term_idle_user))
        message = 'Testing test_alert_word'
        send_stream_message(message)
        assert_last_um_content(long_term_idle_user, message)
        assert_um_count(long_term_idle_user, soft_deactivated_user_msg_count + 1)
        assert_um_count(cordelia, general_user_msg_count + 1)
        assert_last_um_content(cordelia, message)

        # Test UserMessage row is NOT created while user is deactivated
        # if the message is a /me message.  (The original comment here
        # said "created", contradicting the assertions below.)
        general_user_msg_count = len(get_user_messages(cordelia))
        soft_deactivated_user_msg_count = len(get_user_messages(long_term_idle_user))
        message = '/me says test'
        send_stream_message(message)
        assert_last_um_content(long_term_idle_user, message, negate=True)
        assert_um_count(long_term_idle_user, soft_deactivated_user_msg_count)
        assert_um_count(cordelia, general_user_msg_count + 1)
        assert_last_um_content(cordelia, message)
class MessageHydrationTest(ZulipTestCase):
    def test_hydrate_stream_recipient_info(self) -> None:
        """For a stream message dict, MessageDict.hydrate_recipient_info()
        should set display_recipient to the stream name and type to
        'stream'."""
        realm = get_realm('zulip')
        cordelia = self.example_user('cordelia')

        stream_id = get_stream('Verona', realm).id

        obj = dict(
            recipient_type=Recipient.STREAM,
            recipient_type_id=stream_id,
            sender_is_mirror_dummy=False,
            sender_email=cordelia.email,
            sender_full_name=cordelia.full_name,
            sender_short_name=cordelia.short_name,
            sender_id=cordelia.id,
        )

        MessageDict.hydrate_recipient_info(obj, 'Verona')

        self.assertEqual(obj['display_recipient'], 'Verona')
        self.assertEqual(obj['type'], 'stream')

    def test_hydrate_pm_recipient_info(self) -> None:
        """For a PM, hydrate_recipient_info() should append the sender to
        the display_recipient participant list and set type to
        'private'."""
        cordelia = self.example_user('cordelia')
        display_recipient: List[UserDisplayRecipient] = [
            dict(
                email='aaron@example.com',
                full_name='Aaron Smith',
                short_name='Aaron',
                id=999,
                is_mirror_dummy=False
            ),
        ]

        obj = dict(
            recipient_type=Recipient.PERSONAL,
            recipient_type_id=None,
            sender_is_mirror_dummy=False,
            sender_email=cordelia.email,
            sender_full_name=cordelia.full_name,
            sender_short_name=cordelia.short_name,
            sender_id=cordelia.id,
        )

        MessageDict.hydrate_recipient_info(obj, display_recipient)

        self.assertEqual(
            obj['display_recipient'],
            [
                dict(
                    email='aaron@example.com',
                    full_name='Aaron Smith',
                    short_name='Aaron',
                    id=999,
                    is_mirror_dummy=False
                ),
                dict(
                    email=cordelia.email,
                    full_name=cordelia.full_name,
                    id=cordelia.id,
                    short_name=cordelia.short_name,
                    is_mirror_dummy=False,
                ),
            ],
        )
        self.assertEqual(obj['type'], 'private')

    def test_messages_for_ids(self) -> None:
        """messages_for_ids() should return rendered content plus the
        per-user flags that were passed in."""
        hamlet = self.example_user('hamlet')
        cordelia = self.example_user('cordelia')

        stream_name = 'test stream'
        self.subscribe(cordelia, stream_name)

        old_message_id = self.send_stream_message(cordelia, stream_name, content='foo')

        self.subscribe(hamlet, stream_name)

        content = 'hello @**King Hamlet**'
        new_message_id = self.send_stream_message(cordelia, stream_name, content=content)

        user_message_flags = {
            old_message_id: ['read', 'historical'],
            new_message_id: ['mentioned'],
        }

        messages = messages_for_ids(
            message_ids=[old_message_id, new_message_id],
            user_message_flags=user_message_flags,
            search_fields={},
            apply_markdown=True,
            client_gravatar=True,
            allow_edit_history=False,
        )

        self.assertEqual(len(messages), 2)

        # Result order is not assumed; pick each message out by id.
        for message in messages:
            if message['id'] == old_message_id:
                old_message = message
            elif message['id'] == new_message_id:
                new_message = message

        self.assertEqual(old_message['content'], '<p>foo</p>')
        self.assertEqual(old_message['flags'], ['read', 'historical'])

        self.assertIn('class="user-mention"', new_message['content'])
        self.assertEqual(new_message['flags'], ['mentioned'])

    def test_display_recipient_up_to_date(self) -> None:
        """
        This is a test for a bug where due to caching of message_dicts,
        after updating a user's information, fetching those cached messages
        via messages_for_ids would return message_dicts with display_recipient
        still having the old information. The returned message_dicts should have
        up-to-date display_recipients and we check for that here.
        """
        hamlet = self.example_user('hamlet')
        cordelia = self.example_user('cordelia')
        message_id = self.send_personal_message(hamlet, cordelia, 'test')

        cordelia_recipient = cordelia.recipient
        # Cause the display_recipient to get cached:
        get_display_recipient(cordelia_recipient)

        # Change cordelia's email:
        cordelia_new_email = 'new-cordelia@zulip.com'
        cordelia.email = cordelia_new_email
        cordelia.save()

        # Local display_recipient cache needs to be flushed.
        # flush_per_request_caches() is called after every request,
        # so it makes sense to run it here.
        flush_per_request_caches()

        messages = messages_for_ids(
            message_ids=[message_id],
            user_message_flags={message_id: ['read']},
            search_fields={},
            apply_markdown=True,
            client_gravatar=True,
            allow_edit_history=False,
        )
        message = messages[0]

        # Find which display_recipient in the list is cordelia:
        for display_recipient in message['display_recipient']:
            if display_recipient['short_name'] == 'cordelia':
                cordelia_display_recipient = display_recipient

        # Make sure the email is up-to-date.
        self.assertEqual(cordelia_display_recipient['email'], cordelia_new_email)
class TestMessageForIdsDisplayRecipientFetching(ZulipTestCase):
    """Verify that messages_for_ids returns correctly shaped display_recipient
    data for personal, stream, and huddle messages (and a mix of all three)."""

    def _verify_display_recipient(self, display_recipient: DisplayRecipientT,
                                  expected_recipient_objects: Union[Stream, List[UserProfile]]) -> None:
        # For stream messages display_recipient is just the stream name;
        # otherwise it is a list of user dicts, one per recipient user.
        if isinstance(expected_recipient_objects, Stream):
            self.assertEqual(display_recipient, expected_recipient_objects.name)
        else:
            for user_profile in expected_recipient_objects:
                recipient_dict: UserDisplayRecipient = {
                    'email': user_profile.email,
                    'full_name': user_profile.full_name,
                    'short_name': user_profile.short_name,
                    'id': user_profile.id,
                    'is_mirror_dummy': user_profile.is_mirror_dummy,
                }
                self.assertTrue(recipient_dict in display_recipient)

    def test_display_recipient_personal(self) -> None:
        """1:1 private messages list both participants as display_recipient."""
        hamlet = self.example_user('hamlet')
        cordelia = self.example_user('cordelia')
        othello = self.example_user('othello')
        message_ids = [
            self.send_personal_message(hamlet, cordelia, 'test'),
            self.send_personal_message(cordelia, othello, 'test')
        ]
        messages = messages_for_ids(
            message_ids=message_ids,
            user_message_flags={message_id: ['read'] for message_id in message_ids},
            search_fields={},
            apply_markdown=True,
            client_gravatar=True,
            allow_edit_history=False,
        )
        self._verify_display_recipient(messages[0]['display_recipient'], [hamlet, cordelia])
        self._verify_display_recipient(messages[1]['display_recipient'], [cordelia, othello])

    def test_display_recipient_stream(self) -> None:
        """Stream messages use the stream name as display_recipient."""
        cordelia = self.example_user('cordelia')
        message_ids = [
            self.send_stream_message(cordelia, "Verona", content='test'),
            self.send_stream_message(cordelia, "Denmark", content='test')
        ]
        messages = messages_for_ids(
            message_ids=message_ids,
            user_message_flags={message_id: ['read'] for message_id in message_ids},
            search_fields={},
            apply_markdown=True,
            client_gravatar=True,
            allow_edit_history=False,
        )
        self._verify_display_recipient(messages[0]['display_recipient'], get_stream("Verona", cordelia.realm))
        self._verify_display_recipient(messages[1]['display_recipient'], get_stream("Denmark", cordelia.realm))

    def test_display_recipient_huddle(self) -> None:
        """Group PMs list every participant (sender included) as display_recipient."""
        hamlet = self.example_user('hamlet')
        cordelia = self.example_user('cordelia')
        othello = self.example_user('othello')
        iago = self.example_user('iago')
        message_ids = [
            self.send_huddle_message(hamlet, [cordelia, othello], 'test'),
            self.send_huddle_message(cordelia, [hamlet, othello, iago], 'test')
        ]
        messages = messages_for_ids(
            message_ids=message_ids,
            user_message_flags={message_id: ['read'] for message_id in message_ids},
            search_fields={},
            apply_markdown=True,
            client_gravatar=True,
            allow_edit_history=False,
        )
        self._verify_display_recipient(messages[0]['display_recipient'], [hamlet, cordelia, othello])
        self._verify_display_recipient(messages[1]['display_recipient'], [hamlet, cordelia, othello, iago])

    def test_display_recipient_various_types(self) -> None:
        """A single messages_for_ids call with interleaved recipient types
        must resolve each message's display_recipient correctly."""
        hamlet = self.example_user('hamlet')
        cordelia = self.example_user('cordelia')
        othello = self.example_user('othello')
        iago = self.example_user('iago')
        message_ids = [
            self.send_huddle_message(hamlet, [cordelia, othello], 'test'),
            self.send_stream_message(cordelia, "Verona", content='test'),
            self.send_personal_message(hamlet, cordelia, 'test'),
            self.send_stream_message(cordelia, "Denmark", content='test'),
            self.send_huddle_message(cordelia, [hamlet, othello, iago], 'test'),
            self.send_personal_message(cordelia, othello, 'test')
        ]
        messages = messages_for_ids(
            message_ids=message_ids,
            user_message_flags={message_id: ['read'] for message_id in message_ids},
            search_fields={},
            apply_markdown=True,
            client_gravatar=True,
            allow_edit_history=False,
        )
        self._verify_display_recipient(messages[0]['display_recipient'], [hamlet, cordelia, othello])
        self._verify_display_recipient(messages[1]['display_recipient'], get_stream("Verona", hamlet.realm))
        self._verify_display_recipient(messages[2]['display_recipient'], [hamlet, cordelia])
        self._verify_display_recipient(messages[3]['display_recipient'], get_stream("Denmark", hamlet.realm))
        self._verify_display_recipient(messages[4]['display_recipient'], [hamlet, cordelia, othello, iago])
        self._verify_display_recipient(messages[5]['display_recipient'], [cordelia, othello])
class MessageVisibilityTest(ZulipTestCase):
    """Tests for the realm first_visible_message_id machinery, which hides
    messages older than a realm's message_visibility_limit."""

    def test_update_first_visible_message_id(self) -> None:
        Message.objects.all().delete()
        message_ids = [self.send_stream_message(self.example_user("othello"), "Scotland") for i in range(15)]
        # If message_visibility_limit is None update_first_visible_message_id
        # should set first_visible_message_id to 0
        realm = get_realm("zulip")
        realm.message_visibility_limit = None
        # Setting to a random value other than 0 as the default value of
        # first_visible_message_id is 0
        realm.first_visible_message_id = 5
        realm.save()
        update_first_visible_message_id(realm)
        self.assertEqual(get_first_visible_message_id(realm), 0)
        # With a limit of 10 and 15 messages, the oldest visible message is
        # the 6th one sent (index 5 of the 15).
        realm.message_visibility_limit = 10
        realm.save()
        expected_message_id = message_ids[5]
        update_first_visible_message_id(realm)
        self.assertEqual(get_first_visible_message_id(realm), expected_message_id)
        # If the message_visibility_limit is greater than number of messages
        # get_first_visible_message_id should return 0
        realm.message_visibility_limit = 50
        realm.save()
        update_first_visible_message_id(realm)
        self.assertEqual(get_first_visible_message_id(realm), 0)

    def test_maybe_update_first_visible_message_id(self) -> None:
        """maybe_update_first_visible_message_id only recomputes when the realm
        has a visibility limit AND there was recent sending activity."""
        realm = get_realm("zulip")
        lookback_hours = 30
        realm.message_visibility_limit = None
        realm.save()
        end_time = timezone_now() - datetime.timedelta(hours=lookback_hours - 5)
        stat = COUNT_STATS['messages_sent:is_bot:hour']
        RealmCount.objects.create(realm=realm, property=stat.property,
                                  end_time=end_time, value=5)
        # No visibility limit: no update even though there is activity.
        with mock.patch("zerver.lib.message.update_first_visible_message_id") as m:
            maybe_update_first_visible_message_id(realm, lookback_hours)
        m.assert_not_called()
        # Visibility limit set but no RealmCount activity: still no update.
        realm.message_visibility_limit = 10
        realm.save()
        RealmCount.objects.all().delete()
        with mock.patch("zerver.lib.message.update_first_visible_message_id") as m:
            maybe_update_first_visible_message_id(realm, lookback_hours)
        m.assert_not_called()
        # Both a limit and recent activity: the update runs.
        RealmCount.objects.create(realm=realm, property=stat.property,
                                  end_time=end_time, value=5)
        with mock.patch("zerver.lib.message.update_first_visible_message_id") as m:
            maybe_update_first_visible_message_id(realm, lookback_hours)
        m.assert_called_once_with(realm)
class TestBulkGetHuddleUserIds(ZulipTestCase):
    """Tests for bulk_get_huddle_user_ids, the batched variant of
    get_huddle_user_ids."""

    def test_bulk_get_huddle_user_ids(self) -> None:
        """The bulk lookup must agree with per-recipient get_huddle_user_ids calls."""
        hamlet = self.example_user('hamlet')
        cordelia = self.example_user('cordelia')
        othello = self.example_user('othello')
        iago = self.example_user('iago')
        message_ids = [
            self.send_huddle_message(hamlet, [cordelia, othello], 'test'),
            self.send_huddle_message(cordelia, [hamlet, othello, iago], 'test')
        ]
        huddle_messages = Message.objects.filter(id__in=message_ids).order_by("id")
        recipients = [huddle_messages[0].recipient, huddle_messages[1].recipient]
        # Compute the expected answer one recipient at a time...
        expected = {
            recipient.id: list(get_huddle_user_ids(recipient))
            for recipient in recipients
        }
        # ...and check the bulk API produces the same per-recipient user ids.
        huddle_user_ids = bulk_get_huddle_user_ids(recipients)
        self.assertEqual(huddle_user_ids[recipients[0].id], expected[recipients[0].id])
        self.assertEqual(huddle_user_ids[recipients[1].id], expected[recipients[1].id])

    def test_bulk_get_huddle_user_ids_empty_list(self) -> None:
        """An empty recipient list yields an empty dict."""
        self.assertEqual(bulk_get_huddle_user_ids([]), {})
class NoRecipientIDsTest(ZulipTestCase):
    """Regression test: gather_subscriptions_helper must tolerate a user with
    no stream subscriptions at all."""

    def test_no_recipient_ids(self) -> None:
        user_profile = self.example_user('cordelia')
        # Remove every stream subscription so there are no recipient ids left.
        Subscription.objects.filter(user_profile=user_profile, recipient__type=Recipient.STREAM).delete()
        subs = gather_subscriptions_helper(user_profile)
        # Checks that gather_subscriptions_helper will not return anything
        # since there will not be any recipients, without crashing.
        #
        # This covers a rare corner case.
        self.assertEqual(len(subs[0]), 0)
| 44.023764 | 145 | 0.613132 | from django.db import IntegrityError
from django.db.models import Q
from django.conf import settings
from django.http import HttpResponse
from django.test import TestCase, override_settings
from django.utils.timezone import now as timezone_now
from zerver.lib import bugdown
from zerver.decorator import JsonableError
from zerver.lib.test_runner import slow
from zerver.lib.addressee import Addressee
from zerver.lib.actions import (
check_message,
check_send_stream_message,
create_mirror_user_if_needed,
do_add_alert_words,
do_change_is_admin,
do_change_stream_invite_only,
do_change_stream_post_policy,
do_claim_attachments,
do_create_user,
do_deactivate_user,
do_send_messages,
do_update_message,
do_set_realm_property,
extract_private_recipients,
extract_stream_indicator,
gather_subscriptions_helper,
get_active_presence_idle_user_ids,
get_client,
get_last_message_id,
get_topic_messages,
get_user_info_for_message_updates,
internal_prep_private_message,
internal_prep_stream_message_by_name,
internal_send_huddle_message,
internal_send_private_message,
internal_send_stream_message,
internal_send_stream_message_by_name,
send_rate_limited_pm_notification_to_bot_owner,
)
from zerver.lib.cache import (
cache_delete,
get_stream_cache_key,
to_dict_cache_key_id,
)
from zerver.lib.create_user import (
create_user_profile,
)
from zerver.lib.message import (
MessageDict,
bulk_access_messages,
get_first_visible_message_id,
get_raw_unread_data,
get_recent_private_conversations,
maybe_update_first_visible_message_id,
messages_for_ids,
render_markdown,
sew_messages_and_reactions,
update_first_visible_message_id,
)
from zerver.lib.test_helpers import (
get_subscription,
get_user_messages,
make_client,
message_stream_count,
most_recent_message,
most_recent_usermessage,
queries_captured,
reset_emails_in_zulip_realm,
)
from zerver.lib.test_classes import (
ZulipTestCase,
)
from zerver.lib.topic import (
LEGACY_PREV_TOPIC,
DB_TOPIC_NAME,
TOPIC_LINKS,
TOPIC_NAME,
)
from zerver.lib.types import DisplayRecipientT, UserDisplayRecipient
from zerver.lib.soft_deactivation import (
add_missing_messages,
do_soft_activate_users,
do_soft_deactivate_users,
reactivate_user_if_soft_deactivated,
)
from zerver.models import (
MAX_MESSAGE_LENGTH, MAX_TOPIC_NAME_LENGTH,
Message, Realm, Recipient, Stream, UserMessage, UserProfile, Attachment,
RealmAuditLog, RealmDomain, get_realm, UserPresence, Subscription,
get_stream, get_system_bot, get_user, Reaction,
flush_per_request_caches, ScheduledMessage, get_huddle_recipient,
bulk_get_huddle_user_ids, get_huddle_user_ids,
get_display_recipient, RealmFilter
)
from zerver.lib.timestamp import convert_to_UTC, datetime_to_timestamp
from zerver.lib.timezone import get_timezone
from zerver.lib.upload import create_attachment
from zerver.lib.url_encoding import near_message_url
from zerver.views.messages import create_mirrored_message_users, InvalidMirrorInput
from analytics.lib.counts import COUNT_STATS
from analytics.models import RealmCount
import datetime
import mock
from operator import itemgetter
import time
import ujson
from typing import Any, Dict, List, Set, Union, Tuple
from collections import namedtuple
class MiscMessageTest(ZulipTestCase):
    def test_get_last_message_id(self) -> None:
        """get_last_message_id() tracks the highest message id and falls back
        to the sentinel -1 when no messages exist."""
        newest_id = Message.objects.latest('id').id
        self.assertEqual(get_last_message_id(), newest_id)
        # With every message removed, the sentinel value is returned.
        Message.objects.all().delete()
        self.assertEqual(get_last_message_id(), -1)
class TopicHistoryTest(ZulipTestCase):
    """Tests for the per-stream topic history endpoint
    /json/users/me/<stream_id>/topics."""

    def test_topics_history_zephyr_mirror(self) -> None:
        # Zephyr-mirror realms have no shared history, so topic history
        # should come back empty even when messages exist in the stream.
        user_profile = self.mit_user('sipbtest')
        stream_name = 'new_stream'
        self.subscribe(self.mit_user("starnine"), stream_name)
        stream = get_stream(stream_name, user_profile.realm)
        self.send_stream_message(self.mit_user("starnine"), stream_name,
                                 topic_name="secret topic")
        self.login_user(user_profile)
        self.subscribe(user_profile, stream_name)
        endpoint = '/json/users/me/%d/topics' % (stream.id,)
        result = self.client_get(endpoint, dict(), subdomain="zephyr")
        self.assert_json_success(result)
        history = result.json()['topics']
        self.assertEqual(history, [])

    def test_topics_history(self) -> None:
        user_profile = self.example_user('iago')
        self.login_user(user_profile)
        stream_name = 'Verona'
        stream = get_stream(stream_name, user_profile.realm)
        recipient = stream.recipient

        def create_test_message(topic: str) -> int:
            # Creates a Message (and matching UserMessage row for iago)
            # directly via the ORM, bypassing the send-message code path,
            # and returns its id.
            hamlet = self.example_user('hamlet')
            message = Message(
                sender=hamlet,
                recipient=recipient,
                content='whatever',
                date_sent=timezone_now(),
                sending_client=get_client('whatever'),
            )
            message.set_topic_name(topic)
            message.save()
            UserMessage.objects.create(
                user_profile=user_profile,
                message=message,
                flags=0,
            )
            return message.id

        # Mixed-case topic names should be merged case-insensitively; the
        # reported max_id per topic is the newest message for that topic.
        create_test_message('topic2')
        create_test_message('toPIc1')
        create_test_message('toPIc0')
        create_test_message('topic2')
        create_test_message('topic2')
        create_test_message('Topic2')
        topic2_msg_id = create_test_message('topic2')
        create_test_message('topic1')
        create_test_message('topic1')
        topic1_msg_id = create_test_message('topic1')
        topic0_msg_id = create_test_message('topic0')
        endpoint = '/json/users/me/%d/topics' % (stream.id,)
        result = self.client_get(endpoint, dict())
        self.assert_json_success(result)
        history = result.json()['topics']
        # Only inspect the three topics we created; older fixture data may
        # follow them in the response.
        history = history[:3]
        self.assertEqual([topic['name'] for topic in history], [
            'topic0',
            'topic1',
            'topic2',
        ])
        self.assertEqual([topic['max_id'] for topic in history], [
            topic0_msg_id,
            topic1_msg_id,
            topic2_msg_id,
        ])
        # same results for a public stream.
        self.login('cordelia')
        result = self.client_get(endpoint, dict())
        self.assert_json_success(result)
        history = result.json()['topics']
        # We only look at the most recent three topics, because
        # the prior fixture data may be unreliable.
        history = history[:3]
        self.assertEqual([topic['name'] for topic in history], [
            'topic0',
            'topic1',
            'topic2',
        ])
        self.assertIn('topic0', [topic['name'] for topic in history])
        self.assertEqual([topic['max_id'] for topic in history], [
            topic0_msg_id,
            topic1_msg_id,
            topic2_msg_id,
        ])
        # Now make stream private, but subscribe cordelia
        do_change_stream_invite_only(stream, True)
        self.subscribe(self.example_user("cordelia"), stream.name)
        result = self.client_get(endpoint, dict())
        self.assert_json_success(result)
        history = result.json()['topics']
        history = history[:3]
        # Cordelia doesn't have these recent history items when we
        # make the stream private, since she joined after they were sent.
        self.assertNotIn('topic0', [topic['name'] for topic in history])
        self.assertNotIn('topic1', [topic['name'] for topic in history])
        self.assertNotIn('topic2', [topic['name'] for topic in history])

    def test_bad_stream_id(self) -> None:
        """All invalid/inaccessible stream ids yield the same generic error."""
        self.login('iago')
        # non-sensible stream id
        endpoint = '/json/users/me/9999999999/topics'
        result = self.client_get(endpoint, dict())
        self.assert_json_error(result, 'Invalid stream id')
        # out of realm
        bad_stream = self.make_stream(
            'mit_stream',
            realm=get_realm('zephyr')
        )
        endpoint = '/json/users/me/%s/topics' % (bad_stream.id,)
        result = self.client_get(endpoint, dict())
        self.assert_json_error(result, 'Invalid stream id')
        # private stream to which I am not subscribed
        private_stream = self.make_stream(
            'private_stream',
            invite_only=True
        )
        endpoint = '/json/users/me/%s/topics' % (private_stream.id,)
        result = self.client_get(endpoint, dict())
        self.assert_json_error(result, 'Invalid stream id')
class TopicDeleteTest(ZulipTestCase):
    """Tests for the delete_topic endpoint: permissions and interaction with
    private-stream history visibility."""

    def test_topic_delete(self) -> None:
        initial_last_msg_id = self.get_last_message().id
        stream_name = 'new_stream'
        topic_name = 'new topic 2'
        # NON-ADMIN USER
        user_profile = self.example_user('hamlet')
        self.subscribe(user_profile, stream_name)
        # Send message
        stream = get_stream(stream_name, user_profile.realm)
        self.send_stream_message(user_profile, stream_name, topic_name=topic_name)
        last_msg_id = self.send_stream_message(user_profile, stream_name, topic_name=topic_name)
        # Deleting the topic
        self.login_user(user_profile)
        endpoint = '/json/streams/' + str(stream.id) + '/delete_topic'
        result = self.client_post(endpoint, {
            "topic_name": topic_name
        })
        self.assert_json_error(result, "Must be an organization administrator")
        self.assertEqual(self.get_last_message().id, last_msg_id)
        # Make stream private with limited history
        do_change_stream_invite_only(stream, invite_only=True,
                                     history_public_to_subscribers=False)
        # ADMIN USER subscribed now
        user_profile = self.example_user('iago')
        self.subscribe(user_profile, stream_name)
        self.login_user(user_profile)
        new_last_msg_id = self.send_stream_message(user_profile, stream_name, topic_name=topic_name)
        # Now admin deletes all messages in topic -- which should only
        # delete new_last_msg_id, i.e. the one sent since they joined.
        self.assertEqual(self.get_last_message().id, new_last_msg_id)
        result = self.client_post(endpoint, {
            "topic_name": topic_name
        })
        self.assert_json_success(result)
        self.assertEqual(self.get_last_message().id, last_msg_id)
        # Try to delete all messages in the topic again. There are no messages accessible
        # to the administrator, so this should do nothing.
        result = self.client_post(endpoint, {
            "topic_name": topic_name
        })
        self.assert_json_success(result)
        self.assertEqual(self.get_last_message().id, last_msg_id)
        # Make the stream's history public to subscribers
        do_change_stream_invite_only(stream, invite_only=True,
                                     history_public_to_subscribers=True)
        # Now all messages in the topic are accessible, so everything is
        # deleted and only pre-existing messages remain.
        result = self.client_post(endpoint, {
            "topic_name": topic_name
        })
        self.assert_json_success(result)
        self.assertEqual(self.get_last_message().id, initial_last_msg_id)
        # Deleting again is a harmless no-op.
        result = self.client_post(endpoint, {
            "topic_name": topic_name
        })
        self.assert_json_success(result)
        self.assertEqual(self.get_last_message().id, initial_last_msg_id)
class TestCrossRealmPMs(ZulipTestCase):
    """Tests of which private messages are allowed across realm boundaries:
    ordinary users may not PM across realms, but designated cross-realm bots
    may send/receive PMs with any realm."""

    def make_realm(self, domain: str) -> Realm:
        # Create an open realm whose string_id doubles as its email domain.
        realm = Realm.objects.create(string_id=domain, invite_required=False)
        RealmDomain.objects.create(realm=realm, domain=domain)
        return realm

    def create_user(self, email: str) -> UserProfile:
        # Register a user in the realm implied by the email's domain part.
        subdomain = email.split("@")[1]
        self.register(email, 'test', subdomain=subdomain)
        return get_user(email, get_realm(subdomain))

    @slow("Sends a large number of messages")
    @override_settings(CROSS_REALM_BOT_EMAILS=['notification-bot@zulip.com',
                                               'welcome-bot@zulip.com',
                                               'support@3.example.com'])
    def test_realm_scenarios(self) -> None:
        self.make_realm('1.example.com')
        r2 = self.make_realm('2.example.com')
        self.make_realm('3.example.com')

        def assert_message_received(to_user: UserProfile, from_user: UserProfile) -> None:
            # The newest message in to_user's feed came from from_user.
            messages = get_user_messages(to_user)
            self.assertEqual(messages[-1].sender.id, from_user.id)

        def assert_invalid_user() -> Any:
            return self.assertRaisesRegex(
                JsonableError,
                'Invalid user ID ')

        user1_email = 'user1@1.example.com'
        user1a_email = 'user1a@1.example.com'
        user2_email = 'user2@2.example.com'
        user3_email = 'user3@3.example.com'
        notification_bot_email = 'notification-bot@zulip.com'
        support_email = 'support@3.example.com'
        user1 = self.create_user(user1_email)
        user1a = self.create_user(user1a_email)
        user2 = self.create_user(user2_email)
        user3 = self.create_user(user3_email)
        notification_bot = get_system_bot(notification_bot_email)
        with self.settings(CROSS_REALM_BOT_EMAILS=['notification-bot@zulip.com', 'welcome-bot@zulip.com']):
            # cross-realm email, we need to hide this for now.
            support_bot = self.create_user(support_email)
        # Users can PM themselves
        self.send_personal_message(user1, user1)
        assert_message_received(user1, user1)
        # Users on the same realm can PM each other
        self.send_personal_message(user1, user1a)
        assert_message_received(user1a, user1)
        # Cross-realm bots in the zulip.com realm can PM any realm
        # (They need lower level APIs to do this.)
        internal_send_private_message(
            realm=r2,
            sender=get_system_bot(notification_bot_email),
            recipient_user=get_user(user2_email, r2),
            content='bla',
        )
        assert_message_received(user2, notification_bot)
        # All users can PM cross-realm bots in the zulip.com realm
        self.send_personal_message(user1, notification_bot)
        assert_message_received(notification_bot, user1)
        # Users can PM cross-realm bots on non-zulip realms.
        # (The support bot represents some theoretical bot that we may
        # create in the future that does not have zulip.com as its realm.)
        self.send_personal_message(user1, support_bot)
        assert_message_received(support_bot, user1)
        # Allow sending PMs to two different cross-realm bots simultaneously.
        # (We don't particularly need this feature, but since users can
        # prevent them from sending multiple bots at once. We may revisit
        # this if it's a nuisance for huddles.)
        self.send_huddle_message(user1, [notification_bot, support_bot])
        assert_message_received(notification_bot, user1)
        assert_message_received(support_bot, user1)
        # Huddles involving a user from another realm are rejected...
        with assert_invalid_user():
            self.send_huddle_message(user1, [user3, support_bot])
        # even if one of the users is a cross-realm bot.
        with assert_invalid_user():
            self.send_huddle_message(user1, [user2, notification_bot])
        with assert_invalid_user():
            self.send_huddle_message(notification_bot, [user1, user2])
        # Users on the different realms cannot PM each other
        with assert_invalid_user():
            self.send_personal_message(user1, user2)
        # Users on non-zulip realms can't PM "ordinary" Zulip users
        with assert_invalid_user():
            self.send_personal_message(user1, self.example_user('hamlet'))
        # Nor can they huddle with users on other realms.
        with assert_invalid_user():
            self.send_huddle_message(user1, [user2, user3])
class TestAddressee(ZulipTestCase):
    """Tests for the Addressee abstraction that resolves message recipients
    from user ids, emails, or stream ids."""

    def test_addressee_for_user_ids(self) -> None:
        realm = get_realm('zulip')
        user_ids = [self.example_user('cordelia').id,
                    self.example_user('hamlet').id,
                    self.example_user('othello').id]
        result = Addressee.for_user_ids(user_ids=user_ids, realm=realm)
        user_profiles = result.user_profiles()
        result_user_ids = [user_profiles[0].id, user_profiles[1].id,
                           user_profiles[2].id]
        # Order is not guaranteed, so compare as sets.
        self.assertEqual(set(result_user_ids), set(user_ids))

    def test_addressee_for_user_ids_nonexistent_id(self) -> None:
        def assert_invalid_user_id() -> Any:
            return self.assertRaisesRegex(
                JsonableError,
                'Invalid user ID ')
        # 779 does not correspond to any user in the test database.
        with assert_invalid_user_id():
            Addressee.for_user_ids(user_ids=[779], realm=get_realm('zulip'))

    def test_addressee_legacy_build_for_user_ids(self) -> None:
        realm = get_realm('zulip')
        self.login('hamlet')
        user_ids = [self.example_user('cordelia').id,
                    self.example_user('othello').id]
        result = Addressee.legacy_build(
            sender=self.example_user('hamlet'), message_type_name='private',
            message_to=user_ids, topic_name='random_topic',
            realm=realm
        )
        user_profiles = result.user_profiles()
        result_user_ids = [user_profiles[0].id, user_profiles[1].id]
        self.assertEqual(set(result_user_ids), set(user_ids))

    def test_addressee_legacy_build_for_stream_id(self) -> None:
        realm = get_realm('zulip')
        self.login('iago')
        sender = self.example_user('iago')
        self.subscribe(sender, "Denmark")
        stream = get_stream('Denmark', realm)
        result = Addressee.legacy_build(
            sender=sender, message_type_name='stream',
            message_to=[stream.id], topic_name='random_topic',
            realm=realm
        )
        stream_id = result.stream_id()
        self.assertEqual(stream.id, stream_id)
class InternalPrepTest(ZulipTestCase):
    """Tests for internal_send_* / internal_prep_* helpers: they log errors
    (rather than raise) and otherwise degrade gracefully."""

    def test_returns_for_internal_sends(self) -> None:
        # Empty content must not crash; each internal send path logs
        # 'Message must not be empty' via logging.exception instead.
        bad_content = ''
        realm = get_realm('zulip')
        cordelia = self.example_user('cordelia')
        hamlet = self.example_user('hamlet')
        othello = self.example_user('othello')
        stream = get_stream('Verona', realm)
        with mock.patch('logging.exception') as m:
            internal_send_private_message(
                realm=realm,
                sender=cordelia,
                recipient_user=hamlet,
                content=bad_content,
            )
        arg = m.call_args_list[0][0][0]
        self.assertIn('Message must not be empty', arg)
        with mock.patch('logging.exception') as m:
            internal_send_huddle_message(
                realm=realm,
                sender=cordelia,
                emails=[hamlet.email, othello.email],
                content=bad_content,
            )
        arg = m.call_args_list[0][0][0]
        self.assertIn('Message must not be empty', arg)
        with mock.patch('logging.exception') as m:
            internal_send_stream_message(
                realm=realm,
                sender=cordelia,
                topic='whatever',
                content=bad_content,
                stream=stream
            )
        arg = m.call_args_list[0][0][0]
        self.assertIn('Message must not be empty', arg)
        with mock.patch('logging.exception') as m:
            internal_send_stream_message_by_name(
                realm=realm,
                sender=cordelia,
                stream_name=stream.name,
                topic='whatever',
                content=bad_content
            )
        arg = m.call_args_list[0][0][0]
        self.assertIn('Message must not be empty', arg)

    def test_error_handling(self) -> None:
        realm = get_realm('zulip')
        sender = self.example_user('cordelia')
        recipient_user = self.example_user('hamlet')
        # Over-long content gets truncated with a 'message was too long' note.
        content = 'x' * 15000
        result = internal_prep_private_message(
            realm=realm,
            sender=sender,
            recipient_user=recipient_user,
            content=content)
        message = result['message']
        self.assertIn('message was too long', message.content)
        with self.assertRaises(RuntimeError):
            internal_prep_private_message(
                realm=None,  # should cause error
                sender=sender,
                recipient_user=recipient_user,
                content=content)
        # Simulate sending a message to somebody not in the
        # realm of the sender.
        recipient_user = self.mit_user('starnine')
        with mock.patch('logging.exception') as logging_mock:
            result = internal_prep_private_message(
                realm=realm,
                sender=sender,
                recipient_user=recipient_user,
                content=content)
        arg = logging_mock.call_args_list[0][0][0]
        prefix = "Error queueing internal message by cordelia@zulip.com: You can't send private messages outside of your organization."
        self.assertTrue(arg.startswith(prefix))

    def test_ensure_stream_gets_called(self) -> None:
        # Prepping a message for a nonexistent stream auto-creates the stream.
        realm = get_realm('zulip')
        sender = self.example_user('cordelia')
        stream_name = 'test_stream'
        topic = 'whatever'
        content = 'hello'
        internal_prep_stream_message_by_name(
            realm=realm,
            sender=sender,
            stream_name=stream_name,
            topic=topic,
            content=content)
        # This would raise Stream.DoesNotExist if the stream were not created.
        Stream.objects.get(name=stream_name, realm_id=realm.id)
class ExtractTest(TestCase):
    """Tests for parsing the 'to' field of send-message requests:
    extract_stream_indicator and extract_private_recipients accept several
    legacy encodings (bare strings, JSON strings, JSON lists)."""

    def test_extract_stream_indicator(self) -> None:
        # Bare stream names pass through unchanged.
        self.assertEqual(
            extract_stream_indicator('development'),
            "development",
        )
        self.assertEqual(
            extract_stream_indicator('commas,are,fine'),
            "commas,are,fine",
        )
        # A JSON-encoded string is decoded.
        self.assertEqual(
            extract_stream_indicator('"Who hasn\'t done this?"'),
            "Who hasn't done this?",
        )
        # Numeric input is interpreted as a stream id.
        self.assertEqual(
            extract_stream_indicator("999"),
            999,
        )
        # For legacy reasons it's plausible that users will
        # send a single-element JSON list; unwrap it.
        self.assertEqual(
            extract_stream_indicator('["social"]'),
            'social',
        )
        self.assertEqual(
            extract_stream_indicator("[123]"),
            123,
        )
        with self.assertRaisesRegex(JsonableError, 'Invalid data type for stream'):
            extract_stream_indicator('{}')
        with self.assertRaisesRegex(JsonableError, 'Invalid data type for stream'):
            extract_stream_indicator('[{}]')
        with self.assertRaisesRegex(JsonableError, 'Expected exactly one stream'):
            extract_stream_indicator('[1,2,"general"]')

    def test_extract_private_recipients_emails(self) -> None:
        # Whitespace and duplicates are stripped from JSON email lists.
        s = ujson.dumps([' alice@zulip.com ', ' bob@zulip.com ', ' ', 'bob@zulip.com'])
        # For testing, ignorance here is better than manual casting
        result = sorted(extract_private_recipients(s))
        self.assertEqual(result, ['alice@zulip.com', 'bob@zulip.com'])
        # simple string with one name
        s = 'alice@zulip.com '
        self.assertEqual(extract_private_recipients(s), ['alice@zulip.com'])
        # JSON-encoded string
        s = '"alice@zulip.com"'
        self.assertEqual(extract_private_recipients(s), ['alice@zulip.com'])
        # bare comma-delimited string
        s = 'bob@zulip.com, alice@zulip.com'
        result = sorted(extract_private_recipients(s))
        self.assertEqual(result, ['alice@zulip.com', 'bob@zulip.com'])
        # JSON-encoded, comma-delimited string
        s = '"bob@zulip.com,alice@zulip.com"'
        result = sorted(extract_private_recipients(s))
        self.assertEqual(result, ['alice@zulip.com', 'bob@zulip.com'])
        # Invalid data
        s = ujson.dumps(dict(color='red'))
        with self.assertRaisesRegex(JsonableError, 'Invalid data type for recipients'):
            extract_private_recipients(s)
        s = ujson.dumps([{}])
        with self.assertRaisesRegex(JsonableError, 'Invalid data type for recipients'):
            extract_private_recipients(s)
        # Empty list
        self.assertEqual(extract_private_recipients('[]'), [])
        # Heterogeneous lists are not supported
        mixed = ujson.dumps(['eeshan@example.com', 3, 4])
        with self.assertRaisesRegex(JsonableError, 'Recipient lists may contain emails or user IDs, but not both.'):
            extract_private_recipients(mixed)

    def test_extract_recipient_ids(self) -> None:
        # JSON list w/dups
        s = ujson.dumps([3, 3, 12])
        result = sorted(extract_private_recipients(s))
        self.assertEqual(result, [3, 12])
        # Invalid data
        ids = ujson.dumps(dict(recipient=12))
        with self.assertRaisesRegex(JsonableError, 'Invalid data type for recipients'):
            extract_private_recipients(ids)
        # Heterogeneous lists are not supported
        mixed = ujson.dumps([3, 4, 'eeshan@example.com'])
        with self.assertRaisesRegex(JsonableError, 'Recipient lists may contain emails or user IDs, but not both.'):
            extract_private_recipients(mixed)
class PersonalMessagesTest(ZulipTestCase):
    """Tests of sending/receiving 1:1 private messages: auto-subscription,
    delivery scoping, and the private_message_policy realm setting."""

    def test_near_pm_message_url(self) -> None:
        realm = get_realm('zulip')
        message = dict(
            type='personal',
            id=555,
            display_recipient=[
                dict(id=77),
                dict(id=80),
            ],
        )
        url = near_message_url(
            realm=realm,
            message=message,
        )
        # BUGFIX: this string literal was truncated at the '#' (comment
        # stripping mangled it), leaving an unterminated string. The narrow
        # URL fragment encodes both recipient ids and the /near/ message id.
        self.assertEqual(url, 'http://zulip.testserver/#narrow/pm-with/77,80-pm/near/555')

    def test_is_private_flag_not_leaked(self) -> None:
        # The internal 'is_private' flag must never appear in API payloads.
        self.login('hamlet')
        self.send_personal_message(self.example_user("hamlet"),
                                   self.example_user("cordelia"),
                                   "test")
        for msg in self.get_messages():
            self.assertNotIn('is_private', msg['flags'])

    def test_auto_subbed_to_personals(self) -> None:
        """
        Newly registered users can PM themselves and receive the message.
        """
        test_email = self.nonreg_email('test')
        self.register(test_email, "test")
        user_profile = self.nonreg_user('test')
        old_messages_count = message_stream_count(user_profile)
        self.send_personal_message(user_profile, user_profile)
        new_messages_count = message_stream_count(user_profile)
        self.assertEqual(new_messages_count, old_messages_count + 1)
        recipient = Recipient.objects.get(type_id=user_profile.id,
                                          type=Recipient.PERSONAL)
        message = most_recent_message(user_profile)
        self.assertEqual(message.recipient, recipient)
        # Also exercise the __str__ representations of Message/UserMessage.
        with mock.patch('zerver.models.get_display_recipient', return_value='recip'):
            self.assertEqual(
                str(message),
                '<Message: recip /  / '
                '<UserProfile: {} {}>>'.format(user_profile.email, user_profile.realm))
            user_message = most_recent_usermessage(user_profile)
            self.assertEqual(
                str(user_message),
                '<UserMessage: recip / {} ([])>'.format(user_profile.email)
            )

    @slow("checks several profiles")
    def test_personal_to_self(self) -> None:
        # A self-PM must not change anyone else's message counts.
        old_user_profiles = list(UserProfile.objects.all())
        test_email = self.nonreg_email('test1')
        self.register(test_email, "test1")
        old_messages = []
        for user_profile in old_user_profiles:
            old_messages.append(message_stream_count(user_profile))
        user_profile = self.nonreg_user('test1')
        self.send_personal_message(user_profile, user_profile)
        new_messages = []
        for user_profile in old_user_profiles:
            new_messages.append(message_stream_count(user_profile))
        self.assertEqual(old_messages, new_messages)
        user_profile = self.nonreg_user('test1')
        recipient = Recipient.objects.get(type_id=user_profile.id, type=Recipient.PERSONAL)
        self.assertEqual(most_recent_message(user_profile).recipient, recipient)

    def assert_personal(self, sender: UserProfile, receiver: UserProfile, content: str="testcontent") -> None:
        """
        Send a PM from sender to receiver and assert that exactly the two
        participants (and nobody else) received it.
        """
        sender_messages = message_stream_count(sender)
        receiver_messages = message_stream_count(receiver)
        other_user_profiles = UserProfile.objects.filter(~Q(id=sender.id) &
                                                         ~Q(id=receiver.id))
        old_other_messages = []
        for user_profile in other_user_profiles:
            old_other_messages.append(message_stream_count(user_profile))
        self.send_personal_message(sender, receiver, content)
        # Users outside the conversation don't get the message.
        new_other_messages = []
        for user_profile in other_user_profiles:
            new_other_messages.append(message_stream_count(user_profile))
        self.assertEqual(old_other_messages, new_other_messages)
        self.assertEqual(message_stream_count(sender),
                         sender_messages + 1)
        self.assertEqual(message_stream_count(receiver),
                         receiver_messages + 1)
        recipient = Recipient.objects.get(type_id=receiver.id, type=Recipient.PERSONAL)
        self.assertEqual(most_recent_message(sender).recipient, recipient)
        self.assertEqual(most_recent_message(receiver).recipient, recipient)

    def test_personal(self) -> None:
        self.login('hamlet')
        self.assert_personal(
            sender=self.example_user("hamlet"),
            receiver=self.example_user("othello")
        )

    def test_private_message_policy(self) -> None:
        """
        With PRIVATE_MESSAGE_POLICY_DISABLED, PMs to humans are rejected but
        PMs to/from bots are still allowed.
        """
        user_profile = self.example_user("hamlet")
        self.login_user(user_profile)
        do_set_realm_property(user_profile.realm, "private_message_policy",
                              Realm.PRIVATE_MESSAGE_POLICY_DISABLED)
        with self.assertRaises(JsonableError):
            self.send_personal_message(user_profile, self.example_user("cordelia"))
        bot_profile = self.create_test_bot("testbot", user_profile)
        self.send_personal_message(user_profile, get_system_bot(settings.NOTIFICATION_BOT))
        self.send_personal_message(user_profile, bot_profile)
        self.send_personal_message(bot_profile, user_profile)

    def test_non_ascii_personal(self) -> None:
        """
        Sending a PM containing non-ASCII characters succeeds.
        """
        self.login('hamlet')
        self.assert_personal(
            sender=self.example_user("hamlet"),
            receiver=self.example_user("othello"),
            content="hümbüǵ"
        )
class StreamMessagesTest(ZulipTestCase):
    """Tests for sending stream messages: fan-out to subscribers, mentions,
    flags, mirroring, and query/performance guards."""
    def assert_stream_message(self, stream_name: str, topic_name: str="test topic",
                              content: str="test content") -> None:
        """
        Send a message to `stream_name` as one of its human subscribers and
        verify that every subscriber received it and no non-subscriber did.
        """
        realm = get_realm('zulip')
        subscribers = self.users_subscribed_to_stream(stream_name, realm)
        # Outgoing webhook bots are excluded from the per-user message
        # counting below.
        subscribers = [subscriber for subscriber in subscribers
                       if subscriber.bot_type != UserProfile.OUTGOING_WEBHOOK_BOT]
        old_subscriber_messages = []
        for subscriber in subscribers:
            old_subscriber_messages.append(message_stream_count(subscriber))
        non_subscribers = [user_profile for user_profile in UserProfile.objects.all()
                           if user_profile not in subscribers]
        old_non_subscriber_messages = []
        for non_subscriber in non_subscribers:
            old_non_subscriber_messages.append(message_stream_count(non_subscriber))
        non_bot_subscribers = [user_profile for user_profile in subscribers
                               if not user_profile.is_bot]
        a_subscriber = non_bot_subscribers[0]
        self.login_user(a_subscriber)
        self.send_stream_message(a_subscriber, stream_name,
                                 content=content, topic_name=topic_name)
        # Did all of the subscribers get the message?
        new_subscriber_messages = []
        for subscriber in subscribers:
            new_subscriber_messages.append(message_stream_count(subscriber))
        # Did non-subscribers not get the message?
        new_non_subscriber_messages = []
        for non_subscriber in non_subscribers:
            new_non_subscriber_messages.append(message_stream_count(non_subscriber))
        self.assertEqual(old_non_subscriber_messages, new_non_subscriber_messages)
        self.assertEqual(new_subscriber_messages, [elt + 1 for elt in old_subscriber_messages])
    def test_performance(self) -> None:
        """
        Rough smoke test of do_send_messages() throughput: create extra
        subscribers (every other one long-term idle) and confirm the
        expected number of UserMessage rows gets created.
        """
        num_messages = 2
        num_extra_users = 10
        sender = self.example_user('cordelia')
        realm = sender.realm
        message_content = 'whatever'
        stream = get_stream('Denmark', realm)
        topic_name = 'lunch'
        recipient = stream.recipient
        sending_client = make_client(name="test suite")
        for i in range(num_extra_users):
            # Make every other user be idle.
            long_term_idle = i % 2 > 0
            email = 'foo%d@example.com' % (i,)
            user = UserProfile.objects.create(
                realm=realm,
                email=email,
                pointer=0,
                long_term_idle=long_term_idle,
            )
            Subscription.objects.create(
                user_profile=user,
                recipient=recipient
            )
        def send_test_message() -> None:
            # Construct and send the Message directly, bypassing the API.
            message = Message(
                sender=sender,
                recipient=recipient,
                content=message_content,
                date_sent=timezone_now(),
                sending_client=sending_client,
            )
            message.set_topic_name(topic_name)
            do_send_messages([dict(message=message)])
        before_um_count = UserMessage.objects.count()
        t = time.time()
        for i in range(num_messages):
            send_test_message()
        delay = time.time() - t
        assert(delay)  # quiet down lint
        # print(delay)
        after_um_count = UserMessage.objects.count()
        ums_created = after_um_count - before_um_count
        # Half of the extra users are active (non-idle).
        num_active_users = num_extra_users / 2
        self.assertTrue(ums_created > (num_active_users * num_messages))
    def test_not_too_many_queries(self) -> None:
        """Guard against database query-count regressions in the
        check_send_stream_message path."""
        recipient_list = [self.example_user("hamlet"), self.example_user("iago"),
                          self.example_user("cordelia"), self.example_user("othello")]
        for user_profile in recipient_list:
            self.subscribe(user_profile, "Denmark")
        sender = self.example_user('hamlet')
        sending_client = make_client(name="test suite")
        stream_name = 'Denmark'
        topic_name = 'foo'
        content = 'whatever'
        realm = sender.realm
        # To get accurate count of the queries, we should make sure that
        # caches don't come into play. If we count queries while caches are
        # warm, we will get a misleadingly low count.
        flush_per_request_caches()
        cache_delete(get_stream_cache_key(stream_name, realm.id))
        with queries_captured() as queries:
            check_send_stream_message(
                sender=sender,
                client=sending_client,
                stream_name=stream_name,
                topic=topic_name,
                body=content,
            )
        self.assert_length(queries, 15)
    def test_stream_message_dict(self) -> None:
        """MessageDict rows for stream messages expose display_recipient
        (the stream name) and stream_id."""
        user_profile = self.example_user('iago')
        self.subscribe(user_profile, "Denmark")
        self.send_stream_message(self.example_user("hamlet"), "Denmark",
                                 content="whatever", topic_name="my topic")
        message = most_recent_message(user_profile)
        row = MessageDict.get_raw_db_rows([message.id])[0]
        dct = MessageDict.build_dict_from_raw_db_row(row)
        MessageDict.post_process_dicts([dct], apply_markdown=True, client_gravatar=False)
        self.assertEqual(dct['display_recipient'], 'Denmark')
        stream = get_stream('Denmark', user_profile.realm)
        self.assertEqual(dct['stream_id'], stream.id)
    def test_stream_message_unicode(self) -> None:
        """Message.__str__ renders stream / topic / sender."""
        receiving_user_profile = self.example_user('iago')
        sender = self.example_user('hamlet')
        self.subscribe(receiving_user_profile, "Denmark")
        self.send_stream_message(sender, "Denmark",
                                 content="whatever", topic_name="my topic")
        message = most_recent_message(receiving_user_profile)
        self.assertEqual(str(message),
                         '<Message: Denmark / my topic / '
                         '<UserProfile: {} {}>>'.format(sender.email, sender.realm))
    def test_message_mentions(self) -> None:
        """An @-mention sets the `mentioned` flag on the UserMessage row."""
        user_profile = self.example_user('iago')
        self.subscribe(user_profile, "Denmark")
        self.send_stream_message(self.example_user("hamlet"), "Denmark",
                                 content="test @**Iago** rules")
        message = most_recent_message(user_profile)
        assert(UserMessage.objects.get(user_profile=user_profile, message=message).flags.mentioned.is_set)
    def test_is_private_flag(self) -> None:
        """is_private is unset for stream messages and set for PMs."""
        user_profile = self.example_user('iago')
        self.subscribe(user_profile, "Denmark")
        self.send_stream_message(self.example_user("hamlet"), "Denmark",
                                 content="test")
        message = most_recent_message(user_profile)
        self.assertFalse(UserMessage.objects.get(user_profile=user_profile, message=message).flags.is_private.is_set)
        self.send_personal_message(self.example_user("hamlet"), user_profile,
                                   content="test")
        message = most_recent_message(user_profile)
        self.assertTrue(UserMessage.objects.get(user_profile=user_profile, message=message).flags.is_private.is_set)
    def _send_stream_message(self, user: UserProfile, stream_name: str, content: str) -> Set[int]:
        # Send a stream message and return the set of user IDs that the
        # single send_event call would notify.
        with mock.patch('zerver.lib.actions.send_event') as m:
            self.send_stream_message(
                user,
                stream_name,
                content=content
            )
        self.assertEqual(m.call_count, 1)
        users = m.call_args[0][2]
        user_ids = {u['id'] for u in users}
        return user_ids
    def test_unsub_mention(self) -> None:
        """Mentioning an unsubscribed user neither notifies them nor creates
        a UserMessage for them; subscribing them changes both."""
        cordelia = self.example_user('cordelia')
        hamlet = self.example_user('hamlet')
        stream_name = 'Test Stream'
        self.subscribe(hamlet, stream_name)
        UserMessage.objects.filter(
            user_profile=cordelia
        ).delete()
        def mention_cordelia() -> Set[int]:
            content = 'test @**Cordelia Lear** rules'
            user_ids = self._send_stream_message(
                user=hamlet,
                stream_name=stream_name,
                content=content
            )
            return user_ids
        def num_cordelia_messages() -> int:
            return UserMessage.objects.filter(
                user_profile=cordelia
            ).count()
        user_ids = mention_cordelia()
        self.assertEqual(0, num_cordelia_messages())
        self.assertNotIn(cordelia.id, user_ids)
        # Subscribing Cordelia and mentioning her should give her a
        # message.
        self.subscribe(cordelia, stream_name)
        user_ids = mention_cordelia()
        self.assertIn(cordelia.id, user_ids)
        self.assertEqual(1, num_cordelia_messages())
    def test_message_bot_mentions(self) -> None:
        """Mentioning an unsubscribed (normal) bot still delivers the
        message to the bot with the mentioned flag set."""
        cordelia = self.example_user('cordelia')
        hamlet = self.example_user('hamlet')
        realm = hamlet.realm
        stream_name = 'Test Stream'
        self.subscribe(hamlet, stream_name)
        normal_bot = do_create_user(
            email='normal-bot@zulip.com',
            password='',
            realm=realm,
            full_name='Normal Bot',
            short_name='',
            bot_type=UserProfile.DEFAULT_BOT,
            bot_owner=cordelia,
        )
        content = 'test @**Normal Bot** rules'
        user_ids = self._send_stream_message(
            user=hamlet,
            stream_name=stream_name,
            content=content
        )
        self.assertIn(normal_bot.id, user_ids)
        user_message = most_recent_usermessage(normal_bot)
        self.assertEqual(user_message.message.content, content)
        self.assertTrue(user_message.flags.mentioned)
    def test_stream_message_mirroring(self) -> None:
        """Only api_super_user accounts may send forged zephyr-mirrored
        stream messages."""
        user = self.mit_user('starnine')
        self.subscribe(user, 'Verona')
        do_change_is_admin(user, True, 'api_super_user')
        result = self.api_post(user, "/api/v1/messages", {"type": "stream",
                                                          "to": "Verona",
                                                          "sender": self.mit_email("sipbtest"),
                                                          "client": "zephyr_mirror",
                                                          "topic": "announcement",
                                                          "content": "Everyone knows Iago rules",
                                                          "forged": "true"},
                               subdomain="zephyr")
        self.assert_json_success(result)
        do_change_is_admin(user, False, 'api_super_user')
        result = self.api_post(user, "/api/v1/messages", {"type": "stream",
                                                          "to": "Verona",
                                                          "sender": self.mit_email("sipbtest"),
                                                          "client": "zephyr_mirror",
                                                          "topic": "announcement",
                                                          "content": "Everyone knows Iago rules",
                                                          "forged": "true"},
                               subdomain="zephyr")
        self.assert_json_error(result, "User not authorized for this query")
    def test_message_to_stream(self) -> None:
        """Basic stream send/fan-out works."""
        self.assert_stream_message("Scotland")
    def test_non_ascii_stream_message(self) -> None:
        """Non-ASCII stream name, topic, and content all work."""
        self.login('hamlet')
        # Subscribe everyone to a stream with non-ASCII characters.
        non_ascii_stream_name = "hümbüǵ"
        realm = get_realm("zulip")
        stream = self.make_stream(non_ascii_stream_name)
        for user_profile in UserProfile.objects.filter(is_active=True, is_bot=False,
                                                       realm=realm)[0:3]:
            self.subscribe(user_profile, stream.name)
        self.assert_stream_message(non_ascii_stream_name, topic_name="hümbüǵ",
                                   content="hümbüǵ")
    def test_get_raw_unread_data_for_huddle_messages(self) -> None:
        """Unread huddle messages show up in huddle_dict and collapse into a
        single recent private conversation."""
        users = [
            self.example_user('hamlet'),
            self.example_user('cordelia'),
            self.example_user('iago'),
            self.example_user('prospero'),
            self.example_user('othello'),
        ]
        message1_id = self.send_huddle_message(users[0], users, "test content 1")
        message2_id = self.send_huddle_message(users[0], users, "test content 2")
        msg_data = get_raw_unread_data(users[1])
        # both the messages are present in msg_data
        self.assertIn(message1_id, msg_data["huddle_dict"].keys())
        self.assertIn(message2_id, msg_data["huddle_dict"].keys())
        # only these two messages are present in msg_data
        self.assertEqual(len(msg_data["huddle_dict"].keys()), 2)
        recent_conversations = get_recent_private_conversations(users[1])
        self.assertEqual(len(recent_conversations), 1)
        recent_conversation = list(recent_conversations.values())[0]
        self.assertEqual(set(recent_conversation['user_ids']), {user.id for user in users if
                                                                user != users[1]})
        self.assertEqual(recent_conversation['max_message_id'], message2_id)
class MessageDictTest(ZulipTestCase):
    """Tests for MessageDict message serialization and related endpoints."""
    def test_both_codepaths(self) -> None:
        """
        The message-send payload (wide_dict -> finalize_payload) and the
        message-fetch payload (to_dict_uncached_helper -> post_process_dicts)
        must agree for every apply_markdown/client_gravatar combination.
        """
        def reload_message(msg_id: int) -> Message:
            # Get a clean copy of the message, and
            # clear the cache.
            cache_delete(to_dict_cache_key_id(msg_id))
            msg = Message.objects.get(id=msg_id)
            return msg
        def get_send_message_payload(
                msg_id: int,
                apply_markdown: bool,
                client_gravatar: bool) -> Dict[str, Any]:
            # Payload as built on the message-send codepath.
            msg = reload_message(msg_id)
            wide_dict = MessageDict.wide_dict(msg)
            narrow_dict = MessageDict.finalize_payload(
                wide_dict,
                apply_markdown=apply_markdown,
                client_gravatar=client_gravatar,
            )
            return narrow_dict
        def get_fetch_payload(
                msg_id: int,
                apply_markdown: bool,
                client_gravatar: bool) -> Dict[str, Any]:
            # Payload as built on the message-fetch codepath.
            msg = reload_message(msg_id)
            unhydrated_dict = MessageDict.to_dict_uncached_helper([msg])[0]
            # The next step mutates the dict in place
            # for performance reasons.
            MessageDict.post_process_dicts(
                [unhydrated_dict],
                apply_markdown=apply_markdown,
                client_gravatar=client_gravatar,
            )
            final_dict = unhydrated_dict
            return final_dict
        def test_message_id() -> int:
            # Send one stream message and return its id.
            hamlet = self.example_user('hamlet')
            self.login_user(hamlet)
            msg_id = self.send_stream_message(
                hamlet,
                "Scotland",
                topic_name="editing",
                content="before edit"
            )
            return msg_id
        flag_setups = [
            [False, False],
            [False, True],
            [True, False],
            [True, True],
        ]
        msg_id = test_message_id()
        for (apply_markdown, client_gravatar) in flag_setups:
            send_message_payload = get_send_message_payload(
                msg_id,
                apply_markdown=apply_markdown,
                client_gravatar=client_gravatar,
            )
            fetch_payload = get_fetch_payload(
                msg_id,
                apply_markdown=apply_markdown,
                client_gravatar=client_gravatar,
            )
            self.assertEqual(send_message_payload, fetch_payload)
    @slow('builds lots of messages')
    def test_bulk_message_fetching(self) -> None:
        """Bulk-fetching many messages (with reactions) stays fast and uses
        a bounded number of queries."""
        sender = self.example_user('othello')
        receiver = self.example_user('hamlet')
        pm_recipient = Recipient.objects.get(type_id=receiver.id, type=Recipient.PERSONAL)
        stream_name = 'Çiğdem'
        stream = self.make_stream(stream_name)
        stream_recipient = Recipient.objects.get(type_id=stream.id, type=Recipient.STREAM)
        sending_client = make_client(name="test suite")
        ids = []
        for i in range(300):
            for recipient in [pm_recipient, stream_recipient]:
                message = Message(
                    sender=sender,
                    recipient=recipient,
                    content='whatever %d' % (i,),
                    rendered_content='DOES NOT MATTER',
                    rendered_content_version=bugdown.version,
                    date_sent=timezone_now(),
                    sending_client=sending_client,
                    last_edit_time=timezone_now(),
                    edit_history='[]'
                )
                message.set_topic_name('whatever')
                message.save()
                ids.append(message.id)
                Reaction.objects.create(user_profile=sender, message=message,
                                        emoji_name='simple_smile')
        num_ids = len(ids)
        self.assertTrue(num_ids >= 600)
        flush_per_request_caches()
        t = time.time()
        with queries_captured() as queries:
            rows = list(MessageDict.get_raw_db_rows(ids))
            objs = [
                MessageDict.build_dict_from_raw_db_row(row)
                for row in rows
            ]
            MessageDict.post_process_dicts(objs, apply_markdown=False, client_gravatar=False)
        delay = time.time() - t
        # Make sure we don't take longer than 1.5ms per message to
        # hydrate the batch.
        error_msg = "Number of ids: {}. Time delay: {}".format(num_ids, delay)
        self.assertTrue(delay < 0.0015 * num_ids, error_msg)
        self.assert_length(queries, 7)
        self.assertEqual(len(rows), num_ids)
    def test_applying_markdown(self) -> None:
        """Fetching an unrendered message renders its markdown and persists
        the rendered content and version on the Message row."""
        sender = self.example_user('othello')
        receiver = self.example_user('hamlet')
        recipient = Recipient.objects.get(type_id=receiver.id, type=Recipient.PERSONAL)
        sending_client = make_client(name="test suite")
        message = Message(
            sender=sender,
            recipient=recipient,
            content='hello **world**',
            date_sent=timezone_now(),
            sending_client=sending_client,
            last_edit_time=timezone_now(),
            edit_history='[]'
        )
        message.set_topic_name('whatever')
        message.save()
        row = MessageDict.get_raw_db_rows([message.id])[0]
        dct = MessageDict.build_dict_from_raw_db_row(row)
        expected_content = '<p>hello <strong>world</strong></p>'
        self.assertEqual(dct['rendered_content'], expected_content)
        message = Message.objects.get(id=message.id)
        self.assertEqual(message.rendered_content, expected_content)
        self.assertEqual(message.rendered_content_version, bugdown.version)
    @mock.patch("zerver.lib.message.bugdown.convert")
    def test_applying_markdown_invalid_format(self, convert_mock: Any) -> None:
        """If the markdown converter fails to produce output, the user gets
        a friendly placeholder instead of a crash."""
        # pretend the converter returned an invalid message without raising an exception
        convert_mock.return_value = None
        sender = self.example_user('othello')
        receiver = self.example_user('hamlet')
        recipient = Recipient.objects.get(type_id=receiver.id, type=Recipient.PERSONAL)
        sending_client = make_client(name="test suite")
        message = Message(
            sender=sender,
            recipient=recipient,
            content='hello **world**',
            date_sent=timezone_now(),
            sending_client=sending_client,
            last_edit_time=timezone_now(),
            edit_history='[]'
        )
        message.set_topic_name('whatever')
        message.save()
        # An important part of this test is to get the message through this exact code path,
        # because there is an ugly hack we need to cover. So don't just say "row = message".
        row = MessageDict.get_raw_db_rows([message.id])[0]
        dct = MessageDict.build_dict_from_raw_db_row(row)
        error_content = '<p>[Zulip note: Sorry, we could not understand the formatting of your message]</p>'
        self.assertEqual(dct['rendered_content'], error_content)
    def test_topic_links_use_stream_realm(self) -> None:
        """Topic links are computed from the *stream's* realm filters, no
        matter which realm the sender belongs to."""
        zulip_realm = get_realm('zulip')
        url_format_string = r"https://trac.zulip.net/ticket/%(id)s"
        url = 'https://trac.zulip.net/ticket/123'
        topic_name = 'test #123'
        realm_filter = RealmFilter(realm=zulip_realm,
                                   pattern=r"#(?P<id>[0-9]{2,8})",
                                   url_format_string=url_format_string)
        self.assertEqual(
            realm_filter.__str__(),
            '<RealmFilter(zulip): #(?P<id>[0-9]{2,8})'
            ' https://trac.zulip.net/ticket/%(id)s>')
        def get_message(sender: UserProfile) -> Message:
            # Send to the zulip realm's Denmark stream regardless of sender realm.
            msg_id = self.send_stream_message(sender, 'Denmark', 'hello world', topic_name,
                                              zulip_realm)
            return Message.objects.get(id=msg_id)
        def assert_topic_links(links: List[str], msg: Message) -> None:
            dct = MessageDict.to_dict_uncached_helper([msg])[0]
            self.assertEqual(dct[TOPIC_LINKS], links)
        # Before the filter is saved, no sender's message gets topic links;
        # afterwards, every sender's message does.
        assert_topic_links([], get_message(self.example_user('othello')))
        assert_topic_links([], get_message(self.lear_user('cordelia')))
        assert_topic_links([], get_message(self.notification_bot()))
        realm_filter.save()
        assert_topic_links([url], get_message(self.example_user('othello')))
        assert_topic_links([url], get_message(self.lear_user('cordelia')))
        assert_topic_links([url], get_message(self.notification_bot()))
    def test_reaction(self) -> None:
        """Reactions appear in the MessageDict row with user details."""
        sender = self.example_user('othello')
        receiver = self.example_user('hamlet')
        recipient = Recipient.objects.get(type_id=receiver.id, type=Recipient.PERSONAL)
        sending_client = make_client(name="test suite")
        message = Message(
            sender=sender,
            recipient=recipient,
            content='hello **world**',
            date_sent=timezone_now(),
            sending_client=sending_client,
            last_edit_time=timezone_now(),
            edit_history='[]'
        )
        message.set_topic_name('whatever')
        message.save()
        reaction = Reaction.objects.create(
            message=message, user_profile=sender,
            emoji_name='simple_smile')
        row = MessageDict.get_raw_db_rows([message.id])[0]
        msg_dict = MessageDict.build_dict_from_raw_db_row(row)
        self.assertEqual(msg_dict['reactions'][0]['emoji_name'],
                         reaction.emoji_name)
        self.assertEqual(msg_dict['reactions'][0]['user_id'], sender.id)
        self.assertEqual(msg_dict['reactions'][0]['user']['id'],
                         sender.id)
        self.assertEqual(msg_dict['reactions'][0]['user']['email'],
                         sender.email)
        self.assertEqual(msg_dict['reactions'][0]['user']['full_name'],
                         sender.full_name)
    def test_missing_anchor(self) -> None:
        """GET /json/messages without an anchor fails with a clear error."""
        self.login('hamlet')
        result = self.client_get(
            '/json/messages?use_first_unread_anchor=false&num_before=1&num_after=1')
        self.assert_json_error(
            result, "Missing 'anchor' argument.")
    def test_invalid_anchor(self) -> None:
        """GET /json/messages with a non-integer anchor fails."""
        self.login('hamlet')
        result = self.client_get(
            '/json/messages?use_first_unread_anchor=false&num_before=1&num_after=1&anchor=chocolate')
        self.assert_json_error(
            result, "Invalid anchor")
class SewMessageAndReactionTest(ZulipTestCase):
    """Tests for sew_messages_and_reactions(), which attaches raw reaction
    rows to their corresponding message dicts."""
    def test_sew_messages_and_reaction(self) -> None:
        """Every message dict (PM and stream alike) ends up with its one
        reaction attached and keeps its id/content fields."""
        sender = self.example_user('othello')
        receiver = self.example_user('hamlet')
        pm_recipient = Recipient.objects.get(type_id=receiver.id, type=Recipient.PERSONAL)
        stream_name = 'Çiğdem'
        stream = self.make_stream(stream_name)
        stream_recipient = Recipient.objects.get(type_id=stream.id, type=Recipient.STREAM)
        sending_client = make_client(name="test suite")
        needed_ids = []
        for i in range(5):
            for recipient in [pm_recipient, stream_recipient]:
                message = Message(
                    sender=sender,
                    recipient=recipient,
                    content='whatever %d' % (i,),
                    date_sent=timezone_now(),
                    sending_client=sending_client,
                    last_edit_time=timezone_now(),
                    edit_history='[]'
                )
                message.set_topic_name('whatever')
                message.save()
                needed_ids.append(message.id)
                reaction = Reaction(user_profile=sender, message=message,
                                    emoji_name='simple_smile')
                reaction.save()
        messages = Message.objects.filter(id__in=needed_ids).values(
            *['id', 'content'])
        reactions = Reaction.get_raw_db_rows(needed_ids)
        tied_data = sew_messages_and_reactions(messages, reactions)
        for data in tied_data:
            self.assertEqual(len(data['reactions']), 1)
            self.assertEqual(data['reactions'][0]['emoji_name'],
                             'simple_smile')
            self.assertTrue(data['id'])
            self.assertTrue(data['content'])
class MessagePOSTTest(ZulipTestCase):
def _send_and_verify_message(self, user: UserProfile, stream_name: str, error_msg: str=None) -> None:
if error_msg is None:
msg_id = self.send_stream_message(user, stream_name)
result = self.api_get(user, '/json/messages/' + str(msg_id))
self.assert_json_success(result)
else:
with self.assertRaisesRegex(JsonableError, error_msg):
self.send_stream_message(user, stream_name)
def test_message_to_self(self) -> None:
self.login('hamlet')
result = self.client_post("/json/messages", {"type": "stream",
"to": "Verona",
"client": "test suite",
"content": "Test message",
"topic": "Test topic"})
self.assert_json_success(result)
def test_api_message_to_self(self) -> None:
user = self.example_user('hamlet')
result = self.api_post(user, "/api/v1/messages", {"type": "stream",
"to": "Verona",
"client": "test suite",
"content": "Test message",
"topic": "Test topic"})
self.assert_json_success(result)
    def test_message_to_stream_with_nonexistent_id(self) -> None:
        """A bot sending to a bad stream ID gets an API error, and the bot's
        owner receives an explanatory notification message."""
        cordelia = self.example_user('cordelia')
        bot = self.create_test_bot(
            short_name='whatever',
            user_profile=cordelia,
        )
        result = self.api_post(
            bot, "/api/v1/messages",
            {
                "type": "stream",
                "to": ujson.dumps([99999]),
                "client": "test suite",
                "content": "Stream message by ID.",
                "topic": "Test topic for stream ID message"
            }
        )
        self.assert_json_error(result, "Stream with ID '99999' does not exist")
        # The bot owner is told why the bot's message failed.
        msg = self.get_last_message()
        expected = ("Your bot `whatever-bot@zulip.testserver` tried to send a message to "
                    "stream ID 99999, but there is no stream with that ID.")
        self.assertEqual(msg.content, expected)
def test_message_to_stream_by_id(self) -> None:
self.login('hamlet')
realm = get_realm('zulip')
stream = get_stream('Verona', realm)
result = self.client_post("/json/messages", {"type": "stream",
"to": ujson.dumps([stream.id]),
"client": "test suite",
"content": "Stream message by ID.",
"topic": "Test topic for stream ID message"})
self.assert_json_success(result)
sent_message = self.get_last_message()
self.assertEqual(sent_message.content, "Stream message by ID.")
    def test_sending_message_as_stream_post_policy_admins(self) -> None:
        """With STREAM_POST_POLICY_ADMINS, only admins, admin-owned bots,
        and system bots can post to the stream."""
        admin_profile = self.example_user("iago")
        self.login_user(admin_profile)
        stream_name = "Verona"
        stream = get_stream(stream_name, admin_profile.realm)
        do_change_stream_post_policy(stream, Stream.STREAM_POST_POLICY_ADMINS)
        # Admins and their owned bots may post.
        self._send_and_verify_message(admin_profile, stream_name)
        admin_owned_bot = self.create_test_bot(
            short_name='whatever1',
            full_name='whatever1',
            user_profile=admin_profile,
        )
        self._send_and_verify_message(admin_owned_bot, stream_name)
        # Non-admins, their bots, and ownerless bots may not.
        non_admin_profile = self.example_user("hamlet")
        self.login_user(non_admin_profile)
        self._send_and_verify_message(non_admin_profile, stream_name,
                                      "Only organization administrators can send to this stream.")
        non_admin_owned_bot = self.create_test_bot(
            short_name='whatever2',
            full_name='whatever2',
            user_profile=non_admin_profile,
        )
        self._send_and_verify_message(non_admin_owned_bot, stream_name,
                                      "Only organization administrators can send to this stream.")
        bot_without_owner = do_create_user(
            email='free-bot@zulip.testserver',
            password='',
            realm=non_admin_profile.realm,
            full_name='freebot',
            short_name='freebot',
            bot_type=UserProfile.DEFAULT_BOT,
        )
        self._send_and_verify_message(bot_without_owner, stream_name,
                                      "Only organization administrators can send to this stream.")
        # System bots are exempt from the post policy.
        notification_bot = get_system_bot("notification-bot@zulip.com")
        internal_send_stream_message(stream.realm, notification_bot, stream,
                                     'Test topic', 'Test message by notification bot')
        self.assertEqual(self.get_last_message().content, 'Test message by notification bot')
    def test_sending_message_as_stream_post_policy_restrict_new_members(self) -> None:
        """With STREAM_POST_POLICY_RESTRICT_NEW_MEMBERS, new members (and
        their bots) cannot post; admins are exempt even if new."""
        admin_profile = self.example_user("iago")
        self.login_user(admin_profile)
        do_set_realm_property(admin_profile.realm, 'waiting_period_threshold', 10)
        # Make the admin a "new member" (joined within the waiting period)
        # to verify admins bypass the restriction.
        admin_profile.date_joined = timezone_now() - datetime.timedelta(days=9)
        admin_profile.save()
        self.assertTrue(admin_profile.is_new_member)
        self.assertTrue(admin_profile.is_realm_admin)
        stream_name = "Verona"
        stream = get_stream(stream_name, admin_profile.realm)
        do_change_stream_post_policy(stream, Stream.STREAM_POST_POLICY_RESTRICT_NEW_MEMBERS)
        self._send_and_verify_message(admin_profile, stream_name)
        admin_owned_bot = self.create_test_bot(
            short_name='whatever1',
            full_name='whatever1',
            user_profile=admin_profile,
        )
        self._send_and_verify_message(admin_owned_bot, stream_name)
        # A non-admin new member (and bots tied to them) is blocked.
        non_admin_profile = self.example_user("hamlet")
        self.login_user(non_admin_profile)
        non_admin_profile.date_joined = timezone_now() - datetime.timedelta(days=9)
        non_admin_profile.save()
        self.assertTrue(non_admin_profile.is_new_member)
        self.assertFalse(non_admin_profile.is_realm_admin)
        self._send_and_verify_message(non_admin_profile, stream_name,
                                      "New members cannot send to this stream.")
        non_admin_owned_bot = self.create_test_bot(
            short_name='whatever2',
            full_name='whatever2',
            user_profile=non_admin_profile,
        )
        self._send_and_verify_message(non_admin_owned_bot, stream_name,
                                      "New members cannot send to this stream.")
        bot_without_owner = do_create_user(
            email='free-bot@zulip.testserver',
            password='',
            realm=non_admin_profile.realm,
            full_name='freebot',
            short_name='freebot',
            bot_type=UserProfile.DEFAULT_BOT,
        )
        self._send_and_verify_message(bot_without_owner, stream_name,
                                      "New members cannot send to this stream.")
        # System bots are exempt from the post policy.
        notification_bot = get_system_bot("notification-bot@zulip.com")
        internal_send_stream_message(stream.realm, notification_bot, stream,
                                     'Test topic', 'Test message by notification bot')
        self.assertEqual(self.get_last_message().content, 'Test message by notification bot')
def test_api_message_with_default_to(self) -> None:
user = self.example_user('hamlet')
user.default_sending_stream_id = get_stream('Verona', user.realm).id
user.save()
result = self.api_post(user, "/api/v1/messages", {"type": "stream",
"client": "test suite",
"content": "Test message no to",
"topic": "Test topic"})
self.assert_json_success(result)
sent_message = self.get_last_message()
self.assertEqual(sent_message.content, "Test message no to")
def test_message_to_nonexistent_stream(self) -> None:
self.login('hamlet')
self.assertFalse(Stream.objects.filter(name="nonexistent_stream"))
result = self.client_post("/json/messages", {"type": "stream",
"to": "nonexistent_stream",
"client": "test suite",
"content": "Test message",
"topic": "Test topic"})
self.assert_json_error(result, "Stream 'nonexistent_stream' does not exist")
def test_message_to_nonexistent_stream_with_bad_characters(self) -> None:
self.login('hamlet')
self.assertFalse(Stream.objects.filter(name="""&<"'><non-existent>"""))
result = self.client_post("/json/messages", {"type": "stream",
"to": """&<"'><non-existent>""",
"client": "test suite",
"content": "Test message",
"topic": "Test topic"})
self.assert_json_error(result, "Stream '&<"'><non-existent>' does not exist")
    def test_personal_message(self) -> None:
        """Sending a PM succeeds and updates get_recent_private_conversations,
        including the special self-PM conversation (empty user_ids set)."""
        user_profile = self.example_user("hamlet")
        self.login_user(user_profile)
        othello = self.example_user('othello')
        result = self.client_post("/json/messages", {"type": "private",
                                                     "content": "Test message",
                                                     "client": "test suite",
                                                     "to": othello.email})
        self.assert_json_success(result)
        message_id = ujson.loads(result.content.decode())['id']
        recent_conversations = get_recent_private_conversations(user_profile)
        self.assertEqual(len(recent_conversations), 1)
        recent_conversation = list(recent_conversations.values())[0]
        recipient_id = list(recent_conversations.keys())[0]
        self.assertEqual(set(recent_conversation['user_ids']), {othello.id})
        self.assertEqual(recent_conversation['max_message_id'], message_id)
        # Now send a PM to ourselves; it becomes a second conversation and
        # does not disturb the first.
        result = self.client_post("/json/messages", {"type": "private",
                                                     "content": "Test message",
                                                     "client": "test suite",
                                                     "to": user_profile.email})
        self.assert_json_success(result)
        self_message_id = ujson.loads(result.content.decode())['id']
        recent_conversations = get_recent_private_conversations(user_profile)
        self.assertEqual(len(recent_conversations), 2)
        recent_conversation = recent_conversations[recipient_id]
        self.assertEqual(set(recent_conversation['user_ids']), {othello.id})
        self.assertEqual(recent_conversation['max_message_id'], message_id)
        # The remaining conversation is the self-PM: empty user_ids set.
        del recent_conversations[recipient_id]
        recent_conversation = list(recent_conversations.values())[0]
        recipient_id = list(recent_conversations.keys())[0]
        self.assertEqual(set(recent_conversation['user_ids']), set())
        self.assertEqual(recent_conversation['max_message_id'], self_message_id)
def test_personal_message_by_id(self) -> None:
self.login('hamlet')
result = self.client_post(
"/json/messages",
{
"type": "private",
"content": "Test message",
"client": "test suite",
"to": ujson.dumps([self.example_user("othello").id])
}
)
self.assert_json_success(result)
msg = self.get_last_message()
self.assertEqual("Test message", msg.content)
self.assertEqual(msg.recipient_id, self.example_user("othello").id)
    def test_group_personal_message_by_id(self) -> None:
        """A group PM can be addressed by a JSON list of recipient user IDs."""
        self.login('hamlet')
        result = self.client_post(
            "/json/messages",
            {
                "type": "private",
                "content": "Test message",
                "client": "test suite",
                "to": ujson.dumps([self.example_user("othello").id,
                                   self.example_user("cordelia").id])
            }
        )
        self.assert_json_success(result)
        msg = self.get_last_message()
        self.assertEqual("Test message", msg.content)
        # The message is addressed to the huddle of all three participants
        # (the sender is included in the huddle).
        self.assertEqual(msg.recipient_id, get_huddle_recipient(
            {self.example_user("hamlet").id,
             self.example_user("othello").id,
             self.example_user("cordelia").id}).id
        )
def test_personal_message_copying_self(self) -> None:
hamlet = self.example_user('hamlet')
othello = self.example_user('othello')
self.login_user(hamlet)
result = self.client_post("/json/messages", {
"type": "private",
"content": "Test message",
"client": "test suite",
"to": ujson.dumps([hamlet.id, othello.id])})
self.assert_json_success(result)
msg = self.get_last_message()
self.assertNotIn("Hamlet", str(msg.recipient))
def test_personal_message_to_nonexistent_user(self) -> None:
self.login('hamlet')
result = self.client_post("/json/messages", {"type": "private",
"content": "Test message",
"client": "test suite",
"to": "nonexistent"})
self.assert_json_error(result, "Invalid email 'nonexistent'")
    def test_personal_message_to_deactivated_user(self) -> None:
        """PMs to a deactivated user fail, both 1:1 and as part of a group."""
        othello = self.example_user('othello')
        cordelia = self.example_user('cordelia')
        do_deactivate_user(othello)
        self.login('hamlet')
        result = self.client_post("/json/messages", {
            "type": "private",
            "content": "Test message",
            "client": "test suite",
            "to": ujson.dumps([othello.id])})
        self.assert_json_error(result, "'{}' is no longer using Zulip.".format(othello.email))
        result = self.client_post("/json/messages", {
            "type": "private",
            "content": "Test message",
            "client": "test suite",
            "to": ujson.dumps([othello.id, cordelia.id])})
        self.assert_json_error(result, "'{}' is no longer using Zulip.".format(othello.email))
def test_invalid_type(self) -> None:
self.login('hamlet')
othello = self.example_user('othello')
result = self.client_post("/json/messages", {"type": "invalid type",
"content": "Test message",
"client": "test suite",
"to": othello.email})
self.assert_json_error(result, "Invalid message type")
def test_empty_message(self) -> None:
self.login('hamlet')
othello = self.example_user('othello')
result = self.client_post("/json/messages", {"type": "private",
"content": " ",
"client": "test suite",
"to": othello.email})
self.assert_json_error(result, "Message must not be empty")
def test_empty_string_topic(self) -> None:
self.login('hamlet')
result = self.client_post("/json/messages", {"type": "stream",
"to": "Verona",
"client": "test suite",
"content": "Test message",
"topic": ""})
self.assert_json_error(result, "Topic can't be empty")
def test_missing_topic(self) -> None:
self.login('hamlet')
result = self.client_post("/json/messages", {"type": "stream",
"to": "Verona",
"client": "test suite",
"content": "Test message"})
self.assert_json_error(result, "Missing topic")
def test_invalid_message_type(self) -> None:
self.login('hamlet')
result = self.client_post("/json/messages", {"type": "invalid",
"to": "Verona",
"client": "test suite",
"content": "Test message",
"topic": "Test topic"})
self.assert_json_error(result, "Invalid message type")
def test_private_message_without_recipients(self) -> None:
self.login('hamlet')
result = self.client_post("/json/messages", {"type": "private",
"content": "Test content",
"client": "test suite",
"to": ""})
self.assert_json_error(result, "Message must have recipients")
    def test_mirrored_huddle(self) -> None:
        """A zephyr mirror may forge a huddle message whose recipient list
        includes the mirroring user."""
        result = self.api_post(self.mit_user("starnine"),
                               "/json/messages", {"type": "private",
                                                  "sender": self.mit_email("sipbtest"),
                                                  "content": "Test message",
                                                  "client": "zephyr_mirror",
                                                  "to": ujson.dumps([self.mit_email("starnine"),
                                                                     self.mit_email("espuser")])},
                               subdomain="zephyr")
        self.assert_json_success(result)
def test_mirrored_personal(self) -> None:
result = self.api_post(self.mit_user("starnine"),
"/json/messages", {"type": "private",
"sender": self.mit_email("sipbtest"),
"content": "Test message",
"client": "zephyr_mirror",
"to": self.mit_email("starnine")},
subdomain="zephyr")
self.assert_json_success(result)
def test_mirrored_personal_browser(self) -> None:
user = self.mit_user('starnine')
self.login_user(user)
result = self.client_post("/json/messages",
{"type": "private",
"sender": self.mit_email("sipbtest"),
"content": "Test message",
"client": "zephyr_mirror",
"to": self.mit_email("starnine")},
subdomain="zephyr")
self.assert_json_error(result, "Invalid mirrored message")
def test_mirrored_personal_to_someone_else(self) -> None:
result = self.api_post(self.mit_user("starnine"), "/api/v1/messages",
{"type": "private",
"sender": self.mit_email("sipbtest"),
"content": "Test message",
"client": "zephyr_mirror",
"to": self.mit_email("espuser")},
subdomain="zephyr")
self.assert_json_error(result, "User not authorized for this query")
def test_duplicated_mirrored_huddle(self) -> None:
msg = {"type": "private",
"sender": self.mit_email("sipbtest"),
"content": "Test message",
"client": "zephyr_mirror",
"to": ujson.dumps([self.mit_email("espuser"),
self.mit_email("starnine")])}
with mock.patch('DNS.dnslookup', return_value=[['starnine:*:84233:101:Athena Consulting Exchange User,,,:/mit/starnine:/bin/bash']]):
result1 = self.api_post(self.mit_user("starnine"), "/api/v1/messages", msg,
subdomain="zephyr")
self.assert_json_success(result1)
with mock.patch('DNS.dnslookup', return_value=[['espuser:*:95494:101:Esp Classroom,,,:/mit/espuser:/bin/athena/bash']]):
result2 = self.api_post(self.mit_user("espuser"), "/api/v1/messages", msg,
subdomain="zephyr")
self.assert_json_success(result2)
self.assertEqual(ujson.loads(result1.content)['id'],
ujson.loads(result2.content)['id'])
def test_message_with_null_bytes(self) -> None:
self.login('hamlet')
post_data = {"type": "stream", "to": "Verona", "client": "test suite",
"content": " I like null bytes \x00 in my content", "topic": "Test topic"}
result = self.client_post("/json/messages", post_data)
self.assert_json_error(result, "Message must not contain null bytes")
def test_strip_message(self) -> None:
self.login('hamlet')
post_data = {"type": "stream", "to": "Verona", "client": "test suite",
"content": " I like whitespace at the end! \n\n \n", "topic": "Test topic"}
result = self.client_post("/json/messages", post_data)
self.assert_json_success(result)
sent_message = self.get_last_message()
self.assertEqual(sent_message.content, " I like whitespace at the end!")
def test_long_message(self) -> None:
self.login('hamlet')
long_message = "A" * (MAX_MESSAGE_LENGTH + 1)
post_data = {"type": "stream", "to": "Verona", "client": "test suite",
"content": long_message, "topic": "Test topic"}
result = self.client_post("/json/messages", post_data)
self.assert_json_success(result)
sent_message = self.get_last_message()
self.assertEqual(sent_message.content,
"A" * (MAX_MESSAGE_LENGTH - 20) + "\n[message truncated]")
def test_long_topic(self) -> None:
self.login('hamlet')
long_topic = "A" * (MAX_TOPIC_NAME_LENGTH + 1)
post_data = {"type": "stream", "to": "Verona", "client": "test suite",
"content": "test content", "topic": long_topic}
result = self.client_post("/json/messages", post_data)
self.assert_json_success(result)
sent_message = self.get_last_message()
self.assertEqual(sent_message.topic_name(),
"A" * (MAX_TOPIC_NAME_LENGTH - 3) + "...")
def test_send_forged_message_as_not_superuser(self) -> None:
self.login('hamlet')
result = self.client_post("/json/messages", {"type": "stream",
"to": "Verona",
"client": "test suite",
"content": "Test message",
"topic": "Test topic",
"forged": "true"})
self.assert_json_error(result, "User not authorized for this query")
def test_send_message_as_not_superuser_to_different_domain(self) -> None:
self.login('hamlet')
result = self.client_post("/json/messages", {"type": "stream",
"to": "Verona",
"client": "test suite",
"content": "Test message",
"topic": "Test topic",
"realm_str": "mit"})
self.assert_json_error(result, "User not authorized for this query")
def test_send_message_as_superuser_to_domain_that_dont_exist(self) -> None:
user = self.example_user("default_bot")
password = "test_password"
user.set_password(password)
user.is_api_super_user = True
user.save()
result = self.api_post(user,
"/api/v1/messages", {"type": "stream",
"to": "Verona",
"client": "test suite",
"content": "Test message",
"topic": "Test topic",
"realm_str": "non-existing"})
user.is_api_super_user = False
user.save()
self.assert_json_error(result, "Unknown organization 'non-existing'")
def test_send_message_when_sender_is_not_set(self) -> None:
result = self.api_post(self.mit_user("starnine"), "/api/v1/messages",
{"type": "private",
"content": "Test message",
"client": "zephyr_mirror",
"to": self.mit_email("starnine")},
subdomain="zephyr")
self.assert_json_error(result, "Missing sender")
def test_send_message_as_not_superuser_when_type_is_not_private(self) -> None:
result = self.api_post(self.mit_user("starnine"), "/api/v1/messages",
{"type": "not-private",
"sender": self.mit_email("sipbtest"),
"content": "Test message",
"client": "zephyr_mirror",
"to": self.mit_email("starnine")},
subdomain="zephyr")
self.assert_json_error(result, "User not authorized for this query")
@mock.patch("zerver.views.messages.create_mirrored_message_users")
def test_send_message_create_mirrored_message_user_returns_invalid_input(
self, create_mirrored_message_users_mock: Any) -> None:
create_mirrored_message_users_mock.side_effect = InvalidMirrorInput()
result = self.api_post(self.mit_user("starnine"), "/api/v1/messages",
{"type": "private",
"sender": self.mit_email("sipbtest"),
"content": "Test message",
"client": "zephyr_mirror",
"to": self.mit_email("starnine")},
subdomain="zephyr")
self.assert_json_error(result, "Invalid mirrored message")
@mock.patch("zerver.views.messages.create_mirrored_message_users")
def test_send_message_when_client_is_zephyr_mirror_but_string_id_is_not_zephyr(
self, create_mirrored_message_users_mock: Any) -> None:
create_mirrored_message_users_mock.return_value = mock.Mock()
user = self.mit_user("starnine")
user.realm.string_id = 'notzephyr'
user.realm.save()
result = self.api_post(user, "/api/v1/messages",
{"type": "private",
"sender": self.mit_email("sipbtest"),
"content": "Test message",
"client": "zephyr_mirror",
"to": user.email},
subdomain="notzephyr")
self.assert_json_error(result, "Zephyr mirroring is not allowed in this organization")
@mock.patch("zerver.views.messages.create_mirrored_message_users")
def test_send_message_when_client_is_zephyr_mirror_but_recipient_is_user_id(
self, create_mirrored_message_users_mock: Any) -> None:
create_mirrored_message_users_mock.return_value = mock.Mock()
user = self.mit_user("starnine")
self.login_user(user)
result = self.api_post(user, "/api/v1/messages",
{"type": "private",
"sender": self.mit_email("sipbtest"),
"content": "Test message",
"client": "zephyr_mirror",
"to": ujson.dumps([user.id])},
subdomain="zephyr")
self.assert_json_error(result, "Mirroring not allowed with recipient user IDs")
def test_send_message_irc_mirror(self) -> None:
reset_emails_in_zulip_realm()
self.login('hamlet')
bot_info = {
'full_name': 'IRC bot',
'short_name': 'irc',
}
result = self.client_post("/json/bots", bot_info)
self.assert_json_success(result)
email = "irc-bot@zulip.testserver"
user = get_user(email, get_realm('zulip'))
user.is_api_super_user = True
user.save()
user = get_user(email, get_realm('zulip'))
self.subscribe(user, "IRCland")
fake_date_sent = timezone_now() - datetime.timedelta(minutes=37)
fake_timestamp = datetime_to_timestamp(fake_date_sent)
result = self.api_post(user, "/api/v1/messages", {"type": "stream",
"forged": "true",
"time": fake_timestamp,
"sender": "irc-user@irc.zulip.com",
"content": "Test message",
"client": "irc_mirror",
"topic": "from irc",
"to": "IRCLand"})
self.assert_json_success(result)
msg = self.get_last_message()
self.assertEqual(int(datetime_to_timestamp(msg.date_sent)), int(fake_timestamp))
fake_date_sent = timezone_now() - datetime.timedelta(minutes=22)
fake_timestamp = datetime_to_timestamp(fake_date_sent)
result = self.api_post(user, "/api/v1/messages", {"type": "stream",
"forged": "yes",
"time": fake_timestamp,
"sender": "irc-user@irc.zulip.com",
"content": "Test message",
"client": "irc_mirror",
"topic": "from irc",
"to": "IRCLand"})
self.assert_json_success(result)
msg = self.get_last_message()
self.assertEqual(int(datetime_to_timestamp(msg.date_sent)), int(fake_timestamp))
def test_unsubscribed_api_super_user(self) -> None:
reset_emails_in_zulip_realm()
cordelia = self.example_user('cordelia')
stream_name = 'private_stream'
self.make_stream(stream_name, invite_only=True)
self.unsubscribe(cordelia, stream_name)
def test_with(sender_email: str, client: str, forged: bool) -> None:
payload = dict(
type="stream",
to=stream_name,
client=client,
topic='whatever',
content='whatever',
forged=ujson.dumps(forged),
)
if forged:
payload['sender'] = sender_email
cordelia.is_api_super_user = False
cordelia.save()
result = self.api_post(cordelia, "/api/v1/messages", payload)
self.assert_json_error_contains(result, 'authorized')
cordelia.is_api_super_user = True
cordelia.save()
result = self.api_post(cordelia, "/api/v1/messages", payload)
self.assert_json_success(result)
test_with(
sender_email=cordelia.email,
client='test suite',
forged=False,
)
test_with(
sender_email='irc_person@zulip.com',
client='irc_mirror',
forged=True,
)
def test_bot_can_send_to_owner_stream(self) -> None:
cordelia = self.example_user('cordelia')
bot = self.create_test_bot(
short_name='whatever',
user_profile=cordelia,
)
stream_name = 'private_stream'
self.make_stream(stream_name, invite_only=True)
payload = dict(
type="stream",
to=stream_name,
client='test suite',
topic='whatever',
content='whatever',
)
result = self.api_post(bot, "/api/v1/messages", payload)
self.assert_json_error_contains(result, 'Not authorized to send')
self.subscribe(bot.bot_owner, stream_name)
result = self.api_post(bot, "/api/v1/messages", payload)
self.assert_json_success(result)
def test_cross_realm_bots_can_use_api_on_own_subdomain(self) -> None:
notification_bot = self.notification_bot()
stream = self.make_stream("notify_channel", get_realm("zulipinternal"))
result = self.api_post(notification_bot,
"/api/v1/messages",
{"type": "stream",
"to": "notify_channel",
"client": "test suite",
"content": "Test message",
"topic": "Test topic"},
subdomain='zulipinternal')
self.assert_json_success(result)
message = self.get_last_message()
self.assertEqual(message.content, "Test message")
self.assertEqual(message.sender, notification_bot)
self.assertEqual(message.recipient.type_id, stream.id)
def test_create_mirror_user_despite_race(self) -> None:
realm = get_realm('zulip')
email = 'fred@example.com'
email_to_full_name = lambda email: 'fred'
def create_user(**kwargs: Any) -> UserProfile:
self.assertEqual(kwargs['full_name'], 'fred')
self.assertEqual(kwargs['email'], email)
self.assertEqual(kwargs['active'], False)
self.assertEqual(kwargs['is_mirror_dummy'], True)
kwargs['bot_type'] = None
kwargs['bot_owner'] = None
kwargs['tos_version'] = None
kwargs['timezone'] = timezone_now()
create_user_profile(**kwargs).save()
raise IntegrityError()
with mock.patch('zerver.lib.actions.create_user',
side_effect=create_user) as m:
mirror_fred_user = create_mirror_user_if_needed(
realm,
email,
email_to_full_name,
)
self.assertEqual(mirror_fred_user.delivery_email, email)
m.assert_called()
def test_guest_user(self) -> None:
sender = self.example_user('polonius')
stream_name = 'public stream'
self.make_stream(stream_name, invite_only=False)
payload = dict(
type="stream",
to=stream_name,
client='test suite',
topic='whatever',
content='whatever',
)
result = self.api_post(sender, "/api/v1/messages", payload)
self.assert_json_error(result, "Not authorized to send to stream 'public stream'")
self.subscribe(sender, stream_name)
# Guest user can send message to subscribed public streams
result = self.api_post(sender, "/api/v1/messages", payload)
self.assert_json_success(result)
class ScheduledMessageTest(ZulipTestCase):
    """Tests for scheduling messages for later delivery (deliver_at)."""

    def last_scheduled_message(self) -> ScheduledMessage:
        """Return the most recently created ScheduledMessage row."""
        return ScheduledMessage.objects.all().order_by('-id')[0]

    def do_schedule_message(self, msg_type: str, to: str, msg: str,
                            defer_until: str='', tz_guess: str='',
                            delivery_type: str='send_later',
                            realm_str: str='zulip') -> HttpResponse:
        """Post a message with scheduling parameters as Hamlet and return the response."""
        self.login('hamlet')
        topic_name = ''
        if msg_type == 'stream':
            topic_name = 'Test topic'
        payload = {"type": msg_type,
                   "to": to,
                   "client": "test suite",
                   "content": msg,
                   "topic": topic_name,
                   "realm_str": realm_str,
                   "delivery_type": delivery_type,
                   "tz_guess": tz_guess}
        if defer_until:
            payload["deliver_at"] = defer_until
        return self.client_post("/json/messages", payload)

    def test_schedule_message(self) -> None:
        """Happy paths: scheduled sends, reminders, and timezone handling."""
        content = "Test message"
        defer_until = timezone_now().replace(tzinfo=None) + datetime.timedelta(days=1)
        defer_until_str = str(defer_until)

        # Scheduling a message to a subscribed stream succeeds.
        response = self.do_schedule_message('stream', 'Verona',
                                            content + ' 1', defer_until_str)
        scheduled = self.last_scheduled_message()
        self.assert_json_success(response)
        self.assertEqual(scheduled.content, 'Test message 1')
        self.assertEqual(scheduled.topic_name(), 'Test topic')
        self.assertEqual(scheduled.scheduled_timestamp, convert_to_UTC(defer_until))
        self.assertEqual(scheduled.delivery_type, ScheduledMessage.SEND_LATER)

        # Reminders on streams succeed.
        response = self.do_schedule_message('stream', 'Verona',
                                            content + ' 2', defer_until_str,
                                            delivery_type='remind')
        scheduled = self.last_scheduled_message()
        self.assert_json_success(response)
        self.assertEqual(scheduled.delivery_type, ScheduledMessage.REMIND)

        # Scheduling a private message succeeds.
        othello = self.example_user('othello')
        hamlet = self.example_user('hamlet')
        response = self.do_schedule_message('private', othello.email,
                                            content + ' 3', defer_until_str)
        scheduled = self.last_scheduled_message()
        self.assert_json_success(response)
        self.assertEqual(scheduled.content, 'Test message 3')
        self.assertEqual(scheduled.scheduled_timestamp, convert_to_UTC(defer_until))
        self.assertEqual(scheduled.delivery_type, ScheduledMessage.SEND_LATER)

        # Setting a reminder in a PM to another user is an error.
        response = self.do_schedule_message('private', othello.email,
                                            content + ' 4', defer_until_str,
                                            delivery_type='remind')
        self.assert_json_error(response, 'Reminders can only be set for streams.')

        # But a reminder in a PM thread with oneself is allowed (used by the
        # reminders feature in the message-actions popover).
        response = self.do_schedule_message('private', hamlet.email,
                                            content + ' 5', defer_until_str,
                                            delivery_type='remind')
        scheduled = self.last_scheduled_message()
        self.assert_json_success(response)
        self.assertEqual(scheduled.content, 'Test message 5')
        self.assertEqual(scheduled.delivery_type, ScheduledMessage.REMIND)

        # A browser-guessed timezone is used to interpret the timestamp.
        tz_guess = 'Asia/Kolkata'
        response = self.do_schedule_message('stream', 'Verona', content + ' 6',
                                            defer_until_str, tz_guess=tz_guess)
        scheduled = self.last_scheduled_message()
        self.assert_json_success(response)
        self.assertEqual(scheduled.content, 'Test message 6')
        local_tz = get_timezone(tz_guess)
        # mypy does not know about pytz's localize/normalize on tzinfo.
        utz_defer_until = local_tz.normalize(local_tz.localize(defer_until))  # type: ignore[attr-defined]
        self.assertEqual(scheduled.scheduled_timestamp,
                         convert_to_UTC(utz_defer_until))
        self.assertEqual(scheduled.delivery_type, ScheduledMessage.SEND_LATER)

        # A timezone configured on the user's profile is used the same way
        # to interpret the timestamp.
        user = self.example_user("hamlet")
        user.timezone = 'US/Pacific'
        user.save(update_fields=['timezone'])
        response = self.do_schedule_message('stream', 'Verona',
                                            content + ' 7', defer_until_str)
        scheduled = self.last_scheduled_message()
        self.assert_json_success(response)
        self.assertEqual(scheduled.content, 'Test message 7')
        local_tz = get_timezone(user.timezone)
        # mypy does not know about pytz's localize/normalize on tzinfo.
        utz_defer_until = local_tz.normalize(local_tz.localize(defer_until))  # type: ignore[attr-defined]
        self.assertEqual(scheduled.scheduled_timestamp,
                         convert_to_UTC(utz_defer_until))
        self.assertEqual(scheduled.delivery_type, ScheduledMessage.SEND_LATER)

    def test_scheduling_in_past(self) -> None:
        """Scheduling a delivery time in the past is rejected."""
        content = "Test message"
        defer_until = timezone_now()
        defer_until_str = str(defer_until)

        response = self.do_schedule_message('stream', 'Verona',
                                            content + ' 1', defer_until_str)
        self.assert_json_error(response, 'Time must be in the future.')

    def test_invalid_timestamp(self) -> None:
        """An unparseable deliver_at string is rejected."""
        content = "Test message"
        defer_until = 'Missed the timestamp'

        response = self.do_schedule_message('stream', 'Verona',
                                            content + ' 1', defer_until)
        self.assert_json_error(response, 'Invalid time format')

    def test_missing_deliver_at(self) -> None:
        """Omitting deliver_at entirely is rejected."""
        content = "Test message"

        response = self.do_schedule_message('stream', 'Verona',
                                            content + ' 1')
        self.assert_json_error(response, 'Missing deliver_at in a request for delayed message delivery')
class EditMessageTest(ZulipTestCase):
def check_topic(self,
msg_id: int,
topic_name: str) -> None:
msg = Message.objects.get(id=msg_id)
self.assertEqual(msg.topic_name(), topic_name)
def check_message(self,
msg_id: int,
topic_name: str,
content: str) -> None:
msg = Message.objects.get(id=msg_id)
self.assertEqual(msg.topic_name(), topic_name)
self.assertEqual(msg.content, content)
with queries_captured() as queries:
(fetch_message_dict,) = messages_for_ids(
message_ids = [msg.id],
user_message_flags={msg_id: []},
search_fields=dict(),
apply_markdown=False,
client_gravatar=False,
allow_edit_history=True,
)
self.assertEqual(len(queries), 2)
for query in queries:
self.assertNotIn('message', query['sql'])
self.assertEqual(
fetch_message_dict[TOPIC_NAME],
msg.topic_name()
)
self.assertEqual(
fetch_message_dict['content'],
msg.content
)
self.assertEqual(
fetch_message_dict['sender_id'],
msg.sender_id
)
if msg.edit_history:
self.assertEqual(
fetch_message_dict['edit_history'],
ujson.loads(msg.edit_history)
)
def test_query_count_on_to_dict_uncached(self) -> None:
user = self.example_user("hamlet")
self.login_user(user)
stream_name = "public_stream"
self.subscribe(user, stream_name)
message_one_id = self.send_stream_message(user,
stream_name, "Message one")
later_subscribed_user = self.example_user("cordelia")
self.subscribe(later_subscribed_user, stream_name)
message_two_id = self.send_stream_message(user,
stream_name, "Message two")
message_ids = [message_one_id, message_two_id]
messages = [Message.objects.select_related().get(id=message_id)
for message_id in message_ids]
with queries_captured() as queries:
MessageDict.to_dict_uncached(messages)
self.assertEqual(len(queries), 4)
def test_save_message(self) -> None:
self.login('hamlet')
msg_id = self.send_stream_message(self.example_user("hamlet"), "Scotland",
topic_name="editing", content="before edit")
result = self.client_patch("/json/messages/" + str(msg_id), {
'message_id': msg_id,
'content': 'after edit'
})
self.assert_json_success(result)
self.check_message(msg_id, topic_name="editing", content="after edit")
result = self.client_patch("/json/messages/" + str(msg_id), {
'message_id': msg_id,
'topic': 'edited'
})
self.assert_json_success(result)
self.check_topic(msg_id, topic_name="edited")
def test_fetch_raw_message(self) -> None:
self.login('hamlet')
msg_id = self.send_personal_message(
from_user=self.example_user("hamlet"),
to_user=self.example_user("cordelia"),
content="**before** edit",
)
result = self.client_get('/json/messages/' + str(msg_id))
self.assert_json_success(result)
self.assertEqual(result.json()['raw_content'], '**before** edit')
result = self.client_get('/json/messages/999999')
self.assert_json_error(result, 'Invalid message(s)')
self.login('cordelia')
result = self.client_get('/json/messages/' + str(msg_id))
self.assert_json_success(result)
self.login('othello')
result = self.client_get('/json/messages/' + str(msg_id))
self.assert_json_error(result, 'Invalid message(s)')
def test_fetch_raw_message_stream_wrong_realm(self) -> None:
user_profile = self.example_user("hamlet")
self.login_user(user_profile)
stream = self.make_stream('public_stream')
self.subscribe(user_profile, stream.name)
msg_id = self.send_stream_message(user_profile, stream.name,
topic_name="test", content="test")
result = self.client_get('/json/messages/' + str(msg_id))
self.assert_json_success(result)
mit_user = self.mit_user('sipbtest')
self.login_user(mit_user)
result = self.client_get('/json/messages/' + str(msg_id), subdomain="zephyr")
self.assert_json_error(result, 'Invalid message(s)')
def test_fetch_raw_message_private_stream(self) -> None:
user_profile = self.example_user("hamlet")
self.login_user(user_profile)
stream = self.make_stream('private_stream', invite_only=True)
self.subscribe(user_profile, stream.name)
msg_id = self.send_stream_message(user_profile, stream.name,
topic_name="test", content="test")
result = self.client_get('/json/messages/' + str(msg_id))
self.assert_json_success(result)
self.login('othello')
result = self.client_get('/json/messages/' + str(msg_id))
self.assert_json_error(result, 'Invalid message(s)')
def test_edit_message_no_permission(self) -> None:
self.login('hamlet')
msg_id = self.send_stream_message(self.example_user("iago"), "Scotland",
topic_name="editing", content="before edit")
result = self.client_patch("/json/messages/" + str(msg_id), {
'message_id': msg_id,
'content': 'content after edit',
})
self.assert_json_error(result, "You don't have permission to edit this message")
def test_edit_message_no_changes(self) -> None:
self.login('hamlet')
msg_id = self.send_stream_message(self.example_user("hamlet"), "Scotland",
topic_name="editing", content="before edit")
result = self.client_patch("/json/messages/" + str(msg_id), {
'message_id': msg_id,
})
self.assert_json_error(result, "Nothing to change")
def test_edit_message_no_topic(self) -> None:
self.login('hamlet')
msg_id = self.send_stream_message(self.example_user("hamlet"), "Scotland",
topic_name="editing", content="before edit")
result = self.client_patch("/json/messages/" + str(msg_id), {
'message_id': msg_id,
'topic': ' '
})
self.assert_json_error(result, "Topic can't be empty")
def test_edit_message_no_content(self) -> None:
self.login('hamlet')
msg_id = self.send_stream_message(self.example_user("hamlet"), "Scotland",
topic_name="editing", content="before edit")
result = self.client_patch("/json/messages/" + str(msg_id), {
'message_id': msg_id,
'content': ' '
})
self.assert_json_success(result)
content = Message.objects.filter(id=msg_id).values_list('content', flat = True)[0]
self.assertEqual(content, "(deleted)")
def test_edit_message_history_disabled(self) -> None:
user_profile = self.example_user("hamlet")
do_set_realm_property(user_profile.realm, "allow_edit_history", False)
self.login('hamlet')
msg_id_1 = self.send_stream_message(self.example_user("hamlet"),
"Denmark",
topic_name="editing",
content="content before edit")
new_content_1 = 'content after edit'
result_1 = self.client_patch("/json/messages/" + str(msg_id_1), {
'message_id': msg_id_1, 'content': new_content_1
})
self.assert_json_success(result_1)
result = self.client_get(
"/json/messages/" + str(msg_id_1) + "/history")
self.assert_json_error(result, "Message edit history is disabled in this organization")
# edit history data attached.
messages_result = self.client_get("/json/messages",
{"anchor": msg_id_1, "num_before": 0, "num_after": 10})
self.assert_json_success(messages_result)
json_messages = ujson.loads(
messages_result.content.decode('utf-8'))
for msg in json_messages['messages']:
self.assertNotIn("edit_history", msg)
def test_edit_message_history(self) -> None:
self.login('hamlet')
# Single-line edit
msg_id_1 = self.send_stream_message(
self.example_user("hamlet"),
"Scotland",
topic_name="editing",
content="content before edit")
new_content_1 = 'content after edit'
result_1 = self.client_patch("/json/messages/" + str(msg_id_1), {
'message_id': msg_id_1, 'content': new_content_1
})
self.assert_json_success(result_1)
message_edit_history_1 = self.client_get(
"/json/messages/" + str(msg_id_1) + "/history")
json_response_1 = ujson.loads(
message_edit_history_1.content.decode('utf-8'))
message_history_1 = json_response_1['message_history']
# Check content of message after edit.
self.assertEqual(message_history_1[0]['rendered_content'],
'<p>content before edit</p>')
self.assertEqual(message_history_1[1]['rendered_content'],
'<p>content after edit</p>')
self.assertEqual(message_history_1[1]['content_html_diff'],
('<p>content '
'<span class="highlight_text_inserted">after</span> '
'<span class="highlight_text_deleted">before</span>'
' edit</p>'))
# Check content of message before edit.
self.assertEqual(message_history_1[1]['prev_rendered_content'],
'<p>content before edit</p>')
# Edits on new lines
msg_id_2 = self.send_stream_message(
self.example_user("hamlet"),
"Scotland",
topic_name="editing",
content=('content before edit, line 1\n'
'\n'
'content before edit, line 3'))
new_content_2 = ('content before edit, line 1\n'
'content after edit, line 2\n'
'content before edit, line 3')
result_2 = self.client_patch("/json/messages/" + str(msg_id_2), {
'message_id': msg_id_2, 'content': new_content_2
})
self.assert_json_success(result_2)
message_edit_history_2 = self.client_get(
"/json/messages/" + str(msg_id_2) + "/history")
json_response_2 = ujson.loads(
message_edit_history_2.content.decode('utf-8'))
message_history_2 = json_response_2['message_history']
self.assertEqual(message_history_2[0]['rendered_content'],
('<p>content before edit, line 1</p>\n'
'<p>content before edit, line 3</p>'))
self.assertEqual(message_history_2[1]['rendered_content'],
('<p>content before edit, line 1<br>\n'
'content after edit, line 2<br>\n'
'content before edit, line 3</p>'))
self.assertEqual(message_history_2[1]['content_html_diff'],
('<p>content before edit, line 1<br> '
'content <span class="highlight_text_inserted">after edit, line 2<br> '
'content</span> before edit, line 3</p>'))
self.assertEqual(message_history_2[1]['prev_rendered_content'],
('<p>content before edit, line 1</p>\n'
'<p>content before edit, line 3</p>'))
def test_edit_link(self) -> None:
# Link editing
self.login('hamlet')
msg_id_1 = self.send_stream_message(
self.example_user("hamlet"),
"Scotland",
topic_name="editing",
content="Here is a link to [zulip](www.zulip.org).")
new_content_1 = 'Here is a link to [zulip](www.zulipchat.com).'
result_1 = self.client_patch("/json/messages/" + str(msg_id_1), {
'message_id': msg_id_1, 'content': new_content_1
})
self.assert_json_success(result_1)
message_edit_history_1 = self.client_get(
"/json/messages/" + str(msg_id_1) + "/history")
json_response_1 = ujson.loads(
message_edit_history_1.content.decode('utf-8'))
message_history_1 = json_response_1['message_history']
# Check content of message after edit.
self.assertEqual(message_history_1[0]['rendered_content'],
'<p>Here is a link to '
'<a href="http://www.zulip.org">zulip</a>.</p>')
self.assertEqual(message_history_1[1]['rendered_content'],
'<p>Here is a link to '
'<a href="http://www.zulipchat.com">zulip</a>.</p>')
self.assertEqual(message_history_1[1]['content_html_diff'],
('<p>Here is a link to <a href="http://www.zulipchat.com"'
'>zulip '
'<span class="highlight_text_inserted"> Link: http://www.zulipchat.com .'
'</span> <span class="highlight_text_deleted"> Link: http://www.zulip.org .'
'</span> </a></p>'))
def test_edit_history_unedited(self) -> None:
self.login('hamlet')
msg_id = self.send_stream_message(
self.example_user('hamlet'),
'Scotland',
topic_name='editing',
content='This message has not been edited.')
result = self.client_get('/json/messages/{}/history'.format(msg_id))
self.assert_json_success(result)
message_history = result.json()['message_history']
self.assert_length(message_history, 1)
def test_user_info_for_updates(self) -> None:
hamlet = self.example_user('hamlet')
cordelia = self.example_user('cordelia')
self.login_user(hamlet)
self.subscribe(hamlet, 'Scotland')
self.subscribe(cordelia, 'Scotland')
msg_id = self.send_stream_message(hamlet, 'Scotland',
content='@**Cordelia Lear**')
user_info = get_user_info_for_message_updates(msg_id)
message_user_ids = user_info['message_user_ids']
self.assertIn(hamlet.id, message_user_ids)
self.assertIn(cordelia.id, message_user_ids)
mention_user_ids = user_info['mention_user_ids']
self.assertEqual(mention_user_ids, {cordelia.id})
def test_edit_cases(self) -> None:
self.login('hamlet')
hamlet = self.example_user('hamlet')
msg_id = self.send_stream_message(self.example_user("hamlet"), "Scotland",
topic_name="topic 1", content="content 1")
result = self.client_patch("/json/messages/" + str(msg_id), {
'message_id': msg_id,
'content': 'content 2',
})
self.assert_json_success(result)
history = ujson.loads(Message.objects.get(id=msg_id).edit_history)
self.assertEqual(history[0]['prev_content'], 'content 1')
self.assertEqual(history[0]['user_id'], hamlet.id)
self.assertEqual(set(history[0].keys()),
{'timestamp', 'prev_content', 'user_id',
'prev_rendered_content', 'prev_rendered_content_version'})
result = self.client_patch("/json/messages/" + str(msg_id), {
'message_id': msg_id,
'topic': 'topic 2',
})
self.assert_json_success(result)
history = ujson.loads(Message.objects.get(id=msg_id).edit_history)
self.assertEqual(history[0][LEGACY_PREV_TOPIC], 'topic 1')
self.assertEqual(history[0]['user_id'], hamlet.id)
self.assertEqual(set(history[0].keys()), {'timestamp', LEGACY_PREV_TOPIC, 'user_id'})
result = self.client_patch("/json/messages/" + str(msg_id), {
'message_id': msg_id,
'content': 'content 3',
'topic': 'topic 3',
})
self.assert_json_success(result)
history = ujson.loads(Message.objects.get(id=msg_id).edit_history)
self.assertEqual(history[0]['prev_content'], 'content 2')
self.assertEqual(history[0][LEGACY_PREV_TOPIC], 'topic 2')
self.assertEqual(history[0]['user_id'], hamlet.id)
self.assertEqual(set(history[0].keys()),
{'timestamp', LEGACY_PREV_TOPIC, 'prev_content', 'user_id',
'prev_rendered_content', 'prev_rendered_content_version'})
result = self.client_patch("/json/messages/" + str(msg_id), {
'message_id': msg_id,
'content': 'content 4',
})
self.assert_json_success(result)
history = ujson.loads(Message.objects.get(id=msg_id).edit_history)
self.assertEqual(history[0]['prev_content'], 'content 3')
self.assertEqual(history[0]['user_id'], hamlet.id)
self.login('iago')
result = self.client_patch("/json/messages/" + str(msg_id), {
'message_id': msg_id,
'topic': 'topic 4',
})
self.assert_json_success(result)
history = ujson.loads(Message.objects.get(id=msg_id).edit_history)
self.assertEqual(history[0][LEGACY_PREV_TOPIC], 'topic 3')
self.assertEqual(history[0]['user_id'], self.example_user('iago').id)
history = ujson.loads(Message.objects.get(id=msg_id).edit_history)
self.assertEqual(history[0][LEGACY_PREV_TOPIC], 'topic 3')
self.assertEqual(history[2][LEGACY_PREV_TOPIC], 'topic 2')
self.assertEqual(history[3][LEGACY_PREV_TOPIC], 'topic 1')
self.assertEqual(history[1]['prev_content'], 'content 3')
self.assertEqual(history[2]['prev_content'], 'content 2')
self.assertEqual(history[4]['prev_content'], 'content 1')
# Now, we verify that the edit history data sent back has the
# correct filled-out fields
message_edit_history = self.client_get("/json/messages/" + str(msg_id) + "/history")
json_response = ujson.loads(message_edit_history.content.decode('utf-8'))
# We reverse the message history view output so that the IDs line up with the above.
message_history = list(reversed(json_response['message_history']))
i = 0
for entry in message_history:
expected_entries = {'content', 'rendered_content', 'topic', 'timestamp', 'user_id'}
if i in {0, 2, 3}:
expected_entries.add('prev_topic')
if i in {1, 2, 4}:
expected_entries.add('prev_content')
expected_entries.add('prev_rendered_content')
expected_entries.add('content_html_diff')
i += 1
self.assertEqual(expected_entries, set(entry.keys()))
self.assertEqual(len(message_history), 6)
self.assertEqual(message_history[0]['prev_topic'], 'topic 3')
self.assertEqual(message_history[0]['topic'], 'topic 4')
self.assertEqual(message_history[1]['topic'], 'topic 3')
self.assertEqual(message_history[2]['topic'], 'topic 3')
self.assertEqual(message_history[2]['prev_topic'], 'topic 2')
self.assertEqual(message_history[3]['topic'], 'topic 2')
self.assertEqual(message_history[3]['prev_topic'], 'topic 1')
self.assertEqual(message_history[4]['topic'], 'topic 1')
self.assertEqual(message_history[0]['content'], 'content 4')
self.assertEqual(message_history[1]['content'], 'content 4')
self.assertEqual(message_history[1]['prev_content'], 'content 3')
self.assertEqual(message_history[2]['content'], 'content 3')
self.assertEqual(message_history[2]['prev_content'], 'content 2')
self.assertEqual(message_history[3]['content'], 'content 2')
self.assertEqual(message_history[4]['content'], 'content 2')
self.assertEqual(message_history[4]['prev_content'], 'content 1')
self.assertEqual(message_history[5]['content'], 'content 1')
self.assertEqual(message_history[5]['topic'], 'topic 1')
    def test_edit_message_content_limit(self) -> None:
        """Exercise the realm message-editing settings.

        Content edits must respect message_content_edit_limit_seconds
        (0 means no time limit), topic-only edits are always allowed while
        editing is enabled, and nothing is editable once
        allow_message_editing is off.
        """
        def set_message_editing_params(allow_message_editing: bool,
                                       message_content_edit_limit_seconds: int,
                                       allow_community_topic_editing: bool) -> None:
            # Update the realm's editing policy via the API and confirm
            # the settings change itself succeeded.
            result = self.client_patch("/json/realm", {
                'allow_message_editing': ujson.dumps(allow_message_editing),
                'message_content_edit_limit_seconds': message_content_edit_limit_seconds,
                'allow_community_topic_editing': ujson.dumps(allow_community_topic_editing),
            })
            self.assert_json_success(result)
        def do_edit_message_assert_success(id_: int, unique_str: str, topic_only: bool=False) -> None:
            # Edit the topic (and, unless topic_only, the content) and
            # verify the edit was applied.
            new_topic = 'topic' + unique_str
            new_content = 'content' + unique_str
            params_dict = {'message_id': id_, 'topic': new_topic}
            if not topic_only:
                params_dict['content'] = new_content
            result = self.client_patch("/json/messages/" + str(id_), params_dict)
            self.assert_json_success(result)
            if topic_only:
                self.check_topic(id_, topic_name=new_topic)
            else:
                self.check_message(id_, topic_name=new_topic, content=new_content)
        def do_edit_message_assert_error(id_: int, unique_str: str, error: str,
                                         topic_only: bool=False) -> None:
            # Attempt the edit, expect `error`, and verify the message is
            # left completely unchanged.
            message = Message.objects.get(id=id_)
            old_topic = message.topic_name()
            old_content = message.content
            new_topic = 'topic' + unique_str
            new_content = 'content' + unique_str
            params_dict = {'message_id': id_, 'topic': new_topic}
            if not topic_only:
                params_dict['content'] = new_content
            result = self.client_patch("/json/messages/" + str(id_), params_dict)
            message = Message.objects.get(id=id_)
            self.assert_json_error(result, error)
            msg = Message.objects.get(id=id_)
            self.assertEqual(msg.topic_name(), old_topic)
            self.assertEqual(msg.content, old_content)
        self.login('iago')
        # send a message in the past (180s ago) so the time-limit checks
        # below have something to compare against
        id_ = self.send_stream_message(self.example_user("iago"), "Scotland",
                                       content="content", topic_name="topic")
        message = Message.objects.get(id=id_)
        message.date_sent = message.date_sent - datetime.timedelta(seconds=180)
        message.save()
        # test the various possible message editing settings
        # high enough time limit, all edits allowed
        set_message_editing_params(True, 240, False)
        do_edit_message_assert_success(id_, 'A')
        # out of time, only topic editing allowed
        set_message_editing_params(True, 120, False)
        do_edit_message_assert_success(id_, 'B', True)
        do_edit_message_assert_error(id_, 'C', "The time limit for editing this message has passed")
        # infinite time, all edits allowed
        set_message_editing_params(True, 0, False)
        do_edit_message_assert_success(id_, 'D')
        # without allow_message_editing, nothing is allowed
        set_message_editing_params(False, 240, False)
        do_edit_message_assert_error(id_, 'E', "Your organization has turned off message editing", True)
        set_message_editing_params(False, 120, False)
        do_edit_message_assert_error(id_, 'F', "Your organization has turned off message editing", True)
        set_message_editing_params(False, 0, False)
        do_edit_message_assert_error(id_, 'G', "Your organization has turned off message editing", True)
    def test_allow_community_topic_editing(self) -> None:
        """Verify allow_community_topic_editing: who may edit topics, the
        time limit applied to non-admin topic edits on old messages, and
        the "(no topic)" exception."""
        def set_message_editing_params(allow_message_editing: bool,
                                       message_content_edit_limit_seconds: int,
                                       allow_community_topic_editing: bool) -> None:
            # Update the realm's editing policy via the API.
            result = self.client_patch("/json/realm", {
                'allow_message_editing': ujson.dumps(allow_message_editing),
                'message_content_edit_limit_seconds': message_content_edit_limit_seconds,
                'allow_community_topic_editing': ujson.dumps(allow_community_topic_editing),
            })
            self.assert_json_success(result)
        def do_edit_message_assert_success(id_: int, unique_str: str) -> None:
            # Topic-only edit expected to succeed.
            new_topic = 'topic' + unique_str
            params_dict = {'message_id': id_, 'topic': new_topic}
            result = self.client_patch("/json/messages/" + str(id_), params_dict)
            self.assert_json_success(result)
            self.check_topic(id_, topic_name=new_topic)
        def do_edit_message_assert_error(id_: int, unique_str: str, error: str) -> None:
            # Topic-only edit expected to fail with `error`, leaving the
            # message unchanged.
            message = Message.objects.get(id=id_)
            old_topic = message.topic_name()
            old_content = message.content
            new_topic = 'topic' + unique_str
            params_dict = {'message_id': id_, 'topic': new_topic}
            result = self.client_patch("/json/messages/" + str(id_), params_dict)
            message = Message.objects.get(id=id_)
            self.assert_json_error(result, error)
            msg = Message.objects.get(id=id_)
            self.assertEqual(msg.topic_name(), old_topic)
            self.assertEqual(msg.content, old_content)
        self.login('iago')
        # send a message in the past
        id_ = self.send_stream_message(self.example_user("hamlet"), "Scotland",
                                       content="content", topic_name="topic")
        message = Message.objects.get(id=id_)
        message.date_sent = message.date_sent - datetime.timedelta(seconds=180)
        message.save()
        # any user can edit the topic of a message
        set_message_editing_params(True, 0, True)
        # log in as a new user
        self.login('cordelia')
        do_edit_message_assert_success(id_, 'A')
        # only admins can edit the topics of messages
        self.login('iago')
        set_message_editing_params(True, 0, False)
        do_edit_message_assert_success(id_, 'B')
        self.login('cordelia')
        do_edit_message_assert_error(id_, 'C', "You don't have permission to edit this message")
        # with message editing off entirely, even topic edits are blocked
        self.login('iago')
        set_message_editing_params(False, 0, True)
        self.login('cordelia')
        do_edit_message_assert_error(id_, 'D', "Your organization has turned off message editing")
        # age the message further so it exceeds the community topic-editing
        # time limit for non-admins (admins remain exempt)
        message.date_sent = message.date_sent - datetime.timedelta(seconds=90000)
        message.save()
        self.login('iago')
        set_message_editing_params(True, 0, True)
        do_edit_message_assert_success(id_, 'E')
        self.login('cordelia')
        do_edit_message_assert_error(id_, 'F', "The time limit for editing this message has passed")
        # "(no topic)" messages are editable regardless of age
        message.set_topic_name("(no topic)")
        message.save()
        self.login('cordelia')
        do_edit_message_assert_success(id_, 'D')
    @mock.patch("zerver.lib.actions.send_event")
    def test_edit_topic_public_history_stream(self, mock_send_event: mock.MagicMock) -> None:
        """Verify exactly which users are notified of topic edits on a
        public-history stream, covering long-term-idle subscribers and
        users who unsubscribed but keep a UserMessage row."""
        stream_name = "Macbeth"
        hamlet = self.example_user("hamlet")
        cordelia = self.example_user("cordelia")
        self.make_stream(stream_name, history_public_to_subscribers=True)
        self.subscribe(hamlet, stream_name)
        self.login_user(hamlet)
        message_id = self.send_stream_message(hamlet, stream_name, "Where am I?")
        self.login_user(cordelia)
        self.subscribe(cordelia, stream_name)
        message = Message.objects.get(id=message_id)
        def do_update_message_topic_success(user_profile: UserProfile, message: Message,
                                            topic_name: str, users_to_be_notified: List[Dict[str, Any]]) -> None:
            # Perform the topic edit directly (bypassing the API) and assert
            # send_event was called with exactly the expected user list.
            do_update_message(
                user_profile=user_profile,
                message=message,
                new_stream=None,
                topic_name=topic_name,
                propagate_mode="change_later",
                content=None,
                rendered_content=None,
                prior_mention_user_ids=set(),
                mention_user_ids=set(),
                mention_data=None,
            )
            mock_send_event.assert_called_with(mock.ANY, mock.ANY, users_to_be_notified)
        def notify(user_id: int) -> Dict[str, Any]:
            # Build the expected per-user notification payload: the user who
            # owns the UserMessage row gets their actual flags; everyone
            # else is reported with just ["read"].
            um = UserMessage.objects.get(message=message_id)
            if um.user_profile_id == user_id:
                return {
                    "id": user_id,
                    "flags": um.flags_list()
                }
            else:
                return {
                    "id": user_id,
                    "flags": ["read"]
                }
        users_to_be_notified = list(map(notify, [hamlet.id, cordelia.id]))
        do_update_message_topic_success(cordelia, message, "Othello eats apple", users_to_be_notified)
        # Long-term idle users are excluded from the notification list.
        cordelia.long_term_idle = True
        cordelia.save()
        users_to_be_notified = list(map(notify, [hamlet.id]))
        do_update_message_topic_success(cordelia, message, "Another topic idle", users_to_be_notified)
        cordelia.long_term_idle = False
        cordelia.save()
        # Even if Hamlet unsubscribes the stream, he should be notified when the topic is changed
        # because he has a UserMessage row.
        self.unsubscribe(hamlet, stream_name)
        users_to_be_notified = list(map(notify, [hamlet.id, cordelia.id]))
        do_update_message_topic_success(cordelia, message, "Another topic", users_to_be_notified)
        # Hamlet subscribes to the stream again and Cordelia unsubscribes, then Hamlet changes
        # the message topic. Cordelia won't receive any updates when a message on that stream is
        # changed, since she is no longer a subscriber.
        self.subscribe(hamlet, stream_name)
        self.unsubscribe(cordelia, stream_name)
        self.login_user(hamlet)
        users_to_be_notified = list(map(notify, [hamlet.id]))
        do_update_message_topic_success(hamlet, message, "Change again", users_to_be_notified)
@mock.patch("zerver.lib.actions.send_event")
def test_wildcard_mention(self, mock_send_event: mock.MagicMock) -> None:
stream_name = "Macbeth"
hamlet = self.example_user("hamlet")
cordelia = self.example_user("cordelia")
self.make_stream(stream_name, history_public_to_subscribers=True)
self.subscribe(hamlet, stream_name)
self.subscribe(cordelia, stream_name)
self.login_user(hamlet)
message_id = self.send_stream_message(hamlet, stream_name, "Hello everyone")
def notify(user_id: int) -> Dict[str, Any]:
return {
"id": user_id,
"flags": ["wildcard_mentioned"]
}
users_to_be_notified = sorted(map(notify, [cordelia.id, hamlet.id]), key=itemgetter("id"))
result = self.client_patch("/json/messages/" + str(message_id), {
'message_id': message_id,
'content': 'Hello @**everyone**',
})
self.assert_json_success(result)
# Extract the send_event call where event type is 'update_message'.
# Here we assert wildcard_mention_user_ids has been set properly.
called = False
for call_args in mock_send_event.call_args_list:
(arg_realm, arg_event, arg_notified_users) = call_args[0]
if arg_event['type'] == 'update_message':
self.assertEqual(arg_event['type'], 'update_message')
self.assertEqual(arg_event['wildcard_mention_user_ids'], [cordelia.id, hamlet.id])
self.assertEqual(sorted(arg_notified_users, key=itemgetter("id")), users_to_be_notified)
called = True
self.assertTrue(called)
def test_propagate_topic_forward(self) -> None:
self.login('hamlet')
id1 = self.send_stream_message(self.example_user("hamlet"), "Scotland",
topic_name="topic1")
id2 = self.send_stream_message(self.example_user("iago"), "Scotland",
topic_name="topic1")
id3 = self.send_stream_message(self.example_user("iago"), "Rome",
topic_name="topic1")
id4 = self.send_stream_message(self.example_user("hamlet"), "Scotland",
topic_name="topic2")
id5 = self.send_stream_message(self.example_user("iago"), "Scotland",
topic_name="topic1")
result = self.client_patch("/json/messages/" + str(id1), {
'message_id': id1,
'topic': 'edited',
'propagate_mode': 'change_later'
})
self.assert_json_success(result)
self.check_topic(id1, topic_name="edited")
self.check_topic(id2, topic_name="edited")
self.check_topic(id3, topic_name="topic1")
self.check_topic(id4, topic_name="topic2")
self.check_topic(id5, topic_name="edited")
def test_propagate_all_topics(self) -> None:
self.login('hamlet')
id1 = self.send_stream_message(self.example_user("hamlet"), "Scotland",
topic_name="topic1")
id2 = self.send_stream_message(self.example_user("hamlet"), "Scotland",
topic_name="topic1")
id3 = self.send_stream_message(self.example_user("iago"), "Rome",
topic_name="topic1")
id4 = self.send_stream_message(self.example_user("hamlet"), "Scotland",
topic_name="topic2")
id5 = self.send_stream_message(self.example_user("iago"), "Scotland",
topic_name="topic1")
id6 = self.send_stream_message(self.example_user("iago"), "Scotland",
topic_name="topic3")
result = self.client_patch("/json/messages/" + str(id2), {
'message_id': id2,
'topic': 'edited',
'propagate_mode': 'change_all'
})
self.assert_json_success(result)
self.check_topic(id1, topic_name="edited")
self.check_topic(id2, topic_name="edited")
self.check_topic(id3, topic_name="topic1")
self.check_topic(id4, topic_name="topic2")
self.check_topic(id5, topic_name="edited")
self.check_topic(id6, topic_name="topic3")
def test_propagate_invalid(self) -> None:
self.login('hamlet')
id1 = self.send_stream_message(self.example_user("hamlet"), "Scotland",
topic_name="topic1")
result = self.client_patch("/json/messages/" + str(id1), {
'topic': 'edited',
'propagate_mode': 'invalid',
})
self.assert_json_error(result, 'Invalid propagate_mode')
self.check_topic(id1, topic_name="topic1")
result = self.client_patch("/json/messages/" + str(id1), {
'content': 'edited',
'propagate_mode': 'change_all',
})
self.assert_json_error(result, 'Invalid propagate_mode without topic edit')
self.check_topic(id1, topic_name="topic1")
def prepare_move_topics(self, user_email: str, old_stream: str, new_stream: str, topic: str) -> Tuple[UserProfile, Stream, Stream, int, int]:
user_profile = self.example_user(user_email)
self.login(user_email)
stream = self.make_stream(old_stream)
new_stream = self.make_stream(new_stream)
self.subscribe(user_profile, stream.name)
self.subscribe(user_profile, new_stream.name)
msg_id = self.send_stream_message(user_profile, stream.name,
topic_name=topic, content="First")
msg_id_lt = self.send_stream_message(user_profile, stream.name,
topic_name=topic, content="Second")
self.send_stream_message(user_profile, stream.name,
topic_name=topic, content="third")
return (user_profile, stream, new_stream, msg_id, msg_id_lt)
def test_move_message_to_stream(self) -> None:
(user_profile, old_stream, new_stream, msg_id, msg_id_lt) = self.prepare_move_topics(
"iago", "test move stream", "new stream", "test")
result = self.client_patch("/json/messages/" + str(msg_id), {
'message_id': msg_id,
'stream_id': new_stream.id,
'propagate_mode': 'change_all'
})
self.assert_json_success(result)
messages = get_topic_messages(user_profile, old_stream, "test")
self.assertEqual(len(messages), 1)
self.assertEqual(messages[0].content, "This topic was moved by @_**Iago|%s** to #**new stream>test**" % (user_profile.id,))
messages = get_topic_messages(user_profile, new_stream, "test")
self.assertEqual(len(messages), 4)
self.assertEqual(messages[3].content, "This topic was moved here from #**test move stream>test** by @_**Iago|%s**" % (user_profile.id,))
def test_move_message_to_stream_change_later(self) -> None:
(user_profile, old_stream, new_stream, msg_id, msg_id_later) = self.prepare_move_topics(
"iago", "test move stream", "new stream", "test")
result = self.client_patch("/json/messages/" + str(msg_id_later), {
'message_id': msg_id_later,
'stream_id': new_stream.id,
'propagate_mode': 'change_later'
})
self.assert_json_success(result)
messages = get_topic_messages(user_profile, old_stream, "test")
self.assertEqual(len(messages), 2)
self.assertEqual(messages[0].id, msg_id)
self.assertEqual(messages[1].content, "This topic was moved by @_**Iago|%s** to #**new stream>test**" % (user_profile.id,))
messages = get_topic_messages(user_profile, new_stream, "test")
self.assertEqual(len(messages), 3)
self.assertEqual(messages[0].id, msg_id_later)
self.assertEqual(messages[2].content, "This topic was moved here from #**test move stream>test** by @_**Iago|%d**" % (user_profile.id,))
def test_move_message_to_stream_no_allowed(self) -> None:
(user_profile, old_stream, new_stream, msg_id, msg_id_later) = self.prepare_move_topics(
"aaron", "test move stream", "new stream", "test")
result = self.client_patch("/json/messages/" + str(msg_id), {
'message_id': msg_id,
'stream_id': new_stream.id,
'propagate_mode': 'change_all'
})
self.assert_json_error(result, "You don't have permission to move this message")
messages = get_topic_messages(user_profile, old_stream, "test")
self.assertEqual(len(messages), 3)
messages = get_topic_messages(user_profile, new_stream, "test")
self.assertEqual(len(messages), 0)
def test_move_message_to_stream_with_content(self) -> None:
(user_profile, old_stream, new_stream, msg_id, msg_id_later) = self.prepare_move_topics(
"iago", "test move stream", "new stream", "test")
result = self.client_patch("/json/messages/" + str(msg_id), {
'message_id': msg_id,
'stream_id': new_stream.id,
'propagate_mode': 'change_all',
'content': 'Not allowed'
})
self.assert_json_error(result, "Cannot change message content while changing stream")
messages = get_topic_messages(user_profile, old_stream, "test")
self.assertEqual(len(messages), 3)
messages = get_topic_messages(user_profile, new_stream, "test")
self.assertEqual(len(messages), 0)
def test_move_message_to_stream_and_topic(self) -> None:
(user_profile, old_stream, new_stream, msg_id, msg_id_later) = self.prepare_move_topics(
"iago", "test move stream", "new stream", "test")
result = self.client_patch("/json/messages/" + str(msg_id), {
'message_id': msg_id,
'stream_id': new_stream.id,
'propagate_mode': 'change_all',
'topic': 'new topic'
})
messages = get_topic_messages(user_profile, old_stream, "test")
self.assertEqual(len(messages), 1)
self.assertEqual(messages[0].content, "This topic was moved by @_**Iago|%s** to #**new stream>new topic**" % (user_profile.id,))
messages = get_topic_messages(user_profile, new_stream, "new topic")
self.assertEqual(len(messages), 4)
self.assertEqual(messages[3].content, "This topic was moved here from #**test move stream>test** by @_**Iago|%s**" % (user_profile.id,))
self.assert_json_success(result)
    def test_move_message_to_stream_to_private_stream(self) -> None:
        """Messages cannot be moved into a private stream; the attempt
        fails and no messages are moved."""
        user_profile = self.example_user("iago")
        self.login("iago")
        stream = self.make_stream("test move stream")
        # NOTE(review): the third positional argument appears to make the
        # target stream invite-only (private) — confirm against make_stream.
        new_stream = self.make_stream("new stream", None, True)
        self.subscribe(user_profile, stream.name)
        self.subscribe(user_profile, new_stream.name)
        msg_id = self.send_stream_message(user_profile, stream.name,
                                          topic_name="test", content="First")
        self.send_stream_message(user_profile, stream.name,
                                 topic_name="test", content="Second")
        result = self.client_patch("/json/messages/" + str(msg_id), {
            'message_id': msg_id,
            'stream_id': new_stream.id,
            'propagate_mode': 'change_all',
        })
        self.assert_json_error(result, "Streams must be public")
        # Both messages stay in the original stream; nothing was moved.
        messages = get_topic_messages(user_profile, stream, "test")
        self.assertEqual(len(messages), 2)
        messages = get_topic_messages(user_profile, new_stream, "test")
        self.assertEqual(len(messages), 0)
class MirroredMessageUsersTest(ZulipTestCase):
    """Tests for create_mirrored_message_users: validating which clients
    may mirror messages and creating mirror-dummy accounts for unknown
    senders and recipients."""
    def test_invalid_sender(self) -> None:
        """A mirror request with no 'sender' POST field is rejected."""
        user = self.example_user('hamlet')
        recipients: List[str] = []
        Request = namedtuple('Request', ['POST'])
        request = Request(POST=dict())  # no 'sender' field
        with self.assertRaises(InvalidMirrorInput):
            create_mirrored_message_users(request, user, recipients)
    def test_invalid_client(self) -> None:
        """Only recognized mirroring clients may create mirrored users."""
        client = get_client(name='banned_mirror')  # not a valid mirror client
        user = self.example_user('hamlet')
        sender = user
        recipients: List[str] = []
        Request = namedtuple('Request', ['POST', 'client'])
        request = Request(POST = dict(sender=sender.email, type='private'),
                          client = client)
        with self.assertRaises(InvalidMirrorInput):
            create_mirrored_message_users(request, user, recipients)
    def test_invalid_email(self) -> None:
        """Malformed recipient emails are rejected for every mirror client."""
        invalid_email = 'alice AT example.com'
        recipients = [invalid_email]
        user = self.mit_user('starnine')
        sender = user
        Request = namedtuple('Request', ['POST', 'client'])
        for client_name in ['zephyr_mirror', 'irc_mirror', 'jabber_mirror']:
            client = get_client(name=client_name)
            request = Request(POST = dict(sender=sender.email, type='private'),
                              client = client)
            with self.assertRaises(InvalidMirrorInput):
                create_mirrored_message_users(request, user, recipients)
    @mock.patch('DNS.dnslookup', return_value=[['sipbtest:*:20922:101:Fred Sipb,,,:/mit/sipbtest:/bin/athena/tcsh']])
    def test_zephyr_mirror_new_recipient(self, ignored: object) -> None:
        """When mirroring from zephyr, unknown recipients get mirror-dummy
        accounts created in the realm."""
        client = get_client(name='zephyr_mirror')
        user = self.mit_user('starnine')
        sender = self.mit_user('sipbtest')
        new_user_email = 'bob_the_new_user@mit.edu'
        new_user_realm = get_realm("zephyr")
        recipients = [user.email, new_user_email]
        Request = namedtuple('Request', ['POST', 'client'])
        request = Request(POST = dict(sender=sender.email, type='private'),
                          client = client)
        mirror_sender = create_mirrored_message_users(request, user, recipients)
        self.assertEqual(mirror_sender, sender)
        # Both the known recipient and the freshly created one exist now.
        realm_users = UserProfile.objects.filter(realm=sender.realm)
        realm_emails = {user.email for user in realm_users}
        self.assertIn(user.email, realm_emails)
        self.assertIn(new_user_email, realm_emails)
        # The new account is a mirror dummy, not a real user.
        bob = get_user(new_user_email, new_user_realm)
        self.assertTrue(bob.is_mirror_dummy)
    @mock.patch('DNS.dnslookup', return_value=[['sipbtest:*:20922:101:Fred Sipb,,,:/mit/sipbtest:/bin/athena/tcsh']])
    def test_zephyr_mirror_new_sender(self, ignored: object) -> None:
        """An unknown zephyr sender also gets a mirror-dummy account."""
        client = get_client(name='zephyr_mirror')
        user = self.mit_user('starnine')
        sender_email = 'new_sender@mit.edu'
        recipients = ['stream_name']
        Request = namedtuple('Request', ['POST', 'client'])
        request = Request(POST = dict(sender=sender_email, type='stream'),
                          client = client)
        mirror_sender = create_mirrored_message_users(request, user, recipients)
        assert(mirror_sender is not None)
        self.assertEqual(mirror_sender.email, sender_email)
        self.assertTrue(mirror_sender.is_mirror_dummy)
    def test_irc_mirror(self) -> None:
        """IRC mirroring creates mirror dummies for unknown recipients."""
        reset_emails_in_zulip_realm()
        client = get_client(name='irc_mirror')
        sender = self.example_user('hamlet')
        recipients = [self.nonreg_email('alice'), 'bob@irc.zulip.com', self.nonreg_email('cordelia')]
        Request = namedtuple('Request', ['POST', 'client'])
        request = Request(POST = dict(sender=sender.email, type='private'),
                          client = client)
        mirror_sender = create_mirrored_message_users(request, sender, recipients)
        self.assertEqual(mirror_sender, sender)
        realm_users = UserProfile.objects.filter(realm=sender.realm)
        realm_emails = {user.email for user in realm_users}
        self.assertIn(self.nonreg_email('alice'), realm_emails)
        self.assertIn('bob@irc.zulip.com', realm_emails)
        bob = get_user('bob@irc.zulip.com', sender.realm)
        self.assertTrue(bob.is_mirror_dummy)
    def test_jabber_mirror(self) -> None:
        """Jabber mirroring creates mirror dummies for unknown recipients."""
        reset_emails_in_zulip_realm()
        client = get_client(name='jabber_mirror')
        sender = self.example_user('hamlet')
        user = sender
        recipients = [self.nonreg_email('alice'), self.nonreg_email('bob'), self.nonreg_email('cordelia')]
        Request = namedtuple('Request', ['POST', 'client'])
        request = Request(POST = dict(sender=sender.email, type='private'),
                          client = client)
        mirror_sender = create_mirrored_message_users(request, user, recipients)
        self.assertEqual(mirror_sender, sender)
        realm_users = UserProfile.objects.filter(realm=sender.realm)
        realm_emails = {user.email for user in realm_users}
        self.assertIn(self.nonreg_email('alice'), realm_emails)
        self.assertIn(self.nonreg_email('bob'), realm_emails)
        bob = get_user(self.nonreg_email('bob'), sender.realm)
        self.assertTrue(bob.is_mirror_dummy)
class MessageAccessTests(ZulipTestCase):
    """Per-user message access: updating message flags, starring messages
    across public/private streams (including guests and historical
    messages), and bulk_access_messages filtering."""
    def test_update_invalid_flags(self) -> None:
        """Unknown flags, internal-only flags, and non-editable flags are
        all rejected by the flags endpoint."""
        message = self.send_personal_message(
            self.example_user("cordelia"),
            self.example_user("hamlet"),
            "hello",
        )
        self.login('hamlet')
        # Completely unknown flag name.
        result = self.client_post("/json/messages/flags",
                                  {"messages": ujson.dumps([message]),
                                   "op": "add",
                                   "flag": "invalid"})
        self.assert_json_error(result, "Invalid flag: 'invalid'")
        # Real flags that clients may not set via this endpoint.
        result = self.client_post("/json/messages/flags",
                                  {"messages": ujson.dumps([message]),
                                   "op": "add",
                                   "flag": "is_private"})
        self.assert_json_error(result, "Invalid flag: 'is_private'")
        result = self.client_post("/json/messages/flags",
                                  {"messages": ujson.dumps([message]),
                                   "op": "add",
                                   "flag": "active_mobile_push_notification"})
        self.assert_json_error(result, "Invalid flag: 'active_mobile_push_notification'")
        result = self.client_post("/json/messages/flags",
                                  {"messages": ujson.dumps([message]),
                                   "op": "add",
                                   "flag": "mentioned"})
        self.assert_json_error(result, "Flag not editable: 'mentioned'")
    def change_star(self, messages: List[int], add: bool=True, **kwargs: Any) -> HttpResponse:
        """Helper: add (or, with add=False, remove) the 'starred' flag on
        the given message ids."""
        return self.client_post("/json/messages/flags",
                                {"messages": ujson.dumps(messages),
                                 "op": "add" if add else "remove",
                                 "flag": "starred"},
                                **kwargs)
    def test_change_star(self) -> None:
        """Starring and then unstarring one's own message round-trips."""
        self.login('hamlet')
        message_ids = [self.send_personal_message(self.example_user("hamlet"),
                                                  self.example_user("hamlet"),
                                                  "test")]
        # Star the message.
        result = self.change_star(message_ids)
        self.assert_json_success(result)
        for msg in self.get_messages():
            if msg['id'] in message_ids:
                self.assertEqual(msg['flags'], ['starred'])
            else:
                self.assertEqual(msg['flags'], ['read'])
        # Remove the star again.
        result = self.change_star(message_ids, False)
        self.assert_json_success(result)
        for msg in self.get_messages():
            if msg['id'] in message_ids:
                self.assertEqual(msg['flags'], [])
    def test_change_star_public_stream_historical(self) -> None:
        """A user can star a public-stream message they did not receive
        (a 'historical' message), but only the 'starred' flag, only one
        message at a time, and only within their own realm."""
        stream_name = "new_stream"
        self.subscribe(self.example_user("hamlet"), stream_name)
        self.login('hamlet')
        message_ids = [
            self.send_stream_message(self.example_user("hamlet"), stream_name, "test"),
        ]
        # Extra message used to verify it is never touched below.
        other_message_ids = [
            self.send_stream_message(self.example_user("hamlet"), stream_name, "test_unused"),
        ]
        received_message_ids = [
            self.send_personal_message(
                self.example_user("hamlet"),
                self.example_user("cordelia"),
                "test_received"
            ),
        ]
        # Now login as another user who wasn't on that stream
        self.login('cordelia')
        # Mark a message of her own as read (no assert; just setup so she
        # has a message with the 'read' flag).
        sent_message_ids = [
            self.send_personal_message(
                self.example_user("cordelia"),
                self.example_user("cordelia"),
                "test_read_message",
            ),
        ]
        result = self.client_post("/json/messages/flags",
                                  {"messages": ujson.dumps(sent_message_ids),
                                   "op": "add",
                                   "flag": "read"})
        # Flags other than 'starred' cannot be set on historical messages.
        result = self.client_post("/json/messages/flags",
                                  {"messages": ujson.dumps(message_ids),
                                   "op": "add",
                                   "flag": "read"})
        self.assert_json_error(result, 'Invalid message(s)')
        # Trying to change a list of more than one historical message fails
        result = self.change_star(message_ids * 2)
        self.assert_json_error(result, 'Invalid message(s)')
        # Confirm that one can change the historical flag now
        result = self.change_star(message_ids)
        self.assert_json_success(result)
        for msg in self.get_messages():
            if msg['id'] in message_ids:
                self.assertEqual(set(msg['flags']), {'starred', 'historical', 'read'})
            elif msg['id'] in received_message_ids:
                self.assertEqual(msg['flags'], [])
            else:
                self.assertEqual(msg['flags'], ['read'])
            self.assertNotIn(msg['id'], other_message_ids)
        result = self.change_star(message_ids, False)
        self.assert_json_success(result)
        # But it still doesn't work if you're in another realm
        user = self.mit_user('sipbtest')
        self.login_user(user)
        result = self.change_star(message_ids, subdomain="zephyr")
        self.assert_json_error(result, 'Invalid message(s)')
    def test_change_star_private_message_security(self) -> None:
        """Starring a private message you did not receive is rejected."""
        self.login('hamlet')
        message_ids = [
            self.send_personal_message(
                self.example_user("hamlet"),
                self.example_user("hamlet"),
                "test",
            ),
        ]
        # Starring private messages you didn't receive fails.
        self.login('cordelia')
        result = self.change_star(message_ids)
        self.assert_json_error(result, 'Invalid message(s)')
    def test_change_star_private_stream_security(self) -> None:
        """Starring private-stream messages requires access: subscription,
        and — with protected history — having received the message."""
        stream_name = "private_stream"
        self.make_stream(stream_name, invite_only=True)
        self.subscribe(self.example_user("hamlet"), stream_name)
        self.login('hamlet')
        message_ids = [
            self.send_stream_message(self.example_user("hamlet"), stream_name, "test"),
        ]
        # A subscriber can star their own private-stream message ...
        result = self.change_star(message_ids)
        self.assert_json_success(result)
        # ... but a non-subscriber cannot even see it.
        self.login('cordelia')
        result = self.change_star(message_ids)
        self.assert_json_error(result, 'Invalid message(s)')
        stream_name = "private_stream_2"
        self.make_stream(stream_name, invite_only=True,
                         history_public_to_subscribers=True)
        self.subscribe(self.example_user("hamlet"), stream_name)
        self.login('hamlet')
        message_ids = [
            self.send_stream_message(self.example_user("hamlet"), stream_name, "test"),
        ]
        # With stream.history_public_to_subscribers = True, you still
        # can't see it if you didn't receive the message and are
        # not subscribed.
        self.login('cordelia')
        result = self.change_star(message_ids)
        self.assert_json_error(result, 'Invalid message(s)')
        # But if you subscribe, then you can star the message
        self.subscribe(self.example_user("cordelia"), stream_name)
        result = self.change_star(message_ids)
        self.assert_json_success(result)
    def test_new_message(self) -> None:
        """A freshly sent message is not starred for its sender."""
        sender = self.example_user('hamlet')
        self.login_user(sender)
        content = "Test message for star"
        self.send_stream_message(sender, "Verona",
                                 content=content)
        # Fetch the sender's newest UserMessage row.
        sent_message = UserMessage.objects.filter(
            user_profile=self.example_user('hamlet')
        ).order_by("id").reverse()[0]
        self.assertEqual(sent_message.message.content, content)
        self.assertFalse(sent_message.flags.starred)
    def test_change_star_public_stream_security_for_guest_user(self) -> None:
        # Guest user can't access(star) unsubscribed public stream messages
        normal_user = self.example_user("hamlet")
        stream_name = "public_stream"
        self.make_stream(stream_name)
        self.subscribe(normal_user, stream_name)
        self.login_user(normal_user)
        message_id = [
            self.send_stream_message(normal_user, stream_name, "test 1")
        ]
        guest_user = self.example_user('polonius')
        self.login_user(guest_user)
        result = self.change_star(message_id)
        self.assert_json_error(result, 'Invalid message(s)')
        # Subscribed guest users can star public stream messages sent before
        # they subscribed (public streams have public history).
        self.subscribe(guest_user, stream_name)
        result = self.change_star(message_id)
        self.assert_json_success(result)
        # And, of course, messages sent after they subscribed.
        self.login_user(normal_user)
        message_id = [
            self.send_stream_message(normal_user, stream_name, "test 2")
        ]
        self.login_user(guest_user)
        result = self.change_star(message_id)
        self.assert_json_success(result)
    def test_change_star_private_stream_security_for_guest_user(self) -> None:
        # Guest users can't access(star) unsubscribed private stream messages
        normal_user = self.example_user("hamlet")
        stream_name = "private_stream"
        stream = self.make_stream(stream_name, invite_only=True)
        self.subscribe(normal_user, stream_name)
        self.login_user(normal_user)
        message_id = [
            self.send_stream_message(normal_user, stream_name, "test 1")
        ]
        guest_user = self.example_user('polonius')
        self.login_user(guest_user)
        result = self.change_star(message_id)
        self.assert_json_error(result, 'Invalid message(s)')
        # Guest user can't access messages of subscribed private streams if
        # the history is not public to subscribers.
        self.subscribe(guest_user, stream_name)
        result = self.change_star(message_id)
        self.assert_json_error(result, 'Invalid message(s)')
        # With history public to subscribers, the old message is accessible.
        do_change_stream_invite_only(stream, True, history_public_to_subscribers=True)
        result = self.change_star(message_id)
        self.assert_json_success(result)
        # Even with protected history, messages received after subscribing
        # remain accessible.
        do_change_stream_invite_only(stream, True, history_public_to_subscribers=False)
        self.login_user(normal_user)
        message_id = [
            self.send_stream_message(normal_user, stream_name, "test 2")
        ]
        self.login_user(guest_user)
        result = self.change_star(message_id)
        self.assert_json_success(result)
    def test_bulk_access_messages_private_stream(self) -> None:
        """bulk_access_messages filters private-stream messages: with
        protected history, only messages sent after subscribing are
        accessible; unsubscribed users get nothing."""
        user = self.example_user("hamlet")
        self.login_user(user)
        stream_name = "private_stream"
        stream = self.make_stream(stream_name, invite_only=True,
                                  history_public_to_subscribers=False)
        self.subscribe(user, stream_name)
        # One message before and one after cordelia subscribes.
        message_one_id = self.send_stream_message(user,
                                                  stream_name, "Message one")
        later_subscribed_user = self.example_user("cordelia")
        self.subscribe(later_subscribed_user, stream_name)
        message_two_id = self.send_stream_message(user,
                                                  stream_name, "Message two")
        message_ids = [message_one_id, message_two_id]
        messages = [Message.objects.select_related().get(id=message_id)
                    for message_id in message_ids]
        filtered_messages = bulk_access_messages(later_subscribed_user, messages)
        # Messages sent before subscribing are not accessible by the later-
        # subscribed user as stream has protected history
        self.assertEqual(len(filtered_messages), 1)
        self.assertEqual(filtered_messages[0].id, message_two_id)
        do_change_stream_invite_only(stream, True, history_public_to_subscribers=True)
        filtered_messages = bulk_access_messages(later_subscribed_user, messages)
        # Message sent before subscribing are accessible by user as stream
        # don't have protected history
        self.assertEqual(len(filtered_messages), 2)
        # An unsubscribed user cannot access any private-stream messages.
        unsubscribed_user = self.example_user("ZOE")
        filtered_messages = bulk_access_messages(unsubscribed_user, messages)
        self.assertEqual(len(filtered_messages), 0)
    def test_bulk_access_messages_public_stream(self) -> None:
        """For public streams, bulk_access_messages grants access to all
        messages regardless of when (or whether) the user subscribed."""
        user = self.example_user("hamlet")
        self.login_user(user)
        stream_name = "public_stream"
        self.subscribe(user, stream_name)
        # One message before and one after cordelia subscribes.
        message_one_id = self.send_stream_message(user,
                                                  stream_name, "Message one")
        later_subscribed_user = self.example_user("cordelia")
        self.subscribe(later_subscribed_user, stream_name)
        message_two_id = self.send_stream_message(user,
                                                  stream_name, "Message two")
        message_ids = [message_one_id, message_two_id]
        messages = [Message.objects.select_related().get(id=message_id)
                    for message_id in message_ids]
        filtered_messages = bulk_access_messages(later_subscribed_user, messages)
        self.assertEqual(len(filtered_messages), 2)
        # Even an unsubscribed user can access public-stream messages.
        unsubscribed_user = self.example_user("ZOE")
        filtered_messages = bulk_access_messages(unsubscribed_user, messages)
        self.assertEqual(len(filtered_messages), 2)
class MessageHasKeywordsTest(ZulipTestCase):
    """Tests for the denormalized has_link/has_image/has_attachment flags
    on Message, both at send time and after message edits, plus the
    attachment-claiming behavior tied to them."""
    def setup_dummy_attachments(self, user_profile: UserProfile) -> List[str]:
        # Create a few Attachment rows owned by user_profile and return
        # their path_ids for use in message bodies.
        sample_size = 10
        realm_id = user_profile.realm_id
        dummy_files = [
            ('zulip.txt', '%s/31/4CBjtTLYZhk66pZrF8hnYGwc/zulip.txt' % (realm_id,), sample_size),
            ('temp_file.py', '%s/31/4CBjtTLYZhk66pZrF8hnYGwc/temp_file.py' % (realm_id,), sample_size),
            ('abc.py', '%s/31/4CBjtTLYZhk66pZrF8hnYGwc/abc.py' % (realm_id,), sample_size)
        ]
        for file_name, path_id, size in dummy_files:
            create_attachment(file_name, path_id, user_profile, size)
        return [x[1] for x in dummy_files]
    def test_claim_attachment(self) -> None:
        # An attachment is "claimed" only when a sent message links to it
        # in a way the markdown processor treats as a real link.
        user_profile = self.example_user('hamlet')
        dummy_path_ids = self.setup_dummy_attachments(user_profile)
        dummy_urls = ["http://zulip.testserver/user_uploads/{}".format(x) for x in dummy_path_ids]
        self.subscribe(user_profile, "Denmark")
        def assert_attachment_claimed(path_id: str, claimed: bool) -> None:
            attachment = Attachment.objects.get(path_id=path_id)
            self.assertEqual(attachment.is_claimed(), claimed)
        # A named link claims attachment 0; dummy_urls[1] is fused into the
        # surrounding text and is not parsed as a link, so it stays unclaimed.
        body = ("Some files here ...[zulip.txt]({})" +
                "{}.... Some more...." +
                "{}").format(dummy_urls[0], dummy_urls[1], dummy_urls[1])
        self.send_stream_message(user_profile, "Denmark", body, "test")
        assert_attachment_claimed(dummy_path_ids[0], True)
        assert_attachment_claimed(dummy_path_ids[1], False)
        # A URL inside an inline code span does not claim the attachment.
        body = "Link in code: `{}`".format(dummy_urls[2])
        self.send_stream_message(user_profile, "Denmark", body, "test")
        assert_attachment_claimed(dummy_path_ids[2], False)
        # A URL mangled by surrounding punctuation is not parsed as a link.
        body = "Link to not parse: .{}.`".format(dummy_urls[2])
        self.send_stream_message(user_profile, "Denmark", body, "test")
        assert_attachment_claimed(dummy_path_ids[2], False)
        # Finally, claim attachment 3.
        body = "Link: {}".format(dummy_urls[2])
        self.send_stream_message(user_profile, "Denmark", body, "test")
        assert_attachment_claimed(dummy_path_ids[2], True)
        assert_attachment_claimed(dummy_path_ids[1], False)
    def test_finds_all_links(self) -> None:
        # Bare domains, named markdown links, and explicit URLs all set has_link.
        msg_ids = []
        msg_contents = ["foo.org", "[bar](baz.gov)", "http://quux.ca"]
        for msg_content in msg_contents:
            msg_ids.append(self.send_stream_message(self.example_user('hamlet'),
                                                    'Denmark', content=msg_content))
        msgs = [Message.objects.get(id=id) for id in msg_ids]
        self.assertTrue(all([msg.has_link for msg in msgs]))
    def test_finds_only_links(self) -> None:
        # URLs inside code/math spans (or messages without any URL) should
        # not all have has_link set.
        msg_ids = []
        msg_contents = ["`example.org`", '``example.org```', '$$https://example.org$$', "foo"]
        for msg_content in msg_contents:
            msg_ids.append(self.send_stream_message(self.example_user('hamlet'),
                                                    'Denmark', content=msg_content))
        msgs = [Message.objects.get(id=id) for id in msg_ids]
        self.assertFalse(all([msg.has_link for msg in msgs]))
    def update_message(self, msg: Message, content: str) -> None:
        # Re-render and save new content via do_update_message, mirroring
        # what the message-edit endpoint does.
        hamlet = self.example_user('hamlet')
        realm_id = hamlet.realm.id
        rendered_content = render_markdown(msg, content)
        mention_data = bugdown.MentionData(realm_id, content)
        do_update_message(hamlet, msg, None, None, "change_one", content,
                          rendered_content, set(), set(), mention_data=mention_data)
    def test_finds_link_after_edit(self) -> None:
        # has_link must be recomputed whenever the message is edited.
        hamlet = self.example_user('hamlet')
        msg_id = self.send_stream_message(hamlet, 'Denmark', content='a')
        msg = Message.objects.get(id=msg_id)
        self.assertFalse(msg.has_link)
        self.update_message(msg, 'a http://foo.com')
        self.assertTrue(msg.has_link)
        self.update_message(msg, 'a')
        self.assertFalse(msg.has_link)
        # Check links in blockquotes work.
        self.update_message(msg, '> http://bar.com')
        self.assertTrue(msg.has_link)
        # Links inside code spans do not count.
        self.update_message(msg, 'a `http://foo.com`')
        self.assertFalse(msg.has_link)
    def test_has_image(self) -> None:
        # has_image is set only for image-extension URLs/links, and is
        # recomputed on edit.
        msg_ids = []
        msg_contents = ["Link: foo.org",
                        "Image: https://www.google.com/images/srpr/logo4w.png",
                        "Image: https://www.google.com/images/srpr/logo4w.pdf",
                        "[Google Link](https://www.google.com/images/srpr/logo4w.png)"]
        for msg_content in msg_contents:
            msg_ids.append(self.send_stream_message(self.example_user('hamlet'),
                                                    'Denmark', content=msg_content))
        msgs = [Message.objects.get(id=id) for id in msg_ids]
        self.assertEqual([False, True, False, True], [msg.has_image for msg in msgs])
        self.update_message(msgs[0], 'https://www.google.com/images/srpr/logo4w.png')
        self.assertTrue(msgs[0].has_image)
        self.update_message(msgs[0], 'No Image Again')
        self.assertFalse(msgs[0].has_image)
    def test_has_attachment(self) -> None:
        # has_attachment tracks /user_uploads links outside code spans and
        # is recomputed on edit, along with the attachment_set relation.
        hamlet = self.example_user('hamlet')
        dummy_path_ids = self.setup_dummy_attachments(hamlet)
        dummy_urls = ["http://zulip.testserver/user_uploads/{}".format(x) for x in dummy_path_ids]
        self.subscribe(hamlet, "Denmark")
        body = ("Files ...[zulip.txt]({}) {} {}").format(dummy_urls[0], dummy_urls[1], dummy_urls[2])
        msg_id = self.send_stream_message(hamlet, "Denmark", body, "test")
        msg = Message.objects.get(id=msg_id)
        self.assertTrue(msg.has_attachment)
        self.update_message(msg, 'No Attachments')
        self.assertFalse(msg.has_attachment)
        self.update_message(msg, body)
        self.assertTrue(msg.has_attachment)
        self.update_message(msg, 'Link in code: `{}`'.format(dummy_urls[1]))
        self.assertFalse(msg.has_attachment)
        # Test blockquotes.
        self.update_message(msg, '> {}'.format(dummy_urls[1]))
        self.assertTrue(msg.has_attachment)
        # Additional test to check has_attachment is being set due to the
        # correct attachment.
        self.update_message(msg, 'Outside: {}. In code: `{}`.'.format(dummy_urls[0], dummy_urls[1]))
        self.assertTrue(msg.has_attachment)
        self.assertTrue(msg.attachment_set.filter(path_id=dummy_path_ids[0]))
        self.assertEqual(msg.attachment_set.count(), 1)
        self.update_message(msg, 'Outside: {}. In code: `{}`.'.format(dummy_urls[1], dummy_urls[0]))
        self.assertTrue(msg.has_attachment)
        self.assertTrue(msg.attachment_set.filter(path_id=dummy_path_ids[1]))
        self.assertEqual(msg.attachment_set.count(), 1)
        self.update_message(msg, 'Both in code: `{} {}`.'.format(dummy_urls[1], dummy_urls[0]))
        self.assertFalse(msg.has_attachment)
        self.assertEqual(msg.attachment_set.count(), 0)
    def test_potential_attachment_path_ids(self) -> None:
        # do_claim_attachments should only be invoked for URLs that look
        # like genuine /user_uploads paths on this realm.
        hamlet = self.example_user('hamlet')
        self.subscribe(hamlet, "Denmark")
        dummy_path_ids = self.setup_dummy_attachments(hamlet)
        body = "Hello"
        msg_id = self.send_stream_message(hamlet, "Denmark", body, "test")
        msg = Message.objects.get(id=msg_id)
        with mock.patch("zerver.lib.actions.do_claim_attachments",
                        wraps=do_claim_attachments) as m:
            # Absolute URL on the realm's host: claimed.
            self.update_message(msg, '[link](http://{}/user_uploads/{})'.format(
                hamlet.realm.host, dummy_path_ids[0]))
            self.assertTrue(m.called)
            m.reset_mock()
            # Relative /user_uploads path: claimed.
            self.update_message(msg, '[link](/user_uploads/{})'.format(dummy_path_ids[1]))
            self.assertTrue(m.called)
            m.reset_mock()
            # Same attachment already claimed; editing the link text alone
            # does not re-claim it.
            self.update_message(msg, '[new text link](/user_uploads/{})'.format(dummy_path_ids[1]))
            self.assertFalse(m.called)
            m.reset_mock()
            # It's not clear this is correct behavior
            self.update_message(msg, '[link](user_uploads/{})'.format(dummy_path_ids[2]))
            self.assertFalse(m.called)
            m.reset_mock()
            # A user_uploads path on a foreign host is not a local upload.
            self.update_message(msg, '[link](https://github.com/user_uploads/{})'.format(
                dummy_path_ids[0]))
            self.assertFalse(m.called)
            m.reset_mock()
class MissedMessageTest(ZulipTestCase):
    def test_presence_idle_user_ids(self) -> None:
        """get_active_presence_idle_user_ids() should return the subset of
        recipients who would care about the message (all PM recipients, or
        mentioned users for stream messages) and have no recent presence."""
        UserPresence.objects.all().delete()
        sender = self.example_user('cordelia')
        realm = sender.realm
        hamlet = self.example_user('hamlet')
        othello = self.example_user('othello')
        recipient_ids = {hamlet.id, othello.id}
        message_type = 'stream'
        user_flags: Dict[int, List[str]] = {}
        def assert_missing(user_ids: List[int]) -> None:
            # Evaluate with the *current* message_type/user_flags, which the
            # surrounding test mutates between calls.
            presence_idle_user_ids = get_active_presence_idle_user_ids(
                realm=realm,
                sender_id=sender.id,
                message_type=message_type,
                active_user_ids=recipient_ids,
                user_flags=user_flags,
            )
            self.assertEqual(sorted(user_ids), sorted(presence_idle_user_ids))
        def set_presence(user: UserProfile, client_name: str, ago: int) -> None:
            # Record a presence row with a timestamp `ago` seconds in the past.
            when = timezone_now() - datetime.timedelta(seconds=ago)
            UserPresence.objects.create(
                user_profile_id=user.id,
                realm_id=user.realm_id,
                client=get_client(client_name),
                timestamp=when,
            )
        # With no presence rows at all, every PM recipient is idle.
        message_type = 'private'
        assert_missing([hamlet.id, othello.id])
        # For stream messages, only mentioned users are candidates.
        message_type = 'stream'
        user_flags[hamlet.id] = ['mentioned']
        assert_missing([hamlet.id])
        # Old presence (5000s ago) still counts as idle — presumably beyond
        # the activity threshold; verify against get_active_presence_idle_user_ids.
        set_presence(hamlet, 'iPhone', ago=5000)
        assert_missing([hamlet.id])
        # Fresh presence marks hamlet as active, so nobody is missing.
        set_presence(hamlet, 'webapp', ago=15)
        assert_missing([])
        # For PMs, othello (who has no presence rows) is still idle.
        message_type = 'private'
        assert_missing([othello.id])
class LogDictTest(ZulipTestCase):
    def test_to_log_dict(self) -> None:
        """Message.to_log_dict() should expose the sender, recipient,
        topic, content, and client fields of the message."""
        sender = self.example_user('hamlet')
        message_id = self.send_stream_message(sender, 'Denmark',
                                              topic_name='Copenhagen',
                                              content='find me some good coffee shops')
        log_dict = Message.objects.get(id=message_id).to_log_dict()
        self.assertIn('timestamp', log_dict)
        self.assertEqual(log_dict['content'], 'find me some good coffee shops')
        self.assertEqual(log_dict['id'], message_id)
        self.assertEqual(log_dict['recipient'], 'Denmark')
        self.assertEqual(log_dict['sender_realm_str'], 'zulip')
        self.assertEqual(log_dict['sender_email'], sender.email)
        self.assertEqual(log_dict['sender_full_name'], 'King Hamlet')
        self.assertEqual(log_dict['sender_id'], sender.id)
        self.assertEqual(log_dict['sender_short_name'], 'hamlet')
        self.assertEqual(log_dict['sending_client'], 'test suite')
        self.assertEqual(log_dict[DB_TOPIC_NAME], 'Copenhagen')
        self.assertEqual(log_dict['type'], 'stream')
class CheckMessageTest(ZulipTestCase):
    def test_basic_check_message_call(self) -> None:
        # check_message() should succeed for a simple stream message and
        # record the correct sender.
        sender = self.example_user('othello')
        client = make_client(name="test suite")
        stream_name = 'España y Francia'
        self.make_stream(stream_name)
        topic_name = 'issue'
        message_content = 'whatever'
        addressee = Addressee.for_stream_name(stream_name, topic_name)
        ret = check_message(sender, client, addressee, message_content)
        self.assertEqual(ret['message'].sender.id, sender.id)
    def test_bot_pm_feature(self) -> None:
        """When a bot sends to a nonexistent stream or one with no
        subscribers, its owner is notified via a rate-limited PM."""
        parent = self.example_user('othello')
        bot = do_create_user(
            email='othello-bot@zulip.com',
            password='',
            realm=parent.realm,
            full_name='',
            short_name='',
            bot_type=UserProfile.DEFAULT_BOT,
            bot_owner=parent
        )
        bot.last_reminder = None
        sender = bot
        client = make_client(name="test suite")
        stream_name = 'Россия'
        topic_name = 'issue'
        addressee = Addressee.for_stream_name(stream_name, topic_name)
        message_content = 'whatever'
        old_count = message_stream_count(parent)
        # Sending to a nonexistent stream raises, and the bot's owner (not
        # the sender) receives one PM explaining the failure.
        with self.assertRaises(JsonableError):
            check_message(sender, client, addressee, message_content)
        new_count = message_stream_count(parent)
        self.assertEqual(new_count, old_count + 1)
        self.assertIn("that stream does not exist.", most_recent_message(parent).content)
        # Try sending to stream that exists with no subscribers soon
        # after; due to rate-limiting, this should send nothing.
        self.make_stream(stream_name)
        ret = check_message(sender, client, addressee, message_content)
        new_count = message_stream_count(parent)
        self.assertEqual(new_count, old_count + 1)
        # Try sending to stream that exists with no subscribers longer
        # after (simulated by rewinding last_reminder); this should send
        # an error to the bot owner that the stream has no subscribers.
        assert(sender.last_reminder is not None)
        sender.last_reminder = sender.last_reminder - datetime.timedelta(hours=1)
        sender.save(update_fields=["last_reminder"])
        ret = check_message(sender, client, addressee, message_content)
        new_count = message_stream_count(parent)
        self.assertEqual(new_count, old_count + 2)
        self.assertEqual(ret['message'].sender.email, 'othello-bot@zulip.com')
        self.assertIn("does not have any subscribers", most_recent_message(parent).content)
    def test_bot_pm_error_handling(self) -> None:
        # Mismatched realm/sender combinations, or a deactivated realm,
        # should silently skip the bot-owner notification (last_reminder
        # stays unset).
        cordelia = self.example_user('cordelia')
        test_bot = self.create_test_bot(
            short_name='test',
            user_profile=cordelia,
        )
        content = 'whatever'
        good_realm = test_bot.realm
        wrong_realm = get_realm("zephyr")
        wrong_sender = cordelia
        send_rate_limited_pm_notification_to_bot_owner(test_bot, wrong_realm, content)
        self.assertEqual(test_bot.last_reminder, None)
        send_rate_limited_pm_notification_to_bot_owner(wrong_sender, good_realm, content)
        self.assertEqual(test_bot.last_reminder, None)
        test_bot.realm.deactivated = True
        send_rate_limited_pm_notification_to_bot_owner(test_bot, good_realm, content)
        self.assertEqual(test_bot.last_reminder, None)
class DeleteMessageTest(ZulipTestCase):
    def test_delete_message_invalid_request_format(self) -> None:
        # The message id must come from the URL path; a POST-style
        # message_id parameter pointing at a different message is ignored.
        self.login('iago')
        hamlet = self.example_user('hamlet')
        msg_id = self.send_stream_message(hamlet, "Scotland")
        result = self.client_delete('/json/messages/{msg_id}'.format(msg_id=msg_id + 1),
                                    {'message_id': msg_id})
        self.assert_json_error(result, "Invalid message(s)")
        result = self.client_delete('/json/messages/{msg_id}'.format(msg_id=msg_id))
        self.assert_json_success(result)
    def test_delete_message_by_user(self) -> None:
        """Exercise the realm settings controlling message deletion:
        admins can always delete; owners only when allow_message_deleting
        is on and within message_content_delete_limit_seconds; other
        users never."""
        def set_message_deleting_params(allow_message_deleting: bool,
                                        message_content_delete_limit_seconds: int) -> None:
            self.login('iago')
            result = self.client_patch("/json/realm", {
                'allow_message_deleting': ujson.dumps(allow_message_deleting),
                'message_content_delete_limit_seconds': message_content_delete_limit_seconds
            })
            self.assert_json_success(result)
        def test_delete_message_by_admin(msg_id: int) -> HttpResponse:
            self.login('iago')
            result = self.client_delete('/json/messages/{msg_id}'.format(msg_id=msg_id))
            return result
        def test_delete_message_by_owner(msg_id: int) -> HttpResponse:
            self.login('hamlet')
            result = self.client_delete('/json/messages/{msg_id}'.format(msg_id=msg_id))
            return result
        def test_delete_message_by_other_user(msg_id: int) -> HttpResponse:
            self.login('cordelia')
            result = self.client_delete('/json/messages/{msg_id}'.format(msg_id=msg_id))
            return result
        # Deleting disabled: only admins may delete.
        set_message_deleting_params(False, 0)
        hamlet = self.example_user('hamlet')
        self.login_user(hamlet)
        msg_id = self.send_stream_message(hamlet, "Scotland")
        result = test_delete_message_by_owner(msg_id=msg_id)
        self.assert_json_error(result, "You don't have permission to delete this message")
        result = test_delete_message_by_other_user(msg_id=msg_id)
        self.assert_json_error(result, "You don't have permission to delete this message")
        result = test_delete_message_by_admin(msg_id=msg_id)
        self.assert_json_success(result)
        # Deleting enabled with no time limit: the owner (but not other
        # users) may delete even an old message.
        set_message_deleting_params(True, 0)
        msg_id = self.send_stream_message(hamlet, "Scotland")
        message = Message.objects.get(id=msg_id)
        message.date_sent = message.date_sent - datetime.timedelta(seconds=600)
        message.save()
        result = test_delete_message_by_other_user(msg_id=msg_id)
        self.assert_json_error(result, "You don't have permission to delete this message")
        result = test_delete_message_by_owner(msg_id=msg_id)
        self.assert_json_success(result)
        # Test if time limit is non-zero.
        set_message_deleting_params(True, 240)
        msg_id_1 = self.send_stream_message(hamlet, "Scotland")
        message = Message.objects.get(id=msg_id_1)
        message.date_sent = message.date_sent - datetime.timedelta(seconds=120)
        message.save()
        msg_id_2 = self.send_stream_message(hamlet, "Scotland")
        message = Message.objects.get(id=msg_id_2)
        message.date_sent = message.date_sent - datetime.timedelta(seconds=360)
        message.save()
        result = test_delete_message_by_other_user(msg_id=msg_id_1)
        self.assert_json_error(result, "You don't have permission to delete this message")
        result = test_delete_message_by_owner(msg_id=msg_id_1)
        self.assert_json_success(result)
        # msg_id_2 is past the 240s owner limit, but admins are exempt.
        result = test_delete_message_by_owner(msg_id=msg_id_2)
        self.assert_json_error(result, "The time limit for deleting this message has passed")
        result = test_delete_message_by_admin(msg_id=msg_id_2)
        self.assert_json_success(result)
        # Deleting the same message twice reports it as already gone.
        msg_id = self.send_stream_message(hamlet, "Scotland")
        result = test_delete_message_by_owner(msg_id=msg_id)
        self.assert_json_success(result)
        result = test_delete_message_by_owner(msg_id=msg_id)
        self.assert_json_error(result, "Invalid message(s)")
        # Simulate a delete/delete race: do_delete_messages raising because
        # another request removed the message concurrently.
        with mock.patch("zerver.views.messages.do_delete_messages") as m, \
                mock.patch("zerver.views.messages.validate_can_delete_message", return_value=None), \
                mock.patch("zerver.views.messages.access_message", return_value=(None, None)):
            m.side_effect = IntegrityError()
            result = test_delete_message_by_owner(msg_id=msg_id)
            self.assert_json_error(result, "Message already deleted")
            m.side_effect = Message.DoesNotExist()
            result = test_delete_message_by_owner(msg_id=msg_id)
            self.assert_json_error(result, "Message already deleted")
class SoftDeactivationMessageTest(ZulipTestCase):
    """Tests for soft deactivation: long-term-idle users stop getting
    UserMessage rows at send time and have them backfilled lazily by
    add_missing_messages() / reactivate_user_if_soft_deactivated()."""
    def test_reactivate_user_if_soft_deactivated(self) -> None:
        recipient_list = [self.example_user("hamlet"), self.example_user("iago")]
        for user_profile in recipient_list:
            self.subscribe(user_profile, "Denmark")
        sender = self.example_user('iago')
        stream_name = 'Denmark'
        topic_name = 'foo'
        def last_realm_audit_log_entry(event_type: int) -> RealmAuditLog:
            return RealmAuditLog.objects.filter(
                event_type=event_type
            ).order_by('-event_time')[0]
        long_term_idle_user = self.example_user('hamlet')
        # Give the user some message history, then soft-deactivate them.
        self.send_stream_message(long_term_idle_user, stream_name)
        do_soft_deactivate_users([long_term_idle_user])
        message = 'Test Message 1'
        message_id = self.send_stream_message(sender, stream_name,
                                              message, topic_name)
        idle_user_msg_list = get_user_messages(long_term_idle_user)
        idle_user_msg_count = len(idle_user_msg_list)
        # While soft-deactivated, no UserMessage row was created for them.
        self.assertNotEqual(idle_user_msg_list[-1].content, message)
        with queries_captured() as queries:
            reactivate_user_if_soft_deactivated(long_term_idle_user)
        self.assert_length(queries, 8)
        self.assertFalse(long_term_idle_user.long_term_idle)
        self.assertEqual(last_realm_audit_log_entry(
            RealmAuditLog.USER_SOFT_ACTIVATED).modified_user, long_term_idle_user)
        # Reactivation backfilled the missing message and advanced
        # last_active_message_id.
        idle_user_msg_list = get_user_messages(long_term_idle_user)
        self.assertEqual(len(idle_user_msg_list), idle_user_msg_count + 1)
        self.assertEqual(idle_user_msg_list[-1].content, message)
        long_term_idle_user.refresh_from_db()
        self.assertEqual(long_term_idle_user.last_active_message_id, message_id)
    def test_add_missing_messages(self) -> None:
        """add_missing_messages() should backfill UserMessage rows only
        for the periods in which the soft-deactivated user was actually
        subscribed, across subscribe/unsubscribe churn on public and
        private streams."""
        recipient_list = [self.example_user("hamlet"), self.example_user("iago")]
        for user_profile in recipient_list:
            self.subscribe(user_profile, "Denmark")
        sender = self.example_user('iago')
        realm = sender.realm
        sending_client = make_client(name="test suite")
        stream_name = 'Denmark'
        stream = get_stream(stream_name, realm)
        topic_name = 'foo'
        def send_fake_message(message_content: str, stream: Stream) -> Message:
            # Insert a Message row directly, bypassing the normal send
            # codepath, so no UserMessage rows are created for anyone.
            recipient = stream.recipient
            message = Message(sender = sender,
                              recipient = recipient,
                              content = message_content,
                              date_sent = timezone_now(),
                              sending_client = sending_client)
            message.set_topic_name(topic_name)
            message.save()
            return message
        long_term_idle_user = self.example_user('hamlet')
        self.send_stream_message(long_term_idle_user, stream_name)
        do_soft_deactivate_users([long_term_idle_user])
        # Basic case: one missing message gets backfilled.
        sent_message = send_fake_message('Test Message 1', stream)
        idle_user_msg_list = get_user_messages(long_term_idle_user)
        idle_user_msg_count = len(idle_user_msg_list)
        self.assertNotEqual(idle_user_msg_list[-1], sent_message)
        with queries_captured() as queries:
            add_missing_messages(long_term_idle_user)
        self.assert_length(queries, 6)
        idle_user_msg_list = get_user_messages(long_term_idle_user)
        self.assertEqual(len(idle_user_msg_list), idle_user_msg_count + 1)
        self.assertEqual(idle_user_msg_list[-1], sent_message)
        long_term_idle_user.refresh_from_db()
        self.assertEqual(long_term_idle_user.last_active_message_id, sent_message.id)
        # Test that a second add_missing_messages() run only adds messages
        # newer than the user's last_active_message_id.
        sent_message = send_fake_message('Test Message 2', stream)
        idle_user_msg_list = get_user_messages(long_term_idle_user)
        idle_user_msg_count = len(idle_user_msg_list)
        self.assertNotEqual(idle_user_msg_list[-1], sent_message)
        with queries_captured() as queries:
            add_missing_messages(long_term_idle_user)
        self.assert_length(queries, 7)
        idle_user_msg_list = get_user_messages(long_term_idle_user)
        self.assertEqual(len(idle_user_msg_list), idle_user_msg_count + 1)
        self.assertEqual(idle_user_msg_list[-1], sent_message)
        long_term_idle_user.refresh_from_db()
        self.assertEqual(long_term_idle_user.last_active_message_id, sent_message.id)
        # Test UserMessage rows are created correctly when the stream
        # subscription was altered by an admin while the user was away.
        # Test for a public stream.
        sent_message_list = []
        sent_message_list.append(send_fake_message('Test Message 3', stream))
        # Alter subscription to stream.
        self.unsubscribe(long_term_idle_user, stream_name)
        send_fake_message('Test Message 4', stream)
        self.subscribe(long_term_idle_user, stream_name)
        sent_message_list.append(send_fake_message('Test Message 5', stream))
        sent_message_list.reverse()
        idle_user_msg_list = get_user_messages(long_term_idle_user)
        idle_user_msg_count = len(idle_user_msg_list)
        for sent_message in sent_message_list:
            self.assertNotEqual(idle_user_msg_list.pop(), sent_message)
        with queries_captured() as queries:
            add_missing_messages(long_term_idle_user)
        self.assert_length(queries, 6)
        # Message 4 (sent while unsubscribed) must NOT be backfilled.
        idle_user_msg_list = get_user_messages(long_term_idle_user)
        self.assertEqual(len(idle_user_msg_list), idle_user_msg_count + 2)
        for sent_message in sent_message_list:
            self.assertEqual(idle_user_msg_list.pop(), sent_message)
        long_term_idle_user.refresh_from_db()
        self.assertEqual(long_term_idle_user.last_active_message_id, sent_message_list[0].id)
        # Test consecutive subscribe/unsubscribe in a public stream
        sent_message_list = []
        sent_message_list.append(send_fake_message('Test Message 6', stream))
        # Unsubscribe from stream and then immediately subscribe back again.
        self.unsubscribe(long_term_idle_user, stream_name)
        self.subscribe(long_term_idle_user, stream_name)
        sent_message_list.append(send_fake_message('Test Message 7', stream))
        # Again unsubscribe from stream and send a message.
        # This will make sure that if initially in a unsubscribed state
        # a consecutive subscribe/unsubscribe doesn't misbehave.
        self.unsubscribe(long_term_idle_user, stream_name)
        send_fake_message('Test Message 8', stream)
        self.subscribe(long_term_idle_user, stream_name)
        self.unsubscribe(long_term_idle_user, stream_name)
        sent_message_list.reverse()
        idle_user_msg_list = get_user_messages(long_term_idle_user)
        idle_user_msg_count = len(idle_user_msg_list)
        for sent_message in sent_message_list:
            self.assertNotEqual(idle_user_msg_list.pop(), sent_message)
        with queries_captured() as queries:
            add_missing_messages(long_term_idle_user)
        self.assert_length(queries, 6)
        idle_user_msg_list = get_user_messages(long_term_idle_user)
        self.assertEqual(len(idle_user_msg_list), idle_user_msg_count + 2)
        for sent_message in sent_message_list:
            self.assertEqual(idle_user_msg_list.pop(), sent_message)
        long_term_idle_user.refresh_from_db()
        self.assertEqual(long_term_idle_user.last_active_message_id, sent_message_list[0].id)
        # If the user was soft-activated in between, messages received
        # normally then are not re-added after re-deactivation, and a
        # message sent while unsubscribed is not backfilled either.
        do_soft_activate_users([long_term_idle_user])
        self.subscribe(long_term_idle_user, stream_name)
        sent_message_id = self.send_stream_message(
            sender, stream_name, 'Test Message 9')
        self.unsubscribe(long_term_idle_user, stream_name)
        do_soft_deactivate_users([long_term_idle_user])
        send_fake_message('Test Message 10', stream)
        idle_user_msg_list = get_user_messages(long_term_idle_user)
        idle_user_msg_count = len(idle_user_msg_list)
        self.assertEqual(idle_user_msg_list[-1].id, sent_message_id)
        with queries_captured() as queries:
            add_missing_messages(long_term_idle_user)
        self.assert_length(queries, 4)
        idle_user_msg_list = get_user_messages(long_term_idle_user)
        self.assertEqual(len(idle_user_msg_list), idle_user_msg_count)
        # Same subscription-churn scenario, but on an invite-only stream.
        stream_name = "Core"
        private_stream = self.make_stream('Core', invite_only=True)
        self.subscribe(self.example_user("iago"), stream_name)
        sent_message_list = []
        # Sent before hamlet ever subscribed: not backfilled.
        send_fake_message('Test Message 11', private_stream)
        self.subscribe(self.example_user("hamlet"), stream_name)
        sent_message_list.append(send_fake_message('Test Message 12', private_stream))
        self.unsubscribe(long_term_idle_user, stream_name)
        send_fake_message('Test Message 13', private_stream)
        self.subscribe(long_term_idle_user, stream_name)
        sent_message_list.append(send_fake_message('Test Message 14', private_stream))
        sent_message_list.reverse()
        idle_user_msg_list = get_user_messages(long_term_idle_user)
        idle_user_msg_count = len(idle_user_msg_list)
        for sent_message in sent_message_list:
            self.assertNotEqual(idle_user_msg_list.pop(), sent_message)
        with queries_captured() as queries:
            add_missing_messages(long_term_idle_user)
        self.assert_length(queries, 6)
        idle_user_msg_list = get_user_messages(long_term_idle_user)
        self.assertEqual(len(idle_user_msg_list), idle_user_msg_count + 2)
        for sent_message in sent_message_list:
            self.assertEqual(idle_user_msg_list.pop(), sent_message)
        long_term_idle_user.refresh_from_db()
        self.assertEqual(long_term_idle_user.last_active_message_id, sent_message_list[0].id)
    @mock.patch('zerver.lib.soft_deactivation.BULK_CREATE_BATCH_SIZE', 2)
    def test_add_missing_messages_pagination(self) -> None:
        # With the bulk-create batch size patched down to 2, backfilling 5
        # messages exercises the batching codepath (hence the higher query
        # count).
        recipient_list = [self.example_user("hamlet"), self.example_user("iago")]
        stream_name = 'Denmark'
        for user_profile in recipient_list:
            self.subscribe(user_profile, stream_name)
        sender = self.example_user('iago')
        long_term_idle_user = self.example_user('hamlet')
        self.send_stream_message(long_term_idle_user, stream_name)
        do_soft_deactivate_users([long_term_idle_user])
        num_new_messages = 5
        message_ids = []
        for _ in range(num_new_messages):
            message_id = self.send_stream_message(sender, stream_name)
            message_ids.append(message_id)
        idle_user_msg_list = get_user_messages(long_term_idle_user)
        idle_user_msg_count = len(idle_user_msg_list)
        with queries_captured() as queries:
            add_missing_messages(long_term_idle_user)
        self.assert_length(queries, 10)
        idle_user_msg_list = get_user_messages(long_term_idle_user)
        self.assertEqual(len(idle_user_msg_list), idle_user_msg_count + num_new_messages)
        long_term_idle_user.refresh_from_db()
        self.assertEqual(long_term_idle_user.last_active_message_id, message_ids[-1])
    def test_user_message_filter(self) -> None:
        """Check which kinds of messages still generate a UserMessage row
        at send time for a soft-deactivated user (PMs, mentions, wildcard
        mentions, alert words, notification-enabled subscriptions) and
        which do not (ordinary stream traffic, other users' mentions)."""
        recipient_list = [
            self.example_user("hamlet"),
            self.example_user("iago"),
            self.example_user('cordelia')
        ]
        for user_profile in recipient_list:
            self.subscribe(user_profile, "Denmark")
        cordelia = self.example_user('cordelia')
        sender = self.example_user('iago')
        stream_name = 'Denmark'
        topic_name = 'foo'
        def send_stream_message(content: str) -> None:
            self.send_stream_message(sender, stream_name,
                                     content, topic_name)
        def send_personal_message(content: str) -> None:
            self.send_personal_message(sender, self.example_user("hamlet"), content)
        long_term_idle_user = self.example_user('hamlet')
        self.send_stream_message(long_term_idle_user, stream_name)
        do_soft_deactivate_users([long_term_idle_user])
        def assert_um_count(user: UserProfile, count: int) -> None:
            user_messages = get_user_messages(user)
            self.assertEqual(len(user_messages), count)
        def assert_last_um_content(user: UserProfile, content: str, negate: bool=False) -> None:
            user_messages = get_user_messages(user)
            if negate:
                self.assertNotEqual(user_messages[-1].content, content)
            else:
                self.assertEqual(user_messages[-1].content, content)
        # Ordinary stream traffic creates no UserMessage row for the
        # soft-deactivated user (but does for active cordelia).
        general_user_msg_count = len(get_user_messages(cordelia))
        soft_deactivated_user_msg_count = len(get_user_messages(long_term_idle_user))
        message = 'Test Message 1'
        send_stream_message(message)
        assert_last_um_content(long_term_idle_user, message, negate=True)
        assert_um_count(long_term_idle_user, soft_deactivated_user_msg_count)
        assert_um_count(cordelia, general_user_msg_count + 1)
        assert_last_um_content(cordelia, message)
        # Test that sending a message to a stream with soft deactivated user
        # and push/email notifications on creates a UserMessage row for the
        # deactivated user.
        sub = get_subscription(stream_name, long_term_idle_user)
        sub.push_notifications = True
        sub.save()
        general_user_msg_count = len(get_user_messages(cordelia))
        soft_deactivated_user_msg_count = len(get_user_messages(long_term_idle_user))
        message = 'Test private stream message'
        send_stream_message(message)
        assert_um_count(long_term_idle_user, soft_deactivated_user_msg_count + 1)
        assert_last_um_content(long_term_idle_user, message)
        sub.push_notifications = False
        sub.save()
        # Test sending a private message to soft deactivated user creates
        # UserMessage row.
        soft_deactivated_user_msg_count = len(get_user_messages(long_term_idle_user))
        message = 'Test PM'
        send_personal_message(message)
        assert_um_count(long_term_idle_user, soft_deactivated_user_msg_count + 1)
        assert_last_um_content(long_term_idle_user, message)
        # Test UserMessage row is created while user is deactivated if
        # user itself is mentioned.
        general_user_msg_count = len(get_user_messages(cordelia))
        soft_deactivated_user_msg_count = len(get_user_messages(long_term_idle_user))
        message = 'Test @**King Hamlet** mention'
        send_stream_message(message)
        assert_last_um_content(long_term_idle_user, message)
        assert_um_count(long_term_idle_user, soft_deactivated_user_msg_count + 1)
        assert_um_count(cordelia, general_user_msg_count + 1)
        assert_last_um_content(cordelia, message)
        # Test UserMessage row is not created while user is deactivated if
        # anyone is mentioned but the user.
        general_user_msg_count = len(get_user_messages(cordelia))
        soft_deactivated_user_msg_count = len(get_user_messages(long_term_idle_user))
        message = 'Test @**Cordelia Lear** mention'
        send_stream_message(message)
        assert_last_um_content(long_term_idle_user, message, negate=True)
        assert_um_count(long_term_idle_user, soft_deactivated_user_msg_count)
        assert_um_count(cordelia, general_user_msg_count + 1)
        assert_last_um_content(cordelia, message)
        # Test UserMessage row is created while user is deactivated if
        # there is a wildcard mention such as @all or @everyone
        general_user_msg_count = len(get_user_messages(cordelia))
        soft_deactivated_user_msg_count = len(get_user_messages(long_term_idle_user))
        message = 'Test @**all** mention'
        send_stream_message(message)
        assert_last_um_content(long_term_idle_user, message)
        assert_um_count(long_term_idle_user, soft_deactivated_user_msg_count + 1)
        assert_um_count(cordelia, general_user_msg_count + 1)
        assert_last_um_content(cordelia, message)
        general_user_msg_count = len(get_user_messages(cordelia))
        soft_deactivated_user_msg_count = len(get_user_messages(long_term_idle_user))
        message = 'Test @**everyone** mention'
        send_stream_message(message)
        assert_last_um_content(long_term_idle_user, message)
        assert_um_count(long_term_idle_user, soft_deactivated_user_msg_count + 1)
        assert_um_count(cordelia, general_user_msg_count + 1)
        assert_last_um_content(cordelia, message)
        general_user_msg_count = len(get_user_messages(cordelia))
        soft_deactivated_user_msg_count = len(get_user_messages(long_term_idle_user))
        message = 'Test @**stream** mention'
        send_stream_message(message)
        assert_last_um_content(long_term_idle_user, message)
        assert_um_count(long_term_idle_user, soft_deactivated_user_msg_count + 1)
        assert_um_count(cordelia, general_user_msg_count + 1)
        assert_last_um_content(cordelia, message)
        # Test UserMessage row IS created while user is deactivated if
        # there is an alert word in the message (the assertions below
        # expect the row to exist).
        do_add_alert_words(long_term_idle_user, ['test_alert_word'])
        general_user_msg_count = len(get_user_messages(cordelia))
        soft_deactivated_user_msg_count = len(get_user_messages(long_term_idle_user))
        message = 'Testing test_alert_word'
        send_stream_message(message)
        assert_last_um_content(long_term_idle_user, message)
        assert_um_count(long_term_idle_user, soft_deactivated_user_msg_count + 1)
        assert_um_count(cordelia, general_user_msg_count + 1)
        assert_last_um_content(cordelia, message)
        # Test UserMessage row is NOT created while user is deactivated if
        # the message is a /me message (the assertions below expect no row).
        general_user_msg_count = len(get_user_messages(cordelia))
        soft_deactivated_user_msg_count = len(get_user_messages(long_term_idle_user))
        message = '/me says test'
        send_stream_message(message)
        assert_last_um_content(long_term_idle_user, message, negate=True)
        assert_um_count(long_term_idle_user, soft_deactivated_user_msg_count)
        assert_um_count(cordelia, general_user_msg_count + 1)
        assert_last_um_content(cordelia, message)
class MessageHydrationTest(ZulipTestCase):
    """Tests for attaching display_recipient data to raw message dicts
    (MessageDict.hydrate_recipient_info) and for messages_for_ids."""
    def test_hydrate_stream_recipient_info(self) -> None:
        """For stream messages, display_recipient is just the stream name."""
        realm = get_realm('zulip')
        cordelia = self.example_user('cordelia')
        stream_id = get_stream('Verona', realm).id
        # Minimal unhydrated message dict, before display info is attached.
        obj = dict(
            recipient_type=Recipient.STREAM,
            recipient_type_id=stream_id,
            sender_is_mirror_dummy=False,
            sender_email=cordelia.email,
            sender_full_name=cordelia.full_name,
            sender_short_name=cordelia.short_name,
            sender_id=cordelia.id,
        )
        MessageDict.hydrate_recipient_info(obj, 'Verona')
        self.assertEqual(obj['display_recipient'], 'Verona')
        self.assertEqual(obj['type'], 'stream')
    def test_hydrate_pm_recipient_info(self) -> None:
        """For private messages, the sender is appended to the passed-in
        recipient list when hydrating display_recipient."""
        cordelia = self.example_user('cordelia')
        display_recipient: List[UserDisplayRecipient] = [
            dict(
                email='aaron@example.com',
                full_name='Aaron Smith',
                short_name='Aaron',
                id=999,
                is_mirror_dummy=False
            ),
        ]
        obj = dict(
            recipient_type=Recipient.PERSONAL,
            recipient_type_id=None,
            sender_is_mirror_dummy=False,
            sender_email=cordelia.email,
            sender_full_name=cordelia.full_name,
            sender_short_name=cordelia.short_name,
            sender_id=cordelia.id,
        )
        MessageDict.hydrate_recipient_info(obj, display_recipient)
        # The hydrated list contains the original recipient plus the sender.
        self.assertEqual(
            obj['display_recipient'],
            [
                dict(
                    email='aaron@example.com',
                    full_name='Aaron Smith',
                    short_name='Aaron',
                    id=999,
                    is_mirror_dummy=False
                ),
                dict(
                    email=cordelia.email,
                    full_name=cordelia.full_name,
                    id=cordelia.id,
                    short_name=cordelia.short_name,
                    is_mirror_dummy=False,
                ),
            ],
        )
        self.assertEqual(obj['type'], 'private')
    def test_messages_for_ids(self) -> None:
        """messages_for_ids renders content to HTML and carries through the
        per-user flags supplied by the caller."""
        hamlet = self.example_user('hamlet')
        cordelia = self.example_user('cordelia')
        stream_name = 'test stream'
        self.subscribe(cordelia, stream_name)
        # Sent before hamlet subscribes; the test marks it read/historical.
        old_message_id = self.send_stream_message(cordelia, stream_name, content='foo')
        self.subscribe(hamlet, stream_name)
        content = 'hello @**King Hamlet**'
        new_message_id = self.send_stream_message(cordelia, stream_name, content=content)
        user_message_flags = {
            old_message_id: ['read', 'historical'],
            new_message_id: ['mentioned'],
        }
        messages = messages_for_ids(
            message_ids=[old_message_id, new_message_id],
            user_message_flags=user_message_flags,
            search_fields={},
            apply_markdown=True,
            client_gravatar=True,
            allow_edit_history=False,
        )
        self.assertEqual(len(messages), 2)
        for message in messages:
            if message['id'] == old_message_id:
                old_message = message
            elif message['id'] == new_message_id:
                new_message = message
        # apply_markdown=True renders the raw content to HTML.
        self.assertEqual(old_message['content'], '<p>foo</p>')
        self.assertEqual(old_message['flags'], ['read', 'historical'])
        self.assertIn('class="user-mention"', new_message['content'])
        self.assertEqual(new_message['flags'], ['mentioned'])
    def test_display_recipient_up_to_date(self) -> None:
        """A cached display_recipient must not serve stale data after the
        underlying user's email changes."""
        hamlet = self.example_user('hamlet')
        cordelia = self.example_user('cordelia')
        message_id = self.send_personal_message(hamlet, cordelia, 'test')
        cordelia_recipient = cordelia.recipient
        # Cause the display_recipient to get cached:
        get_display_recipient(cordelia_recipient)
        # Change cordelia's email:
        cordelia_new_email = 'new-cordelia@zulip.com'
        cordelia.email = cordelia_new_email
        cordelia.save()
        # Clear request-level caches so the fetch below exercises the
        # regular cache-aware code path.
        flush_per_request_caches()
        messages = messages_for_ids(
            message_ids=[message_id],
            user_message_flags={message_id: ['read']},
            search_fields={},
            apply_markdown=True,
            client_gravatar=True,
            allow_edit_history=False,
        )
        message = messages[0]
        for display_recipient in message['display_recipient']:
            if display_recipient['short_name'] == 'cordelia':
                cordelia_display_recipient = display_recipient
        # The display recipient must reflect the updated email.
        self.assertEqual(cordelia_display_recipient['email'], cordelia_new_email)
class TestMessageForIdsDisplayRecipientFetching(ZulipTestCase):
    """Checks that messages_for_ids produces correct display_recipient data
    for personal, stream and huddle messages, including mixed batches."""
    def _verify_display_recipient(self, display_recipient: DisplayRecipientT,
                                  expected_recipient_objects: Union[Stream, List[UserProfile]]) -> None:
        """Assert display_recipient matches a stream name, or contains a
        matching dict for every expected user (order-insensitive)."""
        if isinstance(expected_recipient_objects, Stream):
            self.assertEqual(display_recipient, expected_recipient_objects.name)
        else:
            for user_profile in expected_recipient_objects:
                # Dict shape mirrors what message hydration produces.
                recipient_dict: UserDisplayRecipient = {
                    'email': user_profile.email,
                    'full_name': user_profile.full_name,
                    'short_name': user_profile.short_name,
                    'id': user_profile.id,
                    'is_mirror_dummy': user_profile.is_mirror_dummy,
                }
                self.assertTrue(recipient_dict in display_recipient)
    def test_display_recipient_personal(self) -> None:
        """Personal messages: recipients are both PM participants."""
        hamlet = self.example_user('hamlet')
        cordelia = self.example_user('cordelia')
        othello = self.example_user('othello')
        message_ids = [
            self.send_personal_message(hamlet, cordelia, 'test'),
            self.send_personal_message(cordelia, othello, 'test')
        ]
        messages = messages_for_ids(
            message_ids=message_ids,
            user_message_flags={message_id: ['read'] for message_id in message_ids},
            search_fields={},
            apply_markdown=True,
            client_gravatar=True,
            allow_edit_history=False,
        )
        self._verify_display_recipient(messages[0]['display_recipient'], [hamlet, cordelia])
        self._verify_display_recipient(messages[1]['display_recipient'], [cordelia, othello])
    def test_display_recipient_stream(self) -> None:
        """Stream messages: display_recipient is the stream name."""
        cordelia = self.example_user('cordelia')
        message_ids = [
            self.send_stream_message(cordelia, "Verona", content='test'),
            self.send_stream_message(cordelia, "Denmark", content='test')
        ]
        messages = messages_for_ids(
            message_ids=message_ids,
            user_message_flags={message_id: ['read'] for message_id in message_ids},
            search_fields={},
            apply_markdown=True,
            client_gravatar=True,
            allow_edit_history=False,
        )
        self._verify_display_recipient(messages[0]['display_recipient'], get_stream("Verona", cordelia.realm))
        self._verify_display_recipient(messages[1]['display_recipient'], get_stream("Denmark", cordelia.realm))
    def test_display_recipient_huddle(self) -> None:
        """Huddles: recipients are all participants including the sender."""
        hamlet = self.example_user('hamlet')
        cordelia = self.example_user('cordelia')
        othello = self.example_user('othello')
        iago = self.example_user('iago')
        message_ids = [
            self.send_huddle_message(hamlet, [cordelia, othello], 'test'),
            self.send_huddle_message(cordelia, [hamlet, othello, iago], 'test')
        ]
        messages = messages_for_ids(
            message_ids=message_ids,
            user_message_flags={message_id: ['read'] for message_id in message_ids},
            search_fields={},
            apply_markdown=True,
            client_gravatar=True,
            allow_edit_history=False,
        )
        self._verify_display_recipient(messages[0]['display_recipient'], [hamlet, cordelia, othello])
        self._verify_display_recipient(messages[1]['display_recipient'], [hamlet, cordelia, othello, iago])
    def test_display_recipient_various_types(self) -> None:
        """A single batch mixing huddle, stream and personal messages must
        hydrate each message's display_recipient correctly."""
        hamlet = self.example_user('hamlet')
        cordelia = self.example_user('cordelia')
        othello = self.example_user('othello')
        iago = self.example_user('iago')
        message_ids = [
            self.send_huddle_message(hamlet, [cordelia, othello], 'test'),
            self.send_stream_message(cordelia, "Verona", content='test'),
            self.send_personal_message(hamlet, cordelia, 'test'),
            self.send_stream_message(cordelia, "Denmark", content='test'),
            self.send_huddle_message(cordelia, [hamlet, othello, iago], 'test'),
            self.send_personal_message(cordelia, othello, 'test')
        ]
        messages = messages_for_ids(
            message_ids=message_ids,
            user_message_flags={message_id: ['read'] for message_id in message_ids},
            search_fields={},
            apply_markdown=True,
            client_gravatar=True,
            allow_edit_history=False,
        )
        self._verify_display_recipient(messages[0]['display_recipient'], [hamlet, cordelia, othello])
        self._verify_display_recipient(messages[1]['display_recipient'], get_stream("Verona", hamlet.realm))
        self._verify_display_recipient(messages[2]['display_recipient'], [hamlet, cordelia])
        self._verify_display_recipient(messages[3]['display_recipient'], get_stream("Denmark", hamlet.realm))
        self._verify_display_recipient(messages[4]['display_recipient'], [hamlet, cordelia, othello, iago])
        self._verify_display_recipient(messages[5]['display_recipient'], [cordelia, othello])
class MessageVisibilityTest(ZulipTestCase):
    """Tests for realm message-visibility limits and the updating of
    realm.first_visible_message_id."""
    def test_update_first_visible_message_id(self) -> None:
        """first_visible_message_id must track the oldest visible message."""
        Message.objects.all().delete()
        message_ids = [self.send_stream_message(self.example_user("othello"), "Scotland") for i in range(15)]
        realm = get_realm("zulip")
        # No limit: everything is visible, so the marker resets to 0.
        realm.message_visibility_limit = None
        realm.first_visible_message_id = 5
        realm.save()
        update_first_visible_message_id(realm)
        self.assertEqual(get_first_visible_message_id(realm), 0)
        # Limit of 10 with 15 messages: only the 10 newest are visible,
        # so the first visible one is message_ids[5].
        realm.message_visibility_limit = 10
        realm.save()
        expected_message_id = message_ids[5]
        update_first_visible_message_id(realm)
        self.assertEqual(get_first_visible_message_id(realm), expected_message_id)
        # Limit larger than the message count: all messages stay visible.
        realm.message_visibility_limit = 50
        realm.save()
        update_first_visible_message_id(realm)
        self.assertEqual(get_first_visible_message_id(realm), 0)
    def test_maybe_update_first_visible_message_id(self) -> None:
        """The update runs only when the realm has a visibility limit AND
        recent message traffic within the lookback window."""
        realm = get_realm("zulip")
        lookback_hours = 30
        realm.message_visibility_limit = None
        realm.save()
        # RealmCount row inside the lookback window simulates traffic.
        end_time = timezone_now() - datetime.timedelta(hours=lookback_hours - 5)
        stat = COUNT_STATS['messages_sent:is_bot:hour']
        RealmCount.objects.create(realm=realm, property=stat.property,
                                  end_time=end_time, value=5)
        # No visibility limit: no update even though traffic exists.
        with mock.patch("zerver.lib.message.update_first_visible_message_id") as m:
            maybe_update_first_visible_message_id(realm, lookback_hours)
        m.assert_not_called()
        realm.message_visibility_limit = 10
        realm.save()
        RealmCount.objects.all().delete()
        # Limit set but no recent traffic: still no update.
        with mock.patch("zerver.lib.message.update_first_visible_message_id") as m:
            maybe_update_first_visible_message_id(realm, lookback_hours)
        m.assert_not_called()
        RealmCount.objects.create(realm=realm, property=stat.property,
                                  end_time=end_time, value=5)
        # Limit set and recent traffic: the update runs exactly once.
        with mock.patch("zerver.lib.message.update_first_visible_message_id") as m:
            maybe_update_first_visible_message_id(realm, lookback_hours)
        m.assert_called_once_with(realm)
class TestBulkGetHuddleUserIds(ZulipTestCase):
    """Tests for bulk_get_huddle_user_ids."""
    def test_bulk_get_huddle_user_ids(self) -> None:
        """The bulk helper must agree with per-recipient get_huddle_user_ids."""
        hamlet = self.example_user('hamlet')
        cordelia = self.example_user('cordelia')
        othello = self.example_user('othello')
        iago = self.example_user('iago')
        message_ids = [
            self.send_huddle_message(hamlet, [cordelia, othello], 'test'),
            self.send_huddle_message(cordelia, [hamlet, othello, iago], 'test')
        ]
        messages = Message.objects.filter(id__in=message_ids).order_by("id")
        first_huddle_recipient = messages[0].recipient
        first_huddle_user_ids = list(get_huddle_user_ids(first_huddle_recipient))
        second_huddle_recipient = messages[1].recipient
        second_huddle_user_ids = list(get_huddle_user_ids(second_huddle_recipient))
        # Bulk results are keyed by recipient id and must match the
        # single-recipient lookups above.
        huddle_user_ids = bulk_get_huddle_user_ids([first_huddle_recipient, second_huddle_recipient])
        self.assertEqual(huddle_user_ids[first_huddle_recipient.id], first_huddle_user_ids)
        self.assertEqual(huddle_user_ids[second_huddle_recipient.id], second_huddle_user_ids)
    def test_bulk_get_huddle_user_ids_empty_list(self) -> None:
        """An empty input yields an empty dict."""
        self.assertEqual(bulk_get_huddle_user_ids([]), {})
class NoRecipientIDsTest(ZulipTestCase):
    """Edge case: a user subscribed to no streams at all."""
    def test_no_recipient_ids(self) -> None:
        """gather_subscriptions_helper must return no subscriptions (rather
        than erroring) when the user has no stream subscriptions."""
        user_profile = self.example_user('cordelia')
        # Remove every stream subscription for the user.
        Subscription.objects.filter(user_profile=user_profile, recipient__type=Recipient.STREAM).delete()
        subs = gather_subscriptions_helper(user_profile)
        # After the deletion, the first component of the result (the
        # subscribed streams, per the assertion below) must be empty.
        self.assertEqual(len(subs[0]), 0)
| true | true |
f71c2d89d64d953f36a10a9d27d38da71f45ea05 | 436 | py | Python | Python/python-practice/chapter7-while/sandwich_orders.py | jiaoqiyuan/Tests | a3595b0e4b430d910f90e428d6b6b4465f67a059 | [
"Apache-2.0"
] | null | null | null | Python/python-practice/chapter7-while/sandwich_orders.py | jiaoqiyuan/Tests | a3595b0e4b430d910f90e428d6b6b4465f67a059 | [
"Apache-2.0"
] | null | null | null | Python/python-practice/chapter7-while/sandwich_orders.py | jiaoqiyuan/Tests | a3595b0e4b430d910f90e428d6b6b4465f67a059 | [
"Apache-2.0"
] | null | null | null | sandwich_orders = ['aaa', 'pastrami', 'bbb', 'pastrami', 'ccc','pastrami']
# Pending orders; the deli has run out of pastrami, so those must be dropped.
sandwich_orders = ['aaa', 'pastrami', 'bbb', 'pastrami', 'ccc', 'pastrami']
finished_sandwiches = []
print("\nAll pastrami had been sold!")
# Drop every pastrami order, since none can be made.
while 'pastrami' in sandwich_orders:
    sandwich_orders.remove('pastrami')
# Make the remaining sandwiches, most recent order first (list.pop()).
while sandwich_orders:
    sandwich_order = sandwich_orders.pop()
    # Fixed typo in the user-facing message: "sanwichi" -> "sandwich".
    print("\nI made your tuna sandwich " + sandwich_order)
    finished_sandwiches.append(sandwich_order)
print("\nI have finished all sandwiches!")
| 33.538462 | 74 | 0.729358 | sandwich_orders = ['aaa', 'pastrami', 'bbb', 'pastrami', 'ccc','pastrami']
finished_sandwiches = []
print("\nAll pastrami had been sold!")
while 'pastrami' in sandwich_orders:
sandwich_orders.remove('pastrami')
while sandwich_orders:
sandwich_order = sandwich_orders.pop()
print("\nI made your tuna sanwichi " + sandwich_order)
finished_sandwiches.append(sandwich_order)
print("\nI have finished all sandwiches!")
| true | true |
f71c2e710a36a4f82f3feb6e43398072d821ab5d | 219 | py | Python | olc_webportalv2/cowbat/admin.py | OLC-Bioinformatics/olc_genomics_portal | d70ec669a3a49106f8290fff5dee089726259a23 | [
"MIT"
] | 3 | 2019-01-03T21:22:21.000Z | 2019-04-23T15:47:29.000Z | olc_webportalv2/cowbat/admin.py | lowandrew/olc_webportalv2 | e75ba1b7af85bb25b59138d31e268ecde6616208 | [
"MIT"
] | 49 | 2019-01-03T18:15:12.000Z | 2022-03-11T23:37:20.000Z | olc_webportalv2/cowbat/admin.py | OLC-Bioinformatics/olc_webportalv2 | d70ec669a3a49106f8290fff5dee089726259a23 | [
"MIT"
] | 58 | 2019-01-03T21:21:59.000Z | 2021-11-02T18:00:20.000Z | from django.contrib import admin
from .models import DataFile, SequencingRun, InterOpFile
# Register your models here.
admin.site.register(DataFile)
admin.site.register(SequencingRun)
admin.site.register(InterOpFile)
| 24.333333 | 56 | 0.826484 | from django.contrib import admin
from .models import DataFile, SequencingRun, InterOpFile
admin.site.register(DataFile)
admin.site.register(SequencingRun)
admin.site.register(InterOpFile)
| true | true |
f71c3158cd7fa547b538702286ea7c2954416084 | 2,113 | py | Python | nipype/interfaces/afni/tests/test_auto_ROIStats.py | PAmcconnell/nipype | 39fbd5411a844ce7c023964d3295eb7643b95af5 | [
"Apache-2.0"
] | null | null | null | nipype/interfaces/afni/tests/test_auto_ROIStats.py | PAmcconnell/nipype | 39fbd5411a844ce7c023964d3295eb7643b95af5 | [
"Apache-2.0"
] | 2 | 2018-04-26T12:09:32.000Z | 2018-04-27T06:36:49.000Z | nipype/interfaces/afni/tests/test_auto_ROIStats.py | PAmcconnell/nipype | 39fbd5411a844ce7c023964d3295eb7643b95af5 | [
"Apache-2.0"
] | 1 | 2019-11-14T14:16:57.000Z | 2019-11-14T14:16:57.000Z | # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
from ..preprocess import ROIStats
def test_ROIStats_inputs():
    """Verify that the ROIStats input spec exposes exactly the expected
    trait metadata (argstrs, positions, xor/requires/deprecation rules).

    Auto-generated by nipype's tools/checkspecs.py; regenerate rather
    than editing by hand.
    """
    # Expected metadata for every input trait of the AFNI ROIStats interface.
    input_map = dict(
        args=dict(argstr='%s', ),
        debug=dict(argstr='-debug', ),
        environ=dict(
            nohash=True,
            usedefault=True,
        ),
        format1D=dict(
            argstr='-1Dformat',
            xor=['format1DR'],
        ),
        format1DR=dict(
            argstr='-1DRformat',
            xor=['format1D'],
        ),
        in_file=dict(
            argstr='%s',
            extensions=None,
            mandatory=True,
            position=-2,
        ),
        mask=dict(
            argstr='-mask %s',
            deprecated='1.1.4',
            extensions=None,
            new_name='mask_file',
            position=3,
        ),
        mask_f2short=dict(argstr='-mask_f2short', ),
        mask_file=dict(
            argstr='-mask %s',
            extensions=None,
        ),
        nobriklab=dict(argstr='-nobriklab', ),
        nomeanout=dict(argstr='-nomeanout', ),
        num_roi=dict(argstr='-numroi %s', ),
        out_file=dict(
            argstr='> %s',
            extensions=None,
            keep_extension=False,
            name_source='in_file',
            name_template='%s_roistat.1D',
            position=-1,
        ),
        quiet=dict(argstr='-quiet', ),
        roisel=dict(
            argstr='-roisel %s',
            extensions=None,
        ),
        stat=dict(argstr='%s...', ),
        zerofill=dict(
            argstr='-zerofill %s',
            requires=['num_roi'],
        ),
    )
    inputs = ROIStats.input_spec()
    # Every expected (trait, metadata) pair must match the live spec.
    for key, metadata in list(input_map.items()):
        for metakey, value in list(metadata.items()):
            assert getattr(inputs.traits()[key], metakey) == value
def test_ROIStats_outputs():
    """Verify the ROIStats output spec trait metadata (auto-generated by
    tools/checkspecs.py; regenerate rather than editing by hand)."""
    output_map = dict(out_file=dict(extensions=None, ), )
    outputs = ROIStats.output_spec()
    # Every expected (trait, metadata) pair must match the live spec.
    for key, metadata in list(output_map.items()):
        for metakey, value in list(metadata.items()):
            assert getattr(outputs.traits()[key], metakey) == value
| 28.945205 | 67 | 0.510648 |
from ..preprocess import ROIStats
def test_ROIStats_inputs():
input_map = dict(
args=dict(argstr='%s', ),
debug=dict(argstr='-debug', ),
environ=dict(
nohash=True,
usedefault=True,
),
format1D=dict(
argstr='-1Dformat',
xor=['format1DR'],
),
format1DR=dict(
argstr='-1DRformat',
xor=['format1D'],
),
in_file=dict(
argstr='%s',
extensions=None,
mandatory=True,
position=-2,
),
mask=dict(
argstr='-mask %s',
deprecated='1.1.4',
extensions=None,
new_name='mask_file',
position=3,
),
mask_f2short=dict(argstr='-mask_f2short', ),
mask_file=dict(
argstr='-mask %s',
extensions=None,
),
nobriklab=dict(argstr='-nobriklab', ),
nomeanout=dict(argstr='-nomeanout', ),
num_roi=dict(argstr='-numroi %s', ),
out_file=dict(
argstr='> %s',
extensions=None,
keep_extension=False,
name_source='in_file',
name_template='%s_roistat.1D',
position=-1,
),
quiet=dict(argstr='-quiet', ),
roisel=dict(
argstr='-roisel %s',
extensions=None,
),
stat=dict(argstr='%s...', ),
zerofill=dict(
argstr='-zerofill %s',
requires=['num_roi'],
),
)
inputs = ROIStats.input_spec()
for key, metadata in list(input_map.items()):
for metakey, value in list(metadata.items()):
assert getattr(inputs.traits()[key], metakey) == value
def test_ROIStats_outputs():
output_map = dict(out_file=dict(extensions=None, ), )
outputs = ROIStats.output_spec()
for key, metadata in list(output_map.items()):
for metakey, value in list(metadata.items()):
assert getattr(outputs.traits()[key], metakey) == value
| true | true |
f71c315a2c062c492837c96d407768a4e6981339 | 2,392 | py | Python | libs/cherrypy/tutorial/tut06_default_method.py | scambra/HTPC-Manager | 1a1440db84ae1b6e7a2610c7f3bd5b6adf0aab1d | [
"MIT"
] | 674 | 2015-11-06T04:22:47.000Z | 2022-02-26T17:31:43.000Z | libs/cherrypy/tutorial/tut06_default_method.py | scambra/HTPC-Manager | 1a1440db84ae1b6e7a2610c7f3bd5b6adf0aab1d | [
"MIT"
] | 713 | 2015-11-06T10:48:58.000Z | 2018-11-27T16:32:18.000Z | libs/cherrypy/tutorial/tut06_default_method.py | scambra/HTPC-Manager | 1a1440db84ae1b6e7a2610c7f3bd5b6adf0aab1d | [
"MIT"
] | 115 | 2015-01-08T14:41:00.000Z | 2022-02-13T12:31:17.000Z | """
Tutorial - The default method
Request handler objects can implement a method called "default" that
is called when no other suitable method/object could be found.
Essentially, if CherryPy2 can't find a matching request handler object
for the given request URI, it will use the default method of the object
located deepest on the URI path.
Using this mechanism you can easily simulate virtual URI structures
by parsing the extra URI string, which you can access through
cherrypy.request.virtualPath.
The application in this tutorial simulates an URI structure looking
like /users/<username>. Since the <username> bit will not be found (as
there are no matching methods), it is handled by the default method.
"""
import cherrypy
class UsersPage:
    """Request handler demonstrating CherryPy's ``default`` method.

    Simulates a virtual /users/<username> URI structure: names with no
    matching handler method fall through to ``default``.
    """
    def index(self):
        # Hard-coded list of made-up users for the demo; a real app would
        # build this from a database result set.
        return '''
            <a href="./remi">Remi Delon</a><br/>
            <a href="./hendrik">Hendrik Mans</a><br/>
            <a href="./lorenzo">Lorenzo Lamas</a><br/>
        '''
    index.exposed = True
    def default(self, user):
        # The unmatched URI component arrives here as `user`; a lookup
        # table replaces the if/elif chain of the classic tutorial.
        descriptions = {
            'remi': "Remi Delon, CherryPy lead developer",
            'hendrik': "Hendrik Mans, CherryPy co-developer & crazy German",
            'lorenzo': "Lorenzo Lamas, famous actor and singer!",
        }
        out = descriptions.get(user, "Unknown user. :-(")
        return '%s (<a href="./">back</a>)' % out
    default.exposed = True
import os.path
# The config file lives next to this script, independent of the cwd.
tutconf = os.path.join(os.path.dirname(__file__), 'tutorial.conf')
if __name__ == '__main__':
    # CherryPy always starts with app.root when trying to map request URIs
    # to objects, so we need to mount a request handler root. A request
    # to '/' will be mapped to UsersPage().index().
    cherrypy.quickstart(UsersPage(), config=tutconf)
else:
    # This branch is for the test suite; you can ignore it.
    cherrypy.tree.mount(UsersPage(), config=tutconf)
| 37.375 | 74 | 0.669732 |
import cherrypy
class UsersPage:
def index(self):
# display a list of links to random, made-up users. In a real
# application, this could be generated from a database result set.
return '''
<a href="./remi">Remi Delon</a><br/>
<a href="./hendrik">Hendrik Mans</a><br/>
<a href="./lorenzo">Lorenzo Lamas</a><br/>
'''
index.exposed = True
def default(self, user):
# Here we react depending on the virtualPath -- the part of the
# path that could not be mapped to an object method. In a real
# application, we would probably do some database lookups here
# instead of the silly if/elif/else construct.
if user == 'remi':
out = "Remi Delon, CherryPy lead developer"
elif user == 'hendrik':
out = "Hendrik Mans, CherryPy co-developer & crazy German"
elif user == 'lorenzo':
out = "Lorenzo Lamas, famous actor and singer!"
else:
out = "Unknown user. :-("
return '%s (<a href="./">back</a>)' % out
default.exposed = True
import os.path
tutconf = os.path.join(os.path.dirname(__file__), 'tutorial.conf')
if __name__ == '__main__':
# CherryPy always starts with app.root when trying to map request URIs
# to objects, so we need to mount a request handler root. A request
# to '/' will be mapped to HelloWorld().index().
cherrypy.quickstart(UsersPage(), config=tutconf)
else:
# This branch is for the test suite; you can ignore it.
cherrypy.tree.mount(UsersPage(), config=tutconf)
| true | true |
f71c31685adf10f4b09b39fca342aeaf980132fc | 9,541 | py | Python | image-classifier/image-classifier.py | subhadip7879/neural-net | 04aacf7cdec89ea0f58f2c7397c72adefa8c2d4e | [
"MIT"
] | null | null | null | image-classifier/image-classifier.py | subhadip7879/neural-net | 04aacf7cdec89ea0f58f2c7397c72adefa8c2d4e | [
"MIT"
] | 3 | 2017-10-29T17:39:20.000Z | 2017-10-29T18:35:08.000Z | image-classifier/image-classifier.py | subhadip7879/neural-net | 04aacf7cdec89ea0f58f2c7397c72adefa8c2d4e | [
"MIT"
] | 6 | 2017-10-29T17:32:46.000Z | 2018-10-05T09:49:31.000Z | from IPython.display import Image
# Notebook-export preamble: display the tutorial's architecture diagrams.
Image('images/02_network_flowchart.png')
Image('images/02_convolution.png')
import matplotlib.pyplot as plt
import tensorflow as tf
import numpy as np
from sklearn.metrics import confusion_matrix
import time
from datetime import timedelta
import math
from tensorflow.examples.tutorials.mnist import input_data
tf.__version__  # notebook cell echo; has no effect when run as a script
#Convolutional Layer 1.
# will connect each neuron to only a local region of the input volume
# Convolution filters are 5 x 5 pixels.
filter_size1 = 5
num_filters1 = 16
# Convolutional Layer 2.
filter_size2 = 5
num_filters2 = 36
# Fully-connected layer.
fc_size = 128
# Download (if needed) and load MNIST with one-hot encoded labels.
data = input_data.read_data_sets('data/MNIST/', one_hot=True)
print("Size of:")
print("- Training-set:\t\t{}".format(len(data.train.labels)))
print("- Test-set:\t\t{}".format(len(data.test.labels)))
print("- Validation-set:\t{}".format(len(data.validation.labels)))
# Integer class labels derived from the one-hot test labels.
data.test.cls = np.argmax(data.test.labels, axis=1)
# MNIST images are 28x28 pixels, greyscale (one channel), 10 classes.
img_size = 28
# Images are stored in 1d array of this length.
img_size_flat = img_size * img_size
img_shape = (img_size, img_size)
num_channels = 1
num_classes = 10
def plot_images(images, cls_true, cls_pred=None):
    """Plot nine images in a 3x3 grid with their true (and optionally
    predicted) class labels.

    images:   nine flattened images (each of length img_size_flat).
    cls_true: true class label for each image.
    cls_pred: predicted class labels; omitted from captions when None.
    """
    # Both inputs must describe exactly the 9 cells of the 3x3 grid.
    # Bug fix: the original evaluated `len(cls_true) == 9` as a bare
    # expression, so the second check never actually asserted anything.
    assert len(images) == 9
    assert len(cls_true) == 9
    fig, axes = plt.subplots(3, 3)
    fig.subplots_adjust(hspace=0.3, wspace=0.3)
    for i, ax in enumerate(axes.flat):
        # Reshape the flat pixel vector back to 2-D for imshow.
        ax.imshow(images[i].reshape(img_shape), cmap='binary')
        if cls_pred is None:
            xlabel = "True: {0}".format(cls_true[i])
        else:
            xlabel = "True: {0}, Pred: {1}".format(cls_true[i], cls_pred[i])
        ax.set_xlabel(xlabel)
        # Tick marks carry no information for image plots.
        ax.set_xticks([])
        ax.set_yticks([])
    plt.show()
# First nine images from the MNIST test set.
images = data.test.images[0:9]
cls_true = data.test.cls[0:9]
# Plot them with their true labels as a sanity check on the data loading.
plot_images(images=images, cls_true=cls_true)
def new_weights(shape):
    """Create a weight variable of `shape`, initialised with small
    truncated-normal noise (stddev 0.05)."""
    initial = tf.truncated_normal(shape, stddev=0.05)
    return tf.Variable(initial)
def new_biases(length):
    """Create a bias variable: a vector of `length` entries, all 0.05."""
    initial = tf.constant(0.05, shape=[length])
    return tf.Variable(initial)
def new_conv_layer(input,num_input_channels, filter_size,num_filters,use_pooling=True):
    """Create a convolutional layer.

    input:              4-D tensor [batch, height, width, num_input_channels].
    num_input_channels: number of channels in `input`.
    filter_size:        width/height of each (square) filter.
    num_filters:        number of filters, i.e. output channels.
    use_pooling:        if True, apply 2x2 max-pooling (stride 2).

    Returns (layer, weights); the weights are returned separately so they
    can be plotted later.
    """
    # Filter-weight shape expected by tf.nn.conv2d.
    shape = [filter_size, filter_size, num_input_channels, num_filters]
    weights = new_weights(shape=shape)
    biases = new_biases(length=num_filters)
    # Stride 1 in every dimension; SAME padding preserves the spatial size.
    layer = tf.nn.conv2d(input=input,
                         filter=weights,
                         strides=[1, 1, 1, 1],
                         padding='SAME')
    # One bias per output channel, broadcast over the spatial dimensions.
    layer += biases
    if use_pooling:
        # 2x2 max-pooling with stride 2 halves the height and width.
        layer = tf.nn.max_pool(value=layer,
                               ksize=[1, 2, 2, 1],
                               strides=[1, 2, 2, 1],
                               padding='SAME')
    # ReLU applied after pooling; since max-pooling commutes with the
    # monotonic ReLU, this equals relu-then-pool but computes fewer values.
    layer = tf.nn.relu(layer)
    return layer, weights
def flatten_layer(layer):
    """Collapse a 4-D conv output [batch, h, w, channels] into a 2-D
    tensor [batch, h * w * channels] for the fully-connected layers.

    Returns the flattened tensor and the per-image feature count.
    """
    shape = layer.get_shape()
    # Features per image = height * width * channels (dimensions 1..3).
    num_features = shape[1:4].num_elements()
    # -1 lets TensorFlow infer the batch dimension.
    flattened = tf.reshape(layer, [-1, num_features])
    return flattened, num_features
def new_fc_layer(input, num_inputs, num_outputs, use_relu=True):
    """Fully-connected layer computing input @ weights + biases,
    optionally followed by a ReLU activation."""
    weights = new_weights(shape=[num_inputs, num_outputs])
    biases = new_biases(length=num_outputs)
    activation = tf.matmul(input, weights) + biases
    if use_relu:
        activation = tf.nn.relu(activation)
    return activation
# Placeholder for flattened input images; None allows any batch size.
x = tf.placeholder(tf.float32, shape=[None, img_size_flat], name='x')
# Conv layers need 4-D input: [batch, height, width, channels].
x_image = tf.reshape(x, [-1, img_size, img_size, num_channels])
# One-hot true labels and the derived integer class labels.
y_true = tf.placeholder(tf.float32, shape=[None, 10], name='y_true')
y_true_cls = tf.argmax(y_true, dimension=1)
layer_conv1, weights_conv1 = \
    new_conv_layer(input=x_image,
                   num_input_channels=num_channels,
                   filter_size=filter_size1,
                   num_filters=num_filters1,
                   use_pooling=True)
# Bare names below are notebook cell echoes; they have no effect as a script.
layer_conv1
layer_conv2, weights_conv2 = \
    new_conv_layer(input=layer_conv1,
                   num_input_channels=num_filters1,
                   filter_size=filter_size2,
                   num_filters=num_filters2,
                   use_pooling=True)
layer_conv2
# Flatten the conv output so it can feed the fully-connected layers.
layer_flat, num_features = flatten_layer(layer_conv2)
layer_flat
num_features
layer_fc1 = new_fc_layer(input=layer_flat,
                         num_inputs=num_features,
                         num_outputs=fc_size,
                         use_relu=True)
layer_fc1
# Final layer: one logit per class, no ReLU (softmax is applied below).
layer_fc2 = new_fc_layer(input=layer_fc1,
                         num_inputs=fc_size,
                         num_outputs=num_classes,
                         use_relu=False)
layer_fc2
y_pred = tf.nn.softmax(layer_fc2)
y_pred_cls = tf.argmax(y_pred, dimension=1)
# Cross-entropy is computed from the raw logits (not y_pred) for
# numerical stability, as this op expects.
cross_entropy = tf.nn.softmax_cross_entropy_with_logits(logits=layer_fc2,
                                                        labels=y_true)
cost = tf.reduce_mean(cross_entropy)
optimizer = tf.train.AdamOptimizer(learning_rate=1e-4).minimize(cost)
# Fraction of the batch whose predicted class matches the true class.
correct_prediction = tf.equal(y_pred_cls, y_true_cls)
accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
# TF1-style session; run() executes the graph nodes defined above.
session = tf.Session()
session.run(tf.global_variables_initializer())
train_batch_size = 64
# Running count of optimization iterations performed so far (see optimize()).
total_iterations = 0
def optimize(num_iterations):
    """Run `num_iterations` mini-batch Adam steps on the training set.

    Uses the module-level `session`, `optimizer` and `data`, and advances
    the global `total_iterations` counter so repeated calls continue the
    iteration numbering. Prints the training-batch accuracy every 100
    iterations and the total wall-clock time at the end.
    """
    # The counter persists across calls so progress messages keep counting.
    global total_iterations
    start_time = time.time()
    for i in range(total_iterations, total_iterations + num_iterations):
        # Next mini-batch of training images and their one-hot labels.
        x_batch, y_true_batch = data.train.next_batch(train_batch_size)
        feed_dict_train = {x: x_batch, y_true: y_true_batch}
        session.run(optimizer, feed_dict=feed_dict_train)
        if i % 100 == 0:
            # Accuracy on the current training batch only -- a cheap
            # progress signal, not a test-set evaluation.
            acc = session.run(accuracy, feed_dict=feed_dict_train)
            msg = "Optimization Iteration: {0:>6}, Training Accuracy: {1:>6.1%}"
            print(msg.format(i + 1, acc))
    total_iterations += num_iterations
    end_time = time.time()
    time_dif = end_time - start_time
    print("Time usage: " + str(timedelta(seconds=int(round(time_dif)))))
def plot_example_errors(cls_pred, correct):
    """Plot (up to) the first nine misclassified test images.

    cls_pred: predicted class for every test image.
    correct:  boolean array marking which predictions were right.
    """
    # Boolean mask selecting the misclassified images.
    incorrect = (correct == False)
    images = data.test.images[incorrect]
    cls_pred = cls_pred[incorrect]
    cls_true = data.test.cls[incorrect]
    plot_images(images=images[0:9], cls_true=cls_true[0:9], cls_pred=cls_pred[0:9])
def plot_confusion_matrix(cls_pred):
    """Print and plot the confusion matrix of predicted vs. true classes
    over the whole test set."""
    cls_true = data.test.cls
    cm = confusion_matrix(y_true=cls_true, y_pred=cls_pred)
    print(cm)
    plt.matshow(cm)
    plt.colorbar()
    # Label both axes with the class indices 0..num_classes-1.
    tick_marks = np.arange(num_classes)
    plt.xticks(tick_marks, range(num_classes))
    plt.yticks(tick_marks, range(num_classes))
    plt.xlabel('Predicted')
    plt.ylabel('True')
    plt.show()
# Evaluate the test set in batches of this size to bound memory use.
test_batch_size = 256
def print_test_accuracy(show_example_errors=False, show_confusion_matrix=False):
    """Classify the whole test set in batches and print overall accuracy.

    show_example_errors:   also plot nine misclassified images.
    show_confusion_matrix: also print/plot the confusion matrix.
    """
    num_test = len(data.test.images)
    # Predicted class for every test image, filled in batch by batch.
    cls_pred = np.zeros(shape=num_test, dtype=np.int)
    i = 0
    while i < num_test:
        # The current batch spans indices [i, j).
        j = min(i + test_batch_size, num_test)
        images = data.test.images[i:j, :]
        labels = data.test.labels[i:j, :]
        feed_dict = {x: images, y_true: labels}
        cls_pred[i:j] = session.run(y_pred_cls, feed_dict=feed_dict)
        i = j
    cls_true = data.test.cls
    correct = (cls_true == cls_pred)
    correct_sum = correct.sum()
    acc = float(correct_sum) / num_test
    msg = "Accuracy on Test-Set: {0:.1%} ({1} / {2})"
    print(msg.format(acc, correct_sum, num_test))
    if show_example_errors:
        print("Example errors:")
        plot_example_errors(cls_pred=cls_pred, correct=correct)
    if show_confusion_matrix:
        print("Confusion Matrix:")
        plot_confusion_matrix(cls_pred=cls_pred)
# Accuracy before any training (expected to be roughly chance level).
print_test_accuracy()
optimize(num_iterations=1)
print_test_accuracy()
optimize(num_iterations=99)  # 100 iterations in total
print_test_accuracy(show_example_errors=True)
optimize(num_iterations=900)  # 1,000 iterations in total
print_test_accuracy(show_example_errors=True)
optimize(num_iterations=9000)  # 10,000 iterations in total
print_test_accuracy(show_example_errors=True, show_confusion_matrix=True)
def plot_conv_weights(weights, input_channel=0):
    """Plot a conv layer's filter weights on a shared colour scale.

    weights:       TF variable shaped [h, w, in_channels, num_filters]
                   (indexed as w[:, :, input_channel, i] below).
    input_channel: which input channel's slice of each filter to show.
    """
    # Fetch the current weight values from the TF session.
    w = session.run(weights)
    # Shared colour limits make the filters visually comparable.
    w_min = np.min(w)
    w_max = np.max(w)
    num_filters = w.shape[3]
    # Smallest square grid that fits all filters.
    num_grids = math.ceil(math.sqrt(num_filters))
    fig, axes = plt.subplots(num_grids, num_grids)
    for i, ax in enumerate(axes.flat):
        # Only the first num_filters grid cells contain an image.
        if i<num_filters:
            img = w[:, :, input_channel, i]
            ax.imshow(img, vmin=w_min, vmax=w_max,
                      interpolation='nearest', cmap='seismic')
        ax.set_xticks([])
        ax.set_yticks([])
    plt.show()
def plot_conv_layer(layer, image):
    """Feed a single image through the graph and plot the given layer's
    output, one greyscale panel per output channel."""
    # Batch of one image; y_true is not needed to evaluate this layer.
    feed_dict = {x: [image]}
    values = session.run(layer, feed_dict=feed_dict)
    num_filters = values.shape[3]
    # Smallest square grid that fits all channels.
    num_grids = math.ceil(math.sqrt(num_filters))
    fig, axes = plt.subplots(num_grids, num_grids)
    for i, ax in enumerate(axes.flat):
        # Only the first num_filters grid cells contain an image.
        if i<num_filters:
            # Channel i of the single image in the batch.
            img = values[0, :, :, i]
            ax.imshow(img, interpolation='nearest', cmap='binary')
        ax.set_xticks([])
        ax.set_yticks([])
    plt.show()
def plot_image(image):
    """Display a single flattened image as a greyscale plot."""
    pixels = image.reshape(img_shape)
    plt.imshow(pixels, interpolation='nearest', cmap='binary')
    plt.show()
# Two sample test images used to visualise the layer activations below.
image1 = data.test.images[0]
plot_image(image1)
image2 = data.test.images[13]
plot_image(image2)
# Layer-1 filters and their responses to the two sample images.
plot_conv_weights(weights=weights_conv1)
plot_conv_layer(layer=layer_conv1, image=image1)
plot_conv_layer(layer=layer_conv1, image=image2)
# Layer-2 filters (one plot per input channel shown) and responses.
plot_conv_weights(weights=weights_conv2, input_channel=0)
plot_conv_weights(weights=weights_conv2, input_channel=1)
plot_conv_layer(layer=layer_conv2, image=image1)
plot_conv_layer(layer=layer_conv2, image=image2)
| 30.097792 | 87 | 0.659889 | from IPython.display import Image
Image('images/02_network_flowchart.png')
Image('images/02_convolution.png')
import matplotlib.pyplot as plt
import tensorflow as tf
import numpy as np
from sklearn.metrics import confusion_matrix
import time
from datetime import timedelta
import math
from tensorflow.examples.tutorials.mnist import input_data
tf.__version__
filter_size1 = 5
num_filters1 = 16
filter_size2 = 5
num_filters2 = 36
fc_size = 128
data = input_data.read_data_sets('data/MNIST/', one_hot=True)
print("Size of:")
print("- Training-set:\t\t{}".format(len(data.train.labels)))
print("- Test-set:\t\t{}".format(len(data.test.labels)))
print("- Validation-set:\t{}".format(len(data.validation.labels)))
data.test.cls = np.argmax(data.test.labels, axis=1)
img_size = 28
img_size_flat = img_size * img_size
img_shape = (img_size, img_size)
num_channels = 1
num_classes = 10
def plot_images(images, cls_true, cls_pred=None):
assert len(images) == 9
len(cls_true) == 9
fig, axes = plt.subplots(3, 3)
fig.subplots_adjust(hspace=0.3, wspace=0.3)
for i, ax in enumerate(axes.flat):
ax.imshow(images[i].reshape(img_shape), cmap='binary')
if cls_pred is None:
xlabel = "True: {0}".format(cls_true[i])
else:
xlabel = "True: {0}, Pred: {1}".format(cls_true[i], cls_pred[i])
ax.set_xlabel(xlabel)
ax.set_xticks([])
ax.set_yticks([])
plt.show()
images = data.test.images[0:9]
cls_true = data.test.cls[0:9]
plot_images(images=images, cls_true=cls_true)
def new_weights(shape):
return tf.Variable(tf.truncated_normal(shape, stddev=0.05))
def new_biases(length):
return tf.Variable(tf.constant(0.05, shape=[length]))
def new_conv_layer(input,num_input_channels, filter_size,num_filters,use_pooling=True):
shape = [filter_size, filter_size, num_input_channels, num_filters]
weights = new_weights(shape=shape)
biases = new_biases(length=num_filters)
layer = tf.nn.conv2d(input=input,
filter=weights,
strides=[1, 1, 1, 1],
padding='SAME')
layer += biases
if use_pooling:
layer = tf.nn.max_pool(value=layer,
ksize=[1, 2, 2, 1],
strides=[1, 2, 2, 1],
padding='SAME')
layer = tf.nn.relu(layer)
return layer, weights
def flatten_layer(layer):
layer_shape = layer.get_shape()
num_features = layer_shape[1:4].num_elements()
layer_flat = tf.reshape(layer, [-1, num_features])
return layer_flat, num_features
def new_fc_layer(input, num_inputs, num_outputs, use_relu=True):
    """Build a fully-connected layer: input @ W + b, optionally followed by ReLU."""
    weights = new_weights(shape=[num_inputs, num_outputs])
    biases = new_biases(length=num_outputs)
    result = tf.matmul(input, weights) + biases
    return tf.nn.relu(result) if use_relu else result
# Placeholders: flat input images and one-hot true labels.
x = tf.placeholder(tf.float32, shape=[None, img_size_flat], name='x')
# conv2d expects 4-D input: [batch, height, width, channels].
x_image = tf.reshape(x, [-1, img_size, img_size, num_channels])
y_true = tf.placeholder(tf.float32, shape=[None, 10], name='y_true')
y_true_cls = tf.argmax(y_true, dimension=1)
# First convolutional layer operates on the raw image.
layer_conv1, weights_conv1 = \
    new_conv_layer(input=x_image,
                   num_input_channels=num_channels,
                   filter_size=filter_size1,
                   num_filters=num_filters1,
                   use_pooling=True)
layer_conv1
# Second convolutional layer operates on the pooled output of the first.
layer_conv2, weights_conv2 = \
    new_conv_layer(input=layer_conv1,
                   num_input_channels=num_filters1,
                   filter_size=filter_size2,
                   num_filters=num_filters2,
                   use_pooling=True)
layer_conv2
# Flatten the conv output so it can feed the fully-connected layers.
layer_flat, num_features = flatten_layer(layer_conv2)
layer_flat
num_features
layer_fc1 = new_fc_layer(input=layer_flat,
                         num_inputs=num_features,
                         num_outputs=fc_size,
                         use_relu=True)
layer_fc1
# Final layer produces one logit per class (no ReLU).
layer_fc2 = new_fc_layer(input=layer_fc1,
                         num_inputs=fc_size,
                         num_outputs=num_classes,
                         use_relu=False)
layer_fc2
y_pred = tf.nn.softmax(layer_fc2)
y_pred_cls = tf.argmax(y_pred, dimension=1)
# Cross-entropy is computed from the raw logits for numerical stability.
cross_entropy = tf.nn.softmax_cross_entropy_with_logits(logits=layer_fc2,
                                                        labels=y_true)
cost = tf.reduce_mean(cross_entropy)
optimizer = tf.train.AdamOptimizer(learning_rate=1e-4).minimize(cost)
correct_prediction = tf.equal(y_pred_cls, y_true_cls)
accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
# Create the session and initialise all variables.
session = tf.Session()
session.run(tf.global_variables_initializer())
train_batch_size = 64
# Running count of optimisation iterations performed so far (see optimize()).
total_iterations = 0
def optimize(num_iterations):
    """Run `num_iterations` training steps, logging batch accuracy every 100 steps."""
    global total_iterations
    start_time = time.time()
    stop = total_iterations + num_iterations
    for i in range(total_iterations, stop):
        # Fetch the next training mini-batch and run one optimiser step.
        x_batch, y_true_batch = data.train.next_batch(train_batch_size)
        feed_dict_train = {x: x_batch, y_true: y_true_batch}
        session.run(optimizer, feed_dict=feed_dict_train)
        if i % 100 == 0:
            # Accuracy on the *training* batch only -- a cheap progress signal.
            acc = session.run(accuracy, feed_dict=feed_dict_train)
            print("Optimization Iteration: {0:>6}, Training Accuracy: {1:>6.1%}".format(i + 1, acc))
    total_iterations += num_iterations
    elapsed = time.time() - start_time
    print("Time usage: " + str(timedelta(seconds=int(round(elapsed)))))
def plot_example_errors(cls_pred, correct):
    """Plot up to nine misclassified test images with true/predicted labels.

    cls_pred: predicted class per test image.
    correct:  boolean array, True where the prediction was right.
    """
    # Idiomatic boolean negation instead of comparing against False.
    incorrect = np.logical_not(correct)
    images = data.test.images[incorrect]
    cls_pred = cls_pred[incorrect]
    cls_true = data.test.cls[incorrect]
    plot_images(images=images[0:9], cls_true=cls_true[0:9], cls_pred=cls_pred[0:9])
def plot_confusion_matrix(cls_pred):
    """Print and display the confusion matrix for the test-set predictions."""
    true_classes = data.test.cls
    cm = confusion_matrix(y_true=true_classes, y_pred=cls_pred)
    # Text dump first, then a colour-coded plot of the same matrix.
    print(cm)
    plt.matshow(cm)
    plt.colorbar()
    ticks = np.arange(num_classes)
    plt.xticks(ticks, range(num_classes))
    plt.yticks(ticks, range(num_classes))
    plt.xlabel('Predicted')
    plt.ylabel('True')
    plt.show()
# Evaluation batch size; larger batches would need more memory.
test_batch_size = 256
def print_test_accuracy(show_example_errors=False, show_confusion_matrix=False):
    """Classify the whole test-set in batches and report overall accuracy.

    show_example_errors:   also plot nine misclassified images.
    show_confusion_matrix: also print/plot the confusion matrix.
    """
    num_test = len(data.test.images)
    # FIX: np.int was deprecated in NumPy 1.20 and removed in 1.24;
    # the builtin int is what the alias meant.
    cls_pred = np.zeros(shape=num_test, dtype=int)
    i = 0
    while i < num_test:
        # Classify images [i, j) in one session run.
        j = min(i + test_batch_size, num_test)
        images = data.test.images[i:j, :]
        labels = data.test.labels[i:j, :]
        feed_dict = {x: images, y_true: labels}
        cls_pred[i:j] = session.run(y_pred_cls, feed_dict=feed_dict)
        i = j
    cls_true = data.test.cls
    correct = (cls_true == cls_pred)
    correct_sum = correct.sum()
    acc = float(correct_sum) / num_test
    msg = "Accuracy on Test-Set: {0:.1%} ({1} / {2})"
    print(msg.format(acc, correct_sum, num_test))
    if show_example_errors:
        print("Example errors:")
        plot_example_errors(cls_pred=cls_pred, correct=correct)
    if show_confusion_matrix:
        print("Confusion Matrix:")
        plot_confusion_matrix(cls_pred=cls_pred)
# Baseline accuracy with random weights (~10%), then train in growing stages
# (1 + 99 + 900 + 9000 = 10000 iterations total), evaluating after each stage.
print_test_accuracy()
optimize(num_iterations=1)
print_test_accuracy()
optimize(num_iterations=99)
print_test_accuracy(show_example_errors=True)
optimize(num_iterations=900)
print_test_accuracy(show_example_errors=True)
optimize(num_iterations=9000)
print_test_accuracy(show_example_errors=True, show_confusion_matrix=True)
def plot_conv_weights(weights, input_channel=0):
    """Visualise the filters of a convolutional layer for one input channel."""
    w = session.run(weights)
    # Shared colour scale so the filters are visually comparable.
    low, high = np.min(w), np.max(w)
    n_filters = w.shape[3]
    # Smallest square grid that fits all filters.
    grid = math.ceil(math.sqrt(n_filters))
    fig, axes = plt.subplots(grid, grid)
    for idx, ax in enumerate(axes.flat):
        if idx < n_filters:
            ax.imshow(w[:, :, input_channel, idx], vmin=low, vmax=high,
                      interpolation='nearest', cmap='seismic')
        ax.set_xticks([])
        ax.set_yticks([])
    plt.show()
def plot_conv_layer(layer, image):
    """Show the feature maps a conv layer produces for a single input image."""
    # Only x needs feeding; y_true is not involved in the forward activations.
    values = session.run(layer, feed_dict={x: [image]})
    n_filters = values.shape[3]
    # Smallest square grid that fits all feature maps.
    grid = math.ceil(math.sqrt(n_filters))
    fig, axes = plt.subplots(grid, grid)
    for idx, ax in enumerate(axes.flat):
        if idx < n_filters:
            ax.imshow(values[0, :, :, idx], interpolation='nearest', cmap='binary')
        ax.set_xticks([])
        ax.set_yticks([])
    plt.show()
def plot_image(image):
    """Display a single flat image at its natural 2-D shape."""
    plt.imshow(image.reshape(img_shape),
               cmap='binary',
               interpolation='nearest')
    plt.show()
# Inspect learned filters and intermediate activations for two sample digits.
image1 = data.test.images[0]
plot_image(image1)
image2 = data.test.images[13]
plot_image(image2)
plot_conv_weights(weights=weights_conv1)
plot_conv_layer(layer=layer_conv1, image=image1)
plot_conv_layer(layer=layer_conv1, image=image2)
# Layer-2 filters have one slice per input channel; show the first two.
plot_conv_weights(weights=weights_conv2, input_channel=0)
plot_conv_weights(weights=weights_conv2, input_channel=1)
plot_conv_layer(layer=layer_conv2, image=image1)
plot_conv_layer(layer=layer_conv2, image=image2)
| true | true |
f71c319545eacca0285b17d4ce54f5d246ae71d3 | 17,588 | py | Python | twilio/rest/trunking/v1/trunk/phone_number.py | neetaramaswamy/twilio-python | 28472ffab1a170824ba17f12a6c1692a5e849439 | [
"MIT"
] | 30 | 2018-06-12T12:00:53.000Z | 2021-05-02T01:27:16.000Z | venv/lib/python3.6/site-packages/twilio/rest/trunking/v1/trunk/phone_number.py | ostar0816/mc-crypto | 80ad9896aed1dc952f819a404a458ccfad207d8e | [
"MIT"
] | 10 | 2020-06-06T01:10:07.000Z | 2022-03-12T00:12:22.000Z | venv/lib/python3.6/site-packages/twilio/rest/trunking/v1/trunk/phone_number.py | ostar0816/mc-crypto | 80ad9896aed1dc952f819a404a458ccfad207d8e | [
"MIT"
] | 4 | 2018-06-12T14:14:20.000Z | 2018-06-19T16:01:49.000Z | # coding=utf-8
"""
This code was generated by
\ / _ _ _| _ _
| (_)\/(_)(_|\/| |(/_ v1.0.0
/ /
"""
from twilio.base import deserialize
from twilio.base import values
from twilio.base.instance_context import InstanceContext
from twilio.base.instance_resource import InstanceResource
from twilio.base.list_resource import ListResource
from twilio.base.page import Page
class PhoneNumberList(ListResource):
    """List resource for the phone numbers associated with a SIP trunk."""

    def __init__(self, version, trunk_sid):
        """Initialize the PhoneNumberList.

        :param Version version: Version that contains the resource
        :param trunk_sid: The trunk_sid
        """
        super(PhoneNumberList, self).__init__(version)
        self._solution = {'trunk_sid': trunk_sid, }
        self._uri = '/Trunks/{trunk_sid}/PhoneNumbers'.format(**self._solution)

    def create(self, phone_number_sid):
        """Associate a phone number with the trunk.

        :param unicode phone_number_sid: The phone_number_sid
        :returns: Newly created PhoneNumberInstance
        """
        form = values.of({'PhoneNumberSid': phone_number_sid, })
        payload = self._version.create('POST', self._uri, data=form, )
        return PhoneNumberInstance(self._version, payload, trunk_sid=self._solution['trunk_sid'], )

    def stream(self, limit=None, page_size=None):
        """Lazily stream PhoneNumberInstance records from the API.

        Records are fetched page by page as the generator is consumed, which
        keeps memory use low regardless of the total number of records.
        """
        limits = self._version.read_limits(limit, page_size)
        first_page = self.page(page_size=limits['page_size'], )
        return self._version.stream(first_page, limits['limit'], limits['page_limit'])

    def list(self, limit=None, page_size=None):
        """Eagerly fetch PhoneNumberInstance records as an in-memory list."""
        return list(self.stream(limit=limit, page_size=page_size, ))

    def page(self, page_token=values.unset, page_number=values.unset,
             page_size=values.unset):
        """Retrieve a single page of PhoneNumberInstance records immediately.

        :param str page_token: PageToken provided by the API
        :param int page_number: Page number (client-side state only)
        :param int page_size: Number of records to return, defaults to 50
        """
        params = values.of({
            'PageToken': page_token,
            'Page': page_number,
            'PageSize': page_size,
        })
        response = self._version.page('GET', self._uri, params=params, )
        return PhoneNumberPage(self._version, response, self._solution)

    def get_page(self, target_url):
        """Retrieve the page of records at an API-generated URL."""
        response = self._version.domain.twilio.request('GET', target_url, )
        return PhoneNumberPage(self._version, response, self._solution)

    def get(self, sid):
        """Construct a PhoneNumberContext for the given sid."""
        return PhoneNumberContext(self._version, trunk_sid=self._solution['trunk_sid'], sid=sid, )

    def __call__(self, sid):
        """Construct a PhoneNumberContext for the given sid (callable shorthand)."""
        return PhoneNumberContext(self._version, trunk_sid=self._solution['trunk_sid'], sid=sid, )

    def __repr__(self):
        """Provide a machine-friendly representation."""
        return '<Twilio.Trunking.V1.PhoneNumberList>'
class PhoneNumberPage(Page):
    """A single page of PhoneNumberInstance results."""

    def __init__(self, version, response, solution):
        """Initialize the PhoneNumberPage.

        :param Version version: Version that contains the resource
        :param Response response: Response from the API
        :param solution: Path solution dict carrying the trunk_sid
        """
        super(PhoneNumberPage, self).__init__(version, response)
        self._solution = solution

    def get_instance(self, payload):
        """Build a PhoneNumberInstance from an API payload dict."""
        return PhoneNumberInstance(self._version, payload, trunk_sid=self._solution['trunk_sid'], )

    def __repr__(self):
        """Provide a machine-friendly representation."""
        return '<Twilio.Trunking.V1.PhoneNumberPage>'
class PhoneNumberContext(InstanceContext):
    """Context for fetching or deleting a single trunk phone number."""

    def __init__(self, version, trunk_sid, sid):
        """Initialize the PhoneNumberContext.

        :param Version version: Version that contains the resource
        :param trunk_sid: The trunk_sid
        :param sid: The sid
        """
        super(PhoneNumberContext, self).__init__(version)
        self._solution = {'trunk_sid': trunk_sid, 'sid': sid, }
        self._uri = '/Trunks/{trunk_sid}/PhoneNumbers/{sid}'.format(**self._solution)

    def fetch(self):
        """Fetch the PhoneNumberInstance this context points at."""
        payload = self._version.fetch('GET', self._uri, params=values.of({}), )
        return PhoneNumberInstance(
            self._version,
            payload,
            trunk_sid=self._solution['trunk_sid'],
            sid=self._solution['sid'],
        )

    def delete(self):
        """Delete the PhoneNumberInstance; returns True on success."""
        return self._version.delete('delete', self._uri)

    def __repr__(self):
        """Provide a machine-friendly representation."""
        details = ' '.join('{}={}'.format(k, v) for k, v in self._solution.items())
        return '<Twilio.Trunking.V1.PhoneNumberContext {}>'.format(details)
class PhoneNumberInstance(InstanceResource):
    """A phone number associated with a SIP trunk."""

    class AddressRequirement(object):
        NONE = "none"
        ANY = "any"
        LOCAL = "local"
        FOREIGN = "foreign"

    def __init__(self, version, payload, trunk_sid, sid=None):
        """Initialize the PhoneNumberInstance from an API payload dict."""
        super(PhoneNumberInstance, self).__init__(version)
        # Payload keys copied into _properties verbatim (no conversion needed).
        plain_fields = (
            'account_sid', 'address_requirements', 'api_version', 'beta',
            'capabilities', 'friendly_name', 'links', 'phone_number', 'sid',
            'sms_application_sid', 'sms_fallback_method', 'sms_fallback_url',
            'sms_method', 'sms_url', 'status_callback', 'status_callback_method',
            'trunk_sid', 'url', 'voice_application_sid', 'voice_caller_id_lookup',
            'voice_fallback_method', 'voice_fallback_url', 'voice_method',
            'voice_url',
        )
        self._properties = {name: payload[name] for name in plain_fields}
        # Timestamps arrive as ISO-8601 strings; store them as datetimes.
        for stamp in ('date_created', 'date_updated'):
            self._properties[stamp] = deserialize.iso8601_datetime(payload[stamp])
        # The context is created lazily by the _proxy property.
        self._context = None
        self._solution = {'trunk_sid': trunk_sid, 'sid': sid or self._properties['sid'], }

    @property
    def _proxy(self):
        """Lazily build the PhoneNumberContext that performs instance actions."""
        if self._context is None:
            self._context = PhoneNumberContext(
                self._version,
                trunk_sid=self._solution['trunk_sid'],
                sid=self._solution['sid'],
            )
        return self._context

    @property
    def account_sid(self):
        """:returns: The account_sid  :rtype: unicode"""
        return self._properties['account_sid']

    @property
    def address_requirements(self):
        """:returns: The address_requirements  :rtype: PhoneNumberInstance.AddressRequirement"""
        return self._properties['address_requirements']

    @property
    def api_version(self):
        """:returns: The api_version  :rtype: unicode"""
        return self._properties['api_version']

    @property
    def beta(self):
        """:returns: The beta flag  :rtype: bool"""
        return self._properties['beta']

    @property
    def capabilities(self):
        """:returns: The capabilities  :rtype: unicode"""
        return self._properties['capabilities']

    @property
    def date_created(self):
        """:returns: The date_created  :rtype: datetime"""
        return self._properties['date_created']

    @property
    def date_updated(self):
        """:returns: The date_updated  :rtype: datetime"""
        return self._properties['date_updated']

    @property
    def friendly_name(self):
        """:returns: The friendly_name  :rtype: unicode"""
        return self._properties['friendly_name']

    @property
    def links(self):
        """:returns: The links  :rtype: unicode"""
        return self._properties['links']

    @property
    def phone_number(self):
        """:returns: The phone_number  :rtype: unicode"""
        return self._properties['phone_number']

    @property
    def sid(self):
        """:returns: The sid  :rtype: unicode"""
        return self._properties['sid']

    @property
    def sms_application_sid(self):
        """:returns: The sms_application_sid  :rtype: unicode"""
        return self._properties['sms_application_sid']

    @property
    def sms_fallback_method(self):
        """:returns: The sms_fallback_method  :rtype: unicode"""
        return self._properties['sms_fallback_method']

    @property
    def sms_fallback_url(self):
        """:returns: The sms_fallback_url  :rtype: unicode"""
        return self._properties['sms_fallback_url']

    @property
    def sms_method(self):
        """:returns: The sms_method  :rtype: unicode"""
        return self._properties['sms_method']

    @property
    def sms_url(self):
        """:returns: The sms_url  :rtype: unicode"""
        return self._properties['sms_url']

    @property
    def status_callback(self):
        """:returns: The status_callback  :rtype: unicode"""
        return self._properties['status_callback']

    @property
    def status_callback_method(self):
        """:returns: The status_callback_method  :rtype: unicode"""
        return self._properties['status_callback_method']

    @property
    def trunk_sid(self):
        """:returns: The trunk_sid  :rtype: unicode"""
        return self._properties['trunk_sid']

    @property
    def url(self):
        """:returns: The url  :rtype: unicode"""
        return self._properties['url']

    @property
    def voice_application_sid(self):
        """:returns: The voice_application_sid  :rtype: unicode"""
        return self._properties['voice_application_sid']

    @property
    def voice_caller_id_lookup(self):
        """:returns: The voice_caller_id_lookup flag  :rtype: bool"""
        return self._properties['voice_caller_id_lookup']

    @property
    def voice_fallback_method(self):
        """:returns: The voice_fallback_method  :rtype: unicode"""
        return self._properties['voice_fallback_method']

    @property
    def voice_fallback_url(self):
        """:returns: The voice_fallback_url  :rtype: unicode"""
        return self._properties['voice_fallback_url']

    @property
    def voice_method(self):
        """:returns: The voice_method  :rtype: unicode"""
        return self._properties['voice_method']

    @property
    def voice_url(self):
        """:returns: The voice_url  :rtype: unicode"""
        return self._properties['voice_url']

    def fetch(self):
        """Fetch this PhoneNumberInstance from the API."""
        return self._proxy.fetch()

    def delete(self):
        """Delete this PhoneNumberInstance; returns True on success."""
        return self._proxy.delete()

    def __repr__(self):
        """Provide a machine-friendly representation."""
        details = ' '.join('{}={}'.format(k, v) for k, v in self._solution.items())
        return '<Twilio.Trunking.V1.PhoneNumberInstance {}>'.format(details)
| 30.641115 | 99 | 0.609108 |
from twilio.base import deserialize
from twilio.base import values
from twilio.base.instance_context import InstanceContext
from twilio.base.instance_resource import InstanceResource
from twilio.base.list_resource import ListResource
from twilio.base.page import Page
class PhoneNumberList(ListResource):
def __init__(self, version, trunk_sid):
super(PhoneNumberList, self).__init__(version)
self._solution = {'trunk_sid': trunk_sid, }
self._uri = '/Trunks/{trunk_sid}/PhoneNumbers'.format(**self._solution)
def create(self, phone_number_sid):
data = values.of({'PhoneNumberSid': phone_number_sid, })
payload = self._version.create(
'POST',
self._uri,
data=data,
)
return PhoneNumberInstance(self._version, payload, trunk_sid=self._solution['trunk_sid'], )
def stream(self, limit=None, page_size=None):
limits = self._version.read_limits(limit, page_size)
page = self.page(page_size=limits['page_size'], )
return self._version.stream(page, limits['limit'], limits['page_limit'])
def list(self, limit=None, page_size=None):
return list(self.stream(limit=limit, page_size=page_size, ))
def page(self, page_token=values.unset, page_number=values.unset,
page_size=values.unset):
params = values.of({'PageToken': page_token, 'Page': page_number, 'PageSize': page_size, })
response = self._version.page(
'GET',
self._uri,
params=params,
)
return PhoneNumberPage(self._version, response, self._solution)
def get_page(self, target_url):
response = self._version.domain.twilio.request(
'GET',
target_url,
)
return PhoneNumberPage(self._version, response, self._solution)
def get(self, sid):
return PhoneNumberContext(self._version, trunk_sid=self._solution['trunk_sid'], sid=sid, )
def __call__(self, sid):
return PhoneNumberContext(self._version, trunk_sid=self._solution['trunk_sid'], sid=sid, )
def __repr__(self):
return '<Twilio.Trunking.V1.PhoneNumberList>'
class PhoneNumberPage(Page):
def __init__(self, version, response, solution):
super(PhoneNumberPage, self).__init__(version, response)
self._solution = solution
def get_instance(self, payload):
return PhoneNumberInstance(self._version, payload, trunk_sid=self._solution['trunk_sid'], )
def __repr__(self):
return '<Twilio.Trunking.V1.PhoneNumberPage>'
class PhoneNumberContext(InstanceContext):
def __init__(self, version, trunk_sid, sid):
super(PhoneNumberContext, self).__init__(version)
self._solution = {'trunk_sid': trunk_sid, 'sid': sid, }
self._uri = '/Trunks/{trunk_sid}/PhoneNumbers/{sid}'.format(**self._solution)
def fetch(self):
params = values.of({})
payload = self._version.fetch(
'GET',
self._uri,
params=params,
)
return PhoneNumberInstance(
self._version,
payload,
trunk_sid=self._solution['trunk_sid'],
sid=self._solution['sid'],
)
def delete(self):
return self._version.delete('delete', self._uri)
def __repr__(self):
context = ' '.join('{}={}'.format(k, v) for k, v in self._solution.items())
return '<Twilio.Trunking.V1.PhoneNumberContext {}>'.format(context)
class PhoneNumberInstance(InstanceResource):
class AddressRequirement(object):
NONE = "none"
ANY = "any"
LOCAL = "local"
FOREIGN = "foreign"
def __init__(self, version, payload, trunk_sid, sid=None):
super(PhoneNumberInstance, self).__init__(version)
self._properties = {
'account_sid': payload['account_sid'],
'address_requirements': payload['address_requirements'],
'api_version': payload['api_version'],
'beta': payload['beta'],
'capabilities': payload['capabilities'],
'date_created': deserialize.iso8601_datetime(payload['date_created']),
'date_updated': deserialize.iso8601_datetime(payload['date_updated']),
'friendly_name': payload['friendly_name'],
'links': payload['links'],
'phone_number': payload['phone_number'],
'sid': payload['sid'],
'sms_application_sid': payload['sms_application_sid'],
'sms_fallback_method': payload['sms_fallback_method'],
'sms_fallback_url': payload['sms_fallback_url'],
'sms_method': payload['sms_method'],
'sms_url': payload['sms_url'],
'status_callback': payload['status_callback'],
'status_callback_method': payload['status_callback_method'],
'trunk_sid': payload['trunk_sid'],
'url': payload['url'],
'voice_application_sid': payload['voice_application_sid'],
'voice_caller_id_lookup': payload['voice_caller_id_lookup'],
'voice_fallback_method': payload['voice_fallback_method'],
'voice_fallback_url': payload['voice_fallback_url'],
'voice_method': payload['voice_method'],
'voice_url': payload['voice_url'],
}
self._context = None
self._solution = {'trunk_sid': trunk_sid, 'sid': sid or self._properties['sid'], }
@property
def _proxy(self):
if self._context is None:
self._context = PhoneNumberContext(
self._version,
trunk_sid=self._solution['trunk_sid'],
sid=self._solution['sid'],
)
return self._context
@property
def account_sid(self):
return self._properties['account_sid']
@property
def address_requirements(self):
return self._properties['address_requirements']
@property
def api_version(self):
return self._properties['api_version']
@property
def beta(self):
return self._properties['beta']
@property
def capabilities(self):
return self._properties['capabilities']
@property
def date_created(self):
return self._properties['date_created']
@property
def date_updated(self):
return self._properties['date_updated']
@property
def friendly_name(self):
return self._properties['friendly_name']
@property
def links(self):
return self._properties['links']
@property
def phone_number(self):
return self._properties['phone_number']
@property
def sid(self):
return self._properties['sid']
@property
def sms_application_sid(self):
return self._properties['sms_application_sid']
@property
def sms_fallback_method(self):
return self._properties['sms_fallback_method']
@property
def sms_fallback_url(self):
return self._properties['sms_fallback_url']
@property
def sms_method(self):
return self._properties['sms_method']
@property
def sms_url(self):
return self._properties['sms_url']
@property
def status_callback(self):
return self._properties['status_callback']
@property
def status_callback_method(self):
return self._properties['status_callback_method']
@property
def trunk_sid(self):
return self._properties['trunk_sid']
@property
def url(self):
return self._properties['url']
@property
def voice_application_sid(self):
return self._properties['voice_application_sid']
@property
def voice_caller_id_lookup(self):
return self._properties['voice_caller_id_lookup']
@property
def voice_fallback_method(self):
return self._properties['voice_fallback_method']
@property
def voice_fallback_url(self):
return self._properties['voice_fallback_url']
@property
def voice_method(self):
return self._properties['voice_method']
@property
def voice_url(self):
return self._properties['voice_url']
def fetch(self):
return self._proxy.fetch()
def delete(self):
return self._proxy.delete()
def __repr__(self):
context = ' '.join('{}={}'.format(k, v) for k, v in self._solution.items())
return '<Twilio.Trunking.V1.PhoneNumberInstance {}>'.format(context)
| true | true |
f71c31b5f216202481da86a95da2f3bc59155e05 | 20,865 | py | Python | autotest/osr/osr_ct.py | bnordgren/gdal | 8b9bff36d6e2c4eceb56e8f596286be7f3fc9f24 | [
"MIT"
] | 1 | 2018-11-29T10:15:53.000Z | 2018-11-29T10:15:53.000Z | autotest/osr/osr_ct.py | a0x8o/gdal | 54aa47ee60eea48fa4989d6ce41fdae4e02c6458 | [
"MIT"
] | 1 | 2017-12-30T02:12:01.000Z | 2017-12-30T02:12:01.000Z | autotest/osr/osr_ct.py | notcaremath/gdal | 2c1586ffda1e49d170b96f8f0d87bc7516554047 | [
"MIT"
] | null | null | null | #!/usr/bin/env pytest
# -*- coding: utf-8 -*-
###############################################################################
# $Id$
#
# Project: GDAL/OGR Test Suite
# Purpose: Test coordinate transformations.
# Author: Frank Warmerdam <warmerdam@pobox.com>
#
###############################################################################
# Copyright (c) 2003, Frank Warmerdam <warmerdam@pobox.com>
# Copyright (c) 2009-2013, Even Rouault <even dot rouault at spatialys.com>
# Copyright (c) 2014, Google
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
###############################################################################
import math
import sys
from osgeo import gdal
from osgeo import osr
from osgeo import ogr
import gdaltest
import pytest
###############################################################################
# Verify that we have PROJ.4 available.
def test_osr_ct_1():
    """Verify that PROJ.4 is available by creating a simple LL->UTM transform.

    Skips (rather than fails) when PROJ.4 cannot be loaded.
    """
    utm_srs = osr.SpatialReference()
    utm_srs.SetUTM(11)
    utm_srs.SetWellKnownGeogCS('WGS84')
    ll_srs = osr.SpatialReference()
    ll_srs.SetWellKnownGeogCS('WGS84')
    try:
        # Quiet handler so an expected PROJ.4 load failure is not printed.
        gdal.PushErrorHandler('CPLQuietErrorHandler')
        ct = osr.CoordinateTransformation(ll_srs, utm_srs)
        gdal.PopErrorHandler()
        if gdal.GetLastErrorMsg().find('Unable to load PROJ.4') != -1:
            pytest.skip('PROJ.4 missing, transforms not available.')
    except ValueError:
        # Some bindings raise instead of returning an invalid object.
        gdal.PopErrorHandler()
        if gdal.GetLastErrorMsg().find('Unable to load PROJ.4') != -1:
            pytest.skip('PROJ.4 missing, transforms not available.')
        pytest.fail(gdal.GetLastErrorMsg())
    # FIX: failure message previously read 'CoordinateTransformat.' (typo).
    assert not (ct is None or ct.this is None), \
        'Unable to create simple CoordinateTransformation.'
###############################################################################
# Actually perform a simple LL to UTM conversion.
def test_osr_ct_2():
    """Perform a simple WGS84 lat/lon to UTM zone 11 conversion."""
    utm_srs = osr.SpatialReference()
    utm_srs.SetUTM(11)
    utm_srs.SetWellKnownGeogCS('WGS84')
    ll_srs = osr.SpatialReference()
    ll_srs.SetWellKnownGeogCS('WGS84')
    ct = osr.CoordinateTransformation(ll_srs, utm_srs)
    x, y, z = ct.TransformPoint(32.0, -117.5, 0.0)
    # Expected UTM coordinates for (32N, 117.5W), centimetre tolerance.
    assert x == pytest.approx(452772.06, abs=0.01), 'Wrong LL to UTM result'
    assert y == pytest.approx(3540544.89, abs=0.01), 'Wrong LL to UTM result'
    assert z == pytest.approx(0.0, abs=0.01), 'Wrong LL to UTM result'
###############################################################################
# Transform an OGR geometry ... this is mostly aimed at ensuring that
# the OGRCoordinateTransformation target SRS isn't deleted till the output
# geometry which also uses it is deleted.
def test_osr_ct_3():
    """Transform an OGR geometry, checking the target SRS outlives the CT.

    The output geometry keeps a reference to the transformation's target SRS;
    dropping every other reference must not invalidate it.
    """
    utm_srs = osr.SpatialReference()
    utm_srs.SetUTM(11)
    utm_srs.SetWellKnownGeogCS('WGS84')
    ll_srs = osr.SpatialReference()
    ll_srs.SetWellKnownGeogCS('WGS84')
    ll_srs.SetAxisMappingStrategy(osr.OAMS_TRADITIONAL_GIS_ORDER)
    ct = osr.CoordinateTransformation(ll_srs, utm_srs)
    pnt = ogr.CreateGeometryFromWkt('POINT(-117.5 32.0)', ll_srs)
    assert pnt.Transform(ct) == 0
    # Drop all other references; only the geometry keeps the SRS alive now.
    ll_srs = None
    ct = None
    utm_srs = None
    out_srs = pnt.GetSpatialReference().ExportToPrettyWkt()
    assert out_srs[0:6] == 'PROJCS', 'output srs corrupt, ref counting issue?'
    pnt = None
###############################################################################
# Actually perform a simple LL to UTM conversion.
# Works for both OG and NG bindings
def test_osr_ct_4():
    """TransformPoints on a list mixing 3-D and 2-D input tuples."""
    utm_srs = osr.SpatialReference()
    utm_srs.SetUTM(11)
    utm_srs.SetWellKnownGeogCS('WGS84')
    ll_srs = osr.SpatialReference()
    ll_srs.SetWellKnownGeogCS('WGS84')
    ll_srs.SetAxisMappingStrategy(osr.OAMS_TRADITIONAL_GIS_ORDER)
    ct = osr.CoordinateTransformation(ll_srs, utm_srs)
    result = ct.TransformPoints([(-117.5, 32.0, 0.0), (-117.5, 32.0)])
    assert len(result) == 2
    # Even the 2-D input must come back as a 3-tuple.
    assert len(result[0]) == 3
    for point in result:
        assert point[0] == pytest.approx(452772.06, abs=0.01), 'Wrong LL to UTM result'
        assert point[1] == pytest.approx(3540544.89, abs=0.01), 'Wrong LL to UTM result'
        assert point[2] == pytest.approx(0.0, abs=0.01), 'Wrong LL to UTM result'
###############################################################################
# Same test, but with any sequence of tuples instead of a tuple of tuple
# New in NG bindings (#3020)
def test_osr_ct_5():
    """TransformPoints() fed with a tuple of tuples (LL -> UTM zone 11)."""
    target = osr.SpatialReference()
    target.SetUTM(11)
    target.SetWellKnownGeogCS('WGS84')
    source = osr.SpatialReference()
    source.SetWellKnownGeogCS('WGS84')
    source.SetAxisMappingStrategy(osr.OAMS_TRADITIONAL_GIS_ORDER)
    transform = osr.CoordinateTransformation(source, target)
    points = transform.TransformPoints(((-117.5, 32.0, 0.0), (-117.5, 32.0)))
    # Both the 3D and the 2D input must map onto the same UTM coordinate.
    for idx in range(2):
        easting, northing, height = points[idx]
        assert easting == pytest.approx(452772.06, abs=0.01) and \
            northing == pytest.approx(3540544.89, abs=0.01) and \
            height == pytest.approx(0.0, abs=0.01), \
            'Wrong LL to UTM result'
###############################################################################
# Test osr.CreateCoordinateTransformation() method
def test_osr_ct_6():
    """osr.CreateCoordinateTransformation() factory function."""
    # Invalid arguments must yield None (with a GDAL error), not raise.
    with gdaltest.error_handler():
        ct = osr.CreateCoordinateTransformation(None, None)
    assert ct is None
    utm_srs = osr.SpatialReference()
    utm_srs.SetUTM(11)
    utm_srs.SetWellKnownGeogCS('WGS84')
    ll_srs = osr.SpatialReference()
    ll_srs.SetWellKnownGeogCS('WGS84')
    ll_srs.SetAxisMappingStrategy(osr.OAMS_TRADITIONAL_GIS_ORDER)
    ct = osr.CreateCoordinateTransformation(ll_srs, utm_srs)
    assert ct is not None
    result = ct.TransformPoints(((-117.5, 32.0, 0.0), (-117.5, 32.0)))
    for i in range(2):
        assert result[i][0] == pytest.approx(452772.06, abs=0.01) and result[i][1] == pytest.approx(3540544.89, abs=0.01) and result[i][2] == pytest.approx(0.0, abs=0.01), \
            'Wrong LL to UTM result'
###############################################################################
# Actually perform a simple Pseudo Mercator to LL conversion.
def test_osr_ct_7():
    """Pseudo Mercator (EPSG:3857) -> WGS84 lon/lat, point and geometry APIs."""
    pm_srs = osr.SpatialReference()
    pm_srs.ImportFromEPSG(3857)
    ll_srs = osr.SpatialReference()
    ll_srs.SetWellKnownGeogCS('WGS84')
    ll_srs.SetAxisMappingStrategy(osr.OAMS_TRADITIONAL_GIS_ORDER)
    ct = osr.CoordinateTransformation(pm_srs, ll_srs)
    (x, y, z) = ct.TransformPoint(7000000, 7000000, 0)
    (exp_x, exp_y, exp_z) = (62.8820698884, 53.0918187696, 0.0)
    if (exp_x != pytest.approx(x, abs=0.00001) or
            exp_y != pytest.approx(y, abs=0.00001) or
            exp_z != pytest.approx(z, abs=0.00001)):
        print('Got: (%f, %f, %f)' % (x, y, z))
        print('Expected: (%f, %f, %f)' % (exp_x, exp_y, exp_z))
        pytest.fail('Wrong LL for Pseudo Mercator result')
    # Same transform again, applied through the OGR geometry API.
    pnt = ogr.CreateGeometryFromWkt('POINT(%g %g)' % (7000000, 7000000),
                                    pm_srs)
    expected_pnt = ogr.CreateGeometryFromWkt('POINT(%.10f %.10f)' % (exp_x, exp_y),
                                             ll_srs)
    result = pnt.Transform(ct)
    assert result == 0
    if (expected_pnt.GetX() != pytest.approx(pnt.GetX(), abs=0.00001) or
            expected_pnt.GetY() != pytest.approx(pnt.GetY(), abs=0.00001) or
            expected_pnt.GetZ() != pytest.approx(pnt.GetZ(), abs=0.00001)):
        print('Got: %s' % pnt.ExportToWkt())
        print('Expected: %s' % expected_pnt.ExportToWkt())
        pytest.fail('Failed to transform from Pseudo Mercator to LL')
###############################################################################
# Test WebMercator -> WGS84 optimized transform
def test_osr_ct_8():
    """WebMercator -> WGS84 optimized transform path."""
    src_srs = osr.SpatialReference()
    src_srs.ImportFromEPSG(3857)
    dst_srs = osr.SpatialReference()
    dst_srs.SetWellKnownGeogCS('WGS84')
    dst_srs.SetAxisMappingStrategy(osr.OAMS_TRADITIONAL_GIS_ORDER)
    ct = osr.CoordinateTransformation(src_srs, dst_srs)
    # Two points sharing the same northing.
    pnts = [(0, 6274861.39400658), (1, 6274861.39400658)]
    result = ct.TransformPoints(pnts)
    expected_result = [(0.0, 49.000000000000007, 0.0), (8.9831528411952125e-06, 49.000000000000007, 0.0)]
    for i in range(2):
        for j in range(3):
            if result[i][j] != pytest.approx(expected_result[i][j], abs=1e-10):
                print('Got: %s' % str(result))
                print('Expected: %s' % str(expected_result))
                pytest.fail('Failed to transform from Pseudo Mercator to LL')
    # Same again with the second point shifted by 1m in both axes.
    pnts = [(0, 6274861.39400658), (1 + 0, 1 + 6274861.39400658)]
    result = ct.TransformPoints(pnts)
    expected_result = [(0.0, 49.000000000000007, 0.0), (8.9831528411952125e-06, 49.000005893478189, 0.0)]
    for i in range(2):
        for j in range(3):
            if result[i][j] != pytest.approx(expected_result[i][j], abs=1e-10):
                print('Got: %s' % str(result))
                print('Expected: %s' % str(expected_result))
                pytest.fail('Failed to transform from Pseudo Mercator to LL')
###############################################################################
# Test coordinate transformation where only one CRS has a towgs84 clause (#1156)
def test_osr_ct_towgs84_only_one_side():
    """No datum shift is applied when only one CRS has a towgs84 clause (#1156)."""
    srs_towgs84 = osr.SpatialReference()
    srs_towgs84.SetFromUserInput("+proj=longlat +ellps=GRS80 +towgs84=100,200,300")
    srs_just_ellps = osr.SpatialReference()
    srs_just_ellps.SetFromUserInput('+proj=longlat +ellps=GRS80')
    ct = osr.CoordinateTransformation(srs_towgs84, srs_just_ellps)
    (x, y, z) = ct.TransformPoint(0, 0, 0)
    assert x == 0
    assert y == 0
    assert z == 0
    # Same expectation in the opposite direction.
    ct = osr.CoordinateTransformation(srs_just_ellps, srs_towgs84)
    (x, y, z) = ct.TransformPoint(0, 0, 0)
    assert x == 0
    assert y == 0
    assert z == 0
###############################################################################
# Test coordinate transformation where both side have towgs84/datum clause (#1156)
def test_osr_ct_towgs84_both_side():
    """Datum shift is applied when both sides carry towgs84/datum info (#1156)."""
    srs_towgs84 = osr.SpatialReference()
    srs_towgs84.SetFromUserInput("+proj=longlat +ellps=GRS80 +towgs84=100,200,300")
    srs_other_towgs84 = osr.SpatialReference()
    srs_other_towgs84.SetFromUserInput("+proj=longlat +ellps=GRS80 +towgs84=0,0,0")
    # towgs84 on both sides: lon/lat move, ellipsoidal height stays.
    ct = osr.CoordinateTransformation(srs_towgs84, srs_other_towgs84)
    (x, y, z) = ct.TransformPoint(0, 0, 20)
    assert x != 0
    assert y != 0
    assert z == 20
    srs_datum_wgs84 = osr.SpatialReference()
    srs_datum_wgs84.SetFromUserInput("+proj=longlat +datum=WGS84")
    # towgs84 vs an explicit datum, both directions.
    ct = osr.CoordinateTransformation(srs_towgs84, srs_datum_wgs84)
    (x, y, z) = ct.TransformPoint(0, 0, 20)
    assert x != 0
    assert y != 0
    assert z == 20
    ct = osr.CoordinateTransformation(srs_datum_wgs84, srs_towgs84)
    (x, y, z) = ct.TransformPoint(0, 0, 20)
    assert x != 0
    assert y != 0
    assert z == 20
###############################################################################
# Test coordinate transformation with custom operation
def test_osr_ct_options_operation():
    """A user-supplied PROJ operation overrides the CRS-derived pipeline."""
    opts = osr.CoordinateTransformationOptions()
    # The affine negates x; y and z must pass through untouched.
    assert opts.SetOperation('+proj=affine +s11=-1')
    transform = osr.CoordinateTransformation(None, None, opts)
    assert transform
    out_x, out_y, out_z = transform.TransformPoint(1, 2, 3)
    assert (out_x, out_y, out_z) == (-1, 2, 3)
###############################################################################
# Test coordinate transformation with area of interest
def test_osr_ct_options_area_of_interest():
    """NAD27 -> WGS84 with an area of interest constraining the operation."""
    srs_nad27 = osr.SpatialReference()
    srs_nad27.SetFromUserInput("NAD27")
    srs_wgs84 = osr.SpatialReference()
    srs_wgs84.SetFromUserInput("WGS84")
    options = osr.CoordinateTransformationOptions()
    # Out-of-range bounding boxes must be rejected.
    assert not options.SetAreaOfInterest(-200,40,-99,41)
    assert not options.SetAreaOfInterest(-100,-100,-99,41)
    assert not options.SetAreaOfInterest(-100,40,200,41)
    assert not options.SetAreaOfInterest(-100,40,-99,100)
    assert options.SetAreaOfInterest(-100,40,-99,41)
    ct = osr.CoordinateTransformation(srs_nad27, srs_wgs84, options)
    assert ct
    # Inside the AOI: a small but non-zero datum shift must be applied.
    x, y, z = ct.TransformPoint(40.5,-99.5,0)
    assert x != 40.5
    assert x == pytest.approx(40.5, abs=1e-3)
    # Outside the AOI the transform yields infinity (except on macOS).
    x, y, z = ct.TransformPoint(0,0,0)
    if sys.platform == 'darwin':
        print("ct.TransformPoint(0,0,0) doesn't return expected result on MacOSX. Not sure why.")
    else:
        assert x == float('inf')
###############################################################################
# Test 4D transformations
def test_osr_ct_4D():
    """4D (x, y, z, time) transformation through a time-dependent Helmert pipeline."""
    options = osr.CoordinateTransformationOptions()
    assert options.SetOperation('+proj=pipeline +step +proj=unitconvert +xy_in=deg +xy_out=rad +step +proj=cart +step +proj=helmert +convention=position_vector +x=0.0127 +dx=-0.0029 +rx=-0.00039 +drx=-0.00011 +y=0.0065 +dy=-0.0002 +ry=0.00080 +dry=-0.00019 +z=-0.0209 +dz=-0.0006 +rz=-0.00114 +drz=0.00007 +s=0.00195 +ds=0.00001 +t_epoch=1988.0 +step +proj=cart +inv +step +proj=unitconvert +xy_in=rad +xy_out=deg')
    ct = osr.CoordinateTransformation(None, None, options)
    assert ct
    # Single-point API: time is passed through unchanged.
    x, y, z, t = ct.TransformPoint(2, 49, 0, 2000)
    assert x == pytest.approx(2.0000005420366, abs=1e-10), x
    assert y == pytest.approx(49.0000003766711, abs=1e-10), y
    assert z == pytest.approx(-0.0222802283242345, abs=1e-8), z
    assert t == pytest.approx(2000, abs=1e-10), t
    # Multi-point API: the epoch changes the applied shift.
    ret = ct.TransformPoints([[2, 49, 0, 2000], [2, 49, 0, 1988]])
    assert len(ret) == 2, ret
    assert len(ret[0]) == 4, ret
    x, y, z, t = ret[0]
    assert x == pytest.approx(2.0000005420366, abs=1e-10), x
    assert y == pytest.approx(49.0000003766711, abs=1e-10), y
    assert z == pytest.approx(-0.0222802283242345, abs=1e-8), z
    assert t == pytest.approx(2000, abs=1e-10), t
    assert len(ret[1]) == 4, ret
    x, y, z, t = ret[1]
    assert x == pytest.approx(1.9999998809056305, abs=1e-10), x
    assert y == pytest.approx(48.9999995630005, abs=1e-10), y
    assert z == pytest.approx(0.005032399669289589, abs=1e-8), z
    assert t == pytest.approx(1988, abs=1e-10), t
###############################################################################
# Test geocentric transformations
def test_osr_ct_geocentric():
    """Geocentric (XYZ) transformation between IGNF:RGR92 and IGNF:REUN47."""
    s = osr.SpatialReference()
    s.SetFromUserInput("IGNF:RGR92")
    t = osr.SpatialReference()
    t.SetFromUserInput("IGNF:REUN47")
    ct = osr.CoordinateTransformation(s, t)
    assert ct
    x, y, z = ct.TransformPoint(3356123.5400, 1303218.3090, 5247430.6050)
    assert x == pytest.approx(3353420.949, abs=1e-1)
    assert y == pytest.approx(1304075.021, abs=1e-1)
    assert z == pytest.approx(5248935.144, abs=1e-1)
###############################################################################
# Test with +lon_wrap=180
def test_osr_ct_lon_wrap():
    """+lon_wrap=180 shifts output longitudes by 360 into the [0, 360) range."""
    # Encode major/minor/micro into a single comparable integer.
    if osr.GetPROJVersionMajor() * 10000 + osr.GetPROJVersionMinor() * 100 + osr.GetPROJVersionMicro() < 70001:
        # Issue before PROJ 7.0.1
        pytest.skip()
    s = osr.SpatialReference()
    s.SetFromUserInput("+proj=longlat +ellps=GRS80")
    t = osr.SpatialReference()
    t.SetFromUserInput("+proj=longlat +ellps=GRS80 +lon_wrap=180")
    ct = osr.CoordinateTransformation(s, t)
    assert ct
    x, y, _ = ct.TransformPoint(-25, 60, 0)
    assert x == pytest.approx(-25 + 360, abs=1e-12)
    assert y == pytest.approx(60, abs=1e-12)
###############################################################################
# Test ct.TransformPointWithErrorCode
def test_osr_ct_transformpointwitherrorcode():
    """TransformPointWithErrorCode() returns a per-point PROJ error code."""
    if osr.GetPROJVersionMajor() < 8:
        # Issue before PROJ 8
        pytest.skip()
    s = osr.SpatialReference()
    s.SetFromUserInput("+proj=longlat +ellps=GRS80")
    t = osr.SpatialReference()
    t.SetFromUserInput("+proj=tmerc +ellps=GRS80")
    ct = osr.CoordinateTransformation(s, t)
    assert ct
    # In-domain point: valid output, error code 0; z and t pass through.
    x, y, z, t, error_code = ct.TransformPointWithErrorCode(1, 2, 3, 4)
    assert x == pytest.approx(111257.80439304397, rel=1e-10)
    assert y == pytest.approx(221183.3401672801, rel=1e-10)
    assert z == 3
    assert t == 4
    assert error_code == 0
    # Out-of-domain point: infinite output plus a specific error code.
    x, y, z, t, error_code = ct.TransformPointWithErrorCode(90, 0, 0, 0)
    assert math.isinf(x)
    assert error_code == osr.PROJ_ERR_COORD_TRANSFM_OUTSIDE_PROJECTION_DOMAIN
###############################################################################
# Test CoordinateTransformationOptions.SetDesiredAccuracy
def test_osr_ct_options_accuracy():
    """CoordinateTransformationOptions.SetDesiredAccuracy().

    WGS84 -> ETRS89 cannot be done within 5 cm, so the transform must fail
    at TransformPoint() time.
    """
    s = osr.SpatialReference()
    s.SetFromUserInput("EPSG:4326")
    t = osr.SpatialReference()
    t.SetFromUserInput("EPSG:4258") # ETRS89
    options = osr.CoordinateTransformationOptions()
    options.SetDesiredAccuracy(0.05)
    with gdaltest.error_handler():
        ct = osr.CoordinateTransformation(s, t, options)
    # BUG FIX: the previous `try: ...; assert False; except: pass` caught its
    # own AssertionError with a bare except, so the test could never fail.
    with pytest.raises(Exception):
        ct.TransformPoint(49, 2, 0)
###############################################################################
# Test CoordinateTransformationOptions.SetBallparkAllowed
def test_osr_ct_options_ballpark_disallowed():
    """CoordinateTransformationOptions.SetBallparkAllowed(False).

    NAD27 -> ETRS89 only has a ballpark operation; with ballpark disallowed
    the transform must fail at TransformPoint() time.
    """
    s = osr.SpatialReference()
    s.SetFromUserInput("EPSG:4267") # NAD27
    t = osr.SpatialReference()
    t.SetFromUserInput("EPSG:4258") # ETRS89
    options = osr.CoordinateTransformationOptions()
    options.SetBallparkAllowed(False)
    with gdaltest.error_handler():
        ct = osr.CoordinateTransformation(s, t, options)
    # BUG FIX: the previous `try: ...; assert False; except: pass` caught its
    # own AssertionError with a bare except, so the test could never fail.
    with pytest.raises(Exception):
        ct.TransformPoint(49, 2, 0)
###############################################################################
# Test that we pass a neutral time when not explicitly specified
def test_osr_ct_non_specified_time_with_time_dependent_transformation():
    """A neutral epoch is used when no time is specified.

    With a time-dependent Helmert pipeline and no explicit epoch, the
    coordinates must come back (essentially) unchanged.
    """
    options = osr.CoordinateTransformationOptions()
    options.SetOperation('+proj=pipeline +step +proj=axisswap +order=2,1 +step +proj=unitconvert +xy_in=deg +z_in=m +xy_out=rad +z_out=m +step +proj=cart +ellps=GRS80 +step +inv +proj=helmert +dx=0.0008 +dy=-0.0006 +dz=-0.0014 +drx=6.67e-05 +dry=-0.0007574 +drz=-5.13e-05 +ds=-7e-05 +t_epoch=2010 +convention=coordinate_frame +step +inv +proj=cart +ellps=GRS80 +step +proj=unitconvert +xy_in=rad +z_in=m +xy_out=deg +z_out=m +step +proj=axisswap +order=2,1')
    ct = osr.CoordinateTransformation(None, None, options)
    assert ct
    x, y, _ = ct.TransformPoint(50, -40, 0)
    assert x == pytest.approx(50, abs=1e-10)
    assert y == pytest.approx(-40, abs=1e-10)
###############################################################################
# Test using OGRSpatialReference::CoordinateEpoch()
def test_osr_ct_take_into_account_srs_coordinate_epoch():
    """OGRSpatialReference::SetCoordinateEpoch() is honoured (GDA2020 <-> ITRF2014)."""
    if osr.GetPROJVersionMajor() * 100 + osr.GetPROJVersionMinor() < 702:
        pytest.skip('requires PROJ 7.2 or later')
    s = osr.SpatialReference()
    s.SetFromUserInput("EPSG:7844") # GDA2020
    t_2020 = osr.SpatialReference()
    t_2020.SetFromUserInput("EPSG:9000") # ITRF2014
    t_2020.SetCoordinateEpoch(2020)
    # 2020 is the central epoch of the transformation, so no coordinate
    # change is expected
    ct = osr.CoordinateTransformation(s, t_2020)
    x, y, _ = ct.TransformPoint(-30, 150, 0)
    assert x == pytest.approx(-30, abs=1e-10)
    assert y == pytest.approx(150, abs=1e-10)
    # At epoch 2030 a small shift is expected.
    t_2030 = osr.SpatialReference()
    t_2030.SetFromUserInput("EPSG:9000") # ITRF2014
    t_2030.SetCoordinateEpoch(2030)
    ct = osr.CoordinateTransformation(s, t_2030)
    x, y, _ = ct.TransformPoint(-30, 150, 0)
    assert x == pytest.approx(-29.9999950478, abs=1e-10)
    assert y == pytest.approx(150.0000022212, abs=1e-10)
    # Round trip back to GDA2020.
    ct = osr.CoordinateTransformation(t_2030, s)
    x, y, _ = ct.TransformPoint(-29.9999950478, 150.0000022212, 0)
    assert x == pytest.approx(-30, abs=1e-10)
    assert y == pytest.approx(150, abs=1e-10)
    # Not properly supported currently
    gdal.ErrorReset()
    with gdaltest.error_handler():
        ct = osr.CoordinateTransformation(t_2020, t_2030)
    assert gdal.GetLastErrorMsg() != ''
| 36.161179 | 458 | 0.620129 | true | true | |
f71c32e40c090845453a2573a352eecf99ded05c | 2,896 | py | Python | openml/__init__.py | Rong-Inspur/openml-python | 07d429c843cf589d8096db76d520317acf7a99ab | [
"BSD-3-Clause"
] | null | null | null | openml/__init__.py | Rong-Inspur/openml-python | 07d429c843cf589d8096db76d520317acf7a99ab | [
"BSD-3-Clause"
] | null | null | null | openml/__init__.py | Rong-Inspur/openml-python | 07d429c843cf589d8096db76d520317acf7a99ab | [
"BSD-3-Clause"
] | null | null | null | """
The OpenML module implements a python interface to
`OpenML <https://www.openml.org>`_, a collaborative platform for machine
learning. OpenML can be used to
* store, download and analyze datasets
* make experiments and their results (e.g. models, predictions)
accesible and reproducible for everybody
* analyze experiments (uploaded by you and other collaborators) and conduct
meta studies
In particular, this module implements a python interface for the
`OpenML REST API <https://www.openml.org/guide#!rest_services>`_
(`REST on wikipedia
<http://en.wikipedia.org/wiki/Representational_state_transfer>`_).
"""
# License: BSD 3-Clause
from . import _api_calls
from . import config
from .datasets import OpenMLDataset, OpenMLDataFeature
from . import datasets
from . import evaluations
from .evaluations import OpenMLEvaluation
from . import extensions
from . import exceptions
from . import tasks
from .tasks import (
OpenMLTask,
OpenMLSplit,
OpenMLSupervisedTask,
OpenMLClassificationTask,
OpenMLRegressionTask,
OpenMLClusteringTask,
OpenMLLearningCurveTask,
)
from . import runs
from .runs import OpenMLRun
from . import flows
from .flows import OpenMLFlow
from . import study
from .study import OpenMLStudy, OpenMLBenchmarkSuite
from . import utils
from . import setups
from .setups import OpenMLSetup, OpenMLParameter
from .__version__ import __version__
def populate_cache(task_ids=None, dataset_ids=None, flow_ids=None,
                   run_ids=None):
    """
    Populate a cache for offline and parallel usage of the OpenML connector.

    Each id iterable is optional; every listed entity is fetched once so
    that later lookups can be served from the local cache.

    Parameters
    ----------
    task_ids : iterable
    dataset_ids : iterable
    flow_ids : iterable
    run_ids : iterable

    Returns
    -------
    None
    """
    # Table-driven dispatch: (ids, fetcher) pairs, processed in order.
    fetch_plan = (
        (task_ids, lambda entity_id: tasks.functions.get_task(entity_id)),
        (dataset_ids, lambda entity_id: datasets.functions.get_dataset(entity_id)),
        (flow_ids, lambda entity_id: flows.functions.get_flow(entity_id)),
        (run_ids, lambda entity_id: runs.functions.get_run(entity_id)),
    )
    for ids, fetch in fetch_plan:
        if ids is None:
            continue
        for entity_id in ids:
            fetch(entity_id)
__all__ = [
'OpenMLDataset',
'OpenMLDataFeature',
'OpenMLRun',
'OpenMLSplit',
'OpenMLEvaluation',
'OpenMLSetup',
'OpenMLParameter',
'OpenMLTask',
'OpenMLSupervisedTask',
'OpenMLClusteringTask',
'OpenMLLearningCurveTask',
'OpenMLRegressionTask',
'OpenMLClassificationTask',
'OpenMLFlow',
'OpenMLStudy',
'OpenMLBenchmarkSuite',
'datasets',
'evaluations',
'exceptions',
'extensions',
'config',
'runs',
'flows',
'tasks',
'setups',
'study',
'utils',
'_api_calls',
'__version__',
]
# Load the scikit-learn extension by default
import openml.extensions.sklearn # noqa: F401
| 23.737705 | 76 | 0.699931 |
from . import _api_calls
from . import config
from .datasets import OpenMLDataset, OpenMLDataFeature
from . import datasets
from . import evaluations
from .evaluations import OpenMLEvaluation
from . import extensions
from . import exceptions
from . import tasks
from .tasks import (
OpenMLTask,
OpenMLSplit,
OpenMLSupervisedTask,
OpenMLClassificationTask,
OpenMLRegressionTask,
OpenMLClusteringTask,
OpenMLLearningCurveTask,
)
from . import runs
from .runs import OpenMLRun
from . import flows
from .flows import OpenMLFlow
from . import study
from .study import OpenMLStudy, OpenMLBenchmarkSuite
from . import utils
from . import setups
from .setups import OpenMLSetup, OpenMLParameter
from .__version__ import __version__
def populate_cache(task_ids=None, dataset_ids=None, flow_ids=None,
run_ids=None):
if task_ids is not None:
for task_id in task_ids:
tasks.functions.get_task(task_id)
if dataset_ids is not None:
for dataset_id in dataset_ids:
datasets.functions.get_dataset(dataset_id)
if flow_ids is not None:
for flow_id in flow_ids:
flows.functions.get_flow(flow_id)
if run_ids is not None:
for run_id in run_ids:
runs.functions.get_run(run_id)
__all__ = [
'OpenMLDataset',
'OpenMLDataFeature',
'OpenMLRun',
'OpenMLSplit',
'OpenMLEvaluation',
'OpenMLSetup',
'OpenMLParameter',
'OpenMLTask',
'OpenMLSupervisedTask',
'OpenMLClusteringTask',
'OpenMLLearningCurveTask',
'OpenMLRegressionTask',
'OpenMLClassificationTask',
'OpenMLFlow',
'OpenMLStudy',
'OpenMLBenchmarkSuite',
'datasets',
'evaluations',
'exceptions',
'extensions',
'config',
'runs',
'flows',
'tasks',
'setups',
'study',
'utils',
'_api_calls',
'__version__',
]
import openml.extensions.sklearn
| true | true |
f71c33cccf1872a2d06d40b32de68921437e9c87 | 540 | py | Python | src/bos_consensus/middlewares/blockchain/base.py | LuffyEMonkey/isaac-consensus-protocol | 806d967d56ef8862a477b2515c7854af289c10a0 | [
"Apache-2.0"
] | 1 | 2018-04-10T11:00:59.000Z | 2018-04-10T11:00:59.000Z | src/bos_consensus/middlewares/blockchain/base.py | LuffyEMonkey/isaac-consensus-protocol | 806d967d56ef8862a477b2515c7854af289c10a0 | [
"Apache-2.0"
] | null | null | null | src/bos_consensus/middlewares/blockchain/base.py | LuffyEMonkey/isaac-consensus-protocol | 806d967d56ef8862a477b2515c7854af289c10a0 | [
"Apache-2.0"
] | null | null | null | from bos_consensus.util import LoggingMixin
class NoFurtherBlockchainMiddlewares(Exception):
    """Marker exception; per its name it signals that no further blockchain
    middlewares remain. Raised/handled by code outside this module."""
    pass
class StopReceiveBallot(Exception):
    """Marker exception; per its name it signals that ballot reception
    should stop. Raised/handled by code outside this module."""
    pass
class BaseBlockchainMiddleware(LoggingMixin):
    """Base class for blockchain middlewares.

    Subclasses override the ballot hooks below; the base class wires up a
    'middleware' logger tagged with the owning consensus node's name.
    """
    # The blockchain instance this middleware is attached to (set in __init__).
    blockchain = None

    def __init__(self, blockchain):
        self.blockchain = blockchain
        super(BaseBlockchainMiddleware, self).__init__()
        self.set_logging('middleware', node=self.blockchain.consensus.node.name)

    def received_ballot(self, ballot):
        """Hook invoked when a ballot is received. Default: no-op."""
        pass

    def finished_ballot(self, ballot):
        """Hook invoked when a ballot is finished. Default: no-op."""
        pass
| 20.769231 | 80 | 0.724074 | from bos_consensus.util import LoggingMixin
class NoFurtherBlockchainMiddlewares(Exception):
pass
class StopReceiveBallot(Exception):
pass
class BaseBlockchainMiddleware(LoggingMixin):
blockchain = None
def __init__(self, blockchain):
self.blockchain = blockchain
super(BaseBlockchainMiddleware, self).__init__()
self.set_logging('middleware', node=self.blockchain.consensus.node.name)
def received_ballot(self, ballot):
pass
def finished_ballot(self, ballot):
pass
| true | true |
f71c347fa097a89874460ad438e91b290e146610 | 6,795 | py | Python | Materials/views.py | Gguidini/artheart-db-explorer | 8e854248ff799f74f2702f767e5614e154e4a7f8 | [
"MIT"
] | null | null | null | Materials/views.py | Gguidini/artheart-db-explorer | 8e854248ff799f74f2702f767e5614e154e4a7f8 | [
"MIT"
] | null | null | null | Materials/views.py | Gguidini/artheart-db-explorer | 8e854248ff799f74f2702f767e5614e154e4a7f8 | [
"MIT"
] | null | null | null | """
This file defines functions to manipulate user interaction with the web-interface.
Responsible for views related to the models defined in Materials/models.py.
"""
import os
from django.core.paginator import Paginator
from django.http import HttpResponseForbidden
from django.shortcuts import HttpResponse, redirect, render
from django.urls import reverse_lazy
from ArtHeart.settings import MEDIA_ROOT, MEDIA_URL
from .forms import ApostilaUpload, CategoryUpload, ProjectUpload
from .models import Apostila, Categoria, Project
# Create your views here.
def search(request):
    """
    Shows all available Apostilas, paginated 30 per page.
    It's possible to filter by title substring, category, and project.
    Template for this view is 'Materials/search.html'
    Links to detail view.
    """
    data = {}
    projs = Project.objects.all().order_by('name')
    cats = Categoria.objects.all().order_by('category')
    data['projects'] = projs
    data['categories'] = cats
    data['media'] = MEDIA_URL
    aps = Apostila.objects.all()
    # filter entries to be displayed
    if request.method == 'GET' and not request.GET == {}:
        if 'search' in request.GET:
            search = request.GET['search']
            aps = aps.filter(title__icontains=search)
        if 'project' in request.GET:
            proj = request.GET['project']
            if proj != '':
                aps = aps.filter(project=proj)
        if 'categories' in request.GET:
            cats = request.GET.getlist('categories')
            aps = aps.filter(categories__in=cats)
    # Paginator
    paginator = Paginator(aps, 30)
    page = request.GET.get('page')
    aps = paginator.get_page(page)
    data['entries'] = aps
    return render(request, 'Materials/search.html', data)
def detail(request, pk):
    """
    Edit an existing Apostila (pk >= 0) or add a new one (pk == -1).
    On successful save, redirects to the search view; on validation
    failure the form is re-rendered with its errors.
    Template for this view is 'Materials/detail.html'
    """
    pk = int(pk)
    if request.method == 'GET':
        if pk != -1:
            ap = Apostila.objects.get(pk=pk)
            form = ApostilaUpload(instance=ap)
            data = {'doc': ap, 'form': form, 'edit': True}
        else:
            form = ApostilaUpload()
            data = {'form': form, 'edit': False}
        return render(request, 'Materials/detail.html', data)
    else:
        if pk != -1:
            # File is being replaced or cleared: remove the old one first.
            if 'file-clear' in request.POST or 'file' in request.FILES:
                deleteFile(pk)
            form = ApostilaUpload(
                request.POST or None, request.FILES or None, instance=Apostila.objects.get(pk=pk))
        else:
            form = ApostilaUpload(
                request.POST or None, request.FILES or None)
        if form.is_valid():
            entry = form.save()
            # M2M relations come straight from the POSTed id lists.
            cats = request.POST.getlist('categories')
            projects = request.POST.getlist('project')
            entry.categories.set([Categoria.objects.get(pk=c) for c in cats])
            entry.project.set([Project.objects.get(pk=p) for p in projects])
            entry.save()
            return redirect(reverse_lazy('url_search'))
        else:
            data = {'form': form}
            if pk != -1:
                data['doc'] = Apostila.objects.get(pk=pk)
                data['edit'] = True
            else:
                data['edit'] = False
            # BUG FIX: this used to render 'Materials/details.html' (a
            # template name used nowhere else) and passed no context, so
            # form errors were silently dropped.
            return render(request, 'Materials/detail.html', data)
def projects(request):
    """
    Shows available Projects. Also possible to add a new Project via POST.
    Quick glance at Project's title, client, and state of completion.
    NOTE(review): the original docstring claimed unauthenticated users
    cannot create projects, but no authentication check is performed in
    this view -- confirm whether it is enforced elsewhere.
    Template for this view is 'Materials/projects.html'
    Links to edit_project view.
    """
    data = {}
    data['projects'] = Project.objects.all().order_by('name')
    data['form'] = ProjectUpload()
    if request.method == 'POST':
        form = ProjectUpload(request.POST or None)
        if form.is_valid():
            form.save()
        # Refresh the list (may include the new project) and keep the bound
        # form so validation errors are shown back to the user.
        data['projects'] = Project.objects.all()
        data['form'] = form
        return render(request, 'Materials/projects.html', data)
    else:
        return render(request, 'Materials/projects.html', data)
def edit_project(request, pk):
    """
    Manages the editing of an existing Project, listing its Apostilas.
    On successful operation returns to projects view.
    Template for this view is 'Materials/edit_project.html'
    """
    p = Project.objects.get(pk=pk)
    form = ProjectUpload(instance=p)
    entries = p.apostila_set.all()
    data = {'doc': p, 'form': form, 'entries':entries}
    if request.method == 'POST':
        form = ProjectUpload(request.POST, instance=p)
        if form.is_valid():
            entry = form.save(commit=False)
            # The 'completed' checkbox is handled outside the form fields.
            if 'completed' in request.POST:
                entry.completed = True
            entry.save()
            return redirect(reverse_lazy('url_projects'))
        else:
            data['form'] = form
            return render(request, 'Materials/edit_project.html', data)
    return render(request, 'Materials/edit_project.html', data)
def category(request, pk):
    """
    Manages creation and deletion of Categoria.
    A GET with a pk deletes that category; a POSTed form creates a new one.
    Always re-renders the category list afterwards.
    NOTE(review): deleting via GET is unsafe (no CSRF protection, link
    prefetchers can trigger it) -- consider requiring POST/DELETE.
    Template for this view is 'Materials/category.html'
    """
    if request.method == 'GET':
        if pk is not None:
            # An unknown pk raises Categoria.DoesNotExist; the previously
            # commented-out blanket try/except was removed as dead code.
            Categoria.objects.get(pk=pk).delete()
    else:
        form = CategoryUpload(request.POST or None)
        if form.is_valid():
            form.save()
    cats = Categoria.objects.all().order_by('name')
    form = CategoryUpload()
    data = {'cat': cats, 'form': form}
    return render(request, 'Materials/category.html', data)
def deleteFile(pk):
    """
    Deletes the file referenced by an Apostila, if there is one on disk.
    Best-effort: a missing file is silently ignored.
    Has no template or URL.
    """
    ap = Apostila.objects.get(pk=pk)
    try:
        os.remove(os.path.join(MEDIA_ROOT, str(ap.file)))
    except OSError:
        # BUG FIX: was a bare `except:`, which also hid programming errors
        # (and even KeyboardInterrupt). Only filesystem errors are expected
        # here, so only those are silenced.
        pass
def delete(request, pk):
    """
    Deletes an Apostila from the database along with any file it references.
    Redirects back to the search view. Has no template.
    """
    doc = Apostila.objects.get(pk=pk)
    # Remove the file from disk before dropping the database row.
    deleteFile(pk)
    doc.delete()
    return redirect('url_search')
def delete_project(request, pk):
    """
    Deletes a Project.
    Related Apostilas are only dissociated from the project (the m2m link
    is removed); they are NOT deleted and any files they own stay on disk.
    (The original docstring wrongly claimed the Apostilas and their files
    were deleted too.)
    Has no template.
    """
    p = Project.objects.get(pk=pk)
    aps = p.apostila_set.all()
    for a in aps:
        # dissociate the Apostila from this project
        a.project.remove(p)
    p.delete()
    return redirect('url_projects')
| 31.027397 | 98 | 0.607653 |
import os
from django.core.paginator import Paginator
from django.http import HttpResponseForbidden
from django.shortcuts import HttpResponse, redirect, render
from django.urls import reverse_lazy
from ArtHeart.settings import MEDIA_ROOT, MEDIA_URL
from .forms import ApostilaUpload, CategoryUpload, ProjectUpload
from .models import Apostila, Categoria, Project
def search(request):
data = {}
projs = Project.objects.all().order_by('name')
cats = Categoria.objects.all().order_by('category')
data['projects'] = projs
data['categories'] = cats
data['media'] = MEDIA_URL
aps = Apostila.objects.all()
if request.method == 'GET' and not request.GET == {}:
if 'search' in request.GET:
search = request.GET['search']
aps = aps.filter(title__icontains=search)
if 'project' in request.GET:
proj = request.GET['project']
if proj != '':
aps = aps.filter(project=proj)
if 'categories' in request.GET:
cats = request.GET.getlist('categories')
aps = aps.filter(categories__in=cats)
paginator = Paginator(aps, 30)
page = request.GET.get('page')
aps = paginator.get_page(page)
data['entries'] = aps
return render(request, 'Materials/search.html', data)
def detail(request, pk):
pk = int(pk)
if request.method == 'GET':
if pk != -1:
ap = Apostila.objects.get(pk=pk)
form = ApostilaUpload(instance=ap)
data = {'doc': ap, 'form': form, 'edit': True}
else:
form = ApostilaUpload()
data = {'form': form, 'edit': False}
return render(request, 'Materials/detail.html', data)
else:
if pk != -1:
if 'file-clear' in request.POST or 'file' in request.FILES:
deleteFile(pk)
form = ApostilaUpload(
request.POST or None, request.FILES or None, instance=Apostila.objects.get(pk=pk))
else:
form = ApostilaUpload(
request.POST or None, request.FILES or None)
if form.is_valid():
entry = form.save()
cats = request.POST.getlist('categories')
projects = request.POST.getlist('project')
selected = []
for c in cats:
selected.append(Categoria.objects.get(pk=c))
entry.categories.set(selected)
selected = []
for p in projects:
selected.append(Project.objects.get(pk=p))
entry.project.set(selected)
entry.save()
return redirect(reverse_lazy('url_search'))
else:
data = {'form': form}
if pk != -1:
data['doc'] = Apostila.objects.get(pk=pk)
data['edit'] = True
else:
data['edit'] = False
return render(request, 'Materials/details.html', )
def projects(request):
data = {}
data['projects'] = Project.objects.all().order_by('name')
data['form'] = ProjectUpload()
if request.method == 'POST':
form = ProjectUpload(request.POST or None)
if form.is_valid():
form.save()
data['projects'] = Project.objects.all()
data['form'] = form
return render(request, 'Materials/projects.html', data)
else:
return render(request, 'Materials/projects.html', data)
def edit_project(request, pk):
p = Project.objects.get(pk=pk)
form = ProjectUpload(instance=p)
entries = p.apostila_set.all()
data = {'doc': p, 'form': form, 'entries':entries}
if request.method == 'POST':
form = ProjectUpload(request.POST, instance=p)
if form.is_valid():
entry = form.save(commit=False)
if 'completed' in request.POST:
entry.completed = True
entry.save()
return redirect(reverse_lazy('url_projects'))
else:
data['form'] = form
return render(request, 'Materials/edit_project.html', data)
return render(request, 'Materials/edit_project.html', data)
def category(request, pk):
if request.method == 'GET':
if pk is not None:
Categoria.objects.get(pk=pk).delete()
else:
form = CategoryUpload(request.POST or None)
if form.is_valid():
form.save()
cats = Categoria.objects.all().order_by('name')
form = CategoryUpload()
data = {'cat': cats, 'form': form}
return render(request, 'Materials/category.html', data)
def deleteFile(pk):
ap = Apostila.objects.get(pk=pk)
try:
os.remove(os.path.join(MEDIA_ROOT, str(ap.file)))
except:
pass
def delete(request, pk):
doc = Apostila.objects.get(pk=pk)
deleteFile(pk)
doc.delete()
return redirect('url_search')
def delete_project(request, pk):
p = Project.objects.get(pk=pk)
aps = p.apostila_set.all()
for a in aps:
a.project.remove(p)
p.delete()
return redirect('url_projects')
| true | true |
f71c352600ea4dd69dce91611e638d4ac22561c3 | 285 | py | Python | utils/__init__.py | 1219521375/bottom-up-attention.pytorch | 4a2e64383f024cc56728dd2a0ee63c8a171663c8 | [
"Apache-2.0"
] | 3 | 2022-02-18T13:38:47.000Z | 2022-03-30T11:30:35.000Z | utils/__init__.py | 1219521375/bottom-up-attention.pytorch | 4a2e64383f024cc56728dd2a0ee63c8a171663c8 | [
"Apache-2.0"
] | null | null | null | utils/__init__.py | 1219521375/bottom-up-attention.pytorch | 4a2e64383f024cc56728dd2a0ee63c8a171663c8 | [
"Apache-2.0"
] | null | null | null | from .utils import save_features
from .extract_features_faster import extract_feat_faster_start
from .extract_features_multigpu import extract_feat_multigpu_start
from .extract_features_singlegpu import extract_feat_singlegpu_start
from .extract_d2features import extract_feat_d2_start | 57 | 68 | 0.915789 | from .utils import save_features
from .extract_features_faster import extract_feat_faster_start
from .extract_features_multigpu import extract_feat_multigpu_start
from .extract_features_singlegpu import extract_feat_singlegpu_start
from .extract_d2features import extract_feat_d2_start | true | true |
f71c359d2395a2f0e5c51625afa87da5d3779d1a | 296,654 | py | Python | tests/test_core.py | Matheus28/emscripten | c1d3ca07227607043a92d55dee65a733851d6813 | [
"MIT"
] | null | null | null | tests/test_core.py | Matheus28/emscripten | c1d3ca07227607043a92d55dee65a733851d6813 | [
"MIT"
] | null | null | null | tests/test_core.py | Matheus28/emscripten | c1d3ca07227607043a92d55dee65a733851d6813 | [
"MIT"
] | null | null | null | # Copyright 2013 The Emscripten Authors. All rights reserved.
# Emscripten is available under two separate licenses, the MIT license and the
# University of Illinois/NCSA Open Source License. Both these licenses can be
# found in the LICENSE file.
import glob
import hashlib
import json
import logging
import os
import random
import re
import shutil
import sys
import time
import unittest
from pathlib import Path
from functools import wraps
if __name__ == '__main__':
raise Exception('do not run this file directly; do something like: tests/runner')
from tools.shared import try_delete, PIPE
from tools.shared import PYTHON, EMCC, EMAR
from tools.utils import WINDOWS, MACOS
from tools import shared, building, config, webassembly
from common import RunnerCore, path_from_root, requires_native_clang, test_file, create_file
from common import skip_if, needs_dylink, no_windows, no_mac, is_slow_test, parameterized
from common import env_modify, with_env_modify, disabled, node_pthreads
from common import read_file, read_binary, require_node, require_v8
from common import NON_ZERO, WEBIDL_BINDER, EMBUILDER
import clang_native
# decorators for limiting which modes a test can run in
logger = logging.getLogger("test_core")
def wasm_simd(f):
  """Decorator: build the test with wasm SIMD and run it in V8 only."""
  def decorated(self):
    self.require_v8()
    if not self.is_wasm():
      self.skipTest('wasm2js only supports MVP for now')
    if '-O3' in self.emcc_args:
      self.skipTest('SIMD tests are too slow with -O3 in the new LLVM pass manager, https://github.com/emscripten-core/emscripten/issues/13427')
    # Enable SIMD codegen, keep vector conversions strict, and opt V8 in.
    self.emcc_args += ['-msimd128', '-fno-lax-vector-conversions']
    self.v8_args.append('--experimental-wasm-simd')
    f(self)
  return decorated
def wasm_relaxed_simd(f):
  """Decorator: build the test with relaxed SIMD enabled.

  No engine is required because these tests are not actually executed yet.
  """
  def wrapper(self):
    if not self.is_wasm():
      self.skipTest('wasm2js only supports MVP for now')
    self.emcc_args += ['-mrelaxed-simd']
    f(self)
  return wrapper
def needs_non_trapping_float_to_int(f):
  """Decorator: test requires non-trapping float-to-int; skip under wasm2js."""
  def wrapper(self):
    if not self.is_wasm():
      self.skipTest('wasm2js only supports MVP for now')
    f(self)
  return wrapper
def also_with_wasm_bigint(f):
  """Decorator: run once with WASM_BIGINT off, and again with it on (wasm only)."""
  def decorated(self):
    # First pass: BigInt integration disabled.
    self.set_setting('WASM_BIGINT', 0)
    f(self)
    # Second pass only applies to real wasm output, and needs node's flag.
    if self.is_wasm():
      self.set_setting('WASM_BIGINT')
      self.require_node()
      self.node_args.append('--experimental-wasm-bigint')
      f(self)
  return decorated
# without EMTEST_ALL_ENGINES set we only run tests in a single VM by
# default. in some tests we know that cross-VM differences may happen and
# so are worth testing, and they should be marked with this decorator
def all_engines(f):
  """Decorator: run the test in all configured engines/environments.

  Without EMTEST_ALL_ENGINES only one VM is used by default; tests where
  cross-VM differences matter are marked with this decorator.
  """
  def decorated(self):
    saved = self.use_all_engines
    self.use_all_engines = True
    self.set_setting('ENVIRONMENT', 'web,node,shell')
    try:
      f(self)
    finally:
      # Restore the previous engine selection even if the test fails.
      self.use_all_engines = saved
  return decorated
# Tests exception handling in emscripten exception handling mode, and if
# possible, new wasm EH mode.
def with_both_exception_handling(f):
  """Parameterize a test to run in both exception handling modes.

  The '' variant uses emscripten's JS-based exception handling; the
  'wasm_eh' variant uses native wasm exceptions (wasm output + V8 only).
  """
  assert callable(f)

  def metafunc(self, native_exceptions):
    if native_exceptions:
      # Wasm EH is currently supported only in wasm backend and V8
      if not self.is_wasm():
        self.skipTest('wasm2js does not support wasm exceptions')
      self.require_v8()
      # FIXME Temporarily disabled. Enable this later when the bug is fixed.
      if '-fsanitize=address' in self.emcc_args:
        self.skipTest('Wasm EH does not work with asan yet')
      self.emcc_args.append('-fwasm-exceptions')
      self.v8_args.append('--experimental-wasm-eh')
      f(self)
    else:
      self.set_setting('DISABLE_EXCEPTION_CATCHING', 0)
      f(self)
  # Consumed by the parameterized-test machinery: one name suffix per variant.
  metafunc._parameterize = {'': (False,),
                            'wasm_eh': (True,)}
  return metafunc
def no_wasm2js(note=''):
  """Decorator factory: skip the test when targeting wasm2js (WASM=0)."""
  assert not callable(note)
  return lambda f: skip_if(f, 'is_wasm2js', note)
def also_with_noderawfs(func):
  """Decorator: run the test with the normal FS, then again under NODERAWFS."""
  def wrapper(self):
    saved_args = self.emcc_args.copy()
    func(self)
    print('noderawfs')
    # Second run: raw node filesystem access, which only works under node.
    self.emcc_args = saved_args + ['-DNODERAWFS']
    self.set_setting('NODERAWFS')
    self.js_engines = [config.NODE_JS]
    func(self)
  return wrapper
def can_do_standalone(self):
  """Return True when the current configuration supports STANDALONE_WASM."""
  if not self.is_wasm():
    return False
  if self.get_setting('STACK_OVERFLOW_CHECK', 0) >= 2:
    return False
  if self.get_setting('MINIMAL_RUNTIME') or self.get_setting('SAFE_HEAP'):
    return False
  return '-fsanitize=address' not in self.emcc_args
def also_with_wasmfs(func):
  """Decorator: run the test with the default FS, then again under WASMFS."""
  def wrapper(self):
    func(self)
    print('wasmfs')
    # WASMFS and STANDALONE_WASM cannot currently be combined.
    if self.get_setting('STANDALONE_WASM'):
      self.skipTest("test currently cannot run both with WASMFS and STANDALONE_WASM")
    self.set_setting('WASMFS')
    func(self)
  return wrapper
# Impure means a test that cannot run in a wasm VM yet, as it is not 100%
# standalone. We can still run them with the JS code though.
def also_with_standalone_wasm(wasm2c=False, impure=False):
  """Parameterize a test to also run as STANDALONE_WASM (and optionally wasm2c).

  `impure` marks a test that is not 100% standalone and so cannot run in a
  bare wasm VM; it still runs via the JS code under node.
  """
  def decorated(func):
    def metafunc(self, standalone):
      if not standalone:
        func(self)
      else:
        if can_do_standalone(self):
          self.set_setting('STANDALONE_WASM')
          # we will not legalize the JS ffi interface, so we must use BigInt
          # support in order for JS to have a chance to run this without trapping
          # when it sees an i64 on the ffi.
          self.set_setting('WASM_BIGINT')
          self.emcc_args.append('-Wno-unused-command-line-argument')
          # if we are impure, disallow all wasm engines
          if impure:
            self.wasm_engines = []
          self.js_engines = [config.NODE_JS]
          self.node_args.append('--experimental-wasm-bigint')
          func(self)
          if wasm2c:
            print('wasm2c')
            self.set_setting('WASM2C')
            self.wasm_engines = []
            func(self)
    # Consumed by the parameterized-test machinery: one name suffix per variant.
    metafunc._parameterize = {'': (False,),
                              'standalone': (True,)}
    return metafunc
  return decorated
def no_optimize(note=''):
  """Decorator factory: skip a test whenever the build is optimizing."""
  assert not callable(note)

  def wrap(func):
    assert callable(func)

    def inner(self):
      if self.is_optimizing():
        self.skipTest(note)
      func(self)
    return inner
  return wrap
def needs_make(note=''):
  """Decorator factory: skip on Windows bots, where `make` is unavailable."""
  assert not callable(note)
  if not WINDOWS:
    # No-op decorator on platforms where make exists.
    return lambda f: f
  return unittest.skip('Tool not available on Windows bots (%s)' % note)
def no_asan(note):
  """Decorator factory: skip the test when building with AddressSanitizer."""
  assert not callable(note)

  def wrap(func):
    assert callable(func)

    @wraps(func)
    def wrapper(self, *args, **kwargs):
      if '-fsanitize=address' in self.emcc_args:
        self.skipTest(note)
      func(self, *args, **kwargs)
    return wrapper
  return wrap
def no_lsan(note):
  """Decorator factory: skip the test when building with LeakSanitizer."""
  assert not callable(note)

  def wrap(func):
    assert callable(func)

    @wraps(func)
    def wrapper(self, *args, **kwargs):
      if '-fsanitize=leak' in self.emcc_args:
        self.skipTest(note)
      func(self, *args, **kwargs)
    return wrapper
  return wrap
def make_no_decorator_for_setting(name):
  """Build a `no_<setting>(note)` decorator factory for setting `name`.

  The produced decorator skips the test when the setting is enabled, either
  via set_setting or via a raw '<NAME>=1' flag in emcc_args.
  """
  flag = name + '=1'

  def outer_decorator(note):
    assert not callable(note)

    def decorator(f):
      assert callable(f)

      @wraps(f)
      def decorated(self, *args, **kwargs):
        if flag in self.emcc_args or self.get_setting(name):
          self.skipTest(note)
        f(self, *args, **kwargs)
      return decorated
    return decorator
  return outer_decorator
no_minimal_runtime = make_no_decorator_for_setting('MINIMAL_RUNTIME')
no_safe_heap = make_no_decorator_for_setting('SAFE_HEAP')
def is_sanitizing(args):
  """Return True when any `-fsanitize=` flag appears in `args`."""
  return str(args).find('-fsanitize=') != -1
class TestCoreBase(RunnerCore):
  def is_wasm2js(self):
    """Return whether we are targeting wasm2js (JS output, WASM=0)."""
    return self.get_setting('WASM') == 0
# A simple check whether the compiler arguments cause optimization.
def is_optimizing(self):
return '-O' in str(self.emcc_args) and '-O0' not in self.emcc_args
def can_use_closure(self):
return '-g' not in self.emcc_args and '--profiling' not in self.emcc_args and ('-O2' in self.emcc_args or '-Os' in self.emcc_args)
# Use closure in some tests for some additional coverage
def maybe_closure(self):
if '--closure=1' not in self.emcc_args and self.can_use_closure():
self.emcc_args += ['--closure=1']
logger.debug('using closure compiler..')
return True
return False
def assertStartswith(self, output, prefix):
self.assertEqual(prefix, output[:len(prefix)])
def verify_in_strict_mode(self, filename):
js = read_file(filename)
filename += '.strict.js'
with open(filename, 'w') as outfile:
outfile.write('"use strict";\n' + js)
self.run_js(filename)
def do_core_test(self, testname, **kwargs):
self.do_run_in_out_file_test(Path('core', testname), **kwargs)
  def get_bullet_library(self, use_cmake):
    """Build (or fetch from cache) the Bullet physics libraries.

    `use_cmake` selects the cmake build; otherwise the autoconf-style
    `./configure` build is used. Returns the built static libraries.
    """
    if use_cmake:
      configure_commands = ['cmake', '.']
      configure_args = ['-DBUILD_DEMOS=OFF', '-DBUILD_EXTRAS=OFF', '-DUSE_GLUT=OFF']
      # Depending on whether 'configure' or 'cmake' is used to build, Bullet
      # places output files in different directory structures.
      generated_libs = [Path('src/BulletDynamics/libBulletDynamics.a'),
                        Path('src/BulletCollision/libBulletCollision.a'),
                        Path('src/LinearMath/libLinearMath.a')]
    else:
      configure_commands = ['sh', './configure']
      # Force a nondefault --host= so that the configure script will interpret
      # that we are doing cross-compilation
      # and skip attempting to run the generated executable with './a.out',
      # which would fail since we are building a .js file.
      configure_args = ['--disable-shared', '--host=i686-pc-linux-gnu',
                        '--disable-demos', '--disable-dependency-tracking']
      generated_libs = [Path('src/.libs/libBulletDynamics.a'),
                        Path('src/.libs/libBulletCollision.a'),
                        Path('src/.libs/libLinearMath.a')]
    # cache_name_extra distinguishes the cmake and configure cache entries.
    return self.get_library('third_party/bullet', generated_libs,
                            configure=configure_commands,
                            configure_args=configure_args,
                            cache_name_extra=configure_commands[0])
@also_with_standalone_wasm()
@also_with_wasmfs
def test_hello_world(self):
self.do_core_test('test_hello_world.c')
# must not emit this unneeded internal thing
self.assertNotContained('EMSCRIPTEN_GENERATED_FUNCTIONS', read_file('test_hello_world.js'))
def test_wasm_synchronous_compilation(self):
self.set_setting('STRICT_JS')
self.set_setting('WASM_ASYNC_COMPILATION', 0)
self.do_core_test('test_hello_world.c')
@also_with_standalone_wasm()
def test_hello_argc(self):
self.do_core_test('test_hello_argc.c')
def test_intvars(self):
self.do_core_test('test_intvars.cpp')
def test_sintvars(self):
self.do_core_test('test_sintvars.c')
def test_int53(self):
self.emcc_args += ['-s', 'DEFAULT_LIBRARY_FUNCS_TO_INCLUDE=[$convertI32PairToI53,$convertU32PairToI53,$readI53FromU64,$readI53FromI64,$writeI53ToI64,$writeI53ToI64Clamped,$writeI53ToU64Clamped,$writeI53ToI64Signaling,$writeI53ToU64Signaling]']
self.do_core_test('test_int53.c', interleaved_output=False)
def test_i64(self):
self.do_core_test('test_i64.c')
def test_i64_2(self):
self.do_core_test('test_i64_2.cpp')
def test_i64_3(self):
self.do_core_test('test_i64_3.cpp')
def test_i64_4(self):
# stuff that also needs sign corrections
self.do_core_test('test_i64_4.c')
def test_i64_b(self):
self.do_core_test('test_i64_b.cpp')
def test_i64_cmp(self):
self.do_core_test('test_i64_cmp.cpp')
def test_i64_cmp2(self):
self.do_core_test('test_i64_cmp2.c')
def test_i64_double(self):
self.do_core_test('test_i64_double.cpp')
def test_i64_umul(self):
self.do_core_test('test_i64_umul.c')
@also_with_standalone_wasm()
def test_i64_precise(self):
self.do_core_test('test_i64_precise.c')
def test_i64_precise_needed(self):
self.do_core_test('test_i64_precise_needed.c')
def test_i64_llabs(self):
self.do_core_test('test_i64_llabs.c')
def test_i64_zextneg(self):
self.do_core_test('test_i64_zextneg.c')
def test_i64_7z(self):
# needs to flush stdio streams
self.set_setting('EXIT_RUNTIME')
self.do_core_test('test_i64_7z.c', args=['hallo'])
def test_i64_i16(self):
self.do_core_test('test_i64_i16.c')
def test_i64_qdouble(self):
self.do_core_test('test_i64_qdouble.c')
def test_i64_varargs(self):
self.do_core_test('test_i64_varargs.c', args='waka fleefl asdfasdfasdfasdf'.split())
@no_wasm2js('wasm_bigint')
def test_i64_invoke_bigint(self):
self.set_setting('WASM_BIGINT')
self.emcc_args += ['-fexceptions']
self.node_args += ['--experimental-wasm-bigint']
self.do_core_test('test_i64_invoke_bigint.cpp', js_engines=[config.NODE_JS])
def test_vararg_copy(self):
self.do_run_in_out_file_test('va_arg/test_va_copy.c')
def test_llvm_fabs(self):
self.do_core_test('test_llvm_fabs.c')
def test_double_varargs(self):
self.do_core_test('test_double_varargs.c')
def test_trivial_struct_varargs(self):
self.do_core_test('test_trivial_struct_varargs.c')
def test_struct_varargs(self):
self.do_core_test('test_struct_varargs.c')
def test_zero_struct_varargs(self):
self.do_core_test('test_zero_struct_varargs.c')
def zzztest_nested_struct_varargs(self):
self.do_core_test('test_nested_struct_varargs.c')
def test_i32_mul_precise(self):
self.do_core_test('test_i32_mul_precise.c')
def test_i16_emcc_intrinsic(self):
# needs to flush stdio streams
self.set_setting('EXIT_RUNTIME')
self.do_core_test('test_i16_emcc_intrinsic.c')
def test_double_i64_conversion(self):
self.do_core_test('test_double_i64_conversion.c')
def test_float32_precise(self):
self.do_core_test('test_float32_precise.c')
def test_negative_zero(self):
self.do_core_test('test_negative_zero.c')
def test_literal_negative_zero(self):
self.do_core_test('test_literal_negative_zero.c')
@also_with_standalone_wasm()
def test_bswap64(self):
self.do_core_test('test_bswap64.cpp')
def test_sha1(self):
self.do_runf(test_file('sha1.c'), 'SHA1=15dd99a1991e0b3826fede3deffc1feba42278e6')
def test_wasm32_unknown_emscripten(self):
# No other configuration is supported, so always run this.
self.do_runf(test_file('wasm32-unknown-emscripten.c'), '')
def test_cube2md5(self):
self.emcc_args += ['--embed-file', 'cube2md5.txt']
shutil.copyfile(test_file('cube2md5.txt'), 'cube2md5.txt')
self.do_run_from_file(test_file('cube2md5.cpp'), test_file('cube2md5.ok'), assert_returncode=NON_ZERO)
@also_with_standalone_wasm(wasm2c=True)
@needs_make('make')
def test_cube2hash(self):
# A good test of i64 math
self.do_run('// empty file', 'Usage: hashstring <seed>',
libraries=self.get_library('third_party/cube2hash', ['libcube2hash.a'], configure=None),
includes=[test_file('third_party/cube2hash')], assert_returncode=NON_ZERO)
for text, output in [('fleefl', '892BDB6FD3F62E863D63DA55851700FDE3ACF30204798CE9'),
('fleefl2', 'AA2CC5F96FC9D540CA24FDAF1F71E2942753DB83E8A81B61'),
('64bitisslow', '64D8470573635EC354FEE7B7F87C566FCAF1EFB491041670')]:
self.do_run('src.js', 'hash value: ' + output, args=[text], no_build=True)
def test_unaligned(self):
self.skipTest('LLVM marks the reads of s as fully aligned, making this test invalid')
src = r'''
#include <stdio.h>
struct S {
double x;
int y;
};
int main() {
// the 64-bit value here will not be 8-byte aligned
S s0[3] = { {0x12a751f430142, 22}, {0x17a5c85bad144, 98}, {1, 1}};
char buffer[10*sizeof(S)];
int b = int(buffer);
S *s = (S*)(b + 4-b%8);
s[0] = s0[0];
s[1] = s0[1];
s[2] = s0[2];
printf("*%d : %d : %d\n", sizeof(S), ((unsigned int)&s[0]) % 8 != ((unsigned int)&s[1]) % 8,
((unsigned int)&s[1]) - ((unsigned int)&s[0]));
s[0].x++;
s[0].y++;
s[1].x++;
s[1].y++;
printf("%.1f,%d,%.1f,%d\n", s[0].x, s[0].y, s[1].x, s[1].y);
return 0;
}
'''
# TODO: A version of this with int64s as well
self.do_run(src, '*12 : 1 : 12\n328157500735811.0,23,416012775903557.0,99\n')
return # TODO: continue to the next part here
# Test for undefined behavior in C. This is not legitimate code, but does exist
src = r'''
#include <stdio.h>
int main()
{
int x[10];
char *p = (char*)&x[0];
p++;
short *q = (short*)p;
*q = 300;
printf("*%d:%ld*\n", *q, ((long)q)%2);
int *r = (int*)p;
*r = 515559;
printf("*%d*\n", *r);
long long *t = (long long*)p;
*t = 42949672960;
printf("*%lld*\n", *t);
return 0;
}
'''
try:
self.do_run(src, '*300:1*\n*515559*\n*42949672960*\n')
except Exception as e:
assert 'must be aligned' in str(e), e # expected to fail without emulation
def test_align64(self):
src = r'''
#include <stdio.h>
// inspired by poppler
enum Type {
A = 10,
B = 20
};
struct Object {
Type type;
union {
int intg;
double real;
char *name;
};
};
struct Principal {
double x;
Object a;
double y;
};
int main(int argc, char **argv)
{
int base = argc-1;
Object *o = NULL;
printf("%zu,%zu\n", sizeof(Object), sizeof(Principal));
printf("%ld,%ld,%ld,%ld\n", (long)&o[base].type, (long)&o[base].intg, (long)&o[base].real, (long)&o[base].name);
printf("%ld,%ld,%ld,%ld\n", (long)&o[base+1].type, (long)&o[base+1].intg, (long)&o[base+1].real, (long)&o[base+1].name);
Principal p, q;
p.x = p.y = q.x = q.y = 0;
p.a.type = A;
p.a.real = 123.456;
*(&q.a) = p.a;
printf("%.2f,%d,%.2f,%.2f : %.2f,%d,%.2f,%.2f\n", p.x, p.a.type, p.a.real, p.y, q.x, q.a.type, q.a.real, q.y);
return 0;
}
'''
self.do_run(src, '''16,32
0,8,8,8
16,24,24,24
0.00,10,123.46,0.00 : 0.00,10,123.46,0.00
''')
@no_asan('asan errors on corner cases we check')
def test_aligned_alloc(self):
self.do_runf(test_file('test_aligned_alloc.c'), '',
emcc_args=['-Wno-non-power-of-two-alignment'])
def test_unsigned(self):
src = '''
#include <stdio.h>
const signed char cvals[2] = { -1, -2 }; // compiler can store this is a string, so -1 becomes \\FF, and needs re-signing
int main()
{
{
unsigned char x = 200;
printf("*%d*\\n", x);
unsigned char y = -22;
printf("*%d*\\n", y);
}
int varey = 100;
unsigned int MAXEY = -1, MAXEY2 = -77;
printf("*%u,%d,%u*\\n", MAXEY, varey >= MAXEY, MAXEY2); // 100 >= -1? not in unsigned!
int y = cvals[0];
printf("*%d,%d,%d,%d*\\n", cvals[0], cvals[0] < 0, y, y < 0);
y = cvals[1];
printf("*%d,%d,%d,%d*\\n", cvals[1], cvals[1] < 0, y, y < 0);
// zext issue - see mathop in jsifier
unsigned char x8 = -10;
unsigned long hold = 0;
hold += x8;
int y32 = hold+50;
printf("*%lu,%d*\\n", hold, y32);
// Comparisons
x8 = 0;
for (int i = 0; i < 254; i++) x8++; // make it an actual 254 in JS - not a -2
printf("*%d,%d*\\n", x8+1 == 0xff, x8+1 != 0xff); // 0xff may be '-1' in the bitcode
return 0;
}
'''
self.do_run(src, '*4294967295,0,4294967219*\n*-1,1,-1,1*\n*-2,1,-2,1*\n*246,296*\n*1,0*')
self.emcc_args.append('-Wno-constant-conversion')
src = '''
#include <stdio.h>
int main()
{
{
unsigned char x;
unsigned char *y = &x;
*y = -1;
printf("*%d*\\n", x);
}
{
unsigned short x;
unsigned short *y = &x;
*y = -1;
printf("*%d*\\n", x);
}
/*{ // This case is not checked. The hint for unsignedness is just the %u in printf, and we do not analyze that
unsigned int x;
unsigned int *y = &x;
*y = -1;
printf("*%u*\\n", x);
}*/
{
char x;
char *y = &x;
*y = 255;
printf("*%d*\\n", x);
}
{
char x;
char *y = &x;
*y = 65535;
printf("*%d*\\n", x);
}
{
char x;
char *y = &x;
*y = 0xffffffff;
printf("*%d*\\n", x);
}
return 0;
}
'''
self.do_run(src, '*255*\n*65535*\n*-1*\n*-1*\n*-1*')
def test_bitfields(self):
self.do_core_test('test_bitfields.c')
def test_floatvars(self):
self.do_core_test('test_floatvars.cpp')
def test_closebitcasts(self):
self.do_core_test('closebitcasts.c')
def test_fast_math(self):
self.emcc_args += ['-ffast-math']
self.do_core_test('test_fast_math.c', args=['5', '6', '8'])
def test_zerodiv(self):
self.do_core_test('test_zerodiv.c')
def test_zero_multiplication(self):
# needs to flush stdio streams
self.set_setting('EXIT_RUNTIME')
self.do_core_test('test_zero_multiplication.c')
def test_isnan(self):
self.do_core_test('test_isnan.c')
def test_globaldoubles(self):
self.do_core_test('test_globaldoubles.c')
def test_math(self):
self.do_core_test('test_math.c')
def test_erf(self):
self.do_core_test('test_erf.c')
def test_math_hyperbolic(self):
self.do_core_test('test_math_hyperbolic.c')
def test_math_lgamma(self):
self.do_run_in_out_file_test('math/lgamma.c', assert_returncode=NON_ZERO)
def test_math_fmodf(self):
self.do_run_in_out_file_test('math/fmodf.c')
def test_frexp(self):
self.do_core_test('test_frexp.c')
def test_rounding(self):
# needs to flush stdio streams
self.set_setting('EXIT_RUNTIME')
self.do_core_test('test_rounding.c')
def test_fcvt(self):
self.do_core_test('test_fcvt.cpp')
def test_llrint(self):
self.do_core_test('test_llrint.c')
def test_getgep(self):
# Generated code includes getelementptr (getelementptr, 0, 1), i.e., GEP as the first param to GEP
self.do_core_test('test_getgep.c')
def test_multiply_defined_symbols(self):
create_file('a1.c', 'int f() { return 1; }')
create_file('a2.c', 'void x() {}')
create_file('b1.c', 'int f() { return 2; }')
create_file('b2.c', 'void y() {}')
create_file('main.c', r'''
#include <stdio.h>
int f();
int main() {
printf("result: %d\n", f());
return 0;
}
''')
self.emcc('a1.c', ['-c'])
self.emcc('a2.c', ['-c'])
self.emcc('b1.c', ['-c'])
self.emcc('b2.c', ['-c'])
self.emcc('main.c', ['-c'])
building.emar('cr', 'liba.a', ['a1.c.o', 'a2.c.o'])
building.emar('cr', 'libb.a', ['b1.c.o', 'b2.c.o'])
building.link_to_object(['main.c.o', 'liba.a', 'libb.a'], 'all.o')
self.emcc('all.o', self.get_emcc_args(), 'all.js')
self.do_run('all.js', 'result: 1', no_build=True)
def test_if(self):
self.do_core_test('test_if.c')
def test_if_else(self):
self.do_core_test('test_if_else.c')
def test_loop(self):
self.do_core_test('test_loop.c')
def test_stack(self):
self.set_setting('INLINING_LIMIT')
# some extra coverage in all test suites for stack checks
self.set_setting('STACK_OVERFLOW_CHECK', 2)
self.do_core_test('test_stack.c')
def test_stack_align(self):
src = test_file('core/test_stack_align.cpp')
def test():
self.do_runf(src, ['''align 4: 0
align 8: 0
align 16: 0
align 32: 0
base align: 0, 0, 0, 0'''])
test()
@no_asan('stack size is too low for asan to work properly')
def test_stack_placement(self):
self.set_setting('TOTAL_STACK', 1024)
self.do_core_test('test_stack_placement.c')
self.set_setting('GLOBAL_BASE', 102400)
self.do_core_test('test_stack_placement.c')
@no_asan('asan does not support main modules')
@no_wasm2js('MAIN_MODULE support')
def test_stack_placement_pic(self):
self.set_setting('TOTAL_STACK', 1024)
self.set_setting('MAIN_MODULE')
self.do_core_test('test_stack_placement.c')
self.set_setting('GLOBAL_BASE', 102400)
self.do_core_test('test_stack_placement.c')
def test_strings(self):
self.do_core_test('test_strings.c', args=['wowie', 'too', '74'])
def test_strcmp_uni(self):
self.do_core_test('test_strcmp_uni.c')
def test_strndup(self):
self.do_core_test('test_strndup.c')
def test_errar(self):
self.do_core_test('test_errar.c')
def test_mainenv(self):
self.do_core_test('test_mainenv.c')
def test_funcs(self):
self.do_core_test('test_funcs.c')
def test_structs(self):
self.do_core_test('test_structs.c')
gen_struct_src = '''
#include <stdio.h>
#include <stdlib.h>
#include "emscripten.h"
struct S
{
int x, y;
};
int main()
{
S* a = {{gen_struct}};
a->x = 51; a->y = 62;
printf("*%d,%d*\\n", a->x, a->y);
{{del_struct}}(a);
return 0;
}
'''
def test_mallocstruct(self):
self.do_run(self.gen_struct_src.replace('{{gen_struct}}', '(S*)malloc(sizeof(S))').replace('{{del_struct}}', 'free'), '*51,62*')
@no_asan('ASan does not support custom memory allocators')
@no_lsan('LSan does not support custom memory allocators')
@parameterized({
'normal': [],
'memvalidate': ['-DEMMALLOC_MEMVALIDATE'],
'memvalidate_verbose': ['-DEMMALLOC_MEMVALIDATE', '-DEMMALLOC_VERBOSE', '-DRANDOM_ITERS=130'],
})
def test_emmalloc(self, *args):
# in newer clang+llvm, the internal calls to malloc in emmalloc may be optimized under
# the assumption that they are external, so like in system_libs.py where we build
# malloc, we need to disable builtin here too
self.set_setting('MALLOC', 'none')
self.emcc_args += ['-fno-builtin'] + list(args)
self.do_run(read_file(path_from_root('system/lib/emmalloc.c')) +
read_file(path_from_root('system/lib/sbrk.c')) +
read_file(test_file('core/test_emmalloc.c')),
read_file(test_file('core/test_emmalloc.out')), force_c=True)
@no_asan('ASan does not support custom memory allocators')
@no_lsan('LSan does not support custom memory allocators')
def test_emmalloc_usable_size(self, *args):
self.set_setting('MALLOC', 'emmalloc')
self.emcc_args += list(args)
self.do_core_test('test_malloc_usable_size.c')
@no_optimize('output is sensitive to optimization flags, so only test unoptimized builds')
@no_asan('ASan does not support custom memory allocators')
@no_lsan('LSan does not support custom memory allocators')
def test_emmalloc_memory_statistics(self, *args):
self.set_setting('MALLOC', 'emmalloc')
self.emcc_args += ['-s', 'INITIAL_MEMORY=128MB', '-g'] + list(args)
self.do_core_test('test_emmalloc_memory_statistics.cpp')
@no_optimize('output is sensitive to optimization flags, so only test unoptimized builds')
@no_asan('ASan does not support custom memory allocators')
@no_lsan('LSan does not support custom memory allocators')
def test_emmalloc_trim(self, *args):
self.set_setting('MALLOC', 'emmalloc')
self.emcc_args += ['-s', 'INITIAL_MEMORY=128MB', '-s', 'ALLOW_MEMORY_GROWTH', '-s', 'MAXIMUM_MEMORY=2147418112'] + list(args)
self.do_core_test('test_emmalloc_trim.cpp')
# Test case against https://github.com/emscripten-core/emscripten/issues/10363
def test_emmalloc_memalign_corruption(self, *args):
self.set_setting('MALLOC', 'emmalloc')
self.do_core_test('emmalloc_memalign_corruption.cpp')
def test_newstruct(self):
self.do_run(self.gen_struct_src.replace('{{gen_struct}}', 'new S').replace('{{del_struct}}', 'delete'), '*51,62*')
def test_addr_of_stacked(self):
self.do_core_test('test_addr_of_stacked.c')
def test_globals(self):
self.do_core_test('test_globals.c')
def test_linked_list(self):
self.do_core_test('test_linked_list.c')
def test_sup(self):
self.do_run_in_out_file_test(test_file('core/test_sup.cpp'))
@also_with_standalone_wasm()
def test_assert(self):
self.do_core_test('test_assert.cpp', assert_returncode=NON_ZERO)
def test_wcslen(self):
self.do_core_test('test_wcslen.c')
def test_regex(self):
self.do_core_test('test_regex.c')
@also_with_standalone_wasm(wasm2c=True, impure=True)
def test_longjmp(self):
self.do_core_test('test_longjmp.c')
def test_longjmp2(self):
self.do_core_test('test_longjmp2.c')
@needs_dylink
def test_longjmp2_main_module(self):
# Test for binaryen regression:
# https://github.com/WebAssembly/binaryen/issues/2180
self.set_setting('MAIN_MODULE')
self.do_core_test('test_longjmp2.c')
def test_longjmp3(self):
self.do_core_test('test_longjmp3.c')
def test_longjmp4(self):
self.do_core_test('test_longjmp4.c')
def test_longjmp_funcptr(self):
self.do_core_test('test_longjmp_funcptr.c')
def test_longjmp_repeat(self):
self.do_core_test('test_longjmp_repeat.c')
def test_longjmp_stacked(self):
self.do_core_test('test_longjmp_stacked.c', assert_returncode=NON_ZERO)
def test_longjmp_exc(self):
self.do_core_test('test_longjmp_exc.c', assert_returncode=NON_ZERO)
def test_longjmp_throw(self):
for disable_throw in [0, 1]:
print(disable_throw)
self.set_setting('DISABLE_EXCEPTION_CATCHING', disable_throw)
self.do_core_test('test_longjmp_throw.cpp')
def test_longjmp_unwind(self):
self.do_core_test('test_longjmp_unwind.c', assert_returncode=NON_ZERO)
def test_longjmp_i64(self):
self.emcc_args += ['-g']
self.do_core_test('test_longjmp_i64.c', assert_returncode=NON_ZERO)
def test_siglongjmp(self):
self.do_core_test('test_siglongjmp.c')
def test_setjmp_many(self):
src = r'''
#include <stdio.h>
#include <setjmp.h>
int main(int argc, char** argv) {
jmp_buf buf;
for (int i = 0; i < NUM; i++) printf("%d\n", setjmp(buf));
if (argc-- == 1131) longjmp(buf, 11);
return 0;
}
'''
for num in [1, 5, 20, 1000]:
print('NUM=%d' % num)
self.do_run(src.replace('NUM', str(num)), '0\n' * num)
def test_setjmp_many_2(self):
src = r'''
#include <setjmp.h>
#include <stdio.h>
jmp_buf env;
void luaWork(int d){
int x;
printf("d is at %d\n", d);
longjmp(env, 1);
}
int main()
{
const int ITERATIONS=25;
for(int i = 0; i < ITERATIONS; i++){
if(!setjmp(env)){
luaWork(i);
}
}
return 0;
}
'''
self.do_run(src, r'''d is at 24''')
def test_setjmp_noleak(self):
self.do_runf(test_file('core/test_setjmp_noleak.c'), 'ok.')
@with_both_exception_handling
def test_exceptions(self):
self.set_setting('EXCEPTION_DEBUG')
self.maybe_closure()
for support_longjmp in [0, 1]:
self.set_setting('SUPPORT_LONGJMP', support_longjmp)
self.do_run_from_file(test_file('core/test_exceptions.cpp'), test_file('core/test_exceptions_caught.out'))
def test_exceptions_off(self):
for support_longjmp in [0, 1]:
self.set_setting('DISABLE_EXCEPTION_CATCHING')
self.do_run_from_file(test_file('core/test_exceptions.cpp'), test_file('core/test_exceptions_uncaught.out'), assert_returncode=NON_ZERO)
@no_asan('TODO: ASan support in minimal runtime')
def test_exceptions_minimal_runtime(self):
self.set_setting('EXCEPTION_DEBUG')
self.set_setting('EXIT_RUNTIME')
self.maybe_closure()
self.set_setting('MINIMAL_RUNTIME')
self.emcc_args += ['--pre-js', test_file('minimal_runtime_exit_handling.js')]
for support_longjmp in [0, 1]:
self.set_setting('SUPPORT_LONGJMP', support_longjmp)
self.set_setting('DISABLE_EXCEPTION_CATCHING', 0)
self.do_run_from_file(test_file('core/test_exceptions.cpp'), test_file('core/test_exceptions_caught.out'))
self.set_setting('DISABLE_EXCEPTION_CATCHING')
self.do_run_from_file(test_file('core/test_exceptions.cpp'), test_file('core/test_exceptions_uncaught.out'), assert_returncode=NON_ZERO)
@with_both_exception_handling
def test_exceptions_custom(self):
self.set_setting('EXCEPTION_DEBUG')
# needs to flush stdio streams
self.set_setting('EXIT_RUNTIME')
self.maybe_closure()
src = '''
#include <iostream>
class MyException
{
public:
MyException(){ std::cout << "Construct..."; }
MyException( const MyException & ) { std::cout << "Copy..."; }
~MyException(){ std::cout << "Destruct..."; }
};
int function()
{
std::cout << "Throw...";
throw MyException();
}
int function2()
{
return function();
}
int main()
{
try
{
function2();
}
catch (MyException & e)
{
std::cout << "Caught...";
}
try
{
function2();
}
catch (MyException e)
{
std::cout << "Caught...";
}
return 0;
}
'''
self.do_run(src, 'Throw...Construct...Caught...Destruct...Throw...Construct...Copy...Caught...Destruct...Destruct...')
@with_both_exception_handling
def test_exceptions_2(self):
for safe in [0, 1]:
print(safe)
if safe and '-fsanitize=address' in self.emcc_args:
# Can't use safe heap with ASan
continue
self.set_setting('SAFE_HEAP', safe)
self.do_core_test('test_exceptions_2.cpp')
@with_both_exception_handling
def test_exceptions_3(self):
src = r'''
#include <iostream>
#include <stdexcept>
int main(int argc, char **argv)
{
if (argc != 2) {
std::cout << "need an arg" << std::endl;
return 1;
}
int arg = argv[1][0] - '0';
try {
if (arg == 0) throw "a c string";
if (arg == 1) throw std::exception();
if (arg == 2) throw std::runtime_error("Hello");
} catch(const char * ex) {
std::cout << "Caught C string: " << ex << std::endl;
} catch(const std::exception &ex) {
std::cout << "Caught exception: " << ex.what() << std::endl;
} catch(...) {
std::cout << "Caught something else" << std::endl;
}
std::cout << "Done.\n";
}
'''
print('0')
self.do_run(src, 'Caught C string: a c string\nDone.', args=['0'])
print('1')
self.do_run('src.js', 'Caught exception: std::exception\nDone.', args=['1'], no_build=True)
print('2')
self.do_run('src.js', 'Caught exception: Hello\nDone.', args=['2'], no_build=True)
def test_exceptions_allowed(self):
self.set_setting('EXCEPTION_CATCHING_ALLOWED', ["_Z12somefunctionv"])
# otherwise it is inlined and not identified
self.set_setting('INLINING_LIMIT')
self.do_core_test('test_exceptions_allowed.cpp')
size = os.path.getsize('test_exceptions_allowed.js')
if self.is_wasm():
size += os.path.getsize('test_exceptions_allowed.wasm')
shutil.copyfile('test_exceptions_allowed.js', 'orig.js')
# check that an empty allow list works properly (as in, same as exceptions disabled)
src = test_file('core/test_exceptions_allowed.cpp')
empty_output = test_file('core/test_exceptions_allowed_empty.out')
self.set_setting('EXCEPTION_CATCHING_ALLOWED', [])
self.do_run_from_file(src, empty_output, assert_returncode=NON_ZERO)
empty_size = os.path.getsize('test_exceptions_allowed.js')
if self.is_wasm():
empty_size += os.path.getsize('test_exceptions_allowed.wasm')
shutil.copyfile('test_exceptions_allowed.js', 'empty.js')
self.set_setting('EXCEPTION_CATCHING_ALLOWED', ['fake'])
self.do_run_from_file(src, empty_output, assert_returncode=NON_ZERO)
fake_size = os.path.getsize('test_exceptions_allowed.js')
if self.is_wasm():
fake_size += os.path.getsize('test_exceptions_allowed.wasm')
shutil.copyfile('test_exceptions_allowed.js', 'fake.js')
self.clear_setting('EXCEPTION_CATCHING_ALLOWED')
self.do_run_from_file(src, empty_output, assert_returncode=NON_ZERO)
disabled_size = os.path.getsize('test_exceptions_allowed.js')
if self.is_wasm():
disabled_size += os.path.getsize('test_exceptions_allowed.wasm')
shutil.copyfile('test_exceptions_allowed.js', 'disabled.js')
print('size: %d' % size)
print('empty_size: %d' % empty_size)
print('fake_size: %d' % fake_size)
print('disabled_size: %d' % disabled_size)
# empty list acts the same as fully disabled
self.assertEqual(empty_size, disabled_size)
# big change when we disable exception catching of the function
self.assertGreater(size - empty_size, 0.01 * size)
# full disable can remove a little bit more
self.assertLess(disabled_size, fake_size)
def test_exceptions_allowed_2(self):
    # EXCEPTION_CATCHING_ALLOWED naming 'main' directly.
    self.set_setting('EXCEPTION_CATCHING_ALLOWED', ["main"])
    # otherwise it is inlined and not identified
    self.set_setting('INLINING_LIMIT')
    self.do_core_test('test_exceptions_allowed_2.cpp')

    # When 'main' function does not have a signature, its contents will be
    # outlined to '__original_main'. Check if we can handle that case.
    self.emcc_args += ['-DMAIN_NO_SIGNATURE']
    self.do_core_test('test_exceptions_allowed_2.cpp')

def test_exceptions_allowed_uncaught(self):
    # An exception thrown inside an allowed function but never caught should
    # still propagate out and abort the program as usual.
    self.emcc_args += ['-std=c++11']
    self.set_setting('EXCEPTION_CATCHING_ALLOWED', ["_Z4testv"])
    # otherwise it is inlined and not identified
    self.set_setting('INLINING_LIMIT')
    self.do_core_test('test_exceptions_allowed_uncaught.cpp')

def test_exceptions_allowed_misuse(self):
    # Combining EXCEPTION_CATCHING_ALLOWED with DISABLE_EXCEPTION_CATCHING
    # must be rejected by the driver, in every legacy form of the setting.
    self.set_setting('EXCEPTION_CATCHING_ALLOWED', ['foo'])

    # Test old =2 setting for DISABLE_EXCEPTION_CATCHING
    self.set_setting('DISABLE_EXCEPTION_CATCHING', 2)
    err = self.expect_fail([EMCC, test_file('hello_world.c')] + self.get_emcc_args())
    self.assertContained('error: DISABLE_EXCEPTION_CATCHING=X is no longer needed when specifying EXCEPTION_CATCHING_ALLOWED [-Wdeprecated] [-Werror]', err)

    # =0 should also be a warning
    self.set_setting('DISABLE_EXCEPTION_CATCHING', 0)
    err = self.expect_fail([EMCC, test_file('hello_world.c')] + self.get_emcc_args())
    self.assertContained('error: DISABLE_EXCEPTION_CATCHING=X is no longer needed when specifying EXCEPTION_CATCHING_ALLOWED [-Wdeprecated] [-Werror]', err)

    # =1 should be a hard error
    self.set_setting('DISABLE_EXCEPTION_CATCHING', 1)
    err = self.expect_fail([EMCC, test_file('hello_world.c')] + self.get_emcc_args())
    self.assertContained('error: DISABLE_EXCEPTION_CATCHING and EXCEPTION_CATCHING_ALLOWED are mutually exclusive', err)

    # even setting an empty list should trigger the error
    self.set_setting('EXCEPTION_CATCHING_ALLOWED', [])
    err = self.expect_fail([EMCC, test_file('hello_world.c')] + self.get_emcc_args())
    self.assertContained('error: DISABLE_EXCEPTION_CATCHING and EXCEPTION_CATCHING_ALLOWED are mutually exclusive', err)
@with_both_exception_handling
def test_exceptions_uncaught(self):
    # std::uncaught_exception() must report true only while stack unwinding
    # is in progress (e.g. inside a destructor run because of a throw).
    # needs to flush stdio streams
    self.set_setting('EXIT_RUNTIME')
    src = r'''
      #include <stdio.h>
      #include <exception>
      struct X {
        ~X() {
          printf("exception? %s\n", std::uncaught_exception() ? "yes" : "no");
        }
      };
      int main() {
        printf("exception? %s\n", std::uncaught_exception() ? "yes" : "no");
        try {
          X x;
          throw 1;
        } catch(...) {
          printf("exception? %s\n", std::uncaught_exception() ? "yes" : "no");
        }
        printf("exception? %s\n", std::uncaught_exception() ? "yes" : "no");
        return 0;
      }
    '''
    self.do_run(src, 'exception? no\nexception? yes\nexception? no\nexception? no\n')

    # second program: libc++ internally queries uncaught_exception via the
    # ostream sentry destructor; this must not crash or misreport.
    src = r'''
      #include <fstream>
      #include <iostream>
      int main() {
        std::ofstream os("test");
        os << std::unitbuf << "foo"; // trigger a call to std::uncaught_exception from
                                     // std::basic_ostream::sentry::~sentry
        std::cout << "success";
      }
    '''
    self.do_run(src, 'success')

@with_both_exception_handling
def test_exceptions_uncaught_2(self):
    # Rethrowing and re-catching must clear the uncaught-exception state.
    # needs to flush stdio streams
    self.set_setting('EXIT_RUNTIME')
    src = r'''
      #include <iostream>
      #include <exception>
      int main() {
        try {
          throw std::exception();
        } catch(std::exception) {
          try {
            throw;
          } catch(std::exception) {}
        }
        if (std::uncaught_exception())
          std::cout << "ERROR: uncaught_exception still set.";
        else
          std::cout << "OK";
      }
    '''
    self.do_run(src, 'OK\n')
# --- Exception-semantics fixture tests: each compiles a core/ fixture and
# --- compares against its expected output, under both the JS-based and the
# --- native wasm exception-handling modes (via @with_both_exception_handling).
@with_both_exception_handling
def test_exceptions_typed(self):
    # needs to flush stdio streams
    self.set_setting('EXIT_RUNTIME')
    self.clear_setting('SAFE_HEAP')  # Throwing null will cause an ignorable null pointer access.
    self.do_core_test('test_exceptions_typed.cpp')

@with_both_exception_handling
def test_exceptions_virtual_inheritance(self):
    self.do_core_test('test_exceptions_virtual_inheritance.cpp')

@with_both_exception_handling
def test_exceptions_convert(self):
    self.do_core_test('test_exceptions_convert.cpp')

# TODO Make setjmp-longjmp also use Wasm exception handling
@with_both_exception_handling
def test_exceptions_multi(self):
    self.do_core_test('test_exceptions_multi.cpp')

@with_both_exception_handling
def test_exceptions_std(self):
    self.clear_setting('SAFE_HEAP')
    self.do_core_test('test_exceptions_std.cpp')

@with_both_exception_handling
def test_exceptions_alias(self):
    self.do_core_test('test_exceptions_alias.cpp')

@with_both_exception_handling
def test_exceptions_rethrow(self):
    self.do_core_test('test_exceptions_rethrow.cpp')

@with_both_exception_handling
def test_exceptions_uncaught_count(self):
    self.do_core_test('test_exceptions_uncaught_count.cpp')

@with_both_exception_handling
def test_exceptions_resume(self):
    self.set_setting('EXCEPTION_DEBUG')
    self.do_core_test('test_exceptions_resume.cpp')

@with_both_exception_handling
def test_exceptions_destroy_virtual(self):
    self.do_core_test('test_exceptions_destroy_virtual.cpp')

@with_both_exception_handling
def test_exceptions_refcount(self):
    self.do_core_test('test_exceptions_refcount.cpp')

@with_both_exception_handling
def test_exceptions_primary(self):
    self.do_core_test('test_exceptions_primary.cpp')

@with_both_exception_handling
def test_exceptions_simplify_cfg(self):
    self.do_core_test('test_exceptions_simplify_cfg.cpp')

@with_both_exception_handling
def test_exceptions_libcxx(self):
    self.do_core_test('test_exceptions_libcxx.cpp')

@with_both_exception_handling
def test_exceptions_multiple_inherit(self):
    self.do_core_test('test_exceptions_multiple_inherit.cpp')

@with_both_exception_handling
def test_exceptions_multiple_inherit_rethrow(self):
    self.do_core_test('test_exceptions_multiple_inherit_rethrow.cpp')

@with_both_exception_handling
def test_exceptions_rethrow_missing(self):
    # A bare `throw;` with no active exception must terminate abnormally.
    create_file('main.cpp', 'int main() { throw; }')
    self.do_runf('main.cpp', None, assert_returncode=NON_ZERO)
@with_both_exception_handling
def test_bad_typeid(self):
    # typeid(*p) on a null polymorphic pointer must throw std::bad_typeid.
    self.do_run(r'''
      // exception example
      #include <iostream>       // std::cerr
      #include <typeinfo>       // operator typeid
      #include <exception>      // std::exception

      class Polymorphic {virtual void member(){}};

      int main () {
        try
        {
          Polymorphic * pb = 0;
          const std::type_info& ti = typeid(*pb);  // throws a bad_typeid exception
        }
        catch (std::exception& e)
        {
          std::cerr << "exception caught: " << e.what() << '\n';
        }
        return 0;
      }
    ''', 'exception caught: std::bad_typeid')

def test_iostream_ctors(self):
    # iostream stuff must be globally constructed before user global
    # constructors, so iostream works in global constructors
    self.do_run(r'''
      #include <iostream>
      struct A {
        A() { std::cout << "bug"; }
      };
      A a;
      int main() {
        std::cout << "free code" << std::endl;
        return 0;
      }
    ''', 'bugfree code')
def _test_exceptions_longjmp(self, filename):
    """Shared driver for the exceptions+longjmp tests.

    These fixtures mix C++ exceptions with setjmp/longjmp in one program, so
    both SUPPORT_LONGJMP and exception catching must be enabled. The three
    public tests below previously duplicated this body verbatim.
    """
    self.set_setting('SUPPORT_LONGJMP')
    self.set_setting('DISABLE_EXCEPTION_CATCHING', 0)
    self.do_core_test(filename)

def test_exceptions_longjmp1(self):
    self._test_exceptions_longjmp('test_exceptions_longjmp1.cpp')

def test_exceptions_longjmp2(self):
    self._test_exceptions_longjmp('test_exceptions_longjmp2.cpp')

def test_exceptions_longjmp3(self):
    self._test_exceptions_longjmp('test_exceptions_longjmp3.cpp')
# Marked as impure since the WASI reactor modules (modules without main)
# are not yet suppored by the wasm engines we test against.
@also_with_standalone_wasm(impure=True)
def test_ctors_no_main(self):
    # Global constructors must still run when the module has no main().
    self.emcc_args.append('--no-entry')
    self.do_core_test('test_ctors_no_main.cpp')

def test_class(self):
    self.do_core_test('test_class.cpp')

def test_inherit(self):
    self.do_core_test('test_inherit.cpp')

def test_isdigit_l(self):
    # needs to flush stdio streams
    self.set_setting('EXIT_RUNTIME')
    self.do_core_test('test_isdigit_l.cpp')

def test_iswdigit(self):
    # needs to flush stdio streams
    self.set_setting('EXIT_RUNTIME')
    self.do_core_test('test_iswdigit.cpp')

def test_polymorph(self):
    self.do_core_test('test_polymorph.cpp')

def test_complex(self):
    self.do_core_test('test_complex.c')

def test_float_builtins(self):
    # tests wasm_libc_rt
    self.do_core_test('test_float_builtins.c')
@no_asan('SAFE_HEAP cannot be used with ASan')
def test_segfault(self):
    # SAFE_HEAP must turn a virtual call through a null pointer into a clean
    # 'segmentation fault' abort, while a valid object still works normally.
    self.set_setting('SAFE_HEAP')

    for addr in ['get_null()', 'new D2()']:
        print(addr)
        # The %s below is replaced by `addr`: either a null Classey* from JS
        # (EM_JS) or a freshly allocated valid D2 instance.
        src = r'''
          #include <stdio.h>
          #include <emscripten.h>

          struct Classey {
            virtual void doIt() = 0;
          };

          struct D1 : Classey {
            virtual void doIt() { printf("fleefl\n"); }
          };

          struct D2 : Classey {
            virtual void doIt() { printf("marfoosh\n"); }
          };

          EM_JS(Classey*, get_null, (), {
            return 0;
          });

          int main(int argc, char **argv)
          {
            Classey *p = argc == 100 ? new D1() : (Classey*)%s;
            p->doIt();
            return 0;
          }
        ''' % addr
        if 'get_null' in addr:
            self.do_run(src, 'segmentation fault', assert_returncode=NON_ZERO)
        else:
            self.do_run(src, 'marfoosh')
# --- Small language/runtime smoke tests: each compiles a core/ fixture and
# --- compares its output against the matching .out file.
def test_dynamic_cast(self):
    self.do_core_test('test_dynamic_cast.cpp')

def test_dynamic_cast_b(self):
    self.do_core_test('test_dynamic_cast_b.cpp')

def test_dynamic_cast_2(self):
    self.do_core_test('test_dynamic_cast_2.cpp')

def test_funcptr(self):
    self.do_core_test('test_funcptr.c')

def test_mathfuncptr(self):
    self.do_core_test('test_mathfuncptr.c')

def test_funcptrfunc(self):
    self.do_core_test('test_funcptrfunc.c')

def test_funcptr_namecollide(self):
    self.do_core_test('test_funcptr_namecollide.c')

def test_emptyclass(self):
    self.do_core_test('test_emptyclass.cpp')

def test_alloca(self):
    self.do_core_test('test_alloca.c')

def test_rename(self):
    self.do_run_in_out_file_test('stdio/test_rename.c')

def test_remove(self):
    # needs to flush stdio streams
    self.set_setting('EXIT_RUNTIME')
    self.do_run_in_out_file_test('cstdio/test_remove.cpp')

def test_alloca_stack(self):
    self.do_core_test('test_alloca_stack.c')

def test_stack_byval(self):
    self.do_core_test('test_stack_byval.cpp')
def test_stack_varargs(self):
    # in node.js we allocate argv[0] on the stack, which means the length
    # of the program directory influences how much stack we need, and so
    # long random temp dir names can lead to random failures. The stack
    # size was increased here to avoid that.
    self.set_setting('INLINING_LIMIT')
    self.set_setting('TOTAL_STACK', 8 * 1024)

    self.do_core_test('test_stack_varargs.c')

def test_stack_varargs2(self):
    # Heavy varargs use in a loop must not leak or overflow the stack.
    # in node.js we allocate argv[0] on the stack, which means the length
    # of the program directory influences how much stack we need, and so
    # long random temp dir names can lead to random failures. The stack
    # size was increased here to avoid that.
    self.set_setting('TOTAL_STACK', 8 * 1024)
    src = r'''
      #include <stdio.h>
      #include <stdlib.h>

      void func(int i) {
      }
      int main() {
        for (int i = 0; i < 7000; i++) {
          printf("%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d\n",
                 i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i);
        }
        printf("ok!\n");
        return 0;
      }
    '''
    self.do_run(src, 'ok!')

    print('with return')

    src = r'''
      #include <stdio.h>
      #include <stdlib.h>

      int main() {
        for (int i = 0; i < 7000; i++) {
          int j = printf("%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d",
                   i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i);
          printf(" (%d)\n", j);
        }
        printf("ok!\n");
        return 0;
      }
    '''
    self.do_run(src, 'ok!')

    print('with definitely no return')

    src = r'''
      #include <stdio.h>
      #include <stdlib.h>
      #include <stdarg.h>

      void vary(const char *s, ...)
      {
        va_list v;
        va_start(v, s);
        char d[20];
        vsnprintf(d, 20, s, v);
        puts(d);

        // Try it with copying
        va_list tempva;
        va_copy(tempva, v);
        vsnprintf(d, 20, s, tempva);
        puts(d);

        va_end(v);
      }

      int main() {
        for (int i = 0; i < 7000; i++) {
          int j = printf("%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d",
                   i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i);
          printf(" (%d)\n", j);
          vary("*cheez: %d+%d*", 99, 24);
          vary("*albeit*");
        }
        printf("ok!\n");
        return 0;
      }
    '''
    self.do_run(src, 'ok!')
def test_stack_void(self):
    self.emcc_args.append('-Wno-format-extra-args')
    self.set_setting('INLINING_LIMIT')
    self.do_core_test('test_stack_void.c')

def test_life(self):
    # Game-of-life fixture; '2' is the number of generations to simulate.
    self.emcc_args += ['-std=c99']
    self.do_run_in_out_file_test('life.c', args=['2'])

def test_array2(self):
    self.do_core_test('test_array2.c')

def test_array2b(self):
    self.do_core_test('test_array2b.c')

def test_constglobalstructs(self):
    self.do_core_test('test_constglobalstructs.c')

def test_conststructs(self):
    self.do_core_test('test_conststructs.c')

def test_bigarray(self):
    self.do_core_test('test_bigarray.c')

def test_mod_globalstruct(self):
    self.do_core_test('test_mod_globalstruct.c')

def test_sizeof(self):
    self.do_core_test('test_sizeof.cpp')

def test_llvm_used(self):
    self.do_core_test('test_llvm_used.c')

@no_asan('SAFE_HEAP cannot be used with ASan')
def test_set_align(self):
    self.set_setting('SAFE_HEAP')
    self.do_core_test('test_set_align.c')
def test_emscripten_api(self):
    # Exercises the emscripten.h C API; _save_me_aimee is called from JS,
    # so it must be explicitly exported.
    self.set_setting('EXPORTED_FUNCTIONS', ['_main', '_save_me_aimee'])
    self.do_core_test('test_emscripten_api.cpp')

    if '-fsanitize=address' not in self.emcc_args:
        # test EXPORT_ALL (this is not compatible with asan, which doesn't
        # support dynamic linking at all or the LINKING flag)
        self.set_setting('EXPORTED_FUNCTIONS', [])
        self.set_setting('EXPORT_ALL')
        self.set_setting('LINKABLE')
        self.do_core_test('test_emscripten_api.cpp')

def test_emscripten_run_script_string_int(self):
    # emscripten_run_script_string must stringify a numeric JS result.
    src = r'''
      #include <stdio.h>
      #include <emscripten.h>

      int main() {
        const char *str = emscripten_run_script_string("1+1");
        printf("got string: %s\n", str);
        return 0;
      }
    '''
    self.do_run(src, '''got string: 2''')

def test_emscripten_run_script_string_utf8(self):
    # Multi-byte (UTF-8) characters in the JS result must round-trip.
    src = r'''
      #include <stdio.h>
      #include <stdlib.h>
      #include <string.h>
      #include <emscripten.h>

      int main() {
        const char *str = emscripten_run_script_string("'\\u2603 \\u2603 \\u2603 Hello!'");
        printf("length of returned string: %zu. Position of substring 'Hello': %zu\n", strlen(str), strstr(str, "Hello")-str);
        return 0;
      }
    '''
    self.do_run(src, '''length of returned string: 18. Position of substring 'Hello': 12''')

def test_emscripten_run_script_string_null(self):
    # A JS expression evaluating to undefined must yield a NULL C string.
    src = r'''
      #include <stdio.h>
      #include <emscripten.h>

      int main() {
        const char *str = emscripten_run_script_string("void(0)");
        if (str) {
          printf("got string: %s\n", str);
        } else {
          puts("got null");
        }
        return 0;
      }
    '''
    self.do_run(src, 'got null')

def test_emscripten_get_now(self):
    self.banned_js_engines = [config.V8_ENGINE]  # timer limitations in v8 shell
    # needs to flush stdio streams
    self.set_setting('EXIT_RUNTIME')
    self.maybe_closure()
    self.do_runf(test_file('emscripten_get_now.cpp'), 'Timer resolution is good')

def test_emscripten_get_compiler_setting(self):
    # Querying settings at runtime requires RETAIN_COMPILER_SETTINGS.
    src = test_file('core/emscripten_get_compiler_setting.c')
    output = shared.replace_suffix(src, '.out')
    # with assertions, a nice message is shown
    self.set_setting('ASSERTIONS')
    self.do_runf(src, 'You must build with -s RETAIN_COMPILER_SETTINGS=1', assert_returncode=NON_ZERO)
    self.clear_setting('ASSERTIONS')
    self.set_setting('RETAIN_COMPILER_SETTINGS')
    self.do_runf(src, read_file(output).replace('waka', shared.EMSCRIPTEN_VERSION))

def test_emscripten_has_asyncify(self):
    # emscripten_has_asyncify() must reflect the ASYNCIFY build setting.
    src = r'''
      #include <stdio.h>
      #include <emscripten.h>

      int main() {
        printf("%d\n", emscripten_has_asyncify());
        return 0;
      }
    '''
    self.set_setting('ASYNCIFY', 0)
    self.do_run(src, '0')
    self.set_setting('ASYNCIFY')
    self.do_run(src, '1')
# TODO: test only worked in non-fastcomp
def test_inlinejs(self):
    self.skipTest('non-fastcomp is deprecated and fails in 3.5')  # only supports EM_ASM

    self.do_core_test('test_inlinejs.c')

    if self.emcc_args == []:
        # opts will eliminate the comments
        out = read_file('src.js')
        for i in range(1, 5):
            assert ('comment%d' % i) in out

# TODO: test only worked in non-fastcomp
def test_inlinejs2(self):
    self.skipTest('non-fastcomp is deprecated and fails in 3.5')  # only supports EM_ASM

    self.do_core_test('test_inlinejs2.c')

def test_inlinejs3(self):
    if self.is_wasm():
        self.skipTest('wasm requires a proper asm module')

    src = test_file('core/test_inlinejs3.c')
    output = shared.unsuffixed(src) + '.out'

    self.do_core_test('test_inlinejs3.c')

    print('no debugger, check validation')
    # removing emscripten_debugger() must leave a still-valid program
    src = read_file(src).replace('emscripten_debugger();', '')
    self.do_run(src, read_file(output))

def test_inlinejs4(self):
    # An EM_ASM that throws must abort the program with a nonzero code.
    self.do_run(r'''
      #include <emscripten.h>
      #define TO_STRING_INNER(x) #x
      #define TO_STRING(x) TO_STRING_INNER(x)
      #define assert_msg(msg, file, line) EM_ASM( throw 'Assert (' + msg + ') failed in ' + file + ':' + line + '!'; )
      #define assert(expr) { \
        if (!(expr)) { \
          assert_msg(#expr, TO_STRING(__FILE__), TO_STRING(__LINE__)); \
        } \
      }
      int main(int argc, char **argv) {
        assert(argc != 17);
        assert(false);
        return 0;
      }
    ''', 'false', assert_returncode=NON_ZERO)
# --- EM_ASM family: inline-JS macro tests, each run for both C++ and
# --- (via force_c) plain C compilation.
def test_em_asm(self):
    self.do_core_test('test_em_asm.cpp')
    self.emcc_args.append('-std=gnu89')
    self.do_core_test('test_em_asm.cpp', force_c=True)

# Tests various different ways to invoke the EM_ASM(), EM_ASM_INT()
# and EM_ASM_DOUBLE() macros.
def test_em_asm_2(self):
    self.do_core_test('test_em_asm_2.cpp')
    self.emcc_args.append('-std=gnu89')
    self.do_core_test('test_em_asm_2.cpp', force_c=True)

# Tests various different ways to invoke the MAIN_THREAD_EM_ASM(), MAIN_THREAD_EM_ASM_INT() and MAIN_THREAD_EM_ASM_DOUBLE() macros.
# This test is identical to test_em_asm_2, just search-replaces EM_ASM to MAIN_THREAD_EM_ASM on the test file. That way if new
# test cases are added to test_em_asm_2.cpp for EM_ASM, they will also get tested in MAIN_THREAD_EM_ASM form.
@no_asan('Cannot use ASan: test depends exactly on heap size')
def test_main_thread_em_asm(self):
    src = read_file(test_file('core/test_em_asm_2.cpp'))
    create_file('src.cpp', src.replace('EM_ASM', 'MAIN_THREAD_EM_ASM'))

    expected_result = read_file(test_file('core/test_em_asm_2.out'))
    create_file('result.out', expected_result.replace('EM_ASM', 'MAIN_THREAD_EM_ASM'))

    self.do_run_from_file('src.cpp', 'result.out')
    self.do_run_from_file('src.cpp', 'result.out', force_c=True)

def test_main_thread_async_em_asm(self):
    self.do_core_test('test_main_thread_async_em_asm.cpp')
    self.do_core_test('test_main_thread_async_em_asm.cpp', force_c=True)

# Tests MAIN_THREAD_EM_ASM_INT() function call with different signatures.
def test_main_thread_em_asm_signatures(self):
    self.do_core_test('test_em_asm_signatures.cpp', assert_returncode=NON_ZERO)

def test_em_asm_unicode(self):
    self.do_core_test('test_em_asm_unicode.cpp')
    self.do_core_test('test_em_asm_unicode.cpp', force_c=True)

def test_em_asm_types(self):
    self.do_core_test('test_em_asm_types.cpp')
    self.do_core_test('test_em_asm_types.cpp', force_c=True)

def test_em_asm_unused_arguments(self):
    self.do_core_test('test_em_asm_unused_arguments.cpp')

# Verify that EM_ASM macros support getting called with multiple arities.
# Maybe tests will later be joined into larger compilation units?
# Then this must still be compiled separately from other code using EM_ASM
# macros with arities 1-3. Otherwise this may incorrectly report a success.
def test_em_asm_parameter_pack(self):
    self.do_core_test('test_em_asm_parameter_pack.cpp')

def test_em_asm_arguments_side_effects(self):
    self.do_core_test('test_em_asm_arguments_side_effects.cpp')
    self.do_core_test('test_em_asm_arguments_side_effects.cpp', force_c=True)

def test_em_asm_direct(self):
    self.do_core_test('test_em_asm_direct.c')
@parameterized({
    '': ([], False),
    'c': ([], True),
    'linked': (['-s', 'MAIN_MODULE'], False),
    'linked_c': (['-s', 'MAIN_MODULE'], True),
})
def test_em_js(self, args, force_c):
    # EM_JS functions, in both standalone and MAIN_MODULE (dynamic-linking)
    # builds, and for both C and C++ compilation.
    if 'MAIN_MODULE' in args and not self.is_wasm():
        self.skipTest('main module support for non-wasm')
    if '-fsanitize=address' in self.emcc_args:
        self.skipTest('no dynamic library support in asan yet')
    self.emcc_args += args + ['-s', 'EXPORTED_FUNCTIONS=_main,_malloc']

    self.do_core_test('test_em_js.cpp', force_c=force_c)
    self.assertContained("no args returning int", read_file('test_em_js.js'))

def test_runtime_stacksave(self):
    self.do_runf(test_file('core/test_runtime_stacksave.c'), 'success')

# Tests that -s MINIMAL_RUNTIME=1 builds can utilize -s ALLOW_MEMORY_GROWTH=1 option.
def test_minimal_runtime_memorygrowth(self):
    if self.has_changed_setting('ALLOW_MEMORY_GROWTH'):
        self.skipTest('test needs to modify memory growth')
    self.set_setting('MINIMAL_RUNTIME')
    src = test_file('core/test_memorygrowth.c')
    # Fail without memory growth
    self.do_runf(src, 'OOM', assert_returncode=NON_ZERO)
    # Win with it
    self.set_setting('ALLOW_MEMORY_GROWTH')
    self.do_runf(src, '*pre: hello,4.955*\n*hello,4.955*\n*hello,4.955*')
def test_memorygrowth(self):
    # Without ALLOW_MEMORY_GROWTH the fixture OOMs; with it, it succeeds,
    # and on non-wasm -O2 the growth-enabled JS must be less optimized
    # (larger) than the fixed-memory build.
    if self.has_changed_setting('ALLOW_MEMORY_GROWTH'):
        self.skipTest('test needs to modify memory growth')
    if self.maybe_closure():
        # verify NO_DYNAMIC_EXECUTION is compatible with closure
        self.set_setting('DYNAMIC_EXECUTION', 0)
    # With typed arrays in particular, it is dangerous to use more memory than INITIAL_MEMORY,
    # since we then need to enlarge the heap(s).
    src = test_file('core/test_memorygrowth.c')

    # Fail without memory growth
    self.do_runf(src, 'OOM', assert_returncode=NON_ZERO)
    fail = read_file('test_memorygrowth.js')

    # Win with it
    self.set_setting('ALLOW_MEMORY_GROWTH')
    self.do_runf(src, '*pre: hello,4.955*\n*hello,4.955*\n*hello,4.955*')
    win = read_file('test_memorygrowth.js')

    if '-O2' in self.emcc_args and not self.is_wasm():
        # Make sure ALLOW_MEMORY_GROWTH generates different code (should be less optimized)
        possible_starts = ['// EMSCRIPTEN_START_FUNCS', 'var TOTAL_STACK']
        code_start = None
        for s in possible_starts:
            if fail.find(s) >= 0:
                code_start = s
                break
        assert code_start is not None, 'Generated code must contain one of ' + str(possible_starts)

        fail = fail[fail.find(code_start):]
        win = win[win.find(code_start):]
        assert len(fail) < len(win), 'failing code - without memory growth on - is more optimized, and smaller' + str([len(fail), len(win)])

    # Tracing of memory growths should work
    # (SAFE_HEAP would instrument the tracing code itself, leading to recursion)
    if not self.get_setting('SAFE_HEAP'):
        self.emcc_args += ['--tracing']
        self.do_runf(src, '*pre: hello,4.955*\n*hello,4.955*\n*hello,4.955*')
def test_memorygrowth_2(self):
    if self.has_changed_setting('ALLOW_MEMORY_GROWTH'):
        self.skipTest('test needs to modify memory growth')

    # With typed arrays in particular, it is dangerous to use more memory than INITIAL_MEMORY,
    # since we then need to enlarge the heap(s).
    src = test_file('core/test_memorygrowth_2.c')

    # Fail without memory growth
    self.do_runf(src, 'OOM', assert_returncode=NON_ZERO)
    fail = read_file('test_memorygrowth_2.js')

    # Win with it
    self.set_setting('ALLOW_MEMORY_GROWTH')
    self.do_runf(src, '*pre: hello,4.955*\n*hello,4.955*\n*hello,4.955*')
    win = read_file('test_memorygrowth_2.js')

    if '-O2' in self.emcc_args and not self.is_wasm():
        # Make sure ALLOW_MEMORY_GROWTH generates different code (should be less optimized)
        assert len(fail) < len(win), 'failing code - without memory growth on - is more optimized, and smaller' + str([len(fail), len(win)])

def test_memorygrowth_3(self):
    if self.has_changed_setting('ALLOW_MEMORY_GROWTH'):
        self.skipTest('test needs to modify memory growth')

    # checks handling of malloc failure properly
    self.set_setting('ABORTING_MALLOC', 0)
    self.set_setting('SAFE_HEAP')
    self.do_core_test('test_memorygrowth_3.c')

@also_with_standalone_wasm(impure=True)
def test_memorygrowth_MAXIMUM_MEMORY(self):
    if self.has_changed_setting('ALLOW_MEMORY_GROWTH'):
        self.skipTest('test needs to modify memory growth')
    if not self.is_wasm():
        self.skipTest('wasm memory specific test')

    # check that memory growth does not exceed the wasm mem max limit
    self.emcc_args += ['-s', 'ALLOW_MEMORY_GROWTH', '-s', 'INITIAL_MEMORY=64Mb', '-s', 'MAXIMUM_MEMORY=100Mb']
    self.do_core_test('test_memorygrowth_wasm_mem_max.c')

def test_memorygrowth_linear_step(self):
    if self.has_changed_setting('ALLOW_MEMORY_GROWTH'):
        self.skipTest('test needs to modify memory growth')
    if not self.is_wasm():
        self.skipTest('wasm memory specific test')

    # check that memory growth does not exceed the wasm mem max limit and is exactly or one step below the wasm mem max
    self.emcc_args += ['-s', 'ALLOW_MEMORY_GROWTH', '-s', 'TOTAL_STACK=1Mb', '-s', 'INITIAL_MEMORY=64Mb', '-s', 'MAXIMUM_MEMORY=130Mb', '-s', 'MEMORY_GROWTH_LINEAR_STEP=1Mb']
    self.do_core_test('test_memorygrowth_memory_growth_step.c')

def test_memorygrowth_geometric_step(self):
    if self.has_changed_setting('ALLOW_MEMORY_GROWTH'):
        self.skipTest('test needs to modify memory growth')
    if not self.is_wasm():
        self.skipTest('wasm memory specific test')

    # growth by a geometric factor, capped at a fixed increment
    self.emcc_args += ['-s', 'ALLOW_MEMORY_GROWTH', '-s', 'MEMORY_GROWTH_GEOMETRIC_STEP=8.5', '-s', 'MEMORY_GROWTH_GEOMETRIC_CAP=32MB']
    self.do_core_test('test_memorygrowth_geometric_step.c')

def test_memorygrowth_3_force_fail_reallocBuffer(self):
    if self.has_changed_setting('ALLOW_MEMORY_GROWTH'):
        self.skipTest('test needs to modify memory growth')

    # simulate the buffer reallocation itself failing during growth
    self.set_setting('ALLOW_MEMORY_GROWTH')
    self.set_setting('TEST_MEMORY_GROWTH_FAILS')
    self.do_core_test('test_memorygrowth_3.c')
@parameterized({
    'nogrow': ([],),
    'grow': (['-sALLOW_MEMORY_GROWTH', '-sMAXIMUM_MEMORY=18MB'],)
})
@no_asan('requires more memory when growing')
def test_aborting_new(self, args):
    # test that C++ new properly errors if we fail to malloc when growth is
    # enabled, with or without growth
    self.emcc_args += args
    self.do_core_test('test_aborting_new.cpp')

@no_wasm2js('no WebAssembly.Memory()')
@no_asan('ASan alters the memory size')
def test_module_wasm_memory(self):
    # user-supplied WebAssembly.Memory via Module, injected by the pre-js
    self.emcc_args += ['--pre-js', test_file('core/test_module_wasm_memory.js')]
    self.set_setting('IMPORTED_MEMORY')
    self.do_runf(test_file('core/test_module_wasm_memory.c'), 'success')
def test_ssr(self): # struct self-ref
    # Self-referential struct array initializers must get correct layout
    # (the expected output encodes the struct size and member offsets).
    src = '''
      #include <stdio.h>

      // see related things in openjpeg
      typedef struct opj_mqc_state {
        unsigned int qeval;
        int mps;
        struct opj_mqc_state *nmps;
        struct opj_mqc_state *nlps;
      } opj_mqc_state_t;

      static opj_mqc_state_t mqc_states[4] = {
        {0x5600, 0, &mqc_states[2], &mqc_states[3]},
        {0x5602, 1, &mqc_states[3], &mqc_states[2]},
      };

      int main() {
        printf("*%ld*\\n", (long)(mqc_states+1)-(long)mqc_states);
        for (int i = 0; i < 2; i++)
          printf("%d:%d,%d,%ld,%ld\\n", i, mqc_states[i].qeval, mqc_states[i].mps,
                 (long)mqc_states[i].nmps-(long)mqc_states, (long)mqc_states[i].nlps-(long)mqc_states);
        return 0;
      }
    '''
    self.do_run(src, '''*16*\n0:22016,0,32,48\n1:22018,1,48,32\n''')

def test_tinyfuncstr(self):
    self.do_core_test('test_tinyfuncstr.cpp')

def test_llvmswitch(self):
    self.do_core_test('test_llvmswitch.c')

def test_cxx_version(self):
    self.do_core_test('test_cxx_version.cpp')

@no_wasm2js('massive switches can break js engines')
def test_bigswitch(self):
    self.do_runf(test_file('bigswitch.cpp'), '''34962: GL_ARRAY_BUFFER (0x8892)
26214: what?
35040: GL_STREAM_DRAW (0x88E0)
3060: what?
''', args=['34962', '26214', '35040', str(0xbf4)])

@no_wasm2js('massive switches can break js engines')
@is_slow_test
def test_biggerswitch(self):
    # 20000-case switch generated on the fly by gen_large_switchcase.py.
    if not self.is_optimizing():
        self.skipTest('nodejs takes >6GB to compile this if the wasm is not optimized, which OOMs, see https://github.com/emscripten-core/emscripten/issues/7928#issuecomment-458308453')
    if '-Os' in self.emcc_args:
        self.skipTest('hangs in recent upstream clang, see https://bugs.llvm.org/show_bug.cgi?id=43468')
    num_cases = 20000
    switch_case = self.run_process([PYTHON, test_file('gen_large_switchcase.py'), str(num_cases)], stdout=PIPE, stderr=PIPE).stdout
    self.do_run(switch_case, '''58996: 589965899658996
59297: 592975929759297
59598: default
59899: 598995989959899
Success!''')

def test_indirectbr(self):
    # indirect branches; drop -g, which can interfere
    self.emcc_args = [x for x in self.emcc_args if x != '-g']
    self.do_core_test('test_indirectbr.c')

@no_asan('local count too large for VMs')
@no_wasm2js('extremely deep nesting, hits stack limit on some VMs')
def test_indirectbr_many(self):
    self.do_core_test('test_indirectbr_many.c')
def test_pack(self):
    # #pragma pack(1) must produce a packed layout (size 4) while the
    # identical unpacked struct keeps natural alignment (size 6).
    src = '''
      #include <stdio.h>
      #include <string.h>

      #pragma pack(push,1)
      typedef struct header
      {
          unsigned char  id;
          unsigned short colour;
          unsigned char  desc;
      } header;
      #pragma pack(pop)

      typedef struct fatheader
      {
          unsigned char  id;
          unsigned short colour;
          unsigned char  desc;
      } fatheader;

      int main( int argc, const char *argv[] ) {
        header h, *ph = 0;
        fatheader fh, *pfh = 0;
        printf("*%zu,%ld,%ld*\\n", sizeof(header), (long)((long)&h.desc - (long)&h.id), (long)(&ph[1])-(long)(&ph[0]));
        printf("*%zu,%ld,%ld*\\n", sizeof(fatheader), (long)((long)&fh.desc - (long)&fh.id), (long)(&pfh[1])-(long)(&pfh[0]));
        return 0;
      }
    '''
    self.do_run(src, '*4,3,4*\n*6,4,6*')

def test_varargs(self):
    self.do_core_test('test_varargs.c')

def test_varargs_multi(self):
    self.do_core_test('test_varargs_multi.c')

@unittest.skip('clang cannot compile this code with that target yet')
def test_varargs_byval(self):
    # passing structs (including unions and bitfields) by value through `...`
    src = r'''
      #include <stdio.h>
      #include <stdarg.h>

      typedef struct type_a {
        union {
          double f;
          void *p;
          int i;
          short sym;
        } value;
      } type_a;

      enum mrb_vtype {
        MRB_TT_FALSE = 0,   /*   0 */
        MRB_TT_CLASS = 9    /*   9 */
      };

      typedef struct type_b {
        enum mrb_vtype tt:8;
      } type_b;

      void print_type_a(int argc, ...);
      void print_type_b(int argc, ...);

      int main(int argc, char *argv[])
      {
        type_a a;
        type_b b;
        a.value.p = (void*) 0x12345678;
        b.tt = MRB_TT_CLASS;

        printf("The original address of a is: %p\n", a.value.p);
        printf("The original type of b is: %d\n", b.tt);

        print_type_a(1, a);
        print_type_b(1, b);

        return 0;
      }

      void print_type_a(int argc, ...) {
        va_list ap;
        type_a a;

        va_start(ap, argc);
        a = va_arg(ap, type_a);
        va_end(ap);

        printf("The current address of a is: %p\n", a.value.p);
      }

      void print_type_b(int argc, ...) {
        va_list ap;
        type_b b;

        va_start(ap, argc);
        b = va_arg(ap, type_b);
        va_end(ap);

        printf("The current type of b is: %d\n", b.tt);
      }
    '''
    self.do_run(src, '''The original address of a is: 0x12345678
The original type of b is: 9
The current address of a is: 0x12345678
The current type of b is: 9
''')
def test_functionpointer_libfunc_varargs(self):
    self.do_core_test('test_functionpointer_libfunc_varargs.c')

def test_structbyval(self):
    self.set_setting('INLINING_LIMIT')

    # part 1: make sure that normally, passing structs by value works

    src = r'''
      #include <stdio.h>

      struct point
      {
        int x, y;
      };

      void dump(struct point p) {
        p.x++; // should not modify
        p.y++; // anything in the caller!
        printf("dump: %d,%d\n", p.x, p.y);
      }

      void dumpmod(struct point *p) {
        p->x++; // should not modify
        p->y++; // anything in the caller!
        printf("dump: %d,%d\n", p->x, p->y);
      }

      int main( int argc, const char *argv[] ) {
        point p = { 54, 2 };
        printf("pre:  %d,%d\n", p.x, p.y);
        dump(p);
        void (*dp)(point p) = dump; // And, as a function pointer
        dp(p);
        printf("post: %d,%d\n", p.x, p.y);
        dumpmod(&p);
        dumpmod(&p);
        printf("last: %d,%d\n", p.x, p.y);
        return 0;
      }
    '''
    self.do_run(src, 'pre: 54,2\ndump: 55,3\ndump: 55,3\npost: 54,2\ndump: 55,3\ndump: 56,4\nlast: 56,4')
def test_stdlibs(self):
    # safe heap prints a warning that messes up our output.
    self.set_setting('SAFE_HEAP', 0)
    # needs atexit
    self.set_setting('EXIT_RUNTIME')
    self.do_core_test('test_stdlibs.c')

def test_stdbool(self):
    # C99 <stdbool.h> bool/true/false support.
    create_file('test_stdbool.c', r'''
      #include <stdio.h>
      #include <stdbool.h>

      int main() {
        bool x = true;
        bool y = false;
        printf("*%d*\n", x != y);
        return 0;
      }
    ''')

    self.do_runf('test_stdbool.c', '*1*')
def test_strtoll_hex(self):
# tests strtoll for hex strings (0x...)
self.do_core_test('test_strtoll_hex.c')
def test_strtoll_dec(self):
# tests strtoll for decimal strings (0x...)
self.do_core_test('test_strtoll_dec.c')
def test_strtoll_bin(self):
# tests strtoll for binary strings (0x...)
self.do_core_test('test_strtoll_bin.c')
def test_strtoll_oct(self):
# tests strtoll for decimal strings (0x...)
self.do_core_test('test_strtoll_oct.c')
def test_strtol_hex(self):
# tests strtoll for hex strings (0x...)
self.do_core_test('test_strtol_hex.c')
def test_strtol_dec(self):
# tests strtoll for decimal strings (0x...)
self.do_core_test('test_strtol_dec.c')
def test_strtol_bin(self):
# tests strtoll for binary strings (0x...)
self.do_core_test('test_strtol_bin.c')
  def test_strtol_oct(self):
    # tests strtol for octal strings
    self.do_core_test('test_strtol_oct.c')
  @also_with_standalone_wasm()
  def test_atexit(self):
    """Run the core atexit test."""
    # Confirms they are called in the proper reverse order
    if not self.get_setting('STANDALONE_WASM'):
      # STANDALONE_WASM mode always sets EXIT_RUNTIME if main exists
      self.set_setting('EXIT_RUNTIME')
    self.do_core_test('test_atexit.c')
  def test_atexit_threads(self):
    # also tests thread exit (__cxa_thread_atexit)
    self.set_setting('EXIT_RUNTIME')
    self.do_core_test('test_atexit_threads.c')
  @no_asan('test relies on null pointer reads')
  def test_pthread_specific(self):
    """Run the pthread-specific-data (pthread_key/setspecific) test."""
    self.do_run_in_out_file_test('pthread/specific.c')
  def test_pthread_equal(self):
    """Run the pthread_equal test."""
    self.do_run_in_out_file_test('pthread/test_pthread_equal.cpp')
  @node_pthreads
  def test_pthread_dispatch_after_exit(self):
    """Run the dispatch-after-exit pthread test (output may interleave across threads)."""
    self.do_run_in_out_file_test('pthread/test_pthread_dispatch_after_exit.c', interleaved_output=False)
  @node_pthreads
  def test_pthread_atexit(self):
    # Test to ensure threads are still running when atexit-registered functions are called
    self.set_setting('EXIT_RUNTIME')
    self.set_setting('PTHREAD_POOL_SIZE', 1)
    self.do_run_in_out_file_test('pthread/test_pthread_atexit.c')
  @node_pthreads
  def test_pthread_nested_work_queue(self):
    """Run the nested-work-queue pthread test with a pool of one worker."""
    self.set_setting('EXIT_RUNTIME')
    self.set_setting('PTHREAD_POOL_SIZE', 1)
    self.do_run_in_out_file_test('pthread/test_pthread_nested_work_queue.c')
  @node_pthreads
  def test_pthread_thread_local_storage(self):
    """Run the thread-local-storage pthread test (main proxied to a pthread)."""
    self.set_setting('PROXY_TO_PTHREAD')
    self.set_setting('EXIT_RUNTIME')
    self.set_setting('INITIAL_MEMORY', '300mb')
    self.do_run_in_out_file_test('pthread/test_pthread_thread_local_storage.cpp')
  @node_pthreads
  def test_pthread_cleanup(self):
    """Run the pthread cleanup-handler test with a pool of four workers."""
    self.set_setting('EXIT_RUNTIME')
    self.set_setting('PTHREAD_POOL_SIZE', 4)
    self.do_run_in_out_file_test('pthread/test_pthread_cleanup.cpp')
  @node_pthreads
  def test_pthread_setspecific_mainthread(self):
    """Run the setspecific-on-main-thread test in three exit modes (return, exit, pthread_exit)."""
    self.set_setting('EXIT_RUNTIME')
    print('.. return')
    self.do_runf(test_file('pthread/test_pthread_setspecific_mainthread.c'), 'done!', emcc_args=['-DRETURN'])
    print('.. exit')
    self.do_runf(test_file('pthread/test_pthread_setspecific_mainthread.c'), 'done!', emcc_args=['-DEXIT'])
    print('.. pthread_exit')
    self.do_run_in_out_file_test('pthread/test_pthread_setspecific_mainthread.c')
  @node_pthreads
  @no_mac('https://github.com/emscripten-core/emscripten/issues/15014')
  def test_pthread_abort(self):
    """Check that Module.onAbort fires when a proxied pthread program aborts."""
    self.set_setting('PROXY_TO_PTHREAD')
    # Add the onAbort handler at runtime during preRun.  This means that onAbort
    # handler will only be present in the main thread (much like it would if it
    # was passed in by pre-populating the module object on prior to loading).
    self.add_pre_run("Module.onAbort = function() { console.log('onAbort called'); }")
    self.do_run_in_out_file_test('pthread/test_pthread_abort.c', assert_returncode=NON_ZERO)
@no_asan('ASan does not support custom memory allocators')
@no_lsan('LSan does not support custom memory allocators')
@node_pthreads
def test_pthread_emmalloc(self):
self.emcc_args += ['-fno-builtin']
self.set_setting('PROXY_TO_PTHREAD')
self.set_setting('EXIT_RUNTIME')
self.set_setting('ASSERTIONS=2')
self.set_setting('MALLOC', 'emmalloc')
self.do_core_test('test_emmalloc.c')
  def test_tcgetattr(self):
    """Run the termios tcgetattr test; expects 'success'."""
    self.do_runf(test_file('termios/test_tcgetattr.c'), 'success')
  def test_time(self):
    """Run the core time test, then repeat it under different TZ settings."""
    self.do_core_test('test_time.cpp')
    for tz in ['EST+05EDT', 'UTC+0']:
      print('extra tz test:', tz)
      with env_modify({'TZ': tz}):
        # Run the test with different time zone settings if
        # possible. It seems that the TZ environment variable does not
        # work all the time (at least it's not well respected by
        # Node.js on Windows), but it does no harm either.
        self.do_core_test('test_time.cpp')
  def test_timeb(self):
    # tests ftime()/struct timeb (the old comment about reverse-order calls
    # was copy-pasted from test_atexit and did not apply here)
    self.do_core_test('test_timeb.c')
  def test_time_c(self):
    """Run the C-mode core time test."""
    self.do_core_test('test_time_c.c')
  def test_gmtime(self):
    """Run the gmtime core test."""
    self.do_core_test('test_gmtime.c')
  def test_strptime_tm(self):
    """Run the strptime struct-tm core test."""
    self.do_core_test('test_strptime_tm.c')
  def test_strptime_days(self):
    """Run the strptime day-parsing core test."""
    self.do_core_test('test_strptime_days.c')
  def test_strptime_reentrant(self):
    """Run the reentrant strptime core test."""
    # needs to flush stdio streams
    self.set_setting('EXIT_RUNTIME')
    self.do_core_test('test_strptime_reentrant.c')
  def test_strftime(self):
    """Run the strftime core test."""
    self.do_core_test('test_strftime.cpp')
  def test_trickystring(self):
    """Run the tricky-string-literal core test."""
    self.do_core_test('test_trickystring.c')
  def test_statics(self):
    """Run the static-variable core test."""
    self.do_core_test('test_statics.cpp')
  def test_copyop(self):
    # clang generated code is vulnerable to this, as it uses
    # memcpy for assignments, with hardcoded numbers of bytes
    # (llvm-gcc copies items one by one).
    self.do_core_test('test_copyop.cpp')
  def test_memcpy_memcmp(self):
    """Run the memcpy/memcmp core test, comparing output via a sha1 digest."""
    self.banned_js_engines = [config.V8_ENGINE] # Currently broken under V8_ENGINE but not node

    def check(output):
      # hash the (large) output instead of storing it verbatim in the expectation
      output = output.replace('\n \n', '\n') # remove extra node output
      return hashlib.sha1(output.encode('utf-8')).hexdigest()

    self.do_core_test('test_memcpy_memcmp.c', output_nicerizer=check)
  def test_memcpy2(self):
    """Run the second memcpy core test."""
    self.do_core_test('test_memcpy2.c')
  def test_memcpy3(self):
    """Run the third memcpy core test."""
    self.do_core_test('test_memcpy3.c')
  @also_with_standalone_wasm()
  def test_memcpy_alignment(self):
    """Run the memcpy alignment test; expects 'OK.'."""
    self.do_runf(test_file('test_memcpy_alignment.cpp'), 'OK.')
  def test_memset_alignment(self):
    """Run the memset alignment test; expects 'OK.'."""
    self.do_runf(test_file('test_memset_alignment.cpp'), 'OK.')
  def test_memset(self):
    """Run the memset core test."""
    self.do_core_test('test_memset.c')
  def test_getopt(self):
    """Run the getopt core test with short options on the command line."""
    self.do_core_test('test_getopt.c', args=['-t', '12', '-n', 'foobar'])
  def test_getopt_long(self):
    """Run the getopt_long core test with mixed long/short options."""
    self.do_core_test('test_getopt_long.c', args=['--file', 'foobar', '-b'])
  def test_memmove(self):
    """Run the memmove core test."""
    self.do_core_test('test_memmove.c')
  def test_memmove2(self):
    """Run the second memmove core test."""
    self.do_core_test('test_memmove2.c')
  def test_memmove3(self):
    """Run the third memmove core test."""
    self.do_core_test('test_memmove3.c')
  def test_flexarray_struct(self):
    """Run the flexible-array-member struct core test."""
    self.do_core_test('test_flexarray_struct.c')
  def test_bsearch(self):
    """Run the bsearch core test."""
    self.do_core_test('test_bsearch.c')
  def test_stack_overflow(self):
    """Check that a stack overflow is detected and reported with ASSERTIONS=2."""
    self.set_setting('ASSERTIONS', 2)
    self.do_runf(test_file('core/stack_overflow.cpp'), 'stack overflow', assert_returncode=NON_ZERO)
  def test_stackAlloc(self):
    """Run the stackAlloc core test."""
    self.do_core_test('stackAlloc.cpp')
def test_nestedstructs(self):
src = '''
#include <stdio.h>
#include "emscripten.h"
struct base {
int x;
float y;
union {
int a;
float b;
};
char c;
};
struct hashtableentry {
int key;
base data;
};
struct hashset {
typedef hashtableentry entry;
struct chain { entry elem; chain *next; };
// struct chainchunk { chain chains[100]; chainchunk *next; };
};
struct hashtable : hashset {
hashtable() {
base *b = NULL;
entry *e = NULL;
chain *c = NULL;
printf("*%zu,%ld,%ld,%ld,%ld,%ld|%zu,%ld,%ld,%ld,%ld,%ld,%ld,%ld|%zu,%ld,%ld,%ld,%ld,%ld,%ld,%ld,%ld,%ld*\\n",
sizeof(base),
long(&(b->x)), long(&(b->y)), long(&(b->a)), long(&(b->b)), long(&(b->c)),
sizeof(hashtableentry),
long(&(e->key)), long(&(e->data)), long(&(e->data.x)), long(&(e->data.y)), long(&(e->data.a)), long(&(e->data.b)), long(&(e->data.c)),
sizeof(hashset::chain),
long(&(c->elem)), long(&(c->next)), long(&(c->elem.key)), long(&(c->elem.data)), long(&(c->elem.data.x)), long(&(c->elem.data.y)), long(&(c->elem.data.a)), long(&(c->elem.data.b)), long(&(c->elem.data.c))
);
}
};
struct B { char buffer[62]; int last; char laster; char laster2; };
struct Bits {
unsigned short A : 1;
unsigned short B : 1;
unsigned short C : 1;
unsigned short D : 1;
unsigned short x1 : 1;
unsigned short x2 : 1;
unsigned short x3 : 1;
unsigned short x4 : 1;
};
int main() {
hashtable t;
// Part 2 - the char[] should be compressed, BUT have a padding space at the end so the next
// one is aligned properly. Also handle char; char; etc. properly.
B *b = NULL;
printf("*%ld,%ld,%ld,%ld,%ld,%ld,%ld,%ld,%zu*\\n", long(b), long(&(b->buffer)), long(&(b->buffer[0])), long(&(b->buffer[1])), long(&(b->buffer[2])),
long(&(b->last)), long(&(b->laster)), long(&(b->laster2)), sizeof(B));
// Part 3 - bitfields, and small structures
Bits *b2 = NULL;
printf("*%zu*\\n", sizeof(Bits));
return 0;
}
'''
# Bloated memory; same layout as C/C++
self.do_run(src, '*16,0,4,8,8,12|20,0,4,4,8,12,12,16|24,0,20,0,4,4,8,12,12,16*\n*0,0,0,1,2,64,68,69,72*\n*2*')
  def prep_dlfcn_main(self):
    """Switch the build settings from side-module mode to main-module mode for dlfcn tests."""
    self.set_setting('MAIN_MODULE')
    self.set_setting('NODERAWFS')
    self.clear_setting('SIDE_MODULE')
  def build_dlfcn_lib(self, filename):
    """Build `filename` as a SIDE_MODULE and move the result to 'liblib.so'."""
    self.clear_setting('MAIN_MODULE')
    self.set_setting('SIDE_MODULE')
    outfile = self.build(filename, js_outfile=not self.is_wasm())
    shutil.move(outfile, 'liblib.so')
  @needs_dylink
  def test_dlfcn_missing(self):
    """Check that dlopen of a non-existent library returns NULL and sets a useful dlerror."""
    self.set_setting('MAIN_MODULE')
    self.set_setting('ASSERTIONS')
    src = r'''
      #include <dlfcn.h>
      #include <stdio.h>
      #include <assert.h>

      int main() {
        void* lib_handle = dlopen("libfoo.so", RTLD_NOW);
        assert(!lib_handle);
        printf("error: %s\n", dlerror());
        return 0;
      }
      '''
    self.do_run(src, "error: Could not load dynamic lib: libfoo.so\nError: ENOENT: no such file or directory, open 'libfoo.so'")
@needs_dylink
def test_dlfcn_basic(self):
create_file('liblib.cpp', '''
#include <cstdio>
class Foo {
public:
Foo() {
puts("Constructing lib object.");
}
};
Foo global;
''')
self.build_dlfcn_lib('liblib.cpp')
self.prep_dlfcn_main()
src = '''
#include <cstdio>
#include <dlfcn.h>
class Bar {
public:
Bar() {
puts("Constructing main object.");
}
};
Bar global;
int main() {
dlopen("liblib.so", RTLD_NOW);
return 0;
}
'''
self.do_run(src, 'Constructing main object.\nConstructing lib object.\n')
@needs_dylink
def test_dlfcn_i64(self):
create_file('liblib.c', '''
#include <inttypes.h>
int64_t foo(int x) {
return (long long)x / (long long)1234;
}
''')
self.build_dlfcn_lib('liblib.c')
self.prep_dlfcn_main()
src = r'''
#include <inttypes.h>
#include <stdio.h>
#include <stdlib.h>
#include <dlfcn.h>
typedef int64_t (*int64func)(int);
int main() {
void *lib_handle = dlopen("liblib.so", RTLD_NOW);
if (!lib_handle) {
puts(dlerror());
abort();
}
printf("dll handle: %p\n", lib_handle);
int64func x = (int64func)dlsym(lib_handle, "foo");
printf("foo func handle: %p\n", x);
if (!x) {
printf("dlsym failed: %s\n", dlerror());
return 1;
}
printf("|%lld|\n", x(81234567));
return 0;
}
'''
self.do_run(src, '|65830|')
@needs_dylink
@disabled('EM_ASM in not yet supported in SIDE_MODULE')
def test_dlfcn_em_asm(self):
create_file('liblib.cpp', '''
#include <emscripten.h>
class Foo {
public:
Foo() {
EM_ASM( out("Constructing lib object.") );
}
};
Foo global;
''')
self.build_dlfcn_lib('liblib.cpp')
self.prep_dlfcn_main()
src = '''
#include <emscripten.h>
#include <dlfcn.h>
class Bar {
public:
Bar() {
EM_ASM( out("Constructing main object.") );
}
};
Bar global;
int main() {
dlopen("liblib.so", RTLD_NOW);
EM_ASM( out("All done.") );
return 0;
}
'''
self.do_run(src, 'Constructing main object.\nConstructing lib object.\nAll done.\n')
@needs_dylink
def test_dlfcn_qsort(self):
self.set_setting('EXPORTED_FUNCTIONS', ['_get_cmp'])
create_file('liblib.cpp', '''
int lib_cmp(const void* left, const void* right) {
const int* a = (const int*) left;
const int* b = (const int*) right;
if(*a > *b) return 1;
else if(*a == *b) return 0;
else return -1;
}
typedef int (*CMP_TYPE)(const void*, const void*);
extern "C" CMP_TYPE get_cmp() {
return lib_cmp;
}
''')
self.build_dlfcn_lib('liblib.cpp')
self.prep_dlfcn_main()
self.set_setting('EXPORTED_FUNCTIONS', ['_main', '_malloc'])
src = '''
#include <stdio.h>
#include <stdlib.h>
#include <dlfcn.h>
typedef int (*CMP_TYPE)(const void*, const void*);
int main_cmp(const void* left, const void* right) {
const int* a = (const int*) left;
const int* b = (const int*) right;
if(*a < *b) return 1;
else if(*a == *b) return 0;
else return -1;
}
int main() {
void* lib_handle;
CMP_TYPE (*getter_ptr)();
CMP_TYPE lib_cmp_ptr;
int arr[5] = {4, 2, 5, 1, 3};
qsort((void*)arr, 5, sizeof(int), main_cmp);
printf("Sort with main comparison: ");
for (int i = 0; i < 5; i++) {
printf("%d ", arr[i]);
}
printf("\\n");
lib_handle = dlopen("liblib.so", RTLD_NOW);
if (lib_handle == NULL) {
printf("Could not load lib.\\n");
return 1;
}
getter_ptr = (CMP_TYPE (*)()) dlsym(lib_handle, "get_cmp");
if (getter_ptr == NULL) {
printf("Could not find func.\\n");
return 1;
}
lib_cmp_ptr = getter_ptr();
qsort((void*)arr, 5, sizeof(int), lib_cmp_ptr);
printf("Sort with lib comparison: ");
for (int i = 0; i < 5; i++) {
printf("%d ", arr[i]);
}
printf("\\n");
return 0;
}
'''
self.do_run(src, 'Sort with main comparison: 5 4 3 2 1 *Sort with lib comparison: 1 2 3 4 5 *',
output_nicerizer=lambda x: x.replace('\n', '*'))
@needs_dylink
def test_dlfcn_data_and_fptr(self):
# Failing under v8 since: https://chromium-review.googlesource.com/712595
if self.is_wasm():
self.banned_js_engines = [config.V8_ENGINE]
create_file('liblib.cpp', r'''
#include <stdio.h>
int theglobal = 42;
extern void parent_func(); // a function that is defined in the parent
int* lib_get_global_addr() {
return &theglobal;
}
void lib_fptr() {
printf("Second calling lib_fptr from main.\n");
parent_func();
// call it also through a pointer, to check indexizing
void (*p_f)();
p_f = parent_func;
p_f();
}
extern "C" void (*func(int x, void(*fptr)()))() {
printf("In func: %d\n", x);
fptr();
return lib_fptr;
}
''')
self.set_setting('EXPORTED_FUNCTIONS', ['_func'])
self.build_dlfcn_lib('liblib.cpp')
self.prep_dlfcn_main()
src = r'''
#include <stdio.h>
#include <dlfcn.h>
#include <emscripten.h>
typedef void (*FUNCTYPE(int, void(*)()))();
FUNCTYPE func;
void EMSCRIPTEN_KEEPALIVE parent_func() {
printf("parent_func called from child\n");
}
void main_fptr() {
printf("First calling main_fptr from lib.\n");
}
int main() {
void* lib_handle;
FUNCTYPE* func_fptr;
// Test basic lib loading.
lib_handle = dlopen("liblib.so", RTLD_NOW);
if (lib_handle == NULL) {
printf("Could not load lib.\n");
return 1;
}
// Test looked up function.
func_fptr = (FUNCTYPE*) dlsym(lib_handle, "func");
// Load twice to test cache.
func_fptr = (FUNCTYPE*) dlsym(lib_handle, "func");
if (func_fptr == NULL) {
printf("Could not find func.\n");
return 1;
}
// Test passing function pointers across module bounds.
void (*fptr)() = func_fptr(13, main_fptr);
fptr();
// Test global data.
int* globaladdr = (int*) dlsym(lib_handle, "theglobal");
if (globaladdr == NULL) {
printf("Could not find global.\n");
return 1;
}
printf("Var: %d\n", *globaladdr);
return 0;
}
'''
self.set_setting('EXPORTED_FUNCTIONS', ['_main'])
self.do_run(src, '''\
In func: 13
First calling main_fptr from lib.
Second calling lib_fptr from main.
parent_func called from child
parent_func called from child
Var: 42
''')
@needs_dylink
def test_dlfcn_varargs(self):
# this test is not actually valid - it fails natively. the child should fail
# to be loaded, not load and successfully see the parent print_ints func
create_file('liblib.cpp', r'''
void print_ints(int n, ...);
extern "C" void func() {
print_ints(2, 13, 42);
}
''')
self.set_setting('EXPORTED_FUNCTIONS', ['_func'])
self.build_dlfcn_lib('liblib.cpp')
self.prep_dlfcn_main()
src = r'''
#include <stdarg.h>
#include <stdio.h>
#include <dlfcn.h>
#include <assert.h>
void print_ints(int n, ...) {
va_list args;
va_start(args, n);
for (int i = 0; i < n; i++) {
printf("%d\n", va_arg(args, int));
}
va_end(args);
}
int main() {
void* lib_handle;
void (*fptr)();
print_ints(2, 100, 200);
lib_handle = dlopen("liblib.so", RTLD_NOW);
assert(lib_handle);
fptr = (void (*)())dlsym(lib_handle, "func");
fptr();
return 0;
}
'''
self.set_setting('EXPORTED_FUNCTIONS', ['_main'])
self.do_run(src, '100\n200\n13\n42\n')
@needs_dylink
def test_dlfcn_alignment_and_zeroing(self):
self.set_setting('INITIAL_MEMORY', '16mb')
create_file('liblib.c', r'''
int prezero = 0;
__attribute__((aligned(1024))) int superAligned = 12345;
int postzero = 0;
''')
self.build_dlfcn_lib('liblib.c')
for i in range(10):
curr = '%d.so' % i
shutil.copyfile('liblib.so', curr)
self.prep_dlfcn_main()
self.set_setting('INITIAL_MEMORY', '128mb')
create_file('src.c', r'''
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <dlfcn.h>
#include <assert.h>
#include <emscripten.h>
int main() {
printf("'prepare' memory with non-zero inited stuff\n");
int num = 120 * 1024 * 1024; // total is 128; we'll use 5*5 = 25 at least, so allocate pretty much all of it
void* mem = malloc(num);
assert(mem);
printf("setting this range to non-zero: %ld - %ld\n", (long)mem, ((long)mem) + num);
memset(mem, 1, num);
EM_ASM({
var value = HEAP8[64*1024*1024];
out('verify middle of memory is non-zero: ' + value);
assert(value === 1);
});
free(mem);
for (int i = 0; i < 10; i++) {
char curr[] = "?.so";
curr[0] = '0' + i;
printf("loading %s\n", curr);
void* lib_handle = dlopen(curr, RTLD_NOW);
if (!lib_handle) {
puts(dlerror());
assert(0);
}
printf("getting superAligned\n");
int* superAligned = (int*)dlsym(lib_handle, "superAligned");
assert(superAligned);
assert(((long)superAligned) % 1024 == 0); // alignment
printf("checking value of superAligned, at %p\n", superAligned);
assert(*superAligned == 12345); // value
printf("getting prezero\n");
int* prezero = (int*)dlsym(lib_handle, "prezero");
assert(prezero);
printf("checking value of prezero, at %p\n", prezero);
assert(*prezero == 0);
*prezero = 1;
assert(*prezero != 0);
printf("getting postzero\n");
int* postzero = (int*)dlsym(lib_handle, "postzero");
printf("checking value of postzero, at %p\n", postzero);
assert(postzero);
printf("checking value of postzero\n");
assert(*postzero == 0);
*postzero = 1;
assert(*postzero != 0);
}
printf("success.\n");
return 0;
}
''')
self.do_runf('src.c', 'success.\n')
  @needs_dylink
  def test_dlfcn_self(self):
    """Run test_dlfcn_self.c and sanity-check the number of data (global) exports in the wasm."""
    self.set_setting('MAIN_MODULE')
    self.set_setting('EXPORT_ALL')

    def get_data_export_count(wasm):
      # count '(export ...)' lines that export a wasm global, i.e. data exports
      wat = self.get_wasm_text(wasm)
      lines = wat.splitlines()
      exports = [l for l in lines if l.strip().startswith('(export ')]
      data_exports = [l for l in exports if '(global ' in l]
      return len(data_exports)

    self.do_core_test('test_dlfcn_self.c')
    export_count = get_data_export_count('test_dlfcn_self.wasm')
    # ensure there aren't too many globals; we don't want unnamed_addr
    self.assertGreater(export_count, 20)
    self.assertLess(export_count, 56)
@needs_dylink
def test_dlfcn_unique_sig(self):
create_file('liblib.c', r'''
#include <stdio.h>
int myfunc(int a, int b, int c, int d, int e, int f, int g, int h, int i, int j, int k, int l, int m) {
return 13;
}
''')
self.set_setting('EXPORTED_FUNCTIONS', ['_myfunc'])
self.build_dlfcn_lib('liblib.c')
self.prep_dlfcn_main()
create_file('main.c', r'''
#include <assert.h>
#include <stdio.h>
#include <dlfcn.h>
typedef int (*FUNCTYPE)(int, int, int, int, int, int, int, int, int, int, int, int, int);
int main() {
void *lib_handle;
FUNCTYPE func_ptr;
lib_handle = dlopen("liblib.so", RTLD_NOW);
assert(lib_handle != NULL);
func_ptr = (FUNCTYPE)dlsym(lib_handle, "myfunc");
assert(func_ptr != NULL);
assert(func_ptr(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0) == 13);
puts("success");
return 0;
}
''')
self.set_setting('EXPORTED_FUNCTIONS', ['_main', '_malloc'])
self.do_runf('main.c', 'success')
@needs_dylink
def test_dlfcn_info(self):
create_file('liblib.c', r'''
#include <stdio.h>
int myfunc(int a, int b, int c, int d, int e, int f, int g, int h, int i, int j, int k, int l, int m) {
return 13;
}
''')
self.set_setting('EXPORTED_FUNCTIONS', ['_myfunc'])
self.build_dlfcn_lib('liblib.c')
self.prep_dlfcn_main()
create_file('main.c', '''
#include <assert.h>
#include <stdio.h>
#include <string.h>
#include <dlfcn.h>
typedef int (*FUNCTYPE)(int, int, int, int, int, int, int, int, int, int, int, int, int);
int main() {
void *lib_handle;
FUNCTYPE func_ptr;
lib_handle = dlopen("liblib.so", RTLD_NOW);
assert(lib_handle != NULL);
func_ptr = (FUNCTYPE)dlsym(lib_handle, "myfunc");
assert(func_ptr != NULL);
assert(func_ptr(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0) == 13);
/* Verify that we don't corrupt func_ptr when calling dladdr. */
Dl_info info;
memset(&info, 0, sizeof(info));
dladdr(func_ptr, &info);
assert(func_ptr != NULL);
assert(func_ptr(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0) == 13);
/* Verify something useful lives in info. */
assert(info.dli_fname != NULL);
assert(info.dli_fbase == NULL);
assert(info.dli_sname == NULL);
assert(info.dli_saddr == NULL);
puts("success");
return 0;
}
''')
self.set_setting('EXPORTED_FUNCTIONS', ['_main', '_malloc'])
self.do_runf('main.c', 'success')
@needs_dylink
def test_dlfcn_stacks(self):
create_file('liblib.c', r'''
#include <assert.h>
#include <stdio.h>
#include <string.h>
int myfunc(const char *input) {
char bigstack[1024] = { 0 };
// make sure we didn't just trample the stack!
assert(!strcmp(input, "foobar"));
snprintf(bigstack, sizeof(bigstack), "%s", input);
return strlen(bigstack);
}
''')
self.set_setting('EXPORTED_FUNCTIONS', ['_myfunc'])
self.build_dlfcn_lib('liblib.c')
self.prep_dlfcn_main()
create_file('main.c', '''
#include <assert.h>
#include <stdio.h>
#include <dlfcn.h>
#include <string.h>
typedef int (*FUNCTYPE)(const char *);
int main() {
void *lib_handle;
FUNCTYPE func_ptr;
char str[128];
snprintf(str, sizeof(str), "foobar");
// HACK: Use strcmp in the main executable so that it doesn't get optimized out and the dynamic library
// is able to use it.
assert(!strcmp(str, "foobar"));
lib_handle = dlopen("liblib.so", RTLD_NOW);
assert(lib_handle != NULL);
func_ptr = (FUNCTYPE)dlsym(lib_handle, "myfunc");
assert(func_ptr != NULL);
assert(func_ptr(str) == 6);
puts("success");
return 0;
}
''')
self.set_setting('EXPORTED_FUNCTIONS', ['_main', '_malloc', '_strcmp'])
self.do_runf('main.c', 'success')
@needs_dylink
def test_dlfcn_funcs(self):
create_file('liblib.c', r'''
#include <assert.h>
#include <stdio.h>
#include <string.h>
typedef void (*voidfunc)();
typedef void (*intfunc)(int);
void callvoid(voidfunc f) { f(); }
void callint(voidfunc f, int x) { f(x); }
void void_0() { printf("void 0\n"); }
void void_1() { printf("void 1\n"); }
voidfunc getvoid(int i) {
switch(i) {
case 0: return void_0;
case 1: return void_1;
default: return NULL;
}
}
void int_0(int x) { printf("int 0 %d\n", x); }
void int_1(int x) { printf("int 1 %d\n", x); }
intfunc getint(int i) {
switch(i) {
case 0: return int_0;
case 1: return int_1;
default: return NULL;
}
}
''')
self.set_setting('EXPORTED_FUNCTIONS', ['_callvoid', '_callint', '_getvoid', '_getint'])
self.build_dlfcn_lib('liblib.c')
self.prep_dlfcn_main()
create_file('main.c', r'''
#include <assert.h>
#include <stdio.h>
#include <dlfcn.h>
typedef void (*voidfunc)();
typedef void (*intfunc)(int);
typedef void (*voidcaller)(voidfunc);
typedef void (*intcaller)(intfunc, int);
typedef voidfunc (*voidgetter)(int);
typedef intfunc (*intgetter)(int);
void void_main() { printf("void_main.\n"); }
void int_main(int x) { printf("int_main %d\n", x); }
int main() {
printf("go\n");
void *lib_handle;
lib_handle = dlopen("liblib.so", RTLD_NOW);
assert(lib_handle != NULL);
voidcaller callvoid = (voidcaller)dlsym(lib_handle, "callvoid");
assert(callvoid != NULL);
callvoid(void_main);
intcaller callint = (intcaller)dlsym(lib_handle, "callint");
assert(callint != NULL);
callint(int_main, 201);
voidgetter getvoid = (voidgetter)dlsym(lib_handle, "getvoid");
assert(getvoid != NULL);
callvoid(getvoid(0));
callvoid(getvoid(1));
intgetter getint = (intgetter)dlsym(lib_handle, "getint");
assert(getint != NULL);
callint(getint(0), 54);
callint(getint(1), 9000);
assert(getint(1000) == NULL);
puts("ok");
return 0;
}
''')
self.set_setting('EXPORTED_FUNCTIONS', ['_main', '_malloc'])
self.do_runf('main.c', '''go
void_main.
int_main 201
void 0
void 1
int 0 54
int 1 9000
ok
''')
@needs_dylink
def test_dlfcn_mallocs(self):
# will be exhausted without functional malloc/free
self.set_setting('INITIAL_MEMORY', '64mb')
create_file('liblib.c', r'''
#include <assert.h>
#include <stdio.h>
#include <string.h>
#include <stdlib.h>
void *mallocproxy(int n) { return malloc(n); }
void freeproxy(void *p) { free(p); }
''')
self.set_setting('EXPORTED_FUNCTIONS', ['_mallocproxy', '_freeproxy'])
self.build_dlfcn_lib('liblib.c')
self.prep_dlfcn_main()
self.set_setting('EXPORTED_FUNCTIONS', ['_main', '_malloc', '_free'])
self.do_runf(test_file('dlmalloc_proxy.c'), '*294,153*')
@needs_dylink
def test_dlfcn_longjmp(self):
create_file('liblib.c', r'''
#include <setjmp.h>
#include <stdio.h>
void jumpy(jmp_buf buf) {
static int i = 0;
i++;
if (i == 10) longjmp(buf, i);
printf("pre %d\n", i);
}
''')
self.build_dlfcn_lib('liblib.c')
self.prep_dlfcn_main()
create_file('main.c', r'''
#include <assert.h>
#include <stdio.h>
#include <dlfcn.h>
#include <setjmp.h>
typedef void (*jumpfunc)(jmp_buf);
int main() {
printf("go!\n");
void *lib_handle;
lib_handle = dlopen("liblib.so", RTLD_NOW);
assert(lib_handle != NULL);
jumpfunc jumpy = (jumpfunc)dlsym(lib_handle, "jumpy");
assert(jumpy);
jmp_buf buf;
int jmpval = setjmp(buf);
if (jmpval == 0) {
while (1) jumpy(buf);
} else {
printf("out!\n");
}
return 0;
}
''')
self.set_setting('EXPORTED_FUNCTIONS', ['_main', '_malloc', '_free'])
self.do_runf('main.c', '''go!
pre 1
pre 2
pre 3
pre 4
pre 5
pre 6
pre 7
pre 8
pre 9
out!
''', force_c=True)
# TODO: make this work. need to forward tempRet0 across modules
# TODO Enable @with_both_exception_handling (the test is not working now)
@needs_dylink
def zzztest_dlfcn_exceptions(self):
self.set_setting('DISABLE_EXCEPTION_CATCHING', 0)
create_file('liblib.cpp', r'''
extern "C" {
int ok() {
return 65;
}
int fail() {
throw 123;
}
}
''')
self.build_dlfcn_lib('liblib.cpp')
self.prep_dlfcn_main()
src = r'''
#include <assert.h>
#include <stdio.h>
#include <dlfcn.h>
typedef int (*intfunc)();
int main() {
printf("go!\n");
void *lib_handle;
lib_handle = dlopen("liblib.so", RTLD_NOW);
assert(lib_handle != NULL);
intfunc okk = (intfunc)dlsym(lib_handle, "ok");
intfunc faill = (intfunc)dlsym(lib_handle, "fail");
assert(okk && faill);
try {
printf("ok: %d\n", okk());
} catch(...) {
printf("wha\n");
}
try {
printf("fail: %d\n", faill());
} catch(int x) {
printf("int %d\n", x);
}
try {
printf("fail: %d\n", faill());
} catch(double x) {
printf("caught %f\n", x);
}
return 0;
}
'''
self.set_setting('EXPORTED_FUNCTIONS', ['_main', '_malloc', '_free'])
self.do_run(src, '''go!
ok: 65
int 123
ok
''')
@needs_dylink
def test_dlfcn_handle_alloc(self):
# verify that dlopen does not allocate already used handles
dirname = self.get_dir()
def indir(name):
return os.path.join(dirname, name)
create_file('a.cpp', r'''
#include <stdio.h>
static struct a {
a() {
puts("a: loaded");
}
} _;
''')
create_file('b.cpp', r'''
#include <stdio.h>
static struct b {
b() {
puts("b: loaded");
}
} _;
''')
self.build_dlfcn_lib('a.cpp')
shutil.move(indir('liblib.so'), indir('liba.so'))
self.build_dlfcn_lib('b.cpp')
shutil.move(indir('liblib.so'), indir('libb.so'))
self.set_setting('MAIN_MODULE')
self.set_setting('NODERAWFS')
self.clear_setting('SIDE_MODULE')
create_file('main.c', r'''
#include <dlfcn.h>
#include <assert.h>
#include <stddef.h>
int main() {
void *liba, *libb, *liba2, *libb2;
int err;
liba = dlopen("liba.so", RTLD_NOW);
assert(liba != NULL);
libb = dlopen("libb.so", RTLD_NOW);
assert(libb != NULL);
// Test that opening libb a second times gives the same handle
libb2 = dlopen("libb.so", RTLD_NOW);
assert(libb == libb2);
err = dlclose(liba);
assert(!err);
liba2 = dlopen("liba.so", RTLD_NOW);
assert(liba2 != libb);
return 0;
}
''')
self.do_runf('main.c', 'a: loaded\nb: loaded\n')
@needs_dylink
@needs_non_trapping_float_to_int
def test_dlfcn_feature_in_lib(self):
self.emcc_args.append('-mnontrapping-fptoint')
create_file('liblib.cpp', r'''
extern "C" int magic(float x) {
return __builtin_wasm_trunc_saturate_s_i32_f32(x);
}
''')
self.build_dlfcn_lib('liblib.cpp')
self.prep_dlfcn_main()
src = r'''
#include <dlfcn.h>
#include <stdio.h>
#include <stdlib.h>
typedef int (*fi)(float);
int main() {
void *lib_handle = dlopen("liblib.so", RTLD_NOW);
if (!lib_handle) {
puts(dlerror());
abort();
}
fi x = (fi)dlsym(lib_handle, "magic");
if (!x) {
puts(dlerror());
abort();
}
printf("float: %d.\n", x(42.99));
return 0;
}
'''
self.do_run(src, 'float: 42.\n')
  def dylink_test(self, main, side, expected=None, header=None, force_c=False,
                  main_module=2, **kwargs):
    """Like dylink_testf, but `main`/`side` are source strings (written to disc here).

    `header`, if given, is written to 'header.h' so both sources can include it.
    `force_c` selects .c filenames (and C compilation) instead of .cpp.
    """
    # Same as dylink_testf but take source code in string form
    if not isinstance(side, list):
      side_file = 'liblib.cpp' if not force_c else 'liblib.c'
      create_file(side_file, side)
      side = side_file

    if not isinstance(main, list):
      main_file = 'main.cpp' if not force_c else 'main.c'
      create_file(main_file, main)
      main = main_file

    if header:
      create_file('header.h', header)

    return self.dylink_testf(main, side, expected, force_c, main_module=main_module, **kwargs)
def dylink_testf(self, main, side=None, expected=None, force_c=False, main_emcc_args=[],
main_module=2,
so_name='liblib.so',
need_reverse=True, **kwargs):
self.maybe_closure()
# Same as dylink_test but takes source code as filenames on disc.
old_args = self.emcc_args.copy()
if not expected:
outfile = shared.replace_suffix(main, '.out')
expected = read_file(outfile)
if not side:
side, ext = os.path.splitext(main)
side += '_side' + ext
# side settings
self.clear_setting('MAIN_MODULE')
self.set_setting('SIDE_MODULE')
side_suffix = 'wasm' if self.is_wasm() else 'js'
if isinstance(side, list):
out_file = 'liblib.' + side_suffix
# side is just a library
self.run_process([EMCC] + side + self.get_emcc_args() + ['-o', out_file])
else:
out_file = self.build(side, js_outfile=(side_suffix == 'js'))
shutil.move(out_file, so_name)
# main settings
self.set_setting('MAIN_MODULE', main_module)
self.clear_setting('SIDE_MODULE')
self.emcc_args += main_emcc_args
self.emcc_args.append(so_name)
if force_c:
self.emcc_args.append('-nostdlib++')
if isinstance(main, list):
# main is just a library
try_delete('main.js')
self.run_process([EMCC] + main + self.get_emcc_args() + ['-o', 'main.js'])
self.do_run('main.js', expected, no_build=True, **kwargs)
else:
self.do_runf(main, expected, force_c=force_c, **kwargs)
self.emcc_args = old_args
if need_reverse:
print('flip')
# Test the reverse as well. There we flip the role of the side module and main module.
# - We add --no-entry since the side module doesn't have a `main`
self.dylink_testf(side, main, expected, force_c, main_emcc_args + ['--no-entry'],
need_reverse=False, **kwargs)
  def do_basic_dylink_test(self, **kwargs):
    """Run a minimal main/side dylink pair: side provides sidey(), main prints its result."""
    self.dylink_test(r'''
      #include <stdio.h>
      #include "header.h"

      int main() {
        printf("other says %d.\n", sidey());
        return 0;
      }
    ''', '''
      #include "header.h"

      int sidey() {
        return 11;
      }
    ''', 'other says 11.', 'int sidey();', force_c=True, **kwargs)
  @needs_dylink
  def test_dylink_basics(self):
    """Run the basic dylink test (no role flip) and verify strict-mode output."""
    self.do_basic_dylink_test(need_reverse=False)
    self.verify_in_strict_mode('main.js')
  @needs_dylink
  def test_dylink_basics_no_modify(self):
    """Basic dylink test with ERROR_ON_WASM_CHANGES_AFTER_LINK (non-optimizing builds only)."""
    if self.is_optimizing():
      self.skipTest('no modify mode only works with non-optimizing builds')
    self.set_setting('WASM_BIGINT')
    self.set_setting('ERROR_ON_WASM_CHANGES_AFTER_LINK')
    self.do_basic_dylink_test()
  @needs_dylink
  def test_dylink_basics_lld_report_undefined(self):
    """Basic dylink test with LLD_REPORT_UNDEFINED enabled."""
    self.set_setting('LLD_REPORT_UNDEFINED')
    self.do_basic_dylink_test()
  @needs_dylink
  def test_dylink_no_export(self):
    """Basic dylink test with NO_DECLARE_ASM_MODULE_EXPORTS."""
    self.set_setting('NO_DECLARE_ASM_MODULE_EXPORTS')
    self.do_basic_dylink_test()
  @needs_dylink
  def test_dylink_memory_growth(self):
    """Basic dylink test with ALLOW_MEMORY_GROWTH (wasm only)."""
    if not self.is_wasm():
      self.skipTest('wasm only')
    self.set_setting('ALLOW_MEMORY_GROWTH')
    self.do_basic_dylink_test()
  @needs_dylink
  def test_dylink_safe_heap(self):
    """Basic dylink test with SAFE_HEAP enabled."""
    self.set_setting('SAFE_HEAP')
    self.do_basic_dylink_test()
@needs_dylink
def test_dylink_function_pointer_equality(self):
self.dylink_test(r'''
#include <stdio.h>
#include "header.h"
int main() {
void* puts_side = get_address();
printf("main module address %p.\n", &puts);
printf("side module address address %p.\n", puts_side);
if (&puts == puts_side)
printf("success\n");
else
printf("failure\n");
return 0;
}
''', '''
#include <stdio.h>
#include "header.h"
void* get_address() {
return (void*)&puts;
}
''', 'success', header='void* get_address();', force_c=True)
@needs_dylink
def test_dylink_floats(self):
self.dylink_test(r'''
#include <stdio.h>
extern float sidey();
int main() {
printf("other says %.2f.\n", sidey()+1);
return 0;
}
''', '''
float sidey() { return 11.5; }
''', 'other says 12.50', force_c=True)
  @needs_dylink
  def test_dylink_printf(self):
    # main and side modules each call printf.
    self.dylink_test(r'''
      #include <stdio.h>
      void sidey();
      int main() {
        printf("hello from main\n");
        sidey();
        return 0;
      }
    ''', r'''
      #include <stdio.h>
      void sidey() {
        printf("hello from side\n");
      }
    ''', 'hello from main\nhello from side\n', force_c=True)
# Verify that a function pointer can be passed back and forth and invoked
# on both sides.
@needs_dylink
def test_dylink_funcpointer(self):
self.dylink_test(
main=r'''
#include <stdio.h>
#include <assert.h>
#include "header.h"
intfunc sidey(intfunc f);
void a(int arg) { printf("hello from funcptr: %d\n", arg); }
int main() {
intfunc b = sidey(a);
assert(a == b);
b(0);
return 0;
}
''',
side='''
#include "header.h"
intfunc sidey(intfunc f) { f(1); return f; }
''',
expected='hello from funcptr: 1\nhello from funcptr: 0\n',
header='typedef void (*intfunc)(int );', force_c=True)
@needs_dylink
# test dynamic linking of a module with multiple function pointers, stored
# statically
def test_dylink_static_funcpointers(self):
self.dylink_test(
main=r'''
#include <stdio.h>
#include "header.h"
void areturn0() { printf("hello 0\n"); }
void areturn1() { printf("hello 1\n"); }
void areturn2() { printf("hello 2\n"); }
voidfunc func_ptrs[3] = { areturn0, areturn1, areturn2 };
int main(int argc, char **argv) {
sidey(func_ptrs[0]);
sidey(func_ptrs[1]);
sidey(func_ptrs[2]);
return 0;
}
''',
side='''
#include "header.h"
void sidey(voidfunc f) { f(); }
''',
expected='hello 0\nhello 1\nhello 2\n',
header='typedef void (*voidfunc)(); void sidey(voidfunc f);', force_c=True)
@needs_dylink
def test_dylink_funcpointers_wrapper(self):
self.dylink_test(
main=r'''\
#include <stdio.h>
#include "header.h"
int main(int argc, char **argv) {
charfunc f1 = emscripten_run_script;
f1("console.log('one')");
charfunc f2 = get();
f2("console.log('two')");
return 0;
}
''',
side='''\
#include "header.h"
charfunc get() {
return emscripten_run_script;
}
''',
expected='one\ntwo\n',
header='''\
#include <emscripten.h>
typedef void (*charfunc)(const char*);
extern charfunc get();
''', force_c=True)
@needs_dylink
def test_dylink_static_funcpointer_float(self):
self.dylink_test(
main=r'''\
#include <stdio.h>
#include "header.h"
int sidey(floatfunc f);
float func1(float f) { printf("hello 1: %f\n", f); return 0; }
floatfunc f1 = &func1;
int main(int argc, char **argv) {
printf("got: %d\n", sidey(f1));
f1(12.34);
return 0;
}
''',
side='''\
#include "header.h"
int sidey(floatfunc f) { f(56.78); return 1; }
''',
expected='hello 1: 56.779999\ngot: 1\nhello 1: 12.340000\n',
header='typedef float (*floatfunc)(float);', force_c=True)
@needs_dylink
def test_missing_signatures(self):
create_file('test_sig.c', r'''#include <emscripten.h>
int main() {
return 0 == ( (long)&emscripten_run_script_string +
(long)&emscripten_run_script );
}''')
self.set_setting('MAIN_MODULE', 1)
# also test main module with 4GB of memory. we need to emit a "maximum"
# clause then, even though 4GB is the maximum; see
# https://github.com/emscripten-core/emscripten/issues/14130
self.set_setting('ALLOW_MEMORY_GROWTH', '1')
self.set_setting('MAXIMUM_MEMORY', '4GB')
self.do_runf('test_sig.c', '')
@needs_dylink
def test_dylink_global_init(self):
self.dylink_test(r'''
#include <stdio.h>
struct Class {
Class() { printf("a new Class\n"); }
};
static Class c;
int main() {
return 0;
}
''', r'''
void nothing() {}
''', 'a new Class\n')
@needs_dylink
def test_dylink_global_inits(self):
def test():
self.dylink_test(header=r'''
#include <stdio.h>
struct Class {
Class(const char *name) { printf("new %s\n", name); }
};
''', main=r'''
#include "header.h"
static Class c("main");
int main() {
return 0;
}
''', side=r'''
#include "header.h"
static Class c("side");
''', expected=['new main\nnew side\n', 'new side\nnew main\n'])
test()
print('check warnings')
self.set_setting('ASSERTIONS', 2)
test()
# TODO: this in wasm
# full = self.run_js('src.js')
# self.assertNotContained('already exists', full)
  @needs_dylink
  def test_dylink_i64(self):
    # An i64 return value crossing the main/side module boundary.
    self.dylink_test(r'''
      #include <stdio.h>
      #include <stdint.h>
      extern int64_t sidey();
      int main() {
        printf("other says %lld.\n", sidey());
        return 0;
      }
    ''', '''
      #include <stdint.h>
      int64_t sidey() {
        return 42;
      }
    ''', 'other says 42.', force_c=True)
@all_engines
@needs_dylink
def test_dylink_i64_b(self):
self.dylink_test(r'''
#include <stdio.h>
#include <stdint.h>
extern int64_t sidey();
int64_t testAdd(int64_t a) {
return a + 1;
}
int64_t testAddB(int a) {
return a + 1;
}
typedef int64_t (*testAddHandler)(int64_t);
testAddHandler h = &testAdd;
typedef int64_t (*testAddBHandler)(int);
testAddBHandler hb = &testAddB;
int main() {
printf("other says %lld.\n", sidey());
int64_t r = h(42);
printf("my fp says: %lld.\n", r);
int64_t rb = hb(42);
printf("my second fp says: %lld.\n", r);
}
''', '''
#include <stdint.h>
int64_t sidey() {
volatile int64_t x = 0x12345678abcdef12LL;
x += x % 17;
x = 18 - x;
return x;
}
''', 'other says -1311768467750121224.\nmy fp says: 43.\nmy second fp says: 43.', force_c=True)
@needs_dylink
@also_with_wasm_bigint
def test_dylink_i64_c(self):
self.dylink_test(r'''
#include <stdio.h>
#include <inttypes.h>
#include "header.h"
typedef int32_t (*fp_type_32)(int32_t, int32_t, int32_t);
typedef int64_t (*fp_type_64)(int32_t, int32_t, int32_t);
int32_t internal_function_ret_32(int32_t i, int32_t j, int32_t k) {
return 32;
}
int64_t internal_function_ret_64(int32_t i, int32_t j, int32_t k) {
return 64;
}
int main() {
fp_type_32 fp32_internal = &internal_function_ret_32;
fp_type_32 fp32_external = &function_ret_32;
fp_type_64 fp64_external = &function_ret_64;
fp_type_64 fp64_internal = &internal_function_ret_64;
int32_t ires32 = fp32_internal(0,0,0);
printf("res32 - internal %d\n",ires32);
int32_t eres32 = fp32_external(0,0,0);
printf("res32 - external %d\n",eres32);
int64_t ires64 = fp64_internal(0,0,0);
printf("res64 - internal %" PRId64 "\n",ires64);
int64_t eres64 = fp64_external(0,0,0);
printf("res64 - external %" PRId64 "\n",eres64);
return 0;
}
''', '''
#include "header.h"
int32_t function_ret_32(int32_t i, int32_t j, int32_t k) {
return 32;
}
int64_t function_ret_64(int32_t i, int32_t j, int32_t k) {
return 64;
}
''', '''res32 - internal 32
res32 - external 32
res64 - internal 64
res64 - external 64\n''', header='''
#include <emscripten.h>
#include <stdint.h>
EMSCRIPTEN_KEEPALIVE int32_t function_ret_32(int32_t i, int32_t j, int32_t k);
EMSCRIPTEN_KEEPALIVE int64_t function_ret_64(int32_t i, int32_t j, int32_t k);
''', force_c=True)
@needs_dylink
@also_with_wasm_bigint
def test_dylink_i64_invoke(self):
self.set_setting('DISABLE_EXCEPTION_CATCHING', 0)
self.dylink_test(r'''\
#include <stdio.h>
#include <stdint.h>
extern "C" int64_t sidey(int64_t arg);
int main(int argc, char *argv[]) {
int64_t temp = 42;
printf("got %lld\n", sidey(temp));
return 0;
}''', r'''\
#include <stdint.h>
#include <stdio.h>
#include <emscripten.h>
extern "C" {
EMSCRIPTEN_KEEPALIVE int64_t do_call(int64_t arg) {
if (arg == 0) {
throw;
}
return 2 * arg;
}
int64_t sidey(int64_t arg) {
try {
return do_call(arg);
} catch(...) {
return 0;
}
}
}''', 'got 84', need_reverse=False)
@needs_dylink
def test_dylink_class(self):
self.dylink_test(header=r'''
#include <stdio.h>
struct Class {
Class(const char *name);
};
''', main=r'''
#include "header.h"
int main() {
Class c("main");
return 0;
}
''', side=r'''
#include "header.h"
Class::Class(const char *name) { printf("new %s\n", name); }
''', expected=['new main\n'])
  @needs_dylink
  def test_dylink_global_var(self):
    # A global variable defined in the side module, read from the main module.
    self.dylink_test(main=r'''
      #include <stdio.h>
      extern int x;
      int main() {
        printf("extern is %d.\n", x);
        return 0;
      }
    ''', side=r'''
      int x = 123;
    ''', expected=['extern is 123.\n'], force_c=True)
@needs_dylink
def test_dylink_global_var_modded(self):
self.dylink_test(main=r'''
#include <stdio.h>
extern int x;
int main() {
printf("extern is %d.\n", x);
return 0;
}
''', side=r'''
int x = 123;
struct Initter {
Initter() { x = 456; }
};
Initter initter;
''', expected=['extern is 456.\n'])
@needs_dylink
def test_dylink_stdlib(self):
self.dylink_test(header=r'''
#include <math.h>
#include <stdlib.h>
#include <string.h>
char *side(const char *data);
double pow_two(double x);
''', main=r'''
#include <stdio.h>
#include "header.h"
int main() {
char *temp = side("hello through side\n");
char *ret = (char*)malloc(strlen(temp)+1);
strcpy(ret, temp);
temp[1] = 'x';
puts(ret);
printf("pow_two: %d.\n", (int)pow_two(5.9));
return 0;
}
''', side=r'''
#include "header.h"
char *side(const char *data) {
char *ret = (char*)malloc(strlen(data)+1);
strcpy(ret, data);
return ret;
}
double pow_two(double x) {
return pow(2, x);
}
''', expected=['hello through side\n\npow_two: 59.'], force_c=True)
@needs_dylink
def test_dylink_jslib(self):
create_file('lib.js', r'''
mergeInto(LibraryManager.library, {
test_lib_func: function(x) {
return x + 17.2;
}
});
''')
self.dylink_test(header=r'''
extern double test_lib_func(int input);
''', main=r'''
#include <stdio.h>
#include "header.h"
extern double sidey();
int main2() { return 11; }
int main() {
int input = sidey();
double temp = test_lib_func(input);
printf("other says %.2f\n", temp);
printf("more: %.5f, %d\n", temp, input);
return 0;
}
''', side=r'''
#include <stdio.h>
#include "header.h"
extern int main2();
double sidey() {
int temp = main2();
printf("main2 sed: %d\n", temp);
printf("main2 sed: %u, %c\n", temp, temp/2);
return test_lib_func(temp);
}
''', expected='other says 45.2', main_emcc_args=['--js-library', 'lib.js'], force_c=True)
@needs_dylink
def test_dylink_many_postsets(self):
NUM = 1234
self.dylink_test(header=r'''
#include <stdio.h>
typedef void (*voidfunc)();
static void simple() {
printf("simple.\n");
}
static volatile voidfunc funcs[''' + str(NUM) + '] = { ' + ','.join(['simple'] * NUM) + r''' };
static void test() {
volatile int i = ''' + str(NUM - 1) + r''';
funcs[i]();
i = 0;
funcs[i]();
}
extern void more();
''', main=r'''
#include "header.h"
int main() {
test();
more();
return 0;
}
''', side=r'''
#include "header.h"
void more() {
test();
}
''', expected=['simple.\nsimple.\nsimple.\nsimple.\n'], force_c=True)
@needs_dylink
def test_dylink_postsets_chunking(self):
self.dylink_test(header=r'''
extern int global_var;
''', main=r'''
#include <stdio.h>
#include "header.h"
// prepare 99 global variable with local initializer
static int p = 1;
#define P(x) __attribute__((used)) int *padding##x = &p;
P(01) P(02) P(03) P(04) P(05) P(06) P(07) P(08) P(09) P(10)
P(11) P(12) P(13) P(14) P(15) P(16) P(17) P(18) P(19) P(20)
P(21) P(22) P(23) P(24) P(25) P(26) P(27) P(28) P(29) P(30)
P(31) P(32) P(33) P(34) P(35) P(36) P(37) P(38) P(39) P(40)
P(41) P(42) P(43) P(44) P(45) P(46) P(47) P(48) P(49) P(50)
P(51) P(52) P(53) P(54) P(55) P(56) P(57) P(58) P(59) P(60)
P(61) P(62) P(63) P(64) P(65) P(66) P(67) P(68) P(69) P(70)
P(71) P(72) P(73) P(74) P(75) P(76) P(77) P(78) P(79) P(80)
P(81) P(82) P(83) P(84) P(85) P(86) P(87) P(88) P(89) P(90)
P(91) P(92) P(93) P(94) P(95) P(96) P(97) P(98) P(99)
// prepare global variable with global initializer
int *ptr = &global_var;
int main(int argc, char *argv[]) {
printf("%d\n", *ptr);
}
''', side=r'''
#include "header.h"
int global_var = 12345;
''', expected=['12345\n'], force_c=True)
@needs_dylink
@parameterized({
'libcxx': ('libc,libc++,libmalloc,libc++abi',),
'all': ('1',),
'missing': ('libc,libmalloc', False, False, False),
'missing_assertions': ('libc,libmalloc', False, False, True),
})
def test_dylink_syslibs(self, syslibs, expect_pass=True, need_reverse=True, assertions=True):
# one module uses libcxx, need to force its inclusion when it isn't the main
self.emcc_args.append('-Wno-deprecated')
self.set_setting('WARN_ON_UNDEFINED_SYMBOLS', 0)
if assertions is not None:
self.set_setting('ASSERTIONS', int(assertions))
passed = True
try:
with env_modify({'EMCC_FORCE_STDLIBS': syslibs, 'EMCC_ONLY_FORCED_STDLIBS': '1'}):
self.dylink_test(main=r'''
void side();
int main() {
side();
return 0;
}
''', side=r'''
#include <iostream>
void side() { std::cout << "cout hello from side\n"; }
''', expected=['cout hello from side\n'], need_reverse=need_reverse, main_module=1)
except Exception as e:
if expect_pass:
raise
print('(seeing expected fail)')
passed = False
assertion = 'build the MAIN_MODULE with EMCC_FORCE_STDLIBS=1 in the environment'
if self.get_setting('ASSERTIONS'):
self.assertContained(assertion, str(e))
else:
self.assertNotContained(assertion, str(e))
assert passed == expect_pass, ['saw', passed, 'but expected', expect_pass]
@needs_dylink
@with_env_modify({'EMCC_FORCE_STDLIBS': 'libc++'})
def test_dylink_iostream(self):
self.dylink_test(header=r'''
#include <iostream>
#include <string>
std::string side();
''', main=r'''
#include "header.h"
int main() {
std::cout << "hello from main " << side() << std::endl;
return 0;
}
''', side=r'''
#include "header.h"
std::string side() { return "and hello from side"; }
''', expected=['hello from main and hello from side\n'])
@needs_dylink
def test_dylink_dynamic_cast(self): # issue 3465
self.dylink_test(header=r'''
class Base {
public:
virtual void printName();
};
class Derived : public Base {
public:
void printName();
};
''', main=r'''
#include "header.h"
#include <iostream>
using namespace std;
int main() {
cout << "starting main" << endl;
Base *base = new Base();
Base *derived = new Derived();
base->printName();
derived->printName();
if (dynamic_cast<Derived*>(derived)) {
cout << "OK" << endl;
} else {
cout << "KO" << endl;
}
return 0;
}
''', side=r'''
#include "header.h"
#include <iostream>
using namespace std;
void Base::printName() {
cout << "Base" << endl;
}
void Derived::printName() {
cout << "Derived" << endl;
}
''', expected=['starting main\nBase\nDerived\nOK'])
@with_both_exception_handling
@needs_dylink
def test_dylink_raii_exceptions(self):
self.dylink_test(main=r'''
#include <stdio.h>
extern int side();
int main() {
printf("from side: %d.\n", side());
}
''', side=r'''
#include <stdio.h>
typedef int (*ifdi)(float, double, int);
int func_with_special_sig(float a, double b, int c) {
printf("special %f %f %d\n", a, b, c);
return 1337;
}
struct DestructorCaller {
~DestructorCaller() { printf("destroy\n"); }
};
int side() {
// d has a destructor that must be called on function
// exit, which means an invoke will be used for the
// indirect call here - and the signature of that call
// is special and not present in the main module, so
// it must be generated for the side module.
DestructorCaller d;
volatile ifdi p = func_with_special_sig;
return p(2.18281, 3.14159, 42);
}
''', expected=['special 2.182810 3.141590 42\ndestroy\nfrom side: 1337.\n'])
@needs_dylink
@disabled('https://github.com/emscripten-core/emscripten/issues/12815')
def test_dylink_hyper_dupe(self):
self.set_setting('INITIAL_MEMORY', '64mb')
self.set_setting('ASSERTIONS', 2)
# test hyper-dynamic linking, and test duplicate warnings
create_file('third.cpp', r'''
#include <stdio.h>
int sidef() { return 36; }
int sideg = 49;
int bsidef() { return 536; }
extern void only_in_second_1(int x);
extern int second_to_third;
int third_to_second = 1337;
void only_in_third_0() {
// note we access our own globals directly, so
// it doesn't matter that overriding failed
printf("only_in_third_0: %d, %d, %d\n", sidef(), sideg, second_to_third);
only_in_second_1(2112);
}
void only_in_third_1(int x) {
printf("only_in_third_1: %d, %d, %d, %d\n", sidef(), sideg, second_to_third, x);
}
''')
if self.is_wasm():
libname = 'third.wasm'
else:
libname = 'third.js'
self.run_process([EMCC, 'third.cpp', '-o', libname, '-s', 'SIDE_MODULE'] + self.get_emcc_args())
self.dylink_test(main=r'''
#include <stdio.h>
#include <emscripten.h>
extern int sidef();
extern int sideg;
extern int bsidef();
extern int bsideg;
extern void only_in_second_0();
extern void only_in_third_0();
int main() {
EM_ASM({
loadDynamicLibrary('%s'); // hyper-dynamic! works at least for functions (and consts not used in same block)
});
printf("sidef: %%d, sideg: %%d.\n", sidef(), sideg);
printf("bsidef: %%d.\n", bsidef());
only_in_second_0();
only_in_third_0();
}
''' % libname,
side=r'''
#include <stdio.h>
int sidef() { return 10; } // third will try to override these, but fail!
int sideg = 20;
extern void only_in_third_1(int x);
int second_to_third = 500;
extern int third_to_second;
void only_in_second_0() {
printf("only_in_second_0: %d, %d, %d\n", sidef(), sideg, third_to_second);
only_in_third_1(1221);
}
void only_in_second_1(int x) {
printf("only_in_second_1: %d, %d, %d, %d\n", sidef(), sideg, third_to_second, x);
}
''',
expected=['sidef: 10, sideg: 20.\nbsidef: 536.\nonly_in_second_0: 10, 20, 1337\nonly_in_third_1: 36, 49, 500, 1221\nonly_in_third_0: 36, 49, 500\nonly_in_second_1: 10, 20, 1337, 2112\n'],
# in wasm, we can't flip as the side would have an EM_ASM, which we don't support yet TODO
need_reverse=not self.is_wasm())
print('check warnings')
full = self.run_js('src.js')
self.assertContained("warning: symbol '_sideg' from '%s' already exists" % libname, full)
@needs_dylink
def test_dylink_load_compiled_side_module(self):
self.set_setting('FORCE_FILESYSTEM')
self.emcc_args.append('-lnodefs.js')
self.set_setting('INITIAL_MEMORY', '64mb')
# This test loads the module at runtime with loadWebAssemblyModule so we
# want to suppress the automatic loading that would otherwise be done at
# startup.
self.set_setting('NO_AUTOLOAD_DYLIBS')
self.dylink_test(main=r'''
#include <stdio.h>
#include <emscripten.h>
extern int sidef();
int main() {
EM_ASM({
FS.mkdir('/working');
FS.mount(NODEFS,{ root: '.' }, '/working');
var libData = FS.readFile('/working/liblib.so', {encoding: 'binary'});
if (!(libData instanceof Uint8Array)) {
libData = new Uint8Array(libData);
}
var compiledModule = new WebAssembly.Module(libData);
var sideExports = loadWebAssemblyModule(compiledModule, {loadAsync: false, nodelete: true});
mergeLibSymbols(sideExports, 'liblib.so');
});
printf("sidef: %d.\n", sidef());
}
''',
side=r'''
#include <stdio.h>
int sidef() { return 10; }
''',
expected=['sidef: 10'],
# in wasm, we can't flip as the side would have an EM_ASM, which we don't support yet TODO
need_reverse=not self.is_wasm())
@needs_dylink
def test_dylink_dso_needed(self):
def do_run(src, expected_output, emcc_args=[]):
create_file('main.c', src + 'int main() { return test_main(); }')
self.do_runf('main.c', expected_output, emcc_args=emcc_args)
self._test_dylink_dso_needed(do_run)
@needs_dylink
def test_dylink_dot_a(self):
# .a linking must force all .o files inside it, when in a shared module
create_file('third.c', 'int sidef() { return 36; }')
create_file('fourth.c', 'int sideg() { return 17; }')
self.run_process([EMCC, '-fPIC', '-c', 'third.c', '-o', 'third.o'] + self.get_emcc_args())
self.run_process([EMCC, '-fPIC', '-c', 'fourth.c', '-o', 'fourth.o'] + self.get_emcc_args())
self.run_process([EMAR, 'rc', 'libfourth.a', 'fourth.o'])
self.dylink_test(main=r'''
#include <stdio.h>
#include <emscripten.h>
int sidef();
int sideg();
int main() {
printf("sidef: %d, sideg: %d.\n", sidef(), sideg());
}
''',
# contents of libfourth.a must be included, even if they aren't referred to!
side=['libfourth.a', 'third.o'],
expected=['sidef: 36, sideg: 17.\n'], force_c=True)
@needs_dylink
def test_dylink_spaghetti(self):
self.dylink_test(main=r'''
#include <stdio.h>
int main_x = 72;
extern int side_x;
int adjust = side_x + 10;
int *ptr = &side_x;
struct Class {
Class() {
printf("main init sees %d, %d, %d.\n", adjust, *ptr, main_x);
}
};
Class cm;
int main() {
printf("main main sees %d, %d, %d.\n", adjust, *ptr, main_x);
return 0;
}
''', side=r'''
#include <stdio.h>
extern int main_x;
int side_x = -534;
int adjust2 = main_x + 10;
int *ptr2 = &main_x;
struct SideClass {
SideClass() {
printf("side init sees %d, %d, %d.\n", adjust2, *ptr2, side_x);
}
};
SideClass cs;
''', expected=['''\
side init sees 82, 72, -534.
main init sees -524, -534, 72.
main main sees -524, -534, 72.
''', '''\
main init sees -524, -534, 72.
side init sees 82, 72, -534.
main main sees -524, -534, 72.
'''])
@needs_make('mingw32-make')
@needs_dylink
def test_dylink_zlib(self):
self.emcc_args += ['-Wno-shift-negative-value', '-I' + test_file('third_party/zlib')]
self.set_setting('RELOCATABLE')
zlib_archive = self.get_zlib_library()
self.dylink_test(main=read_file(test_file('third_party/zlib/example.c')),
side=zlib_archive,
expected=read_file(test_file('core/test_zlib.out')),
force_c=True)
# @needs_dylink
# def test_dylink_bullet(self):
# self.emcc_args += ['-I' + test_file('bullet/src')]
# side = self.get_bullet_library(self, True)
# self.dylink_test(main=read_file(test_file('bullet/Demos/HelloWorld/HelloWorld.cpp')),
# side=side,
# expected=[read_file(test_file('bullet/output.txt')), # different roundings
# read_file(test_file('bullet/output2.txt')),
# read_file(test_file('bullet/output3.txt'))])
@needs_dylink
def test_dylink_rtti(self):
# Verify that objects created in one module and be dynamic_cast<> correctly
# in the another module.
# Each module will define its own copy of certain COMDAT symbols such as
# each classs's typeinfo, but at runtime they should both use the same one.
# Use LLD_REPORT_UNDEFINED to test that it works as expected with weak/COMDAT
# symbols.
self.set_setting('LLD_REPORT_UNDEFINED')
header = '''
#include <cstddef>
class Foo {
public:
virtual ~Foo() {}
};
class Bar : public Foo {
public:
virtual ~Bar() {}
};
bool is_bar(Foo* foo);
'''
main = '''
#include <stdio.h>
#include "header.h"
int main() {
Bar bar;
if (!is_bar(&bar)) {
puts("failure");
return 1;
}
puts("success");
return 0;
}
'''
side = '''
#include "header.h"
bool is_bar(Foo* foo) {
return dynamic_cast<Bar*>(foo) != nullptr;
}
'''
self.dylink_test(main=main,
side=side,
header=header,
expected='success')
@needs_dylink
def test_dylink_argv_argc(self):
# Verify that argc and argv can be sent to main when main is in a side module
self.emcc_args += ['--extern-pre-js', 'pre.js']
create_file('pre.js', '''
var Module = { arguments: ['hello', 'world!'] }
''')
self.dylink_test(
'', # main module is empty.
r'''
#include <stdio.h>
int main(int argc, char const *argv[]) {
printf("%d ", argc);
for (int i=1; i<argc; i++) printf("%s ", argv[i]);
printf("\n");
return 0;
}
''',
expected='3 hello world!',
need_reverse=False)
@needs_dylink
def test_dylink_weak(self):
# Verify that weakly defined symbols can be defined in both side module and main
# module but that only one gets used at runtime.
self.dylink_testf(test_file('core/test_dylink_weak.c'), need_reverse=False)
  @node_pthreads
  @needs_dylink
  def test_dylink_tls(self):
    # Thread-local storage used across dynamic-linking boundaries
    # (experimental feature; silence its warning).
    self.emcc_args.append('-Wno-experimental')
    self.dylink_testf(test_file('core/test_dylink_tls.c'),
                      need_reverse=False)
  @node_pthreads
  @needs_dylink
  def test_dylink_tls_export(self):
    # TLS symbols exported between modules (experimental feature;
    # silence its warning).
    self.emcc_args.append('-Wno-experimental')
    self.dylink_testf(test_file('core/test_dylink_tls_export.c'),
                      need_reverse=False)
def test_random(self):
src = r'''#include <stdlib.h>
#include <stdio.h>
int main()
{
srandom(0xdeadbeef);
printf("%ld\n", random());
}
'''
self.do_run(src, '956867869')
def test_rand(self):
src = r'''#include <stdlib.h>
#include <stdio.h>
#include <assert.h>
int main()
{
// we need RAND_MAX to be a bitmask (power of 2 minus 1). this assertions guarantees
// if RAND_MAX changes the test failure will focus attention on that issue here.
assert(RAND_MAX == 0x7fffffff);
srand(0xdeadbeef);
for(int i = 0; i < 10; ++i)
printf("%d\n", rand());
unsigned int seed = 0xdeadbeef;
for(int i = 0; i < 10; ++i)
printf("%d\n", rand_r(&seed));
bool haveEvenAndOdd = true;
for(int i = 1; i <= 30; ++i)
{
int mask = 1 << i;
if (mask > RAND_MAX) break;
bool haveEven = false;
bool haveOdd = false;
for(int j = 0; j < 1000 && (!haveEven || !haveOdd); ++j)
{
if ((rand() & mask) == 0)
haveEven = true;
else
haveOdd = true;
}
haveEvenAndOdd = haveEvenAndOdd && haveEven && haveOdd;
}
if (haveEvenAndOdd)
printf("Have even and odd!\n");
return 0;
}
'''
expected = '''490242850
2074599277
1480056542
1912638067
931112055
2110392489
2053422194
1614832492
216117595
174823244
760368382
602359081
1121118963
1291018924
1608306807
352705809
958258461
1182561381
114276303
1481323674
Have even and odd!
'''
self.do_run(src, expected)
  def test_strtod(self):
    # strtod() string-to-double conversion (core/test_strtod.c).
    self.do_core_test('test_strtod.c')
  def test_strtold(self):
    # strtold() string-to-long-double conversion (core/test_strtold.c).
    self.do_core_test('test_strtold.c')
  def test_strtok(self):
    # strtok() tokenization (core/test_strtok.c).
    self.do_core_test('test_strtok.c')
  def test_strtol(self):
    # strtol() string-to-integer conversion (core/test_strtol.c).
    self.do_core_test('test_strtol.c')
  def test_transtrcase(self):
    # Case-conversion test (core/test_transtrcase.c).
    self.do_core_test('test_transtrcase.c')
  @no_wasm2js('very slow to compile')
  @is_slow_test
  def test_printf(self):
    # Large printf conformance suite (printf/test.c).
    self.emcc_args.append('-Wno-format')
    # EXIT_RUNTIME so buffered stdio is flushed before output comparison.
    self.set_setting('EXIT_RUNTIME')
    self.do_run_in_out_file_test('printf/test.c')
  def test_printf_2(self):
    # Additional printf cases (core/test_printf_2.c).
    self.do_core_test('test_printf_2.c')
  def test_printf_float(self):
    # printf of floating-point values (printf/test_float.c).
    self.do_run_in_out_file_test('printf/test_float.c')
  def test_printf_octal(self):
    # printf of octal conversions (printf/test_octal.c).
    self.do_run_in_out_file_test('printf/test_octal.c')
  def test_printf_macros(self):
    # printf with <inttypes.h>-style format macros (core/test_printf_macros.c).
    self.do_core_test('test_printf_macros.c')
  def test_vprintf(self):
    # vprintf (va_list-based printing) test (core/test_vprintf.c).
    self.do_core_test('test_vprintf.c')
  def test_vsnprintf(self):
    # vsnprintf bounded formatting test (core/test_vsnprintf.c).
    self.do_core_test('test_vsnprintf.c')
  def test_printf_more(self):
    # Further printf edge cases (core/test_printf_more.c).
    self.do_core_test('test_printf_more.c')
  def test_perrar(self):
    # perror() test (core/test_perrar.c).
    self.do_core_test('test_perrar.c')
  def test_atoX(self):
    # atoi/atol-family conversion tests (core/test_atoX.c).
    self.do_core_test('test_atoX.c')
  def test_strstr(self):
    # strstr() substring search test (core/test_strstr.c).
    self.do_core_test('test_strstr.c')
  def test_fnmatch(self):
    # fnmatch() glob pattern matching test (core/test_fnmatch.cpp).
    self.do_core_test('test_fnmatch.cpp')
  def test_sscanf(self):
    # Basic sscanf() parsing test (core/test_sscanf.c).
    self.do_core_test('test_sscanf.c')
def test_sscanf_2(self):
# doubles
for ftype in ['float', 'double']:
src = r'''
#include <stdio.h>
int main(){
char strval1[] = "1.2345678901";
char strval2[] = "1.23456789e5";
char strval3[] = "1.23456789E5";
char strval4[] = "1.2345678e-5";
char strval5[] = "1.2345678E-5";
double dblval = 1.2345678901;
double tstval;
sscanf(strval1, "%lf", &tstval);
if(dblval != tstval) printf("FAIL: Values are not equal: %lf %lf\n", dblval, tstval);
else printf("Pass: %lf %lf\n", tstval, dblval);
sscanf(strval2, "%lf", &tstval);
dblval = 123456.789;
if(dblval != tstval) printf("FAIL: Values are not equal: %lf %lf\n", dblval, tstval);
else printf("Pass: %lf %lf\n", tstval, dblval);
sscanf(strval3, "%lf", &tstval);
dblval = 123456.789;
if(dblval != tstval) printf("FAIL: Values are not equal: %lf %lf\n", dblval, tstval);
else printf("Pass: %lf %lf\n", tstval, dblval);
sscanf(strval4, "%lf", &tstval);
dblval = 0.000012345678;
if(dblval != tstval) printf("FAIL: Values are not equal: %lf %lf\n", dblval, tstval);
else printf("Pass: %lf %lf\n", tstval, dblval);
sscanf(strval5, "%lf", &tstval);
dblval = 0.000012345678;
if(dblval != tstval) printf("FAIL: Values are not equal: %lf %lf\n", dblval, tstval);
else printf("Pass: %lf %lf\n", tstval, dblval);
return 0;
}
'''
if ftype == 'float':
self.do_run(src.replace('%lf', '%f').replace('double', 'float'), '''Pass: 1.234568 1.234568
Pass: 123456.789062 123456.789062
Pass: 123456.789062 123456.789062
Pass: 0.000012 0.000012
Pass: 0.000012 0.000012''')
else:
self.do_run(src, '''Pass: 1.234568 1.234568
Pass: 123456.789000 123456.789000
Pass: 123456.789000 123456.789000
Pass: 0.000012 0.000012
Pass: 0.000012 0.000012''')
  def test_sscanf_n(self):
    # sscanf %n (chars-consumed) conversion test (core/test_sscanf_n.c).
    self.do_core_test('test_sscanf_n.c')
  def test_sscanf_whitespace(self):
    # EXIT_RUNTIME so buffered stdio is flushed before output comparison.
    self.set_setting('EXIT_RUNTIME')
    self.do_core_test('test_sscanf_whitespace.c')
  def test_sscanf_other_whitespace(self):
    # The test uses i16s in printf, which SAFE_HEAP would reject.
    self.set_setting('SAFE_HEAP', 0)
    # EXIT_RUNTIME so buffered stdio is flushed before output comparison.
    self.set_setting('EXIT_RUNTIME')
    self.do_core_test('test_sscanf_other_whitespace.c')
  def test_sscanf_3(self):
    # sscanf edge cases, part 3 (core/test_sscanf_3.c).
    self.do_core_test('test_sscanf_3.c')
  def test_sscanf_4(self):
    # sscanf edge cases, part 4 (core/test_sscanf_4.c).
    self.do_core_test('test_sscanf_4.c')
  def test_sscanf_5(self):
    # sscanf edge cases, part 5 (core/test_sscanf_5.c).
    self.do_core_test('test_sscanf_5.c')
  def test_sscanf_6(self):
    # sscanf edge cases, part 6 (core/test_sscanf_6.c).
    self.do_core_test('test_sscanf_6.c')
  def test_sscanf_skip(self):
    # sscanf with assignment-suppression (%*) conversions (core/test_sscanf_skip.c).
    self.do_core_test('test_sscanf_skip.c')
  def test_sscanf_caps(self):
    # sscanf with uppercase conversion specifiers (core/test_sscanf_caps.c).
    self.do_core_test('test_sscanf_caps.c')
  def test_sscanf_hex(self):
    # sscanf of hexadecimal input (core/test_sscanf_hex.cpp).
    self.do_core_test('test_sscanf_hex.cpp')
  def test_sscanf_float(self):
    # sscanf of float values (core/test_sscanf_float.c).
    self.do_core_test('test_sscanf_float.c')
  def test_langinfo(self):
    # nl_langinfo() locale information test (core/test_langinfo.c).
    self.do_core_test('test_langinfo.c')
def test_files(self):
self.banned_js_engines = [config.SPIDERMONKEY_ENGINE] # closure can generate variables called 'gc', which pick up js shell stuff
if self.maybe_closure(): # Use closure here, to test we don't break FS stuff
self.emcc_args = [x for x in self.emcc_args if x != '-g'] # ensure we test --closure 1 --memory-init-file 1 (-g would disable closure)
elif '-O3' in self.emcc_args and not self.is_wasm():
print('closure 2')
self.emcc_args += ['--closure', '2'] # Use closure 2 here for some additional coverage
return self.skipTest('TODO: currently skipped because CI runs out of memory running Closure in this test!')
self.emcc_args += ['--pre-js', 'pre.js']
self.set_setting('FORCE_FILESYSTEM')
print('base', self.emcc_args)
create_file('pre.js', '''
/** @suppress{checkTypes}*/
Module = {
'noFSInit': true,
'preRun': function() {
FS.createLazyFile('/', 'test.file', 'test.file', true, false);
// Test FS_* exporting
Module['FS_createDataFile']('/', 'somefile.binary', [100, 200, 50, 25, 10, 77, 123], true, false, false); // 200 becomes -56, since signed chars are used in memory
var test_files_input = 'hi there!';
var test_files_input_index = 0;
FS.init(function() {
return test_files_input.charCodeAt(test_files_input_index++) || null;
});
}
};
''')
create_file('test.file', 'some data')
mem_file = 'files.js.mem'
try_delete(mem_file)
def clean(out):
return '\n'.join([line for line in out.split('\n') if 'binaryen' not in line and 'wasm' not in line and 'so not running' not in line])
self.do_runf(test_file('files.cpp'), ('size: 7\ndata: 100,-56,50,25,10,77,123\nloop: 100 -56 50 25 10 77 123 \ninput:hi there!\ntexto\n$\n5 : 10,30,20,11,88\nother=some data.\nseeked=me da.\nseeked=ata.\nseeked=ta.\nfscanfed: 10 - hello\n5 bytes to dev/null: 5\nok.\ntexte\n', 'size: 7\ndata: 100,-56,50,25,10,77,123\nloop: 100 -56 50 25 10 77 123 \ninput:hi there!\ntexto\ntexte\n$\n5 : 10,30,20,11,88\nother=some data.\nseeked=me da.\nseeked=ata.\nseeked=ta.\nfscanfed: 10 - hello\n5 bytes to dev/null: 5\nok.\n'),
output_nicerizer=clean)
if self.uses_memory_init_file():
self.assertExists(mem_file)
def test_files_m(self):
# Test for Module.stdin etc.
# needs to flush stdio streams
self.set_setting('EXIT_RUNTIME')
create_file('pre.js', '''
Module = {
data: [10, 20, 40, 30],
stdin: function() { return Module.data.pop() || null },
stdout: function(x) { out('got: ' + x) }
};
''')
self.emcc_args += ['--pre-js', 'pre.js']
src = r'''
#include <stdio.h>
#include <unistd.h>
int main () {
char c;
fprintf(stderr, "isatty? %d,%d,%d\n", isatty(fileno(stdin)), isatty(fileno(stdout)), isatty(fileno(stderr)));
while ((c = fgetc(stdin)) != EOF) {
putc(c+5, stdout);
}
return 0;
}
'''
def clean(out):
return '\n'.join(l for l in out.splitlines() if 'warning' not in l and 'binaryen' not in l)
self.do_run(src, ('got: 35\ngot: 45\ngot: 25\ngot: 15\nisatty? 0,0,1\n', 'got: 35\ngot: 45\ngot: 25\ngot: 15\nisatty? 0,0,1', 'isatty? 0,0,1\ngot: 35\ngot: 45\ngot: 25\ngot: 15'), output_nicerizer=clean)
  def test_mount(self):
    # fs/test_mount.c needs full filesystem support linked in.
    self.set_setting('FORCE_FILESYSTEM')
    self.do_runf(test_file('fs/test_mount.c'), 'success')
  def test_getdents64(self):
    # getdents64 directory-listing syscall test (fs/test_getdents64.cpp).
    self.do_runf(test_file('fs/test_getdents64.cpp'), '..')
  def test_getdents64_special_cases(self):
    # Skip bare V8 due to the engine bug tracked at:
    # https://bugs.chromium.org/p/v8/issues/detail?id=6881
    self.banned_js_engines = [config.V8_ENGINE]
    self.do_run_in_out_file_test('fs/test_getdents64_special_cases.cpp')
  def test_getcwd_with_non_ascii_name(self):
    # Skip bare V8 due to the engine bug tracked at:
    # https://bugs.chromium.org/p/v8/issues/detail?id=6881
    self.banned_js_engines = [config.V8_ENGINE]
    self.do_run_in_out_file_test('fs/test_getcwd_with_non_ascii_name.cpp')
  def test_proc_self_fd(self):
    # /proc/self/fd pseudo-filesystem test (fs/test_proc_self_fd.c).
    self.do_run_in_out_file_test('fs/test_proc_self_fd.c')
  def test_fwrite_0(self):
    # fwrite with a zero-sized write (core/test_fwrite_0.c).
    self.do_core_test('test_fwrite_0.c')
  def test_fgetc_ungetc(self):
    # Run the same fgetc/ungetc test against both MEMFS and NODEFS backends.
    print('TODO: update this test once the musl ungetc-on-EOF-stream bug is fixed upstream and reaches us')
    orig_compiler_opts = self.emcc_args.copy()
    for fs in ['MEMFS', 'NODEFS']:
      print(fs)
      # Reset to the original flags each iteration, then select the backend
      # via a -D define; NODEFS additionally needs its JS library.
      self.emcc_args = orig_compiler_opts + ['-D' + fs]
      if fs == 'NODEFS':
        self.emcc_args += ['-lnodefs.js']
      self.do_runf(test_file('stdio/test_fgetc_ungetc.c'), 'success', js_engines=[config.NODE_JS])
  # fgetc must return bytes >= 0x80 as their unsigned value (234 here), not
  # sign-extended.
  def test_fgetc_unsigned(self):
    src = r'''
      #include <stdio.h>
      int main() {
        FILE *file = fopen("file_with_byte_234.txt", "rb");
        int c = fgetc(file);
        printf("*%d\n", c);
      }
    '''
    # 0xea == 234; embedded so the compiled program can fopen() it.
    create_file('file_with_byte_234.txt', b'\xea', binary=True)
    self.emcc_args += ['--embed-file', 'file_with_byte_234.txt']
    self.do_run(src, '*234\n')
def test_fgets_eol(self):
src = r'''
#include <stdio.h>
char buf[32];
int main()
{
const char *r = "SUCCESS";
FILE *f = fopen("eol.txt", "r");
while (fgets(buf, 32, f) != NULL) {
if (buf[0] == '\0') {
r = "FAIL";
break;
}
}
printf("%s\n", r);
fclose(f);
return 0;
}
'''
open('eol.txt', 'wb').write(b'\n')
self.emcc_args += ['--embed-file', 'eol.txt']
self.do_run(src, 'SUCCESS\n')
  # fscanf parsing of floats, including a negative and a bare leading-dot form.
  def test_fscanf(self):
    create_file('three_numbers.txt', '-1 0.1 -.1')
    src = r'''
      #include <stdio.h>
      #include <assert.h>
      #include <float.h>
      int main()
      {
          float x = FLT_MAX, y = FLT_MAX, z = FLT_MAX;
          FILE* fp = fopen("three_numbers.txt", "r");
          if (fp) {
              int match = fscanf(fp, " %f %f %f ", &x, &y, &z);
              printf("match = %d\n", match);
              printf("x = %0.1f, y = %0.1f, z = %0.1f\n", x, y, z);
          } else {
              printf("failed to open three_numbers.txt\n");
          }
          return 0;
      }
    '''
    self.emcc_args += ['--embed-file', 'three_numbers.txt']
    self.do_run(src, 'match = 3\nx = -1.0, y = 0.1, z = -0.1\n')
  # fscanf with a dense %d/%d/%d pattern (OBJ-style face indices); expects all
  # 12 conversions to match.
  def test_fscanf_2(self):
    create_file('a.txt', '''1/2/3 4/5/6 7/8/9
''')
    self.emcc_args += ['--embed-file', 'a.txt']
    self.do_run(r'''#include <cstdio>
      #include <iostream>
      using namespace std;
      int
      main( int argv, char ** argc ) {
        cout << "fscanf test" << endl;
        FILE * file;
        file = fopen("a.txt", "rb");
        int vertexIndex[4];
        int normalIndex[4];
        int uvIndex[4];
        int matches = fscanf(file, "%d/%d/%d %d/%d/%d %d/%d/%d %d/%d/%d\n", &vertexIndex[0], &uvIndex[0], &normalIndex[0], &vertexIndex [1], &uvIndex[1], &normalIndex[1], &vertexIndex[2], &uvIndex[2], &normalIndex[2], &vertexIndex[3], &uvIndex[3], &normalIndex[3]);
        cout << matches << endl;
        return 0;
      }
    ''', 'fscanf test\n9\n')
  # fileno() of the first user-opened file; 0-2 are stdin/stdout/stderr.
  def test_fileno(self):
    create_file('empty.txt', '')
    src = r'''
      #include <stdio.h>
      #include <unistd.h>
      int main()
      {
          FILE* fp = fopen("empty.txt", "r");
          if (fp) {
              printf("%d\n", fileno(fp));
          } else {
              printf("failed to open empty.txt\n");
          }
          return 0;
      }
    '''
    self.emcc_args += ['--embed-file', 'empty.txt']
    self.do_run(src, '3\n')
  def test_readdir(self):
    self.do_run_in_out_file_test('dirent/test_readdir.c')
  def test_readdir_empty(self):
    self.do_run_in_out_file_test('dirent/test_readdir_empty.c')
  # stat() coverage; also checks the generated JS passes strict-mode linting.
  def test_stat(self):
    self.do_runf(test_file('stat/test_stat.c'), 'success')
    self.verify_in_strict_mode('test_stat.js')
  def test_fstatat(self):
    self.do_runf(test_file('stat/test_fstatat.c'), 'success')
  def test_stat_chmod(self):
    self.do_runf(test_file('stat/test_chmod.c'), 'success')
  def test_stat_mknod(self):
    self.do_runf(test_file('stat/test_mknod.c'), 'success')
  # fcntl against a pre-created data file.
  def test_fcntl(self):
    self.add_pre_run("FS.createDataFile('/', 'test', 'abcdef', true, true, false);")
    self.do_run_in_out_file_test('fcntl/test_fcntl.c')
  def test_fcntl_open(self):
    self.do_run_in_out_file_test('fcntl/test_fcntl_open.c')
  @also_with_wasm_bigint
  def test_fcntl_misc(self):
    self.add_pre_run("FS.createDataFile('/', 'test', 'abcdef', true, true, false);")
    self.do_run_in_out_file_test('fcntl/test_fcntl_misc.c')
  # poll() against a regular file and a registered dummy device.
  def test_poll(self):
    self.add_pre_run('''
      var dummy_device = FS.makedev(64, 0);
      FS.registerDevice(dummy_device, {});
      FS.createDataFile('/', 'file', 'abcdef', true, true, false);
      FS.mkdev('/device', dummy_device);
    ''')
    self.do_core_test('test_poll.c')
  def test_statvfs(self):
    self.do_core_test('test_statvfs.c')
  def test_libgen(self):
    self.do_core_test('test_libgen.c')
  def test_utime(self):
    self.do_runf(test_file('utime/test_utime.c'), 'success')
def test_futimens(self):
self.do_runf(test_file('utime', 'test_futimens.c'), 'success')
  @no_minimal_runtime('MINIMAL_RUNTIME does not have getValue() and setValue() (TODO add it to a JS library function to get it in)')
  def test_utf(self):
    self.banned_js_engines = [config.SPIDERMONKEY_ENGINE] # only node handles utf well
    self.set_setting('EXPORTED_FUNCTIONS', ['_main', '_malloc', '_free'])
    self.set_setting('EXPORTED_RUNTIME_METHODS', ['getValue', 'setValue', 'UTF8ToString', 'stringToUTF8'])
    self.do_core_test('test_utf.c')
  # UTF-32 string marshalling helpers; MINIMAL_RUNTIME exposes them via
  # DEFAULT_LIBRARY_FUNCS_TO_INCLUDE instead of runtime-method exports.
  def test_utf32(self):
    if self.get_setting('MINIMAL_RUNTIME'):
      self.set_setting('DEFAULT_LIBRARY_FUNCS_TO_INCLUDE', ['$UTF32ToString', '$stringToUTF32', '$lengthBytesUTF32'])
    else:
      self.set_setting('EXPORTED_RUNTIME_METHODS', ['UTF32ToString', 'stringToUTF32', 'lengthBytesUTF32'])
    self.do_runf(test_file('utf32.cpp'), 'OK.')
    # NOTE(review): do_runf's args= are program (runtime) arguments elsewhere
    # in this file (e.g. test_fannkuch); if -fshort-wchar was intended as a
    # compiler flag it belongs in emcc_args -- verify.
    self.do_runf(test_file('utf32.cpp'), 'OK.', args=['-fshort-wchar'])
  def test_utf16(self):
    self.do_runf(test_file('core/test_utf16.cpp'), 'OK.')
  def test_utf8(self):
    if self.get_setting('MINIMAL_RUNTIME'):
      self.set_setting('DEFAULT_LIBRARY_FUNCS_TO_INCLUDE', ['$AsciiToString', '$stringToAscii', '$writeAsciiToMemory'])
    else:
      self.set_setting('EXPORTED_RUNTIME_METHODS',
                       ['UTF8ToString', 'stringToUTF8', 'AsciiToString', 'stringToAscii'])
    self.do_runf(test_file('utf8.cpp'), 'OK.')
  @also_with_wasm_bigint
  def test_utf8_textdecoder(self):
    self.set_setting('EXPORTED_RUNTIME_METHODS', ['UTF8ToString', 'stringToUTF8'])
    self.emcc_args += ['--embed-file', test_file('utf8_corpus.txt') + '@/utf8_corpus.txt']
    self.do_runf(test_file('benchmark_utf8.cpp'), 'OK.')
  # Test that invalid character in UTF8 does not cause decoding to crash.
  def test_utf8_invalid(self):
    self.set_setting('EXPORTED_RUNTIME_METHODS', ['UTF8ToString', 'stringToUTF8'])
    # Once with the default decoder, once with TEXTDECODER forced on.
    for decoder_mode in [[], ['-s', 'TEXTDECODER']]:
      self.emcc_args += decoder_mode
      print(str(decoder_mode))
      self.do_runf(test_file('utf8_invalid.cpp'), 'OK.')
  # Test that invalid character in UTF8 does not cause decoding to crash.
  @no_asan('TODO: ASan support in minimal runtime')
  def test_minimal_runtime_utf8_invalid(self):
    self.set_setting('EXPORTED_RUNTIME_METHODS', ['UTF8ToString', 'stringToUTF8'])
    self.set_setting('MINIMAL_RUNTIME')
    for decoder_mode in [False, True]:
      self.set_setting('TEXTDECODER', decoder_mode)
      print(str(decoder_mode))
      self.do_runf(test_file('utf8_invalid.cpp'), 'OK.')
  def test_utf16_textdecoder(self):
    self.set_setting('EXPORTED_RUNTIME_METHODS', ['UTF16ToString', 'stringToUTF16', 'lengthBytesUTF16'])
    self.emcc_args += ['--embed-file', test_file('utf16_corpus.txt') + '@/utf16_corpus.txt']
    self.do_runf(test_file('benchmark_utf16.cpp'), 'OK.')
  def test_wprintf(self):
    self.do_core_test('test_wprintf.cpp')
  # stdout writes must work both with and without the full FS linked in.
  def test_write_stdout_fileno(self):
    self.do_core_test('test_write_stdout_fileno.c')
    self.do_core_test('test_write_stdout_fileno.c', args=['-s', 'FILESYSTEM=0'])
  def test_direct_string_constant_usage(self):
    # needs to flush stdio streams
    self.set_setting('EXIT_RUNTIME')
    self.do_core_test('test_direct_string_constant_usage.cpp')
  def test_std_cout_new(self):
    self.do_core_test('test_std_cout_new.cpp')
  def test_std_function_incomplete_return(self):
    self.do_core_test('test_std_function_incomplete_return.cpp')
  def test_istream(self):
    # needs to flush stdio streams
    self.set_setting('EXIT_RUNTIME')
    for linkable in [0]: # , 1]:
      print(linkable)
      # regression check for issue #273
      self.set_setting('LINKABLE', linkable)
      self.do_core_test('test_istream.cpp')
  # Drive the FS API entirely from a --pre-js script; the C program is empty.
  def test_fs_base(self):
    self.set_setting('DEFAULT_LIBRARY_FUNCS_TO_INCLUDE', ['$FS'])
    self.uses_es6 = True
    self.add_pre_run(read_file(test_file('filesystem/src.js')))
    src = 'int main() {return 0;}\n'
    expected = read_file(test_file('filesystem/output.txt'))
    self.do_run(src, expected)
  @also_with_noderawfs
  @is_slow_test
  def test_fs_nodefs_rw(self):
    # TODO(sbc): This test exposes in issue in the way we run closure compiler and
    # causes it to generate non-ES5 output.
    # Remove this line once we fix: https://github.com/emscripten-core/emscripten/issues/12628
    self.uses_es6 = True
    self.emcc_args += ['-lnodefs.js']
    self.set_setting('SYSCALL_DEBUG')
    self.do_runf(test_file('fs/test_nodefs_rw.c'), 'success')
    # Re-run under closure when this configuration supports it.
    if self.maybe_closure():
      self.do_runf(test_file('fs/test_nodefs_rw.c'), 'success')
  @also_with_noderawfs
  def test_fs_nodefs_cloexec(self):
    self.emcc_args += ['-lnodefs.js']
    self.do_runf(test_file('fs/test_nodefs_cloexec.c'), 'success')
  def test_fs_nodefs_home(self):
    self.set_setting('FORCE_FILESYSTEM')
    self.emcc_args += ['-lnodefs.js']
    self.do_runf(test_file('fs/test_nodefs_home.c'), 'success', js_engines=[config.NODE_JS])
  def test_fs_nodefs_nofollow(self):
    self.emcc_args += ['-lnodefs.js']
    self.do_runf(test_file('fs/test_nodefs_nofollow.c'), 'success', js_engines=[config.NODE_JS])
  def test_fs_trackingdelegate(self):
    self.set_setting('FS_DEBUG')
    self.do_run_in_out_file_test('fs/test_trackingdelegate.c')
  @also_with_noderawfs
  def test_fs_writeFile(self):
    self.set_setting('DISABLE_EXCEPTION_CATCHING') # see issue 2334
    self.do_run_in_out_file_test('fs/test_writeFile.cpp')
  def test_fs_write(self):
    self.do_run_in_out_file_test('fs/test_write.cpp')
  @also_with_noderawfs
  def test_fs_emptyPath(self):
    self.do_run_in_out_file_test('fs/test_emptyPath.c')
  @also_with_noderawfs
  def test_fs_append(self):
    self.do_runf(test_file('fs/test_append.c'), 'success')
  # mmap across all three filesystem backends.
  def test_fs_mmap(self):
    self.uses_es6 = True
    orig_compiler_opts = self.emcc_args.copy()
    for fs in ['MEMFS', 'NODEFS', 'NODERAWFS']:
      self.emcc_args = orig_compiler_opts + ['-D' + fs]
      if fs == 'NODEFS':
        self.emcc_args += ['-lnodefs.js']
      if fs == 'NODERAWFS':
        self.emcc_args += ['-lnodefs.js', '-lnoderawfs.js']
      self.do_run_in_out_file_test('fs/test_mmap.c')
  @parameterized({
    '': [],
    'minimal_runtime': ['-s', 'MINIMAL_RUNTIME=1']
  })
  def test_fs_no_main(self, *args):
    # library_fs.js uses hooks to enable ignoreing of permisions up until ATMAINs are run. This
    # test verified that they work correctly, even in programs without a main function.
    create_file('pre.js', '''
Module['preRun'] = function() {
  assert(FS.ignorePermissions, "ignorePermissions not set during preRun");
}
Module['onRuntimeInitialized'] = function() {
  assert(!FS.ignorePermissions, "ignorePermissions not unset during onRuntimeInitialized");
  assert(_foo() == 42);
}
''')
    self.set_setting('EXPORTED_FUNCTIONS', '_foo')
    self.set_setting('FORCE_FILESYSTEM')
    self.emcc_args += ['--pre-js', 'pre.js'] + list(args)
    self.do_run('int foo() { return 42; }', '', force_c=True)
  @also_with_noderawfs
  def test_fs_errorstack(self):
    # Enables strict mode, which may catch some strict-mode-only errors
    # so that users can safely work with strict JavaScript if enabled.
    create_file('pre.js', '"use strict";')
    self.emcc_args += ['--pre-js', 'pre.js']
    self.set_setting('FORCE_FILESYSTEM')
    self.set_setting('ASSERTIONS')
    self.do_run(r'''
      #include <emscripten.h>
      #include <iostream>
      int main(void) {
        std::cout << "hello world\n"; // should work with strict mode
        EM_ASM(
          try {
            FS.readFile('/dummy.txt');
          } catch (err) {
            err.stack = err.stack; // should be writable
            throw err;
          }
        );
        return 0;
      }
    ''', 'at Object.readFile', assert_returncode=NON_ZERO) # engines has different error stack format
  @also_with_noderawfs
  def test_fs_llseek(self):
    self.set_setting('FORCE_FILESYSTEM')
    self.do_runf(test_file('fs/test_llseek.c'), 'success')
  def test_fs_64bit(self):
    self.do_runf(test_file('fs/test_64bit.c'), 'success')
  # alarm()/SIGALRM, both with and without EXIT_RUNTIME.
  def test_sigalrm(self):
    self.do_runf(test_file('test_sigalrm.c'), 'Received alarm!')
    self.set_setting('EXIT_RUNTIME')
    self.do_runf(test_file('test_sigalrm.c'), 'Received alarm!')
  def test_signals(self):
    # NOTE(review): every other caller passes a bare test name to
    # do_core_test(); passing a full test_file() path here is inconsistent --
    # confirm do_core_test accepts absolute paths before relying on this.
    self.do_core_test(test_file('test_signals.c'))
@no_windows('https://github.com/emscripten-core/emscripten/issues/8882')
def test_unistd_access(self):
self.uses_es6 = True
orig_compiler_opts = self.emcc_args.copy()
for fs in ['MEMFS', 'NODEFS']:
self.emcc_args = orig_compiler_opts + ['-D' + fs]
if fs == 'NODEFS':
self.emcc_args += ['-lnodefs.js']
self.do_run_in_out_file_test('unistd/access.c', js_engines=[config.NODE_JS])
# Node.js fs.chmod is nearly no-op on Windows
if not WINDOWS:
self.emcc_args = orig_compiler_opts
self.set_setting('NODERAWFS')
self.do_run_in_out_file_test('unistd/access.c', js_engines=[config.NODE_JS])
  def test_unistd_curdir(self):
    self.uses_es6 = True
    self.do_run_in_out_file_test('unistd/curdir.c')
  @also_with_noderawfs
  def test_unistd_close(self):
    self.do_run_in_out_file_test('unistd/close.c')
  def test_unistd_confstr(self):
    self.do_run_in_out_file_test('unistd/confstr.c')
  def test_unistd_ttyname(self):
    self.do_runf(test_file('unistd/ttyname.c'), 'success')
  @also_with_noderawfs
  def test_unistd_pipe(self):
    self.do_runf(test_file('unistd/pipe.c'), 'success')
  @also_with_noderawfs
  def test_unistd_dup(self):
    self.do_run_in_out_file_test('unistd/dup.c')
  def test_unistd_pathconf(self):
    self.do_run_in_out_file_test('unistd/pathconf.c')
  # truncate() under MEMFS and NODEFS.
  def test_unistd_truncate(self):
    self.uses_es6 = True
    orig_compiler_opts = self.emcc_args.copy()
    for fs in ['MEMFS', 'NODEFS']:
      self.emcc_args = orig_compiler_opts + ['-D' + fs]
      if fs == 'NODEFS':
        self.emcc_args += ['-lnodefs.js']
      self.do_run_in_out_file_test('unistd/truncate.c', js_engines=[config.NODE_JS])
  @no_windows("Windows throws EPERM rather than EACCES or EINVAL")
  @unittest.skipIf(WINDOWS or os.geteuid() == 0, "Root access invalidates this test by being able to write on readonly files")
  def test_unistd_truncate_noderawfs(self):
    self.uses_es6 = True
    self.set_setting('NODERAWFS')
    self.maybe_closure()
    self.do_run_in_out_file_test('unistd/truncate.c', js_engines=[config.NODE_JS])
  def test_unistd_swab(self):
    self.do_run_in_out_file_test('unistd/swab.c')
  def test_unistd_isatty(self):
    self.do_runf(test_file('unistd/isatty.c'), 'success')
  @also_with_standalone_wasm()
  def test_unistd_sysconf(self):
    self.do_run_in_out_file_test('unistd/sysconf.c')
  @no_asan('ASan alters memory layout')
  def test_unistd_sysconf_phys_pages(self):
    filename = test_file('unistd/sysconf_phys_pages.c')
    # Expected page count depends on whether memory growth is enabled.
    if self.get_setting('ALLOW_MEMORY_GROWTH'):
      expected = (2 * 1024 * 1024 * 1024) // webassembly.WASM_PAGE_SIZE
    else:
      expected = 16 * 1024 * 1024 // webassembly.WASM_PAGE_SIZE
    self.do_runf(filename, str(expected) + ', errno: 0')
  def test_unistd_login(self):
    self.do_run_in_out_file_test('unistd/login.c')
  @no_windows('https://github.com/emscripten-core/emscripten/issues/8882')
  def test_unistd_unlink(self):
    self.clear()
    orig_compiler_opts = self.emcc_args.copy()
    for fs in ['MEMFS', 'NODEFS']:
      self.emcc_args = orig_compiler_opts + ['-D' + fs]
      # symlinks on node.js on non-linux behave differently (e.g. on Windows they require administrative privileges)
      # so skip testing those bits on that combination.
      if fs == 'NODEFS':
        self.emcc_args += ['-lnodefs.js']
        if WINDOWS:
          self.emcc_args += ['-DNO_SYMLINK=1']
        if MACOS:
          continue
      self.do_runf(test_file('unistd/unlink.c'), 'success', js_engines=[config.NODE_JS])
    # Several differences/bugs on non-linux including https://github.com/nodejs/node/issues/18014
    if not WINDOWS and not MACOS:
      self.emcc_args = orig_compiler_opts + ['-DNODERAWFS']
      # 0 if root user
      if os.geteuid() == 0:
        self.emcc_args += ['-DSKIP_ACCESS_TESTS']
      self.set_setting('NODERAWFS')
      self.do_runf(test_file('unistd/unlink.c'), 'success', js_engines=[config.NODE_JS])
  def test_unistd_links(self):
    self.clear()
    orig_compiler_opts = self.emcc_args.copy()
    for fs in ['MEMFS', 'NODEFS']:
      if WINDOWS and fs == 'NODEFS':
        print('Skipping NODEFS part of this test for test_unistd_links on Windows, since it would require administrative privileges.', file=sys.stderr)
        # Also, other detected discrepancies if you do end up running this test on NODEFS:
        # test expects /, but Windows gives \ as path slashes.
        # Calling readlink() on a non-link gives error 22 EINVAL on Unix, but simply error 0 OK on Windows.
        continue
      self.emcc_args = orig_compiler_opts + ['-D' + fs]
      if fs == 'NODEFS':
        self.emcc_args += ['-lnodefs.js']
      self.do_run_in_out_file_test('unistd/links.c', js_engines=[config.NODE_JS])
  @no_windows('Skipping NODEFS test, since it would require administrative privileges.')
  def test_unistd_symlink_on_nodefs(self):
    # Also, other detected discrepancies if you do end up running this test on NODEFS:
    # test expects /, but Windows gives \ as path slashes.
    # Calling readlink() on a non-link gives error 22 EINVAL on Unix, but simply error 0 OK on Windows.
    self.emcc_args += ['-lnodefs.js']
    self.do_run_in_out_file_test('unistd/symlink_on_nodefs.c', js_engines=[config.NODE_JS])
  def test_unistd_sleep(self):
    self.do_run_in_out_file_test('unistd/sleep.c')
  @also_with_wasm_bigint
  def test_unistd_io(self):
    self.set_setting('DEFAULT_LIBRARY_FUNCS_TO_INCLUDE', ['$ERRNO_CODES'])
    orig_compiler_opts = self.emcc_args.copy()
    for fs in ['MEMFS', 'NODEFS']:
      self.clear()
      self.emcc_args = orig_compiler_opts + ['-D' + fs]
      if fs == 'NODEFS':
        self.emcc_args += ['-lnodefs.js']
      self.do_run_in_out_file_test('unistd/io.c')
  @no_windows('https://github.com/emscripten-core/emscripten/issues/8882')
  def test_unistd_misc(self):
    orig_compiler_opts = self.emcc_args.copy()
    for fs in ['MEMFS', 'NODEFS']:
      self.emcc_args = orig_compiler_opts + ['-D' + fs]
      if fs == 'NODEFS':
        self.emcc_args += ['-lnodefs.js']
      self.do_run_in_out_file_test('unistd/misc.c', js_engines=[config.NODE_JS], interleaved_output=False)
  def test_unistd_fstatfs(self):
    self.do_run_in_out_file_test('unistd/fstatfs.c')
  # i64s in the API, which we'd need to legalize for JS, so in standalone mode
  # all we can test is wasm VMs
  @also_with_standalone_wasm(wasm2c=True)
  def test_posixtime(self):
    self.banned_js_engines = [config.V8_ENGINE] # v8 lacks monotonic time
    self.do_core_test('test_posixtime.c')
  def test_uname(self):
    self.do_core_test('test_uname.c')
  def test_unary_literal(self):
    self.do_core_test('test_unary_literal.cpp')
  # environment/argv handling; accepts either of two program-name expansions
  # depending on the JS engine.
  def test_env(self):
    expected = read_file(test_file('env/output.txt'))
    self.do_runf(test_file('env/src.c'), [
      expected.replace('{{{ THIS_PROGRAM }}}', self.in_dir('src.js')).replace('\\', '/'), # node, can find itself properly
      expected.replace('{{{ THIS_PROGRAM }}}', './this.program') # spidermonkey, v8
    ])
  def test_environ(self):
    expected = read_file(test_file('env/output-mini.txt'))
    self.do_runf(test_file('env/src-mini.c'), [
      expected.replace('{{{ THIS_PROGRAM }}}', self.in_dir('src-mini.js')).replace('\\', '/'), # node, can find itself properly
      expected.replace('{{{ THIS_PROGRAM }}}', './this.program') # spidermonkey, v8
    ])
  def test_systypes(self):
    self.do_core_test('test_systypes.c')
  # stddef.h must work from both C++ and C.
  def test_stddef(self):
    self.do_core_test('test_stddef.cpp')
    self.do_core_test('test_stddef.cpp', force_c=True)
  def test_getloadavg(self):
    self.do_core_test('test_getloadavg.c')
  def test_nl_types(self):
    self.do_core_test('test_nl_types.c')
  def test_799(self):
    src = test_file('799.cpp')
    self.do_runf(src, '''Set PORT family: 0, port: 3979
Get PORT family: 0
PORT: 3979
''')
  def test_ctype(self):
    self.do_core_test('test_ctype.c')
  def test_strcasecmp(self):
    self.do_core_test('test_strcasecmp.c')
  def test_atomic(self):
    self.do_core_test('test_atomic.c')
  def test_atomic_cxx(self):
    # the wasm backend has lock-free atomics, but not asm.js or asm2wasm
    self.emcc_args += ['-DIS_64BIT_LOCK_FREE=1']
    self.do_core_test('test_atomic_cxx.cpp')
    # TODO: test with USE_PTHREADS in wasm backend as well
  def test_phiundef(self):
    self.do_core_test('test_phiundef.c')
  def test_netinet_in(self):
    self.do_run_in_out_file_test('netinet/in.cpp')
  @needs_dylink
  def test_main_module_static_align(self):
    if self.get_setting('ALLOW_MEMORY_GROWTH'):
      self.skipTest('no shared modules with memory growth')
    self.set_setting('MAIN_MODULE')
    self.do_core_test('test_main_module_static_align.cpp')
# libc++ tests
def test_iostream_and_determinism(self):
create_file('src.cpp', '''
#include <iostream>
int main()
{
std::cout << "hello world" << std::endl << 77 << "." << std::endl;
return 0;
}
''')
num = 5
for i in range(num):
print('(iteration %d)' % i)
# add some timing nondeterminism here, not that we need it, but whatever
time.sleep(random.random() / (10 * num))
self.do_runf('src.cpp', 'hello world\n77.\n')
# Verify that this build is identical to the previous one
if os.path.exists('src.js.previous'):
self.assertBinaryEqual('src.js', 'src.js.previous')
shutil.copy2('src.js', 'src.js.previous')
# Same but for the wasm file.
if self.is_wasm() and not self.get_setting('WASM2JS'):
if os.path.exists('src.wasm.previous'):
self.assertBinaryEqual('src.wasm', 'src.wasm.previous')
shutil.copy2('src.wasm', 'src.wasm.previous')
  def test_stdvec(self):
    self.do_core_test('test_stdvec.cpp')
  def test_random_device(self):
    self.maybe_closure()
    self.do_core_test('test_random_device.cpp')
  def test_reinterpreted_ptrs(self):
    self.do_core_test('test_reinterpreted_ptrs.cpp')
  # C++ calling into functions defined across two separate --js-library files.
  def test_js_libraries(self):
    create_file('main.cpp', '''
      #include <stdio.h>
      extern "C" {
        extern void printey();
        extern int calcey(int x, int y);
      }
      int main() {
        printey();
        printf("*%d*\\n", calcey(10, 22));
        return 0;
      }
    ''')
    create_file('mylib1.js', '''
      mergeInto(LibraryManager.library, {
        printey: function() {
          out('hello from lib!');
        }
      });
    ''')
    create_file('mylib2.js', '''
      mergeInto(LibraryManager.library, {
        calcey: function(x, y) {
          return x + y;
        }
      });
    ''')
    self.emcc_args += ['--js-library', 'mylib1.js', '--js-library', 'mylib2.js']
    self.do_runf('main.cpp', 'hello from lib!\n*32*\n')
  # A --js-library containing non-ASCII text must survive the build.
  def test_unicode_js_library(self):
    create_file('main.cpp', '''
      #include <stdio.h>
      extern "C" {
        extern void printey();
      }
      int main() {
        printey();
        return 0;
      }
    ''')
    self.emcc_args += ['--js-library', test_file('unicode_library.js')]
    self.do_runf('main.cpp', u'Unicode snowman \u2603 says hello!')
  def test_funcptr_import_type(self):
    self.emcc_args += ['--js-library', test_file('core/test_funcptr_import_type.js')]
    self.do_core_test('test_funcptr_import_type.cpp')
  # A const union initialized with the address of another global.
  @no_asan('ASan does not work with EXPORT_ALL')
  def test_constglobalunion(self):
    self.set_setting('EXPORT_ALL')
    self.do_run(r'''
#include <stdio.h>
struct one_const {
  long a;
};
struct two_consts {
  long a;
  long b;
};
union some_consts {
  struct one_const one;
  struct two_consts two;
};
union some_consts my_consts = {{
  1
}};
struct one_const addr_of_my_consts = {
  (long)(&my_consts)
};
int main(void) {
  printf("%li\n", (long)!!addr_of_my_consts.a);
  return 0;
}
''', '1')
  ### 'Medium' tests
  # Build fannkuch once, then run it for several n and check each result.
  def test_fannkuch(self):
    results = [(1, 0), (2, 1), (3, 2), (4, 4), (5, 7), (6, 10), (7, 16), (8, 22)]
    self.build(test_file('fannkuch.cpp'))
    for i, j in results:
      print(i, j)
      self.do_run('fannkuch.js', 'Pfannkuchen(%d) = %d.' % (i, j), args=[str(i)], no_build=True)
  def test_raytrace(self):
    # TODO: Should we remove this test?
    self.skipTest('Relies on double value rounding, extremely sensitive')
    src = read_file(test_file('raytrace.cpp')).replace('double', 'float')
    output = read_file(test_file('raytrace.ppm'))
    self.do_run(src, output, args=['3', '16'])
def test_fasta(self):
results = [(1, '''GG*ctt**tgagc*'''),
(20, '''GGCCGGGCGCGGTGGCTCACGCCTGTAATCCCAGCACTTT*cttBtatcatatgctaKggNcataaaSatgtaaaDcDRtBggDtctttataattcBgtcg**tacgtgtagcctagtgtttgtgttgcgttatagtctatttgtggacacagtatggtcaaa**tgacgtcttttgatctgacggcgttaacaaagatactctg*'''),
(50, '''GGCCGGGCGCGGTGGCTCACGCCTGTAATCCCAGCACTTTGGGAGGCCGAGGCGGGCGGA*TCACCTGAGGTCAGGAGTTCGAGACCAGCCTGGCCAACAT*cttBtatcatatgctaKggNcataaaSatgtaaaDcDRtBggDtctttataattcBgtcg**tactDtDagcctatttSVHtHttKtgtHMaSattgWaHKHttttagacatWatgtRgaaa**NtactMcSMtYtcMgRtacttctWBacgaa**agatactctgggcaacacacatacttctctcatgttgtttcttcggacctttcataacct**ttcctggcacatggttagctgcacatcacaggattgtaagggtctagtggttcagtgagc**ggaatatcattcgtcggtggtgttaatctatctcggtgtagcttataaatgcatccgtaa**gaatattatgtttatttgtcggtacgttcatggtagtggtgtcgccgatttagacgtaaa**ggcatgtatg*''')]
old = self.emcc_args
orig_src = read_file(test_file('fasta.cpp'))
def test(extra_args):
self.emcc_args = old + extra_args
for t in ['float', 'double']:
print(t)
src = orig_src.replace('double', t)
with open('fasta.cpp', 'w') as f:
f.write(src)
self.build('fasta.cpp')
for arg, output in results:
self.do_run('fasta.js', output, args=[str(arg)], output_nicerizer=lambda x: x.replace('\n', '*'), no_build=True)
shutil.copyfile('fasta.js', '%s.js' % t)
test([])
  @needs_non_trapping_float_to_int
  def test_fasta_nontrapping(self):
    # Same as test_fasta, with nontrapping float-to-int conversions enabled.
    self.emcc_args += ['-mnontrapping-fptoint']
    self.test_fasta()
  def test_whets(self):
    self.do_runf(test_file('whets.cpp'), 'Single Precision C Whetstone Benchmark')
  # node is slower, and fail on 64-bit
  @require_v8
  @no_asan('depends on the specifics of memory size, which for asan we are forced to increase')
  def test_dlmalloc_inline(self):
    # needed with typed arrays
    self.set_setting('INITIAL_MEMORY', '128mb')
    # Concatenate dlmalloc itself with the test driver into one source blob.
    src = read_file(path_from_root('system/lib/dlmalloc.c')) + '\n\n\n' + read_file(test_file('dlmalloc_test.c'))
    self.do_run(src, '*1,0*', args=['200', '1'], force_c=True)
    self.do_run('src.js', '*400,0*', args=['400', '400'], force_c=True, no_build=True)
# node is slower, and fail on 64-bit
@require_v8
@no_asan('depends on the specifics of memory size, which for asan we are forced to increase')
def test_dlmalloc(self):
# needed with typed arrays
self.set_setting('INITIAL_MEMORY', '128mb')
# Linked version
self.do_runf(test_file('dlmalloc_test.c'), '*1,0*', args=['200', '1'])
self.do_run('dlmalloc_test.js', '*400,0*', args=['400', '400'], no_build=True)
# TODO: do this in other passes too, passing their opts into emcc
if self.emcc_args == []:
# emcc should build in dlmalloc automatically, and do all the sign correction etc. for it
try_delete('src.js')
self.run_process([EMCC, test_file('dlmalloc_test.c'), '-s', 'INITIAL_MEMORY=128MB', '-o', 'src.js'], stdout=PIPE, stderr=self.stderr_redirect)
self.do_run(None, '*1,0*', ['200', '1'], no_build=True)
self.do_run(None, '*400,0*', ['400', '400'], no_build=True)
# The same for new and all its variants
src = read_file(test_file('new.cpp'))
for new, delete in [
('malloc(100)', 'free'),
('new char[100]', 'delete[]'),
('new Structy', 'delete'),
('new int', 'delete'),
('new Structy[10]', 'delete[]'),
]:
self.do_run(src.replace('{{{ NEW }}}', new).replace('{{{ DELETE }}}', delete), '*1,0*')
  # Tests that a large allocation should gracefully fail
  @no_asan('the memory size limit here is too small for asan')
  def test_dlmalloc_large(self):
    self.emcc_args += ['-s', 'ABORTING_MALLOC=0', '-s', 'ALLOW_MEMORY_GROWTH=1', '-s', 'MAXIMUM_MEMORY=128MB']
    self.do_runf(test_file('dlmalloc_test_large.c'), '0 0 0 1')
  # Override operator new while the rest of dlmalloc is linked normally.
  @no_asan('asan also changes malloc, and that ends up linking in new twice')
  def test_dlmalloc_partial(self):
    # present part of the symbols of dlmalloc, not all
    src = read_file(test_file('new.cpp')).replace('{{{ NEW }}}', 'new int').replace('{{{ DELETE }}}', 'delete') + '''
#include <new>
void* operator new(size_t size) {
  printf("new %zu!\\n", size);
  return malloc(size);
}
'''
    self.do_run(src, 'new 4!\n*1,0*')
  @no_asan('asan also changes malloc, and that ends up linking in new twice')
  def test_dlmalloc_partial_2(self):
    if 'SAFE_HEAP' in str(self.emcc_args):
      self.skipTest('we do unsafe stuff here')
    # present part of the symbols of dlmalloc, not all. malloc is harder to link than new which is weak.
    self.do_core_test('test_dlmalloc_partial_2.c', assert_returncode=NON_ZERO)
  # libc++ hash containers plus a minimal std::set smoke test.
  def test_libcxx(self):
    self.do_runf(test_file('hashtest.cpp'),
                 'june -> 30\nPrevious (in alphabetical order) is july\nNext (in alphabetical order) is march')
    self.do_run('''
      #include <set>
      #include <stdio.h>
      int main() {
        std::set<int> fetchOriginatorNums;
        fetchOriginatorNums.insert(171);
        printf("hello world\\n");
        return 0;
      }
      ''', 'hello world')
  def test_typeid(self):
    self.do_core_test('test_typeid.cpp')
  def test_static_variable(self):
    # needs atexit
    self.set_setting('EXIT_RUNTIME')
    self.do_core_test('test_static_variable.cpp')
  def test_fakestat(self):
    self.do_core_test('test_fakestat.c')
  def test_mmap(self):
    # ASan needs more memory, but that is set up separately
    if '-fsanitize=address' not in self.emcc_args:
      self.set_setting('INITIAL_MEMORY', '128mb')
    # needs to flush stdio streams
    self.set_setting('EXIT_RUNTIME')
    self.do_core_test('test_mmap.c')
def test_mmap_file(self):
for extra_args in [[]]:
self.emcc_args += ['--embed-file', 'data.dat'] + extra_args
x = 'data from the file........'
s = ''
while len(s) < 9000:
if len(s) + len(x) < 9000:
s += x
continue
s += '.'
assert len(s) == 9000
create_file('data.dat', s)
self.do_runf(test_file('mmap_file.c'), '*\n' + s[0:20] + '\n' + s[4096:4096 + 20] + '\n*\n')
@no_lsan('Test code contains memory leaks')
def test_cubescript(self):
# uses register keyword
self.emcc_args += ['-std=c++03', '-Wno-dynamic-class-memaccess']
self.maybe_closure()
self.emcc_args += ['-I', test_file('third_party/cubescript')]
# Test code contains memory leaks
if '-fsanitize=address' in self.emcc_args:
self.emcc_args += ['--pre-js', test_file('asan-no-leak.js')]
def test():
src = test_file('third_party/cubescript/command.cpp')
self.do_runf(src, '*\nTemp is 33\n9\n5\nhello, everyone\n*')
test()
print('asyncify') # extra coverage
self.set_setting('ASYNCIFY')
test()
@needs_dylink
def test_relocatable_void_function(self):
self.set_setting('RELOCATABLE')
self.do_core_test('test_relocatable_void_function.c')
@wasm_simd
def test_wasm_intrinsics_simd(self):
def run():
self.do_runf(test_file('test_wasm_intrinsics_simd.c'), 'Success!')
# Improves test readability
self.emcc_args.append('-Wno-c++11-narrowing')
self.emcc_args.extend(['-Wpedantic', '-Werror', '-Wall', '-xc++'])
run()
self.emcc_args.append('-funsigned-char')
run()
# Tests invoking the NEON SIMD API via arm_neon.h header
@wasm_simd
def test_neon_wasm_simd(self):
self.emcc_args.append('-Wno-c++11-narrowing')
self.emcc_args.append('-mfpu=neon')
self.emcc_args.append('-msimd128')
self.do_runf(test_file('neon/test_neon_wasm_simd.cpp'), 'Success!')
# Tests invoking the SIMD API via x86 SSE1 xmmintrin.h header (_mm_x() functions)
@wasm_simd
@requires_native_clang
@no_safe_heap('has unaligned 64-bit operations in wasm')
def test_sse1(self):
src = test_file('sse/test_sse1.cpp')
self.run_process([shared.CLANG_CXX, src, '-msse', '-o', 'test_sse1', '-D_CRT_SECURE_NO_WARNINGS=1'] + clang_native.get_clang_native_args(), stdout=PIPE)
native_result = self.run_process('./test_sse1', stdout=PIPE).stdout
self.emcc_args += ['-I' + test_file('sse'), '-msse']
self.maybe_closure()
self.do_runf(src, native_result)
# Tests invoking the SIMD API via x86 SSE2 emmintrin.h header (_mm_x() functions)
@wasm_simd
@requires_native_clang
@no_safe_heap('has unaligned 64-bit operations in wasm')
@is_slow_test
def test_sse2(self):
src = test_file('sse/test_sse2.cpp')
self.run_process([shared.CLANG_CXX, src, '-msse2', '-Wno-argument-outside-range', '-o', 'test_sse2', '-D_CRT_SECURE_NO_WARNINGS=1'] + clang_native.get_clang_native_args(), stdout=PIPE)
native_result = self.run_process('./test_sse2', stdout=PIPE).stdout
self.emcc_args += ['-I' + test_file('sse'), '-msse2', '-Wno-argument-outside-range']
self.maybe_closure()
self.do_runf(src, native_result)
# Tests invoking the SIMD API via x86 SSE3 pmmintrin.h header (_mm_x() functions)
@wasm_simd
@requires_native_clang
def test_sse3(self):
src = test_file('sse/test_sse3.cpp')
self.run_process([shared.CLANG_CXX, src, '-msse3', '-Wno-argument-outside-range', '-o', 'test_sse3', '-D_CRT_SECURE_NO_WARNINGS=1'] + clang_native.get_clang_native_args(), stdout=PIPE)
native_result = self.run_process('./test_sse3', stdout=PIPE).stdout
self.emcc_args += ['-I' + test_file('sse'), '-msse3', '-Wno-argument-outside-range']
self.maybe_closure()
self.do_runf(src, native_result)
# Tests invoking the SIMD API via x86 SSSE3 tmmintrin.h header (_mm_x() functions)
@wasm_simd
@requires_native_clang
def test_ssse3(self):
src = test_file('sse/test_ssse3.cpp')
self.run_process([shared.CLANG_CXX, src, '-mssse3', '-Wno-argument-outside-range', '-o', 'test_ssse3', '-D_CRT_SECURE_NO_WARNINGS=1'] + clang_native.get_clang_native_args(), stdout=PIPE)
native_result = self.run_process('./test_ssse3', stdout=PIPE).stdout
self.emcc_args += ['-I' + test_file('sse'), '-mssse3', '-Wno-argument-outside-range']
self.maybe_closure()
self.do_runf(src, native_result)
  # Tests invoking the SIMD API via x86 SSE4.1 smmintrin.h header (_mm_x() functions)
  @wasm_simd
  @requires_native_clang
  @is_slow_test
  def test_sse4_1(self):
    """Compare the native clang build of test_sse4_1.cpp against the emcc -msse4.1 build."""
    src = test_file('sse/test_sse4_1.cpp')
    if not self.is_optimizing() and '-fsanitize=address' in self.emcc_args:
      # ASan with -O0 fails with:
      # Compiling function #69:"__original_main" failed: local count too large
      self.emcc_args.append('-O1')
    self.run_process([shared.CLANG_CXX, src, '-msse4.1', '-Wno-argument-outside-range', '-o', 'test_sse4_1', '-D_CRT_SECURE_NO_WARNINGS=1'] + clang_native.get_clang_native_args(), stdout=PIPE)
    native_result = self.run_process('./test_sse4_1', stdout=PIPE).stdout
    self.emcc_args += ['-I' + test_file('sse'), '-msse4.1', '-Wno-argument-outside-range']
    self.maybe_closure()
    self.do_runf(src, native_result)
  # Tests invoking the SIMD API via x86 SSE4.2 nmmintrin.h header (_mm_x() functions)
  @wasm_simd
  @requires_native_clang
  def test_sse4_2(self):
    """Compare the native clang build of test_sse4_2.cpp against the emcc -msse4.2 build."""
    src = test_file('sse/test_sse4_2.cpp')
    self.run_process([shared.CLANG_CXX, src, '-msse4.2', '-Wno-argument-outside-range', '-o', 'test_sse4_2', '-D_CRT_SECURE_NO_WARNINGS=1'] + clang_native.get_clang_native_args(), stdout=PIPE)
    native_result = self.run_process('./test_sse4_2', stdout=PIPE).stdout
    self.emcc_args += ['-I' + test_file('sse'), '-msse4.2', '-Wno-argument-outside-range']
    self.maybe_closure()
    self.do_runf(src, native_result)
  # Tests invoking the SIMD API via x86 AVX avxintrin.h header (_mm_x() functions)
  @wasm_simd
  @requires_native_clang
  def test_avx(self):
    """Compare the native clang build of test_avx.cpp against the emcc -mavx build."""
    src = test_file('sse/test_avx.cpp')
    self.run_process([shared.CLANG_CXX, src, '-mavx', '-Wno-argument-outside-range', '-o', 'test_avx', '-D_CRT_SECURE_NO_WARNINGS=1'] + clang_native.get_clang_native_args(), stdout=PIPE)
    native_result = self.run_process('./test_avx', stdout=PIPE).stdout
    self.emcc_args += ['-I' + test_file('sse'), '-mavx', '-Wno-argument-outside-range']
    self.maybe_closure()
    self.do_runf(src, native_result)
  @wasm_simd
  def test_sse_diagnostics(self):
    """Check that compiling with -msse and -DWASM_SIMD_COMPAT_SLOW emits the
    slow-path-emulation diagnostic on stderr (so -Werror must be dropped)."""
    self.emcc_args.remove('-Werror')
    src = test_file('sse/test_sse_diagnostic.cpp')
    p = self.run_process(
        [shared.EMXX, src, '-msse', '-DWASM_SIMD_COMPAT_SLOW'] + self.get_emcc_args(),
        stderr=PIPE)
    self.assertContained('Instruction emulated via slow path.', p.stderr)
  @requires_native_clang
  @wasm_relaxed_simd
  def test_relaxed_simd_implies_simd128(self):
    """Building an SSE source under relaxed-SIMD must succeed, i.e. relaxed
    SIMD implies the base simd128 feature."""
    src = test_file('sse/test_sse1.cpp')
    self.build(src, emcc_args=['-msse'])
  @no_asan('call stack exceeded on some versions of node')
  def test_gcc_unmangler(self):
    """Run libiberty's cp-demangle on a mangled name and check the demangled form."""
    self.emcc_args += ['-I' + test_file('third_party/libiberty')]
    self.do_runf(test_file('third_party/libiberty/cp-demangle.c'), '*d_demangle(char const*, int, unsigned int*)*', args=['_ZL10d_demanglePKciPj'])
  @needs_make('make')
  def test_lua(self):
    """Build the bundled Lua interpreter and run a small script through it."""
    self.emcc_args.remove('-Werror')
    libs = self.get_library('third_party/lua', [Path('src/lua.o'), Path('src/liblua.a')], make=['make', 'generic'], configure=None)
    self.do_run('',
                'hello lua world!\n17\n1\n2\n3\n4\n7',
                args=['-e', '''print("hello lua world!");print(17);for x = 1,4 do print(x) end;print(10-3)'''],
                libraries=libs,
                includes=[test_file('lua')],
                output_nicerizer=lambda output: output.replace('\n\n', '\n').replace('\n\n', '\n'))
  @no_asan('issues with freetype itself')
  @needs_make('configure script')
  @is_slow_test
  def test_freetype(self):
    """Build freetype, preload a TTF font into the virtual FS, and render
    several strings, comparing against reference output files."""
    self.add_pre_run("FS.createDataFile('/', 'font.ttf', %s, true, false, false);" % str(
      list(bytearray(read_binary(test_file('freetype/LiberationSansBold.ttf'))))
    ))
    # Not needed for js, but useful for debugging
    shutil.copyfile(test_file('freetype/LiberationSansBold.ttf'), 'font.ttf')
    # Main
    self.do_run_from_file(test_file('freetype/main.c'),
                          test_file('freetype/ref.txt'),
                          args=['font.ttf', 'test!', '150', '120', '25'],
                          libraries=self.get_freetype_library(),
                          includes=[test_file('third_party/freetype/include')])
    # github issue 324
    print('[issue 324]')
    self.do_run_from_file(test_file('freetype/main_2.c'),
                          test_file('freetype/ref_2.txt'),
                          args=['font.ttf', 'w', '32', '32', '25'],
                          libraries=self.get_freetype_library(),
                          includes=[test_file('third_party/freetype/include')])
    print('[issue 324 case 2]')
    self.do_run_from_file(test_file('freetype/main_3.c'),
                          test_file('freetype/ref_3.txt'),
                          args=['font.ttf', 'W', '32', '32', '0'],
                          libraries=self.get_freetype_library(),
                          includes=[test_file('third_party/freetype/include')])
    print('[issue 324 case 3]')
    # Re-run the already-built main_3.js with different args (no_build=True)
    self.do_run('main_3.js',
                read_file(test_file('freetype/ref_4.txt')),
                args=['font.ttf', 'ea', '40', '32', '0'],
                no_build=True)
  @no_asan('local count too large for VMs')
  @is_slow_test
  def test_sqlite(self):
    """Compile the sqlite3 amalgamation plus a benchmark driver and compare
    the benchmark output against a stored reference."""
    self.set_setting('DISABLE_EXCEPTION_CATCHING')
    self.set_setting('EXPORTED_FUNCTIONS', ['_main', '_sqlite3_open', '_sqlite3_close', '_sqlite3_exec', '_sqlite3_free'])
    if '-g' in self.emcc_args:
      print("disabling inlining") # without registerize (which -g disables), we generate huge amounts of code
      self.set_setting('INLINING_LIMIT')
    # newer clang has a warning for implicit conversions that lose information,
    # which happens in sqlite (see #9138)
    self.emcc_args += ['-Wno-implicit-int-float-conversion']
    # newer clang warns about "suspicious concatenation of string literals in an
    # array initialization; did you mean to separate the elements with a comma?"
    self.emcc_args += ['-Wno-string-concatenation']
    # ignore unknown flags, which lets the above flags be used on github CI
    # before the LLVM change rolls in (the same LLVM change that adds the
    # warning also starts to warn on it)
    self.emcc_args += ['-Wno-unknown-warning-option']
    self.emcc_args += ['-Wno-pointer-bool-conversion']
    self.emcc_args += ['-I' + test_file('third_party/sqlite')]
    # prepend configuration defines, then concatenate sqlite3 and the benchmark
    src = '''
       #define SQLITE_DISABLE_LFS
       #define LONGDOUBLE_TYPE double
       #define SQLITE_INT64_TYPE long long int
       #define SQLITE_THREADSAFE 0
    '''
    src += read_file(test_file('third_party/sqlite/sqlite3.c'))
    src += read_file(test_file('sqlite/benchmark.c'))
    self.do_run(src,
                read_file(test_file('sqlite/benchmark.txt')),
                includes=[test_file('sqlite')],
                force_c=True)
  @needs_make('mingw32-make')
  @is_slow_test
  @parameterized({
    'cmake': (True,),
    'configure': (False,)
  })
  def test_zlib(self, use_cmake):
    """Build zlib with either cmake or ./configure and run its example program."""
    if WINDOWS and not use_cmake:
      self.skipTest("Windows cannot run configure sh scripts")
    self.maybe_closure()
    self.emcc_args.append('-Wno-shift-negative-value')
    if '-g' in self.emcc_args:
      self.emcc_args.append('-gsource-map') # more source maps coverage
    if use_cmake:
      make_args = []
      configure = ['cmake', '.']
    else:
      make_args = ['libz.a']
      configure = ['sh', './configure']
    self.do_run_from_file(
        test_file('third_party/zlib/example.c'),
        test_file('core/test_zlib.out'),
        libraries=self.get_library('third_party/zlib', 'libz.a', make_args=make_args, configure=configure),
        includes=[test_file('third_party/zlib'), 'building', 'zlib'])
  @needs_make('make')
  @is_slow_test
  @parameterized({
    'cmake': (True,),
    'autoconf': (False,)
  })
  # Called thus so it runs late in the alphabetical cycle... it is long
  def test_bullet(self, use_cmake):
    """Build the Bullet physics library (cmake or autoconf) and run its
    HelloWorld demo; any of several reference outputs is accepted because
    of rounding differences."""
    if WINDOWS and not use_cmake:
      self.skipTest("Windows cannot run configure sh scripts")
    self.emcc_args += [
      '-Wno-c++11-narrowing',
      '-Wno-deprecated-register',
      '-Wno-writable-strings',
      '-Wno-shift-negative-value',
      '-Wno-format'
    ]
    # extra testing for ASSERTIONS == 2
    if use_cmake:
      self.set_setting('ASSERTIONS', 2)
      self.emcc_args.append('-Wno-unused-command-line-argument')
    self.do_runf(test_file('third_party/bullet/Demos/HelloWorld/HelloWorld.cpp'),
                 [read_file(test_file('bullet/output.txt')), # different roundings
                  read_file(test_file('bullet/output2.txt')),
                  read_file(test_file('bullet/output3.txt')),
                  read_file(test_file('bullet/output4.txt'))],
                 libraries=self.get_bullet_library(use_cmake),
                 includes=[test_file('third_party/bullet/src')])
  @unittest.skip('LLVM changes have caused this C++ to no longer compile, https://github.com/emscripten-core/emscripten/issues/14614')
  @no_asan('issues with freetype itself')
  @needs_make('depends on freetype')
  @is_slow_test
  def test_poppler(self):
    """Build poppler, render a PDF to PPM inside the VM, and compare the
    emitted pixel data against a reference image. Currently skipped upstream."""
    pdf_data = read_binary(test_file('poppler/paper.pdf'))
    create_file('paper.pdf.js', str(list(bytearray(pdf_data))))
    create_file('pre.js', '''
    Module.preRun = function() {
      FS.createDataFile('/', 'paper.pdf', eval(read_('paper.pdf.js')), true, false, false);
    };
    Module.postRun = function() {
      var FileData = Array.from(MEMFS.getFileDataAsTypedArray(FS.root.contents['filename-1.ppm']));
      out("Data: " + JSON.stringify(FileData.map(function(x) { return unSign(x, 8) })));
    };
''')
    self.emcc_args += ['--pre-js', 'pre.js', '-s', 'DEFAULT_LIBRARY_FUNCS_TO_INCLUDE=$unSign']
    ppm_data = str(list(bytearray(read_binary(test_file('poppler/ref.ppm')))))
    self.do_run('', ppm_data.replace(' ', ''),
                libraries=self.get_poppler_library(),
                args=['-scale-to', '512', 'paper.pdf', 'filename'])
  @needs_make('make')
  @is_slow_test
  def test_openjpeg(self):
    """Build openjpeg (cmake), decode a .j2k image to raw pixels inside the VM,
    and accept the result if the mean pixel difference from the native
    reference image is tiny (JS doubles cause small rounding deviations)."""
    def do_test_openjpeg():
      # Break a very long stringified byte array into lines so the generated
      # pre.js stays readable/parsable.
      def line_splitter(data):
        out = ''
        counter = 0
        for ch in data:
          out += ch
          if ch == ' ' and counter > 60:
            out += '\n'
            counter = 0
          else:
            counter += 1
        return out
      # remove -g, so we have one test without it by default
      self.emcc_args = [x for x in self.emcc_args if x != '-g']
      original_j2k = test_file('openjpeg/syntensity_lobby_s.j2k')
      image_bytes = list(bytearray(read_binary(original_j2k)))
      create_file('pre.js', """
        Module.preRun = function() { FS.createDataFile('/', 'image.j2k', %s, true, false, false); };
        Module.postRun = function() {
          out('Data: ' + JSON.stringify(Array.from(MEMFS.getFileDataAsTypedArray(FS.analyzePath('image.raw').object))));
        };
        """ % line_splitter(str(image_bytes)))
      # ensure libpng is built so that openjpeg's configure step can detect it.
      # If we don't do this then we don't know what the state of the cache will be
      # and this test would produce different, non-deterministic results based on,
      # for example, what other tests had previously run.
      self.run_process([EMBUILDER, 'build', 'libpng'])
      lib = self.get_library('third_party/openjpeg',
                             [Path('codec/CMakeFiles/j2k_to_image.dir/index.c.o'),
                              Path('codec/CMakeFiles/j2k_to_image.dir/convert.c.o'),
                              Path('codec/CMakeFiles/j2k_to_image.dir/__/common/color.c.o'),
                              Path('bin/libopenjpeg.a')],
                             configure=['cmake', '.'],
                             # configure_args=['--enable-tiff=no', '--enable-jp3d=no', '--enable-png=no'],
                             make_args=[]) # no -j 2, since parallel builds can fail
      # We use doubles in JS, so we get slightly different values than native code. So we
      # check our output by comparing the average pixel difference
      def image_compare(output):
        # Get the image generated by JS, from the JSON.stringify'd array
        m = re.search(r'\[[\d, -]*\]', output)
        self.assertIsNotNone(m, 'Failed to find proper image output in: ' + output)
        # Evaluate the output as a python array
        js_data = eval(m.group(0))
        js_data = [x if x >= 0 else 256 + x for x in js_data] # Our output may be signed, so unsign it
        # Get the correct output
        true_data = bytearray(read_binary(test_file('openjpeg/syntensity_lobby_s.raw')))
        # Compare them
        assert(len(js_data) == len(true_data))
        num = len(js_data)
        diff_total = js_total = true_total = 0
        for i in range(num):
          js_total += js_data[i]
          true_total += true_data[i]
          diff_total += abs(js_data[i] - true_data[i])
        js_mean = js_total / float(num)
        true_mean = true_total / float(num)
        diff_mean = diff_total / float(num)
        image_mean = 83.265
        # print '[image stats:', js_mean, image_mean, true_mean, diff_mean, num, ']'
        assert abs(js_mean - image_mean) < 0.01, [js_mean, image_mean]
        assert abs(true_mean - image_mean) < 0.01, [true_mean, image_mean]
        assert diff_mean < 0.01, diff_mean
        return output
      # Explictly disable EXIT_RUNTIME, since otherwise addOnPostRun does not work.
      # https://github.com/emscripten-core/emscripten/issues/15080
      self.set_setting('EXIT_RUNTIME', 0)
      self.emcc_args += ['--minify=0'] # to compare the versions
      self.emcc_args += ['--pre-js', 'pre.js']
      def do_test():
        self.do_runf(test_file('third_party/openjpeg/codec/j2k_to_image.c'),
                     'Successfully generated', # The real test for valid output is in image_compare
                     args='-i image.j2k -o image.raw'.split(),
                     emcc_args=['-sUSE_LIBPNG'],
                     libraries=lib,
                     includes=[test_file('third_party/openjpeg/libopenjpeg'),
                               test_file('third_party/openjpeg/codec'),
                               test_file('third_party/openjpeg/common'),
                               Path(self.get_build_dir(), 'third_party/openjpeg')],
                     output_nicerizer=image_compare)
      do_test()
      # extra testing
      if self.get_setting('ALLOW_MEMORY_GROWTH') == 1:
        print('no memory growth', file=sys.stderr)
        self.set_setting('ALLOW_MEMORY_GROWTH', 0)
        do_test()
    if '-fsanitize=address' in self.emcc_args:
      # In ASan mode we need a large initial memory (or else wasm-ld fails).
      # The OpenJPEG CMake will build several executables (which we need parts
      # of in our testing, see above), so we must enable the flag for them all.
      with env_modify({'EMMAKEN_CFLAGS': '-sINITIAL_MEMORY=300MB'}):
        do_test_openjpeg()
    else:
      do_test_openjpeg()
  @no_asan('call stack exceeded on some versions of node')
  @is_slow_test
  def test_fuzz(self):
    """Run every fuzz/*.c and fuzz/*.cpp case against its .txt reference,
    once normally and once with -flto."""
    self.emcc_args += ['-I' + test_file('fuzz/include'), '-w']
    def run_all(x):
      print(x)
      for name in sorted(glob.glob(test_file('fuzz/*.c')) + glob.glob(test_file('fuzz/*.cpp'))):
        # skip cases that are known to fail or are build leftovers
        if 'newfail' in name:
          continue
        if os.path.basename(name).startswith('temp_fuzzcode'):
          continue
        print(name)
        if name.endswith('.cpp'):
          self.emcc_args.append('-std=c++03')
        self.do_runf(test_file('fuzz', name),
                     read_file(test_file('fuzz', name + '.txt')))
        if name.endswith('.cpp'):
          self.emcc_args.remove('-std=c++03')
    run_all('normal')
    self.emcc_args += ['-flto']
    run_all('lto')
  @also_with_standalone_wasm(wasm2c=True, impure=True)
  @no_asan('autodebug logging interferes with asan')
  @with_env_modify({'EMCC_AUTODEBUG': '1'})
  def test_autodebug_wasm(self):
    """With EMCC_AUTODEBUG the program must still succeed AND emit the
    autodebug instrumentation messages."""
    # test that the program both works and also emits some of the logging
    # (but without the specific output, as it is logging the actual locals
    # used and so forth, which will change between opt modes and updates of
    # llvm etc.)
    def check(out):
      for msg in ['log_execution', 'get_i32', 'set_i32', 'load_ptr', 'load_val', 'store_ptr', 'store_val']:
        self.assertIn(msg, out)
      return out
    self.do_runf(test_file('core/test_autodebug.c'),
                 'success', output_nicerizer=check)
  @parameterized({
    'full': ('full',),
    'mask': ('mask',),
    'none': ('none',),
  })
  def test_wasm2c_sandboxing(self, mode):
    """Run hello_world in standalone-wasm + WASM2C mode under each
    WASM2C_SANDBOXING level (full/mask/none)."""
    if not can_do_standalone(self):
      return self.skipTest('standalone mode not supported')
    self.set_setting('STANDALONE_WASM')
    self.set_setting('WASM2C')
    self.set_setting('WASM2C_SANDBOXING', mode)
    self.wasm_engines = []
    self.do_core_test('test_hello_world.c')
### Integration tests
  def test_ccall(self):
    """Exercise ccall/cwrap for every argument/return type combination
    (number, boolean, string, array, pointer), stack restoration under many
    calls, and re-entrant ccall; repeats under closure if available."""
    self.emcc_args.append('-Wno-return-stack-address')
    self.set_setting('EXPORTED_RUNTIME_METHODS', ['ccall', 'cwrap'])
    self.set_setting('WASM_ASYNC_COMPILATION', 0)
    create_file('post.js', '''
      out('*');
      var ret;
      ret = Module['ccall']('get_int', 'number'); out([typeof ret, ret].join(','));
      ret = ccall('get_float', 'number'); out([typeof ret, ret.toFixed(2)].join(','));
      ret = ccall('get_bool', 'boolean'); out([typeof ret, ret].join(','));
      ret = ccall('get_string', 'string'); out([typeof ret, ret].join(','));
      ret = ccall('print_int', null, ['number'], [12]); out(typeof ret);
      ret = ccall('print_float', null, ['number'], [14.56]); out(typeof ret);
      ret = ccall('print_bool', null, ['boolean'], [true]); out(typeof ret);
      ret = ccall('print_string', null, ['string'], ["cheez"]); out(typeof ret);
      ret = ccall('print_string', null, ['array'], [[97, 114, 114, 45, 97, 121, 0]]); out(typeof ret); // JS array
      ret = ccall('print_string', null, ['array'], [new Uint8Array([97, 114, 114, 45, 97, 121, 0])]); out(typeof ret); // typed array
      ret = ccall('multi', 'number', ['number', 'number', 'number', 'string'], [2, 1.4, 3, 'more']); out([typeof ret, ret].join(','));
      var p = ccall('malloc', 'pointer', ['number'], [4]);
      setValue(p, 650, 'i32');
      ret = ccall('pointer', 'pointer', ['pointer'], [p]); out([typeof ret, getValue(ret, 'i32')].join(','));
      out('*');
      // part 2: cwrap
      var noThirdParam = Module['cwrap']('get_int', 'number');
      out(noThirdParam());
      var multi = Module['cwrap']('multi', 'number', ['number', 'number', 'number', 'string']);
      out(multi(2, 1.4, 3, 'atr'));
      out(multi(8, 5.4, 4, 'bret'));
      out('*');
      // part 3: avoid stack explosion and check it's restored correctly
      for (var i = 0; i < TOTAL_STACK/60; i++) {
        ccall('multi', 'number', ['number', 'number', 'number', 'string'], [0, 0, 0, '123456789012345678901234567890123456789012345678901234567890']);
      }
      out('stack is ok.');
      ccall('call_ccall_again', null);
    ''')
    self.emcc_args += ['--post-js', 'post.js']
    self.set_setting('EXPORTED_FUNCTIONS', ['_get_int', '_get_float', '_get_bool', '_get_string', '_print_int', '_print_float', '_print_bool', '_print_string', '_multi', '_pointer', '_call_ccall_again', '_malloc'])
    self.do_core_test('test_ccall.cpp')
    if self.maybe_closure():
      self.do_core_test('test_ccall.cpp')
  def test_EXPORTED_RUNTIME_METHODS(self):
    """Verify runtime methods work unexported, then again when explicitly
    exported via EXPORTED_RUNTIME_METHODS."""
    self.set_setting('DEFAULT_LIBRARY_FUNCS_TO_INCLUDE', ['$dynCall'])
    self.do_core_test('EXPORTED_RUNTIME_METHODS.c')
    # test dyncall (and other runtime methods in support.js) can be exported
    self.emcc_args += ['-DEXPORTED']
    self.set_setting('EXPORTED_RUNTIME_METHODS', ['dynCall', 'addFunction', 'lengthBytesUTF8', 'getTempRet0', 'setTempRet0'])
    self.do_core_test('EXPORTED_RUNTIME_METHODS.c')
  @parameterized({
    '': [],
    'minimal_runtime': ['-s', 'MINIMAL_RUNTIME=1']
  })
  def test_dyncall_specific(self, *args):
    """Run dyncall_specific.c under each supported dyncall invocation style
    (direct, dynamic-signature, exported variants), optionally with
    MINIMAL_RUNTIME."""
    cases = [
      ('DIRECT', []),
      ('DYNAMIC_SIG', ['-s', 'DYNCALLS=1', '-s', 'DEFAULT_LIBRARY_FUNCS_TO_INCLUDE=$dynCall']),
    ]
    if 'MINIMAL_RUNTIME=1' in args:
      self.emcc_args += ['--pre-js', test_file('minimal_runtime_exit_handling.js')]
    else:
      # these exported styles are not available under MINIMAL_RUNTIME
      cases += [
        ('EXPORTED', []),
        ('EXPORTED_DYNAMIC_SIG', ['-s', 'DYNCALLS=1', '-s', 'DEFAULT_LIBRARY_FUNCS_TO_INCLUDE=$dynCall', '-s', 'EXPORTED_RUNTIME_METHODS=dynCall']),
        ('FROM_OUTSIDE', ['-s', 'EXPORTED_RUNTIME_METHODS=dynCall_iiji'])
      ]
    for which, extra_args in cases:
      print(str(args) + ' ' + which)
      self.do_core_test('dyncall_specific.c', emcc_args=['-D' + which] + list(args) + extra_args)
def test_getValue_setValue(self):
# these used to be exported, but no longer are by default
def test(output_prefix='', args=[], assert_returncode=0):
src = test_file('core/getValue_setValue.cpp')
expected = test_file('core/getValue_setValue' + output_prefix + '.out')
self.do_run_from_file(src, expected, assert_returncode=assert_returncode, emcc_args=args)
# see that direct usage (not on module) works. we don't export, but the use
# keeps it alive through JSDCE
test(args=['-DDIRECT'])
# see that with assertions, we get a nice error message
self.set_setting('EXPORTED_RUNTIME_METHODS', [])
self.set_setting('ASSERTIONS')
test('_assert', assert_returncode=NON_ZERO)
self.set_setting('ASSERTIONS', 0)
# see that when we export them, things work on the module
self.set_setting('EXPORTED_RUNTIME_METHODS', ['getValue', 'setValue'])
test()
@parameterized({
'': ([],),
'_files': (['-DUSE_FILES'],)
})
def test_FS_exports(self, extra_args):
# these used to be exported, but no longer are by default
def test(output_prefix='', args=[], assert_returncode=0):
args += extra_args
print(args)
self.do_runf(test_file('core/FS_exports.cpp'),
(read_file(test_file('core/FS_exports' + output_prefix + '.out')),
read_file(test_file('core/FS_exports' + output_prefix + '_2.out'))),
assert_returncode=assert_returncode, emcc_args=args)
# see that direct usage (not on module) works. we don't export, but the use
# keeps it alive through JSDCE
test(args=['-DDIRECT', '-s', 'FORCE_FILESYSTEM'])
# see that with assertions, we get a nice error message
self.set_setting('EXPORTED_RUNTIME_METHODS', [])
self.set_setting('ASSERTIONS')
test('_assert', assert_returncode=NON_ZERO)
self.set_setting('ASSERTIONS', 0)
# see that when we export them, things work on the module
self.set_setting('EXPORTED_RUNTIME_METHODS', ['FS_createDataFile'])
test(args=['-s', 'FORCE_FILESYSTEM'])
  def test_legacy_exported_runtime_numbers(self):
    """Legacy runtime numbers (e.g. ALLOC_STACK) used to be exported by
    default; check direct use, the assertion error, and explicit export."""
    # these used to be exported, but no longer are by default
    def test(output_prefix='', args=[], assert_returncode=0):
      # save/restore emcc_args around each sub-run
      old = self.emcc_args.copy()
      self.emcc_args += args
      src = test_file('core/legacy_exported_runtime_numbers.cpp')
      expected = test_file('core/legacy_exported_runtime_numbers%s.out' % output_prefix)
      self.do_run_from_file(src, expected, assert_returncode=assert_returncode)
      self.emcc_args = old
    # see that direct usage (not on module) works. we don't export, but the use
    # keeps it alive through JSDCE
    test(args=['-DDIRECT'])
    # see that with assertions, we get a nice error message
    self.set_setting('EXPORTED_RUNTIME_METHODS', [])
    self.set_setting('ASSERTIONS')
    test('_assert', assert_returncode=NON_ZERO)
    self.set_setting('ASSERTIONS', 0)
    # see that when we export them, things work on the module
    self.set_setting('EXPORTED_RUNTIME_METHODS', ['ALLOC_STACK'])
    test()
  def test_response_file(self):
    """emcc @file response files: arguments are read from the file, and a
    missing response file produces a clear error."""
    response_data = '-o %s/response_file.js %s' % (self.get_dir(), test_file('hello_world.cpp'))
    create_file('rsp_file', response_data.replace('\\', '\\\\'))
    self.run_process([EMCC, "@rsp_file"] + self.get_emcc_args())
    self.do_run('response_file.js', 'hello, world', no_build=True)
    self.assertContained('response file not found: foo.txt', self.expect_fail([EMCC, '@foo.txt']))
  def test_linker_response_file(self):
    """-Wl,@file response files are expanded at link time."""
    objfile = 'response_file.o'
    self.run_process([EMCC, '-c', test_file('hello_world.cpp'), '-o', objfile] + self.get_emcc_args())
    # This should expand into -Wl,--start-group <objfile> -Wl,--end-group
    response_data = '--start-group ' + objfile + ' --end-group'
    create_file('rsp_file', response_data.replace('\\', '\\\\'))
    self.run_process([EMCC, "-Wl,@rsp_file", '-o', 'response_file.o.js'] + self.get_emcc_args())
    self.do_run('response_file.o.js', 'hello, world', no_build=True)
  def test_exported_response(self):
    """EXPORTED_FUNCTIONS can be given as an @-response file."""
    src = r'''
      #include <stdio.h>
      #include <stdlib.h>
      #include <emscripten.h>

      extern "C" {
        int other_function() { return 5; }
      }

      int main() {
        int x = EM_ASM_INT({ return Module._other_function() });
        emscripten_run_script_string(""); // Add a reference to a symbol that exists in src/deps_info.json to uncover issue #2836 in the test suite.
        printf("waka %d!\n", x);
        return 0;
      }
    '''
    create_file('exps', '["_main","_other_function"]')
    self.set_setting('EXPORTED_FUNCTIONS', '@exps')
    self.do_run(src, '''waka 5!''')
    assert 'other_function' in read_file('src.js')
def test_large_exported_response(self):
src = r'''
#include <stdio.h>
#include <stdlib.h>
#include <emscripten.h>
extern "C" {
'''
js_funcs = []
num_exports = 5000
count = 0
while count < num_exports:
src += 'int exported_func_from_response_file_%d () { return %d;}\n' % (count, count)
js_funcs.append('_exported_func_from_response_file_%d' % count)
count += 1
src += r'''
}
int main() {
int x = EM_ASM_INT({ return Module._exported_func_from_response_file_4999() });
emscripten_run_script_string(""); // Add a reference to a symbol that exists in src/deps_info.json to uncover issue #2836 in the test suite.
printf("waka %d!\n", x);
return 0;
}
'''
js_funcs.append('_main')
create_file('large_exported_response.json', json.dumps(js_funcs))
self.set_setting('EXPORTED_FUNCTIONS', '@large_exported_response.json')
self.do_run(src, 'waka 4999!')
self.assertContained('_exported_func_from_response_file_1', read_file('src.js'))
  def test_add_function(self):
    """addFunction: basic operation, failure when the table cannot grow
    (RESERVED_FUNCTION_POINTERS=0), and success with ALLOW_TABLE_GROWTH."""
    self.set_setting('INVOKE_RUN', 0)
    self.set_setting('WASM_ASYNC_COMPILATION', 0)
    self.set_setting('RESERVED_FUNCTION_POINTERS')
    self.set_setting('EXPORTED_RUNTIME_METHODS', ['callMain'])
    src = test_file('interop/test_add_function.cpp')
    post_js = test_file('interop/test_add_function_post.js')
    self.emcc_args += ['--post-js', post_js]
    print('basics')
    self.do_run_in_out_file_test('interop/test_add_function.cpp')
    print('with RESERVED_FUNCTION_POINTERS=0')
    self.set_setting('RESERVED_FUNCTION_POINTERS', 0)
    expected = 'Unable to grow wasm table'
    if self.is_wasm2js():
      # in wasm2js the error message doesn't come from the VM, but from our
      # emulation code. when ASSERTIONS are enabled we show a clear message, but
      # in optimized builds we don't waste code size on that, and the JS engine
      # shows a generic error.
      expected = 'wasmTable.grow is not a function'
    self.do_runf(src, expected, assert_returncode=NON_ZERO)
    print('- with table growth')
    self.set_setting('ALLOW_TABLE_GROWTH')
    self.emcc_args += ['-DGROWTH']
    # enable costly assertions to verify correct table behavior
    self.set_setting('ASSERTIONS', 2)
    self.do_run_in_out_file_test('interop/test_add_function.cpp', interleaved_output=False)
  def test_getFuncWrapper_sig_alias(self):
    """getFuncWrapper must dispatch correctly when two signatures ('vi' and
    'vii') alias onto different functions."""
    self.set_setting('DEFAULT_LIBRARY_FUNCS_TO_INCLUDE', ['$getFuncWrapper'])
    src = r'''
    #include <stdio.h>
    #include <emscripten.h>

    void func1(int a) {
      printf("func1\n");
    }
    void func2(int a, int b) {
      printf("func2\n");
    }

    int main() {
      EM_ASM({
        getFuncWrapper($0, 'vi')(0);
        getFuncWrapper($1, 'vii')(0, 0);
      }, func1, func2);
      return 0;
    }
    '''
    self.do_run(src, 'func1\nfunc2\n')
  def test_emulate_function_pointer_casts(self):
    """Run the function-pointer-cast test with EMULATE_FUNCTION_POINTER_CASTS."""
    # Forcibly disable EXIT_RUNTIME due to:
    # https://github.com/emscripten-core/emscripten/issues/15081
    self.set_setting('EXIT_RUNTIME', 0)
    self.set_setting('EMULATE_FUNCTION_POINTER_CASTS')
    self.do_core_test('test_emulate_function_pointer_casts.cpp')
  @no_wasm2js('TODO: nicely printed names in wasm2js')
  @parameterized({
    'normal': ([],),
    'noexcept': (['-fno-exceptions'],)
  })
  def test_demangle_stacks(self, extra_args):
    """With DEMANGLE_SUPPORT and assertions, aborts print a demangled stack;
    without assertions only a hint to enable them appears."""
    self.emcc_args += extra_args
    self.set_setting('DEMANGLE_SUPPORT')
    self.set_setting('ASSERTIONS')
    # disable aggressive inlining in binaryen
    self.set_setting('BINARYEN_EXTRA_PASSES', '--one-caller-inline-max-function-size=1')
    # ensure function names are preserved
    self.emcc_args += ['--profiling-funcs']
    self.do_core_test('test_demangle_stacks.cpp', assert_returncode=NON_ZERO)
    print('without assertions, the stack is not printed, but a message suggesting assertions is')
    self.set_setting('ASSERTIONS', 0)
    self.do_core_test('test_demangle_stacks_noassert.cpp', assert_returncode=NON_ZERO)
def test_demangle_stacks_symbol_map(self):
# disable aggressive inlining in binaryen
self.set_setting('BINARYEN_EXTRA_PASSES', '--one-caller-inline-max-function-size=1')
self.set_setting('DEMANGLE_SUPPORT')
if '-O' not in str(self.emcc_args) or '-O0' in self.emcc_args or '-O1' in self.emcc_args or '-g' in self.emcc_args:
self.skipTest("without opts, we don't emit a symbol map")
self.emcc_args += ['--emit-symbol-map']
self.do_runf(test_file('core/test_demangle_stacks.cpp'), 'Aborted', assert_returncode=NON_ZERO)
# make sure the shortened name is the right one
full_aborter = None
short_aborter = None
for line in open('test_demangle_stacks.js.symbols').readlines():
if ':' not in line:
continue
# split by the first ':' (wasm backend demangling may include more :'s later on)
short, full = line.split(':', 1)
if 'Aborter' in full:
short_aborter = short
full_aborter = full
self.assertIsNotNone(full_aborter)
self.assertIsNotNone(short_aborter)
print('full:', full_aborter, 'short:', short_aborter)
if config.SPIDERMONKEY_ENGINE and os.path.exists(config.SPIDERMONKEY_ENGINE[0]):
output = self.run_js('test_demangle_stacks.js', engine=config.SPIDERMONKEY_ENGINE, assert_returncode=NON_ZERO)
# we may see the full one, if -g, or the short one if not
if ' ' + short_aborter + ' ' not in output and ' ' + full_aborter + ' ' not in output:
# stack traces may also be ' name ' or 'name@' etc
if '\n' + short_aborter + ' ' not in output and '\n' + full_aborter + ' ' not in output and 'wasm-function[' + short_aborter + ']' not in output:
if '\n' + short_aborter + '@' not in output and '\n' + full_aborter + '@' not in output:
self.assertContained(' ' + short_aborter + ' ' + '\n' + ' ' + full_aborter + ' ', output)
  @no_safe_heap('tracing from sbrk into JS leads to an infinite loop')
  def test_tracing(self):
    """Run the tracing test with the --tracing memory profiler enabled."""
    self.emcc_args += ['--tracing']
    self.do_core_test('test_tracing.c')
  @disabled('https://github.com/emscripten-core/emscripten/issues/9527')
  def test_eval_ctors(self):
    """EVAL_CTORS: verify that evaluable constructors are executed at compile
    time (shrinking code size while growing static memory data), while ctors
    with side effects like printf are left alone. Currently disabled."""
    if '-O2' not in str(self.emcc_args) or '-O1' in str(self.emcc_args):
      self.skipTest('need js optimizations')
    if not self.is_wasm():
      self.skipTest('this test uses wasm binaries')

    print('leave printf in ctor')
    self.set_setting('EVAL_CTORS')
    self.do_run(r'''
      #include <stdio.h>
      struct C {
        C() { printf("constructing!\n"); } // don't remove this!
      };
      C c;
      int main() {}
    ''', "constructing!\n")

    def get_code_size():
      if self.is_wasm():
        # Use number of functions as a proxy for code size
        return self.count_wasm_contents('hello_libcxx.wasm', 'funcs')
      else:
        return os.path.getsize('hello_libcxx.js')

    def get_mem_size():
      if self.is_wasm():
        # Use the amount of memory-data as a proxy for static memory size
        return self.count_wasm_contents('hello_libcxx.wasm', 'memory-data')
      if self.uses_memory_init_file():
        return os.path.getsize('hello_libcxx.js.mem')
      # otherwise we ignore memory size
      return 0

    def do_test(test):
      # build twice, with and without EVAL_CTORS, and compare sizes:
      # evaluating ctors should shrink code but grow static data
      self.set_setting('EVAL_CTORS')
      test()
      ec_code_size = get_code_size()
      ec_mem_size = get_mem_size()
      self.clear_setting('EVAL_CTORS')
      test()
      code_size = get_code_size()
      mem_size = get_mem_size()
      if mem_size:
        print('mem: ', mem_size, '=>', ec_mem_size)
        self.assertGreater(ec_mem_size, mem_size)
      print('code:', code_size, '=>', ec_code_size)
      self.assertLess(ec_code_size, code_size)

    print('remove ctor of just assigns to memory')

    def test1():
      self.do_run(r'''
        #include <stdio.h>
        struct C {
          int x;
          C() {
            volatile int y = 10;
            y++;
            x = y;
          }
        };
        C c;
        int main() {
          printf("x: %d\n", c.x);
        }
      ''', "x: 11\n")
    do_test(test1)

    # The wasm backend currently exports a single initializer so the ctor
    # evaluation is all or nothing. As well as that it doesn't currently
    # do DCE of libcxx symbols (because the are marked as visibility(default)
    # and because of that we end up not being able to eval ctors unless all
    # libcxx constructors can be eval'd
    print('libcxx - remove 2 ctors from iostream code')
    output = 'hello, world!'

    def test2():
      self.do_runf(test_file('hello_libcxx.cpp'), output)
    do_test(test2)

    print('assertions too')
    self.set_setting('ASSERTIONS')
    self.do_runf(test_file('hello_libcxx.cpp'), output)
    self.set_setting('ASSERTIONS', 0)

    print('remove just some, leave others')

    def test3():
      self.do_run(r'''
        #include <iostream>
        #include <string>

        class std_string {
        public:
          std_string(): ptr(nullptr) { std::cout << "std_string()\n"; }
          std_string(const char* s): ptr(s) { std::cout << "std_string(const char* s)" << std::endl; }
          std_string(const std_string& s): ptr(s.ptr) { std::cout << "std_string(const std_string& s) " << std::endl; }
          const char* data() const { return ptr; }
        private:
          const char* ptr;
        };

        const std_string txtTestString("212121\0");
        const std::string s2text("someweirdtext");

        int main() {
          std::cout << s2text << std::endl;
          std::cout << txtTestString.data() << std::endl;
          std::cout << txtTestString.data() << std::endl;
          return 0;
        }
      ''', '''std_string(const char* s)
someweirdtext
212121
212121
''') # noqa
    do_test(test3)
  def test_embind(self):
    """Basic embind val usage: call Math.abs from C++ two different ways."""
    self.emcc_args += ['--bind']
    create_file('test_embind.cpp', r'''
      #include <stdio.h>
      #include <emscripten/val.h>

      using namespace emscripten;

      int main() {
        val Math = val::global("Math");

        // two ways to call Math.abs
        printf("abs(-10): %d\n", Math.call<int>("abs", -10));
        printf("abs(-11): %d\n", Math["abs"](-11).as<int>());

        return 0;
      }
    ''')
    self.do_runf('test_embind.cpp', 'abs(-10): 10\nabs(-11): 11')
  def test_embind_2(self):
    """EMSCRIPTEN_BINDINGS function(): a bound C++ function is callable from JS."""
    self.emcc_args += ['--bind', '--post-js', 'post.js']
    create_file('post.js', '''
      function printLerp() {
        out('lerp ' + Module.lerp(100, 200, 66) + '.');
      }
    ''')
    create_file('test_embind_2.cpp', r'''
      #include <stdio.h>
      #include <emscripten.h>
      #include <emscripten/bind.h>
      using namespace emscripten;
      int lerp(int a, int b, int t) {
        return (100 - t) * a + t * b;
      }
      EMSCRIPTEN_BINDINGS(my_module) {
        function("lerp", &lerp);
      }
      int main(int argc, char **argv) {
        EM_ASM(printLerp());
        return 0;
      }
    ''')
    self.do_runf('test_embind_2.cpp', 'lerp 166')
  def test_embind_3(self):
    """Calling a binding whose raw-pointer argument type is unbound must raise
    UnboundTypeError rather than crash."""
    self.emcc_args += ['--bind', '--post-js', 'post.js']
    create_file('post.js', '''
      function ready() {
        try {
          Module.compute(new Uint8Array([1,2,3]));
        } catch(e) {
          out(e);
        }
      }
    ''')
    create_file('test_embind_3.cpp', r'''
      #include <emscripten.h>
      #include <emscripten/bind.h>
      using namespace emscripten;
      int compute(int array[]) {
          return 0;
      }
      EMSCRIPTEN_BINDINGS(my_module) {
          function("compute", &compute, allow_raw_pointers());
      }
      int main(int argc, char **argv) {
          EM_ASM(ready());
          return 0;
      }
    ''')
    self.do_runf('test_embind_3.cpp', 'UnboundTypeError: Cannot call compute due to unbound types: Pi')
def test_embind_4(self):
  """Expose a typed_memory_view over a C++ buffer and read it from JS."""
  self.emcc_args += ['--bind', '--post-js', 'post.js']
  create_file('post.js', '''
function printFirstElement() {
out(Module.getBufferView()[0]);
}
''')
  create_file('test_embind_4.cpp', r'''
#include <emscripten.h>
#include <emscripten/bind.h>
#include <emscripten/val.h>
#include <stdio.h>
using namespace emscripten;
const size_t kBufferSize = 1024;
double buffer[kBufferSize];
val getBufferView(void) {
val v = val(typed_memory_view(kBufferSize, buffer));
return v;
}
EMSCRIPTEN_BINDINGS(my_module) {
function("getBufferView", &getBufferView);
}
int main(int argc, char **argv) {
buffer[0] = 107;
EM_ASM(printFirstElement());
return 0;
}
''')
  # JS should observe the value written into buffer[0] on the C++ side
  self.do_runf('test_embind_4.cpp', '107')
def test_embind_5(self):
  """Run the test_embind_5 core test with embind and EXIT_RUNTIME enabled."""
  self.emcc_args += ['--bind']
  self.set_setting('EXIT_RUNTIME')
  self.do_core_test('test_embind_5.cpp')
def test_embind_custom_marshal(self):
  """Run the embind custom-marshalling test; a pre-js provides the JS side."""
  self.emcc_args += ['--bind', '--pre-js', test_file('embind/test_custom_marshal.js')]
  self.do_run_in_out_file_test('embind/test_custom_marshal.cpp', assert_identical=True)
def test_embind_float_constants(self):
  """Run the embind float-constants test against its expected output file."""
  self.emcc_args += ['--bind']
  self.do_run_in_out_file_test('embind/test_float_constants.cpp')
def test_embind_negative_constants(self):
  """Run the embind negative-constants test against its expected output file."""
  self.emcc_args += ['--bind']
  self.do_run_in_out_file_test('embind/test_negative_constants.cpp')
@also_with_wasm_bigint
def test_embind_unsigned(self):
  """Run the embind unsigned-types test (also in the wasm-bigint variant)."""
  self.emcc_args += ['--bind']
  self.do_run_in_out_file_test('embind/test_unsigned.cpp')
def test_embind_val(self):
  """Run the embind val-API test against its expected output file."""
  self.emcc_args += ['--bind']
  self.do_run_in_out_file_test('embind/test_val.cpp')
@no_wasm2js('wasm_bigint')
def test_embind_i64_val(self):
  """Run the embind i64 val test; requires WASM_BIGINT and node's bigint flag."""
  self.set_setting('WASM_BIGINT')
  self.emcc_args += ['--bind']
  self.node_args += ['--experimental-wasm-bigint']
  self.do_run_in_out_file_test('embind/test_i64_val.cpp', assert_identical=True)
@no_wasm2js('wasm_bigint')
def test_embind_i64_binding(self):
  """Run the embind i64 binding test; requires WASM_BIGINT and node's bigint flag."""
  self.set_setting('WASM_BIGINT')
  self.emcc_args += ['--bind']
  self.node_args += ['--experimental-wasm-bigint']
  self.do_run_in_out_file_test('embind/test_i64_binding.cpp', assert_identical=True)
def test_embind_no_rtti(self):
  """embind works with RTTI disabled (-fno-rtti + no unbound type names)."""
  create_file('main.cpp', r'''
#include <emscripten.h>
#include <emscripten/bind.h>
#include <emscripten/val.h>
#include <stdio.h>
EM_JS(void, calltest, (), {
console.log("dotest returned: " + Module.dotest());
});
int main(int argc, char** argv){
printf("418\n");
calltest();
return 0;
}
int test() {
return 42;
}
EMSCRIPTEN_BINDINGS(my_module) {
emscripten::function("dotest", &test);
}
''')
  self.emcc_args += ['--bind', '-fno-rtti', '-DEMSCRIPTEN_HAS_UNBOUND_TYPE_NAMES=0']
  self.do_runf('main.cpp', '418\ndotest returned: 42\n')
def test_embind_polymorphic_class_no_rtti(self):
  """Bind a polymorphic class while RTTI is disabled."""
  self.emcc_args += ['--bind', '-fno-rtti', '-DEMSCRIPTEN_HAS_UNBOUND_TYPE_NAMES=0']
  self.do_core_test('test_embind_polymorphic_class_no_rtti.cpp')
def test_embind_no_rtti_followed_by_rtti(self):
  """embind works when -fno-rtti is later overridden by -frtti on the command line."""
  src = r'''
#include <emscripten.h>
#include <emscripten/bind.h>
#include <emscripten/val.h>
#include <stdio.h>
EM_JS(void, calltest, (), {
console.log("dotest returned: " + Module.dotest());
});
int main(int argc, char** argv){
printf("418\n");
calltest();
return 0;
}
int test() {
return 42;
}
EMSCRIPTEN_BINDINGS(my_module) {
emscripten::function("dotest", &test);
}
'''
  # -frtti comes after -fno-rtti, so RTTI ends up enabled
  self.emcc_args += ['--bind', '-fno-rtti', '-frtti']
  self.do_run(src, '418\ndotest returned: 42\n')
@parameterized({
  '': (None, False),
  'all': ('ALL', False),
  'fast': ('FAST', False),
  'default': ('DEFAULT', False),
  'all_growth': ('ALL', True),
})
def test_webidl(self, mode, allow_memory_growth):
  """Exercise the WebIDL binder: generate glue code for webidl/test.idl and
  run it, parameterized over IDL_CHECKS mode and memory growth."""
  self.uses_es6 = True
  self.set_setting('WASM_ASYNC_COMPILATION', 0)
  if self.maybe_closure():
    # avoid closure minified names competing with our test code in the global name space
    self.set_setting('MODULARIZE')
  else:
    # NOTE(review): this duplicates the unconditional set above — looks redundant
    self.set_setting('WASM_ASYNC_COMPILATION', 0)
  # Force IDL checks mode
  with env_modify({'IDL_CHECKS': mode}):
    self.run_process([WEBIDL_BINDER, test_file('webidl/test.idl'), 'glue'])
  self.assertExists('glue.cpp')
  self.assertExists('glue.js')
  # build a post-js that sets up TheModule and the growth flag for post.js
  post_js = '\n\n'
  if self.get_setting('MODULARIZE'):
    post_js += 'var TheModule = Module();\n'
  else:
    post_js += 'var TheModule = Module;\n'
  post_js += '\n\n'
  if allow_memory_growth:
    post_js += "var isMemoryGrowthAllowed = true;\n"
  else:
    post_js += "var isMemoryGrowthAllowed = false;\n"
  post_js += read_file(test_file('webidl/post.js'))
  post_js += '\n\n'
  create_file('extern-post.js', post_js)
  # Export things on "TheModule". This matches the typical use pattern of the bound library
  # being used as Box2D.* or Ammo.*, and we cannot rely on "Module" being always present (closure may remove it).
  self.emcc_args += ['-s', 'EXPORTED_FUNCTIONS=_malloc,_free', '--post-js=glue.js', '--extern-post-js=extern-post.js']
  if allow_memory_growth:
    self.set_setting('ALLOW_MEMORY_GROWTH')
  if not mode:
    mode = 'DEFAULT'
  # expected output differs per IDL_CHECKS mode
  expected = test_file('webidl/output_%s.txt' % mode)
  self.do_run_from_file(test_file('webidl/test.cpp'), expected)
### Tests for tools
@no_wasm2js('TODO: source maps in wasm2js')
@parameterized({
  '': ([],),
  'minimal_runtime': (['-s', 'MINIMAL_RUNTIME'],),
})
def test_source_map(self, args):
  """Build with -gsource-map and validate the emitted source map.

  Checks that the map's optional 'file'/'sourcesContent' fields (when
  present) are consistent with the build, that 'sources' references
  src.cpp, and that all meaningful source lines appear in the mappings.
  """
  if '-g' not in self.emcc_args:
    self.emcc_args.append('-g')
  self.emcc_args += args
  src = '''
#include <stdio.h>
#include <assert.h>
__attribute__((noinline)) int foo() {
printf("hi"); // line 6
return 1; // line 7
}
int main() {
printf("%d", foo()); // line 11
return 0; // line 12
}
'''
  create_file('src.cpp', src)
  out_filename = 'a.out.js'
  wasm_filename = 'a.out.wasm'
  no_maps_filename = 'no-maps.out.js'
  assert '-gsource-map' not in self.emcc_args
  self.emcc('src.cpp', self.get_emcc_args(), out_filename)
  # the file name may find its way into the generated code, so make sure we
  # can do an apples-to-apples comparison by compiling with the same file name
  shutil.move(out_filename, no_maps_filename)
  # NOTE: an older revision also read the no-maps build and stripped its
  # //@/# comments for a comparison that no longer happens; that dead
  # computation has been removed.
  self.emcc_args.append('-gsource-map')
  self.emcc(os.path.abspath('src.cpp'),
            self.get_emcc_args(),
            out_filename,
            stderr=PIPE)
  # the map lives next to the wasm when targeting wasm, next to the js otherwise
  map_referent = out_filename if not self.is_wasm() else wasm_filename
  map_filename = map_referent + '.map'
  # use a context manager so the map file handle is not leaked
  with open(map_filename) as f:
    data = json.load(f)
  # fixed: `hasattr(data, 'file')` on a dict is always False, so these
  # optional-field checks never ran; membership tests are what was intended
  if 'file' in data:
    # the file attribute is optional, but if it is present it needs to refer
    # the output file.
    self.assertPathsIdentical(map_referent, data['file'])
  self.assertGreater(len(data['sources']), 1)
  self.assertPathsIdentical('src.cpp', data['sources'][0])
  if 'sourcesContent' in data:
    # the sourcesContent attribute is optional, but if it is present it
    # needs to contain valid source text.
    self.assertTextDataIdentical(src, data['sourcesContent'][0])
  mappings = json.loads(self.run_js(
    path_from_root('tests/sourcemap2json.js'),
    args=[map_filename]))
  seen_lines = set()
  for m in mappings:
    if m['source'] == 'src.cpp':
      seen_lines.add(m['originalLine'])
  # ensure that all the 'meaningful' lines in the original code get mapped
  # when optimizing, the binaryen optimizer may remove some of them (by inlining, etc.)
  if self.is_optimizing():
    self.assertTrue(seen_lines.issuperset([11, 12]), seen_lines)
  else:
    self.assertTrue(seen_lines.issuperset([6, 7, 11, 12]), seen_lines)
@no_wasm2js('TODO: source maps in wasm2js')
def test_dwarf(self):
  """Build with -g and verify the DWARF debug info in the wasm.

  Parses llvm-dwarfdump output to check the expected sections exist, that
  the .debug_line table maps the three out_to_js calls, and that those
  DWARF addresses fall within the corresponding code ranges in the wat.
  """
  self.emcc_args.append('-g')
  create_file('src.cpp', '''
#include <emscripten.h>
EM_JS(int, out_to_js, (int x), {})
void foo() {
out_to_js(0); // line 5
out_to_js(1); // line 6
out_to_js(2); // line 7
// A silly possible recursion to avoid binaryen doing any inlining.
if (out_to_js(3)) foo();
}
int main() {
foo();
}
''')
  js_filename = 'a.out.js'
  wasm_filename = 'a.out.wasm'
  self.emcc('src.cpp', self.get_emcc_args(), js_filename)
  out = self.run_process([shared.LLVM_DWARFDUMP, wasm_filename, '-all'], stdout=PIPE).stdout
  # parse the dwarfdump output into named sections
  sections = {}
  curr_section_name = ''
  curr_section_body = ''

  def add_section():
    # flush the section being accumulated, if any
    if curr_section_name:
      sections[curr_section_name] = curr_section_body

  for line in out.splitlines():
    if ' contents:' in line:
      # a new section, a line like ".debug_str contents:"
      add_section()
      curr_section_name = line.split(' ')[0]
      curr_section_body = ''
    else:
      # possibly a line in a section
      if curr_section_name:
        curr_section_body += line + '\n'
  add_section()
  # make sure the right sections exist
  self.assertIn('.debug_abbrev', sections)
  self.assertIn('.debug_info', sections)
  self.assertIn('.debug_line', sections)
  self.assertIn('.debug_str', sections)
  self.assertIn('.debug_ranges', sections)
  # verify some content in the sections
  self.assertIn('"src.cpp"', sections['.debug_info'])
  # the line section looks like this:
  # Address Line Column File ISA Discriminator Flags
  # ------------------ ------ ------ ------ --- ------------- -------------
  # 0x000000000000000b 5 0 3 0 0 is_stmt
  src_to_addr = {}
  found_src_cpp = False
  for line in sections['.debug_line'].splitlines():
    if 'name: "src.cpp"' in line:
      found_src_cpp = True
    if not found_src_cpp:
      continue
    if 'debug_line' in line:
      break
    if line.startswith('0x'):
      # fixed: collapse runs of spaces by replacing double spaces with
      # single ones (the previous replace(' ', ' ') was a no-op and
      # looped forever)
      while '  ' in line:
        line = line.replace('  ', ' ')
      addr, line, col = line.split(' ')[:3]
      key = (int(line), int(col))
      src_to_addr.setdefault(key, []).append(addr)
  # each of the calls must remain in the binary, and be mapped
  self.assertIn((5, 9), src_to_addr)
  self.assertIn((6, 9), src_to_addr)
  self.assertIn((7, 9), src_to_addr)

  def get_dwarf_addr(line, col):
    addrs = src_to_addr[(line, col)]
    # we assume the simple calls have one address
    self.assertEqual(len(addrs), 1)
    return int(addrs[0], 0)

  # the lines must appear in sequence (as calls to JS, the optimizer cannot
  # reorder them)
  self.assertLess(get_dwarf_addr(5, 9), get_dwarf_addr(6, 9))
  self.assertLess(get_dwarf_addr(6, 9), get_dwarf_addr(7, 9))
  # Get the wat, printing with -g which has binary offsets
  wat = self.run_process([Path(building.get_binaryen_bin(), 'wasm-opt'),
                          wasm_filename, '-g', '--print'], stdout=PIPE).stdout
  # We expect to see a pattern like this in optimized builds (there isn't
  # much that can change with such calls to JS (they can't be reordered or
  # anything else):
  #
  # ;; code offset: 0x?
  # (drop
  # ;; code offset: 0x?
  # (call $out_to_js
  # ;; code offset: 0x?
  # (local.get ?) or (i32.const ?)
  # )
  # )
  #
  # In the stacky stream of instructions form, it is
  #
  # local.get or i32.const
  # call $out_to_js
  # drop
  #
  # However, in an unoptimized build the constant may be assigned earlier in
  # some other manner, so stop here.
  if not self.is_optimizing():
    return

  # get_wat_addr gets the address of one of the 3 interesting calls, by its
  # index (0,1,2).
  def get_wat_addr(call_index):
    # find the call_index-th call
    call_loc = -1
    for i in range(call_index + 1):
      call_loc = wat.find('call $out_to_js', call_loc + 1)
      assert call_loc > 0
    # the call begins with the local.get/i32.const printed below it, which is
    # the first instruction in the stream, so it has the lowest address
    start_addr_loc = wat.find('0x', call_loc)
    assert start_addr_loc > 0
    start_addr_loc_end = wat.find('\n', start_addr_loc)
    start_addr = int(wat[start_addr_loc:start_addr_loc_end], 0)
    # the call ends with the drop, which is the last in the stream, at the
    # highest address
    end_addr_loc = wat.rfind('drop', 0, call_loc)
    assert end_addr_loc > 0
    end_addr_loc = wat.rfind('0x', 0, end_addr_loc)
    assert end_addr_loc > 0
    end_addr_loc_end = wat.find('\n', end_addr_loc)
    assert end_addr_loc_end > 0
    end_addr = int(wat[end_addr_loc:end_addr_loc_end], 0)
    return (start_addr, end_addr)

  # match up the DWARF and the wat
  for i in range(3):
    dwarf_addr = get_dwarf_addr(5 + i, 9)
    start_wat_addr, end_wat_addr = get_wat_addr(i)
    # the dwarf may match any of the 3 instructions that form the stream of
    # of instructions implementing the call in the source code, in theory
    self.assertLessEqual(start_wat_addr, dwarf_addr)
    self.assertLessEqual(dwarf_addr, end_wat_addr)
def test_modularize_closure_pre(self):
  # test that the combination of modularize + closure + pre-js works. in that mode,
  # closure should not minify the Module object in a way that the pre-js cannot use it.
  create_file('post.js', 'var TheModule = Module();\n')
  self.emcc_args += [
    '--pre-js', test_file('core/modularize_closure_pre.js'),
    '--extern-post-js=post.js',
    '--closure=1',
    '-g1',
    '-s',
    'MODULARIZE=1',
  ]
  self.do_core_test('modularize_closure_pre.c')
@no_wasm2js('symbol names look different wasm2js backtraces')
def test_emscripten_log(self):
  """Exercise emscripten_log with demangling, plus a closure-compiled variant."""
  self.banned_js_engines = [config.V8_ENGINE] # v8 doesn't support console.log
  self.set_setting('DEMANGLE_SUPPORT')
  if '-g' not in self.emcc_args:
    self.emcc_args.append('-g')
  self.emcc_args += ['-DRUN_FROM_JS_SHELL']
  self.do_run_in_out_file_test('emscripten_log/emscripten_log.cpp', interleaved_output=False)
  # test closure compiler as well
  if self.maybe_closure():
    self.emcc_args += ['-g1'] # extra testing
    self.do_run_in_out_file_test('emscripten_log/emscripten_log_with_closure.cpp', interleaved_output=False)
def test_float_literals(self):
  """Run the float-literals test against its expected output file."""
  self.do_run_in_out_file_test('test_float_literals.cpp')
def test_exit_status(self):
  """Check exit status propagation (and atexit behavior) for exit/_exit/_Exit."""
  # needs to flush stdio streams
  self.set_setting('EXIT_RUNTIME')
  create_file('exit.c', r'''
#include <stdio.h>
#include <assert.h>
#include <stdlib.h>
#include <unistd.h>
static void cleanup() {
#ifndef NORMAL_EXIT
assert(0 && "cleanup should only be called from normal exit()");
#endif
printf("cleanup\n");
}
int main() {
atexit(cleanup); // this atexit should still be called
printf("hello, world!\n");
// Unusual exit status to make sure it's working!
#ifdef CAPITAL_EXIT
_Exit(118);
#elif defined(UNDER_EXIT)
_exit(118);
#elif defined(NORMAL_EXIT)
exit(118);
#endif
}
''')
  # onExit hook prints the observed status so we can assert on it
  create_file('pre.js', '''
Module.onExit = function() {
out('I see exit status: ' + EXITSTATUS);
}
''')
  self.emcc_args += ['--pre-js', 'pre.js']
  # normal exit() runs atexit handlers; _exit/_Exit must not
  print('.. exit')
  self.do_runf('exit.c', 'hello, world!\ncleanup\nI see exit status: 118', assert_returncode=118, emcc_args=['-DNORMAL_EXIT'])
  print('.. _exit')
  self.do_runf('exit.c', 'hello, world!\nI see exit status: 118', assert_returncode=118, emcc_args=['-DUNDER_EXIT'])
  print('.. _Exit')
  self.do_runf('exit.c', 'hello, world!\nI see exit status: 118', assert_returncode=118, emcc_args=['-DCAPITAL_EXIT'])
def test_noexitruntime(self):
  """Setting noExitRuntime (before or in main) prevents global destructors."""
  src = r'''
#include <emscripten.h>
#include <stdio.h>
static int testPre = TEST_PRE;
struct Global {
Global() {
printf("in Global()\n");
if (testPre) { EM_ASM(noExitRuntime = true;); }
}
~Global() { printf("ERROR: in ~Global()\n"); }
} global;
int main() {
if (!testPre) { EM_ASM(noExitRuntime = true;); }
printf("in main()\n");
}
'''
  # both variants must not print the ~Global() error line
  self.do_run(src.replace('TEST_PRE', '0'), 'in Global()\nin main()')
  self.do_run(src.replace('TEST_PRE', '1'), 'in Global()\nin main()')
def test_minmax(self):
  """Run test_minmax.c and check its expected NaN-related output."""
  self.do_runf(test_file('test_minmax.c'), 'NAN != NAN\nSuccess!')
def test_localeconv(self):
  """Run the localeconv core test against its expected output file."""
  self.do_run_in_out_file_test('core/test_localeconv.c')
def test_newlocale(self):
  """Run the newlocale core test against its expected output file."""
  self.do_run_in_out_file_test('core/test_newlocale.c')
def test_setlocale(self):
  """Run the setlocale core test against its expected output file."""
  self.do_run_in_out_file_test('core/test_setlocale.c')
def test_vswprintf_utf8(self):
  """Run the vswprintf UTF-8 test against its expected output file."""
  self.do_run_in_out_file_test('vswprintf_utf8.c')
# needs setTimeout which only node has
@require_node
def test_async_hello(self):
  """Basic ASYNCIFY: emscripten_async_call fires during emscripten_sleep."""
  # needs to flush stdio streams
  self.set_setting('EXIT_RUNTIME')
  self.set_setting('ASYNCIFY')
  create_file('main.c', r'''
#include <stdio.h>
#include <emscripten.h>
void f(void *p) {
*(int*)p = 99;
printf("!");
}
int main() {
int i = 0;
printf("Hello");
emscripten_async_call(f, &i, 1);
printf("World");
emscripten_sleep(100);
printf("%d\n", i);
}
''')
  # the async callback runs during the sleep, so '!' and 99 appear after World
  self.do_runf('main.c', 'HelloWorld!99')
@require_node
def test_async_ccall_bad(self):
  # check bad ccall use
  # needs to flush stdio streams
  self.set_setting('EXIT_RUNTIME')
  self.set_setting('ASYNCIFY')
  self.set_setting('ASSERTIONS')
  self.set_setting('INVOKE_RUN', 0)
  create_file('main.c', r'''
#include <stdio.h>
#include <emscripten.h>
int main() {
printf("Hello");
emscripten_sleep(100);
printf("World\n");
}
''')
  # calling an asyncified main via ccall without { async: true } should throw
  create_file('pre.js', '''
Module['onRuntimeInitialized'] = function() {
try {
ccall('main', 'number', ['number', 'string'], [2, 'waka']);
var never = true;
} catch(e) {
out(e);
assert(!never);
}
};
''')
  self.emcc_args += ['--pre-js', 'pre.js']
  self.do_runf('main.c', 'The call to main is running asynchronously.')
@require_node
def test_async_ccall_good(self):
  # check reasonable ccall use
  # needs to flush stdio streams
  self.set_setting('EXIT_RUNTIME')
  self.set_setting('ASYNCIFY')
  self.set_setting('ASSERTIONS')
  self.set_setting('INVOKE_RUN', 0)
  create_file('main.c', r'''
#include <stdio.h>
#include <emscripten.h>
int main() {
printf("Hello");
emscripten_sleep(100);
printf("World\n");
}
''')
  # with { async: true } the asyncified main can be ccall'd successfully
  create_file('pre.js', '''
Module['onRuntimeInitialized'] = function() {
ccall('main', null, ['number', 'string'], [2, 'waka'], { async: true });
};
''')
  self.emcc_args += ['--pre-js', 'pre.js']
  self.do_runf('main.c', 'HelloWorld')
@parameterized({
  '': (False,),
  'exit_runtime': (True,),
})
def test_async_ccall_promise(self, exit_runtime):
  """ccall with { async: true } returns a promise; chain two async calls.

  Parameterized over EXIT_RUNTIME to cover the maybeExit() interaction
  with the runtime keepalive push/pop pair in pre.js.
  """
  self.set_setting('ASYNCIFY')
  self.set_setting('ASSERTIONS')
  self.set_setting('INVOKE_RUN', 0)
  # fixed: an unconditional set_setting('EXIT_RUNTIME') here was dead code,
  # immediately overridden by the parameterized value below
  self.set_setting('EXIT_RUNTIME', exit_runtime)
  self.set_setting('EXPORTED_FUNCTIONS', ['_stringf', '_floatf'])
  create_file('main.c', r'''
#include <stdio.h>
#include <emscripten.h>
const char* stringf(char* param) {
emscripten_sleep(20);
printf("stringf: %s", param);
return "second";
}
double floatf() {
emscripten_sleep(20);
emscripten_sleep(20);
return 6.4;
}
''')
  # chain the two async ccalls, keeping the runtime alive until both finish
  create_file('pre.js', r'''
Module['onRuntimeInitialized'] = function() {
runtimeKeepalivePush();
ccall('stringf', 'string', ['string'], ['first\n'], { async: true })
.then(function(val) {
console.log(val);
ccall('floatf', 'number', null, null, { async: true }).then(function(arg) {
console.log(arg);
runtimeKeepalivePop();
maybeExit();
});
});
};
''')
  self.emcc_args += ['--pre-js', 'pre.js']
  self.do_runf('main.c', 'stringf: first\nsecond\n6.4')
def test_fibers_asyncify(self):
  """Run the fibers test under ASYNCIFY (optionally closure-compiled)."""
  self.set_setting('ASYNCIFY')
  self.maybe_closure()
  self.do_runf(test_file('test_fibers.cpp'), '*leaf-0-100-1-101-1-102-2-103-3-104-5-105-8-106-13-107-21-108-34-109-*')
def test_asyncify_unused(self):
  # test a program not using asyncify, but the pref is set
  self.set_setting('ASYNCIFY')
  self.do_core_test('test_hello_world.c')
@parameterized({
  'normal': ([], True),
  'removelist_a': (['-s', 'ASYNCIFY_REMOVE=["foo(int, double)"]'], False),
  'removelist_b': (['-s', 'ASYNCIFY_REMOVE=["bar()"]'], True),
  'removelist_c': (['-s', 'ASYNCIFY_REMOVE=["baz()"]'], False),
  'onlylist_a': (['-s', 'ASYNCIFY_ONLY=["main","__original_main","foo(int, double)","baz()","c_baz","Structy::funcy()","bar()"]'], True),
  'onlylist_b': (['-s', 'ASYNCIFY_ONLY=["main","__original_main","foo(int, double)","baz()","c_baz","Structy::funcy()"]'], True),
  'onlylist_c': (['-s', 'ASYNCIFY_ONLY=["main","__original_main","foo(int, double)","baz()","c_baz"]'], False),
  'onlylist_d': (['-s', 'ASYNCIFY_ONLY=["foo(int, double)","baz()","c_baz","Structy::funcy()"]'], False, None, True),
  'onlylist_b_response': ([], True, '["main","__original_main","foo(int, double)","baz()","c_baz","Structy::funcy()"]'),
  'onlylist_c_response': ([], False, '["main","__original_main","foo(int, double)","baz()","c_baz"]'),
})
def test_asyncify_lists(self, args, should_pass, response=None, no_san=False):
  """ASYNCIFY_REMOVE/ASYNCIFY_ONLY lists: each configuration either runs
  cleanly (should_pass) or aborts at runtime; `response` exercises the
  @response-file form of ASYNCIFY_ONLY."""
  if no_san and is_sanitizing(self.emcc_args):
    self.skipTest('remaining asyncify+sanitizer TODO')
  if response is not None:
    create_file('response.file', response)
    self.set_setting('ASYNCIFY_ONLY', '@response.file')
  self.set_setting('ASYNCIFY')
  self.emcc_args += args
  if should_pass:
    self.do_core_test('test_asyncify_lists.cpp', assert_identical=True)
  else:
    # an incomplete list leads to a runtime failure when unwinding
    self.do_runf(test_file('core/test_asyncify_lists.cpp'), ('RuntimeError', 'Thrown at'), assert_returncode=NON_ZERO)
  # use of ASYNCIFY_* options may require intermediate debug info. that should
  # not end up emitted in the final binary
  # (note that we can't check this if sanitizers run, as they include a lot of
  # static strings that would match the search)
  if self.is_wasm() and not is_sanitizing(self.emcc_args):
    binary = read_binary('test_asyncify_lists.wasm')
    # there should be no name section
    self.assertFalse(b'name' in binary)
    # in a fully-optimized build, imports and exports are minified too and we
    # can verify that our function names appear nowhere
    if '-O3' in self.emcc_args:
      self.assertFalse(b'main' in binary)
@parameterized({
  'normal': ([], True),
  'ignoreindirect': (['-s', 'ASYNCIFY_IGNORE_INDIRECT'], False),
  'add': (['-s', 'ASYNCIFY_IGNORE_INDIRECT', '-s', 'ASYNCIFY_ADD=["__original_main","main","virt()"]'], True),
})
def test_asyncify_indirect_lists(self, args, should_pass):
  """ASYNCIFY_IGNORE_INDIRECT / ASYNCIFY_ADD: check each configuration
  passes or fails as expected."""
  self.set_setting('ASYNCIFY')
  self.emcc_args += args
  try:
    self.do_core_test('test_asyncify_indirect_lists.cpp', assert_identical=True)
    if not should_pass:
      # unexpected pass: flip the flag so the handler below re-raises
      # our marker exception instead of swallowing it
      should_pass = True
      raise Exception('should not have passed')
  except Exception:
    # expected-fail configurations land here and are swallowed;
    # anything else (including the marker above) propagates
    if should_pass:
      raise
@no_asan('asyncify stack operations confuse asan')
def test_emscripten_scan_registers(self):
  """Run the emscripten_scan_registers core test under ASYNCIFY."""
  self.set_setting('ASYNCIFY')
  self.do_core_test('test_emscripten_scan_registers.cpp')
def test_asyncify_assertions(self):
  """ASYNCIFY assertions fire (nonzero exit) for a misused 'suspend' import."""
  self.set_setting('ASYNCIFY')
  self.set_setting('ASYNCIFY_IMPORTS', ['suspend'])
  self.set_setting('ASSERTIONS')
  self.do_core_test('test_asyncify_assertions.c', assert_returncode=NON_ZERO)
@no_lsan('leaks asyncify stack during exit')
@no_asan('leaks asyncify stack during exit')
def test_asyncify_during_exit(self):
  """Asyncify operations during runtime exit: failing and NO_ASYNC variants."""
  self.set_setting('ASYNCIFY')
  self.set_setting('ASSERTIONS')
  self.set_setting('EXIT_RUNTIME', 1)
  self.do_core_test('test_asyncify_during_exit.cpp', assert_returncode=NON_ZERO)
  print('NO_ASYNC')
  # with -DNO_ASYNC the test uses a different expected-output file
  self.do_core_test('test_asyncify_during_exit.cpp', emcc_args=['-DNO_ASYNC'], out_suffix='_no_async')
@no_asan('asyncify stack operations confuse asan')
@no_wasm2js('dynamic linking support in wasm2js')
def test_asyncify_main_module(self):
  """ASYNCIFY combined with MAIN_MODULE=2 builds and runs hello world."""
  self.set_setting('ASYNCIFY', 1)
  self.set_setting('MAIN_MODULE', 2)
  self.do_core_test('test_hello_world.c')
@no_asan('asyncify stack operations confuse asan')
@no_wasm2js('TODO: lazy loading in wasm2js')
@parameterized({
  'conditional': (True,),
  'unconditional': (False,),
})
def test_emscripten_lazy_load_code(self, conditional):
  """ASYNCIFY_LAZY_LOAD_CODE: verify the split first/lazy wasm pair.

  Breaks each of the two wasm files in turn (by injecting an unreachable
  into $foo_end) to prove which one actually executes that code, for both
  conditional and unconditional lazy-load variants.
  """
  self.set_setting('ASYNCIFY_LAZY_LOAD_CODE')
  self.set_setting('ASYNCIFY_IGNORE_INDIRECT')
  self.set_setting('MALLOC', 'emmalloc')
  self.emcc_args += ['--profiling-funcs'] # so that we can find the functions for the changes below
  if conditional:
    self.emcc_args += ['-DCONDITIONAL']
  self.do_core_test('emscripten_lazy_load_code.cpp', args=['0'])
  first_size = os.path.getsize('emscripten_lazy_load_code.wasm')
  second_size = os.path.getsize('emscripten_lazy_load_code.wasm.lazy.wasm')
  print('first wasm size', first_size)
  print('second wasm size', second_size)
  if not conditional and self.is_optimizing() and '-g' not in self.emcc_args:
    # If the call to lazy-load is unconditional, then the optimizer can dce
    # out more than half
    self.assertLess(first_size, 0.6 * second_size)
  wasm1 = read_binary('emscripten_lazy_load_code.wasm')
  wasm2 = read_binary('emscripten_lazy_load_code.wasm.lazy.wasm')
  self.assertNotEqual(wasm1, wasm2)

  # attempts to "break" the wasm by adding an unreachable in $foo_end. returns whether we found it.
  def break_wasm(name):
    wat = self.run_process([Path(building.get_binaryen_bin(), 'wasm-dis'), name], stdout=PIPE).stdout
    lines = wat.splitlines()
    wat = None
    for i in range(len(lines)):
      if '(func $foo_end ' in lines[i]:
        j = i + 1
        while '(local ' in lines[j]:
          j += 1
        # we found the first line after the local defs
        lines[j] = '(unreachable)' + lines[j]
        wat = '\n'.join(lines)
        break
    if wat is None:
      # $foo_end is not present in the wasm, nothing to break
      shutil.copyfile(name, name + '.orig')
      return False
    with open('wat.wat', 'w') as f:
      f.write(wat)
    # keep the original around so we can restore it later
    shutil.move(name, name + '.orig')
    self.run_process([Path(building.get_binaryen_bin(), 'wasm-as'), 'wat.wat', '-o', name, '-g'])
    return True

  def verify_working(args=['0']):
    self.assertContained('foo_end\n', self.run_js('emscripten_lazy_load_code.js', args=args))

  def verify_broken(args=['0']):
    self.assertNotContained('foo_end\n', self.run_js('emscripten_lazy_load_code.js', args=args, assert_returncode=NON_ZERO))

  # the first-loaded wasm will not reach the second call, since we call it after lazy-loading.
  # verify that by changing the first wasm to throw in that function
  found_foo_end = break_wasm('emscripten_lazy_load_code.wasm')
  if not conditional and self.is_optimizing():
    self.assertFalse(found_foo_end, 'should have optimizd out $foo_end')
  verify_working()
  # but breaking the second wasm actually breaks us
  break_wasm('emscripten_lazy_load_code.wasm.lazy.wasm')
  verify_broken()
  # restore
  shutil.copyfile('emscripten_lazy_load_code.wasm.orig', 'emscripten_lazy_load_code.wasm')
  shutil.copyfile('emscripten_lazy_load_code.wasm.lazy.wasm.orig', 'emscripten_lazy_load_code.wasm.lazy.wasm')
  verify_working()
  if conditional:
    # if we do not call the lazy load function, then we do not need the lazy wasm,
    # and we do the second call in the first wasm
    os.remove('emscripten_lazy_load_code.wasm.lazy.wasm')
    verify_broken()
    verify_working(['42'])
    break_wasm('emscripten_lazy_load_code.wasm')
    verify_broken()
# Test basic wasm2js functionality in all core compilation modes.
@no_asan('no wasm2js support yet in asan')
def test_wasm2js(self):
  """Basic wasm2js (WASM=0) build: hello world plus mem-init-file checks."""
  if not self.is_wasm():
    self.skipTest('redundant to test wasm2js in wasm2js* mode')
  self.set_setting('WASM', 0)
  self.do_core_test('test_hello_world.c')
  # a mem init file is emitted just like with JS
  expect_memory_init_file = self.uses_memory_init_file()
  if expect_memory_init_file:
    self.assertExists('test_hello_world.js.mem')
    mem = read_binary('test_hello_world.js.mem')
    # fixed: indexing bytes yields an int, so compare the last byte with 0.
    # The old `mem[-1] != b'\0'` compared int to bytes and was always True.
    self.assertTrue(mem[-1] != 0)
  else:
    self.assertNotExists('test_hello_world.js.mem')
@no_asan('no wasm2js support yet in asan')
def test_maybe_wasm2js(self):
  """MAYBE_WASM2JS: run as wasm, then convert with maybe_wasm2js.py and
  verify the js-only bundle still runs after the wasm is deleted."""
  if not self.is_wasm():
    self.skipTest('redundant to test wasm2js in wasm2js* mode')
  self.set_setting('MAYBE_WASM2JS')
  # see that running as wasm works
  self.do_core_test('test_hello_world.c')
  # run wasm2js, bundle the code, and use the wasm2js path
  cmd = [PYTHON, path_from_root('tools/maybe_wasm2js.py'), 'test_hello_world.js', 'test_hello_world.wasm']
  if self.is_optimizing():
    cmd += ['-O2']
  # fixed: use a context manager so the output file handle is closed
  # (previously `open(...)` leaked, and the trailing `.stdout` was unused)
  with open('do_wasm2js.js', 'w') as out:
    self.run_process(cmd, stdout=out)
  # remove the wasm to make sure we never use it again
  os.remove('test_hello_world.wasm')
  # verify that it runs
  self.assertContained('hello, world!', self.run_js('do_wasm2js.js'))
@no_asan('no wasm2js support yet in asan')
@parameterized({
  '': ([],),
  'minimal_runtime': (['-s', 'MINIMAL_RUNTIME'],),
})
def test_wasm2js_fallback(self, args):
  """WASM=2 builds both wasm and a wasm2js fallback; check each path runs."""
  if not self.is_wasm():
    self.skipTest('redundant to test wasm2js in wasm2js* mode')
  cmd = [EMCC, test_file('small_hello_world.c'), '-s', 'WASM=2'] + args
  self.run_process(cmd)
  # First run with WebAssembly support enabled
  # Move the Wasm2js fallback away to test it is not accidentally getting loaded.
  os.rename('a.out.wasm.js', 'a.out.wasm.js.unused')
  self.assertContained('hello!', self.run_js('a.out.js'))
  os.rename('a.out.wasm.js.unused', 'a.out.wasm.js')
  # Then disable WebAssembly support in VM, and try again.. Should still work with Wasm2JS fallback.
  # fixed: use create_file instead of a bare open().write() that leaked the handle
  create_file('b.out.js', 'WebAssembly = undefined;\n' + read_file('a.out.js'))
  os.remove('a.out.wasm') # Also delete the Wasm file to test that it is not attempted to be loaded.
  self.assertContained('hello!', self.run_js('b.out.js'))
def test_cxx_self_assign(self):
  # See https://github.com/emscripten-core/emscripten/pull/2688 and http://llvm.org/bugs/show_bug.cgi?id=18735
  self.do_run(r'''
#include <map>
#include <stdio.h>
int main() {
std::map<int, int> m;
m[0] = 1;
m = m;
// size should still be one after self assignment
if (m.size() == 1) {
printf("ok.\n");
}
}
''', 'ok.')
def test_memprof_requirements(self):
  # This test checks for the global variables required to run the memory
  # profiler. It would fail if these variables were made no longer global
  # or if their identifiers were changed.
  create_file('main.cpp', '''
extern "C" {
void check_memprof_requirements();
}
int main() {
check_memprof_requirements();
return 0;
}
''')
  # the JS library checks each symbol the profiler relies on
  create_file('lib.js', '''
mergeInto(LibraryManager.library, {
check_memprof_requirements: function() {
if (typeof _emscripten_stack_get_base === 'function' &&
typeof _emscripten_stack_get_end === 'function' &&
typeof _emscripten_stack_get_current === 'function' &&
typeof Module['___heap_base'] === 'number') {
out('able to run memprof');
} else {
out('missing the required variables to run memprof');
}
}
});
''')
  self.emcc_args += ['--memoryprofiler', '--js-library', 'lib.js']
  self.do_runf('main.cpp', 'able to run memprof')
def test_fs_dict(self):
  """With -lidbfs.js/-lnodefs.js, all three filesystems exist as objects."""
  self.set_setting('FORCE_FILESYSTEM')
  self.emcc_args += ['-lidbfs.js']
  self.emcc_args += ['-lnodefs.js']
  create_file('pre.js', '''
Module = {};
Module['preRun'] = function() {
out(typeof FS.filesystems['MEMFS']);
out(typeof FS.filesystems['IDBFS']);
out(typeof FS.filesystems['NODEFS']);
// Globals
console.log(typeof MEMFS);
console.log(typeof IDBFS);
console.log(typeof NODEFS);
};
''')
  self.emcc_args += ['--pre-js', 'pre.js']
  self.do_run('int main() { return 0; }', 'object\nobject\nobject\nobject\nobject\nobject')
def test_fs_dict_none(self):
  # if IDBFS and NODEFS are not enabled, they are not present.
  self.set_setting('FORCE_FILESYSTEM')
  self.set_setting('ASSERTIONS')
  create_file('pre.js', '''
Module = {};
Module['preRun'] = function() {
out(typeof FS.filesystems['MEMFS']);
out(typeof FS.filesystems['IDBFS']);
out(typeof FS.filesystems['NODEFS']);
// Globals
if (ASSERTIONS) {
console.log(typeof MEMFS);
console.log(IDBFS);
console.log(NODEFS);
FS.mkdir('/working1');
try {
FS.mount(IDBFS, {}, '/working1');
} catch (e) {
console.log('|' + e + '|');
}
}
};
''')
  self.emcc_args += ['--pre-js', 'pre.js']
  # with assertions, the stubs explain that the filesystems must be linked in
  expected = '''\
object
undefined
undefined
object
IDBFS is no longer included by default; build with -lidbfs.js
NODEFS is no longer included by default; build with -lnodefs.js
|IDBFS is no longer included by default; build with -lidbfs.js|'''
  self.do_run('int main() { return 0; }', expected)
def test_stack_overflow_check(self):
  """STACK_OVERFLOW_CHECK=2 (directly or via ASSERTIONS=2) traps overflows."""
  self.set_setting('TOTAL_STACK', 1048576)
  self.set_setting('STACK_OVERFLOW_CHECK', 2)
  self.do_runf(test_file('stack_overflow.cpp'), 'stack overflow', assert_returncode=NON_ZERO)
  self.emcc_args += ['-DONE_BIG_STRING']
  self.do_runf(test_file('stack_overflow.cpp'), 'stack overflow', assert_returncode=NON_ZERO)
  # ASSERTIONS=2 implies STACK_OVERFLOW_CHECK=2
  self.clear_setting('STACK_OVERFLOW_CHECK')
  self.set_setting('ASSERTIONS', 2)
  self.do_runf(test_file('stack_overflow.cpp'), 'stack overflow', assert_returncode=NON_ZERO)
@node_pthreads
def test_binaryen_2170_emscripten_atomic_cas_u8(self):
  """Regression test for binaryen #2170: u8 atomic CAS under pthreads."""
  self.set_setting('USE_PTHREADS')
  self.do_run_in_out_file_test('binaryen_2170_emscripten_atomic_cas_u8.cpp')
@also_with_standalone_wasm()
def test_sbrk(self):
  """Run sbrk_brk.cpp (sbrk path; also in the standalone-wasm variant)."""
  self.do_runf(test_file('sbrk_brk.cpp'), 'OK.')
def test_brk(self):
  """Run sbrk_brk.cpp with -DTEST_BRK=1 to exercise the brk path."""
  self.emcc_args += ['-DTEST_BRK=1']
  self.do_runf(test_file('sbrk_brk.cpp'), 'OK.')
# Tests that we can use the dlmalloc mallinfo() function to obtain information
# about malloc()ed blocks and compute how much memory is used/freed.
@no_asan('mallinfo is not part of ASan malloc')
def test_mallinfo(self):
  """Run mallinfo.cpp, which inspects dlmalloc allocation statistics."""
  self.do_runf(test_file('mallinfo.cpp'), 'OK.')
@no_asan('cannot replace malloc/free with ASan')
def test_wrap_malloc(self):
  """Run wrap_malloc.cpp, which replaces malloc/free with wrappers."""
  self.do_runf(test_file('wrap_malloc.cpp'), 'OK.')
def test_environment(self):
  """ENVIRONMENT gating: the right engine runs, the wrong one refuses,
  and a combined right+wrong setting runs again."""
  self.set_setting('ASSERTIONS')

  def run_and_verify(assert_returncode=0):
    # build and run hello world, then confirm require() usage matches
    # whether node was one of the targeted environments
    self.do_core_test('test_hello_world.c', assert_returncode=assert_returncode)
    js = read_file('test_hello_world.js')
    assert ('require(' in js) == ('node' in self.get_setting('ENVIRONMENT')), 'we should have require() calls only if node js specified'

  for js_engine in config.JS_ENGINES:
    print(js_engine)
    # restrict execution to just this engine
    self.banned_js_engines = [e for e in config.JS_ENGINES if e != js_engine]
    # pick the matching and mismatching ENVIRONMENT values for the engine
    right, wrong = ('node', 'shell') if js_engine == config.NODE_JS else ('shell', 'node')
    # correct environment: must work
    self.set_setting('ENVIRONMENT', right)
    print('ENVIRONMENT =', self.get_setting('ENVIRONMENT'))
    run_and_verify()
    # wrong environment: must refuse to run with the expected message
    self.set_setting('ENVIRONMENT', wrong)
    print('ENVIRONMENT =', self.get_setting('ENVIRONMENT'))
    try:
      run_and_verify(assert_returncode=NON_ZERO)
      raise Exception('unexpected success')
    except Exception as e:
      self.assertContained('not compiled for this environment', str(e))
    # combined environment: must work again
    self.set_setting('ENVIRONMENT', right + ',' + wrong)
    print('ENVIRONMENT =', self.get_setting('ENVIRONMENT'))
    run_and_verify()
def test_postrun_exception(self):
# verify that an exception thrown in postRun() will not trigger the
# compilation failed handler, and will be printed to stderr.
# Explictly disable EXIT_RUNTIME, since otherwise addOnPostRun does not work.
# https://github.com/emscripten-core/emscripten/issues/15080
self.set_setting('EXIT_RUNTIME', 0)
self.add_post_run('ThisFunctionDoesNotExist()')
self.build(test_file('core/test_hello_world.c'))
output = self.run_js('test_hello_world.js', assert_returncode=NON_ZERO)
self.assertStartswith(output, 'hello, world!')
self.assertContained('ThisFunctionDoesNotExist is not defined', output)
# Tests that building with -s DECLARE_ASM_MODULE_EXPORTS=0 works
def test_no_declare_asm_module_exports(self):
self.set_setting('DECLARE_ASM_MODULE_EXPORTS', 0)
self.set_setting('WASM_ASYNC_COMPILATION', 0)
self.maybe_closure()
self.do_runf(test_file('declare_asm_module_exports.cpp'), 'jsFunction: 1')
js = read_file('declare_asm_module_exports.js')
occurances = js.count('cFunction')
if self.is_optimizing() and '-g' not in self.emcc_args:
# In optimized builds only the single reference cFunction that exists in the EM_ASM should exist
if self.is_wasm():
self.assertEqual(occurances, 1)
else:
# With js the asm module itself also contains a reference for the cFunction name
self.assertEqual(occurances, 2)
else:
print(occurances)
# Tests that building with -s DECLARE_ASM_MODULE_EXPORTS=0 works
@no_asan('TODO: ASan support in minimal runtime')
def test_minimal_runtime_no_declare_asm_module_exports(self):
self.set_setting('DECLARE_ASM_MODULE_EXPORTS', 0)
self.set_setting('WASM_ASYNC_COMPILATION', 0)
self.maybe_closure()
self.set_setting('MINIMAL_RUNTIME')
self.do_runf(test_file('declare_asm_module_exports.cpp'), 'jsFunction: 1')
# Tests that -s MINIMAL_RUNTIME=1 works well in different build modes
@parameterized({
'default': ([],),
'streaming': (['-s', 'MINIMAL_RUNTIME_STREAMING_WASM_COMPILATION'],),
'streaming_inst': (['-s', 'MINIMAL_RUNTIME_STREAMING_WASM_INSTANTIATION'],),
'no_export': (['-s', 'DECLARE_ASM_MODULE_EXPORTS=0'],)
})
def test_minimal_runtime_hello_world(self, args):
# TODO: Support for non-Node.js shells has not yet been added to MINIMAL_RUNTIME
self.banned_js_engines = [config.V8_ENGINE, config.SPIDERMONKEY_ENGINE]
self.emcc_args = args
self.set_setting('MINIMAL_RUNTIME')
self.maybe_closure()
self.do_runf(test_file('small_hello_world.c'), 'hello')
# Test that printf() works in MINIMAL_RUNTIME=1
@parameterized({
'fs': ('FORCE_FILESYSTEM',),
'nofs': ('NO_FILESYSTEM',),
})
@no_asan('TODO: ASan support in minimal runtime')
def test_minimal_runtime_hello_printf(self, extra_setting):
self.set_setting('MINIMAL_RUNTIME')
self.set_setting(extra_setting)
# $FS is not fully compatible with MINIMAL_RUNTIME so fails with closure
# compiler. lsan also pulls in $FS
if '-fsanitize=leak' not in self.emcc_args and extra_setting != 'FORCE_FILESYSTEM':
self.maybe_closure()
self.do_runf(test_file('hello_world.c'), 'hello, world!')
# Tests that -s MINIMAL_RUNTIME=1 works well with SAFE_HEAP
@no_asan('TODO: ASan support in minimal runtime')
def test_minimal_runtime_safe_heap(self):
self.set_setting('MINIMAL_RUNTIME')
self.set_setting('SAFE_HEAP')
# $FS is not fully compatible with MINIMAL_RUNTIME so fails with closure
# compiler.
# lsan pulls in $FS
if '-fsanitize=leak' not in self.emcc_args:
self.maybe_closure()
self.do_runf(test_file('small_hello_world.c'), 'hello')
# Tests global initializer with -s MINIMAL_RUNTIME=1
@no_asan('TODO: ASan support in minimal runtime')
def test_minimal_runtime_global_initializer(self):
self.set_setting('MINIMAL_RUNTIME')
self.maybe_closure()
self.do_runf(test_file('test_global_initializer.cpp'), 't1 > t0: 1')
@no_wasm2js('wasm2js does not support PROXY_TO_PTHREAD (custom section support)')
def test_return_address(self):
self.set_setting('USE_OFFSET_CONVERTER')
self.do_runf(test_file('core/test_return_address.c'), 'passed')
@no_wasm2js('TODO: sanitizers in wasm2js')
@no_asan('-fsanitize-minimal-runtime cannot be used with ASan')
def test_ubsan_minimal_too_many_errors(self):
self.emcc_args += ['-fsanitize=undefined', '-fsanitize-minimal-runtime']
if not self.is_wasm():
if self.is_optimizing():
self.skipTest('test can only be run without optimizations on asm.js')
# Need to use `-g` to get proper line numbers in asm.js
self.emcc_args += ['-g']
self.do_runf(test_file('core/test_ubsan_minimal_too_many_errors.c'),
expected_output='ubsan: add-overflow\n' * 20 + 'ubsan: too many errors\n')
@no_wasm2js('TODO: sanitizers in wasm2js')
@no_asan('-fsanitize-minimal-runtime cannot be used with ASan')
def test_ubsan_minimal_errors_same_place(self):
self.emcc_args += ['-fsanitize=undefined', '-fsanitize-minimal-runtime']
if not self.is_wasm():
if self.is_optimizing():
self.skipTest('test can only be run without optimizations on asm.js')
# Need to use `-g` to get proper line numbers in asm.js
self.emcc_args += ['-g']
self.do_runf(test_file('core/test_ubsan_minimal_errors_same_place.c'),
expected_output='ubsan: add-overflow\n' * 5)
@parameterized({
'fsanitize_undefined': (['-fsanitize=undefined'],),
'fsanitize_integer': (['-fsanitize=integer'],),
'fsanitize_overflow': (['-fsanitize=signed-integer-overflow'],),
})
@no_wasm2js('TODO: sanitizers in wasm2js')
def test_ubsan_full_overflow(self, args):
self.emcc_args += args
self.do_runf(test_file('core/test_ubsan_full_overflow.c'),
assert_all=True, expected_output=[
".c:3:5: runtime error: signed integer overflow: 2147483647 + 1 cannot be represented in type 'int'",
".c:7:7: runtime error: signed integer overflow: 2147483647 + 1 cannot be represented in type 'int'",
])
@parameterized({
'fsanitize_undefined': (['-fsanitize=undefined'],),
'fsanitize_return': (['-fsanitize=return'],),
})
@no_wasm2js('TODO: sanitizers in wasm2js')
def test_ubsan_full_no_return(self, args):
self.emcc_args += ['-Wno-return-type'] + args
self.do_runf(test_file('core/test_ubsan_full_no_return.cpp'),
expected_output='.cpp:1:5: runtime error: execution reached the end of a value-returning function without returning a value', assert_returncode=NON_ZERO)
@parameterized({
'fsanitize_undefined': (['-fsanitize=undefined'],),
'fsanitize_integer': (['-fsanitize=integer'],),
'fsanitize_shift': (['-fsanitize=shift'],),
})
@no_wasm2js('TODO: sanitizers in wasm2js')
def test_ubsan_full_left_shift(self, args):
self.emcc_args += args
self.do_runf(test_file('core/test_ubsan_full_left_shift.c'),
assert_all=True, expected_output=[
'.c:3:5: runtime error: left shift of negative value -1',
".c:7:5: runtime error: left shift of 16 by 29 places cannot be represented in type 'int'"
])
@parameterized({
'fsanitize_undefined': (['-fsanitize=undefined'],),
'fsanitize_null': (['-fsanitize=null'],),
})
@no_wasm2js('TODO: sanitizers in wasm2js')
def test_ubsan_full_null_ref(self, args):
self.emcc_args += args
self.do_runf(test_file('core/test_ubsan_full_null_ref.cpp'),
assert_all=True, expected_output=[
".cpp:3:12: runtime error: reference binding to null pointer of type 'int'",
".cpp:4:13: runtime error: reference binding to null pointer of type 'int'",
".cpp:5:14: runtime error: reference binding to null pointer of type 'int'",
])
@parameterized({
'fsanitize_undefined': (['-fsanitize=undefined'],),
'fsanitize_vptr': (['-fsanitize=vptr'],),
})
@no_wasm2js('TODO: sanitizers in wasm2js')
def test_ubsan_full_static_cast(self, args):
self.emcc_args += args
self.do_runf(test_file('core/test_ubsan_full_static_cast.cpp'),
assert_all=True, expected_output=[
".cpp:18:10: runtime error: downcast of address",
"which does not point to an object of type 'R'",
])
@parameterized({
'g': ('-g', [
".cpp:3:12: runtime error: reference binding to null pointer of type 'int'",
'in main',
]),
'g4': ('-gsource-map', [
".cpp:3:12: runtime error: reference binding to null pointer of type 'int'",
'in main ',
'.cpp:3:8'
]),
})
@no_wasm2js('TODO: sanitizers in wasm2js')
def test_ubsan_full_stack_trace(self, g_flag, expected_output):
if g_flag == '-gsource-map':
if not self.is_wasm():
self.skipTest('wasm2js has no source map support')
elif '-Oz' in self.emcc_args:
self.skipTest('-Oz breaks stack traces')
create_file('pre.js', 'Module = {UBSAN_OPTIONS: "print_stacktrace=1"};')
self.emcc_args += ['-fsanitize=null', g_flag, '--pre-js=pre.js']
self.set_setting('ALLOW_MEMORY_GROWTH')
self.do_runf(test_file('core/test_ubsan_full_null_ref.cpp'),
assert_all=True, expected_output=expected_output)
@no_wasm2js('TODO: sanitizers in wasm2js')
def test_ubsan_typeinfo_eq(self):
# https://github.com/emscripten-core/emscripten/issues/13330
src = r'''
#include <typeinfo>
#include <stdio.h>
int main() {
int mismatch = typeid(int) != typeid(int);
printf("ok\n");
return mismatch;
}
'''
self.emcc_args.append('-fsanitize=undefined')
self.do_run(src, 'ok\n')
def test_template_class_deduction(self):
  """Class template argument deduction (CTAD) needs C++17, so opt in explicitly."""
  self.emcc_args.append('-std=c++17')
  self.do_core_test('test_template_class_deduction.cpp')
@no_wasm2js('TODO: ASAN in wasm2js')
@no_safe_heap('asan does not work with SAFE_HEAP')
@parameterized({
'c': ['test_asan_no_error.c'],
'cpp': ['test_asan_no_error.cpp'],
})
def test_asan_no_error(self, name):
self.emcc_args.append('-fsanitize=address')
self.set_setting('ALLOW_MEMORY_GROWTH')
self.set_setting('INITIAL_MEMORY', '300mb')
self.do_runf(test_file('core', name), '', assert_returncode=NON_ZERO)
# note: these tests have things like -fno-builtin-memset in order to avoid
# clang optimizing things away. for example, a memset might be optimized into
# stores, and then the stores identified as dead, which leaves nothing for
# asan to test. here we want to test asan itself, so we work around that.
@no_safe_heap('asan does not work with SAFE_HEAP')
@parameterized({
'use_after_free_c': ('test_asan_use_after_free.c', [
'AddressSanitizer: heap-use-after-free on address',
]),
'use_after_free_cpp': ('test_asan_use_after_free.cpp', [
'AddressSanitizer: heap-use-after-free on address',
]),
'use_after_return': ('test_asan_use_after_return.c', [
'AddressSanitizer: stack-use-after-return on address',
], ['-Wno-return-stack-address']),
'static_buffer_overflow': ('test_asan_static_buffer_overflow.c', [
'AddressSanitizer: global-buffer-overflow on address',
], ['-fno-builtin-memset']),
'heap_buffer_overflow_c': ('test_asan_heap_buffer_overflow.c', [
'AddressSanitizer: heap-buffer-overflow on address',
], ['-fno-builtin-memset']),
'heap_buffer_overflow_cpp': ('test_asan_heap_buffer_overflow.cpp', [
'AddressSanitizer: heap-buffer-overflow on address',
], ['-fno-builtin-memset']),
'stack_buffer_overflow': ('test_asan_stack_buffer_overflow.c', [
'AddressSanitizer: stack-buffer-overflow'
], ['-fno-builtin-memset']),
'stack_buffer_overflow_js': ('test_asan_stack_buffer_overflow_js.c', [
'AddressSanitizer: stack-buffer-overflow'
], ['-fno-builtin-memset']),
'bitfield_unround_size': ('test_asan_bitfield_unround_size.c', [
'AddressSanitizer: stack-buffer-overflow'
], ['-fno-builtin-memset']),
'bitfield_unround_offset': ('test_asan_bitfield_unround_offset.c', [
'AddressSanitizer: stack-buffer-overflow'
], ['-fno-builtin-memset']),
'bitfield_round': ('test_asan_bitfield_round.c', [
'AddressSanitizer: stack-buffer-overflow'
], ['-fno-builtin-memset']),
'memset_null': ('test_asan_memset_null.c', [
'AddressSanitizer: null-pointer-dereference on address 0x00000001'
], ['-fno-builtin-memset']),
'memset_freed': ('test_asan_memset_freed.c', [
'AddressSanitizer: heap-use-after-free on address'
], ['-fno-builtin-memset']),
'strcpy': ('test_asan_strcpy.c', [
'AddressSanitizer: heap-buffer-overflow on address'
], ['-fno-builtin-strcpy']),
'memcpy': ('test_asan_memcpy.c', [
'AddressSanitizer: heap-buffer-overflow on address'
], ['-fno-builtin-memcpy']),
'memchr': ('test_asan_memchr.c', [
'AddressSanitizer: global-buffer-overflow on address'
], ['-fno-builtin-memchr']),
'vector': ('test_asan_vector.cpp', [
'AddressSanitizer: container-overflow on address'
]),
})
def test_asan(self, name, expected_output, cflags=None):
  """Build *name* from tests/core with AddressSanitizer enabled and check
  that the expected sanitizer report appears (the program exits non-zero).
  """
  if '-Oz' in self.emcc_args:
    self.skipTest('-Oz breaks source maps')
  if not self.is_wasm():
    self.skipTest('wasm2js has no ASan support')
  self.emcc_args += ['-fsanitize=address']
  self.set_setting('ALLOW_MEMORY_GROWTH')
  self.set_setting('INITIAL_MEMORY', '300mb')
  if cflags:
    self.emcc_args.extend(cflags)
  self.do_runf(test_file('core', name),
               expected_output=expected_output,
               assert_all=True,
               check_for_error=False,
               assert_returncode=NON_ZERO)
@no_safe_heap('asan does not work with SAFE_HEAP')
@no_wasm2js('TODO: ASAN in wasm2js')
def test_asan_js_stack_op(self):
self.emcc_args.append('-fsanitize=address')
self.set_setting('ALLOW_MEMORY_GROWTH')
self.set_setting('INITIAL_MEMORY', '300mb')
self.do_runf(test_file('core/test_asan_js_stack_op.c'),
expected_output='Hello, World!')
@no_safe_heap('asan does not work with SAFE_HEAP')
@no_wasm2js('TODO: ASAN in wasm2js')
def test_asan_api(self):
self.emcc_args.append('-fsanitize=address')
self.set_setting('INITIAL_MEMORY', '300mb')
self.do_core_test('test_asan_api.c')
@no_safe_heap('asan does not work with SAFE_HEAP')
@no_wasm2js('TODO: ASAN in wasm2js')
def test_asan_modularized_with_closure(self):
# the bug is that createModule() returns undefined, instead of the
# proper Promise object.
create_file('post.js', 'if (!(createModule() instanceof Promise)) throw "Promise was not returned :(";\n')
self.emcc_args += ['-fsanitize=address', '--extern-post-js=post.js']
self.set_setting('MODULARIZE')
self.set_setting('EXPORT_NAME', 'createModule')
self.set_setting('USE_CLOSURE_COMPILER')
self.set_setting('ALLOW_MEMORY_GROWTH')
self.set_setting('INITIAL_MEMORY', '300mb')
self.do_runf(test_file('hello_world.c'), expected_output='hello, world!')
@no_asan('SAFE_HEAP cannot be used with ASan')
def test_safe_heap_user_js(self):
self.set_setting('SAFE_HEAP')
self.do_runf(test_file('core/test_safe_heap_user_js.c'),
expected_output=['Aborted(segmentation fault storing 1 bytes to address 0)'], assert_returncode=NON_ZERO)
def test_safe_stack(self):
self.set_setting('STACK_OVERFLOW_CHECK', 2)
self.set_setting('TOTAL_STACK', 65536)
if self.is_optimizing():
expected = ['Aborted(stack overflow)']
else:
expected = ['Aborted(stack overflow)', '__handle_stack_overflow']
self.do_runf(test_file('core/test_safe_stack.c'),
expected_output=expected,
assert_returncode=NON_ZERO, assert_all=True)
@node_pthreads
def test_safe_stack_pthread(self):
self.set_setting('STACK_OVERFLOW_CHECK', 2)
self.set_setting('TOTAL_STACK', 65536)
self.set_setting('PROXY_TO_PTHREAD')
self.set_setting('USE_PTHREADS')
if self.is_optimizing():
expected = ['Aborted(stack overflow)']
else:
expected = ['Aborted(stack overflow)', '__handle_stack_overflow']
self.do_runf(test_file('core/test_safe_stack.c'),
expected_output=expected,
assert_returncode=NON_ZERO, assert_all=True)
def test_safe_stack_alloca(self):
self.set_setting('STACK_OVERFLOW_CHECK', 2)
self.set_setting('TOTAL_STACK', 65536)
if self.is_optimizing():
expected = ['Aborted(stack overflow)']
else:
expected = ['Aborted(stack overflow)', '__handle_stack_overflow']
self.do_runf(test_file('core/test_safe_stack_alloca.c'),
expected_output=expected,
assert_returncode=NON_ZERO, assert_all=True)
@needs_dylink
def test_safe_stack_dylink(self):
self.set_setting('STACK_OVERFLOW_CHECK', 2)
self.set_setting('TOTAL_STACK', 65536)
self.dylink_test(r'''
#include <stdio.h>
extern void sidey();
int main() {
sidey();
}
''', '''
#include <string.h>
static long accumulator = 0;
int f(int *b) {
// Infinite recursion while recording stack pointer locations
// so that compiler can't eliminate the stack allocs.
accumulator += (long)b;
int a[1024];
return f(a);
}
void sidey() {
f(NULL);
}
''', ['Aborted(stack overflow)', '__handle_stack_overflow'], assert_returncode=NON_ZERO, force_c=True)
def test_fpic_static(self):
self.emcc_args.append('-fPIC')
self.do_core_test('test_hello_world.c')
@node_pthreads
def test_pthread_create(self):
self.set_setting('EXIT_RUNTIME')
# test that the node environment can be specified by itself, and that still
# works with pthreads (even though we did not specify 'node,worker')
self.set_setting('ENVIRONMENT', 'node')
self.do_run_in_out_file_test('core/pthread/create.cpp')
@node_pthreads
def test_pthread_c11_threads(self):
self.set_setting('PROXY_TO_PTHREAD')
self.set_setting('EXIT_RUNTIME')
self.set_setting('PTHREADS_DEBUG')
if not self.has_changed_setting('INITIAL_MEMORY'):
self.set_setting('INITIAL_MEMORY', '64mb')
# test that the node and worker environments can be specified
self.set_setting('ENVIRONMENT', 'node,worker')
self.do_run_in_out_file_test('pthread/test_pthread_c11_threads.c')
@node_pthreads
def test_pthread_cxx_threads(self):
self.set_setting('PROXY_TO_PTHREAD')
self.clear_setting('ALLOW_MEMORY_GROWTH')
self.set_setting('INITIAL_MEMORY', '64Mb')
self.set_setting('EXIT_RUNTIME')
self.do_run_in_out_file_test('pthread/test_pthread_cxx_threads.cpp')
@node_pthreads
def test_pthread_create_pool(self):
# with a pool, we can synchronously depend on workers being available
self.set_setting('PTHREAD_POOL_SIZE', 2)
self.set_setting('EXIT_RUNTIME')
self.emcc_args += ['-DALLOW_SYNC']
self.do_run_in_out_file_test('core/pthread/create.cpp')
@node_pthreads
def test_pthread_create_proxy(self):
# with PROXY_TO_PTHREAD, we can synchronously depend on workers being available
self.set_setting('PROXY_TO_PTHREAD')
self.set_setting('EXIT_RUNTIME')
self.emcc_args += ['-DALLOW_SYNC']
self.do_run_in_out_file_test('core/pthread/create.cpp')
@node_pthreads
def test_pthread_create_embind_stack_check(self):
# embind should work with stack overflow checks (see #12356)
self.set_setting('STACK_OVERFLOW_CHECK', 2)
self.set_setting('EXIT_RUNTIME')
self.emcc_args += ['--bind']
self.do_run_in_out_file_test('core/pthread/create.cpp')
@node_pthreads
def test_pthread_exceptions(self):
self.set_setting('PTHREAD_POOL_SIZE', 2)
self.set_setting('EXIT_RUNTIME')
self.emcc_args += ['-fexceptions']
self.do_run_in_out_file_test('core/pthread/exceptions.cpp')
@node_pthreads
def test_pthread_exit_process(self):
self.set_setting('PROXY_TO_PTHREAD')
self.set_setting('EXIT_RUNTIME')
self.emcc_args += ['-DEXIT_RUNTIME', '--pre-js', test_file('core/pthread/test_pthread_exit_runtime.pre.js')]
self.do_run_in_out_file_test('core/pthread/test_pthread_exit_runtime.c', assert_returncode=42)
@node_pthreads
@no_wasm2js('wasm2js does not support PROXY_TO_PTHREAD (custom section support)')
def test_pthread_offset_converter(self):
self.set_setting('PROXY_TO_PTHREAD')
self.set_setting('EXIT_RUNTIME')
self.set_setting('USE_OFFSET_CONVERTER')
if '-g' in self.emcc_args:
self.emcc_args += ['-DDEBUG']
self.do_runf(test_file('core/test_return_address.c'), 'passed')
@node_pthreads
@no_wasm2js('wasm2js does not support PROXY_TO_PTHREAD (custom section support)')
def test_pthread_offset_converter_modularize(self):
self.set_setting('PROXY_TO_PTHREAD')
self.set_setting('EXIT_RUNTIME')
self.set_setting('USE_OFFSET_CONVERTER')
self.set_setting('MODULARIZE')
create_file('post.js', 'var m = require("./test_return_address.js"); m();')
self.emcc_args += ['--extern-post-js', 'post.js', '-s', 'EXPORT_NAME=foo']
if '-g' in self.emcc_args:
self.emcc_args += ['-DDEBUG']
self.do_runf(test_file('core/test_return_address.c'), 'passed')
def test_emscripten_atomics_stub(self):
self.do_run_in_out_file_test('core/pthread/emscripten_atomics.c')
@no_asan('incompatibility with atomics')
@node_pthreads
def test_emscripten_atomics(self):
self.set_setting('USE_PTHREADS')
self.do_run_in_out_file_test('core/pthread/emscripten_atomics.c')
@no_asan('incompatibility with atomics')
@node_pthreads
def test_emscripten_futexes(self):
self.set_setting('USE_PTHREADS')
self.do_run_in_out_file_test('core/pthread/emscripten_futexes.c')
@node_pthreads
def test_stdio_locking(self):
  """stdio streams must be internally locked so concurrent printf output
  from multiple threads does not interleave."""
  # Pass an int for consistency with every other PTHREAD_POOL_SIZE call
  # site in this file (the string '2' also worked, but only incidentally).
  self.set_setting('PTHREAD_POOL_SIZE', 2)
  self.set_setting('EXIT_RUNTIME')
  self.do_run_in_out_file_test('core', 'test_stdio_locking.c')
@needs_dylink
@node_pthreads
def test_pthread_dylink_basics(self):
self.emcc_args.append('-Wno-experimental')
self.set_setting('PROXY_TO_PTHREAD')
self.set_setting('EXIT_RUNTIME')
self.do_basic_dylink_test()
@needs_dylink
@node_pthreads
def test_pthread_dylink(self):
self.emcc_args.append('-Wno-experimental')
self.set_setting('EXIT_RUNTIME')
self.set_setting('USE_PTHREADS')
self.set_setting('LLD_REPORT_UNDEFINED')
self.set_setting('PTHREAD_POOL_SIZE', 2)
main = test_file('core/pthread/test_pthread_dylink.c')
# test with a long .so name, as a regression test for
# https://github.com/emscripten-core/emscripten/issues/14833
# where we had a bug with long names + TextDecoder + pthreads + dylink
very_long_name = 'very_very_very_very_very_very_very_very_very_long.so'
self.dylink_testf(main, so_name=very_long_name,
need_reverse=False)
@needs_dylink
@node_pthreads
def test_pthread_dylink_tls(self):
  """Test TLS variables in a dynamically linked side module under pthreads."""
  self.emcc_args.append('-Wno-experimental')
  self.set_setting('EXIT_RUNTIME')
  self.set_setting('USE_PTHREADS')
  # Fix: pass the setting name and its value as separate arguments. The
  # previous set_setting('PTHREAD_POOL_SIZE=1') defined a bogus setting
  # literally named 'PTHREAD_POOL_SIZE=1' and never sized the pthread
  # pool (compare the correct call sites in test_pthread_create_pool and
  # test_pthread_dylink).
  self.set_setting('PTHREAD_POOL_SIZE', 1)
  main = test_file('core/pthread/test_pthread_dylink_tls.c')
  self.dylink_testf(main, need_reverse=False)
@needs_dylink
@node_pthreads
def test_Module_dynamicLibraries_pthreads(self):
"""Verify Module.dynamicLibraries (set at runtime via a pre-js) works
when pthreads are enabled, i.e. the worker threads also load the
dynamic library listed there.
"""
# test that Module.dynamicLibraries works with pthreads
self.emcc_args += ['-pthread', '-Wno-experimental']
self.emcc_args += ['--extern-pre-js', 'pre.js']
self.set_setting('PROXY_TO_PTHREAD')
self.set_setting('EXIT_RUNTIME')
# This test is for setting dynamicLibraries at runtime so we don't
# want emscripten loading `liblib.so` automatically (which it would
# do without this setting).
self.set_setting('NO_AUTOLOAD_DYLIBS')
create_file('pre.js', '''
if (!global.Module) {
// This is the initial load (not a worker)
// Define the initial state of Module as we would
// in the html shell file.
// Use var to escape the scope of the if statement
var Module = {
dynamicLibraries: ['liblib.so']
};
}
''')
self.dylink_test(
r'''
#include <stdio.h>
int side();
int main() {
printf("result is %d", side());
return 0;
}
''',
r'''
int side() { return 42; }
''',
'result is 42')
# Tests the emscripten_get_exported_function() API.
def test_emscripten_get_exported_function(self):
# Could also test with -s ALLOW_TABLE_GROWTH=1
self.set_setting('RESERVED_FUNCTION_POINTERS', 2)
self.emcc_args += ['-lexports.js']
self.do_core_test('test_get_exported_function.cpp')
# Tests the emscripten_get_exported_function() API.
@no_asan('TODO: ASan support in minimal runtime')
def test_minimal_runtime_emscripten_get_exported_function(self):
# Could also test with -s ALLOW_TABLE_GROWTH=1
self.set_setting('RESERVED_FUNCTION_POINTERS', 2)
self.set_setting('MINIMAL_RUNTIME')
self.emcc_args += ['-lexports.js']
self.do_core_test('test_get_exported_function.cpp')
# Marked as impure since the WASI reactor modules (modules without main)
# are not yet suppored by the wasm engines we test against.
@also_with_standalone_wasm(impure=True)
def test_undefined_main(self):
if self.get_setting('STANDALONE_WASM'):
# In standalone we don't support implicitly building without main. The user has to explicitly
# opt out (see below).
err = self.expect_fail([EMCC, test_file('core/test_ctors_no_main.cpp')] + self.get_emcc_args())
self.assertContained('error: undefined symbol: main (referenced by top-level compiled C/C++ code)', err)
self.assertContained('warning: To build in STANDALONE_WASM mode without a main(), use emcc --no-entry', err)
elif not self.get_setting('LLD_REPORT_UNDEFINED') and not self.get_setting('STRICT'):
# Traditionally in emscripten we allow main to be implicitly undefined. This allows programs
# with a main and libraries without a main to be compiled identically.
# However we are trying to move away from that model to a more explicit opt-out model. See:
# https://github.com/emscripten-core/emscripten/issues/9640
self.do_core_test('test_ctors_no_main.cpp')
# Disabling IGNORE_MISSING_MAIN should cause link to fail due to missing main
self.set_setting('IGNORE_MISSING_MAIN', 0)
err = self.expect_fail([EMCC, test_file('core/test_ctors_no_main.cpp')] + self.get_emcc_args())
self.assertContained('error: entry symbol not defined (pass --no-entry to suppress): main', err)
# In non-standalone mode exporting an empty list of functions signal that we don't
# have a main and so should not generate an error.
self.set_setting('EXPORTED_FUNCTIONS', [])
self.do_core_test('test_ctors_no_main.cpp')
self.clear_setting('EXPORTED_FUNCTIONS')
def test_undefined_main_explict(self):
# If we pass --no-entry this test should compile without issue
self.emcc_args.append('--no-entry')
self.do_core_test('test_ctors_no_main.cpp')
def test_undefined_main_wasm_output(self):
if not can_do_standalone(self):
self.skipTest('standalone mode only')
err = self.expect_fail([EMCC, '-o', 'out.wasm', test_file('core/test_ctors_no_main.cpp')] + self.get_emcc_args())
self.assertContained('undefined symbol: main', err)
def test_export_start(self):
if not can_do_standalone(self):
self.skipTest('standalone mode only')
self.set_setting('STANDALONE_WASM')
self.set_setting('EXPORTED_FUNCTIONS', ['__start'])
self.do_core_test('test_hello_world.c')
@unittest.skip("memory64 functionality only partially working")
def test_memory64_hello_world(self):
self.set_setting('MEMORY64', 2)
self.do_core_test('test_hello_world.c')
# Tests the operation of API found in #include <emscripten/math.h>
def test_emscripten_math(self):
self.do_core_test('test_emscripten_math.c')
# Tests that users can pass custom JS options from command line using
# the -jsDfoo=val syntax:
# See https://github.com/emscripten-core/emscripten/issues/10580.
def test_custom_js_options(self):
self.emcc_args += ['--js-library', test_file('core/test_custom_js_settings.js'), '-jsDCUSTOM_JS_OPTION=1']
self.do_core_test('test_custom_js_settings.c')
self.assertContained('cannot change built-in settings values with a -jsD directive', self.expect_fail([EMCC, '-jsDWASM=0']))
# Tests <emscripten/stack.h> API
@no_asan('stack allocation sizes are no longer predictable')
def test_emscripten_stack(self):
self.set_setting('TOTAL_STACK', 4 * 1024 * 1024)
self.do_core_test('test_stack_get_free.c')
# Tests settings.ABORT_ON_WASM_EXCEPTIONS
def test_abort_on_exceptions(self):
# Explictly disable EXIT_RUNTIME, since otherwise addOnPostRun does not work.
# https://github.com/emscripten-core/emscripten/issues/15080
self.set_setting('EXIT_RUNTIME', 0)
self.set_setting('ABORT_ON_WASM_EXCEPTIONS')
self.set_setting('EXPORTED_RUNTIME_METHODS', ['ccall', 'cwrap'])
self.emcc_args += ['--bind', '--post-js', test_file('core/test_abort_on_exception_post.js')]
self.do_core_test('test_abort_on_exception.cpp', interleaved_output=False)
@needs_dylink
def test_gl_main_module(self):
self.set_setting('MAIN_MODULE')
self.do_runf(test_file('core/test_gl_get_proc_address.c'))
@needs_dylink
def test_main_module_js_symbol(self):
self.set_setting('MAIN_MODULE', 2)
self.emcc_args += ['--js-library', test_file('core/test_main_module_js_symbol.js')]
self.do_runf(test_file('core/test_main_module_js_symbol.c'))
def test_REVERSE_DEPS(self):
"""Exercise the three REVERSE_DEPS modes: 'auto' (default) and 'all'
must link; 'all' must produce a larger binary than the baseline; 'none'
must fail to link with an undefined ntohs (the reverse dependency of
connect that 'auto' would normally add).
"""
create_file('connect.c', '#include <sys/socket.h>\nint main() { return (int)(long)&connect; }')
self.run_process([EMCC, 'connect.c'])
base_size = os.path.getsize('a.out.wasm')
# 'auto' should work (it's the default)
self.run_process([EMCC, 'connect.c', '-sREVERSE_DEPS=auto'])
# 'all' should work too although it should produce a larger binary
self.run_process([EMCC, 'connect.c', '-sREVERSE_DEPS=all'])
self.assertGreater(os.path.getsize('a.out.wasm'), base_size)
# 'none' should fail to link because the dependency on ntohs was not added.
err = self.expect_fail([EMCC, 'connect.c', '-sREVERSE_DEPS=none'])
self.assertContained('undefined symbol: ntohs', err)
def test_emscripten_async_call(self):
self.set_setting('EXIT_RUNTIME')
self.do_run_in_out_file_test(test_file('core/test_emscripten_async_call.c'))
@no_asan('asyncify stack operations confuse asan')
@parameterized({
'': ([],),
'no_dynamic_execution': (['-s', 'DYNAMIC_EXECUTION=0'],)
})
def test_embind_lib_with_asyncify(self, args):
self.uses_es6 = True
self.emcc_args += [
'--bind',
'-s', 'ASYNCIFY',
'-s', 'ASYNCIFY_IMPORTS=["sleep_and_return"]',
'--post-js', test_file('core/embind_lib_with_asyncify.test.js'),
]
self.emcc_args += args
self.do_core_test('embind_lib_with_asyncify.cpp')
@no_asan('asyncify stack operations confuse asan')
def test_em_async_js(self):
self.uses_es6 = True
self.set_setting('ASYNCIFY')
self.maybe_closure()
self.do_core_test('test_em_async_js.c')
# Generate tests for everything
def make_run(name, emcc_args, settings=None, env=None):
"""Create a TestCoreBase subclass named *name* that runs every core test
with the given emcc flags, -s settings and extra environment variables.

The generated class applies *env* to os.environ in setUp (and asserts the
keys were not already present), reverts it in tearDown, and appends
*emcc_args* / applies *settings* before each test.
"""
if env is None:
env = {}
if settings is None:
settings = {}
if settings:
# Until we create a way to specify link-time settings separately from compile-time settings
# we need to pass this flag here to avoid warnings from compile-only commands.
emcc_args.append('-Wno-unused-command-line-argument')
# Build the test-class dynamically; run_name/env become class attributes.
TT = type(name, (TestCoreBase,), dict(run_name=name, env=env, __module__=__name__)) # noqa
def tearDown(self):
try:
super(TT, self).tearDown()
finally:
# Undo the environment changes made in setUp.
for k, v in self.env.items():
del os.environ[k]
TT.tearDown = tearDown
def setUp(self):
super(TT, self).setUp()
for k, v in self.env.items():
assert k not in os.environ, k + ' should not be in environment'
os.environ[k] = v
os.chdir(self.get_dir()) # Ensure the directory exists and go there
for k, v in settings.items():
self.set_setting(k, v)
self.emcc_args += emcc_args
TT.setUp = setUp
return TT
# Main wasm test modes
wasm0 = make_run('wasm0', emcc_args=['-O0'])
wasm0g = make_run('wasm0g', emcc_args=['-O0', '-g'])
wasm1 = make_run('wasm1', emcc_args=['-O1'])
wasm2 = make_run('wasm2', emcc_args=['-O2'])
wasm2g = make_run('wasm2g', emcc_args=['-O2', '-g'])
wasm3 = make_run('wasm3', emcc_args=['-O3'])
wasms = make_run('wasms', emcc_args=['-Os'])
wasmz = make_run('wasmz', emcc_args=['-Oz'])
wasmlto0 = make_run('wasmlto0', emcc_args=['-flto', '-O0'])
wasmlto1 = make_run('wasmlto1', emcc_args=['-flto', '-O1'])
wasmlto2 = make_run('wasmlto2', emcc_args=['-flto', '-O2'])
wasmlto3 = make_run('wasmlto3', emcc_args=['-flto', '-O3'])
wasmltos = make_run('wasmltos', emcc_args=['-flto', '-Os'])
wasmltoz = make_run('wasmltoz', emcc_args=['-flto', '-Oz'])
wasm2js0 = make_run('wasm2js0', emcc_args=['-O0'], settings={'WASM': 0})
wasm2js1 = make_run('wasm2js1', emcc_args=['-O1'], settings={'WASM': 0})
wasm2js2 = make_run('wasm2js2', emcc_args=['-O2'], settings={'WASM': 0})
wasm2js3 = make_run('wasm2js3', emcc_args=['-O3'], settings={'WASM': 0})
wasm2jss = make_run('wasm2jss', emcc_args=['-Os'], settings={'WASM': 0})
wasm2jsz = make_run('wasm2jsz', emcc_args=['-Oz'], settings={'WASM': 0})
# Secondary test modes - run directly when there is a specific need
# features
simd2 = make_run('simd2', emcc_args=['-O2', '-msimd128'])
bulkmem2 = make_run('bulkmem2', emcc_args=['-O2', '-mbulk-memory'])
# wasm
wasm2s = make_run('wasm2s', emcc_args=['-O2'], settings={'SAFE_HEAP': 1})
wasm2ss = make_run('wasm2ss', emcc_args=['-O2'], settings={'STACK_OVERFLOW_CHECK': 2})
# Add DEFAULT_TO_CXX=0
strict = make_run('strict', emcc_args=[], settings={'STRICT': 1})
lsan = make_run('lsan', emcc_args=['-fsanitize=leak', '--profiling'], settings={'ALLOW_MEMORY_GROWTH': 1})
asan = make_run('asan', emcc_args=['-fsanitize=address', '--profiling'], settings={'ALLOW_MEMORY_GROWTH': 1})
asani = make_run('asani', emcc_args=['-fsanitize=address', '--profiling', '--pre-js', os.path.join(os.path.dirname(__file__), 'asan-no-leak.js')],
settings={'ALLOW_MEMORY_GROWTH': 1})
# Experimental modes (not tested by CI)
lld = make_run('lld', emcc_args=[], settings={'LLD_REPORT_UNDEFINED': 1})
minimal0 = make_run('minimal0', emcc_args=['-g'], settings={'MINIMAL_RUNTIME': 1})
# TestCoreBase is just a shape for the specific subclasses, we don't test it itself
del TestCoreBase # noqa
| 33.856882 | 545 | 0.637918 |
import glob
import hashlib
import json
import logging
import os
import random
import re
import shutil
import sys
import time
import unittest
from pathlib import Path
from functools import wraps
if __name__ == '__main__':
raise Exception('do not run this file directly; do something like: tests/runner')
from tools.shared import try_delete, PIPE
from tools.shared import PYTHON, EMCC, EMAR
from tools.utils import WINDOWS, MACOS
from tools import shared, building, config, webassembly
from common import RunnerCore, path_from_root, requires_native_clang, test_file, create_file
from common import skip_if, needs_dylink, no_windows, no_mac, is_slow_test, parameterized
from common import env_modify, with_env_modify, disabled, node_pthreads
from common import read_file, read_binary, require_node, require_v8
from common import NON_ZERO, WEBIDL_BINDER, EMBUILDER
import clang_native
logger = logging.getLogger("test_core")
def wasm_simd(f):
  """Decorator: run the test with Wasm SIMD enabled.

  Requires v8 (run with --experimental-wasm-simd), skips under wasm2js
  (MVP only) and under -O3, which is too slow in the new LLVM pass manager
  (https://github.com/emscripten-core/emscripten/issues/13427).
  """
  # @wraps preserves the wrapped test's name/docstring in skip messages,
  # consistent with no_asan/no_lsan below.
  @wraps(f)
  def decorated(self):
    self.require_v8()
    if not self.is_wasm():
      self.skipTest('wasm2js only supports MVP for now')
    if '-O3' in self.emcc_args:
      self.skipTest('SIMD tests are too slow with -O3 in the new LLVM pass manager, https://github.com/emscripten-core/emscripten/issues/13427')
    self.emcc_args.append('-msimd128')
    self.emcc_args.append('-fno-lax-vector-conversions')
    self.v8_args.append('--experimental-wasm-simd')
    f(self)
  return decorated
def wasm_relaxed_simd(f):
  """Decorator: run the test with the relaxed-SIMD proposal enabled (wasm only)."""
  # @wraps preserves test metadata, consistent with no_asan/no_lsan below.
  @wraps(f)
  def decorated(self):
    if not self.is_wasm():
      self.skipTest('wasm2js only supports MVP for now')
    self.emcc_args.append('-mrelaxed-simd')
    f(self)
  return decorated
def needs_non_trapping_float_to_int(f):
  """Decorator: skip under wasm2js, which only supports the wasm MVP feature set."""
  # @wraps preserves test metadata, consistent with no_asan/no_lsan below.
  @wraps(f)
  def decorated(self):
    if not self.is_wasm():
      self.skipTest('wasm2js only supports MVP for now')
    f(self)
  return decorated
def also_with_wasm_bigint(f):
  """Decorator: run the test twice, without and (for wasm output) with WASM_BIGINT.

  The WASM_BIGINT run requires node with --experimental-wasm-bigint.
  """
  # @wraps preserves test metadata, consistent with no_asan/no_lsan below.
  @wraps(f)
  def decorated(self):
    self.set_setting('WASM_BIGINT', 0)
    f(self)
    if self.is_wasm():
      self.set_setting('WASM_BIGINT')
      self.require_node()
      self.node_args.append('--experimental-wasm-bigint')
      f(self)
  return decorated
def all_engines(f):
  """Decorator: run the test on all configured JS engines.

  Temporarily flips self.use_all_engines and widens ENVIRONMENT; the old
  use_all_engines value is restored even if the test raises.
  """
  # @wraps preserves test metadata, consistent with no_asan/no_lsan below.
  @wraps(f)
  def decorated(self):
    old = self.use_all_engines
    self.use_all_engines = True
    self.set_setting('ENVIRONMENT', 'web,node,shell')
    try:
      f(self)
    finally:
      self.use_all_engines = old
  return decorated
def with_both_exception_handling(f):
  """Parameterize the test to run with both emscripten (JS-based) exception
  handling and native Wasm exception handling."""
  assert callable(f)

  def metafunc(self, native_exceptions):
    if native_exceptions:
      # Wasm EH path: requires real wasm output and v8 with
      # --experimental-wasm-eh; incompatible with asan for now.
      if not self.is_wasm():
        self.skipTest('wasm2js does not support wasm exceptions')
      self.require_v8()
      if '-fsanitize=address' in self.emcc_args:
        self.skipTest('Wasm EH does not work with asan yet')
      self.emcc_args.append('-fwasm-exceptions')
      self.v8_args.append('--experimental-wasm-eh')
      f(self)
    else:
      # Emscripten (JS) exception handling path.
      self.set_setting('DISABLE_EXCEPTION_CATCHING', 0)
      f(self)

  # Consumed by the @parameterized machinery (see common.py): suffix -> args.
  metafunc._parameterize = {'': (False,),
                            'wasm_eh': (True,)}
  return metafunc
def no_wasm2js(note=''):
  """Decorator factory: mark a test as unsupported under wasm2js."""
  assert not callable(note)
  return lambda f: skip_if(f, 'is_wasm2js', note)
def also_with_noderawfs(func):
  """Decorator: run the test normally, then again under NODERAWFS.

  The second run adds -DNODERAWFS (so the test source can adapt) and is
  restricted to node, since NODERAWFS maps to the real filesystem.
  """
  # @wraps preserves test metadata, consistent with no_asan/no_lsan below.
  @wraps(func)
  def decorated(self):
    orig_args = self.emcc_args.copy()
    func(self)
    print('noderawfs')
    self.emcc_args = orig_args + ['-DNODERAWFS']
    self.set_setting('NODERAWFS')
    self.js_engines = [config.NODE_JS]
    func(self)
  return decorated
def can_do_standalone(self):
  """Return True when the current configuration can build STANDALONE_WASM.

  Standalone mode needs real wasm output and is incompatible with
  STACK_OVERFLOW_CHECK>=2, MINIMAL_RUNTIME, SAFE_HEAP and asan builds.
  """
  if not self.is_wasm():
    return False
  if self.get_setting('STACK_OVERFLOW_CHECK', 0) >= 2:
    return False
  if self.get_setting('MINIMAL_RUNTIME'):
    return False
  if self.get_setting('SAFE_HEAP'):
    return False
  return '-fsanitize=address' not in self.emcc_args
def also_with_wasmfs(func):
  """Decorator: run the test with the default JS filesystem, then with WASMFS.

  Skips the second run under STANDALONE_WASM, which cannot be combined
  with WASMFS.
  """
  # @wraps preserves test metadata, consistent with no_asan/no_lsan below.
  @wraps(func)
  def decorated(self):
    func(self)
    print('wasmfs')
    if self.get_setting('STANDALONE_WASM'):
      self.skipTest("test currently cannot run both with WASMFS and STANDALONE_WASM")
    self.set_setting('WASMFS')
    func(self)
  return decorated
def also_with_standalone_wasm(wasm2c=False, impure=False):
  """Parameterize the test to also run in STANDALONE_WASM mode (when the
  current configuration allows it; see can_do_standalone).

  wasm2c: additionally run the standalone build through WASM2C.
  impure: the wasm is not fully standalone (e.g. uses JS APIs), so skip the
          bare wasm VM runs and use node only.
  """
  def decorated(func):
    def metafunc(self, standalone):
      if not standalone:
        func(self)
      else:
        if can_do_standalone(self):
          self.set_setting('STANDALONE_WASM')
          # we will not legalize the JS ffi interface, so we must use BigInt
          # support in order for JS to have a chance to run this without trapping
          # when it sees an i64 on the ffi.
          self.set_setting('WASM_BIGINT')
          self.emcc_args.append('-Wno-unused-command-line-argument')
          if impure:
            self.wasm_engines = []  # only run in node, which has BigInt support
          self.js_engines = [config.NODE_JS]
          self.node_args.append('--experimental-wasm-bigint')
          func(self)
          if wasm2c:
            print('wasm2c')
            self.set_setting('WASM2C')
            self.wasm_engines = []
            func(self)

    # Consumed by the @parameterized machinery (see common.py): suffix -> args.
    metafunc._parameterize = {'': (False,),
                              'standalone': (True,)}
    return metafunc

  return decorated
def no_optimize(note=''):
  """Decorator factory: skip the test when building at any -O level above -O0."""
  assert not callable(note)

  def decorator(func):
    assert callable(func)

    # @wraps preserves test metadata, consistent with no_asan/no_lsan below.
    @wraps(func)
    def decorated(self):
      if self.is_optimizing():
        self.skipTest(note)
      func(self)
    return decorated
  return decorator
def needs_make(note=''):
  """Decorator factory: skip on Windows, where make-based builds are unavailable on the bots."""
  assert not callable(note)
  if not WINDOWS:
    return lambda f: f
  return unittest.skip('Tool not available on Windows bots (%s)' % note)
def no_asan(note):
  """Decorator factory: skip the test when building with AddressSanitizer."""
  assert not callable(note)

  def outer(func):
    assert callable(func)

    @wraps(func)
    def inner(self, *args, **kwargs):
      if '-fsanitize=address' in self.emcc_args:
        self.skipTest(note)
      func(self, *args, **kwargs)
    return inner
  return outer
def no_lsan(note):
  """Decorator factory: skip the test when building with LeakSanitizer."""
  assert not callable(note)

  def outer(func):
    assert callable(func)

    @wraps(func)
    def inner(self, *args, **kwargs):
      if '-fsanitize=leak' in self.emcc_args:
        self.skipTest(note)
      func(self, *args, **kwargs)
    return inner
  return outer
def make_no_decorator_for_setting(name):
  """Build a no_<setting>(note) decorator factory that skips the test when
  the given setting is enabled (via -s NAME=1 on the command line or via
  set_setting)."""
  flag = name + '=1'

  def outer_decorator(note):
    assert not callable(note)

    def decorator(f):
      assert callable(f)

      @wraps(f)
      def decorated(self, *args, **kwargs):
        if flag in self.emcc_args or self.get_setting(name):
          self.skipTest(note)
        f(self, *args, **kwargs)
      return decorated
    return decorator
  return outer_decorator
# Skip-decorators for tests that are incompatible with these settings.
no_minimal_runtime = make_no_decorator_for_setting('MINIMAL_RUNTIME')
no_safe_heap = make_no_decorator_for_setting('SAFE_HEAP')
def is_sanitizing(args):
  """Return True if any -fsanitize= flag appears in the stringified args."""
  flattened = str(args)
  return '-fsanitize=' in flattened
class TestCoreBase(RunnerCore):
  """Shared body of the core test suite; make_run() creates concrete
  subclasses with specific emcc flags/settings (see the mode table)."""

  def is_wasm2js(self):
    # True when compiling to JS (WASM=0) rather than to wasm.
    return self.get_setting('WASM') == 0

  def is_optimizing(self):
    # True for any -O flag other than -O0.
    return '-O' in str(self.emcc_args) and '-O0' not in self.emcc_args

  def can_use_closure(self):
    # Closure needs an optimized (-O2/-Os) build without -g/--profiling.
    return '-g' not in self.emcc_args and '--profiling' not in self.emcc_args and ('-O2' in self.emcc_args or '-Os' in self.emcc_args)
def maybe_closure(self):
if '--closure=1' not in self.emcc_args and self.can_use_closure():
self.emcc_args += ['--closure=1']
logger.debug('using closure compiler..')
return True
return False
  def assertStartswith(self, output, prefix):
    # Compare only the leading len(prefix) characters of output.
    self.assertEqual(prefix, output[:len(prefix)])
def verify_in_strict_mode(self, filename):
js = read_file(filename)
filename += '.strict.js'
with open(filename, 'w') as outfile:
outfile.write('"use strict";\n' + js)
self.run_js(filename)
  def do_core_test(self, testname, **kwargs):
    # Compile and run tests/core/<testname>, comparing against its .out file.
    self.do_run_in_out_file_test(Path('core', testname), **kwargs)
  def get_bullet_library(self, use_cmake):
    """Build (or fetch from cache) the third_party Bullet physics libraries,
    using either the cmake or the autoconf build, and return them."""
    if use_cmake:
      configure_commands = ['cmake', '.']
      configure_args = ['-DBUILD_DEMOS=OFF', '-DBUILD_EXTRAS=OFF', '-DUSE_GLUT=OFF']
      # Depending on whether 'configure' or 'cmake' is used to build, Bullet
      # places output files in different directory structures.
      generated_libs = [Path('src/BulletDynamics/libBulletDynamics.a'),
                        Path('src/BulletCollision/libBulletCollision.a'),
                        Path('src/LinearMath/libLinearMath.a')]
    else:
      configure_commands = ['sh', './configure']
      configure_args = ['--disable-shared', '--host=i686-pc-linux-gnu',
                        '--disable-demos', '--disable-dependency-tracking']
      generated_libs = [Path('src/.libs/libBulletDynamics.a'),
                        Path('src/.libs/libBulletCollision.a'),
                        Path('src/.libs/libLinearMath.a')]

    # cache_name_extra keeps the cmake and configure builds in separate cache slots.
    return self.get_library('third_party/bullet', generated_libs,
                            configure=configure_commands,
                            configure_args=configure_args,
                            cache_name_extra=configure_commands[0])
  # Basic smoke tests.
  @also_with_standalone_wasm()
  @also_with_wasmfs
  def test_hello_world(self):
    self.do_core_test('test_hello_world.c')
    # must not leak this internal marker into the output JS
    self.assertNotContained('EMSCRIPTEN_GENERATED_FUNCTIONS', read_file('test_hello_world.js'))

  def test_wasm_synchronous_compilation(self):
    self.set_setting('STRICT_JS')
    self.set_setting('WASM_ASYNC_COMPILATION', 0)
    self.do_core_test('test_hello_world.c')

  @also_with_standalone_wasm()
  def test_hello_argc(self):
    self.do_core_test('test_hello_argc.c')

  def test_intvars(self):
    self.do_core_test('test_intvars.cpp')

  def test_sintvars(self):
    self.do_core_test('test_sintvars.c')

  def test_int53(self):
    self.emcc_args += ['-s', 'DEFAULT_LIBRARY_FUNCS_TO_INCLUDE=[$convertI32PairToI53,$convertU32PairToI53,$readI53FromU64,$readI53FromI64,$writeI53ToI64,$writeI53ToI64Clamped,$writeI53ToU64Clamped,$writeI53ToI64Signaling,$writeI53ToU64Signaling]']
    self.do_core_test('test_int53.c', interleaved_output=False)
  # 64-bit integer tests.
  def test_i64(self):
    self.do_core_test('test_i64.c')

  def test_i64_2(self):
    self.do_core_test('test_i64_2.cpp')

  def test_i64_3(self):
    self.do_core_test('test_i64_3.cpp')

  def test_i64_4(self):
    self.do_core_test('test_i64_4.c')

  def test_i64_b(self):
    self.do_core_test('test_i64_b.cpp')

  def test_i64_cmp(self):
    self.do_core_test('test_i64_cmp.cpp')

  def test_i64_cmp2(self):
    self.do_core_test('test_i64_cmp2.c')

  def test_i64_double(self):
    self.do_core_test('test_i64_double.cpp')

  def test_i64_umul(self):
    self.do_core_test('test_i64_umul.c')

  @also_with_standalone_wasm()
  def test_i64_precise(self):
    self.do_core_test('test_i64_precise.c')

  def test_i64_precise_needed(self):
    self.do_core_test('test_i64_precise_needed.c')

  def test_i64_llabs(self):
    self.do_core_test('test_i64_llabs.c')

  def test_i64_zextneg(self):
    self.do_core_test('test_i64_zextneg.c')

  def test_i64_7z(self):
    # needs to flush stdio streams
    self.set_setting('EXIT_RUNTIME')
    self.do_core_test('test_i64_7z.c', args=['hallo'])

  def test_i64_i16(self):
    self.do_core_test('test_i64_i16.c')

  def test_i64_qdouble(self):
    self.do_core_test('test_i64_qdouble.c')

  def test_i64_varargs(self):
    self.do_core_test('test_i64_varargs.c', args='waka fleefl asdfasdfasdfasdf'.split())

  @no_wasm2js('wasm_bigint')
  def test_i64_invoke_bigint(self):
    self.set_setting('WASM_BIGINT')
    self.emcc_args += ['-fexceptions']
    self.node_args += ['--experimental-wasm-bigint']
    self.do_core_test('test_i64_invoke_bigint.cpp', js_engines=[config.NODE_JS])
  # varargs, floating point and integer-precision tests.
  def test_vararg_copy(self):
    self.do_run_in_out_file_test('va_arg/test_va_copy.c')

  def test_llvm_fabs(self):
    self.do_core_test('test_llvm_fabs.c')

  def test_double_varargs(self):
    self.do_core_test('test_double_varargs.c')

  def test_trivial_struct_varargs(self):
    self.do_core_test('test_trivial_struct_varargs.c')

  def test_struct_varargs(self):
    self.do_core_test('test_struct_varargs.c')

  def test_zero_struct_varargs(self):
    self.do_core_test('test_zero_struct_varargs.c')

  # 'zzz' prefix keeps this out of unittest's test_* collection (disabled test).
  def zzztest_nested_struct_varargs(self):
    self.do_core_test('test_nested_struct_varargs.c')

  def test_i32_mul_precise(self):
    self.do_core_test('test_i32_mul_precise.c')

  def test_i16_emcc_intrinsic(self):
    # needs to flush stdio streams
    self.set_setting('EXIT_RUNTIME')
    self.do_core_test('test_i16_emcc_intrinsic.c')

  def test_double_i64_conversion(self):
    self.do_core_test('test_double_i64_conversion.c')

  def test_float32_precise(self):
    self.do_core_test('test_float32_precise.c')

  def test_negative_zero(self):
    self.do_core_test('test_negative_zero.c')

  def test_literal_negative_zero(self):
    self.do_core_test('test_literal_negative_zero.c')

  @also_with_standalone_wasm()
  def test_bswap64(self):
    self.do_core_test('test_bswap64.cpp')

  def test_sha1(self):
    self.do_runf(test_file('sha1.c'), 'SHA1=15dd99a1991e0b3826fede3deffc1feba42278e6')
  def test_wasm32_unknown_emscripten(self):
    # No other configuration is supported, so always run this.
    self.do_runf(test_file('wasm32-unknown-emscripten.c'), '')

  def test_cube2md5(self):
    self.emcc_args += ['--embed-file', 'cube2md5.txt']
    shutil.copyfile(test_file('cube2md5.txt'), 'cube2md5.txt')
    self.do_run_from_file(test_file('cube2md5.cpp'), test_file('cube2md5.ok'), assert_returncode=NON_ZERO)

  @also_with_standalone_wasm(wasm2c=True)
  @needs_make('make')
  def test_cube2hash(self):
    # Build the cube2hash library, then hash known inputs and check the values.
    self.do_run('// empty file', 'Usage: hashstring <seed>',
                libraries=self.get_library('third_party/cube2hash', ['libcube2hash.a'], configure=None),
                includes=[test_file('third_party/cube2hash')], assert_returncode=NON_ZERO)

    for text, output in [('fleefl', '892BDB6FD3F62E863D63DA55851700FDE3ACF30204798CE9'),
                         ('fleefl2', 'AA2CC5F96FC9D540CA24FDAF1F71E2942753DB83E8A81B61'),
                         ('64bitisslow', '64D8470573635EC354FEE7B7F87C566FCAF1EFB491041670')]:
      self.do_run('src.js', 'hash value: ' + output, args=[text], no_build=True)
  def test_unaligned(self):
    # NOTE: skipped unconditionally; everything below the skipTest is dead code
    # kept for reference.
    self.skipTest('LLVM marks the reads of s as fully aligned, making this test invalid')
    src = r'''
      #include <stdio.h>

      struct S {
        double x;
        int y;
      };

      int main() {
        // the 64-bit value here will not be 8-byte aligned
        S s0[3] = { {0x12a751f430142, 22}, {0x17a5c85bad144, 98}, {1, 1}};
        char buffer[10*sizeof(S)];
        int b = int(buffer);
        S *s = (S*)(b + 4-b%8);
        s[0] = s0[0];
        s[1] = s0[1];
        s[2] = s0[2];

        printf("*%d : %d : %d\n", sizeof(S), ((unsigned int)&s[0]) % 8 != ((unsigned int)&s[1]) % 8,
                                             ((unsigned int)&s[1]) - ((unsigned int)&s[0]));
        s[0].x++;
        s[0].y++;
        s[1].x++;
        s[1].y++;
        printf("%.1f,%d,%.1f,%d\n", s[0].x, s[0].y, s[1].x, s[1].y);
        return 0;
      }
      '''
    self.do_run(src, '*12 : 1 : 12\n328157500735811.0,23,416012775903557.0,99\n')

    return # TODO: continue to the next part here (unreachable)

    # Test for undefined behavior in C with aligned loads/stores in unaligned ways
    src = r'''
      #include <stdio.h>

      int main()
      {
        int x[10];
        char *p = (char*)&x[0];
        p++;
        short *q = (short*)p;
        *q = 300;
        printf("*%d:%ld*\n", *q, ((long)q)%2);
        int *r = (int*)p;
        *r = 515559;
        printf("*%d*\n", *r);
        long long *t = (long long*)p;
        *t = 42949672960;
        printf("*%lld*\n", *t);
        return 0;
      }
    '''

    try:
      self.do_run(src, '*300:1*\n*515559*\n*42949672960*\n')
    except Exception as e:
      assert 'must be aligned' in str(e), e # expected to fail without emulation
  def test_align64(self):
    # Checks struct/union layout and 8-byte alignment of doubles inside unions.
    src = r'''
      #include <stdio.h>

      // inspired by poppler

      enum Type {
        A = 10,
        B = 20
      };

      struct Object {
        Type type;
        union {
          int intg;
          double real;
          char *name;
        };
      };

      struct Principal {
        double x;
        Object a;
        double y;
      };

      int main(int argc, char **argv)
      {
        int base = argc-1;
        Object *o = NULL;
        printf("%zu,%zu\n", sizeof(Object), sizeof(Principal));
        printf("%ld,%ld,%ld,%ld\n", (long)&o[base].type, (long)&o[base].intg, (long)&o[base].real, (long)&o[base].name);
        printf("%ld,%ld,%ld,%ld\n", (long)&o[base+1].type, (long)&o[base+1].intg, (long)&o[base+1].real, (long)&o[base+1].name);
        Principal p, q;
        p.x = p.y = q.x = q.y = 0;
        p.a.type = A;
        p.a.real = 123.456;
        *(&q.a) = p.a;
        printf("%.2f,%d,%.2f,%.2f : %.2f,%d,%.2f,%.2f\n", p.x, p.a.type, p.a.real, p.y, q.x, q.a.type, q.a.real, q.y);
        return 0;
      }
    '''

    self.do_run(src, '''16,32
0,8,8,8
16,24,24,24
0.00,10,123.46,0.00 : 0.00,10,123.46,0.00
''')
  @no_asan('asan errors on corner cases we check')
  def test_aligned_alloc(self):
    # Exercises aligned_alloc, including non-power-of-two alignments.
    self.do_runf(test_file('test_aligned_alloc.c'), '',
                 emcc_args=['-Wno-non-power-of-two-alignment'])
  def test_unsigned(self):
    # Signedness corner cases: re-signing of constants, unsigned comparisons,
    # and zero/sign extension of narrow types.
    src = '''
      #include <stdio.h>
      const signed char cvals[2] = { -1, -2 }; // compiler can store this is a string, so -1 becomes \\FF, and needs re-signing
      int main()
      {
        {
          unsigned char x = 200;
          printf("*%d*\\n", x);
          unsigned char y = -22;
          printf("*%d*\\n", y);
        }

        int varey = 100;
        unsigned int MAXEY = -1, MAXEY2 = -77;
        printf("*%u,%d,%u*\\n", MAXEY, varey >= MAXEY, MAXEY2); // 100 >= -1? not in unsigned!

        int y = cvals[0];
        printf("*%d,%d,%d,%d*\\n", cvals[0], cvals[0] < 0, y, y < 0);
        y = cvals[1];
        printf("*%d,%d,%d,%d*\\n", cvals[1], cvals[1] < 0, y, y < 0);

        // zext issue - see mathop in jsifier
        unsigned char x8 = -10;
        unsigned long hold = 0;
        hold += x8;
        int y32 = hold+50;
        printf("*%lu,%d*\\n", hold, y32);

        // Comparisons
        x8 = 0;
        for (int i = 0; i < 254; i++) x8++; // make it an actual 254 in JS - not a -2
        printf("*%d,%d*\\n", x8+1 == 0xff, x8+1 != 0xff); // 0xff may be '-1' in the bitcode

        return 0;
      }
    '''
    self.do_run(src, '*4294967295,0,4294967219*\n*-1,1,-1,1*\n*-2,1,-2,1*\n*246,296*\n*1,0*')

    self.emcc_args.append('-Wno-constant-conversion')
    src = '''
      #include <stdio.h>
      int main()
      {
        {
          unsigned char x;
          unsigned char *y = &x;
          *y = -1;
          printf("*%d*\\n", x);
        }
        {
          unsigned short x;
          unsigned short *y = &x;
          *y = -1;
          printf("*%d*\\n", x);
        }
        /*{ // This case is not checked. The hint for unsignedness is just the %u in printf, and we do not analyze that
          unsigned int x;
          unsigned int *y = &x;
          *y = -1;
          printf("*%u*\\n", x);
        }*/
        {
          char x;
          char *y = &x;
          *y = 255;
          printf("*%d*\\n", x);
        }
        {
          char x;
          char *y = &x;
          *y = 65535;
          printf("*%d*\\n", x);
        }
        {
          char x;
          char *y = &x;
          *y = 0xffffffff;
          printf("*%d*\\n", x);
        }
        return 0;
      }
    '''
    self.do_run(src, '*255*\n*65535*\n*-1*\n*-1*\n*-1*')
  # Bitfield, float and libm tests.
  def test_bitfields(self):
    self.do_core_test('test_bitfields.c')

  def test_floatvars(self):
    self.do_core_test('test_floatvars.cpp')

  def test_closebitcasts(self):
    self.do_core_test('closebitcasts.c')

  def test_fast_math(self):
    self.emcc_args += ['-ffast-math']
    self.do_core_test('test_fast_math.c', args=['5', '6', '8'])

  def test_zerodiv(self):
    self.do_core_test('test_zerodiv.c')

  def test_zero_multiplication(self):
    # needs to flush stdio streams
    self.set_setting('EXIT_RUNTIME')
    self.do_core_test('test_zero_multiplication.c')

  def test_isnan(self):
    self.do_core_test('test_isnan.c')

  def test_globaldoubles(self):
    self.do_core_test('test_globaldoubles.c')

  def test_math(self):
    self.do_core_test('test_math.c')

  def test_erf(self):
    self.do_core_test('test_erf.c')

  def test_math_hyperbolic(self):
    self.do_core_test('test_math_hyperbolic.c')

  def test_math_lgamma(self):
    self.do_run_in_out_file_test('math/lgamma.c', assert_returncode=NON_ZERO)

  def test_math_fmodf(self):
    self.do_run_in_out_file_test('math/fmodf.c')

  def test_frexp(self):
    self.do_core_test('test_frexp.c')

  def test_rounding(self):
    # needs to flush stdio streams
    self.set_setting('EXIT_RUNTIME')
    self.do_core_test('test_rounding.c')

  def test_fcvt(self):
    self.do_core_test('test_fcvt.cpp')

  def test_llrint(self):
    self.do_core_test('test_llrint.c')

  def test_getgep(self):
    self.do_core_test('test_getgep.c')
  def test_multiply_defined_symbols(self):
    # The same symbol f() is defined in two archives; the link must resolve to
    # the first archive's definition (expected output 'result: 1').
    create_file('a1.c', 'int f() { return 1; }')
    create_file('a2.c', 'void x() {}')
    create_file('b1.c', 'int f() { return 2; }')
    create_file('b2.c', 'void y() {}')
    create_file('main.c', r'''
      #include <stdio.h>
      int f();
      int main() {
        printf("result: %d\n", f());
        return 0;
      }
    ''')

    self.emcc('a1.c', ['-c'])
    self.emcc('a2.c', ['-c'])
    self.emcc('b1.c', ['-c'])
    self.emcc('b2.c', ['-c'])
    self.emcc('main.c', ['-c'])

    building.emar('cr', 'liba.a', ['a1.c.o', 'a2.c.o'])
    building.emar('cr', 'libb.a', ['b1.c.o', 'b2.c.o'])

    building.link_to_object(['main.c.o', 'liba.a', 'libb.a'], 'all.o')

    self.emcc('all.o', self.get_emcc_args(), 'all.js')
    self.do_run('all.js', 'result: 1', no_build=True)
  # Control flow and stack tests.
  def test_if(self):
    self.do_core_test('test_if.c')

  def test_if_else(self):
    self.do_core_test('test_if_else.c')

  def test_loop(self):
    self.do_core_test('test_loop.c')

  def test_stack(self):
    self.set_setting('INLINING_LIMIT')
    # some extra coverage in all test suites for stack checks
    self.set_setting('STACK_OVERFLOW_CHECK', 2)
    self.do_core_test('test_stack.c')
  def test_stack_align(self):
    # Stack variables must report correct alignment (all remainders zero).
    src = test_file('core/test_stack_align.cpp')

    def test():
      self.do_runf(src, ['''align 4: 0
align 8: 0
align 16: 0
align 32: 0
base align: 0, 0, 0, 0'''])
    test()
  @no_asan('stack size is too low for asan to work properly')
  def test_stack_placement(self):
    # Run with the default and then a moved GLOBAL_BASE to check the stack is
    # placed correctly either way.
    self.set_setting('TOTAL_STACK', 1024)
    self.do_core_test('test_stack_placement.c')
    self.set_setting('GLOBAL_BASE', 102400)
    self.do_core_test('test_stack_placement.c')

  @no_asan('asan does not support main modules')
  @no_wasm2js('MAIN_MODULE support')
  def test_stack_placement_pic(self):
    # Same as above, but as a MAIN_MODULE (PIC) build.
    self.set_setting('TOTAL_STACK', 1024)
    self.set_setting('MAIN_MODULE')
    self.do_core_test('test_stack_placement.c')
    self.set_setting('GLOBAL_BASE', 102400)
    self.do_core_test('test_stack_placement.c')
  # String and basic libc tests.
  def test_strings(self):
    self.do_core_test('test_strings.c', args=['wowie', 'too', '74'])

  def test_strcmp_uni(self):
    self.do_core_test('test_strcmp_uni.c')

  def test_strndup(self):
    self.do_core_test('test_strndup.c')

  def test_errar(self):
    self.do_core_test('test_errar.c')

  def test_mainenv(self):
    self.do_core_test('test_mainenv.c')

  def test_funcs(self):
    self.do_core_test('test_funcs.c')

  def test_structs(self):
    self.do_core_test('test_structs.c')
  # Template C source shared by test_mallocstruct/test_newstruct:
  # {{gen_struct}} / {{del_struct}} are replaced with the allocate/free
  # pair under test.
  gen_struct_src = '''
        #include <stdio.h>
        #include <stdlib.h>
        #include "emscripten.h"

        struct S
        {
          int x, y;
        };
        int main()
        {
          S* a = {{gen_struct}};
          a->x = 51; a->y = 62;
          printf("*%d,%d*\\n", a->x, a->y);
          {{del_struct}}(a);
          return 0;
        }
  '''

  def test_mallocstruct(self):
    # Heap-allocated struct via malloc/free.
    self.do_run(self.gen_struct_src.replace('{{gen_struct}}', '(S*)malloc(sizeof(S))').replace('{{del_struct}}', 'free'), '*51,62*')
  # emmalloc (emscripten's own allocator) tests.
  @no_asan('ASan does not support custom memory allocators')
  @no_lsan('LSan does not support custom memory allocators')
  @parameterized({
    'normal': [],
    'memvalidate': ['-DEMMALLOC_MEMVALIDATE'],
    'memvalidate_verbose': ['-DEMMALLOC_MEMVALIDATE', '-DEMMALLOC_VERBOSE', '-DRANDOM_ITERS=130'],
  })
  def test_emmalloc(self, *args):
    # in newer clang+llvm, the internal calls to malloc in emmalloc may be optimized under
    # the assumption that they are external, so like in system_libs.py where we build
    # malloc, we need to disable builtin here too
    self.set_setting('MALLOC', 'none')
    self.emcc_args += ['-fno-builtin'] + list(args)

    self.do_run(read_file(path_from_root('system/lib/emmalloc.c')) +
                read_file(path_from_root('system/lib/sbrk.c')) +
                read_file(test_file('core/test_emmalloc.c')),
                read_file(test_file('core/test_emmalloc.out')), force_c=True)

  @no_asan('ASan does not support custom memory allocators')
  @no_lsan('LSan does not support custom memory allocators')
  def test_emmalloc_usable_size(self, *args):
    self.set_setting('MALLOC', 'emmalloc')
    self.emcc_args += list(args)
    self.do_core_test('test_malloc_usable_size.c')

  @no_optimize('output is sensitive to optimization flags, so only test unoptimized builds')
  @no_asan('ASan does not support custom memory allocators')
  @no_lsan('LSan does not support custom memory allocators')
  def test_emmalloc_memory_statistics(self, *args):
    self.set_setting('MALLOC', 'emmalloc')
    self.emcc_args += ['-s', 'INITIAL_MEMORY=128MB', '-g'] + list(args)
    self.do_core_test('test_emmalloc_memory_statistics.cpp')

  @no_optimize('output is sensitive to optimization flags, so only test unoptimized builds')
  @no_asan('ASan does not support custom memory allocators')
  @no_lsan('LSan does not support custom memory allocators')
  def test_emmalloc_trim(self, *args):
    self.set_setting('MALLOC', 'emmalloc')
    self.emcc_args += ['-s', 'INITIAL_MEMORY=128MB', '-s', 'ALLOW_MEMORY_GROWTH', '-s', 'MAXIMUM_MEMORY=2147418112'] + list(args)
    self.do_core_test('test_emmalloc_trim.cpp')

  # Test case against https://github.com/emscripten-core/emscripten/issues/10363
  def test_emmalloc_memalign_corruption(self, *args):
    self.set_setting('MALLOC', 'emmalloc')
    self.do_core_test('emmalloc_memalign_corruption.cpp')
  def test_newstruct(self):
    # Heap-allocated struct via C++ new/delete (see gen_struct_src above).
    self.do_run(self.gen_struct_src.replace('{{gen_struct}}', 'new S').replace('{{del_struct}}', 'delete'), '*51,62*')

  def test_addr_of_stacked(self):
    self.do_core_test('test_addr_of_stacked.c')

  def test_globals(self):
    self.do_core_test('test_globals.c')

  def test_linked_list(self):
    self.do_core_test('test_linked_list.c')

  def test_sup(self):
    self.do_run_in_out_file_test(test_file('core/test_sup.cpp'))
  @also_with_standalone_wasm()
  def test_assert(self):
    # A failing assert() must produce a nonzero exit code.
    self.do_core_test('test_assert.cpp', assert_returncode=NON_ZERO)

  def test_wcslen(self):
    self.do_core_test('test_wcslen.c')

  def test_regex(self):
    self.do_core_test('test_regex.c')

  @also_with_standalone_wasm(wasm2c=True, impure=True)
  def test_longjmp(self):
    self.do_core_test('test_longjmp.c')
  # setjmp/longjmp tests.
  def test_longjmp2(self):
    self.do_core_test('test_longjmp2.c')

  @needs_dylink
  def test_longjmp2_main_module(self):
    # Test for binaryen regression:
    # https://github.com/WebAssembly/binaryen/issues/2180
    self.set_setting('MAIN_MODULE')
    self.do_core_test('test_longjmp2.c')

  def test_longjmp3(self):
    self.do_core_test('test_longjmp3.c')

  def test_longjmp4(self):
    self.do_core_test('test_longjmp4.c')

  def test_longjmp_funcptr(self):
    self.do_core_test('test_longjmp_funcptr.c')

  def test_longjmp_repeat(self):
    self.do_core_test('test_longjmp_repeat.c')

  def test_longjmp_stacked(self):
    self.do_core_test('test_longjmp_stacked.c', assert_returncode=NON_ZERO)

  def test_longjmp_exc(self):
    self.do_core_test('test_longjmp_exc.c', assert_returncode=NON_ZERO)

  def test_longjmp_throw(self):
    # Interaction of longjmp with C++ exceptions, with catching on and off.
    for disable_throw in [0, 1]:
      print(disable_throw)
      self.set_setting('DISABLE_EXCEPTION_CATCHING', disable_throw)
      self.do_core_test('test_longjmp_throw.cpp')

  def test_longjmp_unwind(self):
    self.do_core_test('test_longjmp_unwind.c', assert_returncode=NON_ZERO)

  def test_longjmp_i64(self):
    self.emcc_args += ['-g']
    self.do_core_test('test_longjmp_i64.c', assert_returncode=NON_ZERO)

  def test_siglongjmp(self):
    self.do_core_test('test_siglongjmp.c')
  def test_setjmp_many(self):
    # Many setjmp calls in one function; the longjmp branch never triggers
    # (argc is never 1131), so each setjmp prints 0.
    src = r'''
      #include <stdio.h>
      #include <setjmp.h>

      int main(int argc, char** argv) {
        jmp_buf buf;
        for (int i = 0; i < NUM; i++) printf("%d\n", setjmp(buf));
        if (argc-- == 1131) longjmp(buf, 11);
        return 0;
      }
    '''
    for num in [1, 5, 20, 1000]:
      print('NUM=%d' % num)
      self.do_run(src.replace('NUM', str(num)), '0\n' * num)
  def test_setjmp_many_2(self):
    # setjmp/longjmp across a helper function, repeated in a loop.
    src = r'''
#include <setjmp.h>
#include <stdio.h>

jmp_buf env;

void luaWork(int d){
    int x;
    printf("d is at %d\n", d);

    longjmp(env, 1);
}

int main()
{
    const int ITERATIONS=25;
    for(int i = 0; i < ITERATIONS; i++){
        if(!setjmp(env)){
            luaWork(i);
        }
    }
    return 0;
}
'''
    self.do_run(src, r'''d is at 24''')

  def test_setjmp_noleak(self):
    # setjmp/longjmp must not leak memory.
    self.do_runf(test_file('core/test_setjmp_noleak.c'), 'ok.')
  @with_both_exception_handling
  def test_exceptions(self):
    self.set_setting('EXCEPTION_DEBUG')
    self.maybe_closure()
    # Run with both SUPPORT_LONGJMP settings, since longjmp support interacts
    # with the exception machinery.
    for support_longjmp in [0, 1]:
      self.set_setting('SUPPORT_LONGJMP', support_longjmp)
      self.do_run_from_file(test_file('core/test_exceptions.cpp'), test_file('core/test_exceptions_caught.out'))
def test_exceptions_off(self):
for support_longjmp in [0, 1]:
self.set_setting('DISABLE_EXCEPTION_CATCHING')
self.do_run_from_file(test_file('core/test_exceptions.cpp'), test_file('core/test_exceptions_uncaught.out'), assert_returncode=NON_ZERO)
  @no_asan('TODO: ASan support in minimal runtime')
  def test_exceptions_minimal_runtime(self):
    # Exceptions under MINIMAL_RUNTIME, with catching enabled and disabled.
    self.set_setting('EXCEPTION_DEBUG')
    self.set_setting('EXIT_RUNTIME')
    self.maybe_closure()
    self.set_setting('MINIMAL_RUNTIME')
    self.emcc_args += ['--pre-js', test_file('minimal_runtime_exit_handling.js')]
    for support_longjmp in [0, 1]:
      self.set_setting('SUPPORT_LONGJMP', support_longjmp)

      self.set_setting('DISABLE_EXCEPTION_CATCHING', 0)
      self.do_run_from_file(test_file('core/test_exceptions.cpp'), test_file('core/test_exceptions_caught.out'))

      self.set_setting('DISABLE_EXCEPTION_CATCHING')
      self.do_run_from_file(test_file('core/test_exceptions.cpp'), test_file('core/test_exceptions_uncaught.out'), assert_returncode=NON_ZERO)
  @with_both_exception_handling
  def test_exceptions_custom(self):
    # Custom exception class: checks constructor/copy/destructor ordering when
    # caught by reference vs. by value.
    self.set_setting('EXCEPTION_DEBUG')
    # needs to flush stdio streams
    self.set_setting('EXIT_RUNTIME')
    self.maybe_closure()
    src = '''
    #include <iostream>

    class MyException
    {
    public:
        MyException(){ std::cout << "Construct..."; }
        MyException( const MyException & ) { std::cout << "Copy..."; }
        ~MyException(){ std::cout << "Destruct..."; }
    };

    int function()
    {
        std::cout << "Throw...";
        throw MyException();
    }

    int function2()
    {
        return function();
    }

    int main()
    {
        try
        {
            function2();
        }
        catch (MyException & e)
        {
            std::cout << "Caught...";
        }

        try
        {
            function2();
        }
        catch (MyException e)
        {
            std::cout << "Caught...";
        }

        return 0;
    }
    '''

    self.do_run(src, 'Throw...Construct...Caught...Destruct...Throw...Construct...Copy...Caught...Destruct...Destruct...')
  @with_both_exception_handling
  def test_exceptions_2(self):
    # Run with SAFE_HEAP off and on (asan is incompatible with SAFE_HEAP).
    for safe in [0, 1]:
      print(safe)
      if safe and '-fsanitize=address' in self.emcc_args:
        # Can't use safe heap with ASan
        continue
      self.set_setting('SAFE_HEAP', safe)
      self.do_core_test('test_exceptions_2.cpp')
  @with_both_exception_handling
  def test_exceptions_3(self):
    # Catch handlers selected by thrown type (C string, std::exception,
    # std::runtime_error), chosen via a command line argument; the second and
    # third runs reuse the first build (no_build=True).
    src = r'''
#include <iostream>
#include <stdexcept>

int main(int argc, char **argv)
{
  if (argc != 2) {
    std::cout << "need an arg" << std::endl;
    return 1;
  }

  int arg = argv[1][0] - '0';
  try {
    if (arg == 0) throw "a c string";
    if (arg == 1) throw std::exception();
    if (arg == 2) throw std::runtime_error("Hello");
  } catch(const char * ex) {
    std::cout << "Caught C string: " << ex << std::endl;
  } catch(const std::exception &ex) {
    std::cout << "Caught exception: " << ex.what() << std::endl;
  } catch(...) {
    std::cout << "Caught something else" << std::endl;
  }

  std::cout << "Done.\n";
}
'''

    print('0')
    self.do_run(src, 'Caught C string: a c string\nDone.', args=['0'])
    print('1')
    self.do_run('src.js', 'Caught exception: std::exception\nDone.', args=['1'], no_build=True)
    print('2')
    self.do_run('src.js', 'Caught exception: Hello\nDone.', args=['2'], no_build=True)
  def test_exceptions_allowed(self):
    # EXCEPTION_CATCHING_ALLOWED restricts catching to listed functions; also
    # compares output sizes across allowed / empty / fake / disabled configs.
    self.set_setting('EXCEPTION_CATCHING_ALLOWED', ["_Z12somefunctionv"])
    # otherwise it is inlined and not identified
    self.set_setting('INLINING_LIMIT')

    self.do_core_test('test_exceptions_allowed.cpp')
    size = os.path.getsize('test_exceptions_allowed.js')
    if self.is_wasm():
      size += os.path.getsize('test_exceptions_allowed.wasm')
    shutil.copyfile('test_exceptions_allowed.js', 'orig.js')

    # check that an empty allow list works properly (as in, same as exceptions disabled)
    src = test_file('core/test_exceptions_allowed.cpp')
    empty_output = test_file('core/test_exceptions_allowed_empty.out')

    self.set_setting('EXCEPTION_CATCHING_ALLOWED', [])
    self.do_run_from_file(src, empty_output, assert_returncode=NON_ZERO)
    empty_size = os.path.getsize('test_exceptions_allowed.js')
    if self.is_wasm():
      empty_size += os.path.getsize('test_exceptions_allowed.wasm')
    shutil.copyfile('test_exceptions_allowed.js', 'empty.js')

    self.set_setting('EXCEPTION_CATCHING_ALLOWED', ['fake'])
    self.do_run_from_file(src, empty_output, assert_returncode=NON_ZERO)
    fake_size = os.path.getsize('test_exceptions_allowed.js')
    if self.is_wasm():
      fake_size += os.path.getsize('test_exceptions_allowed.wasm')
    shutil.copyfile('test_exceptions_allowed.js', 'fake.js')

    self.clear_setting('EXCEPTION_CATCHING_ALLOWED')
    self.do_run_from_file(src, empty_output, assert_returncode=NON_ZERO)
    disabled_size = os.path.getsize('test_exceptions_allowed.js')
    if self.is_wasm():
      disabled_size += os.path.getsize('test_exceptions_allowed.wasm')
    shutil.copyfile('test_exceptions_allowed.js', 'disabled.js')

    print('size: %d' % size)
    print('empty_size: %d' % empty_size)
    print('fake_size: %d' % fake_size)
    print('disabled_size: %d' % disabled_size)
    # empty list acts the same as fully disabled
    self.assertEqual(empty_size, disabled_size)
    # big change when we disable exception catching of the function
    self.assertGreater(size - empty_size, 0.01 * size)
    # full disable can remove a little bit more
    self.assertLess(disabled_size, fake_size)
  def test_exceptions_allowed_2(self):
    self.set_setting('EXCEPTION_CATCHING_ALLOWED', ["main"])
    # otherwise it is inlined and not identified
    self.set_setting('INLINING_LIMIT')

    self.do_core_test('test_exceptions_allowed_2.cpp')

    # When 'main' function does not have a signature, its contents will be
    # outlined to '__original_main'. Check if we can handle that case.
    self.emcc_args += ['-DMAIN_NO_SIGNATURE']
    self.do_core_test('test_exceptions_allowed_2.cpp')

  def test_exceptions_allowed_uncaught(self):
    self.emcc_args += ['-std=c++11']
    self.set_setting('EXCEPTION_CATCHING_ALLOWED', ["_Z4testv"])
    # otherwise it is inlined and not identified
    self.set_setting('INLINING_LIMIT')

    self.do_core_test('test_exceptions_allowed_uncaught.cpp')
  def test_exceptions_allowed_misuse(self):
    # Combining EXCEPTION_CATCHING_ALLOWED with DISABLE_EXCEPTION_CATCHING is
    # either deprecated (0/2 values) or a hard error (1).
    self.set_setting('EXCEPTION_CATCHING_ALLOWED', ['foo'])

    # Test old =2 setting for DISABLE_EXCEPTION_CATCHING
    self.set_setting('DISABLE_EXCEPTION_CATCHING', 2)
    err = self.expect_fail([EMCC, test_file('hello_world.c')] + self.get_emcc_args())
    self.assertContained('error: DISABLE_EXCEPTION_CATCHING=X is no longer needed when specifying EXCEPTION_CATCHING_ALLOWED [-Wdeprecated] [-Werror]', err)

    # =0 should also be a warning
    self.set_setting('DISABLE_EXCEPTION_CATCHING', 0)
    err = self.expect_fail([EMCC, test_file('hello_world.c')] + self.get_emcc_args())
    self.assertContained('error: DISABLE_EXCEPTION_CATCHING=X is no longer needed when specifying EXCEPTION_CATCHING_ALLOWED [-Wdeprecated] [-Werror]', err)

    # =1 should be a hard error
    self.set_setting('DISABLE_EXCEPTION_CATCHING', 1)
    err = self.expect_fail([EMCC, test_file('hello_world.c')] + self.get_emcc_args())
    self.assertContained('error: DISABLE_EXCEPTION_CATCHING and EXCEPTION_CATCHING_ALLOWED are mutually exclusive', err)

    # even setting an empty list should trigger the error;
    self.set_setting('EXCEPTION_CATCHING_ALLOWED', [])
    err = self.expect_fail([EMCC, test_file('hello_world.c')] + self.get_emcc_args())
    self.assertContained('error: DISABLE_EXCEPTION_CATCHING and EXCEPTION_CATCHING_ALLOWED are mutually exclusive', err)
  @with_both_exception_handling
  def test_exceptions_uncaught(self):
    # std::uncaught_exception() must track the in-flight exception correctly.
    # needs to flush stdio streams
    self.set_setting('EXIT_RUNTIME')
    src = r'''
      #include <stdio.h>
      #include <exception>
      struct X {
        ~X() {
          printf("exception? %s\n", std::uncaught_exception() ? "yes" : "no");
        }
      };
      int main() {
        printf("exception? %s\n", std::uncaught_exception() ? "yes" : "no");
        try {
          X x;
          throw 1;
        } catch(...) {
          printf("exception? %s\n", std::uncaught_exception() ? "yes" : "no");
        }
        printf("exception? %s\n", std::uncaught_exception() ? "yes" : "no");
        return 0;
      }
    '''
    self.do_run(src, 'exception? no\nexception? yes\nexception? no\nexception? no\n')

    src = r'''
      #include <fstream>
      #include <iostream>
      int main() {
        std::ofstream os("test");
        os << std::unitbuf << "foo"; // trigger a call to std::uncaught_exception from
                                     // std::basic_ostream::sentry::~sentry
        std::cout << "success";
      }
    '''
    self.do_run(src, 'success')
@with_both_exception_handling
def test_exceptions_uncaught_2(self):
# needs to flush stdio streams
self.set_setting('EXIT_RUNTIME')
src = r'''
#include <iostream>
#include <exception>
int main() {
try {
throw std::exception();
} catch(std::exception) {
try {
throw;
} catch(std::exception) {}
}
if (std::uncaught_exception())
std::cout << "ERROR: uncaught_exception still set.";
else
std::cout << "OK";
}
'''
self.do_run(src, 'OK\n')
  @with_both_exception_handling
  def test_exceptions_typed(self):
    # Exercises catch clauses with specific (typed) exception types.
    # needs to flush stdio streams
    self.set_setting('EXIT_RUNTIME')
    self.clear_setting('SAFE_HEAP') # Throwing null will cause an ignorable null pointer access.
    self.do_core_test('test_exceptions_typed.cpp')
@with_both_exception_handling
def test_exceptions_virtual_inheritance(self):
self.do_core_test('test_exceptions_virtual_inheritance.cpp')
@with_both_exception_handling
def test_exceptions_convert(self):
self.do_core_test('test_exceptions_convert.cpp')
# TODO Make setjmp-longjmp also use Wasm exception handling
@with_both_exception_handling
def test_exceptions_multi(self):
self.do_core_test('test_exceptions_multi.cpp')
@with_both_exception_handling
def test_exceptions_std(self):
self.clear_setting('SAFE_HEAP')
self.do_core_test('test_exceptions_std.cpp')
@with_both_exception_handling
def test_exceptions_alias(self):
self.do_core_test('test_exceptions_alias.cpp')
@with_both_exception_handling
def test_exceptions_rethrow(self):
self.do_core_test('test_exceptions_rethrow.cpp')
@with_both_exception_handling
def test_exceptions_uncaught_count(self):
self.do_core_test('test_exceptions_uncaught_count.cpp')
@with_both_exception_handling
def test_exceptions_resume(self):
self.set_setting('EXCEPTION_DEBUG')
self.do_core_test('test_exceptions_resume.cpp')
@with_both_exception_handling
def test_exceptions_destroy_virtual(self):
self.do_core_test('test_exceptions_destroy_virtual.cpp')
@with_both_exception_handling
def test_exceptions_refcount(self):
self.do_core_test('test_exceptions_refcount.cpp')
@with_both_exception_handling
def test_exceptions_primary(self):
self.do_core_test('test_exceptions_primary.cpp')
@with_both_exception_handling
def test_exceptions_simplify_cfg(self):
self.do_core_test('test_exceptions_simplify_cfg.cpp')
@with_both_exception_handling
def test_exceptions_libcxx(self):
self.do_core_test('test_exceptions_libcxx.cpp')
@with_both_exception_handling
def test_exceptions_multiple_inherit(self):
self.do_core_test('test_exceptions_multiple_inherit.cpp')
@with_both_exception_handling
def test_exceptions_multiple_inherit_rethrow(self):
self.do_core_test('test_exceptions_multiple_inherit_rethrow.cpp')
@with_both_exception_handling
def test_exceptions_rethrow_missing(self):
create_file('main.cpp', 'int main() { throw; }')
self.do_runf('main.cpp', None, assert_returncode=NON_ZERO)
@with_both_exception_handling
def test_bad_typeid(self):
self.do_run(r'''
// exception example
#include <iostream> // std::cerr
#include <typeinfo> // operator typeid
#include <exception> // std::exception
class Polymorphic {virtual void member(){}};
int main () {
try
{
Polymorphic * pb = 0;
const std::type_info& ti = typeid(*pb); // throws a bad_typeid exception
}
catch (std::exception& e)
{
std::cerr << "exception caught: " << e.what() << '\n';
}
return 0;
}
''', 'exception caught: std::bad_typeid')
def test_iostream_ctors(self):
# iostream stuff must be globally constructed before user global
# constructors, so iostream works in global constructors
self.do_run(r'''
#include <iostream>
struct A {
A() { std::cout << "bug"; }
};
A a;
int main() {
std::cout << "free code" << std::endl;
return 0;
}
''', 'bugfree code')
def test_exceptions_longjmp1(self):
self.set_setting('SUPPORT_LONGJMP')
self.set_setting('DISABLE_EXCEPTION_CATCHING', 0)
self.do_core_test('test_exceptions_longjmp1.cpp')
def test_exceptions_longjmp2(self):
self.set_setting('SUPPORT_LONGJMP')
self.set_setting('DISABLE_EXCEPTION_CATCHING', 0)
self.do_core_test('test_exceptions_longjmp2.cpp')
def test_exceptions_longjmp3(self):
self.set_setting('SUPPORT_LONGJMP')
self.set_setting('DISABLE_EXCEPTION_CATCHING', 0)
self.do_core_test('test_exceptions_longjmp3.cpp')
# Marked as impure since the WASI reactor modules (modules without main)
# are not yet suppored by the wasm engines we test against.
@also_with_standalone_wasm(impure=True)
def test_ctors_no_main(self):
self.emcc_args.append('--no-entry')
self.do_core_test('test_ctors_no_main.cpp')
def test_class(self):
self.do_core_test('test_class.cpp')
def test_inherit(self):
self.do_core_test('test_inherit.cpp')
def test_isdigit_l(self):
# needs to flush stdio streams
self.set_setting('EXIT_RUNTIME')
self.do_core_test('test_isdigit_l.cpp')
def test_iswdigit(self):
# needs to flush stdio streams
self.set_setting('EXIT_RUNTIME')
self.do_core_test('test_iswdigit.cpp')
def test_polymorph(self):
self.do_core_test('test_polymorph.cpp')
def test_complex(self):
self.do_core_test('test_complex.c')
def test_float_builtins(self):
# tests wasm_libc_rt
self.do_core_test('test_float_builtins.c')
@no_asan('SAFE_HEAP cannot be used with ASan')
def test_segfault(self):
self.set_setting('SAFE_HEAP')
for addr in ['get_null()', 'new D2()']:
print(addr)
src = r'''
#include <stdio.h>
#include <emscripten.h>
struct Classey {
virtual void doIt() = 0;
};
struct D1 : Classey {
virtual void doIt() { printf("fleefl\n"); }
};
struct D2 : Classey {
virtual void doIt() { printf("marfoosh\n"); }
};
EM_JS(Classey*, get_null, (), {
return 0;
});
int main(int argc, char **argv)
{
Classey *p = argc == 100 ? new D1() : (Classey*)%s;
p->doIt();
return 0;
}
''' % addr
if 'get_null' in addr:
self.do_run(src, 'segmentation fault', assert_returncode=NON_ZERO)
else:
self.do_run(src, 'marfoosh')
def test_dynamic_cast(self):
self.do_core_test('test_dynamic_cast.cpp')
def test_dynamic_cast_b(self):
self.do_core_test('test_dynamic_cast_b.cpp')
def test_dynamic_cast_2(self):
self.do_core_test('test_dynamic_cast_2.cpp')
def test_funcptr(self):
self.do_core_test('test_funcptr.c')
def test_mathfuncptr(self):
self.do_core_test('test_mathfuncptr.c')
def test_funcptrfunc(self):
self.do_core_test('test_funcptrfunc.c')
def test_funcptr_namecollide(self):
self.do_core_test('test_funcptr_namecollide.c')
def test_emptyclass(self):
self.do_core_test('test_emptyclass.cpp')
def test_alloca(self):
self.do_core_test('test_alloca.c')
def test_rename(self):
self.do_run_in_out_file_test('stdio/test_rename.c')
def test_remove(self):
# needs to flush stdio streams
self.set_setting('EXIT_RUNTIME')
self.do_run_in_out_file_test('cstdio/test_remove.cpp')
def test_alloca_stack(self):
self.do_core_test('test_alloca_stack.c')
def test_stack_byval(self):
self.do_core_test('test_stack_byval.cpp')
def test_stack_varargs(self):
# in node.js we allocate argv[0] on the stack, which means the length
# of the program directory influences how much stack we need, and so
# long random temp dir names can lead to random failures. The stack
# size was increased here to avoid that.
self.set_setting('INLINING_LIMIT')
self.set_setting('TOTAL_STACK', 8 * 1024)
self.do_core_test('test_stack_varargs.c')
def test_stack_varargs2(self):
# in node.js we allocate argv[0] on the stack, which means the length
# of the program directory influences how much stack we need, and so
# long random temp dir names can lead to random failures. The stack
# size was increased here to avoid that.
self.set_setting('TOTAL_STACK', 8 * 1024)
src = r'''
#include <stdio.h>
#include <stdlib.h>
void func(int i) {
}
int main() {
for (int i = 0; i < 7000; i++) {
printf("%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d\n",
i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i);
}
printf("ok!\n");
return 0;
}
'''
self.do_run(src, 'ok!')
print('with return')
src = r'''
#include <stdio.h>
#include <stdlib.h>
int main() {
for (int i = 0; i < 7000; i++) {
int j = printf("%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d",
i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i);
printf(" (%d)\n", j);
}
printf("ok!\n");
return 0;
}
'''
self.do_run(src, 'ok!')
print('with definitely no return')
src = r'''
#include <stdio.h>
#include <stdlib.h>
#include <stdarg.h>
void vary(const char *s, ...)
{
va_list v;
va_start(v, s);
char d[20];
vsnprintf(d, 20, s, v);
puts(d);
// Try it with copying
va_list tempva;
va_copy(tempva, v);
vsnprintf(d, 20, s, tempva);
puts(d);
va_end(v);
}
int main() {
for (int i = 0; i < 7000; i++) {
int j = printf("%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d",
i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i);
printf(" (%d)\n", j);
vary("*cheez: %d+%d*", 99, 24);
vary("*albeit*");
}
printf("ok!\n");
return 0;
}
'''
self.do_run(src, 'ok!')
def test_stack_void(self):
self.emcc_args.append('-Wno-format-extra-args')
self.set_setting('INLINING_LIMIT')
self.do_core_test('test_stack_void.c')
def test_life(self):
self.emcc_args += ['-std=c99']
self.do_run_in_out_file_test('life.c', args=['2'])
def test_array2(self):
self.do_core_test('test_array2.c')
def test_array2b(self):
self.do_core_test('test_array2b.c')
def test_constglobalstructs(self):
self.do_core_test('test_constglobalstructs.c')
def test_conststructs(self):
self.do_core_test('test_conststructs.c')
def test_bigarray(self):
self.do_core_test('test_bigarray.c')
def test_mod_globalstruct(self):
self.do_core_test('test_mod_globalstruct.c')
def test_sizeof(self):
self.do_core_test('test_sizeof.cpp')
def test_llvm_used(self):
self.do_core_test('test_llvm_used.c')
@no_asan('SAFE_HEAP cannot be used with ASan')
def test_set_align(self):
self.set_setting('SAFE_HEAP')
self.do_core_test('test_set_align.c')
  def test_emscripten_api(self):
    # _save_me_aimee is called from JS in the test, so it must be exported
    # explicitly alongside _main.
    self.set_setting('EXPORTED_FUNCTIONS', ['_main', '_save_me_aimee'])
    self.do_core_test('test_emscripten_api.cpp')
    if '-fsanitize=address' not in self.emcc_args:
      # test EXPORT_ALL (this is not compatible with asan, which doesn't
      # support it -- NOTE(review): the original comment was truncated here;
      # confirm the precise asan limitation before relying on it)
      self.set_setting('EXPORTED_FUNCTIONS', [])
      self.set_setting('EXPORT_ALL')
      self.set_setting('LINKABLE')
      self.do_core_test('test_emscripten_api.cpp')
def test_emscripten_run_script_string_int(self):
src = r'''
#include <stdio.h>
#include <emscripten.h>
int main() {
const char *str = emscripten_run_script_string("1+1");
printf("got string: %s\n", str);
return 0;
}
'''
self.do_run(src, '''got string: 2''')
def test_emscripten_run_script_string_utf8(self):
src = r'''
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <emscripten.h>
int main() {
const char *str = emscripten_run_script_string("'\\u2603 \\u2603 \\u2603 Hello!'");
printf("length of returned string: %zu. Position of substring 'Hello': %zu\n", strlen(str), strstr(str, "Hello")-str);
return 0;
}
'''
self.do_run(src, '''length of returned string: 18. Position of substring 'Hello': 12''')
def test_emscripten_run_script_string_null(self):
src = r'''
#include <stdio.h>
#include <emscripten.h>
int main() {
const char *str = emscripten_run_script_string("void(0)");
if (str) {
printf("got string: %s\n", str);
} else {
puts("got null");
}
return 0;
}
'''
self.do_run(src, 'got null')
  def test_emscripten_get_now(self):
    # NOTE(review): the V8 shell is banned for this test -- presumably its
    # timer resolution differs from node; confirm before removing the ban.
    self.banned_js_engines = [config.V8_ENGINE]
    self.set_setting('EXIT_RUNTIME')
    self.maybe_closure()
    self.do_runf(test_file('emscripten_get_now.cpp'), 'Timer resolution is good')
  def test_emscripten_get_compiler_setting(self):
    src = test_file('core/emscripten_get_compiler_setting.c')
    output = shared.replace_suffix(src, '.out')
    # Without RETAIN_COMPILER_SETTINGS the API must abort with a helpful
    # message (and a non-zero exit code).
    self.set_setting('ASSERTIONS')
    self.do_runf(src, 'You must build with -s RETAIN_COMPILER_SETTINGS=1', assert_returncode=NON_ZERO)
    self.clear_setting('ASSERTIONS')
    self.set_setting('RETAIN_COMPILER_SETTINGS')
    # The .out file uses 'waka' as a placeholder for the compiler version.
    self.do_runf(src, read_file(output).replace('waka', shared.EMSCRIPTEN_VERSION))
def test_emscripten_has_asyncify(self):
src = r'''
#include <stdio.h>
#include <emscripten.h>
int main() {
printf("%d\n", emscripten_has_asyncify());
return 0;
}
'''
self.set_setting('ASYNCIFY', 0)
self.do_run(src, '0')
self.set_setting('ASYNCIFY')
self.do_run(src, '1')
def test_inlinejs(self):
self.skipTest('non-fastcomp is deprecated and fails in 3.5')
self.do_core_test('test_inlinejs.c')
if self.emcc_args == []:
out = read_file('src.js')
for i in range(1, 5):
assert ('comment%d' % i) in out
def test_inlinejs2(self):
self.skipTest('non-fastcomp is deprecated and fails in 3.5')
self.do_core_test('test_inlinejs2.c')
def test_inlinejs3(self):
if self.is_wasm():
self.skipTest('wasm requires a proper asm module')
src = test_file('core/test_inlinejs3.c')
output = shared.unsuffixed(src) + '.out'
self.do_core_test('test_inlinejs3.c')
print('no debugger, check validation')
src = read_file(src).replace('emscripten_debugger();', '')
self.do_run(src, read_file(output))
def test_inlinejs4(self):
self.do_run(r'''
#include <emscripten.h>
#define TO_STRING_INNER(x) #x
#define TO_STRING(x) TO_STRING_INNER(x)
#define assert_msg(msg, file, line) EM_ASM( throw 'Assert (' + msg + ') failed in ' + file + ':' + line + '!'; )
#define assert(expr) { \
if (!(expr)) { \
assert_msg(#expr, TO_STRING(__FILE__), TO_STRING(__LINE__)); \
} \
}
int main(int argc, char **argv) {
assert(argc != 17);
assert(false);
return 0;
}
''', 'false', assert_returncode=NON_ZERO)
def test_em_asm(self):
self.do_core_test('test_em_asm.cpp')
self.emcc_args.append('-std=gnu89')
self.do_core_test('test_em_asm.cpp', force_c=True)
def test_em_asm_2(self):
self.do_core_test('test_em_asm_2.cpp')
self.emcc_args.append('-std=gnu89')
self.do_core_test('test_em_asm_2.cpp', force_c=True)
@no_asan('Cannot use ASan: test depends exactly on heap size')
def test_main_thread_em_asm(self):
src = read_file(test_file('core/test_em_asm_2.cpp'))
create_file('src.cpp', src.replace('EM_ASM', 'MAIN_THREAD_EM_ASM'))
expected_result = read_file(test_file('core/test_em_asm_2.out'))
create_file('result.out', expected_result.replace('EM_ASM', 'MAIN_THREAD_EM_ASM'))
self.do_run_from_file('src.cpp', 'result.out')
self.do_run_from_file('src.cpp', 'result.out', force_c=True)
def test_main_thread_async_em_asm(self):
self.do_core_test('test_main_thread_async_em_asm.cpp')
self.do_core_test('test_main_thread_async_em_asm.cpp', force_c=True)
def test_main_thread_em_asm_signatures(self):
self.do_core_test('test_em_asm_signatures.cpp', assert_returncode=NON_ZERO)
def test_em_asm_unicode(self):
self.do_core_test('test_em_asm_unicode.cpp')
self.do_core_test('test_em_asm_unicode.cpp', force_c=True)
def test_em_asm_types(self):
self.do_core_test('test_em_asm_types.cpp')
self.do_core_test('test_em_asm_types.cpp', force_c=True)
def test_em_asm_unused_arguments(self):
self.do_core_test('test_em_asm_unused_arguments.cpp')
def test_em_asm_parameter_pack(self):
self.do_core_test('test_em_asm_parameter_pack.cpp')
def test_em_asm_arguments_side_effects(self):
self.do_core_test('test_em_asm_arguments_side_effects.cpp')
self.do_core_test('test_em_asm_arguments_side_effects.cpp', force_c=True)
def test_em_asm_direct(self):
self.do_core_test('test_em_asm_direct.c')
@parameterized({
'': ([], False),
'c': ([], True),
'linked': (['-s', 'MAIN_MODULE'], False),
'linked_c': (['-s', 'MAIN_MODULE'], True),
})
def test_em_js(self, args, force_c):
if 'MAIN_MODULE' in args and not self.is_wasm():
self.skipTest('main module support for non-wasm')
if '-fsanitize=address' in self.emcc_args:
self.skipTest('no dynamic library support in asan yet')
self.emcc_args += args + ['-s', 'EXPORTED_FUNCTIONS=_main,_malloc']
self.do_core_test('test_em_js.cpp', force_c=force_c)
self.assertContained("no args returning int", read_file('test_em_js.js'))
def test_runtime_stacksave(self):
self.do_runf(test_file('core/test_runtime_stacksave.c'), 'success')
def test_minimal_runtime_memorygrowth(self):
if self.has_changed_setting('ALLOW_MEMORY_GROWTH'):
self.skipTest('test needs to modify memory growth')
self.set_setting('MINIMAL_RUNTIME')
src = test_file('core/test_memorygrowth.c')
self.do_runf(src, 'OOM', assert_returncode=NON_ZERO)
self.set_setting('ALLOW_MEMORY_GROWTH')
self.do_runf(src, '*pre: hello,4.955*\n*hello,4.955*\n*hello,4.955*')
  def test_memorygrowth(self):
    # Build the same program with and without ALLOW_MEMORY_GROWTH: the fixed
    # size build must OOM, the growable build must succeed.
    if self.has_changed_setting('ALLOW_MEMORY_GROWTH'):
      self.skipTest('test needs to modify memory growth')
    if self.maybe_closure():
      # NOTE(review): presumably verifies closure works with dynamic
      # execution disabled -- confirm.
      self.set_setting('DYNAMIC_EXECUTION', 0)
    src = test_file('core/test_memorygrowth.c')
    # First build: no growth -> expect OOM and keep the generated JS.
    self.do_runf(src, 'OOM', assert_returncode=NON_ZERO)
    fail = read_file('test_memorygrowth.js')
    # Second build: growth enabled -> expect the program to finish.
    self.set_setting('ALLOW_MEMORY_GROWTH')
    self.do_runf(src, '*pre: hello,4.955*\n*hello,4.955*\n*hello,4.955*')
    win = read_file('test_memorygrowth.js')
    if '-O2' in self.emcc_args and not self.is_wasm():
      # In optimized asm.js builds, compare code size from a known marker
      # onward: the growth-enabled build is expected to be larger (less
      # optimized heap access).
      possible_starts = ['// EMSCRIPTEN_START_FUNCS', 'var TOTAL_STACK']
      code_start = None
      for s in possible_starts:
        if fail.find(s) >= 0:
          code_start = s
          break
      assert code_start is not None, 'Generated code must contain one of ' + str(possible_starts)
      fail = fail[fail.find(code_start):]
      win = win[win.find(code_start):]
      assert len(fail) < len(win), 'failing code - without memory growth on - is more optimized, and smaller' + str([len(fail), len(win)])
    if not self.get_setting('SAFE_HEAP'):
      # Also run with --tracing (skipped under SAFE_HEAP).
      self.emcc_args += ['--tracing']
      self.do_runf(src, '*pre: hello,4.955*\n*hello,4.955*\n*hello,4.955*')
  def test_memorygrowth_2(self):
    # Same fail/win size comparison as test_memorygrowth, with a second
    # test program.
    if self.has_changed_setting('ALLOW_MEMORY_GROWTH'):
      self.skipTest('test needs to modify memory growth')
    src = test_file('core/test_memorygrowth_2.c')
    # Fail without memory growth, keeping the generated JS for comparison.
    self.do_runf(src, 'OOM', assert_returncode=NON_ZERO)
    fail = read_file('test_memorygrowth_2.js')
    # Win with memory growth enabled.
    self.set_setting('ALLOW_MEMORY_GROWTH')
    self.do_runf(src, '*pre: hello,4.955*\n*hello,4.955*\n*hello,4.955*')
    win = read_file('test_memorygrowth_2.js')
    if '-O2' in self.emcc_args and not self.is_wasm():
      assert len(fail) < len(win), 'failing code - without memory growth on - is more optimized, and smaller' + str([len(fail), len(win)])
def test_memorygrowth_3(self):
if self.has_changed_setting('ALLOW_MEMORY_GROWTH'):
self.skipTest('test needs to modify memory growth')
self.set_setting('ABORTING_MALLOC', 0)
self.set_setting('SAFE_HEAP')
self.do_core_test('test_memorygrowth_3.c')
@also_with_standalone_wasm(impure=True)
def test_memorygrowth_MAXIMUM_MEMORY(self):
if self.has_changed_setting('ALLOW_MEMORY_GROWTH'):
self.skipTest('test needs to modify memory growth')
if not self.is_wasm():
self.skipTest('wasm memory specific test')
self.emcc_args += ['-s', 'ALLOW_MEMORY_GROWTH', '-s', 'INITIAL_MEMORY=64Mb', '-s', 'MAXIMUM_MEMORY=100Mb']
self.do_core_test('test_memorygrowth_wasm_mem_max.c')
def test_memorygrowth_linear_step(self):
if self.has_changed_setting('ALLOW_MEMORY_GROWTH'):
self.skipTest('test needs to modify memory growth')
if not self.is_wasm():
self.skipTest('wasm memory specific test')
self.emcc_args += ['-s', 'ALLOW_MEMORY_GROWTH', '-s', 'TOTAL_STACK=1Mb', '-s', 'INITIAL_MEMORY=64Mb', '-s', 'MAXIMUM_MEMORY=130Mb', '-s', 'MEMORY_GROWTH_LINEAR_STEP=1Mb']
self.do_core_test('test_memorygrowth_memory_growth_step.c')
def test_memorygrowth_geometric_step(self):
if self.has_changed_setting('ALLOW_MEMORY_GROWTH'):
self.skipTest('test needs to modify memory growth')
if not self.is_wasm():
self.skipTest('wasm memory specific test')
self.emcc_args += ['-s', 'ALLOW_MEMORY_GROWTH', '-s', 'MEMORY_GROWTH_GEOMETRIC_STEP=8.5', '-s', 'MEMORY_GROWTH_GEOMETRIC_CAP=32MB']
self.do_core_test('test_memorygrowth_geometric_step.c')
def test_memorygrowth_3_force_fail_reallocBuffer(self):
if self.has_changed_setting('ALLOW_MEMORY_GROWTH'):
self.skipTest('test needs to modify memory growth')
self.set_setting('ALLOW_MEMORY_GROWTH')
self.set_setting('TEST_MEMORY_GROWTH_FAILS')
self.do_core_test('test_memorygrowth_3.c')
@parameterized({
'nogrow': ([],),
'grow': (['-sALLOW_MEMORY_GROWTH', '-sMAXIMUM_MEMORY=18MB'],)
})
@no_asan('requires more memory when growing')
def test_aborting_new(self, args):
self.emcc_args += args
self.do_core_test('test_aborting_new.cpp')
@no_wasm2js('no WebAssembly.Memory()')
@no_asan('ASan alters the memory size')
def test_module_wasm_memory(self):
self.emcc_args += ['--pre-js', test_file('core/test_module_wasm_memory.js')]
self.set_setting('IMPORTED_MEMORY')
self.do_runf(test_file('core/test_module_wasm_memory.c'), 'success')
def test_ssr(self):
src = '''
#include <stdio.h>
// see related things in openjpeg
typedef struct opj_mqc_state {
unsigned int qeval;
int mps;
struct opj_mqc_state *nmps;
struct opj_mqc_state *nlps;
} opj_mqc_state_t;
static opj_mqc_state_t mqc_states[4] = {
{0x5600, 0, &mqc_states[2], &mqc_states[3]},
{0x5602, 1, &mqc_states[3], &mqc_states[2]},
};
int main() {
printf("*%ld*\\n", (long)(mqc_states+1)-(long)mqc_states);
for (int i = 0; i < 2; i++)
printf("%d:%d,%d,%ld,%ld\\n", i, mqc_states[i].qeval, mqc_states[i].mps,
(long)mqc_states[i].nmps-(long)mqc_states, (long)mqc_states[i].nlps-(long)mqc_states);
return 0;
}
'''
self.do_run(src, '''*16*\n0:22016,0,32,48\n1:22018,1,48,32\n''')
def test_tinyfuncstr(self):
self.do_core_test('test_tinyfuncstr.cpp')
def test_llvmswitch(self):
self.do_core_test('test_llvmswitch.c')
def test_cxx_version(self):
self.do_core_test('test_cxx_version.cpp')
@no_wasm2js('massive switches can break js engines')
def test_bigswitch(self):
self.do_runf(test_file('bigswitch.cpp'), '''34962: GL_ARRAY_BUFFER (0x8892)
26214: what?
35040: GL_STREAM_DRAW (0x88E0)
3060: what?
''', args=['34962', '26214', '35040', str(0xbf4)])
@no_wasm2js('massive switches can break js engines')
@is_slow_test
def test_biggerswitch(self):
if not self.is_optimizing():
self.skipTest('nodejs takes >6GB to compile this if the wasm is not optimized, which OOMs, see https://github.com/emscripten-core/emscripten/issues/7928#issuecomment-458308453')
if '-Os' in self.emcc_args:
self.skipTest('hangs in recent upstream clang, see https://bugs.llvm.org/show_bug.cgi?id=43468')
num_cases = 20000
switch_case = self.run_process([PYTHON, test_file('gen_large_switchcase.py'), str(num_cases)], stdout=PIPE, stderr=PIPE).stdout
self.do_run(switch_case, '''58996: 589965899658996
59297: 592975929759297
59598: default
59899: 598995989959899
Success!''')
def test_indirectbr(self):
self.emcc_args = [x for x in self.emcc_args if x != '-g']
self.do_core_test('test_indirectbr.c')
@no_asan('local count too large for VMs')
@no_wasm2js('extremely deep nesting, hits stack limit on some VMs')
def test_indirectbr_many(self):
self.do_core_test('test_indirectbr_many.c')
def test_pack(self):
src = '''
#include <stdio.h>
#include <string.h>
#pragma pack(push,1)
typedef struct header
{
unsigned char id;
unsigned short colour;
unsigned char desc;
} header;
#pragma pack(pop)
typedef struct fatheader
{
unsigned char id;
unsigned short colour;
unsigned char desc;
} fatheader;
int main( int argc, const char *argv[] ) {
header h, *ph = 0;
fatheader fh, *pfh = 0;
printf("*%zu,%ld,%ld*\\n", sizeof(header), (long)((long)&h.desc - (long)&h.id), (long)(&ph[1])-(long)(&ph[0]));
printf("*%zu,%ld,%ld*\\n", sizeof(fatheader), (long)((long)&fh.desc - (long)&fh.id), (long)(&pfh[1])-(long)(&pfh[0]));
return 0;
}
'''
self.do_run(src, '*4,3,4*\n*6,4,6*')
def test_varargs(self):
self.do_core_test('test_varargs.c')
def test_varargs_multi(self):
self.do_core_test('test_varargs_multi.c')
@unittest.skip('clang cannot compile this code with that target yet')
def test_varargs_byval(self):
src = r'''
#include <stdio.h>
#include <stdarg.h>
typedef struct type_a {
union {
double f;
void *p;
int i;
short sym;
} value;
} type_a;
enum mrb_vtype {
MRB_TT_FALSE = 0, /* 0 */
MRB_TT_CLASS = 9 /* 9 */
};
typedef struct type_b {
enum mrb_vtype tt:8;
} type_b;
void print_type_a(int argc, ...);
void print_type_b(int argc, ...);
int main(int argc, char *argv[])
{
type_a a;
type_b b;
a.value.p = (void*) 0x12345678;
b.tt = MRB_TT_CLASS;
printf("The original address of a is: %p\n", a.value.p);
printf("The original type of b is: %d\n", b.tt);
print_type_a(1, a);
print_type_b(1, b);
return 0;
}
void print_type_a(int argc, ...) {
va_list ap;
type_a a;
va_start(ap, argc);
a = va_arg(ap, type_a);
va_end(ap);
printf("The current address of a is: %p\n", a.value.p);
}
void print_type_b(int argc, ...) {
va_list ap;
type_b b;
va_start(ap, argc);
b = va_arg(ap, type_b);
va_end(ap);
printf("The current type of b is: %d\n", b.tt);
}
'''
self.do_run(src, '''The original address of a is: 0x12345678
The original type of b is: 9
The current address of a is: 0x12345678
The current type of b is: 9
''')
def test_functionpointer_libfunc_varargs(self):
self.do_core_test('test_functionpointer_libfunc_varargs.c')
def test_structbyval(self):
self.set_setting('INLINING_LIMIT')
src = r'''
#include <stdio.h>
struct point
{
int x, y;
};
void dump(struct point p) {
p.x++; // should not modify
p.y++; // anything in the caller!
printf("dump: %d,%d\n", p.x, p.y);
}
void dumpmod(struct point *p) {
p->x++; // should not modify
p->y++; // anything in the caller!
printf("dump: %d,%d\n", p->x, p->y);
}
int main( int argc, const char *argv[] ) {
point p = { 54, 2 };
printf("pre: %d,%d\n", p.x, p.y);
dump(p);
void (*dp)(point p) = dump; // And, as a function pointer
dp(p);
printf("post: %d,%d\n", p.x, p.y);
dumpmod(&p);
dumpmod(&p);
printf("last: %d,%d\n", p.x, p.y);
return 0;
}
'''
self.do_run(src, 'pre: 54,2\ndump: 55,3\ndump: 55,3\npost: 54,2\ndump: 55,3\ndump: 56,4\nlast: 56,4')
def test_stdlibs(self):
self.set_setting('SAFE_HEAP', 0)
self.set_setting('EXIT_RUNTIME')
self.do_core_test('test_stdlibs.c')
def test_stdbool(self):
create_file('test_stdbool.c', r'''
#include <stdio.h>
#include <stdbool.h>
int main() {
bool x = true;
bool y = false;
printf("*%d*\n", x != y);
return 0;
}
''')
self.do_runf('test_stdbool.c', '*1*')
  # strtoll/strtol conversion tests, one per radix (hex, decimal, binary, octal).
  def test_strtoll_hex(self):
    self.do_core_test('test_strtoll_hex.c')
  def test_strtoll_dec(self):
    self.do_core_test('test_strtoll_dec.c')
  def test_strtoll_bin(self):
    self.do_core_test('test_strtoll_bin.c')
  def test_strtoll_oct(self):
    self.do_core_test('test_strtoll_oct.c')
  def test_strtol_hex(self):
    self.do_core_test('test_strtol_hex.c')
  def test_strtol_dec(self):
    self.do_core_test('test_strtol_dec.c')
  def test_strtol_bin(self):
    self.do_core_test('test_strtol_bin.c')
  def test_strtol_oct(self):
    self.do_core_test('test_strtol_oct.c')
@also_with_standalone_wasm()
def test_atexit(self):
if not self.get_setting('STANDALONE_WASM'):
self.set_setting('EXIT_RUNTIME')
self.do_core_test('test_atexit.c')
def test_atexit_threads(self):
self.set_setting('EXIT_RUNTIME')
self.do_core_test('test_atexit_threads.c')
@no_asan('test relies on null pointer reads')
def test_pthread_specific(self):
self.do_run_in_out_file_test('pthread/specific.c')
def test_pthread_equal(self):
self.do_run_in_out_file_test('pthread/test_pthread_equal.cpp')
@node_pthreads
def test_pthread_dispatch_after_exit(self):
self.do_run_in_out_file_test('pthread/test_pthread_dispatch_after_exit.c', interleaved_output=False)
@node_pthreads
def test_pthread_atexit(self):
self.set_setting('EXIT_RUNTIME')
self.set_setting('PTHREAD_POOL_SIZE', 1)
self.do_run_in_out_file_test('pthread/test_pthread_atexit.c')
@node_pthreads
def test_pthread_nested_work_queue(self):
self.set_setting('EXIT_RUNTIME')
self.set_setting('PTHREAD_POOL_SIZE', 1)
self.do_run_in_out_file_test('pthread/test_pthread_nested_work_queue.c')
@node_pthreads
def test_pthread_thread_local_storage(self):
self.set_setting('PROXY_TO_PTHREAD')
self.set_setting('EXIT_RUNTIME')
self.set_setting('INITIAL_MEMORY', '300mb')
self.do_run_in_out_file_test('pthread/test_pthread_thread_local_storage.cpp')
@node_pthreads
def test_pthread_cleanup(self):
self.set_setting('EXIT_RUNTIME')
self.set_setting('PTHREAD_POOL_SIZE', 4)
self.do_run_in_out_file_test('pthread/test_pthread_cleanup.cpp')
@node_pthreads
def test_pthread_setspecific_mainthread(self):
self.set_setting('EXIT_RUNTIME')
print('.. return')
self.do_runf(test_file('pthread/test_pthread_setspecific_mainthread.c'), 'done!', emcc_args=['-DRETURN'])
print('.. exit')
self.do_runf(test_file('pthread/test_pthread_setspecific_mainthread.c'), 'done!', emcc_args=['-DEXIT'])
print('.. pthread_exit')
self.do_run_in_out_file_test('pthread/test_pthread_setspecific_mainthread.c')
@node_pthreads
@no_mac('https://github.com/emscripten-core/emscripten/issues/15014')
def test_pthread_abort(self):
self.set_setting('PROXY_TO_PTHREAD')
self.add_pre_run("Module.onAbort = function() { console.log('onAbort called'); }")
self.do_run_in_out_file_test('pthread/test_pthread_abort.c', assert_returncode=NON_ZERO)
@no_asan('ASan does not support custom memory allocators')
@no_lsan('LSan does not support custom memory allocators')
@node_pthreads
def test_pthread_emmalloc(self):
self.emcc_args += ['-fno-builtin']
self.set_setting('PROXY_TO_PTHREAD')
self.set_setting('EXIT_RUNTIME')
self.set_setting('ASSERTIONS=2')
self.set_setting('MALLOC', 'emmalloc')
self.do_core_test('test_emmalloc.c')
def test_tcgetattr(self):
self.do_runf(test_file('termios/test_tcgetattr.c'), 'success')
  def test_time(self):
    self.do_core_test('test_time.cpp')
    # Re-run with explicit TZ values to exercise timezone handling.
    for tz in ['EST+05EDT', 'UTC+0']:
      print('extra tz test:', tz)
      with env_modify({'TZ': tz}):
        # NOTE(review): the original comment was truncated here; presumably
        # TZ is not honored on every platform (e.g. Node.js on Windows),
        # but it does no harm either.
        self.do_core_test('test_time.cpp')
def test_timeb(self):
# Confirms they are called in reverse order
self.do_core_test('test_timeb.c')
def test_time_c(self):
self.do_core_test('test_time_c.c')
def test_gmtime(self):
self.do_core_test('test_gmtime.c')
def test_strptime_tm(self):
self.do_core_test('test_strptime_tm.c')
def test_strptime_days(self):
self.do_core_test('test_strptime_days.c')
def test_strptime_reentrant(self):
# needs to flush stdio streams
self.set_setting('EXIT_RUNTIME')
self.do_core_test('test_strptime_reentrant.c')
def test_strftime(self):
self.do_core_test('test_strftime.cpp')
def test_trickystring(self):
self.do_core_test('test_trickystring.c')
def test_statics(self):
self.do_core_test('test_statics.cpp')
def test_copyop(self):
# clang generated code is vulnerable to this, as it uses
# memcpy for assignments, with hardcoded numbers of bytes
# (llvm-gcc copies items one by one).
self.do_core_test('test_copyop.cpp')
  def test_memcpy_memcmp(self):
    self.banned_js_engines = [config.V8_ENGINE] # Currently broken under V8_ENGINE but not node
    def check(output):
      # The test output is large, so compare a SHA-1 of the normalized text
      # instead of the full text.
      output = output.replace('\n \n', '\n') # remove extra node output
      return hashlib.sha1(output.encode('utf-8')).hexdigest()
    self.do_core_test('test_memcpy_memcmp.c', output_nicerizer=check)
def test_memcpy2(self):
self.do_core_test('test_memcpy2.c')
def test_memcpy3(self):
self.do_core_test('test_memcpy3.c')
@also_with_standalone_wasm()
def test_memcpy_alignment(self):
self.do_runf(test_file('test_memcpy_alignment.cpp'), 'OK.')
def test_memset_alignment(self):
self.do_runf(test_file('test_memset_alignment.cpp'), 'OK.')
def test_memset(self):
self.do_core_test('test_memset.c')
def test_getopt(self):
self.do_core_test('test_getopt.c', args=['-t', '12', '-n', 'foobar'])
def test_getopt_long(self):
self.do_core_test('test_getopt_long.c', args=['--file', 'foobar', '-b'])
def test_memmove(self):
self.do_core_test('test_memmove.c')
def test_memmove2(self):
self.do_core_test('test_memmove2.c')
def test_memmove3(self):
self.do_core_test('test_memmove3.c')
  # Flexible array members and libc bsearch().
  def test_flexarray_struct(self):
    self.do_core_test('test_flexarray_struct.c')
  def test_bsearch(self):
    self.do_core_test('test_bsearch.c')
  def test_stack_overflow(self):
    # With extra assertions enabled, overflowing the stack should be
    # reported ('stack overflow' in the output) and exit nonzero.
    self.set_setting('ASSERTIONS', 2)
    self.do_runf(test_file('core/stack_overflow.cpp'), 'stack overflow', assert_returncode=NON_ZERO)
  def test_stackAlloc(self):
    # Stack allocation (stackAlloc) coverage.
    self.do_core_test('stackAlloc.cpp')
def test_nestedstructs(self):
src = '''
#include <stdio.h>
#include "emscripten.h"
struct base {
int x;
float y;
union {
int a;
float b;
};
char c;
};
struct hashtableentry {
int key;
base data;
};
struct hashset {
typedef hashtableentry entry;
struct chain { entry elem; chain *next; };
// struct chainchunk { chain chains[100]; chainchunk *next; };
};
struct hashtable : hashset {
hashtable() {
base *b = NULL;
entry *e = NULL;
chain *c = NULL;
printf("*%zu,%ld,%ld,%ld,%ld,%ld|%zu,%ld,%ld,%ld,%ld,%ld,%ld,%ld|%zu,%ld,%ld,%ld,%ld,%ld,%ld,%ld,%ld,%ld*\\n",
sizeof(base),
long(&(b->x)), long(&(b->y)), long(&(b->a)), long(&(b->b)), long(&(b->c)),
sizeof(hashtableentry),
long(&(e->key)), long(&(e->data)), long(&(e->data.x)), long(&(e->data.y)), long(&(e->data.a)), long(&(e->data.b)), long(&(e->data.c)),
sizeof(hashset::chain),
long(&(c->elem)), long(&(c->next)), long(&(c->elem.key)), long(&(c->elem.data)), long(&(c->elem.data.x)), long(&(c->elem.data.y)), long(&(c->elem.data.a)), long(&(c->elem.data.b)), long(&(c->elem.data.c))
);
}
};
struct B { char buffer[62]; int last; char laster; char laster2; };
struct Bits {
unsigned short A : 1;
unsigned short B : 1;
unsigned short C : 1;
unsigned short D : 1;
unsigned short x1 : 1;
unsigned short x2 : 1;
unsigned short x3 : 1;
unsigned short x4 : 1;
};
int main() {
hashtable t;
// Part 2 - the char[] should be compressed, BUT have a padding space at the end so the next
// one is aligned properly. Also handle char; char; etc. properly.
B *b = NULL;
printf("*%ld,%ld,%ld,%ld,%ld,%ld,%ld,%ld,%zu*\\n", long(b), long(&(b->buffer)), long(&(b->buffer[0])), long(&(b->buffer[1])), long(&(b->buffer[2])),
long(&(b->last)), long(&(b->laster)), long(&(b->laster2)), sizeof(B));
// Part 3 - bitfields, and small structures
Bits *b2 = NULL;
printf("*%zu*\\n", sizeof(Bits));
return 0;
}
'''
# Bloated memory; same layout as C/C++
self.do_run(src, '*16,0,4,8,8,12|20,0,4,4,8,12,12,16|24,0,20,0,4,4,8,12,12,16*\n*0,0,0,1,2,64,68,69,72*\n*2*')
  def prep_dlfcn_main(self):
    # Switch the build settings over to the main (dlopen-ing) module:
    # a MAIN_MODULE with NODERAWFS enabled, and not a SIDE_MODULE.
    self.set_setting('MAIN_MODULE')
    self.set_setting('NODERAWFS')
    self.clear_setting('SIDE_MODULE')
  def build_dlfcn_lib(self, filename):
    # Build `filename` as a SIDE_MODULE and move the result to liblib.so,
    # the name the dlopen() calls in these tests use.
    self.clear_setting('MAIN_MODULE')
    self.set_setting('SIDE_MODULE')
    # Side modules are emitted as a JS outfile only when not targeting wasm.
    outfile = self.build(filename, js_outfile=not self.is_wasm())
    shutil.move(outfile, 'liblib.so')
  @needs_dylink
  def test_dlfcn_missing(self):
    # dlopen() of a nonexistent library must return NULL and set a
    # descriptive dlerror() message.
    self.set_setting('MAIN_MODULE')
    self.set_setting('ASSERTIONS')
    src = r'''
    #include <dlfcn.h>
    #include <stdio.h>
    #include <assert.h>

    int main() {
      void* lib_handle = dlopen("libfoo.so", RTLD_NOW);
      assert(!lib_handle);
      printf("error: %s\n", dlerror());
      return 0;
    }
    '''
    self.do_run(src, "error: Could not load dynamic lib: libfoo.so\nError: ENOENT: no such file or directory, open 'libfoo.so'")
  @needs_dylink
  def test_dlfcn_basic(self):
    # The side library's global constructor must run when the library is
    # dlopen'd, after the main module's own constructors (see expected
    # output ordering below).
    create_file('liblib.cpp', '''
      #include <cstdio>
      class Foo {
      public:
        Foo() {
          puts("Constructing lib object.");
        }
      };
      Foo global;
      ''')
    self.build_dlfcn_lib('liblib.cpp')
    self.prep_dlfcn_main()
    src = '''
      #include <cstdio>
      #include <dlfcn.h>
      class Bar {
      public:
        Bar() {
          puts("Constructing main object.");
        }
      };
      Bar global;
      int main() {
        dlopen("liblib.so", RTLD_NOW);
        return 0;
      }
      '''
    self.do_run(src, 'Constructing main object.\nConstructing lib object.\n')
@needs_dylink
def test_dlfcn_i64(self):
create_file('liblib.c', '''
#include <inttypes.h>
int64_t foo(int x) {
return (long long)x / (long long)1234;
}
''')
self.build_dlfcn_lib('liblib.c')
self.prep_dlfcn_main()
src = r'''
#include <inttypes.h>
#include <stdio.h>
#include <stdlib.h>
#include <dlfcn.h>
typedef int64_t (*int64func)(int);
int main() {
void *lib_handle = dlopen("liblib.so", RTLD_NOW);
if (!lib_handle) {
puts(dlerror());
abort();
}
printf("dll handle: %p\n", lib_handle);
int64func x = (int64func)dlsym(lib_handle, "foo");
printf("foo func handle: %p\n", x);
if (!x) {
printf("dlsym failed: %s\n", dlerror());
return 1;
}
printf("|%lld|\n", x(81234567));
return 0;
}
'''
self.do_run(src, '|65830|')
@needs_dylink
@disabled('EM_ASM in not yet supported in SIDE_MODULE')
def test_dlfcn_em_asm(self):
create_file('liblib.cpp', '''
#include <emscripten.h>
class Foo {
public:
Foo() {
EM_ASM( out("Constructing lib object.") );
}
};
Foo global;
''')
self.build_dlfcn_lib('liblib.cpp')
self.prep_dlfcn_main()
src = '''
#include <emscripten.h>
#include <dlfcn.h>
class Bar {
public:
Bar() {
EM_ASM( out("Constructing main object.") );
}
};
Bar global;
int main() {
dlopen("liblib.so", RTLD_NOW);
EM_ASM( out("All done.") );
return 0;
}
'''
self.do_run(src, 'Constructing main object.\nConstructing lib object.\nAll done.\n')
@needs_dylink
def test_dlfcn_qsort(self):
self.set_setting('EXPORTED_FUNCTIONS', ['_get_cmp'])
create_file('liblib.cpp', '''
int lib_cmp(const void* left, const void* right) {
const int* a = (const int*) left;
const int* b = (const int*) right;
if(*a > *b) return 1;
else if(*a == *b) return 0;
else return -1;
}
typedef int (*CMP_TYPE)(const void*, const void*);
extern "C" CMP_TYPE get_cmp() {
return lib_cmp;
}
''')
self.build_dlfcn_lib('liblib.cpp')
self.prep_dlfcn_main()
self.set_setting('EXPORTED_FUNCTIONS', ['_main', '_malloc'])
src = '''
#include <stdio.h>
#include <stdlib.h>
#include <dlfcn.h>
typedef int (*CMP_TYPE)(const void*, const void*);
int main_cmp(const void* left, const void* right) {
const int* a = (const int*) left;
const int* b = (const int*) right;
if(*a < *b) return 1;
else if(*a == *b) return 0;
else return -1;
}
int main() {
void* lib_handle;
CMP_TYPE (*getter_ptr)();
CMP_TYPE lib_cmp_ptr;
int arr[5] = {4, 2, 5, 1, 3};
qsort((void*)arr, 5, sizeof(int), main_cmp);
printf("Sort with main comparison: ");
for (int i = 0; i < 5; i++) {
printf("%d ", arr[i]);
}
printf("\\n");
lib_handle = dlopen("liblib.so", RTLD_NOW);
if (lib_handle == NULL) {
printf("Could not load lib.\\n");
return 1;
}
getter_ptr = (CMP_TYPE (*)()) dlsym(lib_handle, "get_cmp");
if (getter_ptr == NULL) {
printf("Could not find func.\\n");
return 1;
}
lib_cmp_ptr = getter_ptr();
qsort((void*)arr, 5, sizeof(int), lib_cmp_ptr);
printf("Sort with lib comparison: ");
for (int i = 0; i < 5; i++) {
printf("%d ", arr[i]);
}
printf("\\n");
return 0;
}
'''
self.do_run(src, 'Sort with main comparison: 5 4 3 2 1 *Sort with lib comparison: 1 2 3 4 5 *',
output_nicerizer=lambda x: x.replace('\n', '*'))
@needs_dylink
def test_dlfcn_data_and_fptr(self):
# Failing under v8 since: https://chromium-review.googlesource.com/712595
if self.is_wasm():
self.banned_js_engines = [config.V8_ENGINE]
create_file('liblib.cpp', r'''
#include <stdio.h>
int theglobal = 42;
extern void parent_func(); // a function that is defined in the parent
int* lib_get_global_addr() {
return &theglobal;
}
void lib_fptr() {
printf("Second calling lib_fptr from main.\n");
parent_func();
// call it also through a pointer, to check indexizing
void (*p_f)();
p_f = parent_func;
p_f();
}
extern "C" void (*func(int x, void(*fptr)()))() {
printf("In func: %d\n", x);
fptr();
return lib_fptr;
}
''')
self.set_setting('EXPORTED_FUNCTIONS', ['_func'])
self.build_dlfcn_lib('liblib.cpp')
self.prep_dlfcn_main()
src = r'''
#include <stdio.h>
#include <dlfcn.h>
#include <emscripten.h>
typedef void (*FUNCTYPE(int, void(*)()))();
FUNCTYPE func;
void EMSCRIPTEN_KEEPALIVE parent_func() {
printf("parent_func called from child\n");
}
void main_fptr() {
printf("First calling main_fptr from lib.\n");
}
int main() {
void* lib_handle;
FUNCTYPE* func_fptr;
// Test basic lib loading.
lib_handle = dlopen("liblib.so", RTLD_NOW);
if (lib_handle == NULL) {
printf("Could not load lib.\n");
return 1;
}
// Test looked up function.
func_fptr = (FUNCTYPE*) dlsym(lib_handle, "func");
// Load twice to test cache.
func_fptr = (FUNCTYPE*) dlsym(lib_handle, "func");
if (func_fptr == NULL) {
printf("Could not find func.\n");
return 1;
}
// Test passing function pointers across module bounds.
void (*fptr)() = func_fptr(13, main_fptr);
fptr();
// Test global data.
int* globaladdr = (int*) dlsym(lib_handle, "theglobal");
if (globaladdr == NULL) {
printf("Could not find global.\n");
return 1;
}
printf("Var: %d\n", *globaladdr);
return 0;
}
'''
self.set_setting('EXPORTED_FUNCTIONS', ['_main'])
self.do_run(src, '''\
In func: 13
First calling main_fptr from lib.
Second calling lib_fptr from main.
parent_func called from child
parent_func called from child
Var: 42
''')
@needs_dylink
def test_dlfcn_varargs(self):
# this test is not actually valid - it fails natively. the child should fail
# to be loaded, not load and successfully see the parent print_ints func
create_file('liblib.cpp', r'''
void print_ints(int n, ...);
extern "C" void func() {
print_ints(2, 13, 42);
}
''')
self.set_setting('EXPORTED_FUNCTIONS', ['_func'])
self.build_dlfcn_lib('liblib.cpp')
self.prep_dlfcn_main()
src = r'''
#include <stdarg.h>
#include <stdio.h>
#include <dlfcn.h>
#include <assert.h>
void print_ints(int n, ...) {
va_list args;
va_start(args, n);
for (int i = 0; i < n; i++) {
printf("%d\n", va_arg(args, int));
}
va_end(args);
}
int main() {
void* lib_handle;
void (*fptr)();
print_ints(2, 100, 200);
lib_handle = dlopen("liblib.so", RTLD_NOW);
assert(lib_handle);
fptr = (void (*)())dlsym(lib_handle, "func");
fptr();
return 0;
}
'''
self.set_setting('EXPORTED_FUNCTIONS', ['_main'])
self.do_run(src, '100\n200\n13\n42\n')
@needs_dylink
def test_dlfcn_alignment_and_zeroing(self):
self.set_setting('INITIAL_MEMORY', '16mb')
create_file('liblib.c', r'''
int prezero = 0;
__attribute__((aligned(1024))) int superAligned = 12345;
int postzero = 0;
''')
self.build_dlfcn_lib('liblib.c')
for i in range(10):
curr = '%d.so' % i
shutil.copyfile('liblib.so', curr)
self.prep_dlfcn_main()
self.set_setting('INITIAL_MEMORY', '128mb')
create_file('src.c', r'''
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <dlfcn.h>
#include <assert.h>
#include <emscripten.h>
int main() {
printf("'prepare' memory with non-zero inited stuff\n");
int num = 120 * 1024 * 1024; // total is 128; we'll use 5*5 = 25 at least, so allocate pretty much all of it
void* mem = malloc(num);
assert(mem);
printf("setting this range to non-zero: %ld - %ld\n", (long)mem, ((long)mem) + num);
memset(mem, 1, num);
EM_ASM({
var value = HEAP8[64*1024*1024];
out('verify middle of memory is non-zero: ' + value);
assert(value === 1);
});
free(mem);
for (int i = 0; i < 10; i++) {
char curr[] = "?.so";
curr[0] = '0' + i;
printf("loading %s\n", curr);
void* lib_handle = dlopen(curr, RTLD_NOW);
if (!lib_handle) {
puts(dlerror());
assert(0);
}
printf("getting superAligned\n");
int* superAligned = (int*)dlsym(lib_handle, "superAligned");
assert(superAligned);
assert(((long)superAligned) % 1024 == 0); // alignment
printf("checking value of superAligned, at %p\n", superAligned);
assert(*superAligned == 12345); // value
printf("getting prezero\n");
int* prezero = (int*)dlsym(lib_handle, "prezero");
assert(prezero);
printf("checking value of prezero, at %p\n", prezero);
assert(*prezero == 0);
*prezero = 1;
assert(*prezero != 0);
printf("getting postzero\n");
int* postzero = (int*)dlsym(lib_handle, "postzero");
printf("checking value of postzero, at %p\n", postzero);
assert(postzero);
printf("checking value of postzero\n");
assert(*postzero == 0);
*postzero = 1;
assert(*postzero != 0);
}
printf("success.\n");
return 0;
}
''')
self.do_runf('src.c', 'success.\n')
  @needs_dylink
  def test_dlfcn_self(self):
    # Self-loading with EXPORT_ALL: data symbols become wasm exports,
    # counted and bounds-checked below.
    self.set_setting('MAIN_MODULE')
    self.set_setting('EXPORT_ALL')
    def get_data_export_count(wasm):
      # Count exported wasm globals (i.e. exported data symbols) in the
      # text disassembly of `wasm`.
      wat = self.get_wasm_text(wasm)
      lines = wat.splitlines()
      exports = [l for l in lines if l.strip().startswith('(export ')]
      data_exports = [l for l in exports if '(global ' in l]
      return len(data_exports)
    self.do_core_test('test_dlfcn_self.c')
    export_count = get_data_export_count('test_dlfcn_self.wasm')
    # Sanity bounds: a nontrivial, but not unbounded, number of data exports.
    self.assertGreater(export_count, 20)
    self.assertLess(export_count, 56)
@needs_dylink
def test_dlfcn_unique_sig(self):
create_file('liblib.c', r'''
#include <stdio.h>
int myfunc(int a, int b, int c, int d, int e, int f, int g, int h, int i, int j, int k, int l, int m) {
return 13;
}
''')
self.set_setting('EXPORTED_FUNCTIONS', ['_myfunc'])
self.build_dlfcn_lib('liblib.c')
self.prep_dlfcn_main()
create_file('main.c', r'''
#include <assert.h>
#include <stdio.h>
#include <dlfcn.h>
typedef int (*FUNCTYPE)(int, int, int, int, int, int, int, int, int, int, int, int, int);
int main() {
void *lib_handle;
FUNCTYPE func_ptr;
lib_handle = dlopen("liblib.so", RTLD_NOW);
assert(lib_handle != NULL);
func_ptr = (FUNCTYPE)dlsym(lib_handle, "myfunc");
assert(func_ptr != NULL);
assert(func_ptr(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0) == 13);
puts("success");
return 0;
}
''')
self.set_setting('EXPORTED_FUNCTIONS', ['_main', '_malloc'])
self.do_runf('main.c', 'success')
@needs_dylink
def test_dlfcn_info(self):
create_file('liblib.c', r'''
#include <stdio.h>
int myfunc(int a, int b, int c, int d, int e, int f, int g, int h, int i, int j, int k, int l, int m) {
return 13;
}
''')
self.set_setting('EXPORTED_FUNCTIONS', ['_myfunc'])
self.build_dlfcn_lib('liblib.c')
self.prep_dlfcn_main()
create_file('main.c', '''
#include <assert.h>
#include <stdio.h>
#include <string.h>
#include <dlfcn.h>
typedef int (*FUNCTYPE)(int, int, int, int, int, int, int, int, int, int, int, int, int);
int main() {
void *lib_handle;
FUNCTYPE func_ptr;
lib_handle = dlopen("liblib.so", RTLD_NOW);
assert(lib_handle != NULL);
func_ptr = (FUNCTYPE)dlsym(lib_handle, "myfunc");
assert(func_ptr != NULL);
assert(func_ptr(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0) == 13);
/* Verify that we don't corrupt func_ptr when calling dladdr. */
Dl_info info;
memset(&info, 0, sizeof(info));
dladdr(func_ptr, &info);
assert(func_ptr != NULL);
assert(func_ptr(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0) == 13);
/* Verify something useful lives in info. */
assert(info.dli_fname != NULL);
assert(info.dli_fbase == NULL);
assert(info.dli_sname == NULL);
assert(info.dli_saddr == NULL);
puts("success");
return 0;
}
''')
self.set_setting('EXPORTED_FUNCTIONS', ['_main', '_malloc'])
self.do_runf('main.c', 'success')
@needs_dylink
def test_dlfcn_stacks(self):
create_file('liblib.c', r'''
#include <assert.h>
#include <stdio.h>
#include <string.h>
int myfunc(const char *input) {
char bigstack[1024] = { 0 };
// make sure we didn't just trample the stack!
assert(!strcmp(input, "foobar"));
snprintf(bigstack, sizeof(bigstack), "%s", input);
return strlen(bigstack);
}
''')
self.set_setting('EXPORTED_FUNCTIONS', ['_myfunc'])
self.build_dlfcn_lib('liblib.c')
self.prep_dlfcn_main()
create_file('main.c', '''
#include <assert.h>
#include <stdio.h>
#include <dlfcn.h>
#include <string.h>
typedef int (*FUNCTYPE)(const char *);
int main() {
void *lib_handle;
FUNCTYPE func_ptr;
char str[128];
snprintf(str, sizeof(str), "foobar");
// HACK: Use strcmp in the main executable so that it doesn't get optimized out and the dynamic library
// is able to use it.
assert(!strcmp(str, "foobar"));
lib_handle = dlopen("liblib.so", RTLD_NOW);
assert(lib_handle != NULL);
func_ptr = (FUNCTYPE)dlsym(lib_handle, "myfunc");
assert(func_ptr != NULL);
assert(func_ptr(str) == 6);
puts("success");
return 0;
}
''')
self.set_setting('EXPORTED_FUNCTIONS', ['_main', '_malloc', '_strcmp'])
self.do_runf('main.c', 'success')
@needs_dylink
def test_dlfcn_funcs(self):
create_file('liblib.c', r'''
#include <assert.h>
#include <stdio.h>
#include <string.h>
typedef void (*voidfunc)();
typedef void (*intfunc)(int);
void callvoid(voidfunc f) { f(); }
void callint(voidfunc f, int x) { f(x); }
void void_0() { printf("void 0\n"); }
void void_1() { printf("void 1\n"); }
voidfunc getvoid(int i) {
switch(i) {
case 0: return void_0;
case 1: return void_1;
default: return NULL;
}
}
void int_0(int x) { printf("int 0 %d\n", x); }
void int_1(int x) { printf("int 1 %d\n", x); }
intfunc getint(int i) {
switch(i) {
case 0: return int_0;
case 1: return int_1;
default: return NULL;
}
}
''')
self.set_setting('EXPORTED_FUNCTIONS', ['_callvoid', '_callint', '_getvoid', '_getint'])
self.build_dlfcn_lib('liblib.c')
self.prep_dlfcn_main()
create_file('main.c', r'''
#include <assert.h>
#include <stdio.h>
#include <dlfcn.h>
typedef void (*voidfunc)();
typedef void (*intfunc)(int);
typedef void (*voidcaller)(voidfunc);
typedef void (*intcaller)(intfunc, int);
typedef voidfunc (*voidgetter)(int);
typedef intfunc (*intgetter)(int);
void void_main() { printf("void_main.\n"); }
void int_main(int x) { printf("int_main %d\n", x); }
int main() {
printf("go\n");
void *lib_handle;
lib_handle = dlopen("liblib.so", RTLD_NOW);
assert(lib_handle != NULL);
voidcaller callvoid = (voidcaller)dlsym(lib_handle, "callvoid");
assert(callvoid != NULL);
callvoid(void_main);
intcaller callint = (intcaller)dlsym(lib_handle, "callint");
assert(callint != NULL);
callint(int_main, 201);
voidgetter getvoid = (voidgetter)dlsym(lib_handle, "getvoid");
assert(getvoid != NULL);
callvoid(getvoid(0));
callvoid(getvoid(1));
intgetter getint = (intgetter)dlsym(lib_handle, "getint");
assert(getint != NULL);
callint(getint(0), 54);
callint(getint(1), 9000);
assert(getint(1000) == NULL);
puts("ok");
return 0;
}
''')
self.set_setting('EXPORTED_FUNCTIONS', ['_main', '_malloc'])
self.do_runf('main.c', '''go
void_main.
int_main 201
void 0
void 1
int 0 54
int 1 9000
ok
''')
  @needs_dylink
  def test_dlfcn_mallocs(self):
    # Exercises malloc/free calls made through a dlopen'd side module
    # (via the mallocproxy/freeproxy wrappers below).
    # will be exhausted without functional malloc/free
    self.set_setting('INITIAL_MEMORY', '64mb')
    create_file('liblib.c', r'''
      #include <assert.h>
      #include <stdio.h>
      #include <string.h>
      #include <stdlib.h>
      void *mallocproxy(int n) { return malloc(n); }
      void freeproxy(void *p) { free(p); }
      ''')
    self.set_setting('EXPORTED_FUNCTIONS', ['_mallocproxy', '_freeproxy'])
    self.build_dlfcn_lib('liblib.c')
    self.prep_dlfcn_main()
    self.set_setting('EXPORTED_FUNCTIONS', ['_main', '_malloc', '_free'])
    self.do_runf(test_file('dlmalloc_proxy.c'), '*294,153*')
@needs_dylink
def test_dlfcn_longjmp(self):
create_file('liblib.c', r'''
#include <setjmp.h>
#include <stdio.h>
void jumpy(jmp_buf buf) {
static int i = 0;
i++;
if (i == 10) longjmp(buf, i);
printf("pre %d\n", i);
}
''')
self.build_dlfcn_lib('liblib.c')
self.prep_dlfcn_main()
create_file('main.c', r'''
#include <assert.h>
#include <stdio.h>
#include <dlfcn.h>
#include <setjmp.h>
typedef void (*jumpfunc)(jmp_buf);
int main() {
printf("go!\n");
void *lib_handle;
lib_handle = dlopen("liblib.so", RTLD_NOW);
assert(lib_handle != NULL);
jumpfunc jumpy = (jumpfunc)dlsym(lib_handle, "jumpy");
assert(jumpy);
jmp_buf buf;
int jmpval = setjmp(buf);
if (jmpval == 0) {
while (1) jumpy(buf);
} else {
printf("out!\n");
}
return 0;
}
''')
self.set_setting('EXPORTED_FUNCTIONS', ['_main', '_malloc', '_free'])
self.do_runf('main.c', '''go!
pre 1
pre 2
pre 3
pre 4
pre 5
pre 6
pre 7
pre 8
pre 9
out!
''', force_c=True)
# TODO: make this work. need to forward tempRet0 across modules
# TODO Enable @with_both_exception_handling (the test is not working now)
@needs_dylink
def zzztest_dlfcn_exceptions(self):
self.set_setting('DISABLE_EXCEPTION_CATCHING', 0)
create_file('liblib.cpp', r'''
extern "C" {
int ok() {
return 65;
}
int fail() {
throw 123;
}
}
''')
self.build_dlfcn_lib('liblib.cpp')
self.prep_dlfcn_main()
src = r'''
#include <assert.h>
#include <stdio.h>
#include <dlfcn.h>
typedef int (*intfunc)();
int main() {
printf("go!\n");
void *lib_handle;
lib_handle = dlopen("liblib.so", RTLD_NOW);
assert(lib_handle != NULL);
intfunc okk = (intfunc)dlsym(lib_handle, "ok");
intfunc faill = (intfunc)dlsym(lib_handle, "fail");
assert(okk && faill);
try {
printf("ok: %d\n", okk());
} catch(...) {
printf("wha\n");
}
try {
printf("fail: %d\n", faill());
} catch(int x) {
printf("int %d\n", x);
}
try {
printf("fail: %d\n", faill());
} catch(double x) {
printf("caught %f\n", x);
}
return 0;
}
'''
self.set_setting('EXPORTED_FUNCTIONS', ['_main', '_malloc', '_free'])
self.do_run(src, '''go!
ok: 65
int 123
ok
''')
@needs_dylink
def test_dlfcn_handle_alloc(self):
# verify that dlopen does not allocate already used handles
dirname = self.get_dir()
def indir(name):
return os.path.join(dirname, name)
create_file('a.cpp', r'''
#include <stdio.h>
static struct a {
a() {
puts("a: loaded");
}
} _;
''')
create_file('b.cpp', r'''
#include <stdio.h>
static struct b {
b() {
puts("b: loaded");
}
} _;
''')
self.build_dlfcn_lib('a.cpp')
shutil.move(indir('liblib.so'), indir('liba.so'))
self.build_dlfcn_lib('b.cpp')
shutil.move(indir('liblib.so'), indir('libb.so'))
self.set_setting('MAIN_MODULE')
self.set_setting('NODERAWFS')
self.clear_setting('SIDE_MODULE')
create_file('main.c', r'''
#include <dlfcn.h>
#include <assert.h>
#include <stddef.h>
int main() {
void *liba, *libb, *liba2, *libb2;
int err;
liba = dlopen("liba.so", RTLD_NOW);
assert(liba != NULL);
libb = dlopen("libb.so", RTLD_NOW);
assert(libb != NULL);
// Test that opening libb a second times gives the same handle
libb2 = dlopen("libb.so", RTLD_NOW);
assert(libb == libb2);
err = dlclose(liba);
assert(!err);
liba2 = dlopen("liba.so", RTLD_NOW);
assert(liba2 != libb);
return 0;
}
''')
self.do_runf('main.c', 'a: loaded\nb: loaded\n')
@needs_dylink
@needs_non_trapping_float_to_int
def test_dlfcn_feature_in_lib(self):
self.emcc_args.append('-mnontrapping-fptoint')
create_file('liblib.cpp', r'''
extern "C" int magic(float x) {
return __builtin_wasm_trunc_saturate_s_i32_f32(x);
}
''')
self.build_dlfcn_lib('liblib.cpp')
self.prep_dlfcn_main()
src = r'''
#include <dlfcn.h>
#include <stdio.h>
#include <stdlib.h>
typedef int (*fi)(float);
int main() {
void *lib_handle = dlopen("liblib.so", RTLD_NOW);
if (!lib_handle) {
puts(dlerror());
abort();
}
fi x = (fi)dlsym(lib_handle, "magic");
if (!x) {
puts(dlerror());
abort();
}
printf("float: %d.\n", x(42.99));
return 0;
}
'''
self.do_run(src, 'float: 42.\n')
  def dylink_test(self, main, side, expected=None, header=None, force_c=False,
                  main_module=2, **kwargs):
    # Same as dylink_testf but takes source code in string form: sources
    # are written to main.c(pp)/liblib.c(pp) (plus header.h if given),
    # with the extension chosen by force_c, then delegated to dylink_testf.
    if not isinstance(side, list):
      side_file = 'liblib.cpp' if not force_c else 'liblib.c'
      create_file(side_file, side)
      side = side_file
    if not isinstance(main, list):
      main_file = 'main.cpp' if not force_c else 'main.c'
      create_file(main_file, main)
      main = main_file
    if header:
      create_file('header.h', header)
    return self.dylink_testf(main, side, expected, force_c, main_module=main_module, **kwargs)
def dylink_testf(self, main, side=None, expected=None, force_c=False, main_emcc_args=[],
main_module=2,
so_name='liblib.so',
need_reverse=True, **kwargs):
self.maybe_closure()
# Same as dylink_test but takes source code as filenames on disc.
old_args = self.emcc_args.copy()
if not expected:
outfile = shared.replace_suffix(main, '.out')
expected = read_file(outfile)
if not side:
side, ext = os.path.splitext(main)
side += '_side' + ext
# side settings
self.clear_setting('MAIN_MODULE')
self.set_setting('SIDE_MODULE')
side_suffix = 'wasm' if self.is_wasm() else 'js'
if isinstance(side, list):
out_file = 'liblib.' + side_suffix
# side is just a library
self.run_process([EMCC] + side + self.get_emcc_args() + ['-o', out_file])
else:
out_file = self.build(side, js_outfile=(side_suffix == 'js'))
shutil.move(out_file, so_name)
# main settings
self.set_setting('MAIN_MODULE', main_module)
self.clear_setting('SIDE_MODULE')
self.emcc_args += main_emcc_args
self.emcc_args.append(so_name)
if force_c:
self.emcc_args.append('-nostdlib++')
if isinstance(main, list):
# main is just a library
try_delete('main.js')
self.run_process([EMCC] + main + self.get_emcc_args() + ['-o', 'main.js'])
self.do_run('main.js', expected, no_build=True, **kwargs)
else:
self.do_runf(main, expected, force_c=force_c, **kwargs)
self.emcc_args = old_args
if need_reverse:
print('flip')
# Test the reverse as well. There we flip the role of the side module and main module.
# - We add --no-entry since the side module doesn't have a `main`
self.dylink_testf(side, main, expected, force_c, main_emcc_args + ['--no-entry'],
need_reverse=False, **kwargs)
  def do_basic_dylink_test(self, **kwargs):
    # Minimal main/side pair: main calls sidey() from the side module and
    # prints its return value; kwargs are forwarded to dylink_test.
    self.dylink_test(r'''
      #include <stdio.h>
      #include "header.h"

      int main() {
        printf("other says %d.\n", sidey());
        return 0;
      }
    ''', '''
      #include "header.h"

      int sidey() {
        return 11;
      }
    ''', 'other says 11.', 'int sidey();', force_c=True, **kwargs)
  @needs_dylink
  def test_dylink_basics(self):
    # Forward direction only; also verifies the emitted JS is strict-mode
    # clean.
    self.do_basic_dylink_test(need_reverse=False)
    self.verify_in_strict_mode('main.js')
  @needs_dylink
  def test_dylink_basics_no_modify(self):
    # ERROR_ON_WASM_CHANGES_AFTER_LINK requires a non-optimizing build.
    if self.is_optimizing():
      self.skipTest('no modify mode only works with non-optimizing builds')
    self.set_setting('WASM_BIGINT')
    self.set_setting('ERROR_ON_WASM_CHANGES_AFTER_LINK')
    self.do_basic_dylink_test()
  # Variants of the basic dylink test under different build settings.
  @needs_dylink
  def test_dylink_basics_lld_report_undefined(self):
    self.set_setting('LLD_REPORT_UNDEFINED')
    self.do_basic_dylink_test()
  @needs_dylink
  def test_dylink_no_export(self):
    self.set_setting('NO_DECLARE_ASM_MODULE_EXPORTS')
    self.do_basic_dylink_test()
  @needs_dylink
  def test_dylink_memory_growth(self):
    if not self.is_wasm():
      self.skipTest('wasm only')
    self.set_setting('ALLOW_MEMORY_GROWTH')
    self.do_basic_dylink_test()
  @needs_dylink
  def test_dylink_safe_heap(self):
    self.set_setting('SAFE_HEAP')
    self.do_basic_dylink_test()
@needs_dylink
def test_dylink_function_pointer_equality(self):
self.dylink_test(r'''
#include <stdio.h>
#include "header.h"
int main() {
void* puts_side = get_address();
printf("main module address %p.\n", &puts);
printf("side module address address %p.\n", puts_side);
if (&puts == puts_side)
printf("success\n");
else
printf("failure\n");
return 0;
}
''', '''
#include <stdio.h>
#include "header.h"
void* get_address() {
return (void*)&puts;
}
''', 'success', header='void* get_address();', force_c=True)
  @needs_dylink
  def test_dylink_floats(self):
    # A float return value crosses the module boundary correctly and
    # participates in arithmetic on the main side.
    self.dylink_test(r'''
      #include <stdio.h>
      extern float sidey();
      int main() {
        printf("other says %.2f.\n", sidey()+1);
        return 0;
      }
    ''', '''
      float sidey() { return 11.5; }
    ''', 'other says 12.50', force_c=True)
  @needs_dylink
  def test_dylink_printf(self):
    # Both the main module and the side module can call printf.
    self.dylink_test(r'''
      #include <stdio.h>
      void sidey();
      int main() {
        printf("hello from main\n");
        sidey();
        return 0;
      }
    ''', r'''
      #include <stdio.h>
      void sidey() {
        printf("hello from side\n");
      }
    ''', 'hello from main\nhello from side\n', force_c=True)
@needs_dylink
def test_dylink_funcpointer(self):
self.dylink_test(
main=r'''
#include <stdio.h>
#include <assert.h>
#include "header.h"
intfunc sidey(intfunc f);
void a(int arg) { printf("hello from funcptr: %d\n", arg); }
int main() {
intfunc b = sidey(a);
assert(a == b);
b(0);
return 0;
}
''',
side='''
#include "header.h"
intfunc sidey(intfunc f) { f(1); return f; }
''',
expected='hello from funcptr: 1\nhello from funcptr: 0\n',
header='typedef void (*intfunc)(int );', force_c=True)
@needs_dylink
def test_dylink_static_funcpointers(self):
self.dylink_test(
main=r'''
#include <stdio.h>
#include "header.h"
void areturn0() { printf("hello 0\n"); }
void areturn1() { printf("hello 1\n"); }
void areturn2() { printf("hello 2\n"); }
voidfunc func_ptrs[3] = { areturn0, areturn1, areturn2 };
int main(int argc, char **argv) {
sidey(func_ptrs[0]);
sidey(func_ptrs[1]);
sidey(func_ptrs[2]);
return 0;
}
''',
side='''
#include "header.h"
void sidey(voidfunc f) { f(); }
''',
expected='hello 0\nhello 1\nhello 2\n',
header='typedef void (*voidfunc)(); void sidey(voidfunc f);', force_c=True)
@needs_dylink
def test_dylink_funcpointers_wrapper(self):
self.dylink_test(
main=r'''\
#include <stdio.h>
#include "header.h"
int main(int argc, char **argv) {
charfunc f1 = emscripten_run_script;
f1("console.log('one')");
charfunc f2 = get();
f2("console.log('two')");
return 0;
}
''',
side='''\
#include "header.h"
charfunc get() {
return emscripten_run_script;
}
''',
expected='one\ntwo\n',
header='''\
#include <emscripten.h>
typedef void (*charfunc)(const char*);
extern charfunc get();
''', force_c=True)
@needs_dylink
def test_dylink_static_funcpointer_float(self):
self.dylink_test(
main=r'''\
#include <stdio.h>
#include "header.h"
int sidey(floatfunc f);
float func1(float f) { printf("hello 1: %f\n", f); return 0; }
floatfunc f1 = &func1;
int main(int argc, char **argv) {
printf("got: %d\n", sidey(f1));
f1(12.34);
return 0;
}
''',
side='''\
#include "header.h"
int sidey(floatfunc f) { f(56.78); return 1; }
''',
expected='hello 1: 56.779999\ngot: 1\nhello 1: 12.340000\n',
header='typedef float (*floatfunc)(float);', force_c=True)
  @needs_dylink
  def test_missing_signatures(self):
    # Takes the addresses of emscripten_run_script{,_string} so those
    # symbols must be resolvable in a MAIN_MODULE=1 build with memory
    # growth and a 4GB maximum.
    create_file('test_sig.c', r'''#include <emscripten.h>
        int main() {
          return 0 == ( (long)&emscripten_run_script_string +
                        (long)&emscripten_run_script );
        }''')
    self.set_setting('MAIN_MODULE', 1)
    self.set_setting('ALLOW_MEMORY_GROWTH', '1')
    self.set_setting('MAXIMUM_MEMORY', '4GB')
    self.do_runf('test_sig.c', '')
@needs_dylink
def test_dylink_global_init(self):
self.dylink_test(r'''
#include <stdio.h>
struct Class {
Class() { printf("a new Class\n"); }
};
static Class c;
int main() {
return 0;
}
''', r'''
void nothing() {}
''', 'a new Class\n')
@needs_dylink
def test_dylink_global_inits(self):
def test():
self.dylink_test(header=r'''
#include <stdio.h>
struct Class {
Class(const char *name) { printf("new %s\n", name); }
};
''', main=r'''
#include "header.h"
static Class c("main");
int main() {
return 0;
}
''', side=r'''
#include "header.h"
static Class c("side");
''', expected=['new main\nnew side\n', 'new side\nnew main\n'])
test()
print('check warnings')
self.set_setting('ASSERTIONS', 2)
test()
@needs_dylink
def test_dylink_i64(self):
self.dylink_test(r'''
#include <stdio.h>
#include <stdint.h>
extern int64_t sidey();
int main() {
printf("other says %lld.\n", sidey());
return 0;
}
''', '''
#include <stdint.h>
int64_t sidey() {
return 42;
}
''', 'other says 42.', force_c=True)
@all_engines
@needs_dylink
def test_dylink_i64_b(self):
self.dylink_test(r'''
#include <stdio.h>
#include <stdint.h>
extern int64_t sidey();
int64_t testAdd(int64_t a) {
return a + 1;
}
int64_t testAddB(int a) {
return a + 1;
}
typedef int64_t (*testAddHandler)(int64_t);
testAddHandler h = &testAdd;
typedef int64_t (*testAddBHandler)(int);
testAddBHandler hb = &testAddB;
int main() {
printf("other says %lld.\n", sidey());
int64_t r = h(42);
printf("my fp says: %lld.\n", r);
int64_t rb = hb(42);
printf("my second fp says: %lld.\n", r);
}
''', '''
#include <stdint.h>
int64_t sidey() {
volatile int64_t x = 0x12345678abcdef12LL;
x += x % 17;
x = 18 - x;
return x;
}
''', 'other says -1311768467750121224.\nmy fp says: 43.\nmy second fp says: 43.', force_c=True)
@needs_dylink
@also_with_wasm_bigint
def test_dylink_i64_c(self):
  """Call i32- and i64-returning function pointers across a dynamic-link boundary, in C.

  Covers both pointers to functions defined inside the main module and
  pointers to functions exported by the side module.
  """
  self.dylink_test(r'''
#include <stdio.h>
#include <inttypes.h>
#include "header.h"
typedef int32_t (*fp_type_32)(int32_t, int32_t, int32_t);
typedef int64_t (*fp_type_64)(int32_t, int32_t, int32_t);
int32_t internal_function_ret_32(int32_t i, int32_t j, int32_t k) {
return 32;
}
int64_t internal_function_ret_64(int32_t i, int32_t j, int32_t k) {
return 64;
}
int main() {
fp_type_32 fp32_internal = &internal_function_ret_32;
fp_type_32 fp32_external = &function_ret_32;
fp_type_64 fp64_external = &function_ret_64;
fp_type_64 fp64_internal = &internal_function_ret_64;
int32_t ires32 = fp32_internal(0,0,0);
printf("res32 - internal %d\n",ires32);
int32_t eres32 = fp32_external(0,0,0);
printf("res32 - external %d\n",eres32);
int64_t ires64 = fp64_internal(0,0,0);
printf("res64 - internal %" PRId64 "\n",ires64);
int64_t eres64 = fp64_external(0,0,0);
printf("res64 - external %" PRId64 "\n",eres64);
return 0;
}
''', '''
#include "header.h"
int32_t function_ret_32(int32_t i, int32_t j, int32_t k) {
return 32;
}
int64_t function_ret_64(int32_t i, int32_t j, int32_t k) {
return 64;
}
''', '''res32 - internal 32
res32 - external 32
res64 - internal 64
res64 - external 64\n''', header='''
#include <emscripten.h>
#include <stdint.h>
EMSCRIPTEN_KEEPALIVE int32_t function_ret_32(int32_t i, int32_t j, int32_t k);
EMSCRIPTEN_KEEPALIVE int64_t function_ret_64(int32_t i, int32_t j, int32_t k);
''', force_c=True)

@needs_dylink
@also_with_wasm_bigint
def test_dylink_i64_invoke(self):
  """Pass an i64 through a cross-module call that uses an invoke (exception-catching) wrapper."""
  self.set_setting('DISABLE_EXCEPTION_CATCHING', 0)
  self.dylink_test(r'''\
#include <stdio.h>
#include <stdint.h>
extern "C" int64_t sidey(int64_t arg);
int main(int argc, char *argv[]) {
int64_t temp = 42;
printf("got %lld\n", sidey(temp));
return 0;
}''', r'''\
#include <stdint.h>
#include <stdio.h>
#include <emscripten.h>
extern "C" {
EMSCRIPTEN_KEEPALIVE int64_t do_call(int64_t arg) {
if (arg == 0) {
throw;
}
return 2 * arg;
}
int64_t sidey(int64_t arg) {
try {
return do_call(arg);
} catch(...) {
return 0;
}
}
}''', 'got 84', need_reverse=False)
@needs_dylink
def test_dylink_class(self):
  """A class declared in the shared header, implemented in the side module, constructed in main."""
  self.dylink_test(header=r'''
#include <stdio.h>
struct Class {
Class(const char *name);
};
''', main=r'''
#include "header.h"
int main() {
Class c("main");
return 0;
}
''', side=r'''
#include "header.h"
Class::Class(const char *name) { printf("new %s\n", name); }
''', expected=['new main\n'])

@needs_dylink
def test_dylink_global_var(self):
  """A global variable defined in the side module is readable from the main module."""
  self.dylink_test(main=r'''
#include <stdio.h>
extern int x;
int main() {
printf("extern is %d.\n", x);
return 0;
}
''', side=r'''
int x = 123;
''', expected=['extern is 123.\n'], force_c=True)

@needs_dylink
def test_dylink_global_var_modded(self):
  """Like test_dylink_global_var, but a side-module static constructor mutates the global first."""
  self.dylink_test(main=r'''
#include <stdio.h>
extern int x;
int main() {
printf("extern is %d.\n", x);
return 0;
}
''', side=r'''
int x = 123;
struct Initter {
Initter() { x = 456; }
};
Initter initter;
''', expected=['extern is 456.\n'])
@needs_dylink
def test_dylink_stdlib(self):
  """Side module uses libc/libm (malloc, strcpy, pow); main consumes its results."""
  self.dylink_test(header=r'''
#include <math.h>
#include <stdlib.h>
#include <string.h>
char *side(const char *data);
double pow_two(double x);
''', main=r'''
#include <stdio.h>
#include "header.h"
int main() {
char *temp = side("hello through side\n");
char *ret = (char*)malloc(strlen(temp)+1);
strcpy(ret, temp);
temp[1] = 'x';
puts(ret);
printf("pow_two: %d.\n", (int)pow_two(5.9));
return 0;
}
''', side=r'''
#include "header.h"
char *side(const char *data) {
char *ret = (char*)malloc(strlen(data)+1);
strcpy(ret, data);
return ret;
}
double pow_two(double x) {
return pow(2, x);
}
''', expected=['hello through side\n\npow_two: 59.'], force_c=True)

@needs_dylink
def test_dylink_jslib(self):
  """A JS library function is callable from both the main and the side module."""
  create_file('lib.js', r'''
mergeInto(LibraryManager.library, {
test_lib_func: function(x) {
return x + 17.2;
}
});
''')
  self.dylink_test(header=r'''
extern double test_lib_func(int input);
''', main=r'''
#include <stdio.h>
#include "header.h"
extern double sidey();
int main2() { return 11; }
int main() {
int input = sidey();
double temp = test_lib_func(input);
printf("other says %.2f\n", temp);
printf("more: %.5f, %d\n", temp, input);
return 0;
}
''', side=r'''
#include <stdio.h>
#include "header.h"
extern int main2();
double sidey() {
int temp = main2();
printf("main2 sed: %d\n", temp);
printf("main2 sed: %u, %c\n", temp, temp/2);
return test_lib_func(temp);
}
''', expected='other says 45.2', main_emcc_args=['--js-library', 'lib.js'], force_c=True)
@needs_dylink
def test_dylink_many_postsets(self):
  """Stress relocation postsets by generating a very large table of function pointers."""
  # NUM controls how many entries the generated function-pointer array has.
  NUM = 1234
  self.dylink_test(header=r'''
#include <stdio.h>
typedef void (*voidfunc)();
static void simple() {
printf("simple.\n");
}
static volatile voidfunc funcs[''' + str(NUM) + '] = { ' + ','.join(['simple'] * NUM) + r''' };
static void test() {
volatile int i = ''' + str(NUM - 1) + r''';
funcs[i]();
i = 0;
funcs[i]();
}
extern void more();
''', main=r'''
#include "header.h"
int main() {
test();
more();
return 0;
}
''', side=r'''
#include "header.h"
void more() {
test();
}
''', expected=['simple.\nsimple.\nsimple.\nsimple.\n'], force_c=True)

@needs_dylink
def test_dylink_postsets_chunking(self):
  """Check that a global initialized from a side-module symbol relocates correctly amid many other globals."""
  self.dylink_test(header=r'''
extern int global_var;
''', main=r'''
#include <stdio.h>
#include "header.h"
// prepare 99 global variable with local initializer
static int p = 1;
#define P(x) __attribute__((used)) int *padding##x = &p;
P(01) P(02) P(03) P(04) P(05) P(06) P(07) P(08) P(09) P(10)
P(11) P(12) P(13) P(14) P(15) P(16) P(17) P(18) P(19) P(20)
P(21) P(22) P(23) P(24) P(25) P(26) P(27) P(28) P(29) P(30)
P(31) P(32) P(33) P(34) P(35) P(36) P(37) P(38) P(39) P(40)
P(41) P(42) P(43) P(44) P(45) P(46) P(47) P(48) P(49) P(50)
P(51) P(52) P(53) P(54) P(55) P(56) P(57) P(58) P(59) P(60)
P(61) P(62) P(63) P(64) P(65) P(66) P(67) P(68) P(69) P(70)
P(71) P(72) P(73) P(74) P(75) P(76) P(77) P(78) P(79) P(80)
P(81) P(82) P(83) P(84) P(85) P(86) P(87) P(88) P(89) P(90)
P(91) P(92) P(93) P(94) P(95) P(96) P(97) P(98) P(99)
// prepare global variable with global initializer
int *ptr = &global_var;
int main(int argc, char *argv[]) {
printf("%d\n", *ptr);
}
''', side=r'''
#include "header.h"
int global_var = 12345;
''', expected=['12345\n'], force_c=True)
@needs_dylink
@parameterized({
  'libcxx': ('libc,libc++,libmalloc,libc++abi',),
  'all': ('1',),
  'missing': ('libc,libmalloc', False, False, False),
  'missing_assertions': ('libc,libmalloc', False, False, True),
})
def test_dylink_syslibs(self, syslibs, expect_pass=True, need_reverse=True, assertions=True):
  """Force a specific set of system libraries and verify linking succeeds or fails as expected.

  When the forced set is missing libc++ the link should fail; with
  ASSERTIONS enabled the failure message must suggest EMCC_FORCE_STDLIBS.
  """
  self.emcc_args.append('-Wno-deprecated')
  self.set_setting('WARN_ON_UNDEFINED_SYMBOLS', 0)
  if assertions is not None:
    self.set_setting('ASSERTIONS', int(assertions))
  passed = True
  try:
    with env_modify({'EMCC_FORCE_STDLIBS': syslibs, 'EMCC_ONLY_FORCED_STDLIBS': '1'}):
      self.dylink_test(main=r'''
void side();
int main() {
side();
return 0;
}
''', side=r'''
#include <iostream>
void side() { std::cout << "cout hello from side\n"; }
''', expected=['cout hello from side\n'], need_reverse=need_reverse, main_module=1)
  except Exception as e:
    # A failure is only acceptable for the 'missing*' cases.
    if expect_pass:
      raise
    print('(seeing expected fail)')
    passed = False
    assertion = 'build the MAIN_MODULE with EMCC_FORCE_STDLIBS=1 in the environment'
    # The helpful assertion text only appears when ASSERTIONS is enabled.
    if self.get_setting('ASSERTIONS'):
      self.assertContained(assertion, str(e))
    else:
      self.assertNotContained(assertion, str(e))
  assert passed == expect_pass, ['saw', passed, 'but expected', expect_pass]
@needs_dylink
@with_env_modify({'EMCC_FORCE_STDLIBS': 'libc++'})
def test_dylink_iostream(self):
  """std::string/iostream values flow correctly from the side module to main."""
  self.dylink_test(header=r'''
#include <iostream>
#include <string>
std::string side();
''', main=r'''
#include "header.h"
int main() {
std::cout << "hello from main " << side() << std::endl;
return 0;
}
''', side=r'''
#include "header.h"
std::string side() { return "and hello from side"; }
''', expected=['hello from main and hello from side\n'])

@needs_dylink
def test_dylink_dynamic_cast(self): # issue 3465
  """dynamic_cast works on objects whose vtables live in the side module (issue #3465)."""
  self.dylink_test(header=r'''
class Base {
public:
virtual void printName();
};
class Derived : public Base {
public:
void printName();
};
''', main=r'''
#include "header.h"
#include <iostream>
using namespace std;
int main() {
cout << "starting main" << endl;
Base *base = new Base();
Base *derived = new Derived();
base->printName();
derived->printName();
if (dynamic_cast<Derived*>(derived)) {
cout << "OK" << endl;
} else {
cout << "KO" << endl;
}
return 0;
}
''', side=r'''
#include "header.h"
#include <iostream>
using namespace std;
void Base::printName() {
cout << "Base" << endl;
}
void Derived::printName() {
cout << "Derived" << endl;
}
''', expected=['starting main\nBase\nDerived\nOK'])

@with_both_exception_handling
@needs_dylink
def test_dylink_raii_exceptions(self):
  """An invoke with a signature unique to the side module must be generated for it."""
  self.dylink_test(main=r'''
#include <stdio.h>
extern int side();
int main() {
printf("from side: %d.\n", side());
}
''', side=r'''
#include <stdio.h>
typedef int (*ifdi)(float, double, int);
int func_with_special_sig(float a, double b, int c) {
printf("special %f %f %d\n", a, b, c);
return 1337;
}
struct DestructorCaller {
~DestructorCaller() { printf("destroy\n"); }
};
int side() {
// d has a destructor that must be called on function
// exit, which means an invoke will be used for the
// indirect call here - and the signature of that call
// is special and not present in the main module, so
// it must be generated for the side module.
DestructorCaller d;
volatile ifdi p = func_with_special_sig;
return p(2.18281, 3.14159, 42);
}
''', expected=['special 2.182810 3.141590 42\ndestroy\nfrom side: 1337.\n'])
@needs_dylink
@disabled('https://github.com/emscripten-core/emscripten/issues/12815')
def test_dylink_hyper_dupe(self):
  """Load a third module at runtime and check duplicate-symbol warnings (currently disabled)."""
  self.set_setting('INITIAL_MEMORY', '64mb')
  self.set_setting('ASSERTIONS', 2)
  # test hyper-dynamic linking, and test duplicate warnings
  create_file('third.cpp', r'''
#include <stdio.h>
int sidef() { return 36; }
int sideg = 49;
int bsidef() { return 536; }
extern void only_in_second_1(int x);
extern int second_to_third;
int third_to_second = 1337;
void only_in_third_0() {
// note we access our own globals directly, so
// it doesn't matter that overriding failed
printf("only_in_third_0: %d, %d, %d\n", sidef(), sideg, second_to_third);
only_in_second_1(2112);
}
void only_in_third_1(int x) {
printf("only_in_third_1: %d, %d, %d, %d\n", sidef(), sideg, second_to_third, x);
}
''')
  if self.is_wasm():
    libname = 'third.wasm'
  else:
    libname = 'third.js'
  self.run_process([EMCC, 'third.cpp', '-o', libname, '-s', 'SIDE_MODULE'] + self.get_emcc_args())
  # The main source is %-formatted to inject libname, hence the %%d escapes.
  self.dylink_test(main=r'''
#include <stdio.h>
#include <emscripten.h>
extern int sidef();
extern int sideg;
extern int bsidef();
extern int bsideg;
extern void only_in_second_0();
extern void only_in_third_0();
int main() {
EM_ASM({
loadDynamicLibrary('%s'); // hyper-dynamic! works at least for functions (and consts not used in same block)
});
printf("sidef: %%d, sideg: %%d.\n", sidef(), sideg);
printf("bsidef: %%d.\n", bsidef());
only_in_second_0();
only_in_third_0();
}
''' % libname,
                   side=r'''
#include <stdio.h>
int sidef() { return 10; } // third will try to override these, but fail!
int sideg = 20;
extern void only_in_third_1(int x);
int second_to_third = 500;
extern int third_to_second;
void only_in_second_0() {
printf("only_in_second_0: %d, %d, %d\n", sidef(), sideg, third_to_second);
only_in_third_1(1221);
}
void only_in_second_1(int x) {
printf("only_in_second_1: %d, %d, %d, %d\n", sidef(), sideg, third_to_second, x);
}
''',
                   expected=['sidef: 10, sideg: 20.\nbsidef: 536.\nonly_in_second_0: 10, 20, 1337\nonly_in_third_1: 36, 49, 500, 1221\nonly_in_third_0: 36, 49, 500\nonly_in_second_1: 10, 20, 1337, 2112\n'],
                   need_reverse=not self.is_wasm())
  print('check warnings')
  full = self.run_js('src.js')
  self.assertContained("warning: symbol '_sideg' from '%s' already exists" % libname, full)
@needs_dylink
def test_dylink_load_compiled_side_module(self):
  """Load a pre-compiled WebAssembly.Module as a side module by hand via loadWebAssemblyModule."""
  self.set_setting('FORCE_FILESYSTEM')
  self.emcc_args.append('-lnodefs.js')
  self.set_setting('INITIAL_MEMORY', '64mb')
  # Disable automatic loading so the EM_ASM block below can do it manually.
  self.set_setting('NO_AUTOLOAD_DYLIBS')
  self.dylink_test(main=r'''
#include <stdio.h>
#include <emscripten.h>
extern int sidef();
int main() {
EM_ASM({
FS.mkdir('/working');
FS.mount(NODEFS,{ root: '.' }, '/working');
var libData = FS.readFile('/working/liblib.so', {encoding: 'binary'});
if (!(libData instanceof Uint8Array)) {
libData = new Uint8Array(libData);
}
var compiledModule = new WebAssembly.Module(libData);
var sideExports = loadWebAssemblyModule(compiledModule, {loadAsync: false, nodelete: true});
mergeLibSymbols(sideExports, 'liblib.so');
});
printf("sidef: %d.\n", sidef());
}
''',
                   side=r'''
#include <stdio.h>
int sidef() { return 10; }
''',
                   expected=['sidef: 10'],
                   need_reverse=not self.is_wasm())
@needs_dylink
def test_dylink_dso_needed(self):
  """Run the shared DSO-needed test matrix via a driver that appends a main() to each source.

  The driver writes `src` plus a trivial main() to main.c and runs it,
  expecting `expected_output`.
  """
  def do_run(src, expected_output, emcc_args=None):
    # Avoid a mutable default argument ([]); None means "no extra flags".
    if emcc_args is None:
      emcc_args = []
    create_file('main.c', src + 'int main() { return test_main(); }')
    self.do_runf('main.c', expected_output, emcc_args=emcc_args)
  self._test_dylink_dso_needed(do_run)
@needs_dylink
def test_dylink_dot_a(self):
  """The side module may be built from a mix of a static archive (.a) and an object file."""
  # side module is completely (static) linked statically out of a library
  create_file('third.c', 'int sidef() { return 36; }')
  create_file('fourth.c', 'int sideg() { return 17; }')
  self.run_process([EMCC, '-fPIC', '-c', 'third.c', '-o', 'third.o'] + self.get_emcc_args())
  self.run_process([EMCC, '-fPIC', '-c', 'fourth.c', '-o', 'fourth.o'] + self.get_emcc_args())
  self.run_process([EMAR, 'rc', 'libfourth.a', 'fourth.o'])
  self.dylink_test(main=r'''
#include <stdio.h>
#include <emscripten.h>
int sidef();
int sideg();
int main() {
printf("sidef: %d, sideg: %d.\n", sidef(), sideg());
}
''',
                   side=['libfourth.a', 'third.o'],
                   expected=['sidef: 36, sideg: 17.\n'], force_c=True)

@needs_dylink
def test_dylink_spaghetti(self):
  """Main and side modules reference each other's globals from static constructors, in both orders."""
  self.dylink_test(main=r'''
#include <stdio.h>
int main_x = 72;
extern int side_x;
int adjust = side_x + 10;
int *ptr = &side_x;
struct Class {
Class() {
printf("main init sees %d, %d, %d.\n", adjust, *ptr, main_x);
}
};
Class cm;
int main() {
printf("main main sees %d, %d, %d.\n", adjust, *ptr, main_x);
return 0;
}
''', side=r'''
#include <stdio.h>
extern int main_x;
int side_x = -534;
int adjust2 = main_x + 10;
int *ptr2 = &main_x;
struct SideClass {
SideClass() {
printf("side init sees %d, %d, %d.\n", adjust2, *ptr2, side_x);
}
};
SideClass cs;
''', expected=['''\
side init sees 82, 72, -534.
main init sees -524, -534, 72.
main main sees -524, -534, 72.
''', '''\
main init sees -524, -534, 72.
side init sees 82, 72, -534.
main main sees -524, -534, 72.
'''])
@needs_make('mingw32-make')
@needs_dylink
def test_dylink_zlib(self):
  """Link the zlib example against a dynamically-linked zlib built as the side module."""
  self.emcc_args += ['-Wno-shift-negative-value', '-I' + test_file('third_party/zlib')]
  self.set_setting('RELOCATABLE')
  zlib_archive = self.get_zlib_library()
  self.dylink_test(main=read_file(test_file('third_party/zlib/example.c')),
                   side=zlib_archive,
                   expected=read_file(test_file('core/test_zlib.out')),
                   force_c=True)
# @needs_dylink
# def test_dylink_bullet(self):
# self.emcc_args += ['-I' + test_file('bullet/src')]
# side = self.get_bullet_library(self, True)
# self.dylink_test(main=read_file(test_file('bullet/Demos/HelloWorld/HelloWorld.cpp')),
# side=side,
# expected=[read_file(test_file('bullet/output.txt')), # different roundings
# read_file(test_file('bullet/output2.txt')),
# read_file(test_file('bullet/output3.txt'))])
@needs_dylink
def test_dylink_rtti(self):
  """dynamic_cast across modules must share one copy of each class's RTTI."""
  # Verify that objects created in one module can be dynamic_cast<> correctly
  # in the other module.
  # Each module will define its own copy of certain COMDAT symbols such as
  # each class's typeinfo, but at runtime they should both use the same one.
  self.set_setting('LLD_REPORT_UNDEFINED')
  header = '''
#include <cstddef>
class Foo {
public:
virtual ~Foo() {}
};
class Bar : public Foo {
public:
virtual ~Bar() {}
};
bool is_bar(Foo* foo);
'''
  main = '''
#include <stdio.h>
#include "header.h"
int main() {
Bar bar;
if (!is_bar(&bar)) {
puts("failure");
return 1;
}
puts("success");
return 0;
}
'''
  side = '''
#include "header.h"
bool is_bar(Foo* foo) {
return dynamic_cast<Bar*>(foo) != nullptr;
}
'''
  self.dylink_test(main=main,
                   side=side,
                   header=header,
                   expected='success')
@needs_dylink
def test_dylink_argv_argc(self):
  """argv/argc supplied via Module.arguments reach main() when dynamic linking is enabled."""
  # Verify that argc and argv can be sent to main when main is in a side module
  self.emcc_args += ['--extern-pre-js', 'pre.js']
  create_file('pre.js', '''
var Module = { arguments: ['hello', 'world!'] }
''')
  self.dylink_test(
    '', # main module is empty.
    r'''
#include <stdio.h>
int main(int argc, char const *argv[]) {
printf("%d ", argc);
for (int i=1; i<argc; i++) printf("%s ", argv[i]);
printf("\n");
return 0;
}
''',
    expected='3 hello world!',
    need_reverse=False)

@needs_dylink
def test_dylink_weak(self):
  """Weak symbols resolve correctly across the main/side module boundary."""
  self.dylink_testf(test_file('core/test_dylink_weak.c'), need_reverse=False)

@node_pthreads
@needs_dylink
def test_dylink_tls(self):
  """Thread-local storage works in a side module."""
  self.emcc_args.append('-Wno-experimental')
  self.dylink_testf(test_file('core/test_dylink_tls.c'),
                    need_reverse=False)

@node_pthreads
@needs_dylink
def test_dylink_tls_export(self):
  """TLS symbols exported from one module are visible from the other."""
  self.emcc_args.append('-Wno-experimental')
  self.dylink_testf(test_file('core/test_dylink_tls_export.c'),
                    need_reverse=False)
def test_random(self):
  """srandom()/random() must produce the deterministic value for a fixed seed."""
  self.do_run(r'''#include <stdlib.h>
#include <stdio.h>
int main()
{
srandom(0xdeadbeef);
printf("%ld\n", random());
}
''', '956867869')
def test_rand(self):
  """rand()/rand_r() produce the expected deterministic sequences and cover all requested bits."""
  src = r'''#include <stdlib.h>
#include <stdio.h>
#include <assert.h>
int main()
{
// we need RAND_MAX to be a bitmask (power of 2 minus 1). this assertions guarantees
// if RAND_MAX changes the test failure will focus attention on that issue here.
assert(RAND_MAX == 0x7fffffff);
srand(0xdeadbeef);
for(int i = 0; i < 10; ++i)
printf("%d\n", rand());
unsigned int seed = 0xdeadbeef;
for(int i = 0; i < 10; ++i)
printf("%d\n", rand_r(&seed));
bool haveEvenAndOdd = true;
for(int i = 1; i <= 30; ++i)
{
int mask = 1 << i;
if (mask > RAND_MAX) break;
bool haveEven = false;
bool haveOdd = false;
for(int j = 0; j < 1000 && (!haveEven || !haveOdd); ++j)
{
if ((rand() & mask) == 0)
haveEven = true;
else
haveOdd = true;
}
haveEvenAndOdd = haveEvenAndOdd && haveEven && haveOdd;
}
if (haveEvenAndOdd)
printf("Have even and odd!\n");
return 0;
}
'''
  expected = '''490242850
2074599277
1480056542
1912638067
931112055
2110392489
2053422194
1614832492
216117595
174823244
760368382
602359081
1121118963
1291018924
1608306807
352705809
958258461
1182561381
114276303
1481323674
Have even and odd!
'''
  self.do_run(src, expected)
def test_strtod(self):
  """strtod() string-to-double conversion."""
  self.do_core_test('test_strtod.c')

def test_strtold(self):
  """strtold() string-to-long-double conversion."""
  self.do_core_test('test_strtold.c')

def test_strtok(self):
  """strtok() tokenization."""
  self.do_core_test('test_strtok.c')

def test_strtol(self):
  """strtol() string-to-long conversion."""
  self.do_core_test('test_strtol.c')

def test_transtrcase(self):
  """Upper/lower-case string transformation."""
  self.do_core_test('test_transtrcase.c')

@no_wasm2js('very slow to compile')
@is_slow_test
def test_printf(self):
  """The large printf conformance suite."""
  self.emcc_args.append('-Wno-format')
  # needs to flush stdio streams
  self.set_setting('EXIT_RUNTIME')
  self.do_run_in_out_file_test('printf/test.c')

def test_printf_2(self):
  """Additional printf cases."""
  self.do_core_test('test_printf_2.c')

def test_printf_float(self):
  """printf float formatting."""
  self.do_run_in_out_file_test('printf/test_float.c')

def test_printf_octal(self):
  """printf octal formatting."""
  self.do_run_in_out_file_test('printf/test_octal.c')

def test_printf_macros(self):
  """printf with the <inttypes.h> format macros."""
  self.do_core_test('test_printf_macros.c')

def test_vprintf(self):
  """vprintf with a va_list."""
  self.do_core_test('test_vprintf.c')

def test_vsnprintf(self):
  """vsnprintf bounded formatting."""
  self.do_core_test('test_vsnprintf.c')

def test_printf_more(self):
  """More printf cases."""
  self.do_core_test('test_printf_more.c')

def test_perrar(self):
  """perror() error reporting."""
  self.do_core_test('test_perrar.c')

def test_atoX(self):
  """atoi/atol/atoll conversions."""
  self.do_core_test('test_atoX.c')

def test_strstr(self):
  """strstr() substring search."""
  self.do_core_test('test_strstr.c')

def test_fnmatch(self):
  """fnmatch() glob matching."""
  self.do_core_test('test_fnmatch.cpp')

def test_sscanf(self):
  """Basic sscanf parsing."""
  self.do_core_test('test_sscanf.c')
def test_sscanf_2(self):
  """sscanf %f/%lf parsing of decimal and exponent notation, for both float and double."""
  # doubles
  for ftype in ['float', 'double']:
    src = r'''
#include <stdio.h>
int main(){
char strval1[] = "1.2345678901";
char strval2[] = "1.23456789e5";
char strval3[] = "1.23456789E5";
char strval4[] = "1.2345678e-5";
char strval5[] = "1.2345678E-5";
double dblval = 1.2345678901;
double tstval;
sscanf(strval1, "%lf", &tstval);
if(dblval != tstval) printf("FAIL: Values are not equal: %lf %lf\n", dblval, tstval);
else printf("Pass: %lf %lf\n", tstval, dblval);
sscanf(strval2, "%lf", &tstval);
dblval = 123456.789;
if(dblval != tstval) printf("FAIL: Values are not equal: %lf %lf\n", dblval, tstval);
else printf("Pass: %lf %lf\n", tstval, dblval);
sscanf(strval3, "%lf", &tstval);
dblval = 123456.789;
if(dblval != tstval) printf("FAIL: Values are not equal: %lf %lf\n", dblval, tstval);
else printf("Pass: %lf %lf\n", tstval, dblval);
sscanf(strval4, "%lf", &tstval);
dblval = 0.000012345678;
if(dblval != tstval) printf("FAIL: Values are not equal: %lf %lf\n", dblval, tstval);
else printf("Pass: %lf %lf\n", tstval, dblval);
sscanf(strval5, "%lf", &tstval);
dblval = 0.000012345678;
if(dblval != tstval) printf("FAIL: Values are not equal: %lf %lf\n", dblval, tstval);
else printf("Pass: %lf %lf\n", tstval, dblval);
return 0;
}
'''
    if ftype == 'float':
      # For the float variant, rewrite the source to use %f and float.
      self.do_run(src.replace('%lf', '%f').replace('double', 'float'), '''Pass: 1.234568 1.234568
Pass: 123456.789062 123456.789062
Pass: 123456.789062 123456.789062
Pass: 0.000012 0.000012
Pass: 0.000012 0.000012''')
    else:
      self.do_run(src, '''Pass: 1.234568 1.234568
Pass: 123456.789000 123456.789000
Pass: 123456.789000 123456.789000
Pass: 0.000012 0.000012
Pass: 0.000012 0.000012''')
def test_sscanf_n(self):
  """sscanf %n consumed-character count."""
  self.do_core_test('test_sscanf_n.c')

def test_sscanf_whitespace(self):
  """sscanf whitespace handling."""
  # needs to flush stdio streams
  self.set_setting('EXIT_RUNTIME')
  self.do_core_test('test_sscanf_whitespace.c')

def test_sscanf_other_whitespace(self):
  """sscanf with tabs/newlines as whitespace."""
  # use i16s in printf
  self.set_setting('SAFE_HEAP', 0)
  # needs to flush stdio streams
  self.set_setting('EXIT_RUNTIME')
  self.do_core_test('test_sscanf_other_whitespace.c')

def test_sscanf_3(self):
  """Additional sscanf cases."""
  self.do_core_test('test_sscanf_3.c')

def test_sscanf_4(self):
  """Additional sscanf cases."""
  self.do_core_test('test_sscanf_4.c')

def test_sscanf_5(self):
  """Additional sscanf cases."""
  self.do_core_test('test_sscanf_5.c')

def test_sscanf_6(self):
  """Additional sscanf cases."""
  self.do_core_test('test_sscanf_6.c')

def test_sscanf_skip(self):
  """sscanf %* assignment suppression."""
  self.do_core_test('test_sscanf_skip.c')

def test_sscanf_caps(self):
  """sscanf uppercase format specifiers."""
  self.do_core_test('test_sscanf_caps.c')

def test_sscanf_hex(self):
  """sscanf %x hexadecimal parsing."""
  self.do_core_test('test_sscanf_hex.cpp')

def test_sscanf_float(self):
  """sscanf float parsing."""
  self.do_core_test('test_sscanf_float.c')

def test_langinfo(self):
  """nl_langinfo() locale queries."""
  self.do_core_test('test_langinfo.c')
def test_files(self):
  """End-to-end filesystem test: lazy files, FS_createDataFile, custom stdin, seeking, fscanf."""
  self.banned_js_engines = [config.SPIDERMONKEY_ENGINE] # closure can generate variables called 'gc', which pick up js shell stuff
  if self.maybe_closure():
    self.emcc_args = [x for x in self.emcc_args if x != '-g'] # ensure we test --closure 1 --memory-init-file 1 (-g would disable closure)
  elif '-O3' in self.emcc_args and not self.is_wasm():
    print('closure 2')
    self.emcc_args += ['--closure', '2'] # Use closure 2 here for some additional coverage
    return self.skipTest('TODO: currently skipped because CI runs out of memory running Closure in this test!')
  self.emcc_args += ['--pre-js', 'pre.js']
  self.set_setting('FORCE_FILESYSTEM')
  print('base', self.emcc_args)
  create_file('pre.js', '''
/** @suppress{checkTypes}*/
Module = {
'noFSInit': true,
'preRun': function() {
FS.createLazyFile('/', 'test.file', 'test.file', true, false);
// Test FS_* exporting
Module['FS_createDataFile']('/', 'somefile.binary', [100, 200, 50, 25, 10, 77, 123], true, false, false); // 200 becomes -56, since signed chars are used in memory
var test_files_input = 'hi there!';
var test_files_input_index = 0;
FS.init(function() {
return test_files_input.charCodeAt(test_files_input_index++) || null;
});
}
};
''')
  create_file('test.file', 'some data')
  mem_file = 'files.js.mem'
  try_delete(mem_file)
  def clean(out):
    # Strip engine/tooling noise so only the program's output is compared.
    return '\n'.join([line for line in out.split('\n') if 'binaryen' not in line and 'wasm' not in line and 'so not running' not in line])
  self.do_runf(test_file('files.cpp'), ('size: 7\ndata: 100,-56,50,25,10,77,123\nloop: 100 -56 50 25 10 77 123 \ninput:hi there!\ntexto\n$\n5 : 10,30,20,11,88\nother=some data.\nseeked=me da.\nseeked=ata.\nseeked=ta.\nfscanfed: 10 - hello\n5 bytes to dev/null: 5\nok.\ntexte\n', 'size: 7\ndata: 100,-56,50,25,10,77,123\nloop: 100 -56 50 25 10 77 123 \ninput:hi there!\ntexto\ntexte\n$\n5 : 10,30,20,11,88\nother=some data.\nseeked=me da.\nseeked=ata.\nseeked=ta.\nfscanfed: 10 - hello\n5 bytes to dev/null: 5\nok.\n'),
               output_nicerizer=clean)
  if self.uses_memory_init_file():
    self.assertExists(mem_file)
def test_files_m(self):
  """Custom Module.stdin/Module.stdout callbacks feed and capture the program's stdio."""
  # Test for Module.stdin etc.
  # needs to flush stdio streams
  self.set_setting('EXIT_RUNTIME')
  create_file('pre.js', '''
Module = {
data: [10, 20, 40, 30],
stdin: function() { return Module.data.pop() || null },
stdout: function(x) { out('got: ' + x) }
};
''')
  self.emcc_args += ['--pre-js', 'pre.js']
  src = r'''
#include <stdio.h>
#include <unistd.h>
int main () {
char c;
fprintf(stderr, "isatty? %d,%d,%d\n", isatty(fileno(stdin)), isatty(fileno(stdout)), isatty(fileno(stderr)));
while ((c = fgetc(stdin)) != EOF) {
putc(c+5, stdout);
}
return 0;
}
'''
  def clean(out):
    # Drop warnings/tooling noise before comparing output.
    return '\n'.join(l for l in out.splitlines() if 'warning' not in l and 'binaryen' not in l)
  # Output ordering of stdout vs stderr is not guaranteed; accept all interleavings.
  self.do_run(src, ('got: 35\ngot: 45\ngot: 25\ngot: 15\nisatty? 0,0,1\n', 'got: 35\ngot: 45\ngot: 25\ngot: 15\nisatty? 0,0,1', 'isatty? 0,0,1\ngot: 35\ngot: 45\ngot: 25\ngot: 15'), output_nicerizer=clean)
def test_mount(self):
  """FS.mount() of filesystems at runtime."""
  self.set_setting('FORCE_FILESYSTEM')
  self.do_runf(test_file('fs/test_mount.c'), 'success')

def test_getdents64(self):
  """getdents64 directory enumeration."""
  self.do_runf(test_file('fs/test_getdents64.cpp'), '..')

def test_getdents64_special_cases(self):
  """getdents64 edge cases (banned on v8 due to an upstream bug)."""
  # https://bugs.chromium.org/p/v8/issues/detail?id=6881
  self.banned_js_engines = [config.V8_ENGINE]
  self.do_run_in_out_file_test('fs/test_getdents64_special_cases.cpp')

def test_getcwd_with_non_ascii_name(self):
  """getcwd() with a non-ASCII directory name (banned on v8 due to an upstream bug)."""
  # https://bugs.chromium.org/p/v8/issues/detail?id=6881
  self.banned_js_engines = [config.V8_ENGINE]
  self.do_run_in_out_file_test('fs/test_getcwd_with_non_ascii_name.cpp')

def test_proc_self_fd(self):
  """/proc/self/fd virtual directory."""
  self.do_run_in_out_file_test('fs/test_proc_self_fd.c')

def test_fwrite_0(self):
  """fwrite() with a zero-length write."""
  self.do_core_test('test_fwrite_0.c')

def test_fgetc_ungetc(self):
  """fgetc/ungetc interplay, run under both MEMFS and NODEFS."""
  print('TODO: update this test once the musl ungetc-on-EOF-stream bug is fixed upstream and reaches us')
  orig_compiler_opts = self.emcc_args.copy()
  for fs in ['MEMFS', 'NODEFS']:
    print(fs)
    self.emcc_args = orig_compiler_opts + ['-D' + fs]
    if fs == 'NODEFS':
      self.emcc_args += ['-lnodefs.js']
    self.do_runf(test_file('stdio/test_fgetc_ungetc.c'), 'success', js_engines=[config.NODE_JS])
def test_fgetc_unsigned(self):
  """fgetc must return byte values above 127 as positive ints, not sign-extended negatives."""
  create_file('file_with_byte_234.txt', b'\xea', binary=True)
  self.emcc_args += ['--embed-file', 'file_with_byte_234.txt']
  program = r'''
#include <stdio.h>
int main() {
FILE *file = fopen("file_with_byte_234.txt", "rb");
int c = fgetc(file);
printf("*%d\n", c);
}
'''
  self.do_run(program, '*234\n')
def test_fgets_eol(self):
  """fgets on a file containing only a newline must return that line, never an empty string."""
  src = r'''
#include <stdio.h>
char buf[32];
int main()
{
const char *r = "SUCCESS";
FILE *f = fopen("eol.txt", "r");
while (fgets(buf, 32, f) != NULL) {
if (buf[0] == '\0') {
r = "FAIL";
break;
}
}
printf("%s\n", r);
fclose(f);
return 0;
}
'''
  # Use create_file (as the rest of this file does) rather than a bare
  # open().write(), which leaked the file handle.
  create_file('eol.txt', b'\n', binary=True)
  self.emcc_args += ['--embed-file', 'eol.txt']
  self.do_run(src, 'SUCCESS\n')
def test_fscanf(self):
  """fscanf parses three floats (including a leading-dot negative) from a file."""
  create_file('three_numbers.txt', '-1 0.1 -.1')
  src = r'''
#include <stdio.h>
#include <assert.h>
#include <float.h>
int main()
{
float x = FLT_MAX, y = FLT_MAX, z = FLT_MAX;
FILE* fp = fopen("three_numbers.txt", "r");
if (fp) {
int match = fscanf(fp, " %f %f %f ", &x, &y, &z);
printf("match = %d\n", match);
printf("x = %0.1f, y = %0.1f, z = %0.1f\n", x, y, z);
} else {
printf("failed to open three_numbers.txt\n");
}
return 0;
}
'''
  self.emcc_args += ['--embed-file', 'three_numbers.txt']
  self.do_run(src, 'match = 3\nx = -1.0, y = 0.1, z = -0.1\n')

def test_fscanf_2(self):
  """fscanf with a long slash-separated %d format (OBJ-style face indices)."""
  create_file('a.txt', '''1/2/3 4/5/6 7/8/9
''')
  self.emcc_args += ['--embed-file', 'a.txt']
  self.do_run(r'''#include <cstdio>
#include <iostream>
using namespace std;
int
main( int argv, char ** argc ) {
cout << "fscanf test" << endl;
FILE * file;
file = fopen("a.txt", "rb");
int vertexIndex[4];
int normalIndex[4];
int uvIndex[4];
int matches = fscanf(file, "%d/%d/%d %d/%d/%d %d/%d/%d %d/%d/%d\n", &vertexIndex[0], &uvIndex[0], &normalIndex[0], &vertexIndex [1], &uvIndex[1], &normalIndex[1], &vertexIndex[2], &uvIndex[2], &normalIndex[2], &vertexIndex[3], &uvIndex[3], &normalIndex[3]);
cout << matches << endl;
return 0;
}
''', 'fscanf test\n9\n')
def test_fileno(self):
  """fileno() on the first user-opened FILE* should report descriptor 3 (after stdin/stdout/stderr)."""
  create_file('empty.txt', '')
  self.emcc_args += ['--embed-file', 'empty.txt']
  self.do_run(r'''
#include <stdio.h>
#include <unistd.h>
int main()
{
FILE* fp = fopen("empty.txt", "r");
if (fp) {
printf("%d\n", fileno(fp));
} else {
printf("failed to open empty.txt\n");
}
return 0;
}
''', '3\n')
def test_readdir(self):
  """readdir() directory iteration."""
  self.do_run_in_out_file_test('dirent/test_readdir.c')

def test_readdir_empty(self):
  """readdir() on an empty directory."""
  self.do_run_in_out_file_test('dirent/test_readdir_empty.c')

def test_stat(self):
  """stat() file metadata; output must also pass strict-mode verification."""
  self.do_runf(test_file('stat/test_stat.c'), 'success')
  self.verify_in_strict_mode('test_stat.js')

def test_fstatat(self):
  """fstatat() relative-to-fd stat."""
  self.do_runf(test_file('stat/test_fstatat.c'), 'success')

def test_stat_chmod(self):
  """chmod() permission changes reflected in stat()."""
  self.do_runf(test_file('stat/test_chmod.c'), 'success')

def test_stat_mknod(self):
  """mknod() node creation reflected in stat()."""
  self.do_runf(test_file('stat/test_mknod.c'), 'success')

def test_fcntl(self):
  """fcntl() on a pre-created data file."""
  self.add_pre_run("FS.createDataFile('/', 'test', 'abcdef', true, true, false);")
  self.do_run_in_out_file_test('fcntl/test_fcntl.c')

def test_fcntl_open(self):
  """fcntl() with open() flags."""
  self.do_run_in_out_file_test('fcntl/test_fcntl_open.c')

@also_with_wasm_bigint
def test_fcntl_misc(self):
  """Miscellaneous fcntl() commands."""
  self.add_pre_run("FS.createDataFile('/', 'test', 'abcdef', true, true, false);")
  self.do_run_in_out_file_test('fcntl/test_fcntl_misc.c')

def test_poll(self):
  """poll() on a regular file and a registered dummy device."""
  self.add_pre_run('''
var dummy_device = FS.makedev(64, 0);
FS.registerDevice(dummy_device, {});
FS.createDataFile('/', 'file', 'abcdef', true, true, false);
FS.mkdev('/device', dummy_device);
''')
  self.do_core_test('test_poll.c')

def test_statvfs(self):
  """statvfs() filesystem statistics."""
  self.do_core_test('test_statvfs.c')

def test_libgen(self):
  """basename()/dirname() from libgen."""
  self.do_core_test('test_libgen.c')

def test_utime(self):
  """utime() timestamp updates."""
  self.do_runf(test_file('utime/test_utime.c'), 'success')
def test_futimens(self):
  """futimens() timestamp updates."""
  # Use the single 'dir/file' path form for consistency with test_utime above.
  self.do_runf(test_file('utime/test_futimens.c'), 'success')
@no_minimal_runtime('MINIMAL_RUNTIME does not have getValue() and setValue() (TODO add it to a JS library function to get it in)')
def test_utf(self):
  """UTF8ToString/stringToUTF8 round-tripping via exported runtime methods."""
  self.banned_js_engines = [config.SPIDERMONKEY_ENGINE] # only node handles utf well
  self.set_setting('EXPORTED_FUNCTIONS', ['_main', '_malloc', '_free'])
  self.set_setting('EXPORTED_RUNTIME_METHODS', ['getValue', 'setValue', 'UTF8ToString', 'stringToUTF8'])
  self.do_core_test('test_utf.c')

def test_utf32(self):
  """UTF-32 conversion helpers, with and without -fshort-wchar."""
  if self.get_setting('MINIMAL_RUNTIME'):
    self.set_setting('DEFAULT_LIBRARY_FUNCS_TO_INCLUDE', ['$UTF32ToString', '$stringToUTF32', '$lengthBytesUTF32'])
  else:
    self.set_setting('EXPORTED_RUNTIME_METHODS', ['UTF32ToString', 'stringToUTF32', 'lengthBytesUTF32'])
  self.do_runf(test_file('utf32.cpp'), 'OK.')
  self.do_runf(test_file('utf32.cpp'), 'OK.', args=['-fshort-wchar'])

def test_utf16(self):
  """UTF-16 conversion helpers."""
  self.do_runf(test_file('core/test_utf16.cpp'), 'OK.')

def test_utf8(self):
  """UTF-8 / ASCII conversion helpers."""
  if self.get_setting('MINIMAL_RUNTIME'):
    self.set_setting('DEFAULT_LIBRARY_FUNCS_TO_INCLUDE', ['$AsciiToString', '$stringToAscii', '$writeAsciiToMemory'])
  else:
    self.set_setting('EXPORTED_RUNTIME_METHODS',
                     ['UTF8ToString', 'stringToUTF8', 'AsciiToString', 'stringToAscii'])
  self.do_runf(test_file('utf8.cpp'), 'OK.')

@also_with_wasm_bigint
def test_utf8_textdecoder(self):
  """UTF-8 decoding benchmark/correctness against a large corpus."""
  self.set_setting('EXPORTED_RUNTIME_METHODS', ['UTF8ToString', 'stringToUTF8'])
  self.emcc_args += ['--embed-file', test_file('utf8_corpus.txt') + '@/utf8_corpus.txt']
  self.do_runf(test_file('benchmark_utf8.cpp'), 'OK.')

# Test that invalid character in UTF8 does not cause decoding to crash.
def test_utf8_invalid(self):
  """Invalid UTF-8 bytes must not crash the decoder, with and without TextDecoder."""
  self.set_setting('EXPORTED_RUNTIME_METHODS', ['UTF8ToString', 'stringToUTF8'])
  for decoder_mode in [[], ['-s', 'TEXTDECODER']]:
    self.emcc_args += decoder_mode
    print(str(decoder_mode))
    self.do_runf(test_file('utf8_invalid.cpp'), 'OK.')
# Test that invalid character in UTF8 does not cause decoding to crash.
@no_asan('TODO: ASan support in minimal runtime')
def test_minimal_runtime_utf8_invalid(self):
  """Same invalid-UTF-8 robustness check, but under MINIMAL_RUNTIME."""
  self.set_setting('EXPORTED_RUNTIME_METHODS', ['UTF8ToString', 'stringToUTF8'])
  self.set_setting('MINIMAL_RUNTIME')
  for decoder_mode in [False, True]:
    self.set_setting('TEXTDECODER', decoder_mode)
    print(str(decoder_mode))
    self.do_runf(test_file('utf8_invalid.cpp'), 'OK.')

def test_utf16_textdecoder(self):
  """UTF-16 decoding correctness against a large corpus."""
  self.set_setting('EXPORTED_RUNTIME_METHODS', ['UTF16ToString', 'stringToUTF16', 'lengthBytesUTF16'])
  self.emcc_args += ['--embed-file', test_file('utf16_corpus.txt') + '@/utf16_corpus.txt']
  self.do_runf(test_file('benchmark_utf16.cpp'), 'OK.')

def test_wprintf(self):
  """wprintf wide-character formatting."""
  self.do_core_test('test_wprintf.cpp')

def test_write_stdout_fileno(self):
  """write() directly to stdout's fd, with and without the filesystem."""
  self.do_core_test('test_write_stdout_fileno.c')
  self.do_core_test('test_write_stdout_fileno.c', args=['-s', 'FILESYSTEM=0'])

def test_direct_string_constant_usage(self):
  """Direct use of string constants with iostreams."""
  # needs to flush stdio streams
  self.set_setting('EXIT_RUNTIME')
  self.do_core_test('test_direct_string_constant_usage.cpp')

def test_std_cout_new(self):
  """std::cout used from a static-initialization context."""
  self.do_core_test('test_std_cout_new.cpp')

def test_std_function_incomplete_return(self):
  """std::function with an incomplete return type."""
  self.do_core_test('test_std_function_incomplete_return.cpp')

def test_istream(self):
  """std::istream input parsing (regression check for issue #273)."""
  # needs to flush stdio streams
  self.set_setting('EXIT_RUNTIME')
  for linkable in [0]: # , 1]:
    print(linkable)
    # regression check for issue #273
    self.set_setting('LINKABLE', linkable)
    self.do_core_test('test_istream.cpp')
def test_fs_base(self):
  """Exercise the base FS API from a pre-run JS script; the C program itself is a no-op."""
  self.set_setting('DEFAULT_LIBRARY_FUNCS_TO_INCLUDE', ['$FS'])
  self.uses_es6 = True
  self.add_pre_run(read_file(test_file('filesystem/src.js')))
  self.do_run('int main() {return 0;}\n',
              read_file(test_file('filesystem/output.txt')))
@also_with_noderawfs
@is_slow_test
def test_fs_nodefs_rw(self):
  """NODEFS read/write, repeated under closure compilation when available."""
  # TODO(sbc): This test exposes an issue in the way we run closure compiler and
  # causes it to generate non-ES5 output.
  # Remove this line once we fix: https://github.com/emscripten-core/emscripten/issues/12628
  self.uses_es6 = True
  self.emcc_args += ['-lnodefs.js']
  self.set_setting('SYSCALL_DEBUG')
  self.do_runf(test_file('fs/test_nodefs_rw.c'), 'success')
  if self.maybe_closure():
    self.do_runf(test_file('fs/test_nodefs_rw.c'), 'success')

@also_with_noderawfs
def test_fs_nodefs_cloexec(self):
  """close-on-exec flag handling under NODEFS."""
  self.emcc_args += ['-lnodefs.js']
  self.do_runf(test_file('fs/test_nodefs_cloexec.c'), 'success')

def test_fs_nodefs_home(self):
  self.set_setting('FORCE_FILESYSTEM')
  self.emcc_args += ['-lnodefs.js']
  self.do_runf(test_file('fs/test_nodefs_home.c'), 'success', js_engines=[config.NODE_JS])

def test_fs_nodefs_nofollow(self):
  self.emcc_args += ['-lnodefs.js']
  self.do_runf(test_file('fs/test_nodefs_nofollow.c'), 'success', js_engines=[config.NODE_JS])

def test_fs_trackingdelegate(self):
  """FS tracking-delegate callbacks (requires FS_DEBUG instrumentation)."""
  self.set_setting('FS_DEBUG')
  self.do_run_in_out_file_test('fs/test_trackingdelegate.c')

@also_with_noderawfs
def test_fs_writeFile(self):
  self.set_setting('DISABLE_EXCEPTION_CATCHING') # see issue 2334
  self.do_run_in_out_file_test('fs/test_writeFile.cpp')

def test_fs_write(self):
  self.do_run_in_out_file_test('fs/test_write.cpp')

@also_with_noderawfs
def test_fs_emptyPath(self):
  self.do_run_in_out_file_test('fs/test_emptyPath.c')

@also_with_noderawfs
def test_fs_append(self):
  self.do_runf(test_file('fs/test_append.c'), 'success')
def test_fs_mmap(self):
  """mmap over each supported filesystem backend (MEMFS/NODEFS/NODERAWFS)."""
  self.uses_es6 = True
  orig_compiler_opts = self.emcc_args.copy()
  for fs in ['MEMFS', 'NODEFS', 'NODERAWFS']:
    self.emcc_args = orig_compiler_opts + ['-D' + fs]
    if fs == 'NODEFS':
      self.emcc_args += ['-lnodefs.js']
    if fs == 'NODERAWFS':
      # NODERAWFS layers on top of NODEFS, so both libraries are needed
      self.emcc_args += ['-lnodefs.js', '-lnoderawfs.js']
    self.do_run_in_out_file_test('fs/test_mmap.c')

@parameterized({
  '': [],
  'minimal_runtime': ['-s', 'MINIMAL_RUNTIME=1']
})
def test_fs_no_main(self, *args):
  # library_fs.js uses hooks to enable ignoring of permissions up until ATMAINs are run. This
  # test verifies that they work correctly, even in programs without a main function.
  create_file('pre.js', '''
Module['preRun'] = function() {
assert(FS.ignorePermissions, "ignorePermissions not set during preRun");
}
Module['onRuntimeInitialized'] = function() {
assert(!FS.ignorePermissions, "ignorePermissions not unset during onRuntimeInitialized");
assert(_foo() == 42);
}
''')
  self.set_setting('EXPORTED_FUNCTIONS', '_foo')
  self.set_setting('FORCE_FILESYSTEM')
  self.emcc_args += ['--pre-js', 'pre.js'] + list(args)
  self.do_run('int foo() { return 42; }', '', force_c=True)
@also_with_noderawfs
def test_fs_errorstack(self):
  """Ensure FS errors carry a usable, writable JS stack even under strict mode."""
  # Enables strict mode, which may catch some strict-mode-only errors
  # so that users can safely work with strict JavaScript if enabled.
  create_file('pre.js', '"use strict";')
  self.emcc_args += ['--pre-js', 'pre.js']
  self.set_setting('FORCE_FILESYSTEM')
  self.set_setting('ASSERTIONS')
  self.do_run(r'''
#include <emscripten.h>
#include <iostream>
int main(void) {
std::cout << "hello world\n"; // should work with strict mode
EM_ASM(
try {
FS.readFile('/dummy.txt');
} catch (err) {
err.stack = err.stack; // should be writable
throw err;
}
);
return 0;
}
''', 'at Object.readFile', assert_returncode=NON_ZERO) # engines has different error stack format

@also_with_noderawfs
def test_fs_llseek(self):
  self.set_setting('FORCE_FILESYSTEM')
  self.do_runf(test_file('fs/test_llseek.c'), 'success')

def test_fs_64bit(self):
  self.do_runf(test_file('fs/test_64bit.c'), 'success')

def test_sigalrm(self):
  """alarm()/SIGALRM delivery, with and without EXIT_RUNTIME."""
  self.do_runf(test_file('test_sigalrm.c'), 'Received alarm!')
  self.set_setting('EXIT_RUNTIME')
  self.do_runf(test_file('test_sigalrm.c'), 'Received alarm!')

def test_signals(self):
  self.do_core_test(test_file('test_signals.c'))
@no_windows('https://github.com/emscripten-core/emscripten/issues/8882')
def test_unistd_access(self):
  """access() across MEMFS/NODEFS, plus NODERAWFS on non-Windows hosts."""
  self.uses_es6 = True
  orig_compiler_opts = self.emcc_args.copy()
  for fs in ['MEMFS', 'NODEFS']:
    self.emcc_args = orig_compiler_opts + ['-D' + fs]
    if fs == 'NODEFS':
      self.emcc_args += ['-lnodefs.js']
    self.do_run_in_out_file_test('unistd/access.c', js_engines=[config.NODE_JS])
  # Node.js fs.chmod is nearly no-op on Windows
  if not WINDOWS:
    self.emcc_args = orig_compiler_opts
    self.set_setting('NODERAWFS')
    self.do_run_in_out_file_test('unistd/access.c', js_engines=[config.NODE_JS])

def test_unistd_curdir(self):
  self.uses_es6 = True
  self.do_run_in_out_file_test('unistd/curdir.c')

@also_with_noderawfs
def test_unistd_close(self):
  self.do_run_in_out_file_test('unistd/close.c')

def test_unistd_confstr(self):
  self.do_run_in_out_file_test('unistd/confstr.c')

def test_unistd_ttyname(self):
  self.do_runf(test_file('unistd/ttyname.c'), 'success')

@also_with_noderawfs
def test_unistd_pipe(self):
  self.do_runf(test_file('unistd/pipe.c'), 'success')

@also_with_noderawfs
def test_unistd_dup(self):
  self.do_run_in_out_file_test('unistd/dup.c')

def test_unistd_pathconf(self):
  self.do_run_in_out_file_test('unistd/pathconf.c')

def test_unistd_truncate(self):
  """truncate()/ftruncate() across MEMFS and NODEFS."""
  self.uses_es6 = True
  orig_compiler_opts = self.emcc_args.copy()
  for fs in ['MEMFS', 'NODEFS']:
    self.emcc_args = orig_compiler_opts + ['-D' + fs]
    if fs == 'NODEFS':
      self.emcc_args += ['-lnodefs.js']
    self.do_run_in_out_file_test('unistd/truncate.c', js_engines=[config.NODE_JS])

@no_windows("Windows throws EPERM rather than EACCES or EINVAL")
@unittest.skipIf(WINDOWS or os.geteuid() == 0, "Root access invalidates this test by being able to write on readonly files")
def test_unistd_truncate_noderawfs(self):
  self.uses_es6 = True
  self.set_setting('NODERAWFS')
  self.maybe_closure()
  self.do_run_in_out_file_test('unistd/truncate.c', js_engines=[config.NODE_JS])

def test_unistd_swab(self):
  self.do_run_in_out_file_test('unistd/swab.c')

def test_unistd_isatty(self):
  self.do_runf(test_file('unistd/isatty.c'), 'success')

@also_with_standalone_wasm()
def test_unistd_sysconf(self):
  self.do_run_in_out_file_test('unistd/sysconf.c')

@no_asan('ASan alters memory layout')
def test_unistd_sysconf_phys_pages(self):
  """sysconf(_SC_PHYS_PAGES): the expected page count depends on memory-growth mode."""
  filename = test_file('unistd/sysconf_phys_pages.c')
  if self.get_setting('ALLOW_MEMORY_GROWTH'):
    expected = (2 * 1024 * 1024 * 1024) // webassembly.WASM_PAGE_SIZE
  else:
    expected = 16 * 1024 * 1024 // webassembly.WASM_PAGE_SIZE
  self.do_runf(filename, str(expected) + ', errno: 0')

def test_unistd_login(self):
  self.do_run_in_out_file_test('unistd/login.c')
@no_windows('https://github.com/emscripten-core/emscripten/issues/8882')
def test_unistd_unlink(self):
  """unlink()/rmdir() across MEMFS, NODEFS and (non-Windows/macOS) NODERAWFS."""
  self.clear()
  orig_compiler_opts = self.emcc_args.copy()
  for fs in ['MEMFS', 'NODEFS']:
    self.emcc_args = orig_compiler_opts + ['-D' + fs]
    # symlinks on node.js on non-linux behave differently (e.g. on Windows they require administrative privileges)
    # so skip testing those bits on that combination.
    if fs == 'NODEFS':
      self.emcc_args += ['-lnodefs.js']
      if WINDOWS:
        self.emcc_args += ['-DNO_SYMLINK=1']
      if MACOS:
        continue
    self.do_runf(test_file('unistd/unlink.c'), 'success', js_engines=[config.NODE_JS])
  # Several differences/bugs on non-linux including https://github.com/nodejs/node/issues/18014
  if not WINDOWS and not MACOS:
    self.emcc_args = orig_compiler_opts + ['-DNODERAWFS']
    # 0 if root user
    if os.geteuid() == 0:
      self.emcc_args += ['-DSKIP_ACCESS_TESTS']
    self.set_setting('NODERAWFS')
    self.do_runf(test_file('unistd/unlink.c'), 'success', js_engines=[config.NODE_JS])

def test_unistd_links(self):
  """link()/symlink()/readlink() across MEMFS and (non-Windows) NODEFS."""
  self.clear()
  orig_compiler_opts = self.emcc_args.copy()
  for fs in ['MEMFS', 'NODEFS']:
    if WINDOWS and fs == 'NODEFS':
      print('Skipping NODEFS part of this test for test_unistd_links on Windows, since it would require administrative privileges.', file=sys.stderr)
      # Also, other detected discrepancies if you do end up running this test on NODEFS:
      # test expects /, but Windows gives \ as path slashes.
      # Calling readlink() on a non-link gives error 22 EINVAL on Unix, but simply error 0 OK on Windows.
      continue
    self.emcc_args = orig_compiler_opts + ['-D' + fs]
    if fs == 'NODEFS':
      self.emcc_args += ['-lnodefs.js']
    self.do_run_in_out_file_test('unistd/links.c', js_engines=[config.NODE_JS])

@no_windows('Skipping NODEFS test, since it would require administrative privileges.')
def test_unistd_symlink_on_nodefs(self):
  # Also, other detected discrepancies if you do end up running this test on NODEFS:
  # test expects /, but Windows gives \ as path slashes.
  # Calling readlink() on a non-link gives error 22 EINVAL on Unix, but simply error 0 OK on Windows.
  self.emcc_args += ['-lnodefs.js']
  self.do_run_in_out_file_test('unistd/symlink_on_nodefs.c', js_engines=[config.NODE_JS])

def test_unistd_sleep(self):
  self.do_run_in_out_file_test('unistd/sleep.c')

@also_with_wasm_bigint
def test_unistd_io(self):
  """read()/write() behaviour across MEMFS and NODEFS."""
  self.set_setting('DEFAULT_LIBRARY_FUNCS_TO_INCLUDE', ['$ERRNO_CODES'])
  orig_compiler_opts = self.emcc_args.copy()
  for fs in ['MEMFS', 'NODEFS']:
    self.clear()
    self.emcc_args = orig_compiler_opts + ['-D' + fs]
    if fs == 'NODEFS':
      self.emcc_args += ['-lnodefs.js']
    self.do_run_in_out_file_test('unistd/io.c')

@no_windows('https://github.com/emscripten-core/emscripten/issues/8882')
def test_unistd_misc(self):
  orig_compiler_opts = self.emcc_args.copy()
  for fs in ['MEMFS', 'NODEFS']:
    self.emcc_args = orig_compiler_opts + ['-D' + fs]
    if fs == 'NODEFS':
      self.emcc_args += ['-lnodefs.js']
    self.do_run_in_out_file_test('unistd/misc.c', js_engines=[config.NODE_JS], interleaved_output=False)

def test_unistd_fstatfs(self):
  self.do_run_in_out_file_test('unistd/fstatfs.c')
# i64s in the API, which we'd need to legalize for JS, so in standalone mode
# only the wasm2c path is exercised here
@also_with_standalone_wasm(wasm2c=True)
def test_posixtime(self):
  self.banned_js_engines = [config.V8_ENGINE]
  self.do_core_test('test_posixtime.c')

def test_uname(self):
  self.do_core_test('test_uname.c')

def test_unary_literal(self):
  self.do_core_test('test_unary_literal.cpp')

def test_env(self):
  """Environment/argv handling: accept either the node script path or the
  generic './this.program' as the program name in the output."""
  expected = read_file(test_file('env/output.txt'))
  self.do_runf(test_file('env/src.c'), [
    expected.replace('{{{ THIS_PROGRAM }}}', self.in_dir('src.js')).replace('\\', '/'),
    expected.replace('{{{ THIS_PROGRAM }}}', './this.program')
  ])

def test_environ(self):
  expected = read_file(test_file('env/output-mini.txt'))
  self.do_runf(test_file('env/src-mini.c'), [
    expected.replace('{{{ THIS_PROGRAM }}}', self.in_dir('src-mini.js')).replace('\\', '/'),
    expected.replace('{{{ THIS_PROGRAM }}}', './this.program')
  ])

def test_systypes(self):
  self.do_core_test('test_systypes.c')

def test_stddef(self):
  self.do_core_test('test_stddef.cpp')
  self.do_core_test('test_stddef.cpp', force_c=True)

def test_getloadavg(self):
  self.do_core_test('test_getloadavg.c')

def test_nl_types(self):
  self.do_core_test('test_nl_types.c')

def test_799(self):
  # regression test, see the referenced issue number in the filename
  src = test_file('799.cpp')
  self.do_runf(src, '''Set PORT family: 0, port: 3979
Get PORT family: 0
PORT: 3979
''')

def test_ctype(self):
  self.do_core_test('test_ctype.c')

def test_strcasecmp(self):
  self.do_core_test('test_strcasecmp.c')

def test_atomic(self):
  self.do_core_test('test_atomic.c')

def test_atomic_cxx(self):
  self.emcc_args += ['-DIS_64BIT_LOCK_FREE=1']
  self.do_core_test('test_atomic_cxx.cpp')

def test_phiundef(self):
  self.do_core_test('test_phiundef.c')

def test_netinet_in(self):
  self.do_run_in_out_file_test('netinet/in.cpp')

@needs_dylink
def test_main_module_static_align(self):
  """Static data alignment in MAIN_MODULE builds (not compatible with memory growth)."""
  if self.get_setting('ALLOW_MEMORY_GROWTH'):
    self.skipTest('no shared modules with memory growth')
  self.set_setting('MAIN_MODULE')
  self.do_core_test('test_main_module_static_align.cpp')
def test_iostream_and_determinism(self):
  """Build the same program several times and assert byte-identical JS/wasm output."""
  create_file('src.cpp', '''
#include <iostream>
int main()
{
std::cout << "hello world" << std::endl << 77 << "." << std::endl;
return 0;
}
''')
  num = 5
  for i in range(num):
    print('(iteration %d)' % i)
    # introduce a little wall-clock jitter between builds
    time.sleep(random.random() / (10 * num))
    self.do_runf('src.cpp', 'hello world\n77.\n')
    # compare against the previous iteration's output, if any
    if os.path.exists('src.js.previous'):
      self.assertBinaryEqual('src.js', 'src.js.previous')
    shutil.copy2('src.js', 'src.js.previous')
    # also compare the wasm binary, when this configuration produces one
    if self.is_wasm() and not self.get_setting('WASM2JS'):
      if os.path.exists('src.wasm.previous'):
        self.assertBinaryEqual('src.wasm', 'src.wasm.previous')
      shutil.copy2('src.wasm', 'src.wasm.previous')

def test_stdvec(self):
  self.do_core_test('test_stdvec.cpp')

def test_random_device(self):
  self.maybe_closure()
  self.do_core_test('test_random_device.cpp')

def test_reinterpreted_ptrs(self):
  self.do_core_test('test_reinterpreted_ptrs.cpp')
def test_js_libraries(self):
  """Link two --js-library files and call their exported JS functions from C++."""
  create_file('main.cpp', '''
#include <stdio.h>
extern "C" {
extern void printey();
extern int calcey(int x, int y);
}
int main() {
printey();
printf("*%d*\\n", calcey(10, 22));
return 0;
}
''')
  create_file('mylib1.js', '''
mergeInto(LibraryManager.library, {
printey: function() {
out('hello from lib!');
}
});
''')
  create_file('mylib2.js', '''
mergeInto(LibraryManager.library, {
calcey: function(x, y) {
return x + y;
}
});
''')
  self.emcc_args += ['--js-library', 'mylib1.js', '--js-library', 'mylib2.js']
  self.do_runf('main.cpp', 'hello from lib!\n*32*\n')

def test_unicode_js_library(self):
  """A --js-library containing non-ASCII text must load and emit correctly."""
  create_file('main.cpp', '''
#include <stdio.h>
extern "C" {
extern void printey();
}
int main() {
printey();
return 0;
}
''')
  self.emcc_args += ['--js-library', test_file('unicode_library.js')]
  self.do_runf('main.cpp', u'Unicode snowman \u2603 says hello!')

def test_funcptr_import_type(self):
  self.emcc_args += ['--js-library', test_file('core/test_funcptr_import_type.js')]
  self.do_core_test('test_funcptr_import_type.cpp')
@no_asan('ASan does not work with EXPORT_ALL')
def test_constglobalunion(self):
  """A const global union initialized through one member must link and evaluate correctly."""
  self.set_setting('EXPORT_ALL')
  self.do_run(r'''
#include <stdio.h>
struct one_const {
long a;
};
struct two_consts {
long a;
long b;
};
union some_consts {
struct one_const one;
struct two_consts two;
};
union some_consts my_consts = {{
1
}};
struct one_const addr_of_my_consts = {
(long)(&my_consts)
};
int main(void) {
printf("%li\n", (long)!!addr_of_my_consts.a);
return 0;
}
''', '1')
def test_fannkuch(self):
  """Fannkuch benchmark: check known Pfannkuchen(n) results for n = 1..8.

  NOTE(review): the original chunk was missing this `def` line, leaving the
  body dangling at class scope (a SyntaxError/attribute leak); the method
  header has been restored.
  """
  results = [(1, 0), (2, 1), (3, 2), (4, 4), (5, 7), (6, 10), (7, 16), (8, 22)]
  self.build(test_file('fannkuch.cpp'))
  for i, j in results:
    print(i, j)
    self.do_run('fannkuch.js', 'Pfannkuchen(%d) = %d.' % (i, j), args=[str(i)], no_build=True)
def test_raytrace(self):
  """Raytracer output comparison; currently skipped (rounding-sensitive)."""
  self.skipTest('Relies on double value rounding, extremely sensitive')
  # NOTE: everything below is unreachable while the skipTest above remains.
  src = read_file(test_file('raytrace.cpp')).replace('double', 'float')
  output = read_file(test_file('raytrace.ppm'))
  self.do_run(src, output, args=['3', '16'])

def test_fasta(self):
  """Fasta benchmark in both float and double variants, checking known output
  for several input sizes (the nicerizer folds newlines into '*')."""
  results = [(1, '''GG*ctt**tgagc*'''),
             (20, '''GGCCGGGCGCGGTGGCTCACGCCTGTAATCCCAGCACTTT*cttBtatcatatgctaKggNcataaaSatgtaaaDcDRtBggDtctttataattcBgtcg**tacgtgtagcctagtgtttgtgttgcgttatagtctatttgtggacacagtatggtcaaa**tgacgtcttttgatctgacggcgttaacaaagatactctg*'''),
             (50, '''GGCCGGGCGCGGTGGCTCACGCCTGTAATCCCAGCACTTTGGGAGGCCGAGGCGGGCGGA*TCACCTGAGGTCAGGAGTTCGAGACCAGCCTGGCCAACAT*cttBtatcatatgctaKggNcataaaSatgtaaaDcDRtBggDtctttataattcBgtcg**tactDtDagcctatttSVHtHttKtgtHMaSattgWaHKHttttagacatWatgtRgaaa**NtactMcSMtYtcMgRtacttctWBacgaa**agatactctgggcaacacacatacttctctcatgttgtttcttcggacctttcataacct**ttcctggcacatggttagctgcacatcacaggattgtaagggtctagtggttcagtgagc**ggaatatcattcgtcggtggtgttaatctatctcggtgtagcttataaatgcatccgtaa**gaatattatgtttatttgtcggtacgttcatggtagtggtgtcgccgatttagacgtaaa**ggcatgtatg*''')]

  old = self.emcc_args
  orig_src = read_file(test_file('fasta.cpp'))

  def test(extra_args):
    self.emcc_args = old + extra_args
    for t in ['float', 'double']:
      print(t)
      src = orig_src.replace('double', t)
      with open('fasta.cpp', 'w') as f:
        f.write(src)
      self.build('fasta.cpp')
      for arg, output in results:
        self.do_run('fasta.js', output, args=[str(arg)], output_nicerizer=lambda x: x.replace('\n', '*'), no_build=True)
      shutil.copyfile('fasta.js', '%s.js' % t)

  test([])

@needs_non_trapping_float_to_int
def test_fasta_nontrapping(self):
  """Re-run test_fasta with non-trapping float-to-int conversions enabled."""
  self.emcc_args += ['-mnontrapping-fptoint']
  self.test_fasta()

def test_whets(self):
  self.do_runf(test_file('whets.cpp'), 'Single Precision C Whetstone Benchmark')
@require_v8
@no_asan('depends on the specifics of memory size, which for asan we are forced to increase')
def test_dlmalloc_inline(self):
  """dlmalloc compiled inline with the test driver (single compilation unit)."""
  self.set_setting('INITIAL_MEMORY', '128mb')

  src = read_file(path_from_root('system/lib/dlmalloc.c')) + '\n\n\n' + read_file(test_file('dlmalloc_test.c'))
  self.do_run(src, '*1,0*', args=['200', '1'], force_c=True)
  self.do_run('src.js', '*400,0*', args=['400', '400'], force_c=True, no_build=True)

@require_v8
@no_asan('depends on the specifics of memory size, which for asan we are forced to increase')
def test_dlmalloc(self):
  """dlmalloc as linked by default, plus the standalone emcc build, plus
  new/delete variants that link dlmalloc in without using it directly."""
  self.set_setting('INITIAL_MEMORY', '128mb')

  self.do_runf(test_file('dlmalloc_test.c'), '*1,0*', args=['200', '1'])
  self.do_run('dlmalloc_test.js', '*400,0*', args=['400', '400'], no_build=True)

  # emcc should build dlmalloc in automatically when no extra args are in play
  if self.emcc_args == []:
    try_delete('src.js')
    self.run_process([EMCC, test_file('dlmalloc_test.c'), '-s', 'INITIAL_MEMORY=128MB', '-o', 'src.js'], stdout=PIPE, stderr=self.stderr_redirect)
    self.do_run(None, '*1,0*', ['200', '1'], no_build=True)
    self.do_run(None, '*400,0*', ['400', '400'], no_build=True)

  # each new/delete pairing must round-trip through the allocator correctly
  src = read_file(test_file('new.cpp'))
  for new, delete in [
    ('malloc(100)', 'free'),
    ('new char[100]', 'delete[]'),
    ('new Structy', 'delete'),
    ('new int', 'delete'),
    ('new Structy[10]', 'delete[]'),
  ]:
    self.do_run(src.replace('{{{ NEW }}}', new).replace('{{{ DELETE }}}', delete), '*1,0*')

@no_asan('the memory size limit here is too small for asan')
def test_dlmalloc_large(self):
  self.emcc_args += ['-s', 'ABORTING_MALLOC=0', '-s', 'ALLOW_MEMORY_GROWTH=1', '-s', 'MAXIMUM_MEMORY=128MB']
  self.do_runf(test_file('dlmalloc_test_large.c'), '0 0 0 1')

@no_asan('asan also changes malloc, and that ends up linking in new twice')
def test_dlmalloc_partial(self):
  """Override the global operator new while still using dlmalloc's malloc."""
  src = read_file(test_file('new.cpp')).replace('{{{ NEW }}}', 'new int').replace('{{{ DELETE }}}', 'delete') + '''
#include <new>
void* operator new(size_t size) {
printf("new %zu!\\n", size);
return malloc(size);
}
'''
  self.do_run(src, 'new 4!\n*1,0*')

@no_asan('asan also changes malloc, and that ends up linking in new twice')
def test_dlmalloc_partial_2(self):
  if 'SAFE_HEAP' in str(self.emcc_args):
    self.skipTest('we do unsafe stuff here')
  self.do_core_test('test_dlmalloc_partial_2.c', assert_returncode=NON_ZERO)
def test_libcxx(self):
  """libc++ smoke tests: unordered containers and std::set."""
  self.do_runf(test_file('hashtest.cpp'),
               'june -> 30\nPrevious (in alphabetical order) is july\nNext (in alphabetical order) is march')

  self.do_run('''
#include <set>
#include <stdio.h>
int main() {
std::set<int> fetchOriginatorNums;
fetchOriginatorNums.insert(171);
printf("hello world\\n");
return 0;
}
''', 'hello world')

def test_typeid(self):
  self.do_core_test('test_typeid.cpp')

def test_static_variable(self):
  # needs atexit-style destruction, so the runtime must exit
  self.set_setting('EXIT_RUNTIME')
  self.do_core_test('test_static_variable.cpp')

def test_fakestat(self):
  self.do_core_test('test_fakestat.c')

def test_mmap(self):
  """Anonymous mmap; ASan brings its own (larger) memory configuration."""
  if '-fsanitize=address' not in self.emcc_args:
    self.set_setting('INITIAL_MEMORY', '128mb')
  # needs to flush stdio streams
  self.set_setting('EXIT_RUNTIME')
  self.do_core_test('test_mmap.c')
def test_mmap_file(self):
  """mmap of an embedded file; checks content at offset 0 and at a 4096-byte page boundary."""
  for extra_args in [[]]:
    self.emcc_args += ['--embed-file', 'data.dat'] + extra_args
    x = 'data from the file........'
    s = ''
    # build a 9000-byte payload: repeats of x, padded out with dots
    while len(s) < 9000:
      if len(s) + len(x) < 9000:
        s += x
        continue
      s += '.'
    assert len(s) == 9000
    create_file('data.dat', s)
    self.do_runf(test_file('mmap_file.c'), '*\n' + s[0:20] + '\n' + s[4096:4096 + 20] + '\n*\n')

@no_lsan('Test code contains memory leaks')
def test_cubescript(self):
  """Build and run the CubeScript interpreter, then run it again under ASYNCIFY."""
  self.emcc_args += ['-std=c++03', '-Wno-dynamic-class-memaccess']
  self.maybe_closure()
  self.emcc_args += ['-I', test_file('third_party/cubescript')]
  # the test code leaks; suppress ASan's leak report
  if '-fsanitize=address' in self.emcc_args:
    self.emcc_args += ['--pre-js', test_file('asan-no-leak.js')]

  def test():
    src = test_file('third_party/cubescript/command.cpp')
    self.do_runf(src, '*\nTemp is 33\n9\n5\nhello, everyone\n*')

  test()

  print('asyncify')
  self.set_setting('ASYNCIFY')
  test()

@needs_dylink
def test_relocatable_void_function(self):
  self.set_setting('RELOCATABLE')
  self.do_core_test('test_relocatable_void_function.c')
@wasm_simd
def test_wasm_intrinsics_simd(self):
  """wasm_simd128.h intrinsics, compiled as pedantic C++ and again with -funsigned-char."""
  def run():
    self.do_runf(test_file('test_wasm_intrinsics_simd.c'), 'Success!')
  self.emcc_args.append('-Wno-c++11-narrowing')
  self.emcc_args.extend(['-Wpedantic', '-Werror', '-Wall', '-xc++'])
  run()
  self.emcc_args.append('-funsigned-char')
  run()

@wasm_simd
def test_neon_wasm_simd(self):
  """ARM NEON intrinsics lowered onto wasm SIMD128."""
  self.emcc_args.append('-Wno-c++11-narrowing')
  self.emcc_args.append('-mfpu=neon')
  self.emcc_args.append('-msimd128')
  self.do_runf(test_file('neon/test_neon_wasm_simd.cpp'), 'Success!')

# The SSE tests below compile the same source natively with clang and compare
# the emulated (wasm SIMD) output against the native run's output.
@wasm_simd
@requires_native_clang
@no_safe_heap('has unaligned 64-bit operations in wasm')
def test_sse1(self):
  src = test_file('sse/test_sse1.cpp')
  self.run_process([shared.CLANG_CXX, src, '-msse', '-o', 'test_sse1', '-D_CRT_SECURE_NO_WARNINGS=1'] + clang_native.get_clang_native_args(), stdout=PIPE)
  native_result = self.run_process('./test_sse1', stdout=PIPE).stdout

  self.emcc_args += ['-I' + test_file('sse'), '-msse']
  self.maybe_closure()
  self.do_runf(src, native_result)

@wasm_simd
@requires_native_clang
@no_safe_heap('has unaligned 64-bit operations in wasm')
@is_slow_test
def test_sse2(self):
  src = test_file('sse/test_sse2.cpp')
  self.run_process([shared.CLANG_CXX, src, '-msse2', '-Wno-argument-outside-range', '-o', 'test_sse2', '-D_CRT_SECURE_NO_WARNINGS=1'] + clang_native.get_clang_native_args(), stdout=PIPE)
  native_result = self.run_process('./test_sse2', stdout=PIPE).stdout

  self.emcc_args += ['-I' + test_file('sse'), '-msse2', '-Wno-argument-outside-range']
  self.maybe_closure()
  self.do_runf(src, native_result)

@wasm_simd
@requires_native_clang
def test_sse3(self):
  src = test_file('sse/test_sse3.cpp')
  self.run_process([shared.CLANG_CXX, src, '-msse3', '-Wno-argument-outside-range', '-o', 'test_sse3', '-D_CRT_SECURE_NO_WARNINGS=1'] + clang_native.get_clang_native_args(), stdout=PIPE)
  native_result = self.run_process('./test_sse3', stdout=PIPE).stdout

  self.emcc_args += ['-I' + test_file('sse'), '-msse3', '-Wno-argument-outside-range']
  self.maybe_closure()
  self.do_runf(src, native_result)

@wasm_simd
@requires_native_clang
def test_ssse3(self):
  src = test_file('sse/test_ssse3.cpp')
  self.run_process([shared.CLANG_CXX, src, '-mssse3', '-Wno-argument-outside-range', '-o', 'test_ssse3', '-D_CRT_SECURE_NO_WARNINGS=1'] + clang_native.get_clang_native_args(), stdout=PIPE)
  native_result = self.run_process('./test_ssse3', stdout=PIPE).stdout

  self.emcc_args += ['-I' + test_file('sse'), '-mssse3', '-Wno-argument-outside-range']
  self.maybe_closure()
  self.do_runf(src, native_result)
@wasm_simd
@requires_native_clang
@is_slow_test
def test_sse4_1(self):
  """Compare emulated SSE4.1 intrinsics against a natively-compiled reference run.

  NOTE(review): the original chunk was corrupted here — the `self.run_process(`
  prefix was truncated to `ocess(`, and the unoptimized-ASan guard had no body.
  The call is restored and a skipTest supplied for the guard (message
  reconstructed — TODO confirm against upstream).
  """
  src = test_file('sse/test_sse4_1.cpp')
  if not self.is_optimizing() and '-fsanitize=address' in self.emcc_args:
    # this combination generates too-large/too-slow output for this test
    self.skipTest('slow and large without optimization under ASan')
  self.run_process([shared.CLANG_CXX, src, '-msse4.1', '-Wno-argument-outside-range', '-o', 'test_sse4_1', '-D_CRT_SECURE_NO_WARNINGS=1'] + clang_native.get_clang_native_args(), stdout=PIPE)
  native_result = self.run_process('./test_sse4_1', stdout=PIPE).stdout

  self.emcc_args += ['-I' + test_file('sse'), '-msse4.1', '-Wno-argument-outside-range']
  self.maybe_closure()
  self.do_runf(src, native_result)
@wasm_simd
@requires_native_clang
def test_sse4_2(self):
  """Compare emulated SSE4.2 intrinsics against a native reference run."""
  src = test_file('sse/test_sse4_2.cpp')
  self.run_process([shared.CLANG_CXX, src, '-msse4.2', '-Wno-argument-outside-range', '-o', 'test_sse4_2', '-D_CRT_SECURE_NO_WARNINGS=1'] + clang_native.get_clang_native_args(), stdout=PIPE)
  native_result = self.run_process('./test_sse4_2', stdout=PIPE).stdout

  self.emcc_args += ['-I' + test_file('sse'), '-msse4.2', '-Wno-argument-outside-range']
  self.maybe_closure()
  self.do_runf(src, native_result)

@wasm_simd
@requires_native_clang
def test_avx(self):
  """Compare emulated AVX intrinsics against a native reference run."""
  src = test_file('sse/test_avx.cpp')
  self.run_process([shared.CLANG_CXX, src, '-mavx', '-Wno-argument-outside-range', '-o', 'test_avx', '-D_CRT_SECURE_NO_WARNINGS=1'] + clang_native.get_clang_native_args(), stdout=PIPE)
  native_result = self.run_process('./test_avx', stdout=PIPE).stdout

  self.emcc_args += ['-I' + test_file('sse'), '-mavx', '-Wno-argument-outside-range']
  self.maybe_closure()
  self.do_runf(src, native_result)

@wasm_simd
def test_sse_diagnostics(self):
  """The slow-path emulation diagnostic must be emitted when requested."""
  self.emcc_args.remove('-Werror')
  src = test_file('sse/test_sse_diagnostic.cpp')

  p = self.run_process(
    [shared.EMXX, src, '-msse', '-DWASM_SIMD_COMPAT_SLOW'] + self.get_emcc_args(),
    stderr=PIPE)
  self.assertContained('Instruction emulated via slow path.', p.stderr)

@requires_native_clang
@wasm_relaxed_simd
def test_relaxed_simd_implies_simd128(self):
  # build-only check: -msse must be accepted when relaxed SIMD is enabled
  src = test_file('sse/test_sse1.cpp')
  self.build(src, emcc_args=['-msse'])

@no_asan('call stack exceeded on some versions of node')
def test_gcc_unmangler(self):
  self.emcc_args += ['-I' + test_file('third_party/libiberty')]
  self.do_runf(test_file('third_party/libiberty/cp-demangle.c'), '*d_demangle(char const*, int, unsigned int*)*', args=['_ZL10d_demanglePKciPj'])

@needs_make('make')
def test_lua(self):
  """Build Lua with its generic Makefile and run a small script through it."""
  self.emcc_args.remove('-Werror')

  libs = self.get_library('third_party/lua', [Path('src/lua.o'), Path('src/liblua.a')], make=['make', 'generic'], configure=None)
  self.do_run('',
              'hello lua world!\n17\n1\n2\n3\n4\n7',
              args=['-e', '''print("hello lua world!");print(17);for x = 1,4 do print(x) end;print(10-3)'''],
              libraries=libs,
              includes=[test_file('lua')],
              output_nicerizer=lambda output: output.replace('\n\n', '\n').replace('\n\n', '\n'))
@no_asan('issues with freetype itself')
@needs_make('configure script')
@is_slow_test
def test_freetype(self):
  """Render text with FreeType four ways and compare against reference dumps."""
  # make the font available both via a pre-run data file and on disk
  self.add_pre_run("FS.createDataFile('/', 'font.ttf', %s, true, false, false);" % str(
    list(bytearray(read_binary(test_file('freetype/LiberationSansBold.ttf'))))
  ))

  shutil.copyfile(test_file('freetype/LiberationSansBold.ttf'), 'font.ttf')

  self.do_run_from_file(test_file('freetype/main.c'),
                        test_file('freetype/ref.txt'),
                        args=['font.ttf', 'test!', '150', '120', '25'],
                        libraries=self.get_freetype_library(),
                        includes=[test_file('third_party/freetype/include')])

  print('[issue 324]')
  self.do_run_from_file(test_file('freetype/main_2.c'),
                        test_file('freetype/ref_2.txt'),
                        args=['font.ttf', 'w', '32', '32', '25'],
                        libraries=self.get_freetype_library(),
                        includes=[test_file('third_party/freetype/include')])

  print('[issue 324 case 2]')
  self.do_run_from_file(test_file('freetype/main_3.c'),
                        test_file('freetype/ref_3.txt'),
                        args=['font.ttf', 'W', '32', '32', '0'],
                        libraries=self.get_freetype_library(),
                        includes=[test_file('third_party/freetype/include')])

  print('[issue 324 case 3]')
  # reuse the main_3 build with different arguments
  self.do_run('main_3.js',
              read_file(test_file('freetype/ref_4.txt')),
              args=['font.ttf', 'ea', '40', '32', '0'],
              no_build=True)
@no_asan('local count too large for VMs')
@is_slow_test
def test_sqlite(self):
  """Build the SQLite amalgamation together with the benchmark driver and
  compare output against the recorded transcript.

  NOTE(review): the original chunk read `elf.emcc_args += ...` — a truncated
  `self.` that would raise NameError at runtime; fixed. The orphaned comment
  fragment about array initialization was re-attached to the
  -Wno-string-concatenation flag it belongs to.
  """
  self.set_setting('DISABLE_EXCEPTION_CATCHING')
  self.set_setting('EXPORTED_FUNCTIONS', ['_main', '_sqlite3_open', '_sqlite3_close', '_sqlite3_exec', '_sqlite3_free'])
  if '-g' in self.emcc_args:
    print("disabling inlining")
    self.set_setting('INLINING_LIMIT')

  # Silence warnings produced by the third-party sqlite3.c amalgamation itself.
  self.emcc_args += ['-Wno-implicit-int-float-conversion']
  # "suspicious concatenation of string literals in an
  # array initialization; did you mean to separate the elements with a comma?"
  self.emcc_args += ['-Wno-string-concatenation']
  self.emcc_args += ['-Wno-unknown-warning-option']
  self.emcc_args += ['-Wno-pointer-bool-conversion']

  self.emcc_args += ['-I' + test_file('third_party/sqlite')]

  src = '''
#define SQLITE_DISABLE_LFS
#define LONGDOUBLE_TYPE double
#define SQLITE_INT64_TYPE long long int
#define SQLITE_THREADSAFE 0
'''
  src += read_file(test_file('third_party/sqlite/sqlite3.c'))
  src += read_file(test_file('sqlite/benchmark.c'))
  self.do_run(src,
              read_file(test_file('sqlite/benchmark.txt')),
              includes=[test_file('sqlite')],
              force_c=True)
@needs_make('mingw32-make')
@is_slow_test
@parameterized({
  'cmake': (True,),
  'configure': (False,)
})
def test_zlib(self, use_cmake):
  """Build zlib via cmake or configure and run its example against expected output."""
  if WINDOWS and not use_cmake:
    self.skipTest("Windows cannot run configure sh scripts")

  self.maybe_closure()
  self.emcc_args.append('-Wno-shift-negative-value')
  if '-g' in self.emcc_args:
    self.emcc_args.append('-gsource-map')

  if use_cmake:
    make_args = []
    configure = ['cmake', '.']
  else:
    make_args = ['libz.a']
    configure = ['sh', './configure']

  self.do_run_from_file(
    test_file('third_party/zlib/example.c'),
    test_file('core/test_zlib.out'),
    libraries=self.get_library('third_party/zlib', 'libz.a', make_args=make_args, configure=configure),
    includes=[test_file('third_party/zlib'), 'building', 'zlib'])

@needs_make('make')
@is_slow_test
@parameterized({
  'cmake': (True,),
  'autoconf': (False,)
})
def test_bullet(self, use_cmake):
  """Build the Bullet physics HelloWorld demo (cmake or autoconf); any of the
  four recorded outputs is accepted (rounding differences)."""
  if WINDOWS and not use_cmake:
    self.skipTest("Windows cannot run configure sh scripts")

  self.emcc_args += [
    '-Wno-c++11-narrowing',
    '-Wno-deprecated-register',
    '-Wno-writable-strings',
    '-Wno-shift-negative-value',
    '-Wno-format'
  ]

  # extra coverage for ASSERTIONS level 2 on the cmake path
  if use_cmake:
    self.set_setting('ASSERTIONS', 2)
    self.emcc_args.append('-Wno-unused-command-line-argument')

  self.do_runf(test_file('third_party/bullet/Demos/HelloWorld/HelloWorld.cpp'),
               [read_file(test_file('bullet/output.txt')),
                read_file(test_file('bullet/output2.txt')),
                read_file(test_file('bullet/output3.txt')),
                read_file(test_file('bullet/output4.txt'))],
               libraries=self.get_bullet_library(use_cmake),
               includes=[test_file('third_party/bullet/src')])
@unittest.skip('LLVM changes have caused this C++ to no longer compile, https://github.com/emscripten-core/emscripten/issues/14614')
@no_asan('issues with freetype itself')
@needs_make('depends on freetype')
@is_slow_test
def test_poppler(self):
  """Render a PDF page with poppler and compare the produced PPM pixel data."""
  pdf_data = read_binary(test_file('poppler/paper.pdf'))
  create_file('paper.pdf.js', str(list(bytearray(pdf_data))))

  create_file('pre.js', '''
Module.preRun = function() {
FS.createDataFile('/', 'paper.pdf', eval(read_('paper.pdf.js')), true, false, false);
};
Module.postRun = function() {
var FileData = Array.from(MEMFS.getFileDataAsTypedArray(FS.root.contents['filename-1.ppm']));
out("Data: " + JSON.stringify(FileData.map(function(x) { return unSign(x, 8) })));
};
''')
  self.emcc_args += ['--pre-js', 'pre.js', '-s', 'DEFAULT_LIBRARY_FUNCS_TO_INCLUDE=$unSign']

  ppm_data = str(list(bytearray(read_binary(test_file('poppler/ref.ppm')))))
  self.do_run('', ppm_data.replace(' ', ''),
              libraries=self.get_poppler_library(),
              args=['-scale-to', '512', 'paper.pdf', 'filename'])
@needs_make('make')
@is_slow_test
def test_openjpeg(self):
  """Decode a JPEG2000 image with openjpeg and verify the mean pixel error
  against the reference raw image (tolerating float/double rounding)."""
  def do_test_openjpeg():
    def line_splitter(data):
      # wrap long byte-array lines so the generated pre.js stays manageable
      out = ''
      counter = 0

      for ch in data:
        out += ch
        if ch == ' ' and counter > 60:
          out += '\n'
          counter = 0
        else:
          counter += 1

      return out

    # strip any -g flag for this build
    self.emcc_args = [x for x in self.emcc_args if x != '-g']

    original_j2k = test_file('openjpeg/syntensity_lobby_s.j2k')
    image_bytes = list(bytearray(read_binary(original_j2k)))
    create_file('pre.js', """
Module.preRun = function() { FS.createDataFile('/', 'image.j2k', %s, true, false, false); };
Module.postRun = function() {
out('Data: ' + JSON.stringify(Array.from(MEMFS.getFileDataAsTypedArray(FS.analyzePath('image.raw').object))));
};
""" % line_splitter(str(image_bytes)))

    # If we don't do this then we don't know what the state of the cache will be
    # and this test would produce different non-deterministic results based on, for example,
    # what other tests had previously run.
    self.run_process([EMBUILDER, 'build', 'libpng'])
    lib = self.get_library('third_party/openjpeg',
                           [Path('codec/CMakeFiles/j2k_to_image.dir/index.c.o'),
                            Path('codec/CMakeFiles/j2k_to_image.dir/convert.c.o'),
                            Path('codec/CMakeFiles/j2k_to_image.dir/__/common/color.c.o'),
                            Path('bin/libopenjpeg.a')],
                           configure=['cmake', '.'],
                           # configure_args=['--enable-tiff=no', '--enable-jp3d=no', '--enable-png=no'],
                           make_args=[]) # no -j 2, since parallel builds can fail

    # We use doubles in JS, so we get slightly different values than native code. So we
    # check our output by comparing the average pixel difference
    def image_compare(output):
      # Get the image generated by JS, from the JSON.stringify'd array
      m = re.search(r'\[[\d, -]*\]', output)
      self.assertIsNotNone(m, 'Failed to find proper image output in: ' + output)
      # Evaluate the output as a python array; map negatives back to bytes
      js_data = eval(m.group(0))
      js_data = [x if x >= 0 else 256 + x for x in js_data]

      true_data = bytearray(read_binary(test_file('openjpeg/syntensity_lobby_s.raw')))

      assert(len(js_data) == len(true_data))
      num = len(js_data)
      diff_total = js_total = true_total = 0
      for i in range(num):
        js_total += js_data[i]
        true_total += true_data[i]
        diff_total += abs(js_data[i] - true_data[i])
      js_mean = js_total / float(num)
      true_mean = true_total / float(num)
      diff_mean = diff_total / float(num)

      # both means must match the known reference mean, and the average
      # per-pixel difference must be tiny
      image_mean = 83.265
      assert abs(js_mean - image_mean) < 0.01, [js_mean, image_mean]
      assert abs(true_mean - image_mean) < 0.01, [true_mean, image_mean]
      assert diff_mean < 0.01, diff_mean

      return output

    self.set_setting('EXIT_RUNTIME', 0)
    self.emcc_args += ['--minify=0']
    self.emcc_args += ['--pre-js', 'pre.js']

    def do_test():
      self.do_runf(test_file('third_party/openjpeg/codec/j2k_to_image.c'),
                   'Successfully generated',
                   args='-i image.j2k -o image.raw'.split(),
                   emcc_args=['-sUSE_LIBPNG'],
                   libraries=lib,
                   includes=[test_file('third_party/openjpeg/libopenjpeg'),
                             test_file('third_party/openjpeg/codec'),
                             test_file('third_party/openjpeg/common'),
                             Path(self.get_build_dir(), 'third_party/openjpeg')],
                   output_nicerizer=image_compare)

    do_test()

    # extra coverage: repeat without memory growth when it was enabled
    if self.get_setting('ALLOW_MEMORY_GROWTH') == 1:
      print('no memory growth', file=sys.stderr)
      self.set_setting('ALLOW_MEMORY_GROWTH', 0)
      do_test()

  if '-fsanitize=address' in self.emcc_args:
    # ASan needs a larger initial memory; pass the setting to the sub-builds
    # via the environment
    with env_modify({'EMMAKEN_CFLAGS': '-sINITIAL_MEMORY=300MB'}):
      do_test_openjpeg()
  else:
    do_test_openjpeg()
@no_asan('call stack exceeded on some versions of node')
@is_slow_test
def test_fuzz(self):
    """Run every fuzz case under tests/fuzz against its recorded
    expected output, once normally and once with LTO."""
    self.emcc_args += ['-I' + test_file('fuzz/include'), '-w']

    def run_all(x):
        print(x)
        for name in sorted(glob.glob(test_file('fuzz/*.c')) + glob.glob(test_file('fuzz/*.cpp'))):
            # known-failing cases are named with 'newfail'
            if 'newfail' in name:
                continue
            # skip temporary artifacts from fuzzing runs
            if os.path.basename(name).startswith('temp_fuzzcode'):
                continue
            print(name)
            if name.endswith('.cpp'):
                self.emcc_args.append('-std=c++03')
            self.do_runf(test_file('fuzz', name),
                         read_file(test_file('fuzz', name + '.txt')))
            if name.endswith('.cpp'):
                self.emcc_args.remove('-std=c++03')

    run_all('normal')
    self.emcc_args += ['-flto']
    run_all('lto')
@also_with_standalone_wasm(wasm2c=True, impure=True)
@no_asan('autodebug logging interferes with asan')
@with_env_modify({'EMCC_AUTODEBUG': '1'})
def test_autodebug_wasm(self):
    """Build with EMCC_AUTODEBUG=1 and verify the autodebug
    instrumentation markers appear in the program output."""
    def check(out):
        for msg in ['log_execution', 'get_i32', 'set_i32', 'load_ptr', 'load_val', 'store_ptr', 'store_val']:
            self.assertIn(msg, out)
        return out

    self.do_runf(test_file('core/test_autodebug.c'),
                 'success', output_nicerizer=check)
@parameterized({
    'full': ('full',),
    'mask': ('mask',),
    'none': ('none',),
})
def test_wasm2c_sandboxing(self, mode):
    """Build hello world in standalone WASM2C mode for each supported
    WASM2C_SANDBOXING level."""
    if not can_do_standalone(self):
        return self.skipTest('standalone mode not supported')
    self.set_setting('STANDALONE_WASM')
    self.set_setting('WASM2C')
    self.set_setting('WASM2C_SANDBOXING', mode)
    # NOTE(review): wasm engines are cleared here, presumably because the
    # wasm2c output is not run in a wasm VM — confirm
    self.wasm_engines = []
    self.do_core_test('test_hello_world.c')
def test_ccall(self):
    """Exercise the ccall/cwrap runtime helpers: every supported
    argument/return type, cwrap'd functions, stack restoration across
    many calls, and re-entrant ccall.

    Note: the original text was corrupted here — the `def test_ccall`
    header was missing and the first statement had lost its
    `self.emcc` prefix; both are restored.
    """
    self.emcc_args.append('-Wno-return-stack-address')
    self.set_setting('EXPORTED_RUNTIME_METHODS', ['ccall', 'cwrap'])
    # synchronous startup so the post-js can call in immediately
    self.set_setting('WASM_ASYNC_COMPILATION', 0)
    create_file('post.js', '''
out('*');
var ret;
ret = Module['ccall']('get_int', 'number'); out([typeof ret, ret].join(','));
ret = ccall('get_float', 'number'); out([typeof ret, ret.toFixed(2)].join(','));
ret = ccall('get_bool', 'boolean'); out([typeof ret, ret].join(','));
ret = ccall('get_string', 'string'); out([typeof ret, ret].join(','));
ret = ccall('print_int', null, ['number'], [12]); out(typeof ret);
ret = ccall('print_float', null, ['number'], [14.56]); out(typeof ret);
ret = ccall('print_bool', null, ['boolean'], [true]); out(typeof ret);
ret = ccall('print_string', null, ['string'], ["cheez"]); out(typeof ret);
ret = ccall('print_string', null, ['array'], [[97, 114, 114, 45, 97, 121, 0]]); out(typeof ret); // JS array
ret = ccall('print_string', null, ['array'], [new Uint8Array([97, 114, 114, 45, 97, 121, 0])]); out(typeof ret); // typed array
ret = ccall('multi', 'number', ['number', 'number', 'number', 'string'], [2, 1.4, 3, 'more']); out([typeof ret, ret].join(','));
var p = ccall('malloc', 'pointer', ['number'], [4]);
setValue(p, 650, 'i32');
ret = ccall('pointer', 'pointer', ['pointer'], [p]); out([typeof ret, getValue(ret, 'i32')].join(','));
out('*');
// part 2: cwrap
var noThirdParam = Module['cwrap']('get_int', 'number');
out(noThirdParam());
var multi = Module['cwrap']('multi', 'number', ['number', 'number', 'number', 'string']);
out(multi(2, 1.4, 3, 'atr'));
out(multi(8, 5.4, 4, 'bret'));
out('*');
// part 3: avoid stack explosion and check it's restored correctly
for (var i = 0; i < TOTAL_STACK/60; i++) {
ccall('multi', 'number', ['number', 'number', 'number', 'string'], [0, 0, 0, '123456789012345678901234567890123456789012345678901234567890']);
}
out('stack is ok.');
ccall('call_ccall_again', null);
''')
    self.emcc_args += ['--post-js', 'post.js']
    self.set_setting('EXPORTED_FUNCTIONS', ['_get_int', '_get_float', '_get_bool', '_get_string', '_print_int', '_print_float', '_print_bool', '_print_string', '_multi', '_pointer', '_call_ccall_again', '_malloc'])
    self.do_core_test('test_ccall.cpp')

    # run again under closure, which must keep ccall/cwrap working
    if self.maybe_closure():
        self.do_core_test('test_ccall.cpp')
def test_EXPORTED_RUNTIME_METHODS(self):
    """Runtime methods (dynCall etc.) are usable internally, and can
    also be exported onto the Module object."""
    self.set_setting('DEFAULT_LIBRARY_FUNCS_TO_INCLUDE', ['$dynCall'])
    self.do_core_test('EXPORTED_RUNTIME_METHODS.c')
    # test dyncall (and other runtime methods in support.js) can be exported
    self.emcc_args += ['-DEXPORTED']
    self.set_setting('EXPORTED_RUNTIME_METHODS', ['dynCall', 'addFunction', 'lengthBytesUTF8', 'getTempRet0', 'setTempRet0'])
    self.do_core_test('EXPORTED_RUNTIME_METHODS.c')
@parameterized({
    '': [],
    'minimal_runtime': ['-s', 'MINIMAL_RUNTIME=1']
})
def test_dyncall_specific(self, *args):
    """Check the different ways a specific dynCall_sig can be reached:
    directly, via the DYNCALLS library, and via runtime exports."""
    cases = [
        ('DIRECT', []),
        ('DYNAMIC_SIG', ['-s', 'DYNCALLS=1', '-s', 'DEFAULT_LIBRARY_FUNCS_TO_INCLUDE=$dynCall']),
    ]
    if 'MINIMAL_RUNTIME=1' in args:
        self.emcc_args += ['--pre-js', test_file('minimal_runtime_exit_handling.js')]
    else:
        # NOTE(review): these extra cases appear to need the full runtime
        # (they rely on EXPORTED_RUNTIME_METHODS) — confirm
        cases += [
            ('EXPORTED', []),
            ('EXPORTED_DYNAMIC_SIG', ['-s', 'DYNCALLS=1', '-s', 'DEFAULT_LIBRARY_FUNCS_TO_INCLUDE=$dynCall', '-s', 'EXPORTED_RUNTIME_METHODS=dynCall']),
            ('FROM_OUTSIDE', ['-s', 'EXPORTED_RUNTIME_METHODS=dynCall_iiji'])
        ]

    for which, extra_args in cases:
        print(str(args) + ' ' + which)
        self.do_core_test('dyncall_specific.c', emcc_args=['-D' + which] + list(args) + extra_args)
def test_getValue_setValue(self):
    """getValue/setValue: direct (non-Module) use works unexported,
    unexported Module use fails with a clear assertion, and exporting
    them makes Module use work."""
    # these used to be exported, but no longer are by default
    def test(output_prefix='', args=[], assert_returncode=0):
        # NOTE(review): mutable default `args=[]` — harmless here since
        # the list is never mutated, but a None default would be safer
        src = test_file('core/getValue_setValue.cpp')
        expected = test_file('core/getValue_setValue' + output_prefix + '.out')
        self.do_run_from_file(src, expected, assert_returncode=assert_returncode, emcc_args=args)

    # see that direct usage (not on module) works. we don't export, but the use
    # keeps it alive through JSDCE
    test(args=['-DDIRECT'])
    # with assertions on, unexported use produces a helpful error
    self.set_setting('EXPORTED_RUNTIME_METHODS', [])
    self.set_setting('ASSERTIONS')
    test('_assert', assert_returncode=NON_ZERO)
    self.set_setting('ASSERTIONS', 0)
    # exporting them makes Module.getValue/setValue work
    self.set_setting('EXPORTED_RUNTIME_METHODS', ['getValue', 'setValue'])
    test()
@parameterized({
    '': ([],),
    '_files': (['-DUSE_FILES'],)
})
def test_FS_exports(self, extra_args):
    """FS runtime exports: direct use works, unexported Module use
    fails with a clear assertion, and exporting FS_createDataFile
    makes Module use work.

    Fix: the inner helper used a mutable default argument (`args=[]`)
    that was extended in place with `args += extra_args`, polluting
    the shared default list across calls.
    """
    def test(output_prefix='', args=None, assert_returncode=0):
        # build a fresh list every call instead of mutating a default
        args = list(args or []) + extra_args
        print(args)
        self.do_runf(test_file('core/FS_exports.cpp'),
                     (read_file(test_file('core/FS_exports' + output_prefix + '.out')),
                      read_file(test_file('core/FS_exports' + output_prefix + '_2.out'))),
                     assert_returncode=assert_returncode, emcc_args=args)

    # FORCE_FILESYSTEM keeps it alive through JSDCE
    test(args=['-DDIRECT', '-s', 'FORCE_FILESYSTEM'])

    # see that with assertions, we get a nice error message
    self.set_setting('EXPORTED_RUNTIME_METHODS', [])
    self.set_setting('ASSERTIONS')
    test('_assert', assert_returncode=NON_ZERO)
    self.set_setting('ASSERTIONS', 0)

    # see that when we export them, things work on the module
    self.set_setting('EXPORTED_RUNTIME_METHODS', ['FS_createDataFile'])
    test(args=['-s', 'FORCE_FILESYSTEM'])
def test_legacy_exported_runtime_numbers(self):
    """Legacy runtime numbers (e.g. ALLOC_STACK): direct use works,
    unexported Module use fails under assertions, exported use works."""
    # these used to be exported, but no longer are by default
    def test(output_prefix='', args=[], assert_returncode=0):
        # save and restore emcc_args so each sub-run starts clean
        old = self.emcc_args.copy()
        self.emcc_args += args
        src = test_file('core/legacy_exported_runtime_numbers.cpp')
        expected = test_file('core/legacy_exported_runtime_numbers%s.out' % output_prefix)
        self.do_run_from_file(src, expected, assert_returncode=assert_returncode)
        self.emcc_args = old

    # see that direct usage (not on module) works. we don't export, but the use
    # keeps it alive through JSDCE
    test(args=['-DDIRECT'])
    # with assertions, unexported use produces a helpful error
    self.set_setting('EXPORTED_RUNTIME_METHODS', [])
    self.set_setting('ASSERTIONS')
    test('_assert', assert_returncode=NON_ZERO)
    self.set_setting('ASSERTIONS', 0)
    # exported use works on the Module object
    self.set_setting('EXPORTED_RUNTIME_METHODS', ['ALLOC_STACK'])
    test()
def test_response_file(self):
    """@file response files: compiler args are read from the file, and
    a missing response file yields a clear error."""
    response_data = '-o %s/response_file.js %s' % (self.get_dir(), test_file('hello_world.cpp'))
    # double the backslashes so Windows paths survive response-file parsing
    create_file('rsp_file', response_data.replace('\\', '\\\\'))
    self.run_process([EMCC, "@rsp_file"] + self.get_emcc_args())
    self.do_run('response_file.js', 'hello, world', no_build=True)

    self.assertContained('response file not found: foo.txt', self.expect_fail([EMCC, '@foo.txt']))
def test_linker_response_file(self):
    """-Wl,@file response files are forwarded to the linker."""
    objfile = 'response_file.o'
    self.run_process([EMCC, '-c', test_file('hello_world.cpp'), '-o', objfile] + self.get_emcc_args())
    # This should expand into -Wl,--start-group <objfile> -Wl,--end-group
    response_data = '--start-group ' + objfile + ' --end-group'
    create_file('rsp_file', response_data.replace('\\', '\\\\'))
    self.run_process([EMCC, "-Wl,@rsp_file", '-o', 'response_file.o.js'] + self.get_emcc_args())
    self.do_run('response_file.o.js', 'hello, world', no_build=True)
def test_exported_response(self):
    """EXPORTED_FUNCTIONS can be read from an @response file.

    Fix: replaced the final bare `assert` with `self.assertContained`,
    which reports a useful diff on failure and matches the sibling
    test_large_exported_response.
    """
    src = r'''
#include <stdio.h>
#include <stdlib.h>
#include <emscripten.h>
extern "C" {
int other_function() { return 5; }
}
int main() {
int x = EM_ASM_INT({ return Module._other_function() });
emscripten_run_script_string(""); // Add a reference to a symbol that exists in src/deps_info.json to uncover issue #2836 in the test suite.
printf("waka %d!\n", x);
return 0;
}
'''
    create_file('exps', '["_main","_other_function"]')

    self.set_setting('EXPORTED_FUNCTIONS', '@exps')
    self.do_run(src, '''waka 5!''')
    self.assertContained('other_function', read_file('src.js'))
def test_large_exported_response(self):
    """A very large (5000-entry) EXPORTED_FUNCTIONS response file works.

    Fix: replaced the manual `count`/`while` counter loop with the
    idiomatic `for i in range(...)`.
    """
    src = r'''
#include <stdio.h>
#include <stdlib.h>
#include <emscripten.h>
extern "C" {
'''
    js_funcs = []
    num_exports = 5000
    for i in range(num_exports):
        src += 'int exported_func_from_response_file_%d () { return %d;}\n' % (i, i)
        js_funcs.append('_exported_func_from_response_file_%d' % i)
    src += r'''
}
int main() {
int x = EM_ASM_INT({ return Module._exported_func_from_response_file_4999() });
emscripten_run_script_string(""); // Add a reference to a symbol that exists in src/deps_info.json to uncover issue #2836 in the test suite.
printf("waka %d!\n", x);
return 0;
}
'''
    js_funcs.append('_main')
    create_file('large_exported_response.json', json.dumps(js_funcs))

    self.set_setting('EXPORTED_FUNCTIONS', '@large_exported_response.json')
    self.do_run(src, 'waka 4999!')
    self.assertContained('_exported_func_from_response_file_1', read_file('src.js'))
def test_add_function(self):
    """addFunction: basic use, failure without reserved slots or table
    growth, and success with ALLOW_TABLE_GROWTH."""
    self.set_setting('INVOKE_RUN', 0)
    self.set_setting('WASM_ASYNC_COMPILATION', 0)
    self.set_setting('RESERVED_FUNCTION_POINTERS')
    self.set_setting('EXPORTED_RUNTIME_METHODS', ['callMain'])
    src = test_file('interop/test_add_function.cpp')
    post_js = test_file('interop/test_add_function_post.js')
    self.emcc_args += ['--post-js', post_js]

    print('basics')
    self.do_run_in_out_file_test('interop/test_add_function.cpp')

    print('with RESERVED_FUNCTION_POINTERS=0')
    self.set_setting('RESERVED_FUNCTION_POINTERS', 0)
    expected = 'Unable to grow wasm table'
    if self.is_wasm2js():
        # emulation code. when ASSERTIONS are enabled we show a clear message, but
        # in optimized builds we don't waste code size on that, and the JS engine
        # reports the raw failure instead
        expected = 'wasmTable.grow is not a function'
    self.do_runf(src, expected, assert_returncode=NON_ZERO)

    print('- with table growth')
    self.set_setting('ALLOW_TABLE_GROWTH')
    self.emcc_args += ['-DGROWTH']
    # enable assertions to check the table behaves as expected
    self.set_setting('ASSERTIONS', 2)
    self.do_run_in_out_file_test('interop/test_add_function.cpp', interleaved_output=False)
def test_getFuncWrapper_sig_alias(self):
    """getFuncWrapper resolves two functions whose signatures alias
    ('vi' and 'vii') to the correct targets."""
    self.set_setting('DEFAULT_LIBRARY_FUNCS_TO_INCLUDE', ['$getFuncWrapper'])
    src = r'''
#include <stdio.h>
#include <emscripten.h>
void func1(int a) {
printf("func1\n");
}
void func2(int a, int b) {
printf("func2\n");
}
int main() {
EM_ASM({
getFuncWrapper($0, 'vi')(0);
getFuncWrapper($1, 'vii')(0, 0);
}, func1, func2);
return 0;
}
'''
    self.do_run(src, 'func1\nfunc2\n')
def test_emulate_function_pointer_casts(self):
    """EMULATE_FUNCTION_POINTER_CASTS lets calls through mismatched
    function-pointer types succeed."""
    self.set_setting('EXIT_RUNTIME', 0)
    self.set_setting('EMULATE_FUNCTION_POINTER_CASTS')
    self.do_core_test('test_emulate_function_pointer_casts.cpp')
@no_wasm2js('TODO: nicely printed names in wasm2js')
@parameterized({
    'normal': ([],),
    'noexcept': (['-fno-exceptions'],)
})
def test_demangle_stacks(self, extra_args):
    """Stack traces show demangled C++ names when DEMANGLE_SUPPORT and
    assertions are on, and a hint to enable assertions otherwise."""
    self.emcc_args += extra_args
    self.set_setting('DEMANGLE_SUPPORT')
    self.set_setting('ASSERTIONS')
    # prevent binaryen from inlining the relevant function away
    self.set_setting('BINARYEN_EXTRA_PASSES', '--one-caller-inline-max-function-size=1')
    self.emcc_args += ['--profiling-funcs']
    self.do_core_test('test_demangle_stacks.cpp', assert_returncode=NON_ZERO)

    print('without assertions, the stack is not printed, but a message suggesting assertions is')
    self.set_setting('ASSERTIONS', 0)
    self.do_core_test('test_demangle_stacks_noassert.cpp', assert_returncode=NON_ZERO)
def test_demangle_stacks_symbol_map(self):
    """With --emit-symbol-map, the minified name in a stack trace must
    map back to the original (mangled) symbol.

    Fix: the symbols file was read via `open(...).readlines()`, which
    leaks the file handle; use `read_file` (as the rest of this file
    does), which reads and closes.
    """
    # prevent binaryen from inlining the aborting function away
    self.set_setting('BINARYEN_EXTRA_PASSES', '--one-caller-inline-max-function-size=1')
    self.set_setting('DEMANGLE_SUPPORT')
    if '-O' not in str(self.emcc_args) or '-O0' in self.emcc_args or '-O1' in self.emcc_args or '-g' in self.emcc_args:
        self.skipTest("without opts, we don't emit a symbol map")
    self.emcc_args += ['--emit-symbol-map']
    self.do_runf(test_file('core/test_demangle_stacks.cpp'), 'Aborted', assert_returncode=NON_ZERO)
    # make sure the shortened name is the right one
    full_aborter = None
    short_aborter = None
    for line in read_file('test_demangle_stacks.js.symbols').splitlines():
        if ':' not in line:
            continue
        # split by the first ':' (wasm backend demangling may include more :'s later on)
        short, full = line.split(':', 1)
        if 'Aborter' in full:
            short_aborter = short
            full_aborter = full
    self.assertIsNotNone(full_aborter)
    self.assertIsNotNone(short_aborter)
    print('full:', full_aborter, 'short:', short_aborter)
    if config.SPIDERMONKEY_ENGINE and os.path.exists(config.SPIDERMONKEY_ENGINE[0]):
        output = self.run_js('test_demangle_stacks.js', engine=config.SPIDERMONKEY_ENGINE, assert_returncode=NON_ZERO)
        # the name can appear in several stack-trace formats; accept any of them
        if ' ' + short_aborter + ' ' not in output and ' ' + full_aborter + ' ' not in output:
            if '\n' + short_aborter + ' ' not in output and '\n' + full_aborter + ' ' not in output and 'wasm-function[' + short_aborter + ']' not in output:
                if '\n' + short_aborter + '@' not in output and '\n' + full_aborter + '@' not in output:
                    self.assertContained(' ' + short_aborter + ' ' + '\n' + ' ' + full_aborter + ' ', output)
@no_safe_heap('tracing from sbrk into JS leads to an infinite loop')
def test_tracing(self):
    """Build with the --tracing API enabled and run its core test."""
    self.emcc_args += ['--tracing']
    self.do_core_test('test_tracing.c')
@disabled('https://github.com/emscripten-core/emscripten/issues/9527')
def test_eval_ctors(self):
    """EVAL_CTORS: static constructors that only write memory should be
    evaluated at compile time, shrinking code size while growing the
    static memory data (currently disabled, see linked issue)."""
    if '-O2' not in str(self.emcc_args) or '-O1' in str(self.emcc_args):
        self.skipTest('need js optimizations')
    if not self.is_wasm():
        self.skipTest('this test uses wasm binaries')

    print('leave printf in ctor')
    self.set_setting('EVAL_CTORS')
    # a ctor with side effects (printf) must NOT be evaluated away
    self.do_run(r'''
#include <stdio.h>
struct C {
C() { printf("constructing!\n"); } // don't remove this!
};
C c;
int main() {}
''', "constructing!\n")

    def get_code_size():
        if self.is_wasm():
            # Use number of functions as a proxy for code size
            return self.count_wasm_contents('hello_libcxx.wasm', 'funcs')
        else:
            return os.path.getsize('hello_libcxx.js')

    def get_mem_size():
        if self.is_wasm():
            # Use the size of the memory-data segments as the metric
            return self.count_wasm_contents('hello_libcxx.wasm', 'memory-data')
        if self.uses_memory_init_file():
            return os.path.getsize('hello_libcxx.js.mem')
        # otherwise we ignore memory size
        return 0

    def do_test(test):
        # build with and without EVAL_CTORS and compare: code should
        # shrink, static memory data should grow
        self.set_setting('EVAL_CTORS')
        test()
        ec_code_size = get_code_size()
        ec_mem_size = get_mem_size()
        self.clear_setting('EVAL_CTORS')
        test()
        code_size = get_code_size()
        mem_size = get_mem_size()
        if mem_size:
            print('mem: ', mem_size, '=>', ec_mem_size)
            self.assertGreater(ec_mem_size, mem_size)
        print('code:', code_size, '=>', ec_code_size)
        self.assertLess(ec_code_size, code_size)

    print('remove ctor of just assigns to memory')

    def test1():
        self.do_run(r'''
#include <stdio.h>
struct C {
int x;
C() {
volatile int y = 10;
y++;
x = y;
}
};
C c;
int main() {
printf("x: %d\n", c.x);
}
''', "x: 11\n")

    do_test(test1)

    # The wasm backend currently exports a single initalizer so the ctor
    # evaluation is all or nothing. As well as that it doesn't currently
    print('libcxx - remove 2 ctors from iostream code')
    output = 'hello, world!'

    def test2():
        self.do_runf(test_file('hello_libcxx.cpp'), output)

    do_test(test2)

    print('assertions too')
    self.set_setting('ASSERTIONS')
    self.do_runf(test_file('hello_libcxx.cpp'), output)
    self.set_setting('ASSERTIONS', 0)

    print('remove just some, leave others')

    def test3():
        self.do_run(r'''
#include <iostream>
#include <string>
class std_string {
public:
std_string(): ptr(nullptr) { std::cout << "std_string()\n"; }
std_string(const char* s): ptr(s) { std::cout << "std_string(const char* s)" << std::endl; }
std_string(const std_string& s): ptr(s.ptr) { std::cout << "std_string(const std_string& s) " << std::endl; }
const char* data() const { return ptr; }
private:
const char* ptr;
};
const std_string txtTestString("212121\0");
const std::string s2text("someweirdtext");
int main() {
std::cout << s2text << std::endl;
std::cout << txtTestString.data() << std::endl;
std::cout << txtTestString.data() << std::endl;
return 0;
}
''', '''std_string(const char* s)
someweirdtext
212121
212121
''')  # noqa

    do_test(test3)
def test_embind(self):
    """Basic embind val usage: call a global JS function two ways."""
    self.emcc_args += ['--bind']

    create_file('test_embind.cpp', r'''
#include <stdio.h>
#include <emscripten/val.h>
using namespace emscripten;
int main() {
val Math = val::global("Math");
// two ways to call Math.abs
printf("abs(-10): %d\n", Math.call<int>("abs", -10));
printf("abs(-11): %d\n", Math["abs"](-11).as<int>());
return 0;
}
''')
    self.do_runf('test_embind.cpp', 'abs(-10): 10\nabs(-11): 11')
def test_embind_2(self):
    """An EMSCRIPTEN_BINDINGS function is callable from JS post-js."""
    self.emcc_args += ['--bind', '--post-js', 'post.js']
    create_file('post.js', '''
function printLerp() {
out('lerp ' + Module.lerp(100, 200, 66) + '.');
}
''')
    create_file('test_embind_2.cpp', r'''
#include <stdio.h>
#include <emscripten.h>
#include <emscripten/bind.h>
using namespace emscripten;
int lerp(int a, int b, int t) {
return (100 - t) * a + t * b;
}
EMSCRIPTEN_BINDINGS(my_module) {
function("lerp", &lerp);
}
int main(int argc, char **argv) {
EM_ASM(printLerp());
return 0;
}
''')
    self.do_runf('test_embind_2.cpp', 'lerp 166')
def test_embind_3(self):
    """Calling a bound function with an unbound raw-pointer type gives
    a descriptive UnboundTypeError."""
    self.emcc_args += ['--bind', '--post-js', 'post.js']
    create_file('post.js', '''
function ready() {
try {
Module.compute(new Uint8Array([1,2,3]));
} catch(e) {
out(e);
}
}
''')
    create_file('test_embind_3.cpp', r'''
#include <emscripten.h>
#include <emscripten/bind.h>
using namespace emscripten;
int compute(int array[]) {
return 0;
}
EMSCRIPTEN_BINDINGS(my_module) {
function("compute", &compute, allow_raw_pointers());
}
int main(int argc, char **argv) {
EM_ASM(ready());
return 0;
}
''')
    self.do_runf('test_embind_3.cpp', 'UnboundTypeError: Cannot call compute due to unbound types: Pi')
def test_embind_4(self):
    """typed_memory_view: JS sees a live view of a C++ buffer."""
    self.emcc_args += ['--bind', '--post-js', 'post.js']
    create_file('post.js', '''
function printFirstElement() {
out(Module.getBufferView()[0]);
}
''')
    create_file('test_embind_4.cpp', r'''
#include <emscripten.h>
#include <emscripten/bind.h>
#include <emscripten/val.h>
#include <stdio.h>
using namespace emscripten;
const size_t kBufferSize = 1024;
double buffer[kBufferSize];
val getBufferView(void) {
val v = val(typed_memory_view(kBufferSize, buffer));
return v;
}
EMSCRIPTEN_BINDINGS(my_module) {
function("getBufferView", &getBufferView);
}
int main(int argc, char **argv) {
buffer[0] = 107;
EM_ASM(printFirstElement());
return 0;
}
''')
    self.do_runf('test_embind_4.cpp', '107')
def test_embind_5(self):
    """Run the embind core test with a normal exiting runtime."""
    self.emcc_args += ['--bind']
    self.set_setting('EXIT_RUNTIME')
    self.do_core_test('test_embind_5.cpp')
def test_embind_custom_marshal(self):
    """Custom embind marshalling hooks defined in a pre-js."""
    self.emcc_args += ['--bind', '--pre-js', test_file('embind/test_custom_marshal.js')]
    self.do_run_in_out_file_test('embind/test_custom_marshal.cpp', assert_identical=True)
def test_embind_float_constants(self):
    """Floating-point constants bound via embind round-trip correctly."""
    self.emcc_args += ['--bind']
    self.do_run_in_out_file_test('embind/test_float_constants.cpp')
def test_embind_negative_constants(self):
    """Negative constants bound via embind round-trip correctly."""
    self.emcc_args += ['--bind']
    self.do_run_in_out_file_test('embind/test_negative_constants.cpp')
@also_with_wasm_bigint
def test_embind_unsigned(self):
    """Unsigned integer types bound via embind round-trip correctly."""
    self.emcc_args += ['--bind']
    self.do_run_in_out_file_test('embind/test_unsigned.cpp')
def test_embind_val(self):
    """Run the emscripten::val API test."""
    self.emcc_args += ['--bind']
    self.do_run_in_out_file_test('embind/test_val.cpp')
@no_wasm2js('wasm_bigint')
def test_embind_i64_val(self):
    """emscripten::val with int64 values (requires WASM_BIGINT)."""
    self.set_setting('WASM_BIGINT')
    self.emcc_args += ['--bind']
    # node needs this flag for wasm<->JS BigInt interop
    self.node_args += ['--experimental-wasm-bigint']
    self.do_run_in_out_file_test('embind/test_i64_val.cpp', assert_identical=True)
@no_wasm2js('wasm_bigint')
def test_embind_i64_binding(self):
    """embind bindings taking/returning int64 (requires WASM_BIGINT)."""
    self.set_setting('WASM_BIGINT')
    self.emcc_args += ['--bind']
    # node needs this flag for wasm<->JS BigInt interop
    self.node_args += ['--experimental-wasm-bigint']
    self.do_run_in_out_file_test('embind/test_i64_binding.cpp', assert_identical=True)
def test_embind_no_rtti(self):
    """embind works with RTTI disabled when unbound type names are
    turned off."""
    create_file('main.cpp', r'''
#include <emscripten.h>
#include <emscripten/bind.h>
#include <emscripten/val.h>
#include <stdio.h>
EM_JS(void, calltest, (), {
console.log("dotest returned: " + Module.dotest());
});
int main(int argc, char** argv){
printf("418\n");
calltest();
return 0;
}
int test() {
return 42;
}
EMSCRIPTEN_BINDINGS(my_module) {
emscripten::function("dotest", &test);
}
''')
    self.emcc_args += ['--bind', '-fno-rtti', '-DEMSCRIPTEN_HAS_UNBOUND_TYPE_NAMES=0']
    self.do_runf('main.cpp', '418\ndotest returned: 42\n')
def test_embind_polymorphic_class_no_rtti(self):
    """Polymorphic classes bound via embind work without RTTI."""
    self.emcc_args += ['--bind', '-fno-rtti', '-DEMSCRIPTEN_HAS_UNBOUND_TYPE_NAMES=0']
    self.do_core_test('test_embind_polymorphic_class_no_rtti.cpp')
def test_embind_no_rtti_followed_by_rtti(self):
    """-fno-rtti followed by -frtti on the command line: the later flag
    wins and embind still works."""
    src = r'''
#include <emscripten.h>
#include <emscripten/bind.h>
#include <emscripten/val.h>
#include <stdio.h>
EM_JS(void, calltest, (), {
console.log("dotest returned: " + Module.dotest());
});
int main(int argc, char** argv){
printf("418\n");
calltest();
return 0;
}
int test() {
return 42;
}
EMSCRIPTEN_BINDINGS(my_module) {
emscripten::function("dotest", &test);
}
'''
    self.emcc_args += ['--bind', '-fno-rtti', '-frtti']
    self.do_run(src, '418\ndotest returned: 42\n')
@parameterized({
    '': (None, False),
    'all': ('ALL', False),
    'fast': ('FAST', False),
    'default': ('DEFAULT', False),
    'all_growth': ('ALL', True),
})
def test_webidl(self, mode, allow_memory_growth):
    """Run the WebIDL binder on webidl/test.idl and compare the bound
    library's output for each IDL_CHECKS mode, with and without memory
    growth."""
    self.uses_es6 = True
    self.set_setting('WASM_ASYNC_COMPILATION', 0)
    if self.maybe_closure():
        # avoid closure minified names competing with our test code in the global name space
        self.set_setting('MODULARIZE')
    else:
        self.set_setting('WASM_ASYNC_COMPILATION', 0)

    # Force IDL checks mode
    with env_modify({'IDL_CHECKS': mode}):
        self.run_process([WEBIDL_BINDER, test_file('webidl/test.idl'), 'glue'])
    self.assertExists('glue.cpp')
    self.assertExists('glue.js')

    post_js = '\n\n'
    if self.get_setting('MODULARIZE'):
        post_js += 'var TheModule = Module();\n'
    else:
        post_js += 'var TheModule = Module;\n'
    post_js += '\n\n'
    if allow_memory_growth:
        post_js += "var isMemoryGrowthAllowed = true;\n"
    else:
        post_js += "var isMemoryGrowthAllowed = false;\n"
    post_js += read_file(test_file('webidl/post.js'))
    post_js += '\n\n'
    create_file('extern-post.js', post_js)

    # Export things on "TheModule". This matches the typical use pattern of the bound library
    # being used as Box2D.* or Ammo.*, and we cannot rely on "Module" being always present (closure may remove it).
    self.emcc_args += ['-s', 'EXPORTED_FUNCTIONS=_malloc,_free', '--post-js=glue.js', '--extern-post-js=extern-post.js']
    if allow_memory_growth:
        self.set_setting('ALLOW_MEMORY_GROWTH')

    if not mode:
        mode = 'DEFAULT'
    expected = test_file('webidl/output_%s.txt' % mode)
    self.do_run_from_file(test_file('webidl/test.cpp'), expected)
### Tests for tools
@no_wasm2js('TODO: source maps in wasm2js')
@parameterized({
    '': ([],),
    'minimal_runtime': (['-s', 'MINIMAL_RUNTIME'],),
})
def test_source_map(self, args):
    """Build with -gsource-map and verify the emitted source map is
    well formed and maps all meaningful source lines.

    Fixes: (1) the `re.sub` line was truncated mid-regex in the
    original text and is restored; (2) `hasattr(data, 'file')` /
    `hasattr(data, 'sourcesContent')` on the dict from json.load is
    always False (dicts expose keys, not attributes), so those checks
    never ran — use `in` instead; (3) `json.load(open(...))` leaked
    the file handle — use a `with` block.
    """
    if '-g' not in self.emcc_args:
        self.emcc_args.append('-g')
    self.emcc_args += args
    src = '''
#include <stdio.h>
#include <assert.h>
__attribute__((noinline)) int foo() {
printf("hi"); // line 6
return 1; // line 7
}
int main() {
printf("%d", foo()); // line 11
return 0; // line 12
}
'''
    create_file('src.cpp', src)

    out_filename = 'a.out.js'
    wasm_filename = 'a.out.wasm'
    no_maps_filename = 'no-maps.out.js'

    assert '-gsource-map' not in self.emcc_args
    self.emcc('src.cpp', self.get_emcc_args(), out_filename)
    # the file name may find its way into the generated code, so make sure we
    # can do an apples-to-apples comparison by compiling with the same file name
    shutil.move(out_filename, no_maps_filename)
    no_maps_file = read_file(no_maps_filename)
    # strip //@line and //# sourceMappingURL style comments
    no_maps_file = re.sub(' *//[@#].*\n', '', no_maps_file)
    self.emcc_args.append('-gsource-map')

    self.emcc(os.path.abspath('src.cpp'),
              self.get_emcc_args(),
              out_filename,
              stderr=PIPE)
    map_referent = out_filename if not self.is_wasm() else wasm_filename
    # after removing the @line and @sourceMappingURL comments, the build
    # result should be identical to the non-source-mapped debug version.
    # this is worth checking because the parser AST swaps strings for token
    # objects when generating source maps, so we want to make sure the
    # optimizer can deal with both types.
    map_filename = map_referent + '.map'

    with open(map_filename) as f:
        data = json.load(f)
    if 'file' in data:
        # the file attribute is optional, but if it is present it needs to refer
        # to the output file.
        self.assertPathsIdentical(map_referent, data['file'])
    self.assertGreater(len(data['sources']), 1)
    self.assertPathsIdentical('src.cpp', data['sources'][0])
    if 'sourcesContent' in data:
        # the sourcesContent attribute is optional, but if it is present it
        # needs to contain valid source text.
        self.assertTextDataIdentical(src, data['sourcesContent'][0])
    mappings = json.loads(self.run_js(
        path_from_root('tests/sourcemap2json.js'),
        args=[map_filename]))
    seen_lines = set()
    for m in mappings:
        if m['source'] == 'src.cpp':
            seen_lines.add(m['originalLine'])
    # ensure that all the 'meaningful' lines in the original code get mapped
    # when optimizing, the binaryen optimizer may remove some of them (by inlining, etc.)
    if self.is_optimizing():
        self.assertTrue(seen_lines.issuperset([11, 12]), seen_lines)
    else:
        self.assertTrue(seen_lines.issuperset([6, 7, 11, 12]), seen_lines)
@no_wasm2js('TODO: source maps in wasm2js')
def test_dwarf(self):
    """Build with -g and verify the DWARF sections in the wasm: the
    expected sections exist, the three out_to_js calls are mapped, and
    (in optimized builds) the DWARF addresses fall inside the wat
    ranges of the corresponding calls.

    Fix: the whitespace-collapsing loop was
    `while ' ' in line: line = line.replace(' ', ' ')`, which replaces
    a single space with itself and therefore never terminates; the
    intended collapse of runs of spaces is `'  '` -> `' '`.
    """
    self.emcc_args.append('-g')

    create_file('src.cpp', '''
#include <emscripten.h>
EM_JS(int, out_to_js, (int x), {})
void foo() {
out_to_js(0); // line 5
out_to_js(1); // line 6
out_to_js(2); // line 7
// A silly possible recursion to avoid binaryen doing any inlining.
if (out_to_js(3)) foo();
}
int main() {
foo();
}
''')

    js_filename = 'a.out.js'
    wasm_filename = 'a.out.wasm'

    self.emcc('src.cpp', self.get_emcc_args(), js_filename)

    out = self.run_process([shared.LLVM_DWARFDUMP, wasm_filename, '-all'], stdout=PIPE).stdout

    # parse the sections
    sections = {}
    curr_section_name = ''
    curr_section_body = ''

    def add_section():
        if curr_section_name:
            sections[curr_section_name] = curr_section_body

    for line in out.splitlines():
        if ' contents:' in line:
            # a new section, a line like ".debug_str contents:"
            add_section()
            curr_section_name = line.split(' ')[0]
            curr_section_body = ''
        else:
            # possibly a line in a section
            if curr_section_name:
                curr_section_body += line + '\n'
    add_section()

    # make sure the right sections exist
    self.assertIn('.debug_abbrev', sections)
    self.assertIn('.debug_info', sections)
    self.assertIn('.debug_line', sections)
    self.assertIn('.debug_str', sections)
    self.assertIn('.debug_ranges', sections)

    # verify some content in the sections
    self.assertIn('"src.cpp"', sections['.debug_info'])

    # the line section looks like this:
    # Address            Line   Column File   ISA Discriminator Flags
    # ------------------ ------ ------ ------ --- ------------- -------------
    # 0x000000000000000b      5      0      3   0             0  is_stmt
    src_to_addr = {}
    found_src_cpp = False
    for line in sections['.debug_line'].splitlines():
        if 'name: "src.cpp"' in line:
            found_src_cpp = True
        if not found_src_cpp:
            continue
        if 'debug_line' in line:
            break
        if line.startswith('0x'):
            # collapse runs of spaces so split() yields clean columns
            while '  ' in line:
                line = line.replace('  ', ' ')
            addr, line, col = line.split(' ')[:3]
            key = (int(line), int(col))
            src_to_addr.setdefault(key, []).append(addr)

    # each of the calls must remain in the binary, and be mapped
    self.assertIn((5, 9), src_to_addr)
    self.assertIn((6, 9), src_to_addr)
    self.assertIn((7, 9), src_to_addr)

    def get_dwarf_addr(line, col):
        addrs = src_to_addr[(line, col)]
        # we assume the simple calls have one address
        self.assertEqual(len(addrs), 1)
        return int(addrs[0], 0)

    # the lines must appear in sequence (as calls to JS, the optimizer cannot
    # reorder them)
    self.assertLess(get_dwarf_addr(5, 9), get_dwarf_addr(6, 9))
    self.assertLess(get_dwarf_addr(6, 9), get_dwarf_addr(7, 9))

    # Get the wat, printing with -g which has binary offsets
    wat = self.run_process([Path(building.get_binaryen_bin(), 'wasm-opt'),
                            wasm_filename, '-g', '--print'], stdout=PIPE).stdout

    # We expect to see a pattern like this in optimized builds (there isn't
    # anything else):
    #
    #  ;; code offset: 0x?
    #  (drop
    #   ;; code offset: 0x?
    #   (call $out_to_js
    #    ;; code offset: 0x?
    #    (local.get ?) or (i32.const ?)
    #   )
    #  )
    #
    # In the stacky stream of instructions form, it is
    #
    #  local.get or i32.const
    #  call $out_to_js
    #  drop
    #
    # However, in an unoptimized build the constant may be assigned earlier in
    # some other manner, so stop here.
    if not self.is_optimizing():
        return

    # get_wat_addr gets the address of one of the 3 interesting calls, by its
    # index (0,1,2).
    def get_wat_addr(call_index):
        # find the call_index-th call
        call_loc = -1
        for i in range(call_index + 1):
            call_loc = wat.find('call $out_to_js', call_loc + 1)
            assert call_loc > 0
        # the call begins with the local.get/i32.const printed below it, which is
        # the first instruction in the stream, so it has the lowest address
        start_addr_loc = wat.find('0x', call_loc)
        assert start_addr_loc > 0
        start_addr_loc_end = wat.find('\n', start_addr_loc)
        start_addr = int(wat[start_addr_loc:start_addr_loc_end], 0)
        # the call ends with the drop, which is the last in the stream, at the
        # highest address
        end_addr_loc = wat.rfind('drop', 0, call_loc)
        assert end_addr_loc > 0
        end_addr_loc = wat.rfind('0x', 0, end_addr_loc)
        assert end_addr_loc > 0
        end_addr_loc_end = wat.find('\n', end_addr_loc)
        assert end_addr_loc_end > 0
        end_addr = int(wat[end_addr_loc:end_addr_loc_end], 0)
        return (start_addr, end_addr)

    # match up the DWARF and the wat
    for i in range(3):
        dwarf_addr = get_dwarf_addr(5 + i, 9)
        start_wat_addr, end_wat_addr = get_wat_addr(i)
        # the dwarf may match any of the 3 instructions that form the stream of
        # of instructions implementing the call in the source code, in theory
        self.assertLessEqual(start_wat_addr, dwarf_addr)
        self.assertLessEqual(dwarf_addr, end_wat_addr)
def test_modularize_closure_pre(self):
    """MODULARIZE + closure + pre-js must work together.

    In that mode closure should not minify the Module object in a way
    that the pre-js cannot use it.
    """
    # The extern post-js instantiates the modularized factory, which is
    # what actually runs the program.
    create_file('post.js', 'var TheModule = Module();\n')
    self.emcc_args += [
        '--pre-js', test_file('core/modularize_closure_pre.js'),
        '--extern-post-js=post.js',
        '--closure=1',
        '-g1',
        '-s',
        'MODULARIZE=1',
    ]
    self.do_core_test('modularize_closure_pre.c')
@no_wasm2js('symbol names look different wasm2js backtraces')
def test_emscripten_log(self):
    """Exercise the emscripten_log() API, with and without closure."""
    self.banned_js_engines = [config.V8_ENGINE]  # v8 doesn't support console.log
    self.set_setting('DEMANGLE_SUPPORT')
    # emscripten_log emits stack traces with symbol names, which needs -g.
    if '-g' not in self.emcc_args:
        self.emcc_args.append('-g')
    self.emcc_args += ['-DRUN_FROM_JS_SHELL']
    self.do_run_in_out_file_test('emscripten_log/emscripten_log.cpp', interleaved_output=False)
    # Under closure the symbol names differ, so a separate expectation file is used.
    if self.maybe_closure():
        self.emcc_args += ['-g1']
        self.do_run_in_out_file_test('emscripten_log/emscripten_log_with_closure.cpp', interleaved_output=False)
def test_float_literals(self):
    """Float literal parsing/printing round-trips correctly."""
    self.do_run_in_out_file_test('test_float_literals.cpp')
def test_exit_status(self):
    """exit()/_exit()/_Exit() propagate their status code to the host.

    atexit handlers must run only for the normal exit() path, never for
    _exit()/_Exit().
    """
    self.set_setting('EXIT_RUNTIME')
    create_file('exit.c', r'''
      #include <stdio.h>
      #include <assert.h>
      #include <stdlib.h>
      #include <unistd.h>
      static void cleanup() {
    #ifndef NORMAL_EXIT
        assert(0 && "cleanup should only be called from normal exit()");
    #endif
        printf("cleanup\n");
      }
      int main() {
        atexit(cleanup); // this atexit should still be called
        printf("hello, world!\n");
        // Unusual exit status to make sure it's working!
    #ifdef CAPITAL_EXIT
        _Exit(118);
    #elif defined(UNDER_EXIT)
        _exit(118);
    #elif defined(NORMAL_EXIT)
        exit(118);
    #endif
      }
    ''')
    # The pre-js observes the exit status via Module.onExit.
    create_file('pre.js', '''
    Module.onExit = function() {
      out('I see exit status: ' + EXITSTATUS);
    }
    ''')
    self.emcc_args += ['--pre-js', 'pre.js']
    # Normal exit(): the atexit handler runs, so "cleanup" appears.
    print('.. exit')
    self.do_runf('exit.c', 'hello, world!\ncleanup\nI see exit status: 118', assert_returncode=118, emcc_args=['-DNORMAL_EXIT'])
    # _exit()/_Exit(): atexit handlers are skipped.
    print('.. _exit')
    self.do_runf('exit.c', 'hello, world!\nI see exit status: 118', assert_returncode=118, emcc_args=['-DUNDER_EXIT'])
    print('.. _Exit')
    self.do_runf('exit.c', 'hello, world!\nI see exit status: 118', assert_returncode=118, emcc_args=['-DCAPITAL_EXIT'])
def test_noexitruntime(self):
    """Setting noExitRuntime at runtime must suppress global destructors.

    The flag is set either in a global constructor (TEST_PRE=1) or at the
    top of main (TEST_PRE=0); in both cases ~Global() must never run.
    """
    template = r'''
      #include <emscripten.h>
      #include <stdio.h>
      static int testPre = TEST_PRE;
      struct Global {
        Global() {
          printf("in Global()\n");
          if (testPre) { EM_ASM(noExitRuntime = true;); }
        }
        ~Global() { printf("ERROR: in ~Global()\n"); }
      } global;
      int main() {
        if (!testPre) { EM_ASM(noExitRuntime = true;); }
        printf("in main()\n");
      }
    '''
    # Run both variants; expected output omits the destructor's ERROR line.
    for test_pre in ('0', '1'):
        self.do_run(template.replace('TEST_PRE', test_pre), 'in Global()\nin main()')
def test_minmax(self):
    """min/max handling of NaN behaves as expected."""
    self.do_runf(test_file('test_minmax.c'), 'NAN != NAN\nSuccess!')
def test_localeconv(self):
    """localeconv() returns sane locale data."""
    self.do_run_in_out_file_test('core/test_localeconv.c')
def test_newlocale(self):
    """newlocale() creates usable locale objects."""
    self.do_run_in_out_file_test('core/test_newlocale.c')
def test_setlocale(self):
    """setlocale() accepts/rejects locales as expected."""
    self.do_run_in_out_file_test('core/test_setlocale.c')
def test_vswprintf_utf8(self):
    """vswprintf handles UTF-8 data correctly."""
    self.do_run_in_out_file_test('vswprintf_utf8.c')
# needs setTimeout which only node has
@require_node
def test_async_hello(self):
    """Basic ASYNCIFY: emscripten_sleep suspends and resumes main()."""
    # needs to flush stdio streams
    self.set_setting('EXIT_RUNTIME')
    self.set_setting('ASYNCIFY')
    create_file('main.c', r'''
      #include <stdio.h>
      #include <emscripten.h>
      void f(void *p) {
        *(int*)p = 99;
        printf("!");
      }
      int main() {
        int i = 0;
        printf("Hello");
        emscripten_async_call(f, &i, 1);
        printf("World");
        emscripten_sleep(100);
        printf("%d\n", i);
      }
    ''')
    # The async callback runs during the sleep, so "!" and the updated
    # value 99 are printed after "World".
    self.do_runf('main.c', 'HelloWorld!99')
@require_node
def test_async_ccall_bad(self):
    """A synchronous ccall into an asyncified function must raise a clear error."""
    # check bad ccall use
    # needs to flush stdio streams
    self.set_setting('EXIT_RUNTIME')
    self.set_setting('ASYNCIFY')
    self.set_setting('ASSERTIONS')
    self.set_setting('INVOKE_RUN', 0)
    create_file('main.c', r'''
      #include <stdio.h>
      #include <emscripten.h>
      int main() {
        printf("Hello");
        emscripten_sleep(100);
        printf("World\n");
      }
    ''')
    # Call main synchronously (no async: true); the runtime should throw
    # because main suspends via emscripten_sleep.
    create_file('pre.js', '''
    Module['onRuntimeInitialized'] = function() {
      try {
        ccall('main', 'number', ['number', 'string'], [2, 'waka']);
        var never = true;
      } catch(e) {
        out(e);
        assert(!never);
      }
    };
    ''')
    self.emcc_args += ['--pre-js', 'pre.js']
    self.do_runf('main.c', 'The call to main is running asynchronously.')
@require_node
def test_async_ccall_good(self):
    """ccall with { async: true } may call into an asyncified function."""
    # check reasonable ccall use
    # needs to flush stdio streams
    self.set_setting('EXIT_RUNTIME')
    self.set_setting('ASYNCIFY')
    self.set_setting('ASSERTIONS')
    self.set_setting('INVOKE_RUN', 0)
    create_file('main.c', r'''
      #include <stdio.h>
      #include <emscripten.h>
      int main() {
        printf("Hello");
        emscripten_sleep(100);
        printf("World\n");
      }
    ''')
    create_file('pre.js', '''
    Module['onRuntimeInitialized'] = function() {
      ccall('main', null, ['number', 'string'], [2, 'waka'], { async: true });
    };
    ''')
    self.emcc_args += ['--pre-js', 'pre.js']
    self.do_runf('main.c', 'HelloWorld')
@parameterized({
    '': (False,),
    'exit_runtime': (True,),
})
def test_async_ccall_promise(self, exit_runtime):
    """Async ccall returns a promise whose value is the function's result.

    Runs with EXIT_RUNTIME both off and on (parameterized) to check that
    the keepalive push/pop around the promise chain works either way.
    """
    self.set_setting('ASYNCIFY')
    self.set_setting('ASSERTIONS')
    self.set_setting('INVOKE_RUN', 0)
    # Previously EXIT_RUNTIME was set unconditionally here and then
    # immediately overridden below; set it once, from the parameter.
    self.set_setting('EXIT_RUNTIME', exit_runtime)
    self.set_setting('EXPORTED_FUNCTIONS', ['_stringf', '_floatf'])
    create_file('main.c', r'''
      #include <stdio.h>
      #include <emscripten.h>
      const char* stringf(char* param) {
        emscripten_sleep(20);
        printf("stringf: %s", param);
        return "second";
      }
      double floatf() {
        emscripten_sleep(20);
        emscripten_sleep(20);
        return 6.4;
      }
    ''')
    # Chain two async ccalls; keepalive prevents the runtime from exiting
    # before the second promise resolves.
    create_file('pre.js', r'''
    Module['onRuntimeInitialized'] = function() {
      runtimeKeepalivePush();
      ccall('stringf', 'string', ['string'], ['first\n'], { async: true })
        .then(function(val) {
          console.log(val);
          ccall('floatf', 'number', null, null, { async: true }).then(function(arg) {
            console.log(arg);
            runtimeKeepalivePop();
            maybeExit();
          });
        });
    };
    ''')
    self.emcc_args += ['--pre-js', 'pre.js']
    self.do_runf('main.c', 'stringf: first\nsecond\n6.4')
def test_fibers_asyncify(self):
    """Fibers (coroutines) implemented on top of ASYNCIFY interleave correctly."""
    self.set_setting('ASYNCIFY')
    self.maybe_closure()
    self.do_runf(test_file('test_fibers.cpp'), '*leaf-0-100-1-101-1-102-2-103-3-104-5-105-8-106-13-107-21-108-34-109-*')
def test_asyncify_unused(self):
    """ASYNCIFY enabled but unused must not break a plain program."""
    # test a program not using asyncify, but the pref is set
    self.set_setting('ASYNCIFY')
    self.do_core_test('test_hello_world.c')
@parameterized({
    'normal': ([], True),
    'removelist_a': (['-s', 'ASYNCIFY_REMOVE=["foo(int, double)"]'], False),
    'removelist_b': (['-s', 'ASYNCIFY_REMOVE=["bar()"]'], True),
    'removelist_c': (['-s', 'ASYNCIFY_REMOVE=["baz()"]'], False),
    'onlylist_a': (['-s', 'ASYNCIFY_ONLY=["main","__original_main","foo(int, double)","baz()","c_baz","Structy::funcy()","bar()"]'], True),
    'onlylist_b': (['-s', 'ASYNCIFY_ONLY=["main","__original_main","foo(int, double)","baz()","c_baz","Structy::funcy()"]'], True),
    'onlylist_c': (['-s', 'ASYNCIFY_ONLY=["main","__original_main","foo(int, double)","baz()","c_baz"]'], False),
    'onlylist_d': (['-s', 'ASYNCIFY_ONLY=["foo(int, double)","baz()","c_baz","Structy::funcy()"]'], False, None, True),
    'onlylist_b_response': ([], True, '["main","__original_main","foo(int, double)","baz()","c_baz","Structy::funcy()"]'),
    'onlylist_c_response': ([], False, '["main","__original_main","foo(int, double)","baz()","c_baz"]'),
})
def test_asyncify_lists(self, args, should_pass, response=None, no_san=False):
    """ASYNCIFY_ONLY / ASYNCIFY_REMOVE lists control which functions get
    instrumented; omitting a function that needs to unwind must fail at
    runtime with a RuntimeError.
    """
    if no_san and is_sanitizing(self.emcc_args):
        self.skipTest('remaining asyncify+sanitizer TODO')
    # The *_response variants pass the list via an @response file.
    if response is not None:
        create_file('response.file', response)
        self.set_setting('ASYNCIFY_ONLY', '@response.file')
    self.set_setting('ASYNCIFY')
    self.emcc_args += args
    if should_pass:
        self.do_core_test('test_asyncify_lists.cpp', assert_identical=True)
    else:
        self.do_runf(test_file('core/test_asyncify_lists.cpp'), ('RuntimeError', 'Thrown at'), assert_returncode=NON_ZERO)
    # use of ASYNCIFY_* options may require intermediate debug info. that should
    # not end up emitted in the final binary
    # (note that we can't check this if sanitizers run, as they include a lot of
    # names/debug info of their own)
    if self.is_wasm() and not is_sanitizing(self.emcc_args):
        binary = read_binary('test_asyncify_lists.wasm')
        # the custom name section should have been stripped
        self.assertFalse(b'name' in binary)
        if '-O3' in self.emcc_args:
            self.assertFalse(b'main' in binary)
@parameterized({
    'normal': ([], True),
    'ignoreindirect': (['-s', 'ASYNCIFY_IGNORE_INDIRECT'], False),
    'add': (['-s', 'ASYNCIFY_IGNORE_INDIRECT', '-s', 'ASYNCIFY_ADD=["__original_main","main","virt()"]'], True),
})
def test_asyncify_indirect_lists(self, args, should_pass):
    """ASYNCIFY_IGNORE_INDIRECT breaks indirect-call unwinding unless the
    affected functions are added back via ASYNCIFY_ADD.
    """
    self.set_setting('ASYNCIFY')
    self.emcc_args += args
    # Inverted-expectation pattern: when the test is expected to fail,
    # a passing run is itself an error. Flipping should_pass before the
    # deliberate raise makes the except clause re-raise it.
    try:
        self.do_core_test('test_asyncify_indirect_lists.cpp', assert_identical=True)
        if not should_pass:
            should_pass = True
            raise Exception('should not have passed')
    except Exception:
        if should_pass:
            raise
@no_asan('asyncify stack operations confuse asan')
def test_emscripten_scan_registers(self):
    """emscripten_scan_registers() works under ASYNCIFY."""
    self.set_setting('ASYNCIFY')
    self.do_core_test('test_emscripten_scan_registers.cpp')
def test_asyncify_assertions(self):
    """ASSERTIONS produces helpful errors for invalid asyncify usage."""
    self.set_setting('ASYNCIFY')
    # 'suspend' is a custom import that suspends execution.
    self.set_setting('ASYNCIFY_IMPORTS', ['suspend'])
    self.set_setting('ASSERTIONS')
    self.do_core_test('test_asyncify_assertions.c', assert_returncode=NON_ZERO)
@no_lsan('leaks asyncify stack during exit')
@no_asan('leaks asyncify stack during exit')
def test_asyncify_during_exit(self):
    """Suspending during runtime exit must be reported as an error."""
    self.set_setting('ASYNCIFY')
    self.set_setting('ASSERTIONS')
    self.set_setting('EXIT_RUNTIME', 1)
    self.do_core_test('test_asyncify_during_exit.cpp', assert_returncode=NON_ZERO)
    # The -DNO_ASYNC variant does not suspend during exit and must succeed.
    print('NO_ASYNC')
    self.do_core_test('test_asyncify_during_exit.cpp', emcc_args=['-DNO_ASYNC'], out_suffix='_no_async')
@no_asan('asyncify stack operations confuse asan')
@no_wasm2js('dynamic linking support in wasm2js')
def test_asyncify_main_module(self):
    """ASYNCIFY combined with MAIN_MODULE (dynamic linking) builds and runs."""
    self.set_setting('ASYNCIFY', 1)
    self.set_setting('MAIN_MODULE', 2)
    self.do_core_test('test_hello_world.c')
@no_asan('asyncify stack operations confuse asan')
@no_wasm2js('TODO: lazy loading in wasm2js')
@parameterized({
    'conditional': (True,),
    'unconditional': (False,),
})
def test_emscripten_lazy_load_code(self, conditional):
    """ASYNCIFY_LAZY_LOAD_CODE: a first (smaller) wasm lazily loads a second.

    Verifies which of the two wasm files is actually executed by
    deliberately corrupting one of them and checking success/failure.
    """
    self.set_setting('ASYNCIFY_LAZY_LOAD_CODE')
    self.set_setting('ASYNCIFY_IGNORE_INDIRECT')
    self.set_setting('MALLOC', 'emmalloc')
    self.emcc_args += ['--profiling-funcs']
    if conditional:
        self.emcc_args += ['-DCONDITIONAL']
    self.do_core_test('emscripten_lazy_load_code.cpp', args=['0'])
    first_size = os.path.getsize('emscripten_lazy_load_code.wasm')
    second_size = os.path.getsize('emscripten_lazy_load_code.wasm.lazy.wasm')
    print('first wasm size', first_size)
    print('second wasm size', second_size)
    # In an unconditional optimized build the first wasm should be
    # substantially smaller than the full (lazy) one.
    if not conditional and self.is_optimizing() and '-g' not in self.emcc_args:
        self.assertLess(first_size, 0.6 * second_size)
    wasm1 = read_binary('emscripten_lazy_load_code.wasm')
    wasm2 = read_binary('emscripten_lazy_load_code.wasm.lazy.wasm')
    self.assertNotEqual(wasm1, wasm2)

    # break_wasm injects an (unreachable) at the start of $foo_end in the
    # given wasm file, so running that function traps. Returns False if
    # $foo_end was not found (e.g. optimized out).
    def break_wasm(name):
        wat = self.run_process([Path(building.get_binaryen_bin(), 'wasm-dis'), name], stdout=PIPE).stdout
        lines = wat.splitlines()
        wat = None
        for i in range(len(lines)):
            if '(func $foo_end ' in lines[i]:
                # skip past the local declarations, then inject the trap
                j = i + 1
                while '(local ' in lines[j]:
                    j += 1
                lines[j] = '(unreachable)' + lines[j]
                wat = '\n'.join(lines)
                break
        if wat is None:
            # $foo_end not present; keep a backup copy for later restore
            shutil.copyfile(name, name + '.orig')
            return False
        with open('wat.wat', 'w') as f:
            f.write(wat)
        shutil.move(name, name + '.orig')
        self.run_process([Path(building.get_binaryen_bin(), 'wasm-as'), 'wat.wat', '-o', name, '-g'])
        return True

    def verify_working(args=['0']):
        self.assertContained('foo_end\n', self.run_js('emscripten_lazy_load_code.js', args=args))

    def verify_broken(args=['0']):
        self.assertNotContained('foo_end\n', self.run_js('emscripten_lazy_load_code.js', args=args, assert_returncode=NON_ZERO))

    # breaking the first wasm should not matter: the program runs from
    # the second (lazy) wasm
    found_foo_end = break_wasm('emscripten_lazy_load_code.wasm')
    if not conditional and self.is_optimizing():
        self.assertFalse(found_foo_end, 'should have optimizd out $foo_end')
    verify_working()
    # breaking the second wasm must break the program
    break_wasm('emscripten_lazy_load_code.wasm.lazy.wasm')
    verify_broken()
    # restore both wasm files; everything works again
    shutil.copyfile('emscripten_lazy_load_code.wasm.orig', 'emscripten_lazy_load_code.wasm')
    shutil.copyfile('emscripten_lazy_load_code.wasm.lazy.wasm.orig', 'emscripten_lazy_load_code.wasm.lazy.wasm')
    verify_working()
    if conditional:
        # in the conditional case, deleting the lazy wasm breaks the lazy
        # path ('0') but the non-lazy path ('42') still works; breaking
        # the first wasm then breaks everything
        os.remove('emscripten_lazy_load_code.wasm.lazy.wasm')
        verify_broken()
        verify_working(['42'])
        break_wasm('emscripten_lazy_load_code.wasm')
        verify_broken()
@no_asan('no wasm2js support yet in asan')
def test_wasm2js(self):
    """Build with WASM=0 (wasm2js) and check the memory init file.

    When a memory init file is expected, it must exist and must not end
    with a zero byte (trailing zeros should be trimmed).
    """
    if not self.is_wasm():
        self.skipTest('redundant to test wasm2js in wasm2js* mode')
    self.set_setting('WASM', 0)
    self.do_core_test('test_hello_world.c')
    expect_memory_init_file = self.uses_memory_init_file()
    if expect_memory_init_file:
        self.assertExists('test_hello_world.js.mem')
        mem = read_binary('test_hello_world.js.mem')
        # Fix: indexing bytes yields an int, so the old comparison
        # `mem[-1] != b'\0'` was always true; compare against 0 instead.
        self.assertNotEqual(mem[-1], 0)
    else:
        self.assertNotExists('test_hello_world.js.mem')
@no_asan('no wasm2js support yet in asan')
def test_maybe_wasm2js(self):
    """MAYBE_WASM2JS: the wasm2js fallback generated by maybe_wasm2js.py
    runs correctly even after the wasm file is removed.
    """
    if not self.is_wasm():
        self.skipTest('redundant to test wasm2js in wasm2js* mode')
    self.set_setting('MAYBE_WASM2JS')
    self.do_core_test('test_hello_world.c')
    # run the wasm2js conversion tool on the output
    cmd = [PYTHON, path_from_root('tools/maybe_wasm2js.py'), 'test_hello_world.js', 'test_hello_world.wasm']
    if self.is_optimizing():
        cmd += ['-O2']
    # Fix: the output file handle was leaked (and the trailing `.stdout`
    # was dead code); use a context manager instead.
    with open('do_wasm2js.js', 'w') as f:
        self.run_process(cmd, stdout=f)
    # remove the wasm to make sure the js fallback is what actually runs
    os.remove('test_hello_world.wasm')
    self.assertContained('hello, world!', self.run_js('do_wasm2js.js'))
@no_asan('no wasm2js support yet in asan')
@parameterized({
    '': ([],),
    'minimal_runtime': (['-s', 'MINIMAL_RUNTIME'],),
})
def test_wasm2js_fallback(self, args):
    """WASM=2 emits both wasm and a wasm2js fallback; the fallback must be
    used when WebAssembly is unavailable.
    """
    if not self.is_wasm():
        self.skipTest('redundant to test wasm2js in wasm2js* mode')
    cmd = [EMCC, test_file('small_hello_world.c'), '-s', 'WASM=2'] + args
    self.run_process(cmd)
    # first, with the fallback hidden, the wasm path must work
    os.rename('a.out.wasm.js', 'a.out.wasm.js.unused')
    self.assertContained('hello!', self.run_js('a.out.js'))
    os.rename('a.out.wasm.js.unused', 'a.out.wasm.js')
    # now simulate a browser without WebAssembly support and remove the
    # wasm: the js fallback must be used.
    # Fix: open().write() leaked the file handle; use create_file.
    create_file('b.out.js', 'WebAssembly = undefined;\n' + read_file('a.out.js'))
    os.remove('a.out.wasm')
    self.assertContained('hello!', self.run_js('b.out.js'))
def test_cxx_self_assign(self):
    """Self-assignment of a std::map must be a no-op (regression test)."""
    self.do_run(r'''
      #include <map>
      #include <stdio.h>
      int main() {
        std::map<int, int> m;
        m[0] = 1;
        m = m;
        // size should still be one after self assignment
        if (m.size() == 1) {
          printf("ok.\n");
        }
      }
    ''', 'ok.')
def test_memprof_requirements(self):
    """--memoryprofiler exposes the stack/heap introspection symbols it needs."""
    create_file('main.cpp', '''
      extern "C" {
        void check_memprof_requirements();
      }
      int main() {
        check_memprof_requirements();
        return 0;
      }
    ''')
    # The JS library checks for the symbols the memory profiler relies on.
    create_file('lib.js', '''
      mergeInto(LibraryManager.library, {
        check_memprof_requirements: function() {
          if (typeof _emscripten_stack_get_base === 'function' &&
              typeof _emscripten_stack_get_end === 'function' &&
              typeof _emscripten_stack_get_current === 'function' &&
              typeof Module['___heap_base'] === 'number') {
             out('able to run memprof');
           } else {
             out('missing the required variables to run memprof');
           }
        }
      });
    ''')
    self.emcc_args += ['--memoryprofiler', '--js-library', 'lib.js']
    self.do_runf('main.cpp', 'able to run memprof')
def test_fs_dict(self):
    """With -lidbfs.js/-lnodefs.js, all filesystem backends are registered."""
    self.set_setting('FORCE_FILESYSTEM')
    self.emcc_args += ['-lidbfs.js']
    self.emcc_args += ['-lnodefs.js']
    create_file('pre.js', '''
    Module = {};
    Module['preRun'] = function() {
      out(typeof FS.filesystems['MEMFS']);
      out(typeof FS.filesystems['IDBFS']);
      out(typeof FS.filesystems['NODEFS']);
      // Globals
      console.log(typeof MEMFS);
      console.log(typeof IDBFS);
      console.log(typeof NODEFS);
    };
    ''')
    self.emcc_args += ['--pre-js', 'pre.js']
    # All six probes (FS.filesystems entries and globals) must be objects.
    self.do_run('int main() { return 0; }', 'object\nobject\nobject\nobject\nobject\nobject')
def test_fs_dict_none(self):
    """Without -lidbfs.js/-lnodefs.js, those backends are absent and ASSERTIONS
    explains how to get them back.
    """
    self.set_setting('FORCE_FILESYSTEM')
    self.set_setting('ASSERTIONS')
    create_file('pre.js', '''
    Module = {};
    Module['preRun'] = function() {
      out(typeof FS.filesystems['MEMFS']);
      out(typeof FS.filesystems['IDBFS']);
      out(typeof FS.filesystems['NODEFS']);
      // Globals
      if (ASSERTIONS) {
        console.log(typeof MEMFS);
        console.log(IDBFS);
        console.log(NODEFS);
        FS.mkdir('/working1');
        try {
          FS.mount(IDBFS, {}, '/working1');
        } catch (e) {
          console.log('|' + e + '|');
        }
      }
    };
    ''')
    self.emcc_args += ['--pre-js', 'pre.js']
    # MEMFS exists; IDBFS/NODEFS are replaced by stubs whose string form
    # explains that they are no longer included by default.
    expected = '''\
object
undefined
undefined
object
IDBFS is no longer included by default; build with -lidbfs.js
NODEFS is no longer included by default; build with -lnodefs.js
|IDBFS is no longer included by default; build with -lidbfs.js|'''
    self.do_run('int main() { return 0; }', expected)
def test_stack_overflow_check(self):
    """STACK_OVERFLOW_CHECK=2 (and ASSERTIONS=2) detect stack overflows."""
    self.set_setting('TOTAL_STACK', 1048576)
    self.set_setting('STACK_OVERFLOW_CHECK', 2)
    self.do_runf(test_file('stack_overflow.cpp'), 'stack overflow', assert_returncode=NON_ZERO)
    # Overflow via one big allocation rather than many small ones.
    self.emcc_args += ['-DONE_BIG_STRING']
    self.do_runf(test_file('stack_overflow.cpp'), 'stack overflow', assert_returncode=NON_ZERO)
    # ASSERTIONS=2 implies stack overflow checks even without the explicit setting.
    self.clear_setting('STACK_OVERFLOW_CHECK')
    self.set_setting('ASSERTIONS', 2)
    self.do_runf(test_file('stack_overflow.cpp'), 'stack overflow', assert_returncode=NON_ZERO)
@node_pthreads
def test_binaryen_2170_emscripten_atomic_cas_u8(self):
    """Regression test for binaryen #2170: 8-bit atomic compare-and-swap."""
    self.set_setting('USE_PTHREADS')
    self.do_run_in_out_file_test('binaryen_2170_emscripten_atomic_cas_u8.cpp')
@also_with_standalone_wasm()
def test_sbrk(self):
    """sbrk() grows the heap as expected."""
    self.do_runf(test_file('sbrk_brk.cpp'), 'OK.')
def test_brk(self):
    """brk() variant of the sbrk test."""
    self.emcc_args += ['-DTEST_BRK=1']
    self.do_runf(test_file('sbrk_brk.cpp'), 'OK.')
@no_asan('mallinfo is not part of ASan malloc')
def test_mallinfo(self):
    """mallinfo() reports heap statistics."""
    self.do_runf(test_file('mallinfo.cpp'), 'OK.')
@no_asan('cannot replace malloc/free with ASan')
def test_wrap_malloc(self):
    """User code may wrap/replace malloc and free."""
    self.do_runf(test_file('wrap_malloc.cpp'), 'OK.')
def test_environment(self):
    """ENVIRONMENT restricts where the output runs: the right engine works,
    the wrong one aborts with a clear error, and a combined list works in both.
    """
    self.set_setting('ASSERTIONS')

    def test(assert_returncode=0):
        self.do_core_test('test_hello_world.c', assert_returncode=assert_returncode)
        js = read_file('test_hello_world.js')
        assert ('require(' in js) == ('node' in self.get_setting('ENVIRONMENT')), 'we should have require() calls only if node js specified'

    for engine in config.JS_ENGINES:
        print(engine)
        # set us to test in just this engine
        self.banned_js_engines = [e for e in config.JS_ENGINES if e != engine]
        # tell the compiler to build with just that engine
        if engine == config.NODE_JS:
            right = 'node'
            wrong = 'shell'
        else:
            right = 'shell'
            wrong = 'node'
        # test with the right env
        self.set_setting('ENVIRONMENT', right)
        print('ENVIRONMENT =', self.get_setting('ENVIRONMENT'))
        test()
        # test with the wrong env: the run must fail with a targeted message
        self.set_setting('ENVIRONMENT', wrong)
        print('ENVIRONMENT =', self.get_setting('ENVIRONMENT'))
        try:
            test(assert_returncode=NON_ZERO)
            raise Exception('unexpected success')
        except Exception as e:
            self.assertContained('not compiled for this environment', str(e))
        # test with a combined env: both environments are accepted
        self.set_setting('ENVIRONMENT', right + ',' + wrong)
        print('ENVIRONMENT =', self.get_setting('ENVIRONMENT'))
        test()
def test_postrun_exception(self):
    """An exception thrown from a postRun callback is reported, after the
    program's own output has been emitted.
    """
    # verify that an exception thrown in postRun() will not trigger the
    # compilation failed handler, and will be printed to stderr.
    self.set_setting('EXIT_RUNTIME', 0)
    self.add_post_run('ThisFunctionDoesNotExist()')
    self.build(test_file('core/test_hello_world.c'))
    output = self.run_js('test_hello_world.js', assert_returncode=NON_ZERO)
    self.assertStartswith(output, 'hello, world!')
    self.assertContained('ThisFunctionDoesNotExist is not defined', output)
def test_no_declare_asm_module_exports(self):
    """DECLARE_ASM_MODULE_EXPORTS=0: exports are accessed dynamically, so the
    export name should appear only a limited number of times in the JS.
    """
    self.set_setting('DECLARE_ASM_MODULE_EXPORTS', 0)
    self.set_setting('WASM_ASYNC_COMPILATION', 0)
    self.maybe_closure()
    self.do_runf(test_file('declare_asm_module_exports.cpp'), 'jsFunction: 1')
    js = read_file('declare_asm_module_exports.js')
    occurances = js.count('cFunction')
    if self.is_optimizing() and '-g' not in self.emcc_args:
        # In optimized builds only the dynamic lookup (wasm) or two mentions
        # (wasm2js) should remain.
        if self.is_wasm():
            self.assertEqual(occurances, 1)
        else:
            self.assertEqual(occurances, 2)
    else:
        # Unoptimized builds keep extra mentions; just log the count.
        print(occurances)
@no_asan('TODO: ASan support in minimal runtime')
def test_minimal_runtime_no_declare_asm_module_exports(self):
    """MINIMAL_RUNTIME with DECLARE_ASM_MODULE_EXPORTS=0 still runs."""
    self.set_setting('DECLARE_ASM_MODULE_EXPORTS', 0)
    self.set_setting('WASM_ASYNC_COMPILATION', 0)
    self.maybe_closure()
    self.set_setting('MINIMAL_RUNTIME')
    self.do_runf(test_file('declare_asm_module_exports.cpp'), 'jsFunction: 1')
@parameterized({
    'default': ([],),
    'streaming': (['-s', 'MINIMAL_RUNTIME_STREAMING_WASM_COMPILATION'],),
    'streaming_inst': (['-s', 'MINIMAL_RUNTIME_STREAMING_WASM_INSTANTIATION'],),
    'no_export': (['-s', 'DECLARE_ASM_MODULE_EXPORTS=0'],)
})
def test_minimal_runtime_hello_world(self, args):
    """MINIMAL_RUNTIME hello world, in several loading configurations."""
    # TODO: Support for non-Node.js shells has not yet been added to MINIMAL_RUNTIME
    self.banned_js_engines = [config.V8_ENGINE, config.SPIDERMONKEY_ENGINE]
    self.emcc_args = args
    self.set_setting('MINIMAL_RUNTIME')
    self.maybe_closure()
    self.do_runf(test_file('small_hello_world.c'), 'hello')
@parameterized({
    'fs': ('FORCE_FILESYSTEM',),
    'nofs': ('NO_FILESYSTEM',),
})
@no_asan('TODO: ASan support in minimal runtime')
def test_minimal_runtime_hello_printf(self, extra_setting):
    """MINIMAL_RUNTIME printf, with the filesystem forced on or off."""
    self.set_setting('MINIMAL_RUNTIME')
    self.set_setting(extra_setting)
    # leak detection and closure are not compatible with the FS variant here
    if '-fsanitize=leak' not in self.emcc_args and extra_setting != 'FORCE_FILESYSTEM':
        self.maybe_closure()
    self.do_runf(test_file('hello_world.c'), 'hello, world!')
@no_asan('TODO: ASan support in minimal runtime')
def test_minimal_runtime_safe_heap(self):
    """MINIMAL_RUNTIME combined with SAFE_HEAP runs correctly."""
    self.set_setting('MINIMAL_RUNTIME')
    self.set_setting('SAFE_HEAP')
    if '-fsanitize=leak' not in self.emcc_args:
        self.maybe_closure()
    self.do_runf(test_file('small_hello_world.c'), 'hello')
@no_asan('TODO: ASan support in minimal runtime')
def test_minimal_runtime_global_initializer(self):
    """Global C++ constructors run before main under MINIMAL_RUNTIME."""
    self.set_setting('MINIMAL_RUNTIME')
    self.maybe_closure()
    self.do_runf(test_file('test_global_initializer.cpp'), 't1 > t0: 1')
@no_wasm2js('wasm2js does not support PROXY_TO_PTHREAD (custom section support)')
def test_return_address(self):
    """__builtin_return_address works with USE_OFFSET_CONVERTER."""
    self.set_setting('USE_OFFSET_CONVERTER')
    self.do_runf(test_file('core/test_return_address.c'), 'passed')
@no_wasm2js('TODO: sanitizers in wasm2js')
@no_asan('-fsanitize-minimal-runtime cannot be used with ASan')
def test_ubsan_minimal_too_many_errors(self):
    """The minimal UBSan runtime stops reporting after 20 errors."""
    self.emcc_args += ['-fsanitize=undefined', '-fsanitize-minimal-runtime']
    if not self.is_wasm():
        if self.is_optimizing():
            self.skipTest('test can only be run without optimizations on asm.js')
        # Need to use `-g` to get proper line numbers in asm.js
        self.emcc_args.append('-g')
    # 20 add-overflow reports, then the runtime gives up.
    expected = 'ubsan: add-overflow\n' * 20 + 'ubsan: too many errors\n'
    self.do_runf(test_file('core/test_ubsan_minimal_too_many_errors.c'),
                 expected_output=expected)
@no_wasm2js('TODO: sanitizers in wasm2js')
@no_asan('-fsanitize-minimal-runtime cannot be used with ASan')
def test_ubsan_minimal_errors_same_place(self):
    """The minimal UBSan runtime reports repeated errors at one location."""
    self.emcc_args += ['-fsanitize=undefined', '-fsanitize-minimal-runtime']
    if not self.is_wasm():
        if self.is_optimizing():
            self.skipTest('test can only be run without optimizations on asm.js')
        # Need to use `-g` to get proper line numbers in asm.js
        self.emcc_args.append('-g')
    # The same overflow is reported five times.
    expected = 'ubsan: add-overflow\n' * 5
    self.do_runf(test_file('core/test_ubsan_minimal_errors_same_place.c'),
                 expected_output=expected)
@parameterized({
    'fsanitize_undefined': (['-fsanitize=undefined'],),
    'fsanitize_integer': (['-fsanitize=integer'],),
    'fsanitize_overflow': (['-fsanitize=signed-integer-overflow'],),
})
def test_ubsan_full_overflow(self, args):
    """Full UBSan runtime reports signed integer overflow with source locations."""
    self.emcc_args += args
    self.do_runf(test_file('core/test_ubsan_full_overflow.c'),
                 assert_all=True, expected_output=[
        ".c:3:5: runtime error: signed integer overflow: 2147483647 + 1 cannot be represented in type 'int'",
        ".c:7:7: runtime error: signed integer overflow: 2147483647 + 1 cannot be represented in type 'int'",
    ])
@parameterized({
    'fsanitize_undefined': (['-fsanitize=undefined'],),
    'fsanitize_return': (['-fsanitize=return'],),
})
def test_ubsan_full_no_return(self, args):
    """Full UBSan runtime catches falling off the end of a value-returning function."""
    self.emcc_args += ['-Wno-return-type'] + args
    self.do_runf(test_file('core/test_ubsan_full_no_return.cpp'),
                 expected_output='.cpp:1:5: runtime error: execution reached the end of a value-returning function without returning a value', assert_returncode=NON_ZERO)
@parameterized({
    'fsanitize_undefined': (['-fsanitize=undefined'],),
    'fsanitize_integer': (['-fsanitize=integer'],),
    'fsanitize_shift': (['-fsanitize=shift'],),
})
def test_ubsan_full_left_shift(self, args):
    """Full UBSan runtime catches undefined left shifts."""
    self.emcc_args += args
    self.do_runf(test_file('core/test_ubsan_full_left_shift.c'),
                 assert_all=True, expected_output=[
        '.c:3:5: runtime error: left shift of negative value -1',
        ".c:7:5: runtime error: left shift of 16 by 29 places cannot be represented in type 'int'"
    ])
@parameterized({
    'fsanitize_undefined': (['-fsanitize=undefined'],),
    'fsanitize_null': (['-fsanitize=null'],),
})
def test_ubsan_full_null_ref(self, args):
    """Full UBSan runtime catches binding a reference to a null pointer."""
    self.emcc_args += args
    self.do_runf(test_file('core/test_ubsan_full_null_ref.cpp'),
                 assert_all=True, expected_output=[
        ".cpp:3:12: runtime error: reference binding to null pointer of type 'int'",
        ".cpp:4:13: runtime error: reference binding to null pointer of type 'int'",
        ".cpp:5:14: runtime error: reference binding to null pointer of type 'int'",
    ])
@parameterized({
    'fsanitize_undefined': (['-fsanitize=undefined'],),
    'fsanitize_vptr': (['-fsanitize=vptr'],),
})
def test_ubsan_full_static_cast(self, args):
    """Full UBSan runtime (vptr) catches an invalid downcast via static_cast."""
    self.emcc_args += args
    self.do_runf(test_file('core/test_ubsan_full_static_cast.cpp'),
                 assert_all=True, expected_output=[
        ".cpp:18:10: runtime error: downcast of address",
        "which does not point to an object of type 'R'",
    ])
@parameterized({
    'g': ('-g', [
        ".cpp:3:12: runtime error: reference binding to null pointer of type 'int'",
        'in main',
    ]),
    'g4': ('-gsource-map', [
        ".cpp:3:12: runtime error: reference binding to null pointer of type 'int'",
        'in main ',
        '.cpp:3:8'
    ]),
})
def test_ubsan_full_stack_trace(self, g_flag, expected_output):
    """UBSan's print_stacktrace=1 emits a stack trace; with source maps it
    includes source locations.
    """
    if g_flag == '-gsource-map':
        if not self.is_wasm():
            self.skipTest('wasm2js has no source map support')
        elif '-Oz' in self.emcc_args:
            self.skipTest('-Oz breaks stack traces')
    # Enable stack traces via the UBSAN_OPTIONS env-equivalent on Module.
    create_file('pre.js', 'Module = {UBSAN_OPTIONS: "print_stacktrace=1"};')
    self.emcc_args += ['-fsanitize=null', g_flag, '--pre-js=pre.js']
    self.set_setting('ALLOW_MEMORY_GROWTH')
    self.do_runf(test_file('core/test_ubsan_full_null_ref.cpp'),
                 assert_all=True, expected_output=expected_output)
@no_wasm2js('TODO: sanitizers in wasm2js')
def test_ubsan_typeinfo_eq(self):
    """typeid equality must work under UBSan (regression test).

    typeinfo names are lazily loaded; this checks the comparison does not
    itself trip the sanitizer.
    """
    src = r'''
      #include <typeinfo>
      #include <stdio.h>
      int main() {
        int mismatch = typeid(int) != typeid(int);
        printf("ok\n");
        return mismatch;
      }
    '''
    self.emcc_args.append('-fsanitize=undefined')
    self.do_run(src, 'ok\n')
def test_template_class_deduction(self):
    """C++17 class template argument deduction compiles and runs."""
    self.emcc_args += ['-std=c++17']
    self.do_core_test('test_template_class_deduction.cpp')
@no_wasm2js('TODO: ASAN in wasm2js')
@no_safe_heap('asan does not work with SAFE_HEAP')
@parameterized({
    'c': ['test_asan_no_error.c'],
    'cpp': ['test_asan_no_error.cpp'],
})
def test_asan_no_error(self, name):
    """A clean program under ASan produces no sanitizer output."""
    self.emcc_args.append('-fsanitize=address')
    self.set_setting('ALLOW_MEMORY_GROWTH')
    # ASan shadow memory needs a large initial heap.
    self.set_setting('INITIAL_MEMORY', '300mb')
    self.do_runf(test_file('core', name), '', assert_returncode=NON_ZERO)
@no_safe_heap('asan does not work with SAFE_HEAP')
@parameterized({
    'use_after_free_c': ('test_asan_use_after_free.c', [
        'AddressSanitizer: heap-use-after-free on address',
    ]),
    'use_after_free_cpp': ('test_asan_use_after_free.cpp', [
        'AddressSanitizer: heap-use-after-free on address',
    ]),
    'use_after_return': ('test_asan_use_after_return.c', [
        'AddressSanitizer: stack-use-after-return on address',
    ], ['-Wno-return-stack-address']),
    'static_buffer_overflow': ('test_asan_static_buffer_overflow.c', [
        'AddressSanitizer: global-buffer-overflow on address',
    ], ['-fno-builtin-memset']),
    'heap_buffer_overflow_c': ('test_asan_heap_buffer_overflow.c', [
        'AddressSanitizer: heap-buffer-overflow on address',
    ], ['-fno-builtin-memset']),
    'heap_buffer_overflow_cpp': ('test_asan_heap_buffer_overflow.cpp', [
        'AddressSanitizer: heap-buffer-overflow on address',
    ], ['-fno-builtin-memset']),
    'stack_buffer_overflow': ('test_asan_stack_buffer_overflow.c', [
        'AddressSanitizer: stack-buffer-overflow'
    ], ['-fno-builtin-memset']),
    'stack_buffer_overflow_js': ('test_asan_stack_buffer_overflow_js.c', [
        'AddressSanitizer: stack-buffer-overflow'
    ], ['-fno-builtin-memset']),
    'bitfield_unround_size': ('test_asan_bitfield_unround_size.c', [
        'AddressSanitizer: stack-buffer-overflow'
    ], ['-fno-builtin-memset']),
    'bitfield_unround_offset': ('test_asan_bitfield_unround_offset.c', [
        'AddressSanitizer: stack-buffer-overflow'
    ], ['-fno-builtin-memset']),
    'bitfield_round': ('test_asan_bitfield_round.c', [
        'AddressSanitizer: stack-buffer-overflow'
    ], ['-fno-builtin-memset']),
    'memset_null': ('test_asan_memset_null.c', [
        'AddressSanitizer: null-pointer-dereference on address 0x00000001'
    ], ['-fno-builtin-memset']),
    'memset_freed': ('test_asan_memset_freed.c', [
        'AddressSanitizer: heap-use-after-free on address'
    ], ['-fno-builtin-memset']),
    'strcpy': ('test_asan_strcpy.c', [
        'AddressSanitizer: heap-buffer-overflow on address'
    ], ['-fno-builtin-strcpy']),
    'memcpy': ('test_asan_memcpy.c', [
        'AddressSanitizer: heap-buffer-overflow on address'
    ], ['-fno-builtin-memcpy']),
    'memchr': ('test_asan_memchr.c', [
        'AddressSanitizer: global-buffer-overflow on address'
    ], ['-fno-builtin-memchr']),
    'vector': ('test_asan_vector.cpp', [
        'AddressSanitizer: container-overflow on address'
    ]),
})
def test_asan(self, name, expected_output, cflags=None):
    """ASan detects each class of memory error and prints the expected report.

    Each parameterization is (source file, expected report fragments,
    optional extra cflags); -fno-builtin-* flags keep the checked libc
    calls from being lowered to intrinsics ASan cannot intercept.
    """
    if '-Oz' in self.emcc_args:
        self.skipTest('-Oz breaks source maps')
    if not self.is_wasm():
        self.skipTest('wasm2js has no ASan support')
    self.emcc_args.append('-fsanitize=address')
    self.set_setting('ALLOW_MEMORY_GROWTH')
    # ASan shadow memory needs a large initial heap.
    self.set_setting('INITIAL_MEMORY', '300mb')
    if cflags:
        self.emcc_args += cflags
    self.do_runf(test_file('core', name),
                 expected_output=expected_output, assert_all=True,
                 check_for_error=False, assert_returncode=NON_ZERO)
@no_safe_heap('asan does not work with SAFE_HEAP')
@no_wasm2js('TODO: ASAN in wasm2js')
def test_asan_js_stack_op(self):
    """JS-side stack string allocation works under ASan."""
    self.emcc_args.append('-fsanitize=address')
    self.set_setting('ALLOW_MEMORY_GROWTH')
    self.set_setting('INITIAL_MEMORY', '300mb')
    self.do_runf(test_file('core/test_asan_js_stack_op.c'),
                 expected_output='Hello, World!')
@no_safe_heap('asan does not work with SAFE_HEAP')
@no_wasm2js('TODO: ASAN in wasm2js')
def test_asan_api(self):
    """The ASan interface functions (__asan_*) are usable from user code."""
    self.emcc_args.append('-fsanitize=address')
    self.set_setting('INITIAL_MEMORY', '300mb')
    self.do_core_test('test_asan_api.c')
@no_safe_heap('asan does not work with SAFE_HEAP')
@no_wasm2js('TODO: ASAN in wasm2js')
def test_asan_modularized_with_closure(self):
    """ASan + MODULARIZE + closure: the factory still returns a Promise."""
    # the bug is that createModule() returns undefined instead of a Promise
    create_file('post.js', 'if (!(createModule() instanceof Promise)) throw "Promise was not returned :(";\n')
    self.emcc_args += ['-fsanitize=address', '--extern-post-js=post.js']
    self.set_setting('MODULARIZE')
    self.set_setting('EXPORT_NAME', 'createModule')
    self.set_setting('USE_CLOSURE_COMPILER')
    self.set_setting('ALLOW_MEMORY_GROWTH')
    self.set_setting('INITIAL_MEMORY', '300mb')
    self.do_runf(test_file('hello_world.c'), expected_output='hello, world!')
@no_asan('SAFE_HEAP cannot be used with ASan')
def test_safe_heap_user_js(self):
    """SAFE_HEAP catches a null-pointer store coming from user JS."""
    self.set_setting('SAFE_HEAP')
    self.do_runf(test_file('core/test_safe_heap_user_js.c'),
                 expected_output=['Aborted(segmentation fault storing 1 bytes to address 0)'], assert_returncode=NON_ZERO)
def test_safe_stack(self):
    """STACK_OVERFLOW_CHECK=2 aborts on stack exhaustion.

    Unoptimized builds additionally show the __handle_stack_overflow
    frame in the output.
    """
    self.set_setting('STACK_OVERFLOW_CHECK', 2)
    self.set_setting('TOTAL_STACK', 65536)
    expected = ['Aborted(stack overflow)']
    if not self.is_optimizing():
        expected.append('__handle_stack_overflow')
    self.do_runf(test_file('core/test_safe_stack.c'),
                 expected_output=expected,
                 assert_returncode=NON_ZERO, assert_all=True)
@node_pthreads
def test_safe_stack_pthread(self):
    """STACK_OVERFLOW_CHECK=2 aborts on stack exhaustion on a pthread
    (via PROXY_TO_PTHREAD).
    """
    self.set_setting('STACK_OVERFLOW_CHECK', 2)
    self.set_setting('TOTAL_STACK', 65536)
    self.set_setting('PROXY_TO_PTHREAD')
    self.set_setting('USE_PTHREADS')
    expected = ['Aborted(stack overflow)']
    if not self.is_optimizing():
        # unoptimized builds also show the handler frame
        expected.append('__handle_stack_overflow')
    self.do_runf(test_file('core/test_safe_stack.c'),
                 expected_output=expected,
                 assert_returncode=NON_ZERO, assert_all=True)
def test_safe_stack_alloca(self):
    """STACK_OVERFLOW_CHECK=2 catches overflow caused by alloca()."""
    self.set_setting('STACK_OVERFLOW_CHECK', 2)
    self.set_setting('TOTAL_STACK', 65536)
    expected = ['Aborted(stack overflow)']
    if not self.is_optimizing():
        # unoptimized builds also show the handler frame
        expected.append('__handle_stack_overflow')
    self.do_runf(test_file('core/test_safe_stack_alloca.c'),
                 expected_output=expected,
                 assert_returncode=NON_ZERO, assert_all=True)
@needs_dylink
def test_safe_stack_dylink(self):
    """STACK_OVERFLOW_CHECK=2 catches overflow occurring inside a side module."""
    self.set_setting('STACK_OVERFLOW_CHECK', 2)
    self.set_setting('TOTAL_STACK', 65536)
    self.dylink_test(r'''
      #include <stdio.h>
      extern void sidey();
      int main() {
        sidey();
      }
    ''', '''
      #include <string.h>
      static long accumulator = 0;
      int f(int *b) {
        // Infinite recursion while recording stack pointer locations
        // so that compiler can't eliminate the stack allocs.
        accumulator += (long)b;
        int a[1024];
        return f(a);
      }
      void sidey() {
        f(NULL);
      }
    ''', ['Aborted(stack overflow)', '__handle_stack_overflow'], assert_returncode=NON_ZERO, force_c=True)
def test_fpic_static(self):
self.emcc_args.append('-fPIC')
self.do_core_test('test_hello_world.c')
  @node_pthreads
  def test_pthread_create(self):
    self.set_setting('EXIT_RUNTIME')
    # test that the node environment can be specified by itself, and that still
    # works with pthreads (even though we did not specify 'node,worker')
    self.set_setting('ENVIRONMENT', 'node')
    self.do_run_in_out_file_test('core/pthread/create.cpp')
  @node_pthreads
  def test_pthread_c11_threads(self):
    self.set_setting('PROXY_TO_PTHREAD')
    self.set_setting('EXIT_RUNTIME')
    self.set_setting('PTHREADS_DEBUG')
    if not self.has_changed_setting('INITIAL_MEMORY'):
      self.set_setting('INITIAL_MEMORY', '64mb')
    # test that the node and worker environments can be specified
    self.set_setting('ENVIRONMENT', 'node,worker')
    self.do_run_in_out_file_test('pthread/test_pthread_c11_threads.c')
  @node_pthreads
  def test_pthread_cxx_threads(self):
    # C++ std::thread on top of pthreads, with a fixed (non-growing) memory.
    self.set_setting('PROXY_TO_PTHREAD')
    self.clear_setting('ALLOW_MEMORY_GROWTH')
    self.set_setting('INITIAL_MEMORY', '64Mb')
    self.set_setting('EXIT_RUNTIME')
    self.do_run_in_out_file_test('pthread/test_pthread_cxx_threads.cpp')
  @node_pthreads
  def test_pthread_create_pool(self):
    # with a pool, we can synchronously depend on workers being available
    self.set_setting('PTHREAD_POOL_SIZE', 2)
    self.set_setting('EXIT_RUNTIME')
    self.emcc_args += ['-DALLOW_SYNC']
    self.do_run_in_out_file_test('core/pthread/create.cpp')
  @node_pthreads
  def test_pthread_create_proxy(self):
    # with PROXY_TO_PTHREAD, we can synchronously depend on workers being available
    self.set_setting('PROXY_TO_PTHREAD')
    self.set_setting('EXIT_RUNTIME')
    self.emcc_args += ['-DALLOW_SYNC']
    self.do_run_in_out_file_test('core/pthread/create.cpp')
  @node_pthreads
  def test_pthread_create_embind_stack_check(self):
    # embind should work with stack overflow checks (see #12356)
    self.set_setting('STACK_OVERFLOW_CHECK', 2)
    self.set_setting('EXIT_RUNTIME')
    self.emcc_args += ['--bind']
    self.do_run_in_out_file_test('core/pthread/create.cpp')
  @node_pthreads
  def test_pthread_exceptions(self):
    # C++ exceptions thrown and caught on worker threads.
    self.set_setting('PTHREAD_POOL_SIZE', 2)
    self.set_setting('EXIT_RUNTIME')
    self.emcc_args += ['-fexceptions']
    self.do_run_in_out_file_test('core/pthread/exceptions.cpp')
  @node_pthreads
  def test_pthread_exit_process(self):
    # exit() from a pthread must tear down the whole runtime; the expected
    # process exit code (42) is verified below.
    self.set_setting('PROXY_TO_PTHREAD')
    self.set_setting('EXIT_RUNTIME')
    self.emcc_args += ['-DEXIT_RUNTIME', '--pre-js', test_file('core/pthread/test_pthread_exit_runtime.pre.js')]
    self.do_run_in_out_file_test('core/pthread/test_pthread_exit_runtime.c', assert_returncode=42)
  @node_pthreads
  @no_wasm2js('wasm2js does not support PROXY_TO_PTHREAD (custom section support)')
  def test_pthread_offset_converter(self):
    # USE_OFFSET_CONVERTER maps return addresses back to function names.
    self.set_setting('PROXY_TO_PTHREAD')
    self.set_setting('EXIT_RUNTIME')
    self.set_setting('USE_OFFSET_CONVERTER')
    if '-g' in self.emcc_args:
      self.emcc_args += ['-DDEBUG']
    self.do_runf(test_file('core/test_return_address.c'), 'passed')
  @node_pthreads
  @no_wasm2js('wasm2js does not support PROXY_TO_PTHREAD (custom section support)')
  def test_pthread_offset_converter_modularize(self):
    # Same as above, but with MODULARIZE: the module is loaded and invoked
    # via require() in the extern-post-js instead of running automatically.
    self.set_setting('PROXY_TO_PTHREAD')
    self.set_setting('EXIT_RUNTIME')
    self.set_setting('USE_OFFSET_CONVERTER')
    self.set_setting('MODULARIZE')
    create_file('post.js', 'var m = require("./test_return_address.js"); m();')
    self.emcc_args += ['--extern-post-js', 'post.js', '-s', 'EXPORT_NAME=foo']
    if '-g' in self.emcc_args:
      self.emcc_args += ['-DDEBUG']
    self.do_runf(test_file('core/test_return_address.c'), 'passed')
  def test_emscripten_atomics_stub(self):
    # Without USE_PTHREADS the atomics API is stubbed out but must still link/run.
    self.do_run_in_out_file_test('core/pthread/emscripten_atomics.c')
  @no_asan('incompatibility with atomics')
  @node_pthreads
  def test_emscripten_atomics(self):
    self.set_setting('USE_PTHREADS')
    self.do_run_in_out_file_test('core/pthread/emscripten_atomics.c')
  @no_asan('incompatibility with atomics')
  @node_pthreads
  def test_emscripten_futexes(self):
    self.set_setting('USE_PTHREADS')
    self.do_run_in_out_file_test('core/pthread/emscripten_futexes.c')
  @node_pthreads
  def test_stdio_locking(self):
    # NOTE(review): pool size is passed as the string '2' here while sibling
    # tests pass an int; set_setting appears to accept both — confirm.
    self.set_setting('PTHREAD_POOL_SIZE', '2')
    self.set_setting('EXIT_RUNTIME')
    self.do_run_in_out_file_test('core', 'test_stdio_locking.c')
  @needs_dylink
  @node_pthreads
  def test_pthread_dylink_basics(self):
    # Minimal pthreads + dynamic-linking smoke test.
    self.emcc_args.append('-Wno-experimental')
    self.set_setting('PROXY_TO_PTHREAD')
    self.set_setting('EXIT_RUNTIME')
    self.do_basic_dylink_test()
  @needs_dylink
  @node_pthreads
  def test_pthread_dylink(self):
    self.emcc_args.append('-Wno-experimental')
    self.set_setting('EXIT_RUNTIME')
    self.set_setting('USE_PTHREADS')
    self.set_setting('LLD_REPORT_UNDEFINED')
    self.set_setting('PTHREAD_POOL_SIZE', 2)
    main = test_file('core/pthread/test_pthread_dylink.c')
    # test with a long .so name, as a regression test for
    # https://github.com/emscripten-core/emscripten/issues/14833
    # where we had a bug with long names + TextDecoder + pthreads + dylink
    very_long_name = 'very_very_very_very_very_very_very_very_very_long.so'
    self.dylink_testf(main, so_name=very_long_name,
                      need_reverse=False)
@needs_dylink
@node_pthreads
def test_pthread_dylink_tls(self):
self.emcc_args.append('-Wno-experimental')
self.set_setting('EXIT_RUNTIME')
self.set_setting('USE_PTHREADS')
self.set_setting('PTHREAD_POOL_SIZE=1')
main = test_file('core/pthread/test_pthread_dylink_tls.c')
self.dylink_testf(main, need_reverse=False)
  @needs_dylink
  @node_pthreads
  def test_Module_dynamicLibraries_pthreads(self):
    # test that Module.dynamicLibraries works with pthreads
    self.emcc_args += ['-pthread', '-Wno-experimental']
    self.emcc_args += ['--extern-pre-js', 'pre.js']
    self.set_setting('PROXY_TO_PTHREAD')
    self.set_setting('EXIT_RUNTIME')
    # This test sets dynamicLibraries at runtime (via pre.js below), so don't
    # let emscripten auto-load the dylibs at startup.
    self.set_setting('NO_AUTOLOAD_DYLIBS')
    create_file('pre.js', '''
      if (!global.Module) {
        // This is the initial load (not a worker)
        // Define the initial state of Module as we would
        // in the html shell file.
        // Use var to escape the scope of the if statement
        var Module = {
          dynamicLibraries: ['liblib.so']
        };
      }
    ''')
    self.dylink_test(
      r'''
      #include <stdio.h>
      int side();
      int main() {
        printf("result is %d", side());
        return 0;
      }
      ''',
      r'''
      int side() { return 42; }
      ''',
      'result is 42')
  def test_emscripten_get_exported_function(self):
    # -lexports.js provides emscripten_get_exported_function().
    self.set_setting('RESERVED_FUNCTION_POINTERS', 2)
    self.emcc_args += ['-lexports.js']
    self.do_core_test('test_get_exported_function.cpp')
  @no_asan('TODO: ASan support in minimal runtime')
  def test_minimal_runtime_emscripten_get_exported_function(self):
    # Same as above, under MINIMAL_RUNTIME.
    self.set_setting('RESERVED_FUNCTION_POINTERS', 2)
    self.set_setting('MINIMAL_RUNTIME')
    self.emcc_args += ['-lexports.js']
    self.do_core_test('test_get_exported_function.cpp')
  @also_with_standalone_wasm(impure=True)
  def test_undefined_main(self):
    if self.get_setting('STANDALONE_WASM'):
      # Standalone mode does not allow an implicitly missing main; the user must
      # opt out (see below).
      err = self.expect_fail([EMCC, test_file('core/test_ctors_no_main.cpp')] + self.get_emcc_args())
      self.assertContained('error: undefined symbol: main (referenced by top-level compiled C/C++ code)', err)
      self.assertContained('warning: To build in STANDALONE_WASM mode without a main(), use emcc --no-entry', err)
    elif not self.get_setting('LLD_REPORT_UNDEFINED') and not self.get_setting('STRICT'):
      # Traditionally in emscripten we allow main to be implicitly undefined. This allows programs
      # with a main and libraries without a main to be compiled identically.
      # However we are trying to move away from that model to a more explicit opt-out model. See:
      # https://github.com/emscripten-core/emscripten/issues/9640
      self.do_core_test('test_ctors_no_main.cpp')
      # Disabling IGNORE_MISSING_MAIN should cause link to fail due to missing main
      self.set_setting('IGNORE_MISSING_MAIN', 0)
      err = self.expect_fail([EMCC, test_file('core/test_ctors_no_main.cpp')] + self.get_emcc_args())
      self.assertContained('error: entry symbol not defined (pass --no-entry to suppress): main', err)
      # In non-standalone mode exporting an empty list of functions signals that we don't
      # need an entry point, so the link succeeds again.
      self.set_setting('EXPORTED_FUNCTIONS', [])
      self.do_core_test('test_ctors_no_main.cpp')
      self.clear_setting('EXPORTED_FUNCTIONS')
  def test_undefined_main_explict(self):
    # NOTE(review): 'explict' typo kept — renaming would change the collected test name.
    self.emcc_args.append('--no-entry')
    self.do_core_test('test_ctors_no_main.cpp')
  def test_undefined_main_wasm_output(self):
    # Direct-to-wasm output (no JS) with a missing main must fail the link.
    if not can_do_standalone(self):
      self.skipTest('standalone mode only')
    err = self.expect_fail([EMCC, '-o', 'out.wasm', test_file('core/test_ctors_no_main.cpp')] + self.get_emcc_args())
    self.assertContained('undefined symbol: main', err)
  def test_export_start(self):
    # Exporting __start instead of main in standalone mode.
    if not can_do_standalone(self):
      self.skipTest('standalone mode only')
    self.set_setting('STANDALONE_WASM')
    self.set_setting('EXPORTED_FUNCTIONS', ['__start'])
    self.do_core_test('test_hello_world.c')
  @unittest.skip("memory64 functionality only partially working")
  def test_memory64_hello_world(self):
    # Hello world under MEMORY64=2; skipped until memory64 support matures.
    self.set_setting('MEMORY64', 2)
    self.do_core_test('test_hello_world.c')
self):
self.do_core_test('test_emscripten_math.c')
  def test_custom_js_options(self):
    self.emcc_args += ['--js-library', test_file('core/test_custom_js_settings.js'), '-jsDCUSTOM_JS_OPTION=1']
    self.do_core_test('test_custom_js_settings.c')
    # Built-in settings must be rejected by the -jsD mechanism.
    self.assertContained('cannot change built-in settings values with a -jsD directive', self.expect_fail([EMCC, '-jsDWASM=0']))
  @no_asan('stack allocation sizes are no longer predictable')
  def test_emscripten_stack(self):
    self.set_setting('TOTAL_STACK', 4 * 1024 * 1024)
    self.do_core_test('test_stack_get_free.c')
  def test_abort_on_exceptions(self):
    self.set_setting('EXIT_RUNTIME', 0)
    self.set_setting('ABORT_ON_WASM_EXCEPTIONS')
    self.set_setting('EXPORTED_RUNTIME_METHODS', ['ccall', 'cwrap'])
    self.emcc_args += ['--bind', '--post-js', test_file('core/test_abort_on_exception_post.js')]
    self.do_core_test('test_abort_on_exception.cpp', interleaved_output=False)
  @needs_dylink
  def test_gl_main_module(self):
    # GL proc-address lookup from a MAIN_MODULE build.
    self.set_setting('MAIN_MODULE')
    self.do_runf(test_file('core/test_gl_get_proc_address.c'))
  @needs_dylink
  def test_main_module_js_symbol(self):
    self.set_setting('MAIN_MODULE', 2)
    self.emcc_args += ['--js-library', test_file('core/test_main_module_js_symbol.js')]
    self.do_runf(test_file('core/test_main_module_js_symbol.c'))
  def test_REVERSE_DEPS(self):
    create_file('connect.c', '#include <sys/socket.h>\nint main() { return (int)(long)&connect; }')
    self.run_process([EMCC, 'connect.c'])
    base_size = os.path.getsize('a.out.wasm')
    self.run_process([EMCC, 'connect.c', '-sREVERSE_DEPS=auto'])
    self.run_process([EMCC, 'connect.c', '-sREVERSE_DEPS=all'])
    # 'all' links every reverse dependency, so the binary must grow vs default.
    self.assertGreater(os.path.getsize('a.out.wasm'), base_size)
    err = self.expect_fail([EMCC, 'connect.c', '-sREVERSE_DEPS=none'])
    self.assertContained('undefined symbol: ntohs', err)
  def test_emscripten_async_call(self):
    self.set_setting('EXIT_RUNTIME')
    self.do_run_in_out_file_test(test_file('core/test_emscripten_async_call.c'))
  @no_asan('asyncify stack operations confuse asan')
  @parameterized({
    '': ([],),
    'no_dynamic_execution': (['-s', 'DYNAMIC_EXECUTION=0'],)
  })
  def test_embind_lib_with_asyncify(self, args):
    self.uses_es6 = True
    self.emcc_args += [
      '--bind',
      '-s', 'ASYNCIFY',
      '-s', 'ASYNCIFY_IMPORTS=["sleep_and_return"]',
      '--post-js', test_file('core/embind_lib_with_asyncify.test.js'),
    ]
    self.emcc_args += args
    self.do_core_test('embind_lib_with_asyncify.cpp')
  @no_asan('asyncify stack operations confuse asan')
  def test_em_async_js(self):
    self.uses_es6 = True
    self.set_setting('ASYNCIFY')
    self.maybe_closure()
    self.do_core_test('test_em_async_js.c')
def make_run(name, emcc_args, settings=None, env=None):
  """Create and return a TestCoreBase subclass named `name` whose setUp applies
  the given emcc flags, settings, and extra environment variables."""
  if env is None:
    env = {}
  if settings is None:
    settings = {}
  if settings:
    # presumably silences clang's unused-argument warning that -s settings can
    # trigger — TODO confirm
    emcc_args.append('-Wno-unused-command-line-argument')
  TT = type(name, (TestCoreBase,), dict(run_name=name, env=env, __module__=__name__))
  def tearDown(self):
    try:
      super(TT, self).tearDown()
    finally:
      # Undo the os.environ changes made in setUp.
      for k, v in self.env.items():
        del os.environ[k]
  TT.tearDown = tearDown
  def setUp(self):
    super(TT, self).setUp()
    for k, v in self.env.items():
      assert k not in os.environ, k + ' should not be in environment'
      os.environ[k] = v
    os.chdir(self.get_dir())
    for k, v in settings.items():
      self.set_setting(k, v)
    self.emcc_args += emcc_args
  TT.setUp = setUp
  return TT
# Plain wasm modes at each optimization level.
wasm0 = make_run('wasm0', emcc_args=['-O0'])
wasm0g = make_run('wasm0g', emcc_args=['-O0', '-g'])
wasm1 = make_run('wasm1', emcc_args=['-O1'])
wasm2 = make_run('wasm2', emcc_args=['-O2'])
wasm2g = make_run('wasm2g', emcc_args=['-O2', '-g'])
wasm3 = make_run('wasm3', emcc_args=['-O3'])
wasms = make_run('wasms', emcc_args=['-Os'])
wasmz = make_run('wasmz', emcc_args=['-Oz'])
# LTO variants.
wasmlto0 = make_run('wasmlto0', emcc_args=['-flto', '-O0'])
wasmlto1 = make_run('wasmlto1', emcc_args=['-flto', '-O1'])
wasmlto2 = make_run('wasmlto2', emcc_args=['-flto', '-O2'])
wasmlto3 = make_run('wasmlto3', emcc_args=['-flto', '-O3'])
wasmltos = make_run('wasmltos', emcc_args=['-flto', '-Os'])
wasmltoz = make_run('wasmltoz', emcc_args=['-flto', '-Oz'])
# wasm2js (WASM=0) variants.
wasm2js0 = make_run('wasm2js0', emcc_args=['-O0'], settings={'WASM': 0})
wasm2js1 = make_run('wasm2js1', emcc_args=['-O1'], settings={'WASM': 0})
wasm2js2 = make_run('wasm2js2', emcc_args=['-O2'], settings={'WASM': 0})
wasm2js3 = make_run('wasm2js3', emcc_args=['-O3'], settings={'WASM': 0})
wasm2jss = make_run('wasm2jss', emcc_args=['-Os'], settings={'WASM': 0})
wasm2jsz = make_run('wasm2jsz', emcc_args=['-Oz'], settings={'WASM': 0})
# Feature-specific, safety-check, and sanitizer modes.
simd2 = make_run('simd2', emcc_args=['-O2', '-msimd128'])
bulkmem2 = make_run('bulkmem2', emcc_args=['-O2', '-mbulk-memory'])
wasm2s = make_run('wasm2s', emcc_args=['-O2'], settings={'SAFE_HEAP': 1})
wasm2ss = make_run('wasm2ss', emcc_args=['-O2'], settings={'STACK_OVERFLOW_CHECK': 2})
strict = make_run('strict', emcc_args=[], settings={'STRICT': 1})
lsan = make_run('lsan', emcc_args=['-fsanitize=leak', '--profiling'], settings={'ALLOW_MEMORY_GROWTH': 1})
asan = make_run('asan', emcc_args=['-fsanitize=address', '--profiling'], settings={'ALLOW_MEMORY_GROWTH': 1})
asani = make_run('asani', emcc_args=['-fsanitize=address', '--profiling', '--pre-js', os.path.join(os.path.dirname(__file__), 'asan-no-leak.js')],
                 settings={'ALLOW_MEMORY_GROWTH': 1})
lld = make_run('lld', emcc_args=[], settings={'LLD_REPORT_UNDEFINED': 1})
minimal0 = make_run('minimal0', emcc_args=['-g'], settings={'MINIMAL_RUNTIME': 1})
# Remove the base class so the test runner only collects the generated subclasses.
del TestCoreBase # noqa
| true | true |
f71c3617afc853a7653c8b2fa5b96fb74a081eed | 464 | py | Python | python/string-manipulation/alternating_characters.py | anishLearnsToCode/hackerrabk-interview-preparation-kit | 9d31eefe336e6dbef104ae78f06dd46686c28f84 | [
"MIT"
] | 2 | 2020-06-04T09:48:30.000Z | 2021-11-28T15:43:00.000Z | python/string-manipulation/alternating_characters.py | anishLearnsToCode/hackerrabk-interview-preparation-kit | 9d31eefe336e6dbef104ae78f06dd46686c28f84 | [
"MIT"
] | null | null | null | python/string-manipulation/alternating_characters.py | anishLearnsToCode/hackerrabk-interview-preparation-kit | 9d31eefe336e6dbef104ae78f06dd46686c28f84 | [
"MIT"
] | 1 | 2020-11-01T01:03:08.000Z | 2020-11-01T01:03:08.000Z | def minimum_deletions(string: str) -> int:
current_character = string[0]
count = 0
deletions = 0
for character in string:
if character == current_character:
count += 1
else:
current_character = character
deletions += count - 1
count = 1
return deletions + count - 1
# Script driver: the first stdin line gives the number of test cases; each
# following line is one string whose deletion count is printed.
test_cases = int(input())
for _ in range(test_cases):
    string = input()
    print(minimum_deletions(string))
| 24.421053 | 42 | 0.594828 | def minimum_deletions(string: str) -> int:
current_character = string[0]
count = 0
deletions = 0
for character in string:
if character == current_character:
count += 1
else:
current_character = character
deletions += count - 1
count = 1
return deletions + count - 1
test_cases = int(input())
for _ in range(test_cases):
string = input()
print(minimum_deletions(string))
| true | true |
f71c36d4dffacd0a0f9415f50fd7e32ca1fa2928 | 6,067 | py | Python | oops_fhir/r4/code_system/v3_substance_admin_substitution.py | Mikuana/oops_fhir | 77963315d123756b7d21ae881f433778096a1d25 | [
"MIT"
] | null | null | null | oops_fhir/r4/code_system/v3_substance_admin_substitution.py | Mikuana/oops_fhir | 77963315d123756b7d21ae881f433778096a1d25 | [
"MIT"
] | null | null | null | oops_fhir/r4/code_system/v3_substance_admin_substitution.py | Mikuana/oops_fhir | 77963315d123756b7d21ae881f433778096a1d25 | [
"MIT"
] | null | null | null | from pathlib import Path
from fhir.resources.codesystem import CodeSystem
from oops_fhir.utils import CodeSystemConcept
__all__ = ["v3substanceAdminSubstitution"]
# Parse the sibling JSON file (same stem as this module) into a CodeSystem resource.
_resource = CodeSystem.parse_file(Path(__file__).with_suffix(".json"))
class v3substanceAdminSubstitution:
    """
    v3 Code System substanceAdminSubstitution
    Identifies what sort of change is permitted or has occurred between the
    therapy that was ordered and the therapy that was/will be provided.
    Status: active - Version: 2018-08-12
    Copyright None
    http://terminology.hl7.org/CodeSystem/v3-substanceAdminSubstitution
    """
    # Root concept of the code system; the nested "concept" lists mirror the
    # code hierarchy (E > EC/TE with their leaf codes, plus top-level F and N).
    underscore_act_substance_admin_substitution_code = CodeSystemConcept(
        {
            "code": "_ActSubstanceAdminSubstitutionCode",
            "concept": [
                {
                    "code": "E",
                    "concept": [
                        {
                            "code": "EC",
                            "concept": [
                                {
                                    "code": "BC",
                                    "definition": "Description: \n \r\n\n Substitution occurred or is permitted between equivalent Brands but not Generics\r\n\n \n Examples: \n \r\n\n \n Zestril for Prinivil\n Coumadin for Jantoven",
                                    "display": "brand composition",
                                },
                                {
                                    "code": "G",
                                    "definition": "Description: Substitution occurred or is permitted between equivalent Generics but not Brands\r\n\n \n Examples: \n \r\n\n \n Lisnopril (Lupin Corp) for Lisnopril (Wockhardt Corp)",
                                    "display": "generic composition",
                                },
                            ],
                            "definition": "Description: \n \r\n\n Substitution occurred or is permitted with another product that is a:\r\n\n \n pharmaceutical alternative containing the same active ingredient but is formulated with different salt, ester\n pharmaceutical equivalent that has the same active ingredient, strength, dosage form and route of administration\n \n \n Examples: \n \r\n\n \n \n Pharmaceutical alternative: Erythromycin Ethylsuccinate for Erythromycin Stearate\n \n Pharmaceutical equivalent: Lisonpril for Zestril",
                            "display": "equivalent composition",
                        },
                        {
                            "code": "TE",
                            "concept": [
                                {
                                    "code": "TB",
                                    "definition": "Description: Substitution occurred or is permitted between therapeutically equivalent Brands but not Generics\r\n>\n Examples: \n \r\n\n \n Zantac for Tagamet",
                                    "display": "therapeutic brand",
                                },
                                {
                                    "code": "TG",
                                    "definition": "Description: Substitution occurred or is permitted between therapeutically equivalent Generics but not Brands\r\n>\n Examples: \n \r\n\n \n Ranitidine for cimetidine",
                                    "display": "therapeutic generic",
                                },
                            ],
                            "definition": "Description: Substitution occurred or is permitted with another product having the same therapeutic objective and safety profile.\r\n\n \n Examples: \n \r\n\n \n ranitidine for Tagamet",
                            "display": "therapeutic alternative",
                        },
                    ],
                    "definition": "Description: Substitution occurred or is permitted with another bioequivalent and therapeutically equivalent product.",
                    "display": "equivalent",
                },
                {
                    "code": "F",
                    "definition": "Description: This substitution was performed or is permitted based on formulary guidelines.",
                    "display": "formulary",
                },
                {
                    "code": "N",
                    "definition": "No substitution occurred or is permitted.",
                    "display": "none",
                },
            ],
            "definition": "Description: Substitution occurred or is permitted with another product that may potentially have different ingredients, but having the same biological and therapeutic effects.",
            "display": "ActSubstanceAdminSubstitutionCode",
            "property": [{"code": "notSelectable", "valueBoolean": True}],
        }
    )
    """
    ActSubstanceAdminSubstitutionCode
    Description: Substitution occurred or is permitted with another product that may potentially have different ingredients, but having the same biological and therapeutic effects.
    """
    # Binds the module-level parsed CodeSystem resource to this class.
    class Meta:
        resource = _resource
| 63.197917 | 890 | 0.447338 | from pathlib import Path
from fhir.resources.codesystem import CodeSystem
from oops_fhir.utils import CodeSystemConcept
__all__ = ["v3substanceAdminSubstitution"]
_resource = CodeSystem.parse_file(Path(__file__).with_suffix(".json"))
class v3substanceAdminSubstitution:
underscore_act_substance_admin_substitution_code = CodeSystemConcept(
{
"code": "_ActSubstanceAdminSubstitutionCode",
"concept": [
{
"code": "E",
"concept": [
{
"code": "EC",
"concept": [
{
"code": "BC",
"definition": "Description: \n \r\n\n Substitution occurred or is permitted between equivalent Brands but not Generics\r\n\n \n Examples: \n \r\n\n \n Zestril for Prinivil\n Coumadin for Jantoven",
"display": "brand composition",
},
{
"code": "G",
"definition": "Description: Substitution occurred or is permitted between equivalent Generics but not Brands\r\n\n \n Examples: \n \r\n\n \n Lisnopril (Lupin Corp) for Lisnopril (Wockhardt Corp)",
"display": "generic composition",
},
],
"definition": "Description: \n \r\n\n Substitution occurred or is permitted with another product that is a:\r\n\n \n pharmaceutical alternative containing the same active ingredient but is formulated with different salt, ester\n pharmaceutical equivalent that has the same active ingredient, strength, dosage form and route of administration\n \n \n Examples: \n \r\n\n \n \n Pharmaceutical alternative: Erythromycin Ethylsuccinate for Erythromycin Stearate\n \n Pharmaceutical equivalent: Lisonpril for Zestril",
"display": "equivalent composition",
},
{
"code": "TE",
"concept": [
{
"code": "TB",
"definition": "Description: Substitution occurred or is permitted between therapeutically equivalent Brands but not Generics\r\n>\n Examples: \n \r\n\n \n Zantac for Tagamet",
"display": "therapeutic brand",
},
{
"code": "TG",
"definition": "Description: Substitution occurred or is permitted between therapeutically equivalent Generics but not Brands\r\n>\n Examples: \n \r\n\n \n Ranitidine for cimetidine",
"display": "therapeutic generic",
},
],
"definition": "Description: Substitution occurred or is permitted with another product having the same therapeutic objective and safety profile.\r\n\n \n Examples: \n \r\n\n \n ranitidine for Tagamet",
"display": "therapeutic alternative",
},
],
"definition": "Description: Substitution occurred or is permitted with another bioequivalent and therapeutically equivalent product.",
"display": "equivalent",
},
{
"code": "F",
"definition": "Description: This substitution was performed or is permitted based on formulary guidelines.",
"display": "formulary",
},
{
"code": "N",
"definition": "No substitution occurred or is permitted.",
"display": "none",
},
],
"definition": "Description: Substitution occurred or is permitted with another product that may potentially have different ingredients, but having the same biological and therapeutic effects.",
"display": "ActSubstanceAdminSubstitutionCode",
"property": [{"code": "notSelectable", "valueBoolean": True}],
}
)
class Meta:
resource = _resource
| true | true |
f71c37bb391e2b9895264076e05236334fa0075c | 570 | py | Python | tests/test_poetcli.py | jkerola/poetcli | e307513fa073beaddd7247d944fcef7092ee95dd | [
"MIT"
] | null | null | null | tests/test_poetcli.py | jkerola/poetcli | e307513fa073beaddd7247d944fcef7092ee95dd | [
"MIT"
] | null | null | null | tests/test_poetcli.py | jkerola/poetcli | e307513fa073beaddd7247d944fcef7092ee95dd | [
"MIT"
] | null | null | null | from pytest import raises
from poetcli.main import PoetCLITest
def test_poetcli():
    """Running the CLI with no subcommands or arguments should exit cleanly."""
    with PoetCLITest() as cli:
        cli.run()
        assert cli.exit_code == 0
def test_poetcli_debug():
    """The --debug flag should switch the app into debug mode."""
    with PoetCLITest(argv=['--debug']) as cli:
        cli.run()
        assert cli.debug is True
def test_create_poem():
    """With an explicitly empty argv nothing should have been rendered."""
    with PoetCLITest(argv=[]) as cli:
        cli.run()
        assert cli.last_rendered is None
| 21.923077 | 55 | 0.64386 | from pytest import raises
from poetcli.main import PoetCLITest
def test_poetcli():
with PoetCLITest() as app:
app.run()
assert app.exit_code == 0
def test_poetcli_debug():
argv = ['--debug']
with PoetCLITest(argv=argv) as app:
app.run()
assert app.debug is True
def test_create_poem():
argv = []
with PoetCLITest(argv=argv) as app:
app.run()
output = app.last_rendered
assert output is None
| true | true |
f71c38457ce1146b10dc11311b97086931643887 | 2,269 | py | Python | cengal/hardware_info/cpu.py | FI-Mihej/Cengal | 516b9780da6ccc9168f8f89d7ba13dc29e24bc0b | [
"Apache-2.0"
] | 3 | 2018-07-23T18:48:58.000Z | 2021-07-18T14:17:20.000Z | cengal/hardware_info/cpu.py | FI-Mihej/Cengal | 516b9780da6ccc9168f8f89d7ba13dc29e24bc0b | [
"Apache-2.0"
] | null | null | null | cengal/hardware_info/cpu.py | FI-Mihej/Cengal | 516b9780da6ccc9168f8f89d7ba13dc29e24bc0b | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# coding=utf-8
# Copyright © 2017 ButenkoMS. All rights reserved. Contacts: <gtalk@butenkoms.space>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import cpuinfo
"""
Module Docstring
Docstrings: http://www.python.org/dev/peps/pep-0257/
"""
__author__ = "ButenkoMS <gtalk@butenkoms.space>"
__copyright__ = "Copyright © 2017 ButenkoMS. All rights reserved. Contacts: <gtalk@butenkoms.space>"
__credits__ = ["ButenkoMS <gtalk@butenkoms.space>", ]
__license__ = "Apache License, Version 2.0"
__version__ = "0.0.1"
__maintainer__ = "ButenkoMS <gtalk@butenkoms.space>"
__email__ = "gtalk@butenkoms.space"
__status__ = "Prototype"
# __status__ = "Development"
# __status__ = "Production"
def get_cpu_info()->dict:
    """Return the raw CPU information mapping reported by the cpuinfo package."""
    info = cpuinfo.get_cpu_info()
    return info
def get_l2_cache_size()->int:
size_text = cpuinfo.get_cpu_info()['l2_cache_size']
size_text_list = size_text.split()
size_text_list_size = len(size_text_list)
size_text_number = None
size_text_dimension = None
if 0 == size_text_list_size:
return 0
elif 1 == size_text_list_size:
return int(size_text_list)
elif 2 == size_text_list_size:
size_text_number, size_text_dimension = size_text_list
else:
return 0
size_text_number = int(size_text_number)
size_text_dimension = size_text_dimension.lower()
factor = 1
if 'kb' == size_text_dimension:
factor = 1024
elif 'mb' == size_text_dimension:
factor = 1024**2
elif 'gb' == size_text_dimension:
factor = 1024**3 # :)
return size_text_number * factor
def l2_cache_per_core()->int:
    """Return the L2 cache bytes per core (0 when the core count is falsy)."""
    core_count = cpuinfo.get_cpu_info()['count']
    if not core_count:
        return 0
    return int(get_l2_cache_size() / core_count)
| 31.082192 | 100 | 0.712649 |
import cpuinfo
__author__ = "ButenkoMS <gtalk@butenkoms.space>"
__copyright__ = "Copyright © 2017 ButenkoMS. All rights reserved. Contacts: <gtalk@butenkoms.space>"
__credits__ = ["ButenkoMS <gtalk@butenkoms.space>", ]
__license__ = "Apache License, Version 2.0"
__version__ = "0.0.1"
__maintainer__ = "ButenkoMS <gtalk@butenkoms.space>"
__email__ = "gtalk@butenkoms.space"
__status__ = "Prototype"
def get_cpu_info()->dict:
return cpuinfo.get_cpu_info()
def get_l2_cache_size()->int:
size_text = cpuinfo.get_cpu_info()['l2_cache_size']
size_text_list = size_text.split()
size_text_list_size = len(size_text_list)
size_text_number = None
size_text_dimension = None
if 0 == size_text_list_size:
return 0
elif 1 == size_text_list_size:
return int(size_text_list)
elif 2 == size_text_list_size:
size_text_number, size_text_dimension = size_text_list
else:
return 0
size_text_number = int(size_text_number)
size_text_dimension = size_text_dimension.lower()
factor = 1
if 'kb' == size_text_dimension:
factor = 1024
elif 'mb' == size_text_dimension:
factor = 1024**2
elif 'gb' == size_text_dimension:
factor = 1024**3
return size_text_number * factor
def l2_cache_per_core()->int:
core_count = cpuinfo.get_cpu_info()['count']
if core_count:
return int(get_l2_cache_size() / core_count)
else:
return 0
| true | true |
f71c38629e95ab04b4b2bedf6a45506097a47901 | 6,731 | py | Python | glfw/library.py | brpollock/pyGLFW | 1f78383e0b466ad4af83270ef9631d306665e714 | [
"MIT"
] | null | null | null | glfw/library.py | brpollock/pyGLFW | 1f78383e0b466ad4af83270ef9631d306665e714 | [
"MIT"
] | null | null | null | glfw/library.py | brpollock/pyGLFW | 1f78383e0b466ad4af83270ef9631d306665e714 | [
"MIT"
] | null | null | null | """
Python bindings for GLFW.
"""
from __future__ import print_function
from __future__ import division
from __future__ import unicode_literals
import ast
import ctypes
import glob
import os
import subprocess
import sys
import textwrap
def _find_library_candidates(library_names,
library_file_extensions,
library_search_paths):
"""
Finds and returns filenames which might be the library you are looking for.
"""
candidates = set()
for library_name in library_names:
for search_path in library_search_paths:
glob_query = os.path.join(search_path, '*'+library_name+'*')
for filename in glob.iglob(glob_query):
filename = os.path.realpath(filename)
if filename in candidates:
continue
basename = os.path.basename(filename)
if basename.startswith('lib'+library_name):
basename_end = basename[len('lib'+library_name):]
elif basename.startswith(library_name):
basename_end = basename[len(library_name):]
else:
continue
for file_extension in library_file_extensions:
if basename_end.startswith(file_extension):
if basename_end[len(file_extension):][:1] in ('', '.'):
candidates.add(filename)
if basename_end.endswith(file_extension):
basename_middle = basename_end[:-len(file_extension)]
if all(c in '0123456789.' for c in basename_middle):
candidates.add(filename)
return candidates
def _load_library(library_names, library_file_extensions,
                  library_search_paths, version_check_callback):
    """
    Finds, loads and returns the most recent version of the library.
    """
    candidates = _find_library_candidates(library_names,
                                          library_file_extensions,
                                          library_search_paths)
    versioned = []
    for candidate in candidates:
        version = version_check_callback(candidate)
        # Only GLFW 3.x and newer is usable by these bindings.
        if version is not None and version >= (3, 0, 0):
            versioned.append((version, candidate))
    if not versioned:
        return None
    # Highest (version, filename) tuple wins, matching sort()[-1] semantics.
    best_version, best_path = max(versioned)
    return ctypes.CDLL(best_path)
def _glfw_get_version(filename):
"""
Queries and returns the library version tuple or None by using a
subprocess.
"""
version_checker_source = '''
import sys
import ctypes
def get_version(library_handle):
"""
Queries and returns the library version tuple or None.
"""
major_value = ctypes.c_int(0)
major = ctypes.pointer(major_value)
minor_value = ctypes.c_int(0)
minor = ctypes.pointer(minor_value)
rev_value = ctypes.c_int(0)
rev = ctypes.pointer(rev_value)
if hasattr(library_handle, 'glfwGetVersion'):
library_handle.glfwGetVersion(major, minor, rev)
version = (major_value.value,
minor_value.value,
rev_value.value)
return version
else:
return None
try:
input_func = raw_input
except NameError:
input_func = input
filename = input_func().strip()
try:
library_handle = ctypes.CDLL(filename)
except OSError:
pass
else:
version = get_version(library_handle)
print(version)
'''
args = [sys.executable, '-c', textwrap.dedent(version_checker_source)]
process = subprocess.Popen(args, universal_newlines=True,
stdin=subprocess.PIPE, stdout=subprocess.PIPE)
out = process.communicate(filename)[0]
out = out.strip()
if out:
return eval(out)
else:
return None
def _get_library_search_paths():
    """
    Return the list of directories scanned for the shared library.

    The list starts with the current working directory (the empty string)
    and the package directory, followed by well-known system locations,
    and is extended with the entries of the platform's dynamic-linker
    path variable (DYLD_LIBRARY_PATH on macOS, LD_LIBRARY_PATH elsewhere)
    when that variable is set.
    """
    package_path = os.path.abspath(os.path.dirname(__file__))
    on_macos = sys.platform == 'darwin'
    search_paths = [
        '',
        package_path,
        sys.prefix + '/lib',
        '/usr/lib64',
        '/usr/local/lib64',
        '/usr/lib', '/usr/local/lib',
        '/run/current-system/sw/lib',
        '/usr/lib/x86_64-linux-gnu/',
        '/usr/lib/aarch64-linux-gnu/',
    ]
    if not on_macos:
        # manylinux2014 wheels ship separate X11 and Wayland builds; insert
        # the sub-directory matching the session type right after the cwd
        # entry (X11 is the default, even when XDG_SESSION_TYPE is unset).
        flavour = 'wayland' if os.environ.get('XDG_SESSION_TYPE') == 'wayland' else 'x11'
        search_paths.insert(1, os.path.join(package_path, flavour))
    ld_var = 'DYLD_LIBRARY_PATH' if on_macos else 'LD_LIBRARY_PATH'
    if ld_var in os.environ:
        search_paths.extend(os.environ[ld_var].split(':'))
    return search_paths
# Resolve the GLFW shared library at import time.  Order of preference:
# an explicit path in PYGLFW_LIBRARY; on Windows the default DLL search
# path, then the package directory (loading the matching MSVC runtime
# first), then conda's library directory; elsewhere a version-checked
# search over the usual system paths.  `glfw` stays None on failure.
if os.environ.get('PYGLFW_LIBRARY', ''):
    try:
        glfw = ctypes.CDLL(os.environ['PYGLFW_LIBRARY'])
    except OSError:
        glfw = None
elif sys.platform == 'win32':
    glfw = None # Will become `not None` on success.
    # try Windows default search path
    try:
        glfw = ctypes.CDLL('glfw3.dll')
    except OSError:
        pass
    # try package directory
    if glfw is None:
        try:
            if sys.maxsize > 2**32:
                # load Microsoft Visual C++ 2012 runtime on 64-bit systems
                msvcr = ctypes.CDLL(os.path.join(os.path.abspath(os.path.dirname(__file__)), 'msvcr110.dll'))
            else:
                # load Microsoft Visual C++ 2010 runtime on 32-bit systems
                msvcr = ctypes.CDLL(os.path.join(os.path.abspath(os.path.dirname(__file__)), 'msvcr100.dll'))
            glfw = ctypes.CDLL(os.path.join(os.path.abspath(os.path.dirname(__file__)), 'glfw3.dll'))
        except OSError:
            pass
    # try conda's default location on Windows
    if glfw is None:
        try:
            glfw = ctypes.CDLL(os.path.join(sys.prefix, 'Library', 'bin', 'glfw3.dll'))
        except OSError:
            pass
else:
    # POSIX/macOS: search the filesystem and load the newest version found.
    glfw = _load_library(['glfw', 'glfw3'], ['.so', '.dylib'],
                         _get_library_search_paths(), _glfw_get_version)
| 34.695876 | 109 | 0.588323 |
from __future__ import print_function
from __future__ import division
from __future__ import unicode_literals
import ctypes
import os
import glob
import sys
import subprocess
import textwrap
def _find_library_candidates(library_names,
                             library_file_extensions,
                             library_search_paths):
    """
    Return the set of real paths on the search paths whose file name looks
    like one of the requested libraries, e.g. ``libglfw.so.3.3`` or
    ``glfw3.dylib``.
    """
    def _looks_like_library(remainder):
        # ``remainder`` is the basename with the library-name prefix
        # stripped; accept "<ext>" / "<ext>.<version>" as well as
        # "<version><ext>" where <version> is only digits and dots.
        for extension in library_file_extensions:
            if remainder.startswith(extension):
                if remainder[len(extension):][:1] in ('', '.'):
                    return True
            if remainder.endswith(extension):
                version_part = remainder[:-len(extension)]
                if all(ch in '0123456789.' for ch in version_part):
                    return True
        return False

    candidates = set()
    for name in library_names:
        for directory in library_search_paths:
            pattern = os.path.join(directory, '*' + name + '*')
            for match in glob.iglob(pattern):
                real_path = os.path.realpath(match)
                if real_path in candidates:
                    continue
                base = os.path.basename(real_path)
                # Prefer the "lib<name>" prefix over the bare name.
                for prefix in ('lib' + name, name):
                    if base.startswith(prefix):
                        if _looks_like_library(base[len(prefix):]):
                            candidates.add(real_path)
                        break
    return candidates
def _load_library(library_names, library_file_extensions,
                  library_search_paths, version_check_callback):
    """
    Locate, version-check and load the newest acceptable library build.

    Returns a ``ctypes.CDLL`` handle, or ``None`` when no candidate with
    a verifiable version >= 3.0.0 exists on the search paths.
    """
    usable = []
    for candidate in _find_library_candidates(library_names,
                                              library_file_extensions,
                                              library_search_paths):
        reported_version = version_check_callback(candidate)
        if reported_version is None or reported_version < (3, 0, 0):
            continue
        usable.append((reported_version, candidate))
    if not usable:
        return None
    # Largest (version, path) tuple corresponds to the most recent release.
    return ctypes.CDLL(max(usable)[1])
def _glfw_get_version(filename):
    """
    Query and return the library's version tuple, or None, via a subprocess.

    A child interpreter loads the candidate library so that a broken or
    incompatible shared object cannot crash this process.  The child prints
    the repr of the version tuple (or ``None``); the output is parsed with
    ``ast.literal_eval`` instead of ``eval`` so a misbehaving child cannot
    inject arbitrary code into the parent.
    """
    import ast
    version_checker_source = '''
        import sys
        import ctypes
        def get_version(library_handle):
            """
            Queries and returns the library version tuple or None.
            """
            major_value = ctypes.c_int(0)
            major = ctypes.pointer(major_value)
            minor_value = ctypes.c_int(0)
            minor = ctypes.pointer(minor_value)
            rev_value = ctypes.c_int(0)
            rev = ctypes.pointer(rev_value)
            if hasattr(library_handle, 'glfwGetVersion'):
                library_handle.glfwGetVersion(major, minor, rev)
                version = (major_value.value,
                           minor_value.value,
                           rev_value.value)
                return version
            else:
                return None
        try:
            input_func = raw_input
        except NameError:
            input_func = input
        filename = input_func().strip()
        try:
            library_handle = ctypes.CDLL(filename)
        except OSError:
            pass
        else:
            version = get_version(library_handle)
            print(version)
    '''
    args = [sys.executable, '-c', textwrap.dedent(version_checker_source)]
    process = subprocess.Popen(args, universal_newlines=True,
                               stdin=subprocess.PIPE, stdout=subprocess.PIPE)
    # Candidate path goes to the child on stdin; stdout carries the result
    # (empty when the library could not be loaded at all).
    out = process.communicate(filename)[0]
    out = out.strip()
    if out:
        # Safe parse of "(major, minor, rev)" or "None" from the child.
        return ast.literal_eval(out)
    else:
        return None
def _get_library_search_paths():
    """
    Build the ordered list of directories searched for the shared library:
    current working directory first, then the package directory (plus its
    X11/Wayland sub-directory on non-macOS platforms), common system
    locations, and finally any entries from the platform's dynamic-linker
    path variable.
    """
    package_path = os.path.abspath(os.path.dirname(__file__))
    system_paths = [
        sys.prefix + '/lib',
        '/usr/lib64',
        '/usr/local/lib64',
        '/usr/lib', '/usr/local/lib',
        '/run/current-system/sw/lib',
        '/usr/lib/x86_64-linux-gnu/',
        '/usr/lib/aarch64-linux-gnu/',
    ]
    search_paths = ['', package_path] + system_paths
    if sys.platform != 'darwin':
        # Wheels carry separate X11 and Wayland builds; X11 is the default.
        flavour = 'wayland' if os.environ.get('XDG_SESSION_TYPE') == 'wayland' else 'x11'
        search_paths.insert(1, os.path.join(package_path, flavour))
    ld_var = 'DYLD_LIBRARY_PATH' if sys.platform == 'darwin' else 'LD_LIBRARY_PATH'
    if ld_var in os.environ:
        search_paths.extend(os.environ[ld_var].split(':'))
    return search_paths
# Resolve the GLFW shared library at import time; `glfw` stays None when
# every strategy fails.
if os.environ.get('PYGLFW_LIBRARY', ''):
    # An explicit path in the PYGLFW_LIBRARY environment variable wins.
    try:
        glfw = ctypes.CDLL(os.environ['PYGLFW_LIBRARY'])
    except OSError:
        glfw = None
elif sys.platform == 'win32':
    glfw = None
    # First try the default Windows DLL search path.
    try:
        glfw = ctypes.CDLL('glfw3.dll')
    except OSError:
        pass
    # Then the package directory, loading the matching MSVC runtime first
    # (msvcr110 on 64-bit interpreters, msvcr100 on 32-bit ones).
    if glfw is None:
        try:
            if sys.maxsize > 2**32:
                msvcr = ctypes.CDLL(os.path.join(os.path.abspath(os.path.dirname(__file__)), 'msvcr110.dll'))
            else:
                msvcr = ctypes.CDLL(os.path.join(os.path.abspath(os.path.dirname(__file__)), 'msvcr100.dll'))
            glfw = ctypes.CDLL(os.path.join(os.path.abspath(os.path.dirname(__file__)), 'glfw3.dll'))
        except OSError:
            pass
    # Finally conda's default library location on Windows.
    if glfw is None:
        try:
            glfw = ctypes.CDLL(os.path.join(sys.prefix, 'Library', 'bin', 'glfw3.dll'))
        except OSError:
            pass
else:
    # POSIX/macOS: version-checked filesystem search, newest build wins.
    glfw = _load_library(['glfw', 'glfw3'], ['.so', '.dylib'],
                         _get_library_search_paths(), _glfw_get_version)
| true | true |
f71c38e7407523eed8a014449cd499676148e894 | 1,456 | py | Python | azure-servicefabric/azure/servicefabric/models/scaling_mechanism_description_py3.py | JonathanGailliez/azure-sdk-for-python | f0f051bfd27f8ea512aea6fc0c3212ee9ee0029b | [
"MIT"
] | 1 | 2021-09-07T18:36:04.000Z | 2021-09-07T18:36:04.000Z | azure-servicefabric/azure/servicefabric/models/scaling_mechanism_description_py3.py | JonathanGailliez/azure-sdk-for-python | f0f051bfd27f8ea512aea6fc0c3212ee9ee0029b | [
"MIT"
] | 2 | 2019-10-02T23:37:38.000Z | 2020-10-02T01:17:31.000Z | azure-servicefabric/azure/servicefabric/models/scaling_mechanism_description_py3.py | JonathanGailliez/azure-sdk-for-python | f0f051bfd27f8ea512aea6fc0c3212ee9ee0029b | [
"MIT"
] | 1 | 2018-10-16T13:08:23.000Z | 2018-10-16T13:08:23.000Z | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class ScalingMechanismDescription(Model):
    """Describes the mechanism for performing a scaling operation.

    This type is polymorphic: ``kind`` is the discriminator (see
    ``_subtype_map``), so you will normally work with one of the known
    sub-classes -- PartitionInstanceCountScaleMechanism or
    AddRemoveIncrementalNamedPartitionScalingMechanism -- rather than
    this class directly.

    All required parameters must be populated in order to send to Azure.

    :param kind: Required. Constant filled by server.
    :type kind: str
    """
    # 'kind' must always be present for (de)serialization to succeed.
    _validation = {'kind': {'required': True}}
    # Python attribute -> wire-format key/type mapping.
    _attribute_map = {'kind': {'key': 'Kind', 'type': 'str'}}
    # Discriminator value -> concrete sub-class used during deserialization.
    _subtype_map = {
        'kind': {
            'PartitionInstanceCount': 'PartitionInstanceCountScaleMechanism',
            'AddRemoveIncrementalNamedPartition':
                'AddRemoveIncrementalNamedPartitionScalingMechanism',
        }
    }

    def __init__(self, **kwargs) -> None:
        super(ScalingMechanismDescription, self).__init__(**kwargs)
        # Filled in by the server / by concrete sub-classes.
        self.kind = None
| 33.860465 | 174 | 0.644231 |
from msrest.serialization import Model
class ScalingMechanismDescription(Model):
    """Describes the mechanism for performing a scaling operation.

    Polymorphic base type: ``kind`` acts as the discriminator (see
    ``_subtype_map``), so payloads deserialize into one of the known
    sub-classes (PartitionInstanceCountScaleMechanism or
    AddRemoveIncrementalNamedPartitionScalingMechanism) rather than
    this class.

    :param kind: Required. Constant filled by server.
    :type kind: str
    """
    # 'kind' must always be present for (de)serialization to succeed.
    _validation = {
        'kind': {'required': True},
    }
    # Python attribute -> wire-format key/type mapping.
    _attribute_map = {
        'kind': {'key': 'Kind', 'type': 'str'},
    }
    # Discriminator value -> concrete sub-class used during deserialization.
    _subtype_map = {
        'kind': {'PartitionInstanceCount': 'PartitionInstanceCountScaleMechanism', 'AddRemoveIncrementalNamedPartition': 'AddRemoveIncrementalNamedPartitionScalingMechanism'}
    }
    def __init__(self, **kwargs) -> None:
        """Initialize via the base Model; ``kind`` is set by sub-classes/server."""
        super(ScalingMechanismDescription, self).__init__(**kwargs)
        self.kind = None
| true | true |
f71c39ebffaad67624f3ef9efc20843ab2437935 | 4,779 | py | Python | selfdrive/controls/lib/long_mpc.py | DS1SQM/HKG082_SCC_2 | c5b1c7fb593a69b1bc585eaa9947c0a76f381bbc | [
"MIT"
] | null | null | null | selfdrive/controls/lib/long_mpc.py | DS1SQM/HKG082_SCC_2 | c5b1c7fb593a69b1bc585eaa9947c0a76f381bbc | [
"MIT"
] | null | null | null | selfdrive/controls/lib/long_mpc.py | DS1SQM/HKG082_SCC_2 | c5b1c7fb593a69b1bc585eaa9947c0a76f381bbc | [
"MIT"
] | 1 | 2020-10-20T12:26:33.000Z | 2020-10-20T12:26:33.000Z | import os
import math
import cereal.messaging as messaging
from common.numpy_fast import clip, interp
from selfdrive.swaglog import cloudlog
from common.realtime import sec_since_boot
from selfdrive.controls.lib.radar_helpers import _LEAD_ACCEL_TAU
from selfdrive.controls.lib.longitudinal_mpc import libmpc_py
from selfdrive.controls.lib.drive_helpers import MPC_COST_LONG
# Detailed per-iteration MPC debug publishing is opt-in via the LOG_MPC
# environment variable (any non-empty value enables it).
LOG_MPC = os.environ.get('LOG_MPC', False)
class LongitudinalMpc():
  """Wrapper around one native longitudinal MPC solver instance.

  Keeps the solver's C state/solution structs alive across control
  iterations, feeds them the ego state and (possibly faked) lead-car
  data, extracts the planned speed/acceleration, and re-initialises the
  solver whenever the solution degenerates (NaNs, driving backwards, or
  passing through the lead).
  """
  def __init__(self, mpc_id):
    self.mpc_id = mpc_id
    self.setup_mpc()
    self.v_mpc = 0.0          # planned speed for the next step
    self.v_mpc_future = 0.0   # planned speed further out in the horizon
    self.a_mpc = 0.0          # planned acceleration for the next step
    self.v_cruise = 0.0
    self.prev_lead_status = False  # was a lead tracked on the previous step?
    self.prev_lead_x = 0.0         # lead distance seen on the previous step
    self.new_lead = False
    self.last_cloudlog_t = 0.0     # rate limiter for reset warnings
    self.n_its = 0                 # QP iterations of the last solve
    self.duration = 0              # wall time of the last solve, ns
    # scc smoother
    self.cruise_gap = 0
  def publish(self, pm):
    """Send a 'liveLongitudinalMpc' debug message via pm (only if LOG_MPC)."""
    if LOG_MPC:
      qp_iterations = max(0, self.n_its)
      dat = messaging.new_message('liveLongitudinalMpc')
      dat.liveLongitudinalMpc.xEgo = list(self.mpc_solution[0].x_ego)
      dat.liveLongitudinalMpc.vEgo = list(self.mpc_solution[0].v_ego)
      dat.liveLongitudinalMpc.aEgo = list(self.mpc_solution[0].a_ego)
      dat.liveLongitudinalMpc.xLead = list(self.mpc_solution[0].x_l)
      dat.liveLongitudinalMpc.vLead = list(self.mpc_solution[0].v_l)
      dat.liveLongitudinalMpc.cost = self.mpc_solution[0].cost
      dat.liveLongitudinalMpc.aLeadTau = self.a_lead_tau
      dat.liveLongitudinalMpc.qpIterations = qp_iterations
      dat.liveLongitudinalMpc.mpcId = self.mpc_id
      dat.liveLongitudinalMpc.calculationTime = self.duration
      pm.send('liveLongitudinalMpc', dat)
  def setup_mpc(self):
    """Create the native solver with standard cost weights and fresh C structs."""
    ffi, self.libmpc = libmpc_py.get_libmpc(self.mpc_id)
    self.libmpc.init(MPC_COST_LONG.TTC, MPC_COST_LONG.DISTANCE,
                     MPC_COST_LONG.ACCELERATION, MPC_COST_LONG.JERK)
    self.mpc_solution = ffi.new("log_t *")
    self.cur_state = ffi.new("state_t *")
    self.cur_state[0].v_ego = 0
    self.cur_state[0].a_ego = 0
    self.a_lead_tau = _LEAD_ACCEL_TAU
  def set_cur_state(self, v, a):
    """Overwrite the solver's ego speed/acceleration with measured values."""
    self.cur_state[0].v_ego = v
    self.cur_state[0].a_ego = a
  def update(self, CS, lead):
    """Run one MPC step for car state CS and radar lead (lead may be None)."""
    v_ego = CS.vEgo
    # Setup current mpc state
    self.cur_state[0].x_ego = 0.0
    if lead is not None and lead.status:
      x_lead = max(0, lead.dRel - 0.5)  # keep 0.5 m margin, never negative
      v_lead = max(0.0, lead.vLead)
      a_lead = lead.aLeadK
      # Treat a crawling or hard-braking lead as standing still.
      if (v_lead < 0.1 or -a_lead / 2.0 > v_lead):
        v_lead = 0.0
        a_lead = 0.0
      # Time constant for the lead-acceleration model (see _LEAD_ACCEL_TAU),
      # lower-bounded for slow leads.
      self.a_lead_tau = max(lead.aLeadTau, (a_lead ** 2 * math.pi) / (2 * (v_lead + 0.01) ** 2))
      self.new_lead = False
      # Re-seed the solver when a lead (re)appears or jumps by more than 2.5 m.
      if not self.prev_lead_status or abs(x_lead - self.prev_lead_x) > 2.5:
        self.libmpc.init_with_simulation(self.v_mpc, x_lead, v_lead, a_lead, self.a_lead_tau)
        self.new_lead = True
      self.prev_lead_status = True
      self.prev_lead_x = x_lead
      self.cur_state[0].x_l = x_lead
      self.cur_state[0].v_l = v_lead
    else:
      self.prev_lead_status = False
      # Fake a fast lead car, so mpc keeps running
      self.cur_state[0].x_l = 50.0
      self.cur_state[0].v_l = v_ego + 10.0
      a_lead = 0.0
      self.a_lead_tau = _LEAD_ACCEL_TAU
    # Calculate mpc
    t = sec_since_boot()
    # scc smoother
    cruise_gap = int(clip(CS.cruiseGap, 1., 4.))
    # TR = interp(float(cruise_gap), [1., 2., 3., 4.], [1.0, 1.3, 1.6, 2.0])
    # Desired headway grows with speed: 1.0 at v_ego=3 up to 2.5 at v_ego=30
    # (the cruise-gap based mapping above is disabled).
    TR = interp(v_ego, [3., 30.], [1., 2.5])
    # Remember the driver's cruise-gap setting (not otherwise used here).
    if self.cruise_gap != cruise_gap:
      self.cruise_gap = cruise_gap
    self.n_its = self.libmpc.run_mpc(self.cur_state, self.mpc_solution, self.a_lead_tau, a_lead, TR)
    self.duration = int((sec_since_boot() - t) * 1e9)
    # Get solution. MPC timestep is 0.2 s, so interpolation to 0.05 s is needed
    self.v_mpc = self.mpc_solution[0].v_ego[1]
    self.a_mpc = self.mpc_solution[0].a_ego[1]
    self.v_mpc_future = self.mpc_solution[0].v_ego[10]
    # Reset if NaN or goes through lead car
    crashing = any(lead - ego < -50 for (lead, ego) in zip(self.mpc_solution[0].x_l, self.mpc_solution[0].x_ego))
    nans = any(math.isnan(x) for x in self.mpc_solution[0].v_ego)
    backwards = min(self.mpc_solution[0].v_ego) < -0.01
    if ((backwards or crashing) and self.prev_lead_status) or nans:
      # Rate-limit the warning to at most once every 5 s.
      if t > self.last_cloudlog_t + 5.0:
        self.last_cloudlog_t = t
        cloudlog.warning("Longitudinal mpc %d reset - backwards: %s crashing: %s nan: %s" % (
          self.mpc_id, backwards, crashing, nans))
      self.libmpc.init(MPC_COST_LONG.TTC, MPC_COST_LONG.DISTANCE,
                       MPC_COST_LONG.ACCELERATION, MPC_COST_LONG.JERK)
      self.cur_state[0].v_ego = v_ego
      self.cur_state[0].a_ego = 0.0
      self.v_mpc = v_ego
      self.a_mpc = CS.aEgo
      self.prev_lead_status = False
| 35.139706 | 113 | 0.669178 | import os
import math
import cereal.messaging as messaging
from common.numpy_fast import clip, interp
from selfdrive.swaglog import cloudlog
from common.realtime import sec_since_boot
from selfdrive.controls.lib.radar_helpers import _LEAD_ACCEL_TAU
from selfdrive.controls.lib.longitudinal_mpc import libmpc_py
from selfdrive.controls.lib.drive_helpers import MPC_COST_LONG
# Detailed per-iteration MPC debug publishing is opt-in via the LOG_MPC
# environment variable (any non-empty value enables it).
LOG_MPC = os.environ.get('LOG_MPC', False)
class LongitudinalMpc():
  """Wrapper around one native longitudinal MPC solver instance.

  Keeps the solver's C state/solution structs alive across control
  iterations, feeds them the ego state and (possibly faked) lead-car
  data, extracts the planned speed/acceleration, and re-initialises the
  solver whenever the solution degenerates (NaNs, driving backwards, or
  passing through the lead).
  """
  def __init__(self, mpc_id):
    self.mpc_id = mpc_id
    self.setup_mpc()
    self.v_mpc = 0.0          # planned speed for the next step
    self.v_mpc_future = 0.0   # planned speed further out in the horizon
    self.a_mpc = 0.0          # planned acceleration for the next step
    self.v_cruise = 0.0
    self.prev_lead_status = False  # was a lead tracked on the previous step?
    self.prev_lead_x = 0.0         # lead distance seen on the previous step
    self.new_lead = False
    self.last_cloudlog_t = 0.0     # rate limiter for reset warnings
    self.n_its = 0                 # QP iterations of the last solve
    self.duration = 0              # wall time of the last solve, ns
    self.cruise_gap = 0            # last seen driver cruise-gap setting
  def publish(self, pm):
    """Send a 'liveLongitudinalMpc' debug message via pm (only if LOG_MPC)."""
    if LOG_MPC:
      qp_iterations = max(0, self.n_its)
      dat = messaging.new_message('liveLongitudinalMpc')
      dat.liveLongitudinalMpc.xEgo = list(self.mpc_solution[0].x_ego)
      dat.liveLongitudinalMpc.vEgo = list(self.mpc_solution[0].v_ego)
      dat.liveLongitudinalMpc.aEgo = list(self.mpc_solution[0].a_ego)
      dat.liveLongitudinalMpc.xLead = list(self.mpc_solution[0].x_l)
      dat.liveLongitudinalMpc.vLead = list(self.mpc_solution[0].v_l)
      dat.liveLongitudinalMpc.cost = self.mpc_solution[0].cost
      dat.liveLongitudinalMpc.aLeadTau = self.a_lead_tau
      dat.liveLongitudinalMpc.qpIterations = qp_iterations
      dat.liveLongitudinalMpc.mpcId = self.mpc_id
      dat.liveLongitudinalMpc.calculationTime = self.duration
      pm.send('liveLongitudinalMpc', dat)
  def setup_mpc(self):
    """Create the native solver with standard cost weights and fresh C structs."""
    ffi, self.libmpc = libmpc_py.get_libmpc(self.mpc_id)
    self.libmpc.init(MPC_COST_LONG.TTC, MPC_COST_LONG.DISTANCE,
                     MPC_COST_LONG.ACCELERATION, MPC_COST_LONG.JERK)
    self.mpc_solution = ffi.new("log_t *")
    self.cur_state = ffi.new("state_t *")
    self.cur_state[0].v_ego = 0
    self.cur_state[0].a_ego = 0
    self.a_lead_tau = _LEAD_ACCEL_TAU
  def set_cur_state(self, v, a):
    """Overwrite the solver's ego speed/acceleration with measured values."""
    self.cur_state[0].v_ego = v
    self.cur_state[0].a_ego = a
  def update(self, CS, lead):
    """Run one MPC step for car state CS and radar lead (lead may be None)."""
    v_ego = CS.vEgo
    self.cur_state[0].x_ego = 0.0
    if lead is not None and lead.status:
      x_lead = max(0, lead.dRel - 0.5)  # keep 0.5 m margin, never negative
      v_lead = max(0.0, lead.vLead)
      a_lead = lead.aLeadK
      # Treat a crawling or hard-braking lead as standing still.
      if (v_lead < 0.1 or -a_lead / 2.0 > v_lead):
        v_lead = 0.0
        a_lead = 0.0
      # Time constant for the lead-acceleration model (see _LEAD_ACCEL_TAU),
      # lower-bounded for slow leads.
      self.a_lead_tau = max(lead.aLeadTau, (a_lead ** 2 * math.pi) / (2 * (v_lead + 0.01) ** 2))
      self.new_lead = False
      # Re-seed the solver when a lead (re)appears or jumps by more than 2.5 m.
      if not self.prev_lead_status or abs(x_lead - self.prev_lead_x) > 2.5:
        self.libmpc.init_with_simulation(self.v_mpc, x_lead, v_lead, a_lead, self.a_lead_tau)
        self.new_lead = True
      self.prev_lead_status = True
      self.prev_lead_x = x_lead
      self.cur_state[0].x_l = x_lead
      self.cur_state[0].v_l = v_lead
    else:
      self.prev_lead_status = False
      # No lead: fake a fast one far ahead so the solver keeps running.
      self.cur_state[0].x_l = 50.0
      self.cur_state[0].v_l = v_ego + 10.0
      a_lead = 0.0
      self.a_lead_tau = _LEAD_ACCEL_TAU
    t = sec_since_boot()
    cruise_gap = int(clip(CS.cruiseGap, 1., 4.))
    # Desired headway grows with speed: 1.0 at v_ego=3 up to 2.5 at v_ego=30.
    TR = interp(v_ego, [3., 30.], [1., 2.5])
    # Remember the driver's cruise-gap setting (not otherwise used here).
    if self.cruise_gap != cruise_gap:
      self.cruise_gap = cruise_gap
    self.n_its = self.libmpc.run_mpc(self.cur_state, self.mpc_solution, self.a_lead_tau, a_lead, TR)
    self.duration = int((sec_since_boot() - t) * 1e9)
    # First planned step of the solution becomes the setpoint.
    self.v_mpc = self.mpc_solution[0].v_ego[1]
    self.a_mpc = self.mpc_solution[0].a_ego[1]
    self.v_mpc_future = self.mpc_solution[0].v_ego[10]
    # Reset the solver if the plan is degenerate: NaNs, negative speed, or
    # the planned ego path passing far through the lead.
    crashing = any(lead - ego < -50 for (lead, ego) in zip(self.mpc_solution[0].x_l, self.mpc_solution[0].x_ego))
    nans = any(math.isnan(x) for x in self.mpc_solution[0].v_ego)
    backwards = min(self.mpc_solution[0].v_ego) < -0.01
    if ((backwards or crashing) and self.prev_lead_status) or nans:
      # Rate-limit the warning to at most once every 5 s.
      if t > self.last_cloudlog_t + 5.0:
        self.last_cloudlog_t = t
        cloudlog.warning("Longitudinal mpc %d reset - backwards: %s crashing: %s nan: %s" % (
          self.mpc_id, backwards, crashing, nans))
      self.libmpc.init(MPC_COST_LONG.TTC, MPC_COST_LONG.DISTANCE,
                       MPC_COST_LONG.ACCELERATION, MPC_COST_LONG.JERK)
      self.cur_state[0].v_ego = v_ego
      self.cur_state[0].a_ego = 0.0
      self.v_mpc = v_ego
      self.a_mpc = CS.aEgo
      self.prev_lead_status = False
| true | true |
f71c39f0319329a2f4f59ab30550497ff4c60f40 | 6,860 | py | Python | script/bfe.py | tiancity-NJU/REID | 125a520a9c0b94440a7757e6f3c3c8bf976906ec | [
"MIT"
] | 1 | 2020-06-15T07:50:05.000Z | 2020-06-15T07:50:05.000Z | script/bfe.py | tiancity-NJU/REID | 125a520a9c0b94440a7757e6f3c3c8bf976906ec | [
"MIT"
] | null | null | null | script/bfe.py | tiancity-NJU/REID | 125a520a9c0b94440a7757e6f3c3c8bf976906ec | [
"MIT"
] | 4 | 2019-04-09T13:10:58.000Z | 2020-03-06T15:22:38.000Z | # encoding: utf-8
import os
import sys
from os import path as osp
from pprint import pprint
import numpy as np
import torch
from tensorboardX import SummaryWriter
from torch import nn
from torch.backends import cudnn
from torch.utils.data import DataLoader
sys.path.insert(0,os.path.abspath(os.path.dirname(__file__)+os.sep+'..'))
from config import opt
from datasets import data_manager
from datasets.data_loader import ImageData
from datasets.samplers import RandomIdentitySampler
from models.networks import ResNetBuilder, IDE, Resnet, BFE
#from models.BFE import BFE
from trainers.evaluator import ResNetEvaluator
from trainers.trainer import cls_tripletTrainer
from utils.loss import CrossEntropyLabelSmooth, TripletLoss, Margin
from utils.LiftedStructure import LiftedStructureLoss
from utils.DistWeightDevianceLoss import DistWeightBinDevianceLoss
from utils.serialization import Logger, save_checkpoint
from utils.transforms import TestTransform, TrainTransform
def train(**kwargs):
    """Train (or, with ``opt.evaluate``, only evaluate) the BFE re-id model.

    All keyword arguments are merged into the global ``opt`` configuration
    via ``opt._parse``; dataset choice, batch sizes, loss, optimizer,
    schedule and output directory are all read from ``opt``.  Checkpoints
    go to ``opt.save_dir``, training curves to a TensorBoard log below it,
    and the best rank-1 score seen is printed at the end.
    """
    opt._parse(kwargs)
    opt.model_name = 'bfe'
    # set random seed and cudnn benchmark
    torch.manual_seed(opt.seed)
    os.makedirs(opt.save_dir, exist_ok=True)
    use_gpu = torch.cuda.is_available()
    # Mirror all prints into a persistent log file inside the save dir.
    sys.stdout = Logger(osp.join(opt.save_dir, 'log_train.txt'))
    print('=========user config==========')
    pprint(opt._state_dict())
    print('============end===============')
    if use_gpu:
        print('currently using GPU')
        cudnn.benchmark = True
        torch.cuda.manual_seed_all(opt.seed)
    else:
        print('currently using cpu')
    print('initializing dataset {}'.format(opt.dataset))
    dataset = data_manager.init_dataset(name=opt.dataset, mode=opt.mode)
    pin_memory = True if use_gpu else False
    summary_writer = SummaryWriter(osp.join(opt.save_dir, 'tensorboard_log'))
    # Data: training loader with identity-based sampling, plus query/gallery
    # loaders (and horizontally flipped variants) for evaluation.
    trainloader = DataLoader(
        ImageData(dataset.train, TrainTransform(opt.datatype)),
        sampler=RandomIdentitySampler(dataset.train, opt.num_instances),
        batch_size=opt.train_batch, num_workers=opt.workers,
        pin_memory=pin_memory, drop_last=True
    )
    queryloader = DataLoader(
        ImageData(dataset.query, TestTransform(opt.datatype)),
        batch_size=opt.test_batch, num_workers=opt.workers,
        pin_memory=pin_memory
    )
    galleryloader = DataLoader(
        ImageData(dataset.gallery, TestTransform(opt.datatype)),
        batch_size=opt.test_batch, num_workers=opt.workers,
        pin_memory=pin_memory
    )
    queryFliploader = DataLoader(
        ImageData(dataset.query, TestTransform(opt.datatype, True)),
        batch_size=opt.test_batch, num_workers=opt.workers,
        pin_memory=pin_memory
    )
    galleryFliploader = DataLoader(
        ImageData(dataset.gallery, TestTransform(opt.datatype, True)),
        batch_size=opt.test_batch, num_workers=opt.workers,
        pin_memory=pin_memory
    )
    print('initializing model ...')
    # 1.0 / 0.33 are BFE hyper-parameters; see models.networks.BFE for their
    # exact meaning.
    model = BFE(dataset.num_train_pids, 1.0, 0.33)
    optim_policy = model.get_optim_policy()
    if opt.pretrained_model:
        state_dict = torch.load(opt.pretrained_model)['state_dict']
        # state_dict = {k: v for k, v in state_dict.items() \
        #        if not ('reduction' in k or 'softmax' in k)}
        # second arg is strict=False: tolerate missing/unexpected keys
        model.load_state_dict(state_dict, False)
        print('load pretrained model ' + opt.pretrained_model)
    print('model size: {:.5f}M'.format(sum(p.numel() for p in model.parameters()) / 1e6))
    if use_gpu:
        model = nn.DataParallel(model).cuda()
    reid_evaluator = ResNetEvaluator(model)
    # Evaluation-only mode: score the current weights and exit.
    if opt.evaluate:
        reid_evaluator.evaluate(queryloader, galleryloader,
                                queryFliploader, galleryFliploader, re_ranking=opt.re_ranking, savefig=opt.savefig)
        return
    # xent_criterion = nn.CrossEntropyLoss()
    xent_criterion = CrossEntropyLabelSmooth(dataset.num_train_pids)
    # NOTE(review): an unrecognised opt.loss leaves embedding_criterion
    # undefined and fails later inside criterion().
    if opt.loss == 'triplet':
        embedding_criterion = TripletLoss(opt.margin)
    elif opt.loss == 'lifted':
        embedding_criterion = LiftedStructureLoss(hard_mining=True)
    elif opt.loss == 'weight':
        embedding_criterion = Margin()
    def criterion(triplet_y, softmax_y, labels):
        # Total loss: metric-embedding loss on every triplet head plus
        # label-smoothed cross entropy on every softmax head.
        losses = [embedding_criterion(output, labels)[0] for output in triplet_y] + \
                 [xent_criterion(output, labels) for output in softmax_y]
        loss = sum(losses)
        return loss
    # get optimizer
    if opt.optim == "sgd":
        optimizer = torch.optim.SGD(optim_policy, lr=opt.lr, momentum=0.9, weight_decay=opt.weight_decay)
    else:
        optimizer = torch.optim.Adam(optim_policy, lr=opt.lr, weight_decay=opt.weight_decay)
    start_epoch = opt.start_epoch
    # get trainer and evaluator
    reid_trainer = cls_tripletTrainer(opt, model, optimizer, criterion, summary_writer)
    def adjust_lr(optimizer, ep):
        # Step schedule: 5-epoch ramp up to 1e-3 over the first 50 epochs,
        # then decays at epochs 200 and 300.
        if ep < 50:
            lr = 1e-4 * (ep // 5 + 1)
        elif ep < 200:
            lr = 1e-3
        elif ep < 300:
            lr = 1e-4
        else:
            lr = 1e-5
        for p in optimizer.param_groups:
            p['lr'] = lr
    # start training
    best_rank1 = opt.best_rank
    best_epoch = 0
    for epoch in range(start_epoch, opt.max_epoch):
        if opt.adjust_lr:
            adjust_lr(optimizer, epoch + 1)
        reid_trainer.train(epoch, trainloader)
        # Evaluate/checkpoint every eval_step epochs and always at the final
        # epoch ('and' binds tighter than 'or' here).
        if opt.eval_step > 0 and (epoch + 1) % opt.eval_step == 0 or (epoch + 1) == opt.max_epoch:
            if opt.mode == 'class':
                rank1 = test(model, queryloader)
            else:
                rank1 = reid_evaluator.evaluate(queryloader, galleryloader, queryFliploader, galleryFliploader)
            is_best = rank1 > best_rank1
            if is_best:
                best_rank1 = rank1
                best_epoch = epoch + 1
            # Unwrap DataParallel before saving so keys are prefix-free.
            if use_gpu:
                state_dict = model.module.state_dict()
            else:
                state_dict = model.state_dict()
            save_checkpoint({'state_dict': state_dict, 'epoch': epoch + 1},
                            is_best=is_best, save_dir=opt.save_dir,
                            filename='checkpoint_ep' + str(epoch + 1) + '.pth.tar')
    print('Best rank-1 {:.1%}, achived at epoch {}'.format(best_rank1, best_epoch))
def test(model, queryloader):
model.eval()
correct = 0
with torch.no_grad():
for data, target, _ in queryloader:
output = model(data).cpu()
# get the index of the max log-probability
pred = output.max(1, keepdim=True)[1]
correct += pred.eq(target.view_as(pred)).sum().item()
rank1 = 100. * correct / len(queryloader.dataset)
print('\nTest set: Accuracy: {}/{} ({:.2f}%)\n'.format(correct, len(queryloader.dataset), rank1))
return rank1
if __name__ == '__main__':
    # Expose train() / test() as command-line sub-commands via python-fire,
    # e.g. `python bfe.py train --dataset=market1501`.
    import fire
    fire.Fire()
| 34.3 | 115 | 0.655248 |
import os
import sys
from os import path as osp
from pprint import pprint
import numpy as np
import torch
from tensorboardX import SummaryWriter
from torch import nn
from torch.backends import cudnn
from torch.utils.data import DataLoader
sys.path.insert(0,os.path.abspath(os.path.dirname(__file__)+os.sep+'..'))
from config import opt
from datasets import data_manager
from datasets.data_loader import ImageData
from datasets.samplers import RandomIdentitySampler
from models.networks import ResNetBuilder, IDE, Resnet, BFE
from trainers.evaluator import ResNetEvaluator
from trainers.trainer import cls_tripletTrainer
from utils.loss import CrossEntropyLabelSmooth, TripletLoss, Margin
from utils.LiftedStructure import LiftedStructureLoss
from utils.DistWeightDevianceLoss import DistWeightBinDevianceLoss
from utils.serialization import Logger, save_checkpoint
from utils.transforms import TestTransform, TrainTransform
def train(**kwargs):
    """Train (or, with ``opt.evaluate``, only evaluate) the BFE re-id model.

    All keyword arguments are merged into the global ``opt`` configuration
    via ``opt._parse``; dataset choice, batch sizes, loss, optimizer,
    schedule and output directory are all read from ``opt``.  Checkpoints
    go to ``opt.save_dir``, training curves to a TensorBoard log below it,
    and the best rank-1 score seen is printed at the end.
    """
    opt._parse(kwargs)
    opt.model_name = 'bfe'
    # Seed RNGs and enable the cudnn autotuner on GPU.
    torch.manual_seed(opt.seed)
    os.makedirs(opt.save_dir, exist_ok=True)
    use_gpu = torch.cuda.is_available()
    # Mirror all prints into a persistent log file inside the save dir.
    sys.stdout = Logger(osp.join(opt.save_dir, 'log_train.txt'))
    print('=========user config==========')
    pprint(opt._state_dict())
    print('============end===============')
    if use_gpu:
        print('currently using GPU')
        cudnn.benchmark = True
        torch.cuda.manual_seed_all(opt.seed)
    else:
        print('currently using cpu')
    print('initializing dataset {}'.format(opt.dataset))
    dataset = data_manager.init_dataset(name=opt.dataset, mode=opt.mode)
    pin_memory = True if use_gpu else False
    summary_writer = SummaryWriter(osp.join(opt.save_dir, 'tensorboard_log'))
    # Data: training loader with identity-based sampling, plus query/gallery
    # loaders (and horizontally flipped variants) for evaluation.
    trainloader = DataLoader(
        ImageData(dataset.train, TrainTransform(opt.datatype)),
        sampler=RandomIdentitySampler(dataset.train, opt.num_instances),
        batch_size=opt.train_batch, num_workers=opt.workers,
        pin_memory=pin_memory, drop_last=True
    )
    queryloader = DataLoader(
        ImageData(dataset.query, TestTransform(opt.datatype)),
        batch_size=opt.test_batch, num_workers=opt.workers,
        pin_memory=pin_memory
    )
    galleryloader = DataLoader(
        ImageData(dataset.gallery, TestTransform(opt.datatype)),
        batch_size=opt.test_batch, num_workers=opt.workers,
        pin_memory=pin_memory
    )
    queryFliploader = DataLoader(
        ImageData(dataset.query, TestTransform(opt.datatype, True)),
        batch_size=opt.test_batch, num_workers=opt.workers,
        pin_memory=pin_memory
    )
    galleryFliploader = DataLoader(
        ImageData(dataset.gallery, TestTransform(opt.datatype, True)),
        batch_size=opt.test_batch, num_workers=opt.workers,
        pin_memory=pin_memory
    )
    print('initializing model ...')
    # 1.0 / 0.33 are BFE hyper-parameters; see models.networks.BFE for their
    # exact meaning.
    model = BFE(dataset.num_train_pids, 1.0, 0.33)
    optim_policy = model.get_optim_policy()
    if opt.pretrained_model:
        state_dict = torch.load(opt.pretrained_model)['state_dict']
        # second arg is strict=False: tolerate missing/unexpected keys
        model.load_state_dict(state_dict, False)
        print('load pretrained model ' + opt.pretrained_model)
    print('model size: {:.5f}M'.format(sum(p.numel() for p in model.parameters()) / 1e6))
    if use_gpu:
        model = nn.DataParallel(model).cuda()
    reid_evaluator = ResNetEvaluator(model)
    # Evaluation-only mode: score the current weights and exit.
    if opt.evaluate:
        reid_evaluator.evaluate(queryloader, galleryloader,
                                queryFliploader, galleryFliploader, re_ranking=opt.re_ranking, savefig=opt.savefig)
        return
    xent_criterion = CrossEntropyLabelSmooth(dataset.num_train_pids)
    # NOTE(review): an unrecognised opt.loss leaves embedding_criterion
    # undefined and fails later inside criterion().
    if opt.loss == 'triplet':
        embedding_criterion = TripletLoss(opt.margin)
    elif opt.loss == 'lifted':
        embedding_criterion = LiftedStructureLoss(hard_mining=True)
    elif opt.loss == 'weight':
        embedding_criterion = Margin()
    def criterion(triplet_y, softmax_y, labels):
        # Total loss: metric-embedding loss on every triplet head plus
        # label-smoothed cross entropy on every softmax head.
        losses = [embedding_criterion(output, labels)[0] for output in triplet_y] + \
                 [xent_criterion(output, labels) for output in softmax_y]
        loss = sum(losses)
        return loss
    # Optimizer: SGD with momentum, or Adam by default.
    if opt.optim == "sgd":
        optimizer = torch.optim.SGD(optim_policy, lr=opt.lr, momentum=0.9, weight_decay=opt.weight_decay)
    else:
        optimizer = torch.optim.Adam(optim_policy, lr=opt.lr, weight_decay=opt.weight_decay)
    start_epoch = opt.start_epoch
    reid_trainer = cls_tripletTrainer(opt, model, optimizer, criterion, summary_writer)
    def adjust_lr(optimizer, ep):
        # Step schedule: 5-epoch ramp up to 1e-3 over the first 50 epochs,
        # then decays at epochs 200 and 300.
        if ep < 50:
            lr = 1e-4 * (ep // 5 + 1)
        elif ep < 200:
            lr = 1e-3
        elif ep < 300:
            lr = 1e-4
        else:
            lr = 1e-5
        for p in optimizer.param_groups:
            p['lr'] = lr
    best_rank1 = opt.best_rank
    best_epoch = 0
    for epoch in range(start_epoch, opt.max_epoch):
        if opt.adjust_lr:
            adjust_lr(optimizer, epoch + 1)
        reid_trainer.train(epoch, trainloader)
        # Evaluate/checkpoint every eval_step epochs and always at the final
        # epoch ('and' binds tighter than 'or' here).
        if opt.eval_step > 0 and (epoch + 1) % opt.eval_step == 0 or (epoch + 1) == opt.max_epoch:
            if opt.mode == 'class':
                rank1 = test(model, queryloader)
            else:
                rank1 = reid_evaluator.evaluate(queryloader, galleryloader, queryFliploader, galleryFliploader)
            is_best = rank1 > best_rank1
            if is_best:
                best_rank1 = rank1
                best_epoch = epoch + 1
            # Unwrap DataParallel before saving so keys are prefix-free.
            if use_gpu:
                state_dict = model.module.state_dict()
            else:
                state_dict = model.state_dict()
            save_checkpoint({'state_dict': state_dict, 'epoch': epoch + 1},
                            is_best=is_best, save_dir=opt.save_dir,
                            filename='checkpoint_ep' + str(epoch + 1) + '.pth.tar')
    print('Best rank-1 {:.1%}, achived at epoch {}'.format(best_rank1, best_epoch))
def test(model, queryloader):
model.eval()
correct = 0
with torch.no_grad():
for data, target, _ in queryloader:
output = model(data).cpu()
pred = output.max(1, keepdim=True)[1]
correct += pred.eq(target.view_as(pred)).sum().item()
rank1 = 100. * correct / len(queryloader.dataset)
print('\nTest set: Accuracy: {}/{} ({:.2f}%)\n'.format(correct, len(queryloader.dataset), rank1))
return rank1
if __name__ == '__main__':
    # Expose train() / test() as command-line sub-commands via python-fire.
    import fire
    fire.Fire()
| true | true |
f71c3aaf6ae1025d594d51ec2a103091d3f8a9c3 | 21,405 | py | Python | pandapower/build_gen.py | mathildebadoual/pandapower | 9ba4bcb78e84b644d2ba6df0c08e285c54af8ddc | [
"BSD-3-Clause"
] | 1 | 2020-10-19T06:39:15.000Z | 2020-10-19T06:39:15.000Z | pandapower/build_gen.py | mathildebadoual/pandapower | 9ba4bcb78e84b644d2ba6df0c08e285c54af8ddc | [
"BSD-3-Clause"
] | null | null | null | pandapower/build_gen.py | mathildebadoual/pandapower | 9ba4bcb78e84b644d2ba6df0c08e285c54af8ddc | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
# Copyright (c) 2016-2018 by University of Kassel and Fraunhofer Institute for Energy Economics
# and Energy System Technology (IEE), Kassel. All rights reserved.
import numpy as np
import numpy.core.numeric as ncn
from numpy import array, zeros, isnan
from pandas import DataFrame
from pandapower.idx_bus import PV, REF, VA, VM, BUS_TYPE, NONE, VMAX, VMIN, PQ
from pandapower.idx_gen import QMIN, QMAX, PMIN, PMAX, GEN_STATUS, GEN_BUS, PG, VG, QG
def _build_gen_ppc(net, ppc):
    '''
    Takes the empty ppc network and fills it with the gen values. The gen
    datatype will be float afterwards.

    Row layout of ppc["gen"] in "opf" mode:
    [0:eg_end] ext grids, [eg_end:gen_end] gens, [gen_end:sg_end] controllable
    sgens, [sg_end:l_end] controllable loads, [l_end:stor_end] controllable
    storages. Sign convention: pandapower is load-referenced, pypower is
    generator-referenced, hence the negations of p_kw/q_kvar below.

    **INPUT**:
        **net** -The pandapower format network

        **ppc** - The PYPOWER format network to fill in values
    '''
    mode = net["_options"]["mode"]
    # if mode == power flow or short circuit...
    if mode == "pf" or mode == "sc":
        # get in service elements
        _is_elements = net["_is_elements"]
        eg_is_mask = _is_elements['ext_grid']
        gen_is_mask = _is_elements['gen']
        eg_end = np.sum(eg_is_mask)
        gen_end = eg_end + np.sum(gen_is_mask)
        xw_end = gen_end + len(net["xward"])
        # define default q limits
        q_lim_default = 1e9  # which is 1000 TW - should be enough for distribution grids.
        p_lim_default = 1e9
        # NOTE(review): q_lim_default of 0 is passed here; limits are filled in
        # later by the _copy_*/_replace_* helpers — confirm this is intended.
        _init_ppc_gen(ppc, xw_end, 0)
        if mode == "sc":
            return
        # add generator / pv data
        if gen_end > eg_end:
            _build_pp_gen(net, ppc, gen_is_mask, eg_end, gen_end, q_lim_default, p_lim_default)
        _build_pp_ext_grid(net, ppc, eg_is_mask, eg_end)
        # add extended ward pv node data
        if xw_end > gen_end:
            _build_pp_xward(net, ppc, gen_end, xw_end, q_lim_default)
    # if mode == optimal power flow...
    if mode == "opf":
        bus_lookup = net["_pd2ppc_lookups"]["bus"]
        calculate_voltage_angles = net["_options"]["calculate_voltage_angles"]
        if len(net.dcline) > 0:
            ppc["dcline"] = net.dcline[["loss_kw", "loss_percent"]].values
        # get in service elements
        _is_elements = net["_is_elements"]
        eg_is = net["ext_grid"][_is_elements['ext_grid']]
        gen_is = net["gen"][_is_elements['gen']]
        sg_is = net.sgen[(net.sgen.in_service & net.sgen.controllable) == True] \
            if "controllable" in net.sgen.columns else DataFrame()
        l_is = net.load[(net.load.in_service & net.load.controllable) == True] \
            if "controllable" in net.load.columns else DataFrame()
        stor_is = net.storage[(net.storage.in_service & net.storage.controllable) == True] \
            if "controllable" in net.storage.columns else DataFrame()
        _is_elements["sgen_controllable"] = sg_is
        _is_elements["load_controllable"] = l_is
        _is_elements["storage_controllable"] = stor_is
        # row-range boundaries of the ppc["gen"] matrix (see docstring)
        eg_end = len(eg_is)
        gen_end = eg_end + len(gen_is)
        sg_end = gen_end + len(sg_is)
        l_end = sg_end + len(l_is)
        stor_end = l_end + len(stor_is)
        q_lim_default = 1e9  # which is 1000 TW - should be enough for distribution grids.
        p_lim_default = 1e9  # changes must be considered in check_opf_data
        delta = net["_options"]["delta"]
        # initialize generator matrix
        ppc["gen"] = zeros(shape=(stor_end, 21), dtype=float)
        ppc["gen"][:] = array([0, 0, 0, q_lim_default, -q_lim_default, 1., 1., 1, p_lim_default,
                               -p_lim_default, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0])
        # add sgens first so pv bus types won't be overwritten
        if sg_end > gen_end:
            gen_buses = bus_lookup[sg_is["bus"].values]
            ppc["gen"][gen_end:sg_end, GEN_BUS] = gen_buses
            ppc["gen"][gen_end:sg_end, PG] = - sg_is["p_kw"].values * 1e-3 * sg_is["scaling"].values
            ppc["gen"][gen_end:sg_end, QG] = sg_is["q_kvar"].values * 1e-3 * sg_is["scaling"].values
            # set bus values for generator buses
            ppc["bus"][gen_buses, BUS_TYPE] = PQ
            # set constraints for controllable sgens
            if "min_q_kvar" in sg_is.columns:
                ppc["gen"][gen_end:sg_end, QMAX] = - (sg_is["min_q_kvar"].values * 1e-3 - delta)
                max_q_kvar = ppc["gen"][gen_end:sg_end, [QMIN]]
                ncn.copyto(max_q_kvar, -q_lim_default, where=isnan(max_q_kvar))
                ppc["gen"][gen_end:sg_end, [QMIN]] = max_q_kvar
            if "max_q_kvar" in sg_is.columns:
                ppc["gen"][gen_end:sg_end, QMIN] = - (sg_is["max_q_kvar"].values * 1e-3 + delta)
                min_q_kvar = ppc["gen"][gen_end:sg_end, [QMAX]]
                ncn.copyto(min_q_kvar, q_lim_default, where=isnan(min_q_kvar))
                ppc["gen"][gen_end:sg_end, [QMAX]] = min_q_kvar - 1e-10  # TODO Why this? (M.Scharf, 2018-02)
            if "max_p_kw" in sg_is.columns:
                ppc["gen"][gen_end:sg_end, PMIN] = - (sg_is["max_p_kw"].values * 1e-3 + delta)
                max_p_kw = ppc["gen"][gen_end:sg_end, [PMIN]]
                ncn.copyto(max_p_kw, -p_lim_default, where=isnan(max_p_kw))
                ppc["gen"][gen_end:sg_end, [PMIN]] = max_p_kw
            if "min_p_kw" in sg_is.columns:
                ppc["gen"][gen_end:sg_end, PMAX] = - (sg_is["min_p_kw"].values * 1e-3 - delta)
                min_p_kw = ppc["gen"][gen_end:sg_end, [PMAX]]
                ncn.copyto(min_p_kw, p_lim_default, where=isnan(min_p_kw))
                ppc["gen"][gen_end:sg_end, [PMAX]] = min_p_kw
        # add controllable loads
        if l_end > sg_end:
            load_buses = bus_lookup[l_is["bus"].values]
            ppc["gen"][sg_end:l_end, GEN_BUS] = load_buses
            ppc["gen"][sg_end:l_end, PG] = - l_is["p_kw"].values * 1e-3 * l_is["scaling"].values
            ppc["gen"][sg_end:l_end, QG] = l_is["q_kvar"].values * 1e-3 * l_is["scaling"].values
            # set bus values for controllable loads
            ppc["bus"][load_buses, BUS_TYPE] = PQ
            # set constraints for controllable loads
            if "min_q_kvar" in l_is.columns:
                ppc["gen"][sg_end:l_end, QMAX] = - (l_is["min_q_kvar"].values * 1e-3 - delta)
                max_q_kvar = ppc["gen"][sg_end:l_end, [QMIN]]
                ncn.copyto(max_q_kvar, -q_lim_default, where=isnan(max_q_kvar))
                ppc["gen"][sg_end:l_end, [QMIN]] = max_q_kvar
            if "max_q_kvar" in l_is.columns:
                ppc["gen"][sg_end:l_end, QMIN] = - (l_is["max_q_kvar"].values * 1e-3 + delta)
                min_q_kvar = ppc["gen"][sg_end:l_end, [QMAX]]
                ncn.copyto(min_q_kvar, q_lim_default, where=isnan(min_q_kvar))
                ppc["gen"][sg_end:l_end, [QMAX]] = min_q_kvar
            # NOTE(review): the "min_p_kw"/"max_p_kw" guards below are swapped
            # relative to the sgen/storage sections (the "min_p_kw" branch reads
            # max_p_kw and vice versa) — confirm against upstream pandapower.
            if "min_p_kw" in l_is.columns:
                ppc["gen"][sg_end:l_end, PMIN] = - (l_is["max_p_kw"].values * 1e-3 + delta)
                max_p_kw = ppc["gen"][sg_end:l_end, [PMIN]]
                ncn.copyto(max_p_kw, -p_lim_default, where=isnan(max_p_kw))
                ppc["gen"][sg_end:l_end, [PMIN]] = max_p_kw
            if "max_p_kw" in l_is.columns:
                ppc["gen"][sg_end:l_end, PMAX] = - (l_is["min_p_kw"].values * 1e-3 - delta)
                min_p_kw = ppc["gen"][sg_end:l_end, [PMAX]]
                ncn.copyto(min_p_kw, p_lim_default, where=isnan(min_p_kw))
                ppc["gen"][sg_end:l_end, [PMAX]] = min_p_kw
        # add controllable storages
        if stor_end > l_end:
            stor_buses = bus_lookup[stor_is["bus"].values]
            ppc["gen"][l_end:stor_end, GEN_BUS] = stor_buses
            ppc["gen"][l_end:stor_end, PG] = - stor_is["p_kw"].values * 1e-3 * stor_is["scaling"].values
            ppc["gen"][l_end:stor_end, QG] = stor_is["q_kvar"].values * 1e-3 * stor_is["scaling"].values
            # set bus values for generator buses
            ppc["bus"][stor_buses, BUS_TYPE] = PQ
            # set constraints for controllable sgens
            if "min_q_kvar" in stor_is.columns:
                ppc["gen"][l_end:stor_end, QMAX] = - (stor_is["min_q_kvar"].values * 1e-3 - delta)
                max_q_kvar = ppc["gen"][l_end:stor_end, [QMIN]]
                ncn.copyto(max_q_kvar, -q_lim_default, where=isnan(max_q_kvar))
                ppc["gen"][l_end:stor_end, [QMIN]] = max_q_kvar
            if "max_q_kvar" in stor_is.columns:
                ppc["gen"][l_end:stor_end, QMIN] = - (stor_is["max_q_kvar"].values * 1e-3 + delta)
                min_q_kvar = ppc["gen"][l_end:stor_end, [QMAX]]
                ncn.copyto(min_q_kvar, q_lim_default, where=isnan(min_q_kvar))
                ppc["gen"][l_end:stor_end, [QMAX]] = min_q_kvar
            if "max_p_kw" in stor_is.columns:
                ppc["gen"][l_end:stor_end, PMIN] = - (stor_is["max_p_kw"].values * 1e-3 + delta)
                max_p_kw = ppc["gen"][l_end:stor_end, [PMIN]]
                ncn.copyto(max_p_kw, -p_lim_default, where=isnan(max_p_kw))
                ppc["gen"][l_end:stor_end, [PMIN]] = max_p_kw
            if "min_p_kw" in stor_is.columns:
                ppc["gen"][l_end:stor_end, PMAX] = - (stor_is["min_p_kw"].values * 1e-3 - delta)
                min_p_kw = ppc["gen"][l_end:stor_end, [PMAX]]
                ncn.copyto(min_p_kw, p_lim_default, where=isnan(min_p_kw))
                ppc["gen"][l_end:stor_end, [PMAX]] = min_p_kw
        # add ext grid / slack data
        ppc["gen"][:eg_end, GEN_BUS] = bus_lookup[eg_is["bus"].values]
        ppc["gen"][:eg_end, VG] = eg_is["vm_pu"].values
        ppc["gen"][:eg_end, GEN_STATUS] = eg_is["in_service"].values
        if "max_p_kw" in eg_is.columns:
            ppc["gen"][:eg_end, PMIN] = - (eg_is["max_p_kw"].values * 1e-3 - delta)
            max_p_kw = ppc["gen"][:eg_end, [PMIN]]
            ncn.copyto(max_p_kw, -p_lim_default, where=isnan(max_p_kw))
            ppc["gen"][:eg_end, [PMIN]] = max_p_kw
        if "min_p_kw" in eg_is.columns:
            ppc["gen"][:eg_end, PMAX] = - (eg_is["min_p_kw"].values * 1e-3 + delta)
            min_p_kw = ppc["gen"][:eg_end, [PMAX]]
            ncn.copyto(min_p_kw, p_lim_default, where=isnan(min_p_kw))
            ppc["gen"][:eg_end, [PMAX]] = min_p_kw
        if "min_q_kvar" in eg_is.columns:
            ppc["gen"][:eg_end, QMAX] = - (eg_is["min_q_kvar"].values * 1e-3 - delta)
            max_q_kvar = ppc["gen"][:eg_end, [QMIN]]
            ncn.copyto(max_q_kvar, -q_lim_default, where=isnan(max_q_kvar))
            ppc["gen"][:eg_end, [QMIN]] = max_q_kvar
        if "max_q_kvar" in eg_is.columns:
            ppc["gen"][:eg_end, QMIN] = - (eg_is["max_q_kvar"].values * 1e-3 + delta)
            min_q_kvar = ppc["gen"][:eg_end, [QMAX]]
            ncn.copyto(min_q_kvar, q_lim_default, where=isnan(min_q_kvar))
            ppc["gen"][:eg_end, [QMAX]] = min_q_kvar - 1e-10
        # set bus values for external grid buses
        eg_buses = bus_lookup[eg_is["bus"].values]
        if calculate_voltage_angles:
            ppc["bus"][eg_buses, VA] = eg_is["va_degree"].values
        ppc["bus"][eg_buses, BUS_TYPE] = REF
        ppc["bus"][eg_buses, VM] = eg_is["vm_pu"].values
        # REF busses don't have flexible voltages by definition:
        ppc["bus"][eg_buses, VMAX] = ppc["bus"][ppc["bus"][:, BUS_TYPE] == REF, VM]
        ppc["bus"][eg_buses, VMIN] = ppc["bus"][ppc["bus"][:, BUS_TYPE] == REF, VM]
        # add generator / pv data
        if gen_end > eg_end:
            ppc["gen"][eg_end:gen_end, GEN_BUS] = bus_lookup[gen_is["bus"].values]
            ppc["gen"][eg_end:gen_end, PG] = - gen_is["p_kw"].values * 1e-3 * gen_is["scaling"].values
            ppc["gen"][eg_end:gen_end, VG] = gen_is["vm_pu"].values
            # set bus values for generator buses
            gen_buses = bus_lookup[gen_is["bus"].values]
            ppc["bus"][gen_buses, BUS_TYPE] = PV
            ppc["bus"][gen_buses, VM] = gen_is["vm_pu"].values
            # set constraints for PV generators
            _copy_q_limits_to_ppc(net, ppc, eg_end, gen_end, _is_elements['gen'])
            _copy_p_limits_to_ppc(net, ppc, eg_end, gen_end, _is_elements['gen'])
            _replace_nans_with_default_q_limits_in_ppc(ppc, eg_end, gen_end, q_lim_default)
            _replace_nans_with_default_p_limits_in_ppc(ppc, eg_end, gen_end, p_lim_default)
def _init_ppc_gen(ppc, xw_end, q_lim_default):
# initialize generator matrix
ppc["gen"] = np.zeros(shape=(xw_end, 21), dtype=float)
ppc["gen"][:] = np.array([0, 0, 0, q_lim_default, -q_lim_default, 1.,
1., 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0])
def _build_pp_ext_grid(net, ppc, eg_is_mask, eg_end):
    """Write the in-service ext_grid (slack) entries into ppc["gen"] / ppc["bus"]."""
    ext_grid_tab = net["ext_grid"]
    bus_lookup = net["_pd2ppc_lookups"]["bus"]
    slack_buses = bus_lookup[ext_grid_tab["bus"].values[eg_is_mask]]

    # slack generators occupy the first eg_end rows of ppc["gen"]
    ppc["gen"][:eg_end, GEN_BUS] = slack_buses
    ppc["gen"][:eg_end, VG] = ext_grid_tab["vm_pu"].values[eg_is_mask]
    ppc["gen"][:eg_end, GEN_STATUS] = True

    # mark the connected buses as reference buses
    if net["_options"]["calculate_voltage_angles"]:
        ppc["bus"][slack_buses, VA] = ext_grid_tab["va_degree"].values[eg_is_mask]
    ppc["bus"][slack_buses, BUS_TYPE] = REF
def _build_pp_gen(net, ppc, gen_is_mask, eg_end, gen_end, q_lim_default, p_lim_default):
    """Fill ppc["gen"] rows eg_end:gen_end with the in-service pandapower gens (PV nodes)."""
    gen_tab = net["gen"]
    bus_lookup = net["_pd2ppc_lookups"]["bus"]
    rows = slice(eg_end, gen_end)

    pv_buses = bus_lookup[gen_tab["bus"].values[gen_is_mask]]
    vm_setpoints = gen_tab["vm_pu"].values[gen_is_mask]

    # pandapower uses the load reference system, pypower the generator
    # reference system, so active power is negated here
    ppc["gen"][rows, GEN_BUS] = pv_buses
    ppc["gen"][rows, PG] = - (gen_tab["p_kw"].values[gen_is_mask] * 1e-3 *
                              gen_tab["scaling"].values[gen_is_mask])
    ppc["gen"][rows, VG] = vm_setpoints

    # connected buses become PV buses with the gen voltage setpoint
    ppc["bus"][pv_buses, BUS_TYPE] = PV
    ppc["bus"][pv_buses, VM] = vm_setpoints

    _copy_q_limits_to_ppc(net, ppc, eg_end, gen_end, gen_is_mask)
    _replace_nans_with_default_q_limits_in_ppc(ppc, eg_end, gen_end, q_lim_default)
    if net["_options"]["copy_constraints_to_ppc"]:
        _copy_p_limits_to_ppc(net, ppc, eg_end, gen_end, gen_is_mask)
        _replace_nans_with_default_p_limits_in_ppc(ppc, eg_end, gen_end, p_lim_default)
def _build_pp_xward(net, ppc, gen_end, xw_end, q_lim_default, update_lookup=True):
    """Add the auxiliary PV generators of extended wards to ppc["gen"] / ppc["bus"]."""
    bus_lookup = net["_pd2ppc_lookups"]["bus"]
    xward_tab = net["xward"]
    in_service = net["_is_elements"]['xward']
    rows = slice(gen_end, xw_end)
    aux_buses = bus_lookup[xward_tab["ad_bus"].values]

    if update_lookup:
        ppc["gen"][rows, GEN_BUS] = aux_buses
    ppc["gen"][rows, VG] = xward_tab["vm_pu"].values
    ppc["gen"][rows, GEN_STATUS] = in_service
    # xwards get unconstrained reactive power
    ppc["gen"][rows, QMIN] = -q_lim_default
    ppc["gen"][rows, QMAX] = q_lim_default

    # in-service auxiliary buses become PV, out-of-service ones are disabled
    ppc["bus"][aux_buses[in_service], BUS_TYPE] = PV
    ppc["bus"][aux_buses[~in_service], BUS_TYPE] = NONE
    ppc["bus"][aux_buses, VM] = xward_tab["vm_pu"].values
def _update_gen_ppc(net, ppc):
    '''
    Takes the ppc network and updates the gen values from the values in net.

    Used on repeated runs ("recycle"): only setpoints/status are refreshed in
    the existing ppc matrices instead of rebuilding them.

    **INPUT**:
        **net** -The pandapower format network

        **ppc** - The PYPOWER format network to fill in values
    '''
    # get options from net
    calculate_voltage_angles = net["_options"]["calculate_voltage_angles"]
    bus_lookup = net["_pd2ppc_lookups"]["bus"]
    # get in service elements
    _is_elements = net["_is_elements"]
    gen_is_mask = _is_elements['gen']
    # TODO maybe speed up things here, too
    eg_is = net["ext_grid"][_is_elements['ext_grid']]
    gen_is = net["gen"][_is_elements['gen']]
    eg_end = len(eg_is)
    gen_end = eg_end + len(gen_is)
    xw_end = gen_end + len(net["xward"])
    q_lim_default = 1e9  # which is 1000 TW - should be enough for distribution grids.
    # add ext grid / slack data
    ext_grid_lookup = net["_pd2ppc_lookups"]["ext_grid"]
    ext_grid_idx_ppc = ext_grid_lookup[eg_is.index]
    ppc["gen"][ext_grid_idx_ppc, VG] = eg_is["vm_pu"].values
    ppc["gen"][ext_grid_idx_ppc, GEN_STATUS] = eg_is["in_service"].values
    # set bus values for external grid buses
    if calculate_voltage_angles:
        # eg_buses = bus_lookup[eg_is["bus"].values]
        # NOTE(review): this indexes ppc["bus"] with the ext-grid *gen row*
        # indices rather than the bus indices (see the commented-out eg_buses
        # line above) — verify against upstream pandapower before relying on it.
        ppc["bus"][ext_grid_idx_ppc, VA] = eg_is["va_degree"].values
    # add generator / pv data
    if gen_end > eg_end:
        gen_lookup = net["_pd2ppc_lookups"]["gen"]
        gen_idx_ppc = gen_lookup[gen_is.index]
        ppc["gen"][gen_idx_ppc, PG] = - gen_is["p_kw"].values * 1e-3 * gen_is["scaling"].values
        ppc["gen"][gen_idx_ppc, VG] = gen_is["vm_pu"].values
        # set bus values for generator buses
        gen_buses = bus_lookup[gen_is["bus"].values]
        ppc["bus"][gen_buses, VM] = gen_is["vm_pu"].values
        _copy_q_limits_to_ppc(net, ppc, eg_end, gen_end, gen_is_mask)
        _replace_nans_with_default_q_limits_in_ppc(ppc, eg_end, gen_end, q_lim_default)
    # add extended ward pv node data
    if xw_end > gen_end:
        # ToDo: this must be tested in combination with recycle. Maybe the placement of the updated value in ppc["gen"]
        # ToDo: is wrong. -> I'll better raise en error
        raise NotImplementedError("xwards in combination with recycle is not properly implemented")
        # _build_pp_xward(net, ppc, gen_end, xw_end, q_lim_default,
        #                 update_lookup=False)
def _copy_q_limits_to_ppc(net, ppc, eg_end, gen_end, gen_is_mask):
    """Copy gen q limits into ppc["gen"][eg_end:gen_end], flipping the sign.

    Note: Pypower has generator reference system, pandapower uses load
    reference system (max <-> min), so pandapower max_q_kvar becomes pypower
    QMIN and vice versa; ``delta`` widens the band slightly.
    """
    delta = net["_options"]["delta"]
    gen_tab = net["gen"]
    rows = slice(eg_end, gen_end)
    if "max_q_kvar" in gen_tab.columns:
        ppc["gen"][rows, QMIN] = -gen_tab["max_q_kvar"].values[gen_is_mask] * 1e-3 - delta
    if "min_q_kvar" in gen_tab.columns:
        ppc["gen"][rows, QMAX] = -gen_tab["min_q_kvar"].values[gen_is_mask] * 1e-3 + delta
def _copy_p_limits_to_ppc(net, ppc, eg_end, gen_end, gen_is_mask):
    """Copy gen p limits into ppc["gen"][eg_end:gen_end], flipping the sign.

    As with the q limits, the load-to-generator reference change swaps
    max <-> min; ``delta`` widens the band slightly.
    """
    delta = net["_options"]["delta"]
    gen_tab = net["gen"]
    rows = slice(eg_end, gen_end)
    if "max_p_kw" in gen_tab.columns:
        ppc["gen"][rows, PMIN] = -gen_tab["max_p_kw"].values[gen_is_mask] * 1e-3 + delta
    if "min_p_kw" in gen_tab.columns:
        ppc["gen"][rows, PMAX] = -gen_tab["min_p_kw"].values[gen_is_mask] * 1e-3 - delta
def _replace_nans_with_default_q_limits_in_ppc(ppc, eg_end, gen_end, q_lim_default):
    """Substitute +/- ``q_lim_default`` wherever the copied q limits are NaN.

    Note: Pypower has generator reference system, pandapower uses load
    reference system (max <-> min).
    """
    for column, fill in ((QMIN, -q_lim_default), (QMAX, q_lim_default)):
        limits = ppc["gen"][eg_end:gen_end, [column]]
        ncn.copyto(limits, fill, where=np.isnan(limits))
        ppc["gen"][eg_end:gen_end, [column]] = limits
def _replace_nans_with_default_p_limits_in_ppc(ppc, eg_end, gen_end, p_lim_default):
    """Substitute +/- ``p_lim_default`` wherever the copied p limits are NaN.

    Note: Pypower has generator reference system, pandapower uses load
    reference system (max <-> min).
    """
    for column, fill in ((PMIN, -p_lim_default), (PMAX, p_lim_default)):
        limits = ppc["gen"][eg_end:gen_end, [column]]
        ncn.copyto(limits, fill, where=isnan(limits))
        ppc["gen"][eg_end:gen_end, [column]] = limits
def _check_voltage_setpoints_at_same_bus(ppc):
    """Raise UserWarning if two generators on one bus have conflicting vm setpoints."""
    connected_buses = ppc['gen'][:, GEN_BUS].astype(int)
    vm_setpoints = ppc['gen'][:, VG]
    if _different_values_at_one_bus(connected_buses, vm_setpoints):
        raise UserWarning("Generators with different voltage setpoints connected to the same bus")
def _check_voltage_angles_at_same_bus(net, ppc):
    """Raise UserWarning if two ext grids on one bus have conflicting va_degree setpoints."""
    gen_va = net.ext_grid.va_degree[net._is_elements["ext_grid"]].values
    # ppc gen rows of the in-service ext grids
    eg_gens = net._pd2ppc_lookups["ext_grid"][net.ext_grid.index[net._is_elements["ext_grid"]]]
    gen_bus = ppc["gen"][eg_gens, GEN_BUS].astype(int)
    if _different_values_at_one_bus(gen_bus, gen_va):
        raise UserWarning("Ext grids with different voltage angle setpoints connected to the same bus")
def _different_values_at_one_bus(buses, values):
"""
checks if there are different values in any of the
"""
# buses with one or more generators and their index
unique_bus, index_first_bus = np.unique(buses, return_index=True)
# voltage setpoint lookup with the voltage of the first occurence of that bus
first_values = -np.ones(buses.max() + 1)
first_values[unique_bus] = values[index_first_bus]
# generate voltage setpoints where all generators at the same bus
# have the voltage of the first generator at that bus
values_equal = first_values[buses]
return not np.array_equal(values, values_equal)
| 46.131466 | 120 | 0.608783 |
import numpy as np
import numpy.core.numeric as ncn
from numpy import array, zeros, isnan
from pandas import DataFrame
from pandapower.idx_bus import PV, REF, VA, VM, BUS_TYPE, NONE, VMAX, VMIN, PQ
from pandapower.idx_gen import QMIN, QMAX, PMIN, PMAX, GEN_STATUS, GEN_BUS, PG, VG, QG
def _build_gen_ppc(net, ppc):
    """Fill the empty ppc network with generator data from *net*.

    Row layout of ppc["gen"] in "opf" mode: [0:eg_end] ext grids,
    [eg_end:gen_end] gens, [gen_end:sg_end] controllable sgens,
    [sg_end:l_end] controllable loads, [l_end:stor_end] controllable storages.
    Active/reactive powers are negated because pandapower uses the load
    reference system while pypower uses the generator reference system.
    """
    mode = net["_options"]["mode"]
    # power flow / short circuit
    if mode == "pf" or mode == "sc":
        # in-service masks
        _is_elements = net["_is_elements"]
        eg_is_mask = _is_elements['ext_grid']
        gen_is_mask = _is_elements['gen']
        eg_end = np.sum(eg_is_mask)
        gen_end = eg_end + np.sum(gen_is_mask)
        xw_end = gen_end + len(net["xward"])
        q_lim_default = 1e9  # 1000 TW - should be enough for distribution grids
        p_lim_default = 1e9
        # NOTE(review): q_lim_default of 0 is passed here; limits are filled
        # later by the _copy_*/_replace_* helpers — confirm this is intended.
        _init_ppc_gen(ppc, xw_end, 0)
        if mode == "sc":
            return
        # add generator / pv data
        if gen_end > eg_end:
            _build_pp_gen(net, ppc, gen_is_mask, eg_end, gen_end, q_lim_default, p_lim_default)
        _build_pp_ext_grid(net, ppc, eg_is_mask, eg_end)
        # add extended ward pv node data
        if xw_end > gen_end:
            _build_pp_xward(net, ppc, gen_end, xw_end, q_lim_default)
    # optimal power flow
    if mode == "opf":
        bus_lookup = net["_pd2ppc_lookups"]["bus"]
        calculate_voltage_angles = net["_options"]["calculate_voltage_angles"]
        if len(net.dcline) > 0:
            ppc["dcline"] = net.dcline[["loss_kw", "loss_percent"]].values
        # in-service elements; controllable sgen/load/storage only participate
        # when their tables carry a "controllable" column
        _is_elements = net["_is_elements"]
        eg_is = net["ext_grid"][_is_elements['ext_grid']]
        gen_is = net["gen"][_is_elements['gen']]
        sg_is = net.sgen[(net.sgen.in_service & net.sgen.controllable) == True] \
            if "controllable" in net.sgen.columns else DataFrame()
        l_is = net.load[(net.load.in_service & net.load.controllable) == True] \
            if "controllable" in net.load.columns else DataFrame()
        stor_is = net.storage[(net.storage.in_service & net.storage.controllable) == True] \
            if "controllable" in net.storage.columns else DataFrame()
        _is_elements["sgen_controllable"] = sg_is
        _is_elements["load_controllable"] = l_is
        _is_elements["storage_controllable"] = stor_is
        # row-range boundaries of the ppc["gen"] matrix (see docstring)
        eg_end = len(eg_is)
        gen_end = eg_end + len(gen_is)
        sg_end = gen_end + len(sg_is)
        l_end = sg_end + len(l_is)
        stor_end = l_end + len(stor_is)
        q_lim_default = 1e9  # 1000 TW - should be enough for distribution grids
        p_lim_default = 1e9  # changes must be considered in check_opf_data
        delta = net["_options"]["delta"]
        # initialize generator matrix with default-valued rows
        ppc["gen"] = zeros(shape=(stor_end, 21), dtype=float)
        ppc["gen"][:] = array([0, 0, 0, q_lim_default, -q_lim_default, 1., 1., 1, p_lim_default,
                               -p_lim_default, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0])
        # add sgens first so pv bus types won't be overwritten
        if sg_end > gen_end:
            gen_buses = bus_lookup[sg_is["bus"].values]
            ppc["gen"][gen_end:sg_end, GEN_BUS] = gen_buses
            ppc["gen"][gen_end:sg_end, PG] = - sg_is["p_kw"].values * 1e-3 * sg_is["scaling"].values
            ppc["gen"][gen_end:sg_end, QG] = sg_is["q_kvar"].values * 1e-3 * sg_is["scaling"].values
            # set bus values for generator buses
            ppc["bus"][gen_buses, BUS_TYPE] = PQ
            # set constraints for controllable sgens
            if "min_q_kvar" in sg_is.columns:
                ppc["gen"][gen_end:sg_end, QMAX] = - (sg_is["min_q_kvar"].values * 1e-3 - delta)
                max_q_kvar = ppc["gen"][gen_end:sg_end, [QMIN]]
                ncn.copyto(max_q_kvar, -q_lim_default, where=isnan(max_q_kvar))
                ppc["gen"][gen_end:sg_end, [QMIN]] = max_q_kvar
            if "max_q_kvar" in sg_is.columns:
                ppc["gen"][gen_end:sg_end, QMIN] = - (sg_is["max_q_kvar"].values * 1e-3 + delta)
                min_q_kvar = ppc["gen"][gen_end:sg_end, [QMAX]]
                ncn.copyto(min_q_kvar, q_lim_default, where=isnan(min_q_kvar))
                ppc["gen"][gen_end:sg_end, [QMAX]] = min_q_kvar - 1e-10  # TODO Why this? (M.Scharf, 2018-02)
            if "max_p_kw" in sg_is.columns:
                ppc["gen"][gen_end:sg_end, PMIN] = - (sg_is["max_p_kw"].values * 1e-3 + delta)
                max_p_kw = ppc["gen"][gen_end:sg_end, [PMIN]]
                ncn.copyto(max_p_kw, -p_lim_default, where=isnan(max_p_kw))
                ppc["gen"][gen_end:sg_end, [PMIN]] = max_p_kw
            if "min_p_kw" in sg_is.columns:
                ppc["gen"][gen_end:sg_end, PMAX] = - (sg_is["min_p_kw"].values * 1e-3 - delta)
                min_p_kw = ppc["gen"][gen_end:sg_end, [PMAX]]
                ncn.copyto(min_p_kw, p_lim_default, where=isnan(min_p_kw))
                ppc["gen"][gen_end:sg_end, [PMAX]] = min_p_kw
        # add controllable loads
        if l_end > sg_end:
            load_buses = bus_lookup[l_is["bus"].values]
            ppc["gen"][sg_end:l_end, GEN_BUS] = load_buses
            ppc["gen"][sg_end:l_end, PG] = - l_is["p_kw"].values * 1e-3 * l_is["scaling"].values
            ppc["gen"][sg_end:l_end, QG] = l_is["q_kvar"].values * 1e-3 * l_is["scaling"].values
            # set bus values for controllable loads
            ppc["bus"][load_buses, BUS_TYPE] = PQ
            # set constraints for controllable loads
            if "min_q_kvar" in l_is.columns:
                ppc["gen"][sg_end:l_end, QMAX] = - (l_is["min_q_kvar"].values * 1e-3 - delta)
                max_q_kvar = ppc["gen"][sg_end:l_end, [QMIN]]
                ncn.copyto(max_q_kvar, -q_lim_default, where=isnan(max_q_kvar))
                ppc["gen"][sg_end:l_end, [QMIN]] = max_q_kvar
            if "max_q_kvar" in l_is.columns:
                ppc["gen"][sg_end:l_end, QMIN] = - (l_is["max_q_kvar"].values * 1e-3 + delta)
                min_q_kvar = ppc["gen"][sg_end:l_end, [QMAX]]
                ncn.copyto(min_q_kvar, q_lim_default, where=isnan(min_q_kvar))
                ppc["gen"][sg_end:l_end, [QMAX]] = min_q_kvar
            # NOTE(review): the "min_p_kw"/"max_p_kw" guards below are swapped
            # relative to the sgen/storage sections — confirm against upstream.
            if "min_p_kw" in l_is.columns:
                ppc["gen"][sg_end:l_end, PMIN] = - (l_is["max_p_kw"].values * 1e-3 + delta)
                max_p_kw = ppc["gen"][sg_end:l_end, [PMIN]]
                ncn.copyto(max_p_kw, -p_lim_default, where=isnan(max_p_kw))
                ppc["gen"][sg_end:l_end, [PMIN]] = max_p_kw
            if "max_p_kw" in l_is.columns:
                ppc["gen"][sg_end:l_end, PMAX] = - (l_is["min_p_kw"].values * 1e-3 - delta)
                min_p_kw = ppc["gen"][sg_end:l_end, [PMAX]]
                ncn.copyto(min_p_kw, p_lim_default, where=isnan(min_p_kw))
                ppc["gen"][sg_end:l_end, [PMAX]] = min_p_kw
        # add controllable storages
        if stor_end > l_end:
            stor_buses = bus_lookup[stor_is["bus"].values]
            ppc["gen"][l_end:stor_end, GEN_BUS] = stor_buses
            ppc["gen"][l_end:stor_end, PG] = - stor_is["p_kw"].values * 1e-3 * stor_is["scaling"].values
            ppc["gen"][l_end:stor_end, QG] = stor_is["q_kvar"].values * 1e-3 * stor_is["scaling"].values
            # set bus values for generator buses
            ppc["bus"][stor_buses, BUS_TYPE] = PQ
            # set constraints for controllable sgens
            if "min_q_kvar" in stor_is.columns:
                ppc["gen"][l_end:stor_end, QMAX] = - (stor_is["min_q_kvar"].values * 1e-3 - delta)
                max_q_kvar = ppc["gen"][l_end:stor_end, [QMIN]]
                ncn.copyto(max_q_kvar, -q_lim_default, where=isnan(max_q_kvar))
                ppc["gen"][l_end:stor_end, [QMIN]] = max_q_kvar
            if "max_q_kvar" in stor_is.columns:
                ppc["gen"][l_end:stor_end, QMIN] = - (stor_is["max_q_kvar"].values * 1e-3 + delta)
                min_q_kvar = ppc["gen"][l_end:stor_end, [QMAX]]
                ncn.copyto(min_q_kvar, q_lim_default, where=isnan(min_q_kvar))
                ppc["gen"][l_end:stor_end, [QMAX]] = min_q_kvar
            if "max_p_kw" in stor_is.columns:
                ppc["gen"][l_end:stor_end, PMIN] = - (stor_is["max_p_kw"].values * 1e-3 + delta)
                max_p_kw = ppc["gen"][l_end:stor_end, [PMIN]]
                ncn.copyto(max_p_kw, -p_lim_default, where=isnan(max_p_kw))
                ppc["gen"][l_end:stor_end, [PMIN]] = max_p_kw
            if "min_p_kw" in stor_is.columns:
                ppc["gen"][l_end:stor_end, PMAX] = - (stor_is["min_p_kw"].values * 1e-3 - delta)
                min_p_kw = ppc["gen"][l_end:stor_end, [PMAX]]
                ncn.copyto(min_p_kw, p_lim_default, where=isnan(min_p_kw))
                ppc["gen"][l_end:stor_end, [PMAX]] = min_p_kw
        # add ext grid / slack data
        ppc["gen"][:eg_end, GEN_BUS] = bus_lookup[eg_is["bus"].values]
        ppc["gen"][:eg_end, VG] = eg_is["vm_pu"].values
        ppc["gen"][:eg_end, GEN_STATUS] = eg_is["in_service"].values
        if "max_p_kw" in eg_is.columns:
            ppc["gen"][:eg_end, PMIN] = - (eg_is["max_p_kw"].values * 1e-3 - delta)
            max_p_kw = ppc["gen"][:eg_end, [PMIN]]
            ncn.copyto(max_p_kw, -p_lim_default, where=isnan(max_p_kw))
            ppc["gen"][:eg_end, [PMIN]] = max_p_kw
        if "min_p_kw" in eg_is.columns:
            ppc["gen"][:eg_end, PMAX] = - (eg_is["min_p_kw"].values * 1e-3 + delta)
            min_p_kw = ppc["gen"][:eg_end, [PMAX]]
            ncn.copyto(min_p_kw, p_lim_default, where=isnan(min_p_kw))
            ppc["gen"][:eg_end, [PMAX]] = min_p_kw
        if "min_q_kvar" in eg_is.columns:
            ppc["gen"][:eg_end, QMAX] = - (eg_is["min_q_kvar"].values * 1e-3 - delta)
            max_q_kvar = ppc["gen"][:eg_end, [QMIN]]
            ncn.copyto(max_q_kvar, -q_lim_default, where=isnan(max_q_kvar))
            ppc["gen"][:eg_end, [QMIN]] = max_q_kvar
        if "max_q_kvar" in eg_is.columns:
            ppc["gen"][:eg_end, QMIN] = - (eg_is["max_q_kvar"].values * 1e-3 + delta)
            min_q_kvar = ppc["gen"][:eg_end, [QMAX]]
            ncn.copyto(min_q_kvar, q_lim_default, where=isnan(min_q_kvar))
            ppc["gen"][:eg_end, [QMAX]] = min_q_kvar - 1e-10
        # set bus values for external grid buses
        eg_buses = bus_lookup[eg_is["bus"].values]
        if calculate_voltage_angles:
            ppc["bus"][eg_buses, VA] = eg_is["va_degree"].values
        ppc["bus"][eg_buses, BUS_TYPE] = REF
        ppc["bus"][eg_buses, VM] = eg_is["vm_pu"].values
        # REF busses don't have flexible voltages by definition:
        ppc["bus"][eg_buses, VMAX] = ppc["bus"][ppc["bus"][:, BUS_TYPE] == REF, VM]
        ppc["bus"][eg_buses, VMIN] = ppc["bus"][ppc["bus"][:, BUS_TYPE] == REF, VM]
        # add generator / pv data
        if gen_end > eg_end:
            ppc["gen"][eg_end:gen_end, GEN_BUS] = bus_lookup[gen_is["bus"].values]
            ppc["gen"][eg_end:gen_end, PG] = - gen_is["p_kw"].values * 1e-3 * gen_is["scaling"].values
            ppc["gen"][eg_end:gen_end, VG] = gen_is["vm_pu"].values
            # set bus values for generator buses
            gen_buses = bus_lookup[gen_is["bus"].values]
            ppc["bus"][gen_buses, BUS_TYPE] = PV
            ppc["bus"][gen_buses, VM] = gen_is["vm_pu"].values
            # set constraints for PV generators
            _copy_q_limits_to_ppc(net, ppc, eg_end, gen_end, _is_elements['gen'])
            _copy_p_limits_to_ppc(net, ppc, eg_end, gen_end, _is_elements['gen'])
            _replace_nans_with_default_q_limits_in_ppc(ppc, eg_end, gen_end, q_lim_default)
            _replace_nans_with_default_p_limits_in_ppc(ppc, eg_end, gen_end, p_lim_default)
def _init_ppc_gen(ppc, xw_end, q_lim_default):
    """Allocate ppc["gen"] with ``xw_end`` default-valued rows (21 pypower gen columns)."""
    ppc["gen"] = np.zeros(shape=(xw_end, 21), dtype=float)
    # default row: q limits at +/- q_lim_default, voltage/status columns at 1
    ppc["gen"][:] = np.array([0, 0, 0, q_lim_default, -q_lim_default, 1.,
                              1., 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0])
def _build_pp_ext_grid(net, ppc, eg_is_mask, eg_end):
    """Write the in-service ext_grid (slack) entries into ppc["gen"] / ppc["bus"]."""
    calculate_voltage_angles = net["_options"]["calculate_voltage_angles"]
    bus_lookup = net["_pd2ppc_lookups"]["bus"]
    # slack generators occupy the first eg_end rows of ppc["gen"]
    eg_buses = bus_lookup[net["ext_grid"]["bus"].values[eg_is_mask]]
    ppc["gen"][:eg_end, GEN_BUS] = eg_buses
    ppc["gen"][:eg_end, VG] = net["ext_grid"]["vm_pu"].values[eg_is_mask]
    ppc["gen"][:eg_end, GEN_STATUS] = True
    # mark connected buses as reference buses
    if calculate_voltage_angles:
        ppc["bus"][eg_buses, VA] = net["ext_grid"]["va_degree"].values[eg_is_mask]
    ppc["bus"][eg_buses, BUS_TYPE] = REF
def _build_pp_gen(net, ppc, gen_is_mask, eg_end, gen_end, q_lim_default, p_lim_default):
    """Fill ppc["gen"] rows eg_end:gen_end with the in-service pandapower gens (PV nodes)."""
    bus_lookup = net["_pd2ppc_lookups"]["bus"]
    copy_constraints_to_ppc = net["_options"]["copy_constraints_to_ppc"]
    gen_buses = bus_lookup[net["gen"]["bus"].values[gen_is_mask]]
    gen_is_vm = net["gen"]["vm_pu"].values[gen_is_mask]
    ppc["gen"][eg_end:gen_end, GEN_BUS] = gen_buses
    # negated: pandapower load reference -> pypower generator reference
    ppc["gen"][eg_end:gen_end, PG] = - (net["gen"]["p_kw"].values[gen_is_mask] * 1e-3 *
                                        net["gen"]["scaling"].values[gen_is_mask])
    ppc["gen"][eg_end:gen_end, VG] = gen_is_vm
    # connected buses become PV buses with the gen voltage setpoint
    ppc["bus"][gen_buses, BUS_TYPE] = PV
    ppc["bus"][gen_buses, VM] = gen_is_vm
    _copy_q_limits_to_ppc(net, ppc, eg_end, gen_end, gen_is_mask)
    _replace_nans_with_default_q_limits_in_ppc(ppc, eg_end, gen_end, q_lim_default)
    if copy_constraints_to_ppc:
        _copy_p_limits_to_ppc(net, ppc, eg_end, gen_end, gen_is_mask)
        _replace_nans_with_default_p_limits_in_ppc(ppc, eg_end, gen_end, p_lim_default)
def _build_pp_xward(net, ppc, gen_end, xw_end, q_lim_default, update_lookup=True):
    """Add the auxiliary PV generators of extended wards to ppc["gen"] / ppc["bus"]."""
    bus_lookup = net["_pd2ppc_lookups"]["bus"]
    xw = net["xward"]
    xw_is = net["_is_elements"]['xward']
    if update_lookup:
        ppc["gen"][gen_end:xw_end, GEN_BUS] = bus_lookup[xw["ad_bus"].values]
    ppc["gen"][gen_end:xw_end, VG] = xw["vm_pu"].values
    ppc["gen"][gen_end:xw_end, GEN_STATUS] = xw_is
    # xwards get unconstrained reactive power
    ppc["gen"][gen_end:xw_end, QMIN] = -q_lim_default
    ppc["gen"][gen_end:xw_end, QMAX] = q_lim_default
    # in-service auxiliary buses become PV, out-of-service ones are disabled
    xward_buses = bus_lookup[net["xward"]["ad_bus"].values]
    ppc["bus"][xward_buses[xw_is], BUS_TYPE] = PV
    ppc["bus"][xward_buses[~xw_is], BUS_TYPE] = NONE
    ppc["bus"][xward_buses, VM] = net["xward"]["vm_pu"].values
def _update_gen_ppc(net, ppc):
    """Refresh generator setpoints/status in an existing ppc from *net* (recycle path)."""
    calculate_voltage_angles = net["_options"]["calculate_voltage_angles"]
    bus_lookup = net["_pd2ppc_lookups"]["bus"]
    # in-service elements
    _is_elements = net["_is_elements"]
    gen_is_mask = _is_elements['gen']
    eg_is = net["ext_grid"][_is_elements['ext_grid']]
    gen_is = net["gen"][_is_elements['gen']]
    eg_end = len(eg_is)
    gen_end = eg_end + len(gen_is)
    xw_end = gen_end + len(net["xward"])
    q_lim_default = 1e9  # 1000 TW - should be enough for distribution grids
    # update ext grid / slack rows via the ext_grid lookup
    ext_grid_lookup = net["_pd2ppc_lookups"]["ext_grid"]
    ext_grid_idx_ppc = ext_grid_lookup[eg_is.index]
    ppc["gen"][ext_grid_idx_ppc, VG] = eg_is["vm_pu"].values
    ppc["gen"][ext_grid_idx_ppc, GEN_STATUS] = eg_is["in_service"].values
    if calculate_voltage_angles:
        # NOTE(review): this indexes ppc["bus"] with the ext-grid *gen row*
        # indices rather than bus indices — verify against upstream pandapower.
        ppc["bus"][ext_grid_idx_ppc, VA] = eg_is["va_degree"].values
    # update generator / pv rows
    if gen_end > eg_end:
        gen_lookup = net["_pd2ppc_lookups"]["gen"]
        gen_idx_ppc = gen_lookup[gen_is.index]
        ppc["gen"][gen_idx_ppc, PG] = - gen_is["p_kw"].values * 1e-3 * gen_is["scaling"].values
        ppc["gen"][gen_idx_ppc, VG] = gen_is["vm_pu"].values
        # refresh bus voltage setpoints for generator buses
        gen_buses = bus_lookup[gen_is["bus"].values]
        ppc["bus"][gen_buses, VM] = gen_is["vm_pu"].values
        _copy_q_limits_to_ppc(net, ppc, eg_end, gen_end, gen_is_mask)
        _replace_nans_with_default_q_limits_in_ppc(ppc, eg_end, gen_end, q_lim_default)
    # xwards are not supported on the recycle path
    if xw_end > gen_end:
        raise NotImplementedError("xwards in combination with recycle is not properly implemented")
        # _build_pp_xward(net, ppc, gen_end, xw_end, q_lim_default,
        #                 update_lookup=False)
def _copy_q_limits_to_ppc(net, ppc, eg_end, gen_end, gen_is_mask):
    """Copy gen q limits into ppc["gen"][eg_end:gen_end], flipping the sign (max <-> min)."""
    # Note: Pypower has generator reference system, pandapower uses load reference
    # system (max <-> min)
    delta = net["_options"]["delta"]
    if "max_q_kvar" in net["gen"].columns:
        ppc["gen"][eg_end:gen_end, QMIN] = -net["gen"]["max_q_kvar"].values[gen_is_mask] * 1e-3 - delta
    if "min_q_kvar" in net["gen"].columns:
        ppc["gen"][eg_end:gen_end, QMAX] = -net["gen"]["min_q_kvar"].values[gen_is_mask] * 1e-3 + delta
def _copy_p_limits_to_ppc(net, ppc, eg_end, gen_end, gen_is_mask):
    """Copy gen p limits into ppc["gen"][eg_end:gen_end], flipping the sign (max <-> min)."""
    delta = net["_options"]["delta"]
    if "max_p_kw" in net["gen"].columns:
        ppc["gen"][eg_end:gen_end, PMIN] = -net["gen"]["max_p_kw"].values[gen_is_mask] * 1e-3 + delta
    if "min_p_kw" in net["gen"].columns:
        ppc["gen"][eg_end:gen_end, PMAX] = -net["gen"]["min_p_kw"].values[gen_is_mask] * 1e-3 - delta
def _replace_nans_with_default_q_limits_in_ppc(ppc, eg_end, gen_end, q_lim_default):
    """Substitute +/- ``q_lim_default`` wherever the copied q limits are NaN."""
    # Note: Pypower has generator reference system, pandapower uses load reference system (max <-> min)
    max_q_kvar = ppc["gen"][eg_end:gen_end, [QMIN]]
    ncn.copyto(max_q_kvar, -q_lim_default, where=np.isnan(max_q_kvar))
    ppc["gen"][eg_end:gen_end, [QMIN]] = max_q_kvar
    min_q_kvar = ppc["gen"][eg_end:gen_end, [QMAX]]
    ncn.copyto(min_q_kvar, q_lim_default, where=np.isnan(min_q_kvar))
    ppc["gen"][eg_end:gen_end, [QMAX]] = min_q_kvar
def _replace_nans_with_default_p_limits_in_ppc(ppc, eg_end, gen_end, p_lim_default):
    """Substitute +/- ``p_lim_default`` wherever the copied p limits are NaN."""
    # Note: Pypower has generator reference system, pandapower uses load reference system (max <-> min)
    max_p_kw = ppc["gen"][eg_end:gen_end, [PMIN]]
    ncn.copyto(max_p_kw, -p_lim_default, where=isnan(max_p_kw))
    ppc["gen"][eg_end:gen_end, [PMIN]] = max_p_kw
    min_p_kw = ppc["gen"][eg_end:gen_end, [PMAX]]
    ncn.copyto(min_p_kw, p_lim_default, where=isnan(min_p_kw))
    ppc["gen"][eg_end:gen_end, [PMAX]] = min_p_kw
def _check_voltage_setpoints_at_same_bus(ppc):
    """Raise UserWarning if generators at one bus have different vm setpoints."""
    buses = ppc['gen'][:, GEN_BUS].astype(int)
    setpoints = ppc['gen'][:, VG]
    if _different_values_at_one_bus(buses, setpoints):
        raise UserWarning("Generators with different voltage setpoints connected to the same bus")
def _check_voltage_angles_at_same_bus(net, ppc):
    """Raise UserWarning if ext grids at one bus have different va_degree setpoints."""
    in_service = net._is_elements["ext_grid"]
    angles = net.ext_grid.va_degree[in_service].values
    # Map in-service ext_grid indices to their rows in the ppc gen table.
    eg_rows = net._pd2ppc_lookups["ext_grid"][net.ext_grid.index[in_service]]
    buses = ppc["gen"][eg_rows, GEN_BUS].astype(int)
    if _different_values_at_one_bus(buses, angles):
        raise UserWarning("Ext grids with different voltage angle setpoints connected to the same bus")
def _different_values_at_one_bus(buses, values):
# buses with one or more generators and their index
unique_bus, index_first_bus = np.unique(buses, return_index=True)
# voltage setpoint lookup with the voltage of the first occurence of that bus
first_values = -np.ones(buses.max() + 1)
first_values[unique_bus] = values[index_first_bus]
# generate voltage setpoints where all generators at the same bus
# have the voltage of the first generator at that bus
values_equal = first_values[buses]
return not np.array_equal(values, values_equal)
| true | true |
f71c3ac798ea0aa03af1f53d6228417fc90cca0c | 861 | py | Python | abtools/__init__.py | menis/abtools | bfc7c6c508b174bb3b74d8f152156242ddd2ee77 | [
"MIT"
] | 9 | 2016-06-13T20:00:04.000Z | 2022-03-19T19:07:23.000Z | abtools/__init__.py | menis/abtools | bfc7c6c508b174bb3b74d8f152156242ddd2ee77 | [
"MIT"
] | null | null | null | abtools/__init__.py | menis/abtools | bfc7c6c508b174bb3b74d8f152156242ddd2ee77 | [
"MIT"
] | 4 | 2018-04-10T09:05:21.000Z | 2022-01-27T21:23:06.000Z | import os
# Skip the submodule imports when building docs on Read the Docs (the
# READTHEDOCS env var is set there), so the package can be imported without
# the dependencies those submodules need.
if not os.environ.get('READTHEDOCS', None):
    from ._compare import run as compare
    from ._correct import run as correct
    from ._finder import run as finder
    from ._phylogeny import run as phylogeny
    # import _stats as stats
from pkg_resources import get_distribution, DistributionNotFound
import os.path
# Derive __version__ from the installed distribution metadata, falling back to
# a human-readable message when AbTools is not installed.
try:
    _dist = get_distribution('abtools')
    # Normalize case for Windows systems
    dist_loc = os.path.normcase(_dist.location)
    here = os.path.normcase(__file__)
    if not here.startswith(os.path.join(dist_loc, 'abtools')):
        # not installed, but there is another version that *is*
        raise DistributionNotFound
except DistributionNotFound:
    __version__ = 'Please install AbTools before checking the version'
else:
    __version__ = _dist.version
| 33.115385 | 74 | 0.688734 | import os
if not os.environ.get('READTHEDOCS', None):
from ._compare import run as compare
from ._correct import run as correct
from ._finder import run as finder
from ._phylogeny import run as phylogeny
from pkg_resources import get_distribution, DistributionNotFound
import os.path
try:
_dist = get_distribution('abtools')
dist_loc = os.path.normcase(_dist.location)
here = os.path.normcase(__file__)
if not here.startswith(os.path.join(dist_loc, 'abtools')):
raise DistributionNotFound
except DistributionNotFound:
__version__ = 'Please install AbTools before checking the version'
else:
__version__ = _dist.version
| true | true |
f71c3b9ce33e658445e5575868064ec2d77a48fe | 2,312 | py | Python | src/borg/shellpattern.py | russelldavis/borg | 20abc9d68742a8cf5da8503cef96b2e9a5c83213 | [
"BSD-3-Clause"
] | 2 | 2021-08-19T16:25:15.000Z | 2021-11-17T10:54:16.000Z | src/borg/shellpattern.py | tschwinge/borg | 3e2d5b2b220aee2db68f81bbc0108332abc9cda9 | [
"BSD-3-Clause"
] | null | null | null | src/borg/shellpattern.py | tschwinge/borg | 3e2d5b2b220aee2db68f81bbc0108332abc9cda9 | [
"BSD-3-Clause"
] | null | null | null | import os
import re
def translate(pat, match_end=r"\Z"):
    """Translate a shell-style pattern to a regular expression.

    The pattern may include ``**<sep>`` (<sep> stands for the platform-specific path separator; "/" on POSIX systems) for
    matching zero or more directory levels and "*" for matching zero or more arbitrary characters with the exception of
    any path separator. Wrap meta-characters in brackets for a literal match (i.e. "[?]" to match the literal character
    "?").

    Using match_end=regex one can give a regular expression that is used to match after the regex that is generated from
    the pattern. The default is to match the end of the string.

    This function is derived from the "fnmatch" module distributed with the Python standard library.

    Copyright (C) 2001-2016 Python Software Foundation. All rights reserved.

    TODO: support {alt1,alt2} shell-style alternatives
    """
    sep = os.path.sep
    n = len(pat)
    i = 0
    res = ""

    while i < n:
        c = pat[i]
        i += 1

        if c == "*":
            if i + 1 < n and pat[i] == "*" and pat[i + 1] == sep:
                # **/ == wildcard for 0+ full (relative) directory names with trailing slashes; the forward slash stands
                # for the platform-specific path separator
                res += r"(?:[^\%s]*\%s)*" % (sep, sep)
                i += 2
            else:
                # * == wildcard for name parts (does not cross path separator)
                res += r"[^\%s]*" % sep
        elif c == "?":
            # ? == any single character excluding path separator
            res += r"[^\%s]" % sep
        elif c == "[":
            j = i
            if j < n and pat[j] == "!":
                j += 1
            if j < n and pat[j] == "]":
                j += 1
            while j < n and pat[j] != "]":
                j += 1
            if j >= n:
                # unterminated character class: treat "[" literally
                res += "\\["
            else:
                stuff = pat[i:j].replace("\\", "\\\\")
                i = j + 1
                if stuff[0] == "!":
                    stuff = "^" + stuff[1:]
                elif stuff[0] == "^":
                    stuff = "\\" + stuff
                res += "[%s]" % stuff
        else:
            res += re.escape(c)

    # Bug fix: global inline flags must appear at the start of a pattern.
    # Appending "(?ms)" at the end is deprecated and raises re.error on
    # Python 3.11+; prepending them yields a semantically identical regex.
    return "(?ms)" + res + match_end
| 35.030303 | 121 | 0.493945 | import os
import re
def translate(pat, match_end=r"\Z"):
sep = os.path.sep
n = len(pat)
i = 0
res = ""
while i < n:
c = pat[i]
i += 1
if c == "*":
if i + 1 < n and pat[i] == "*" and pat[i + 1] == sep:
res += r"(?:[^\%s]*\%s)*" % (sep, sep)
i += 2
else:
res += r"[^\%s]*" % sep
elif c == "?":
res += r"[^\%s]" % sep
elif c == "[":
j = i
if j < n and pat[j] == "!":
j += 1
if j < n and pat[j] == "]":
j += 1
while j < n and pat[j] != "]":
j += 1
if j >= n:
res += "\\["
else:
stuff = pat[i:j].replace("\\", "\\\\")
i = j + 1
if stuff[0] == "!":
stuff = "^" + stuff[1:]
elif stuff[0] == "^":
stuff = "\\" + stuff
res += "[%s]" % stuff
else:
res += re.escape(c)
return res + match_end + "(?ms)"
| true | true |
f71c3d9f2d35818503ab834cb34b0e0f458291f8 | 1,541 | py | Python | signal_ocean/geo/models.py | ktsitsikas-signal/SignalSDK | 1b125ae963ee2b53a2a3ec5a7ae6bf9511608355 | [
"Apache-2.0"
] | null | null | null | signal_ocean/geo/models.py | ktsitsikas-signal/SignalSDK | 1b125ae963ee2b53a2a3ec5a7ae6bf9511608355 | [
"Apache-2.0"
] | null | null | null | signal_ocean/geo/models.py | ktsitsikas-signal/SignalSDK | 1b125ae963ee2b53a2a3ec5a7ae6bf9511608355 | [
"Apache-2.0"
] | null | null | null | # noqa: D100
from dataclasses import dataclass
from decimal import Decimal
from typing import Optional
@dataclass(frozen=True)
class Country:
    """Represents a country.

    Immutable value object (frozen dataclass): instances are hashable and
    comparable by field values.

    Attributes:
        id: The ID of the country.
        name: The name of the country.
        country_code: Alpha-2 code used by the ISO 3166
            standard.
        country_code_numeric: UN numeric code used by the ISO 3166
            standard.
        country_code_iso3: Alpha-3 code used by the ISO 3166
            standard.
    """

    id: int
    name: str
    country_code: str
    country_code_numeric: str
    country_code_iso3: str
@dataclass(frozen=True)
class Port:
    """A maritime facility where vessels can dock.

    Immutable value object (frozen dataclass).

    Attributes:
        id: ID of the port.
        country_id: ID of the country the port is in.
        area_id: ID of the area the port is in.
        name: Name of the port.
        latitude: Latitude of the port.
        longitude: Longitude of the port.
        source: The source of information about the port.
    """

    id: int
    country_id: int
    area_id: int
    name: str
    latitude: Decimal
    longitude: Decimal
    source: str
@dataclass(frozen=True)
class Area:
    """A geographical area.

    Immutable value object (frozen dataclass). Areas form a tree via
    ``parent_area_id``.

    Attributes:
        id: ID of the area.
        name: Name of the area.
        area_type_id: ID of the area type.
        parent_area_id: ID of this area's parent area. None if the area has no
            parent.
    """

    id: int
    name: str
    area_type_id: int
    parent_area_id: Optional[int]
| 22.333333 | 78 | 0.627515 |
from dataclasses import dataclass
from decimal import Decimal
from typing import Optional
@dataclass(frozen=True)
class Country:
id: int
name: str
country_code: str
country_code_numeric: str
country_code_iso3: str
@dataclass(frozen=True)
class Port:
id: int
country_id: int
area_id: int
name: str
latitude: Decimal
longitude: Decimal
source: str
@dataclass(frozen=True)
class Area:
id: int
name: str
area_type_id: int
parent_area_id: Optional[int]
| true | true |
f71c3e06df78623471caeb98f86f952dc33b019f | 671 | py | Python | oocran/django/ns/admin.py | howls90/oocran | 9951f3ff752f9f6517a4d016476c1d1e2bb44a4d | [
"Apache-2.0",
"BSD-3-Clause"
] | 3 | 2018-12-12T10:32:16.000Z | 2022-02-07T19:46:10.000Z | oocran/django/ns/admin.py | howls90/oocran | 9951f3ff752f9f6517a4d016476c1d1e2bb44a4d | [
"Apache-2.0",
"BSD-3-Clause"
] | 1 | 2017-01-11T06:56:35.000Z | 2017-01-11T06:58:44.000Z | oocran/django/ns/admin.py | howls90/OOCRAN | 9951f3ff752f9f6517a4d016476c1d1e2bb44a4d | [
"Apache-2.0",
"BSD-3-Clause"
] | 6 | 2017-05-29T03:34:23.000Z | 2022-02-07T19:46:11.000Z | from django.contrib import admin
from .models import Ns, Nvf
class NsModelAdmin(admin.ModelAdmin):
    """Django admin configuration for network service (Ns) entries."""
    list_display = ["name", "update", "timestamp"]
    # "update" is the link column so that "name" can stay editable in the list.
    list_display_links = ["update"]
    list_filter = ["update", "timestamp"]
    list_editable = ["name"]
    search_fields = ["name"]
    # NOTE(review): ModelAdmin does not use an inner Meta class; Django ignores
    # this block. Presumably copied from a ModelForm — confirm intent.
    class Meta:
        model = Ns
# Make the model available in the Django admin site.
admin.site.register(Ns, NsModelAdmin)
class NvfModelAdmin(admin.ModelAdmin):
    """Django admin configuration for Nvf entries (mirrors NsModelAdmin)."""
    list_display = ["name", "update", "timestamp"]
    # "update" is the link column so that "name" can stay editable in the list.
    list_display_links = ["update"]
    list_filter = ["update", "timestamp"]
    list_editable = ["name"]
    search_fields = ["name"]
    # NOTE(review): inner Meta is ignored by ModelAdmin — confirm intent.
    class Meta:
        model = Nvf
# Make the model available in the Django admin site.
admin.site.register(Nvf, NvfModelAdmin)
| 21.645161 | 50 | 0.657228 | from django.contrib import admin
from .models import Ns, Nvf
class NsModelAdmin(admin.ModelAdmin):
list_display = ["name", "update", "timestamp"]
list_display_links = ["update"]
list_filter = ["update", "timestamp"]
list_editable = ["name"]
search_fields = ["name"]
class Meta:
model = Ns
admin.site.register(Ns, NsModelAdmin)
class NvfModelAdmin(admin.ModelAdmin):
list_display = ["name", "update", "timestamp"]
list_display_links = ["update"]
list_filter = ["update", "timestamp"]
list_editable = ["name"]
search_fields = ["name"]
class Meta:
model = Nvf
admin.site.register(Nvf, NvfModelAdmin)
| true | true |
f71c3fd4413d254a1be32c01d461ea5d95ef19f5 | 560 | py | Python | Video_Generation_with_Detections.py | ludvikalkhoury/Sperm-Heads-Segmentation-and-Localization-using-Urbano-s-Method | 30580d02d0981d59376fbec6b59f5146eaffef14 | [
"MIT"
] | null | null | null | Video_Generation_with_Detections.py | ludvikalkhoury/Sperm-Heads-Segmentation-and-Localization-using-Urbano-s-Method | 30580d02d0981d59376fbec6b59f5146eaffef14 | [
"MIT"
] | null | null | null | Video_Generation_with_Detections.py | ludvikalkhoury/Sperm-Heads-Segmentation-and-Localization-using-Urbano-s-Method | 30580d02d0981d59376fbec6b59f5146eaffef14 | [
"MIT"
] | null | null | null | import cv2
import numpy as np
# NOTE(review): numpy is imported but not used anywhere in this script.
# Inclusive range of pre-rendered frames ("image/Seg_frame<N>.jpg") to stitch.
Initial_Frame = 900
Final_Frame = 1190
video_name = 'Sample2 with detections.avi'
# Read the first frame only to discover the output video dimensions.
frame = cv2.imread("image/Seg_frame%d.jpg" % Initial_Frame)
height, width, layers = frame.shape
fps = 15
# Second argument is the fourcc code; 0 here — presumably a default/raw codec
# for the .avi container. TODO confirm the intended codec.
video = cv2.VideoWriter(video_name, 0, fps, (width,height))
for x in range(Initial_Frame,Final_Frame+1,1):
	frame = cv2.imread("image/Seg_frame%d.jpg" % x)
	video.write(frame)
	# Progress indicator as a percentage of the frame range.
	print(round(((x - Initial_Frame) / (Final_Frame - Initial_Frame)) * 100, 2), '%')
cv2.destroyAllWindows()
video.release() | 22.4 | 86 | 0.676786 | import cv2
import numpy as np
Initial_Frame = 900
Final_Frame = 1190
video_name = 'Sample2 with detections.avi'
frame = cv2.imread("image/Seg_frame%d.jpg" % Initial_Frame)
height, width, layers = frame.shape
fps = 15
video = cv2.VideoWriter(video_name, 0, fps, (width,height))
for x in range(Initial_Frame,Final_Frame+1,1):
frame = cv2.imread("image/Seg_frame%d.jpg" % x)
video.write(frame)
print(round(((x - Initial_Frame) / (Final_Frame - Initial_Frame)) * 100, 2), '%')
cv2.destroyAllWindows()
video.release() | true | true |
f71c41f8731671ad3e1d3a2df155f7aa93679845 | 5,182 | py | Python | instances/optimization/20210422-1717/pas1/inst-20210422-1717-c30-pas1.py | LHcau/scheduling-shared-passenger-and-freight-transport-on-a-fixed-infrastructure | bba1e6af5bc8d9deaa2dc3b83f6fe9ddf15d2a11 | [
"BSD-3-Clause"
] | null | null | null | instances/optimization/20210422-1717/pas1/inst-20210422-1717-c30-pas1.py | LHcau/scheduling-shared-passenger-and-freight-transport-on-a-fixed-infrastructure | bba1e6af5bc8d9deaa2dc3b83f6fe9ddf15d2a11 | [
"BSD-3-Clause"
] | null | null | null | instances/optimization/20210422-1717/pas1/inst-20210422-1717-c30-pas1.py | LHcau/scheduling-shared-passenger-and-freight-transport-on-a-fixed-infrastructure | bba1e6af5bc8d9deaa2dc3b83f6fe9ddf15d2a11 | [
"BSD-3-Clause"
] | null | null | null |
"""
PERIODS
"""
numPeriods = 180
"""
STOPS
"""
numStations = 13
station_names = (
"Hamburg Hbf", # 0
"Landwehr", # 1
"Hasselbrook", # 2
"Wansbeker Chaussee*", # 3
"Friedrichsberg*", # 4
"Barmbek*", # 5
"Alte Woehr (Stadtpark)", # 6
"Ruebenkamp (City Nord)", # 7
"Ohlsdorf*", # 8
"Kornweg", # 9
"Hoheneichen", # 10
"Wellingsbuettel", # 11
"Poppenbuettel*", # 12
)
numStops = 26
stops_position = (
(0, 0), # Stop 0
(2, 0), # Stop 1
(3, 0), # Stop 2
(4, 0), # Stop 3
(5, 0), # Stop 4
(6, 0), # Stop 5
(7, 0), # Stop 6
(8, 0), # Stop 7
(9, 0), # Stop 8
(11, 0), # Stop 9
(13, 0), # Stop 10
(14, 0), # Stop 11
(15, 0), # Stop 12
(15, 1), # Stop 13
(15, 1), # Stop 14
(13, 1), # Stop 15
(12, 1), # Stop 16
(11, 1), # Stop 17
(10, 1), # Stop 18
(9, 1), # Stop 19
(8, 1), # Stop 20
(7, 1), # Stop 21
(6, 1), # Stop 22
(4, 1), # Stop 23
(2, 1), # Stop 24
(1, 1), # Stop 25
)
stops_distance = (
(0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0), # Stop 0
(0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0), # Stop 1
(0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0), # Stop 2
(0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0), # Stop 3
(0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0), # Stop 4
(0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0), # Stop 5
(0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0), # Stop 6
(0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0), # Stop 7
(0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0), # Stop 8
(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0), # Stop 9
(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0), # Stop 10
(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0), # Stop 11
(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0), # Stop 12
(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0), # Stop 13
(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0), # Stop 14
(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0), # Stop 15
(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0), # Stop 16
(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0), # Stop 17
(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0), # Stop 18
(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0), # Stop 19
(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0), # Stop 20
(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0), # Stop 21
(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0), # Stop 22
(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0), # Stop 23
(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2), # Stop 24
(1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0), # Stop 25
)
station_start = 0
"""
TRAMS
"""
numTrams = 18
tram_capacity = 514
tram_capacity_cargo = 304
tram_capacity_min_passenger = 208
tram_capacity_min_cargo = 0
tram_speed = 1
tram_headway = 1
tram_min_service = 1
tram_max_service = 10
min_time_next_tram = 0.333
tram_travel_deviation = 0.167
"""
PASSENGERS
"""
passenger_set = "pas-20210422-1717-int1"
passenger_service_time_board = 0.0145
passenger_service_time_alight = 0.0145
"""
CARGO
"""
numCargo = 30
cargo_size = 4
cargo_station_destination = (
5, # 0
5, # 1
5, # 2
8, # 3
5, # 4
12, # 5
8, # 6
4, # 7
12, # 8
3, # 9
12, # 10
12, # 11
4, # 12
4, # 13
12, # 14
3, # 15
5, # 16
4, # 17
3, # 18
12, # 19
5, # 20
8, # 21
12, # 22
8, # 23
3, # 24
4, # 25
12, # 26
12, # 27
3, # 28
4, # 29
)
cargo_release = (
5, # 0
8, # 1
16, # 2
22, # 3
24, # 4
25, # 5
26, # 6
27, # 7
32, # 8
33, # 9
34, # 10
35, # 11
37, # 12
37, # 13
38, # 14
41, # 15
44, # 16
45, # 17
46, # 18
47, # 19
48, # 20
49, # 21
57, # 22
61, # 23
62, # 24
67, # 25
70, # 26
70, # 27
71, # 28
72, # 29
)
cargo_station_deadline = (
176, # 0
171, # 1
155, # 2
123, # 3
126, # 4
91, # 5
36, # 6
87, # 7
141, # 8
163, # 9
108, # 10
144, # 11
76, # 12
47, # 13
97, # 14
114, # 15
142, # 16
55, # 17
56, # 18
57, # 19
118, # 20
160, # 21
139, # 22
71, # 23
82, # 24
77, # 25
80, # 26
169, # 27
129, # 28
99, # 29
)
cargo_max_delay = 3
cargo_service_time_load = 0.3333333333333333
cargo_service_time_unload = 0.25
"""
parameters for reproducibiliy. More information: https://numpy.org/doc/stable/reference/random/parallel.html
"""
#initial entropy
entropy = 8991598675325360468762009371570610170
#index for seed sequence child
child_seed_index = (
0, # 0
)
| 20.563492 | 108 | 0.446353 |
numPeriods = 180
numStations = 13
station_names = (
"Hamburg Hbf",
"Landwehr",
"Hasselbrook",
"Wansbeker Chaussee*",
"Friedrichsberg*",
"Barmbek*",
"Alte Woehr (Stadtpark)",
"Ruebenkamp (City Nord)",
"Ohlsdorf*",
"Kornweg",
"Hoheneichen",
"Wellingsbuettel",
"Poppenbuettel*",
)
numStops = 26
stops_position = (
(0, 0),
(2, 0),
(3, 0),
(4, 0),
(5, 0),
(6, 0),
(7, 0),
(8, 0),
(9, 0),
(11, 0),
(13, 0),
(14, 0),
(15, 0),
(15, 1),
(15, 1),
(13, 1),
(12, 1),
(11, 1),
(10, 1),
(9, 1),
(8, 1),
(7, 1),
(6, 1),
(4, 1),
(2, 1),
(1, 1),
)
stops_distance = (
(0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0),
(0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0),
(0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0),
(0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0),
(0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0),
(0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0),
(0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0),
(0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0),
(0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0),
(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0),
(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0),
(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0),
(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0),
(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0),
(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0),
(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0),
(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0),
(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0),
(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0),
(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0),
(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0),
(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0),
(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0),
(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0),
(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2),
(1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0),
)
station_start = 0
numTrams = 18
tram_capacity = 514
tram_capacity_cargo = 304
tram_capacity_min_passenger = 208
tram_capacity_min_cargo = 0
tram_speed = 1
tram_headway = 1
tram_min_service = 1
tram_max_service = 10
min_time_next_tram = 0.333
tram_travel_deviation = 0.167
passenger_set = "pas-20210422-1717-int1"
passenger_service_time_board = 0.0145
passenger_service_time_alight = 0.0145
numCargo = 30
cargo_size = 4
cargo_station_destination = (
5,
5,
5,
8,
5,
12,
8,
4,
12,
3,
12,
12,
4,
4,
12,
3,
5,
4,
3,
12,
5,
8,
12,
8,
3,
4,
12,
12,
3,
4,
)
cargo_release = (
5,
8,
16,
22,
24,
25,
26,
27,
32,
33,
34,
35,
37,
37,
38,
41,
44,
45,
46,
47,
48,
49,
57,
61,
62,
67,
70,
70,
71,
72,
)
cargo_station_deadline = (
176,
171,
155,
123,
126,
91,
36,
87,
141,
163,
108,
144,
76,
47,
97,
114,
142,
55,
56,
57,
118,
160,
139,
71,
82,
77,
80,
169,
129,
99,
)
cargo_max_delay = 3
cargo_service_time_load = 0.3333333333333333
cargo_service_time_unload = 0.25
entropy = 8991598675325360468762009371570610170
child_seed_index = (
0,
)
| true | true |
f71c4289fb96b9b13572b2e0265ec5d04f90d215 | 3,523 | py | Python | nn_interpretability/model/model_repository.py | miquelmn/nn_interpretability | 2b5d2b4102016189743e09f1f3a56f2ecddfde98 | [
"MIT"
] | 41 | 2020-10-13T18:46:32.000Z | 2022-02-21T15:52:50.000Z | nn_interpretability/model/model_repository.py | miquelmn/nn_interpretability | 2b5d2b4102016189743e09f1f3a56f2ecddfde98 | [
"MIT"
] | 4 | 2021-07-11T12:38:03.000Z | 2022-03-08T14:47:38.000Z | nn_interpretability/model/model_repository.py | miquelmn/nn_interpretability | 2b5d2b4102016189743e09f1f3a56f2ecddfde98 | [
"MIT"
] | 7 | 2020-10-21T13:03:16.000Z | 2022-03-07T11:45:00.000Z | import os
import torch
from pathlib import Path
from nn_interpretability.model.definition.am_mnist_classifier import AMCNN
from nn_interpretability.model.definition.mc_dropout_cnn import CNN_Dropout
from nn_interpretability.model.definition.general_mnist_cnn import GeneralCNN
from nn_interpretability.model.definition.mnist_generator import MNISTGenerator
from nn_interpretability.model.definition.mnist_discriminator import MNISTDiscriminator
from nn_interpretability.model.definition.cam_mnist_classifier import CAMMNISTClassifier
from nn_interpretability.model.definition.pretrained_dc_generator import PretrainedDCGANGenerator
from nn_interpretability.model.definition.cam_mnist_classifier_2 import CAMMNISTExtendedClassifier
class ModelRepository:
    """Factory and persistence helper for the project's models.

    Centralizes model construction, restoring saved state dicts from the
    repository-level ``models/`` directory, and device placement.
    """
    # Absolute path of the repository-level "models" directory (trailing slash included).
    MODELS_PATH = str(Path(__file__).parent.parent.parent.joinpath('models')) + "/"
    # Every model returned by this class is moved to this device.
    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
    @staticmethod
    def get_general_mnist_cnn(path: str = None):
        """Return a GeneralCNN; restore weights from `path` if that file exists."""
        model = GeneralCNN()
        if path is not None:
            # NOTE(review): unlike the getters below, this one silently returns an
            # untrained model when the file is missing — confirm this asymmetry
            # is intentional.
            if os.path.exists(ModelRepository.MODELS_PATH + path):
                model = ModelRepository._load(model, path)
        return model.to(ModelRepository.device)
    @staticmethod
    def get_cnn_dropout(path: str = None):
        """Return a CNN_Dropout; restore weights from `path` if that file exists."""
        model = CNN_Dropout()
        if path is not None:
            # NOTE(review): existence guard mirrors get_general_mnist_cnn; the
            # remaining getters raise instead when the file is missing.
            if os.path.exists(ModelRepository.MODELS_PATH + path):
                model = ModelRepository._load(model, path)
        return model.to(ModelRepository.device)
    @staticmethod
    def get_cam_classifier(path: str = None):
        """Return a CAMMNISTClassifier, restoring weights when `path` is given."""
        model = CAMMNISTClassifier()
        if path is not None:
            model = ModelRepository._load(model, path)
        return model.to(ModelRepository.device)
    @staticmethod
    def get_cam_extended_classifier(path: str = None):
        """Return a CAMMNISTExtendedClassifier, restoring weights when `path` is given."""
        model = CAMMNISTExtendedClassifier()
        if path is not None:
            model = ModelRepository._load(model, path)
        return model.to(ModelRepository.device)
    @staticmethod
    def get_am_classifier(path: str = None):
        """Return an AMCNN, restoring weights when `path` is given."""
        model = AMCNN()
        if path is not None:
            model = ModelRepository._load(model, path)
        return model.to(ModelRepository.device)
    @staticmethod
    def get_pretrained_dcgan_generator():
        """
        Source of the pretrained model is:
        https://github.com/csinva/gan-vae-pretrained-pytorch
        :return:
        """
        # Weights file is expected inside MODELS_PATH.
        path = 'pretrained_dcgan_generator.pth'
        model = PretrainedDCGANGenerator()
        model = ModelRepository._load(model, path)
        return model.to(ModelRepository.device)
    @staticmethod
    def get_mnist_generator(latent_dim: int = 128, path: str = None):
        """Return an MNISTGenerator with the given latent size, optionally restored."""
        model = MNISTGenerator(latent_dim=latent_dim)
        if path is not None:
            model = ModelRepository._load(model, path)
        return model.to(ModelRepository.device)
    @staticmethod
    def get_mnist_discriminator(path: str = None):
        """Return an MNISTDiscriminator, restoring weights when `path` is given."""
        model = MNISTDiscriminator()
        if path is not None:
            model = ModelRepository._load(model, path)
        return model.to(ModelRepository.device)
    @staticmethod
    def save(model, model_name):
        """Persist the model's state dict under MODELS_PATH; return the model."""
        torch.save(model.state_dict(), ModelRepository.MODELS_PATH + model_name)
        return model
    @staticmethod
    def _load(model, model_name):
        """Load a state dict into `model` (mapped onto `device`) and return it."""
        model.load_state_dict(torch.load(ModelRepository.MODELS_PATH + model_name, map_location=ModelRepository.device))
        return model.to(ModelRepository.device)
| 32.62037 | 121 | 0.705081 | import os
import torch
from pathlib import Path
from nn_interpretability.model.definition.am_mnist_classifier import AMCNN
from nn_interpretability.model.definition.mc_dropout_cnn import CNN_Dropout
from nn_interpretability.model.definition.general_mnist_cnn import GeneralCNN
from nn_interpretability.model.definition.mnist_generator import MNISTGenerator
from nn_interpretability.model.definition.mnist_discriminator import MNISTDiscriminator
from nn_interpretability.model.definition.cam_mnist_classifier import CAMMNISTClassifier
from nn_interpretability.model.definition.pretrained_dc_generator import PretrainedDCGANGenerator
from nn_interpretability.model.definition.cam_mnist_classifier_2 import CAMMNISTExtendedClassifier
class ModelRepository:
MODELS_PATH = str(Path(__file__).parent.parent.parent.joinpath('models')) + "/"
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
@staticmethod
def get_general_mnist_cnn(path: str = None):
model = GeneralCNN()
if path is not None:
if os.path.exists(ModelRepository.MODELS_PATH + path):
model = ModelRepository._load(model, path)
return model.to(ModelRepository.device)
@staticmethod
def get_cnn_dropout(path: str = None):
model = CNN_Dropout()
if path is not None:
if os.path.exists(ModelRepository.MODELS_PATH + path):
model = ModelRepository._load(model, path)
return model.to(ModelRepository.device)
@staticmethod
def get_cam_classifier(path: str = None):
model = CAMMNISTClassifier()
if path is not None:
model = ModelRepository._load(model, path)
return model.to(ModelRepository.device)
@staticmethod
def get_cam_extended_classifier(path: str = None):
model = CAMMNISTExtendedClassifier()
if path is not None:
model = ModelRepository._load(model, path)
return model.to(ModelRepository.device)
@staticmethod
def get_am_classifier(path: str = None):
model = AMCNN()
if path is not None:
model = ModelRepository._load(model, path)
return model.to(ModelRepository.device)
@staticmethod
def get_pretrained_dcgan_generator():
path = 'pretrained_dcgan_generator.pth'
model = PretrainedDCGANGenerator()
model = ModelRepository._load(model, path)
return model.to(ModelRepository.device)
@staticmethod
def get_mnist_generator(latent_dim: int = 128, path: str = None):
model = MNISTGenerator(latent_dim=latent_dim)
if path is not None:
model = ModelRepository._load(model, path)
return model.to(ModelRepository.device)
@staticmethod
def get_mnist_discriminator(path: str = None):
model = MNISTDiscriminator()
if path is not None:
model = ModelRepository._load(model, path)
return model.to(ModelRepository.device)
@staticmethod
def save(model, model_name):
torch.save(model.state_dict(), ModelRepository.MODELS_PATH + model_name)
return model
@staticmethod
def _load(model, model_name):
model.load_state_dict(torch.load(ModelRepository.MODELS_PATH + model_name, map_location=ModelRepository.device))
return model.to(ModelRepository.device)
| true | true |
f71c4337eebf63c1b6c456cfeb478e85e64e7d70 | 4,660 | py | Python | cherrytree/github_utils.py | preset-io/cherrytree | 647b7acfb8f95c6a874386183860fdf17cace49b | [
"Apache-2.0"
] | 1 | 2021-04-06T18:25:58.000Z | 2021-04-06T18:25:58.000Z | cherrytree/github_utils.py | preset-io/cherrytree | 647b7acfb8f95c6a874386183860fdf17cace49b | [
"Apache-2.0"
] | null | null | null | cherrytree/github_utils.py | preset-io/cherrytree | 647b7acfb8f95c6a874386183860fdf17cace49b | [
"Apache-2.0"
] | null | null | null | import os
import re
from collections import OrderedDict
from typing import Generator, List, Optional, Reversible
import delegator
from git import Commit
from git.exc import InvalidGitRepositoryError
from git.repo import Repo
from github import Github
from github.Label import Label
from github.Issue import Issue
from github.GithubException import UnknownObjectException
from github.Repository import Repository
from cherrytree.classes import CherryTreeExecutionException
# PRs are either of form "Merge pull request #nnn from..." or "...(#nnn)"
# Group 2 captures the number for the first form, group 3 for the second.
PR_REGEX = re.compile(r"(^Merge pull request #(\d+) from|\(#(\d+)\)$)")
def get_github_instance() -> Github:
    """Return a PyGithub client authenticated via the GITHUB_TOKEN env var.

    Raises:
        Exception: if GITHUB_TOKEN is unset or empty.
    """
    token = os.environ.get("GITHUB_TOKEN")
    if token:
        return Github(token)
    raise Exception("Env var 'GITHUB_TOKEN' is missing")
def get_repo(repo: str) -> Repository:
    """Fetch the Repository object for an "owner/name" identifier."""
    return get_github_instance().get_repo(repo)
def get_issues_from_labels(repo: str, label: str, prs_only: bool = False) -> List[Issue]:
    """Return all issues (any state) carrying *label*; optionally PRs only.

    Returns an empty list when the label does not exist in the repository.
    """
    gh_repo = get_repo(repo)
    try:
        label_object = gh_repo.get_label(label)
    except UnknownObjectException:
        # unknown label
        return []
    issues = gh_repo.get_issues(labels=[label_object], state="all")
    if prs_only:
        return [issue for issue in issues if issue.pull_request]
    return list(issues)
def get_issue(repo: str, id_: int) -> Optional[Issue]:
    """Return issue *id_* from *repo*, or None if the issue id is unknown."""
    gh_repo = get_repo(repo)
    try:
        return gh_repo.get_issue(id_)
    except UnknownObjectException:
        # unknown id
        return None
def get_commits(repo: str, branch: str, since=None):
    """Get commit objects from a branch, optionally only those after *since*."""
    gh_repo = get_repo(repo)
    head_sha = gh_repo.get_branch(branch).commit.sha
    kwargs = {"sha": head_sha}
    if since:
        kwargs["since"] = since
    return gh_repo.get_commits(**kwargs)
def commit_pr_number(commit: Commit) -> Optional[int]:
    """Extract the pull request number from a commit summary, or None."""
    match = PR_REGEX.search(commit.summary)
    if match is None:
        return None
    # Group 2 is set for merge commits, group 3 for "(#nnn)" suffixes.
    return int(match.group(2) or match.group(3))
def get_commit_pr_map(commits: Reversible[Commit]):
    """Given commits, return an OrderedDict mapping pr_number -> commit.

    Iterates in reverse so that, for duplicate PR numbers, the entry earlier
    in *commits* wins (it is written last).
    """
    mapping = OrderedDict()
    for commit in reversed(commits):
        number = commit_pr_number(commit)
        if number:
            mapping[number] = commit
    return mapping
def truncate_str(value: str, width: int = 90) -> str:
    """Fit *value* into exactly *width* characters.

    Long values are cut and suffixed with "..."; the result is always
    left-justified (space-padded) to *width*.
    """
    ellipsis = "..."
    shortened = value[: width - len(ellipsis)].strip()
    if len(shortened) < len(value.strip()):
        shortened += ellipsis
    return shortened.ljust(width)
def git_get_current_head() -> str:
    """Return the current branch name (or detached ref), "" if unrecognized."""
    status_line = os_system("git status | head -1")
    match = re.match("(?:HEAD detached at|On branch) (.*)", status_line)
    return match.group(1) if match else ""
def os_system(cmd, raise_on_error=True) -> str:
    """Run *cmd* in a shell via delegator; return stdout.

    Raises:
        CherryTreeExecutionException: on nonzero exit, unless raise_on_error is False.
    """
    result = delegator.run(cmd)
    if raise_on_error and result.return_code != 0:
        raise CherryTreeExecutionException(result.err)
    return result.out
def check_if_branch_exists(branch: str) -> bool:
    """Return True if *branch* can be checked out; restores the previous HEAD.

    On failure no restore is needed — the checkout did not move HEAD.
    """
    original_head = git_get_current_head()
    try:
        os_system(f"git checkout {branch}")
    except CherryTreeExecutionException:
        return False
    # Switch back to where we started before reporting success.
    os_system(f"git checkout {original_head}")
    return True
def deduplicate_prs(prs: List[Issue]) -> List[Issue]:
    """Drop duplicate PRs by number, keeping the first occurrence of each."""
    seen_numbers = set()
    unique: List[Issue] = []
    for pr in prs:
        if pr.number in seen_numbers:
            continue
        seen_numbers.add(pr.number)
        unique.append(pr)
    return unique
def get_git_repo() -> Repo:
    """Locate the closest enclosing git repository.

    Starts at the current working directory and walks up the directory tree
    until a repo is found.

    returns: Repo for the closest git repository
    raises FileNotFoundError: if no git repo is found in the current path
    """
    def _ancestors(path: str) -> Generator[str, None, None]:
        # Yield the directory itself, then each parent up to the filesystem root.
        while True:
            yield path
            parent = os.path.dirname(path)
            if parent == path:
                return
            path = parent

    cwd = os.getcwd()
    for candidate in _ancestors(cwd):
        try:
            return Repo(candidate)
        except InvalidGitRepositoryError:
            continue
    raise FileNotFoundError("No git repo found in path: {}".format(cwd))
| 29.308176 | 89 | 0.657296 | import os
import re
from collections import OrderedDict
from typing import Generator, List, Optional, Reversible
import delegator
from git import Commit
from git.exc import InvalidGitRepositoryError
from git.repo import Repo
from github import Github
from github.Label import Label
from github.Issue import Issue
from github.GithubException import UnknownObjectException
from github.Repository import Repository
from cherrytree.classes import CherryTreeExecutionException
PR_REGEX = re.compile(r"(^Merge pull request #(\d+) from|\(#(\d+)\)$)")
def get_github_instance() -> Github:
token = os.environ.get("GITHUB_TOKEN")
if not token:
raise Exception("Env var 'GITHUB_TOKEN' is missing")
return Github(token)
def get_repo(repo: str) -> Repository:
g = get_github_instance()
return g.get_repo(repo)
def get_issues_from_labels(repo: str, label: str, prs_only: bool = False) -> List[Issue]:
label_objects: List[Label] = []
gh_repo = get_repo(repo)
try:
label_objects.append(gh_repo.get_label(label))
except UnknownObjectException:
return []
issues = gh_repo.get_issues(labels=label_objects, state="all")
if prs_only:
return [o for o in issues if o.pull_request]
return [o for o in issues]
def get_issue(repo: str, id_: int) -> Optional[Issue]:
gh_repo = get_repo(repo)
try:
return gh_repo.get_issue(id_)
except UnknownObjectException:
return None
def get_commits(repo: str, branch: str, since=None):
gh_repo = get_repo(repo)
branch_object = gh_repo.get_branch(branch)
sha = branch_object.commit.sha
if since:
commits = gh_repo.get_commits(sha=sha, since=since)
else:
commits = gh_repo.get_commits(sha=sha)
return commits
def commit_pr_number(commit: Commit) -> Optional[int]:
res = PR_REGEX.search(commit.summary)
if res:
groups = res.groups()
return int(groups[1] or groups[2])
return None
def get_commit_pr_map(commits: Reversible[Commit]):
d = OrderedDict()
for commit in reversed(commits):
pr_number = commit_pr_number(commit)
if pr_number:
d[pr_number] = commit
return d
def truncate_str(value: str, width: int = 90) -> str:
cont_str = "..."
trunc_value = value[: width - len(cont_str)].strip()
if len(trunc_value) < len(value.strip()):
trunc_value = f"{trunc_value}{cont_str}"
return f"{trunc_value:<{width}}"
def git_get_current_head() -> str:
output = os_system("git status | head -1")
match = re.match("(?:HEAD detached at|On branch) (.*)", output)
if not match:
return ""
return match.group(1)
def os_system(cmd, raise_on_error=True) -> str:
p = delegator.run(cmd)
if raise_on_error and p.return_code != 0:
raise CherryTreeExecutionException(p.err)
return p.out
def check_if_branch_exists(branch: str) -> bool:
current_head = git_get_current_head()
try:
os_system(f"git checkout {branch}")
except CherryTreeExecutionException:
return False
os_system(f"git checkout {current_head}")
return True
def deduplicate_prs(prs: List[Issue]) -> List[Issue]:
pr_set = set()
ret: List[Issue] = []
for pr in prs:
if pr.number not in pr_set:
ret.append(pr)
pr_set.add(pr.number)
return ret
def get_git_repo() -> Repo:
def _traverse_dirs(path: str) -> Generator[str, None, None]:
yield path
while True:
parent = os.path.dirname(path)
if path != parent:
yield parent
path = parent
else:
break
cwd = os.getcwd()
for dir_ in _traverse_dirs(cwd):
try:
repo = Repo(dir_)
return repo
except InvalidGitRepositoryError:
pass
raise FileNotFoundError("No git repo found in path: {}". format(cwd))
| true | true |
f71c439040784630188bbe6360a6d737525bd96e | 7,063 | py | Python | NiaPy/algorithms/basic/gso.py | tuahk/NiaPy | c863d801fda8e1949a3ca716a4de7c7ca3d0ea16 | [
"MIT"
] | null | null | null | NiaPy/algorithms/basic/gso.py | tuahk/NiaPy | c863d801fda8e1949a3ca716a4de7c7ca3d0ea16 | [
"MIT"
] | null | null | null | NiaPy/algorithms/basic/gso.py | tuahk/NiaPy | c863d801fda8e1949a3ca716a4de7c7ca3d0ea16 | [
"MIT"
] | null | null | null | # encoding=utf8
# pylint: disable=mixed-indentation, trailing-whitespace, line-too-long, multiple-statements, attribute-defined-outside-init, logging-not-lazy, no-self-use, redefined-builtin, singleton-comparison, unused-argument, arguments-differ, no-else-return
import logging
from scipy.spatial.distance import euclidean
from numpy import full, apply_along_axis, argmin, copy, sum, inf, fmax, pi, where
from NiaPy.algorithms.algorithm import Algorithm
logging.basicConfig()
logger = logging.getLogger('NiaPy.algorithms.basic')
logger.setLevel('INFO')
__all__ = ['GlowwormSwarmOptimization', 'GlowwormSwarmOptimizationV1', 'GlowwormSwarmOptimizationV2', 'GlowwormSwarmOptimizationV3']
class GlowwormSwarmOptimization(Algorithm):
r"""Implementation of glowwarm swarm optimization.
**Algorithm:** Glowwarm Swarm Optimization Algorithm
**Date:** 2018
**Authors:** Klemen Berkovič
**License:** MIT
**Reference URL:** https://www.springer.com/gp/book/9783319515946
**Reference paper:** Kaipa, Krishnanand N., and Debasish Ghose. Glowworm swarm optimization: theory, algorithms, and applications. Vol. 698. Springer, 2017.
"""
def __init__(self, **kwargs):
if kwargs.get('name', None) == None: Algorithm.__init__(self, name='GlowwormSwarmOptimization', sName='GSO', **kwargs)
else: Algorithm.__init__(self, **kwargs)
def setParameters(self, n=25, l0=5, nt=5, rho=0.4, gamma=0.6, beta=0.08, s=0.03, **ukwargs):
r"""Set the arguments of an algorithm.
**Arguments:**
n {integer} -- number of glowworms in population
l0 {real} -- initial luciferin quantity for each glowworm
nt {real} --
rs {real} -- maximum sensing range
rho {real} -- luciferin decay constant
gamma {real} -- luciferin enhancement constant
beta {real} --
s {real} --
"""
self.n, self.l0, self.nt, self.rho, self.gamma, self.beta, self.s = n, l0, nt, rho, gamma, beta, s
if ukwargs: logger.info('Unused arguments: %s' % (ukwargs))
def randMove(self, i):
j = i
while i == j: j = self.randint(self.n)
return j
def getNeighbors(self, i, r, GS, L):
N = full(self.n, 0)
for j, gw in enumerate(GS): N[j] = 1 if i != j and euclidean(GS[i], gw) <= r and L[i] >= L[j] else 0
return N
def probabilityes(self, i, N, L):
d, P = sum(L[where(N == 1)] - L[i]), full(self.n, .0)
for j in range(self.n): P[i] = ((L[j] - L[i]) / d) if N[j] == 1 else 0
return P
def moveSelect(self, pb, i):
r, b_l, b_u = self.rand(), 0, 0
for j in range(self.n):
b_l, b_u = b_u, b_u + pb[i]
if b_l < r < b_u: return j
return self.randint(self.n)
def calcLuciferin(self, L, GS_f): return (1 - self.rho) * L + self.gamma * GS_f
def rangeUpdate(self, R, N, rs): return R + self.beta * (self.nt - sum(N))
def getBest(self, GS, GS_f, xb, xb_f):
ib = argmin(GS_f)
if GS_f[ib] < xb_f: return GS[ib], GS_f[ib]
else: return xb, xb_f
def runTask(self, task):
rs = euclidean(full(task.D, 0), task.bRange)
GS, GS_f, L, R = self.uniform(task.Lower, task.Upper, [self.n, task.D]), full(self.n, inf), full(self.n, self.l0), full(self.n, rs)
xb, xb_f = None, inf
while not task.stopCondI():
GSo, Ro, GS_f = copy(GS), copy(R), apply_along_axis(task.eval, 1, GS)
xb, xb_f = self.getBest(GS, GS_f, xb, xb_f)
L = self.calcLuciferin(L, GS_f)
N = [self.getNeighbors(i, Ro[i], GSo, L) for i in range(self.n)]
P = [self.probabilityes(i, N[i], L) for i in range(self.n)]
j = [self.moveSelect(P[i], i) for i in range(self.n)]
for i in range(self.n): GS[i] = task.repair(GSo[i] + self.s * ((GSo[j[i]] - GSo[i]) / (euclidean(GSo[j[i]], GSo[i]) + 1e-31)))
for i in range(self.n): R[i] = max(0, min(rs, self.rangeUpdate(Ro[i], N[i], rs)))
return xb, xb_f
class GlowwormSwarmOptimizationV1(GlowwormSwarmOptimization):
r"""Implementation of glowwarm swarm optimization.
**Algorithm:** Glowwarm Swarm Optimization Algorithm
**Date:** 2018
**Authors:** Klemen Berkovič
**License:** MIT
**Reference URL:** https://www.springer.com/gp/book/9783319515946
**Reference paper:** Kaipa, Krishnanand N., and Debasish Ghose. Glowworm swarm optimization: theory, algorithms, and applications. Vol. 698. Springer, 2017.
"""
def __init__(self, **kwargs): GlowwormSwarmOptimization.__init__(self, name='GlowwormSwarmOptimizationV1', sName='GSOv1', **kwargs)
def setParameters(self, **kwargs):
self.__setParams(**kwargs)
GlowwormSwarmOptimization.setParameters(self, **kwargs)
def __setParams(self, alpha=0.2, **ukwargs):
r"""Set the arguments of an algorithm.
**Arguments:**
alpha {real} --
"""
self.alpha = alpha
if ukwargs: logger.info('Unused arguments: %s' % (ukwargs))
def calcLuciferin(self, L, GS_f): return fmax(0, (1 - self.rho) * L + self.gamma * GS_f)
def rangeUpdate(self, R, N, rs): return rs / (1 + self.beta * (sum(N) / (pi * rs ** 2)))
class GlowwormSwarmOptimizationV2(GlowwormSwarmOptimization):
r"""Implementation of glowwarm swarm optimization.
**Algorithm:** Glowwarm Swarm Optimization Algorithm
**Date:** 2018
**Authors:** Klemen Berkovič
**License:** MIT
**Reference URL:** https://www.springer.com/gp/book/9783319515946
**Reference paper:** Kaipa, Krishnanand N., and Debasish Ghose. Glowworm swarm optimization: theory, algorithms, and applications. Vol. 698. Springer, 2017.
"""
def __init__(self, **kwargs): GlowwormSwarmOptimization.__init__(self, name='GlowwormSwarmOptimizationV2', sName='GSOv2', **kwargs)
def setParameters(self, **kwargs):
self.__setParams(alpha=kwargs.pop('alpha', 0.2), **kwargs)
GlowwormSwarmOptimization.setParameters(self, **kwargs)
def __setParams(self, alpha=0.2, **ukwargs):
r"""Set the arguments of an algorithm.
**Arguments:**
beta1 {real} --
s {real} --
"""
self.alpha = alpha
if ukwargs: logger.info('Unused arguments: %s' % (ukwargs))
def rangeUpdate(self, P, N, rs): return self.alpha + (rs - self.alpha) / (1 + self.beta * sum(N))
class GlowwormSwarmOptimizationV3(GlowwormSwarmOptimization):
r"""Implementation of glowwarm swarm optimization.
**Algorithm:** Glowwarm Swarm Optimization Algorithm
**Date:** 2018
**Authors:** Klemen Berkovič
**License:** MIT
**Reference URL:** https://www.springer.com/gp/book/9783319515946
**Reference paper:** Kaipa, Krishnanand N., and Debasish Ghose. Glowworm swarm optimization: theory, algorithms, and applications. Vol. 698. Springer, 2017.
"""
def __init__(self, **kwargs): GlowwormSwarmOptimization.__init__(self, name='GlowwormSwarmOptimizationV2', sName='GSOv2', **kwargs)
def setParameters(self, **kwargs):
self.__setParams(beta1=kwargs.pop('beta1', 0.2), **kwargs)
GlowwormSwarmOptimization.setParameters(self, **kwargs)
def __setParams(self, beta1=0.2, **ukwargs):
r"""Set the arguments of an algorithm.
**Arguments:**
beta1 {real} --
s {real} --
"""
self.beta1 = beta1
if ukwargs: logger.info('Unused arguments: %s' % (ukwargs))
def rangeUpdate(self, R, N, rs): return R + (self.beta * sum(N)) if sum(N) < self.nt else (-self.beta1 * sum(N))
# vim: tabstop=3 noexpandtab shiftwidth=3 softtabstop=3
| 33.794258 | 247 | 0.68781 |
import logging
from scipy.spatial.distance import euclidean
from numpy import full, apply_along_axis, argmin, copy, sum, inf, fmax, pi, where
from NiaPy.algorithms.algorithm import Algorithm
logging.basicConfig()
logger = logging.getLogger('NiaPy.algorithms.basic')
logger.setLevel('INFO')
__all__ = ['GlowwormSwarmOptimization', 'GlowwormSwarmOptimizationV1', 'GlowwormSwarmOptimizationV2', 'GlowwormSwarmOptimizationV3']
class GlowwormSwarmOptimization(Algorithm):
def __init__(self, **kwargs):
if kwargs.get('name', None) == None: Algorithm.__init__(self, name='GlowwormSwarmOptimization', sName='GSO', **kwargs)
else: Algorithm.__init__(self, **kwargs)
def setParameters(self, n=25, l0=5, nt=5, rho=0.4, gamma=0.6, beta=0.08, s=0.03, **ukwargs):
self.n, self.l0, self.nt, self.rho, self.gamma, self.beta, self.s = n, l0, nt, rho, gamma, beta, s
if ukwargs: logger.info('Unused arguments: %s' % (ukwargs))
def randMove(self, i):
j = i
while i == j: j = self.randint(self.n)
return j
def getNeighbors(self, i, r, GS, L):
N = full(self.n, 0)
for j, gw in enumerate(GS): N[j] = 1 if i != j and euclidean(GS[i], gw) <= r and L[i] >= L[j] else 0
return N
def probabilityes(self, i, N, L):
d, P = sum(L[where(N == 1)] - L[i]), full(self.n, .0)
for j in range(self.n): P[i] = ((L[j] - L[i]) / d) if N[j] == 1 else 0
return P
def moveSelect(self, pb, i):
r, b_l, b_u = self.rand(), 0, 0
for j in range(self.n):
b_l, b_u = b_u, b_u + pb[i]
if b_l < r < b_u: return j
return self.randint(self.n)
def calcLuciferin(self, L, GS_f): return (1 - self.rho) * L + self.gamma * GS_f
def rangeUpdate(self, R, N, rs): return R + self.beta * (self.nt - sum(N))
def getBest(self, GS, GS_f, xb, xb_f):
ib = argmin(GS_f)
if GS_f[ib] < xb_f: return GS[ib], GS_f[ib]
else: return xb, xb_f
def runTask(self, task):
rs = euclidean(full(task.D, 0), task.bRange)
GS, GS_f, L, R = self.uniform(task.Lower, task.Upper, [self.n, task.D]), full(self.n, inf), full(self.n, self.l0), full(self.n, rs)
xb, xb_f = None, inf
while not task.stopCondI():
GSo, Ro, GS_f = copy(GS), copy(R), apply_along_axis(task.eval, 1, GS)
xb, xb_f = self.getBest(GS, GS_f, xb, xb_f)
L = self.calcLuciferin(L, GS_f)
N = [self.getNeighbors(i, Ro[i], GSo, L) for i in range(self.n)]
P = [self.probabilityes(i, N[i], L) for i in range(self.n)]
j = [self.moveSelect(P[i], i) for i in range(self.n)]
for i in range(self.n): GS[i] = task.repair(GSo[i] + self.s * ((GSo[j[i]] - GSo[i]) / (euclidean(GSo[j[i]], GSo[i]) + 1e-31)))
for i in range(self.n): R[i] = max(0, min(rs, self.rangeUpdate(Ro[i], N[i], rs)))
return xb, xb_f
class GlowwormSwarmOptimizationV1(GlowwormSwarmOptimization):
def __init__(self, **kwargs): GlowwormSwarmOptimization.__init__(self, name='GlowwormSwarmOptimizationV1', sName='GSOv1', **kwargs)
def setParameters(self, **kwargs):
self.__setParams(**kwargs)
GlowwormSwarmOptimization.setParameters(self, **kwargs)
def __setParams(self, alpha=0.2, **ukwargs):
self.alpha = alpha
if ukwargs: logger.info('Unused arguments: %s' % (ukwargs))
def calcLuciferin(self, L, GS_f): return fmax(0, (1 - self.rho) * L + self.gamma * GS_f)
def rangeUpdate(self, R, N, rs): return rs / (1 + self.beta * (sum(N) / (pi * rs ** 2)))
class GlowwormSwarmOptimizationV2(GlowwormSwarmOptimization):
def __init__(self, **kwargs): GlowwormSwarmOptimization.__init__(self, name='GlowwormSwarmOptimizationV2', sName='GSOv2', **kwargs)
def setParameters(self, **kwargs):
self.__setParams(alpha=kwargs.pop('alpha', 0.2), **kwargs)
GlowwormSwarmOptimization.setParameters(self, **kwargs)
def __setParams(self, alpha=0.2, **ukwargs):
self.alpha = alpha
if ukwargs: logger.info('Unused arguments: %s' % (ukwargs))
def rangeUpdate(self, P, N, rs): return self.alpha + (rs - self.alpha) / (1 + self.beta * sum(N))
class GlowwormSwarmOptimizationV3(GlowwormSwarmOptimization):
def __init__(self, **kwargs): GlowwormSwarmOptimization.__init__(self, name='GlowwormSwarmOptimizationV2', sName='GSOv2', **kwargs)
def setParameters(self, **kwargs):
self.__setParams(beta1=kwargs.pop('beta1', 0.2), **kwargs)
GlowwormSwarmOptimization.setParameters(self, **kwargs)
def __setParams(self, beta1=0.2, **ukwargs):
self.beta1 = beta1
if ukwargs: logger.info('Unused arguments: %s' % (ukwargs))
def rangeUpdate(self, R, N, rs): return R + (self.beta * sum(N)) if sum(N) < self.nt else (-self.beta1 * sum(N))
| true | true |
f71c43a114c10204ad2b5ff2693265ff01dc5894 | 64 | py | Python | dr/__init__.py | jigangkim/domain_randomization | 07a309a9e824b5332219871abe8f0f657694b292 | [
"MIT"
] | 7 | 2019-06-09T13:03:18.000Z | 2022-02-19T08:50:51.000Z | dr/__init__.py | jigangkim/domain_randomization | 07a309a9e824b5332219871abe8f0f657694b292 | [
"MIT"
] | 1 | 2019-07-12T23:59:46.000Z | 2021-11-21T04:09:09.000Z | dr/__init__.py | jigangkim/domain_randomization | 07a309a9e824b5332219871abe8f0f657694b292 | [
"MIT"
] | 3 | 2020-05-01T13:18:25.000Z | 2021-03-30T11:52:33.000Z | import dr.dist
import dr.experiment
import dr.gym
import dr.ppo
| 12.8 | 20 | 0.8125 | import dr.dist
import dr.experiment
import dr.gym
import dr.ppo
| true | true |
f71c441128b684fbb0542eeb3decdebe626fc0b9 | 377 | py | Python | plugin_info.py | Attolab/pymodaq_plugins_smaract | 109808255d784bd98b875fb4886c90a1f0e9ff7b | [
"CECILL-B"
] | null | null | null | plugin_info.py | Attolab/pymodaq_plugins_smaract | 109808255d784bd98b875fb4886c90a1f0e9ff7b | [
"CECILL-B"
] | null | null | null | plugin_info.py | Attolab/pymodaq_plugins_smaract | 109808255d784bd98b875fb4886c90a1f0e9ff7b | [
"CECILL-B"
] | null | null | null | SHORT_PLUGIN_NAME = 'smaract'
package_url = 'https://github.com/CEMES-CNRS/pymodaq_plugins_samarct'
description = 'Set of PyMoDAQ plugins for linear actuators from Smaract' \
'(SLC positioners). MCS and MCS2 controllers are supported.'
author = 'David Bresteau'
author_email = 'david.bresteau@cea.fr'
# packages required for your plugin:
packages_required = []
| 34.272727 | 74 | 0.748011 | SHORT_PLUGIN_NAME = 'smaract'
package_url = 'https://github.com/CEMES-CNRS/pymodaq_plugins_samarct'
description = 'Set of PyMoDAQ plugins for linear actuators from Smaract' \
'(SLC positioners). MCS and MCS2 controllers are supported.'
author = 'David Bresteau'
author_email = 'david.bresteau@cea.fr'
packages_required = []
| true | true |
f71c454e2944e16b15e7400432d9858fbd2966f8 | 4,678 | py | Python | cuesdk/capi.py | CorsairOfficial/cue-sdk-python | 8385369725be852eac4bd8e4323ea6661c8603e4 | [
"MIT"
] | 34 | 2020-03-25T08:57:23.000Z | 2022-03-26T16:30:06.000Z | cuesdk/capi.py | CorsairOfficial/cue-sdk-python | 8385369725be852eac4bd8e4323ea6661c8603e4 | [
"MIT"
] | 12 | 2020-03-25T08:56:28.000Z | 2022-02-18T15:20:51.000Z | cuesdk/capi.py | CorsairOfficial/cue-sdk-python | 8385369725be852eac4bd8e4323ea6661c8603e4 | [
"MIT"
] | 17 | 2020-07-24T13:29:06.000Z | 2022-02-02T22:13:43.000Z | import os
import platform
import sys
from ctypes import (CDLL, CFUNCTYPE, POINTER, sizeof, c_bool, c_char, c_int32,
c_void_p)
from .enums import (CorsairAccessMode, CorsairError, CorsairLedId,
CorsairEventId, CorsairDevicePropertyId)
from .structs import (CorsairProtocolDetails, CorsairDeviceInfo,
CorsairLedPosition, CorsairLedPositions, CorsairLedColor,
CorsairEvent)
__all__ = ['CorsairNativeApi']
def get_library_path_windows():
suffix = '.x64' if sizeof(c_void_p) == 8 else ''
lib_name = 'CUESDK' + suffix + '_2017.dll'
return os.path.join(os.path.dirname(__file__), 'bin', lib_name)
def get_library_path_mac():
lib_name = 'libCUESDK.dylib'
return os.path.join(os.path.dirname(__file__), 'bin', lib_name)
def load_library(library_path):
try:
return CDLL(library_path)
except OSError:
print("Unable to load the library %s" % library_path)
sys.exit()
class CorsairNativeApi():
def __init__(self, libpath):
if libpath is None:
system = platform.system()
if system == "Windows":
libpath = get_library_path_windows()
elif system == "Darwin":
libpath = get_library_path_mac()
lib = load_library(libpath)
def create_func(fn, restype, argtypes):
f = lib.__getattr__(fn)
f.restype = restype
f.argtypes = argtypes
return f
self.CorsairSetLedsColorsBufferByDeviceIndex = create_func(
'CorsairSetLedsColorsBufferByDeviceIndex', c_bool,
[c_int32, c_int32, POINTER(CorsairLedColor)])
self.CorsairSetLedsColorsFlushBuffer = create_func(
'CorsairSetLedsColorsFlushBuffer', c_bool, None)
self.CallbackFunc = CFUNCTYPE(c_void_p, c_bool, CorsairError)
self.CorsairSetLedsColorsFlushBufferAsync = create_func(
'CorsairSetLedsColorsFlushBufferAsync', c_bool,
[self.CallbackFunc, c_void_p])
self.CorsairGetLedsColors = create_func(
'CorsairGetLedsColors', c_bool,
[c_int32, POINTER(CorsairLedColor)])
self.CorsairGetLedsColorsByDeviceIndex = create_func(
'CorsairGetLedsColorsByDeviceIndex', c_bool,
[c_int32, c_int32, POINTER(CorsairLedColor)])
self.CorsairGetDeviceCount = create_func('CorsairGetDeviceCount',
c_int32, None)
self.CorsairGetDeviceInfo = create_func('CorsairGetDeviceInfo',
POINTER(CorsairDeviceInfo),
[c_int32])
self.CorsairGetLedPositions = create_func('CorsairGetLedPositions',
POINTER(CorsairLedPositions),
None)
self.CorsairGetLedPositionsByDeviceIndex = create_func(
'CorsairGetLedPositionsByDeviceIndex',
POINTER(CorsairLedPositions), [c_int32])
self.CorsairGetLedIdForKeyName = create_func(
'CorsairGetLedIdForKeyName', CorsairLedId, [c_char])
self.CorsairRequestControl = create_func('CorsairRequestControl',
c_bool, [CorsairAccessMode])
self.CorsairPerformProtocolHandshake = create_func(
'CorsairPerformProtocolHandshake', CorsairProtocolDetails, None)
self.CorsairGetLastError = create_func('CorsairGetLastError',
CorsairError, None)
self.CorsairReleaseControl = create_func('CorsairReleaseControl',
c_bool, [CorsairAccessMode])
self.CorsairSetLayerPriority = create_func('CorsairSetLayerPriority',
c_bool, [c_int32])
c_bool_p = POINTER(c_bool)
self.CorsairGetBoolPropertyValue = create_func(
'CorsairGetBoolPropertyValue', c_bool,
[c_int32, CorsairDevicePropertyId, c_bool_p])
c_int32_p = POINTER(c_int32)
self.CorsairGetInt32PropertyValue = create_func(
'CorsairGetInt32PropertyValue', c_bool,
[c_int32, CorsairDevicePropertyId, c_int32_p])
self.EventHandler = CFUNCTYPE(None, c_void_p, POINTER(CorsairEvent))
self.CorsairSubscribeForEvents = create_func(
'CorsairSubscribeForEvents', c_bool, [self.EventHandler, c_void_p])
self.CorsairUnsubscribeFromEvents = create_func(
'CorsairUnsubscribeFromEvents', c_bool, None)
| 46.316832 | 79 | 0.621633 | import os
import platform
import sys
from ctypes import (CDLL, CFUNCTYPE, POINTER, sizeof, c_bool, c_char, c_int32,
c_void_p)
from .enums import (CorsairAccessMode, CorsairError, CorsairLedId,
CorsairEventId, CorsairDevicePropertyId)
from .structs import (CorsairProtocolDetails, CorsairDeviceInfo,
CorsairLedPosition, CorsairLedPositions, CorsairLedColor,
CorsairEvent)
__all__ = ['CorsairNativeApi']
def get_library_path_windows():
suffix = '.x64' if sizeof(c_void_p) == 8 else ''
lib_name = 'CUESDK' + suffix + '_2017.dll'
return os.path.join(os.path.dirname(__file__), 'bin', lib_name)
def get_library_path_mac():
lib_name = 'libCUESDK.dylib'
return os.path.join(os.path.dirname(__file__), 'bin', lib_name)
def load_library(library_path):
try:
return CDLL(library_path)
except OSError:
print("Unable to load the library %s" % library_path)
sys.exit()
class CorsairNativeApi():
def __init__(self, libpath):
if libpath is None:
system = platform.system()
if system == "Windows":
libpath = get_library_path_windows()
elif system == "Darwin":
libpath = get_library_path_mac()
lib = load_library(libpath)
def create_func(fn, restype, argtypes):
f = lib.__getattr__(fn)
f.restype = restype
f.argtypes = argtypes
return f
self.CorsairSetLedsColorsBufferByDeviceIndex = create_func(
'CorsairSetLedsColorsBufferByDeviceIndex', c_bool,
[c_int32, c_int32, POINTER(CorsairLedColor)])
self.CorsairSetLedsColorsFlushBuffer = create_func(
'CorsairSetLedsColorsFlushBuffer', c_bool, None)
self.CallbackFunc = CFUNCTYPE(c_void_p, c_bool, CorsairError)
self.CorsairSetLedsColorsFlushBufferAsync = create_func(
'CorsairSetLedsColorsFlushBufferAsync', c_bool,
[self.CallbackFunc, c_void_p])
self.CorsairGetLedsColors = create_func(
'CorsairGetLedsColors', c_bool,
[c_int32, POINTER(CorsairLedColor)])
self.CorsairGetLedsColorsByDeviceIndex = create_func(
'CorsairGetLedsColorsByDeviceIndex', c_bool,
[c_int32, c_int32, POINTER(CorsairLedColor)])
self.CorsairGetDeviceCount = create_func('CorsairGetDeviceCount',
c_int32, None)
self.CorsairGetDeviceInfo = create_func('CorsairGetDeviceInfo',
POINTER(CorsairDeviceInfo),
[c_int32])
self.CorsairGetLedPositions = create_func('CorsairGetLedPositions',
POINTER(CorsairLedPositions),
None)
self.CorsairGetLedPositionsByDeviceIndex = create_func(
'CorsairGetLedPositionsByDeviceIndex',
POINTER(CorsairLedPositions), [c_int32])
self.CorsairGetLedIdForKeyName = create_func(
'CorsairGetLedIdForKeyName', CorsairLedId, [c_char])
self.CorsairRequestControl = create_func('CorsairRequestControl',
c_bool, [CorsairAccessMode])
self.CorsairPerformProtocolHandshake = create_func(
'CorsairPerformProtocolHandshake', CorsairProtocolDetails, None)
self.CorsairGetLastError = create_func('CorsairGetLastError',
CorsairError, None)
self.CorsairReleaseControl = create_func('CorsairReleaseControl',
c_bool, [CorsairAccessMode])
self.CorsairSetLayerPriority = create_func('CorsairSetLayerPriority',
c_bool, [c_int32])
c_bool_p = POINTER(c_bool)
self.CorsairGetBoolPropertyValue = create_func(
'CorsairGetBoolPropertyValue', c_bool,
[c_int32, CorsairDevicePropertyId, c_bool_p])
c_int32_p = POINTER(c_int32)
self.CorsairGetInt32PropertyValue = create_func(
'CorsairGetInt32PropertyValue', c_bool,
[c_int32, CorsairDevicePropertyId, c_int32_p])
self.EventHandler = CFUNCTYPE(None, c_void_p, POINTER(CorsairEvent))
self.CorsairSubscribeForEvents = create_func(
'CorsairSubscribeForEvents', c_bool, [self.EventHandler, c_void_p])
self.CorsairUnsubscribeFromEvents = create_func(
'CorsairUnsubscribeFromEvents', c_bool, None)
| true | true |
f71c45e85feabb8c601d3ee30aa42ca3ae609193 | 20,007 | py | Python | gt_kai.py | d-ks/gym_torcs_kai | b9e1659a18ea8a788d0c6aeb7b1111c0284b23ac | [
"MIT"
] | null | null | null | gt_kai.py | d-ks/gym_torcs_kai | b9e1659a18ea8a788d0c6aeb7b1111c0284b23ac | [
"MIT"
] | null | null | null | gt_kai.py | d-ks/gym_torcs_kai | b9e1659a18ea8a788d0c6aeb7b1111c0284b23ac | [
"MIT"
] | null | null | null | # Gym-TORCS-Kai Environment for Reinforcement Learning in TORCS
# original author : Naoto Yoshida
# (https://github.com/ugo-nama-kun/gym_torcs)
# modified version author : Daiko Kishikawa
#
# This environment is under modification. (2019.12)
#
import gym
from gym import spaces
from gym.utils import seeding
import numpy as np
import sys
sys.path.append("./gym_torcs_kai")
import snakeoil3_gym as snakeoil3
import os
import time
class TorcsKaiEnv(gym.Env):
# the speed limit starts when the number of steps exceeds this
terminal_judge_start = 500
# episode terminates when the car is running slower than this limit
termination_limit_progress = 5
# whether to initialize when resetting the environment
initial_reset = True
def __init__(self, throttle=False, gear_change=False):
print("=== Hello, this is Gym-TORCS-Kai. ===")
############################ PARAMETERS OF DRIVING ############################
""" throttle (bool) : usage of the throttle control in TORCS. """
""" gear_change (bool) : usage of the gear control in TORCS. """
""" obsdim (int) : the number of observation (state input) dimensions."""
# Currently, three types of dimensions are supported: "2", "31", "79".
# "2" : the minimum number of dimensions required for driving.
# "31" : the number of dimensions required for a single agent to drive normally.
# "79" : the number of dimensions using all available inputs.
""" maximum_distance (float) : the maximum distance when finish driving. """
""" default_speed (float) : the target speed for acceleration/deceleration. """
self.throttle = throttle
self.gear_change = gear_change
self.obsdim = 31
self.maximum_distance = 1908.32
self.default_speed = 100
##################################################################################
print("--> throttle : ", self.throttle)
print("--> gear : ", self.gear_change)
print("--> dim. of observ. : ", self.obsdim)
print("--> max. dist. : ", self.maximum_distance, " m")
print("--> targ. speed : ", self.default_speed, "km/h")
# Initialization of the driving in TORCS.
self.initial_run = True
# variable for calculating Y-axis acceleration
self.speedY = 0
self.time = 0
# variable for recording the current number of steps
self.time_step = 0
# the range of reward function
self.reward_range = (-10, 10)
self.testmode = False
# lists for recording vehicle status
self.Yaclist = []
self.poshis = []
self.anglehis = []
self.sphis = []
# launch TORCS system
os.system("pkill torcs")
time.sleep(0.5)
if self.obsdim == 79:
os.system("torcs &")
else:
os.system("torcs -nofuel -nodamage -nolaptime &")
time.sleep(0.5)
os.system("sh ./gym_torcs_kai/autostart.sh")
time.sleep(0.5)
"""
# Modify here if you use multiple tracks in the environment
self.client = snakeoil3.Client(p=3101, vision=False) # Open new UDP in vtorcs
self.client.MAX_STEPS = np.inf
client = self.client
client.get_servers_input() # Get the initial input from torcs
obs = client.S.d # Get the current full-observation from torcs
"""
# definitions of action space ranges
if throttle is False:
self.action_space = spaces.Box(low=-1.0, high=1.0, shape=(1,))
else:
self.action_space = spaces.Box(low=-1.0, high=1.0, shape=(2,))
# definitions of observation space ranges
if self.obsdim == 79:
high = np.array([np.pi, # angle
np.inf, # curLapTime
np.inf, # damage
np.inf, # distFromStart
np.inf, # distRaced
# focus (5 dim.)
200, 200, 200, 200, 200,
np.inf, # fuel
6, # gear
np.inf, # lastLapTime
# opponents (36 dim.)
200, 200, 200, 200, 200, 200,
200, 200, 200, 200, 200, 200,
200, 200, 200, 200, 200, 200,
200, 200, 200, 200, 200, 200,
200, 200, 200, 200, 200, 200,
200, 200, 200, 200, 200, 200,
np.inf, # racePos
np.inf, # rpm
np.inf, # speedX
np.inf, # speedY
np.inf, # speedZ
# track (19 dim.)
200, 200, 200, 200, 200,
200, 200, 200, 200, 200,
200, 200, 200, 200, 200,
200, 200, 200, 200,
np.inf, # trackPos
# wheelSpinVel (4 dim.)
np.inf, np.inf, np.inf, np.inf,
np.inf, # z
])
low = np.array([-np.pi, # angle
0, # curLapTime
0, # damage
0, # distFromStart
0, # distRaced
# focus (5 dim.)
0, 0, 0, 0, 0,
0, # fuel
-1, # gear
0, # lastLapTime
# opponents (36 dim.)
0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0,
1, # racePos
0, # rpm
-np.inf, # speedX
-np.inf, # speedY
-np.inf, # speedZ
# track (19 dim.)
0, 0, 0, 0, 0,
0, 0, 0, 0, 0,
0, 0, 0, 0, 0,
0, 0, 0, 0,
-np.inf, # trackPos
# wheelSpinVel (4 dim.)
0, 0, 0, 0,
-np.inf, # z
])
elif self.obsdim == 2:
high = np.array([np.pi, # angle
np.inf]) # trackPos
low = np.array([-np.pi, # angle
-np.inf]) # trackPos
elif self.obsdim == 31:
high = np.array([np.pi, # angle
6, # gear
np.inf, # rpm
np.inf, # speedX
np.inf, # speedY
np.inf, # speedZ
# track (19 dim.)
200, 200, 200, 200, 200,
200, 200, 200, 200, 200,
200, 200, 200, 200, 200,
200, 200, 200, 200,
np.inf, # trackPos
# wheelSpinVel (4 dim.)
np.inf, np.inf, np.inf, np.inf,
np.inf, # z
])
low = np.array([-np.pi, # angle
-1, # gear
0, # rpm
-np.inf, # speedX
-np.inf, # speedY
-np.inf, # speedZ
# track (19 dim.)
0, 0, 0, 0, 0,
0, 0, 0, 0, 0,
0, 0, 0, 0, 0,
0, 0, 0, 0,
-np.inf, # trackPos
# wheelSpinVel (4 dim.)
0, 0, 0, 0,
-np.inf, # z
])
else:
low = None
high = None
self.observation_space = spaces.Box(low=low, high=high)
# For evaluation episodes, set to “test mode” to not display logs.
def testset(self, test):
self.testmode = test
# Set learning parameter
def set_params(self, throttle, gear, dim, max_dist, targ_speed):
#params: [throttle, gear, dim, max_dist, targ_speed]
self.throttle = throttle
self.gear_change = gear
self.obsdim = dim
self.maximum_distance = max_dist
self.default_speed = targ_speed
def seed(self, seed=None):
self.np_random, seed = seeding.np_random(seed)
return [seed]
# "step" function
def step(self, u):
# convert thisAction to the actual torcs actionstr
client = self.client
this_action = self.agent_to_torcs(u)
# apply actions in TORCS
action_torcs = client.R.d
# steering control from the agent
action_torcs["steer"] = this_action["steer"] # in [-1, 1]
# simple automatic throttle control by Snakeoil
if self.throttle is False:
target_speed = self.default_speed
if client.S.d["speedX"] < target_speed - (client.R.d["steer"] * 50):
if client.R.d["accel"] + 0.1 <= 1:
client.R.d["accel"] += 0.1
else:
if client.R.d["accel"] - 0.1 >= 0:
client.R.d["accel"] -= 0.1
if client.S.d["speedX"] < 10:
if (client.S.d["speedX"] + 0.1) != 0:
client.R.d["accel"] += 1 / (client.S.d["speedX"] + 0.1)
# traction control system
if (client.S.d["wheelSpinVel"][2] + client.S.d["wheelSpinVel"][3]) - (
client.S.d["wheelSpinVel"][0] + client.S.d["wheelSpinVel"][1]
) > 5:
action_torcs["accel"] -= 0.2
else:
action_torcs["accel"] = this_action["accel"]
# gear control from agent
if self.gear_change is True:
action_torcs["gear"] = this_action["gear"]
else:
# automatic gear control
action_torcs["gear"] = 1
if client.S.d["speedX"] > 50:
action_torcs["gear"] = 2
if client.S.d["speedX"] > 80:
action_torcs["gear"] = 3
if client.S.d["speedX"] > 110:
action_torcs["gear"] = 4
if client.S.d["speedX"] > 140:
action_torcs["gear"] = 5
if client.S.d["speedX"] > 170:
action_torcs["gear"] = 6
# one-step dynamics update #################################
# apply actions into TORCS
client.respond_to_server()
# get the response from TORCS
client.get_servers_input()
# get the current full-observation from TORCS
obs = client.S.d
# make an observation from a raw observation vector from TORCS
self.observation = self.make_observaton(obs)
# calculation of progress
progress = np.array(obs["speedX"]) * np.cos(obs["angle"])
# Designed Reward Function #######################################
# This reward function enables agents to learn stable high-speed driving
# with low Y-axis acceleration.
# This reward function was designed after trial and error by me.
if (obs["curLapTime"] - self.time) > 0:
Yac = (obs["speedY"] - self.speedY) / (obs["curLapTime"] - self.time)
else:
Yac = 0
self.speedY = obs["speedY"]
self.time = obs["curLapTime"]
self.Yaclist.append(Yac)
self.poshis.append(obs["trackPos"])
self.anglehis.append(obs["angle"])
self.sphis.append(obs["speedX"])
# reward for the low Y-axis acceleration
eta_Yac = 1
r_Yac = 1 / ((Yac / eta_Yac) ** 2 + 1)
# reward for the small angle : 0 ~ 1
eta_angle = 0.01
r_angle = 1 / ((obs["angle"] / eta_angle) ** 2 + 1)
# reward for the small position from center : 0 ~ 1
eta_pos = 0.01
r_trackPos = 1 / ((obs["trackPos"] / eta_pos) ** 2 + 1)
# reward for the high X-axis speed : 0 ~ 1
maxspeed = 100
if obs["speedX"] >= 0:
r_speed = min(obs["speedX"] / maxspeed, 1)
else:
r_speed = 0
# reward function: -1 ~ 1
reward = 0.2 * r_angle + 0.2 * r_trackPos + 0.3 * r_speed + 0.3 * r_Yac
Yac_threshold = 3.530394 # 0.1G
if np.abs(Yac) > Yac_threshold:
reward = -min(np.abs(Yac) / 250, 1)
# Termination judgement #########################
track = np.array(obs["track"])
# episode terminates when the car is out of track
if track.min() < 0:
reward = -10
client.R.d["meta"] = True
# episode terminates if the progress of agent is little
if self.terminal_judge_start < self.time_step:
if progress < self.termination_limit_progress:
reward = -10
client.R.d["meta"] = True
# episode terminates if the agent runs backward
if np.cos(obs["angle"]) < 0 or obs["distRaced"] < 0:
reward = -10
client.R.d["meta"] = True
# episode terminates when the agent reaches the maximum distance
if obs["distRaced"] >= self.maximum_distance:
reward = 10
client.R.d["meta"] = True
if client.R.d["meta"] is True: # send a reset signal
poshis = np.array(self.poshis)
anglehis = np.array(self.anglehis)
sphis = np.array(self.sphis)
Yachis = np.array(self.Yaclist)
# For training episodes, display information about the vehicle in the finished driving
if self.testmode == False:
print("---------------------------------------------------------")
print("---> raced: ", obs["distRaced"], " m <---")
print("--- maxYac: ", np.max(Yachis), " km/h/s ---")
print("--- minYac: ", np.min(Yachis), " km/h/s ---")
if abs(np.max(Yachis)) >= abs(np.min(Yachis)):
absmaxYac = abs(np.max(Yachis))
else:
absmaxYac = abs(np.min(Yachis))
print("--- absmaxYac: ", absmaxYac, " km/h/s ---")
print("--- meanYac: ", np.mean(Yachis), " km/h/s +- ", np.std(Yachis), "---")
print("--- medianYac: ", np.median(Yachis), " km/h/s ---")
print("--- trackPos_mean: ", np.mean(poshis), " +- ", np.std(poshis), " ---")
print("--- angle_mean : ", np.mean(anglehis), " rad +- ", np.std(anglehis), " ---")
print("--- speedX_mean: ", np.mean(sphis), " km/h +- ", np.std(sphis), " ---")
print("---------------------------------------------------------")
self.initial_run = False
client.respond_to_server()
self.time_step += 1
return self.get_obs(), reward, client.R.d["meta"], {}
def reset(self, relaunch=False):
self.time_step = 0
# If not true, send a reset signal to TORCS when the reset function is called
if self.initial_reset is not True:
self.client.R.d["meta"] = True
self.client.respond_to_server()
## TENTATIVE. Restarting TORCS for every episode will cause the memory leak bug!
if relaunch is True:
self.reset_torcs()
# Modify here if you use multiple tracks in the environment
# Open new UDP in vtorcs
self.client = snakeoil3.Client(p=3101, vision=False)
self.client.MAX_STEPS = np.inf
client = self.client
# get the initial input from TORCS
client.get_servers_input()
# get the current full observation from TORCS
obs = client.S.d
self.observation = self.make_observaton(obs)
# reset variables and lists
self.speedY = obs["speedY"]
self.time = obs["curLapTime"]
self.Yaclist = []
self.poshis = []
self.anglehis = []
self.sphis = []
self.initial_reset = False
return self.get_obs()
def close(self):
os.system("pkill torcs")
def render(self, mode="human"):
# TORCS has a monitor of driving, so this method omitted.
pass
####################################### making observation ############################################
def get_obs(self):
return self.observation
def reset_torcs(self):
os.system("pkill torcs")
time.sleep(0.5)
if self.obsdim == 79:
os.system("torcs &")
elif self.obsdim == 2:
os.system("torcs -nofuel -nodamage -nolaptime &")
else:
os.system("torcs -nofuel -nodamage -nolaptime &")
time.sleep(0.5)
os.system("sh ./gym_torcs_kai/autostart.sh")
time.sleep(0.5)
def agent_to_torcs(self, u):
torcs_action = {"steer": u[0]}
if self.throttle is True: # throttle action is enabled
torcs_action.update({"accel": u[1]})
if self.gear_change is True: # gear change action is enabled
torcs_action.update({"gear": u[2]})
return torcs_action
def make_observaton(self, raw_obs):
if self.obsdim == 79:
obs1 = np.array(
[
raw_obs["angle"],
raw_obs["curLapTime"],
raw_obs["damage"],
raw_obs["distFromStart"],
raw_obs["distRaced"],
]
)
focus = raw_obs["focus"]
obs2 = np.array([raw_obs["fuel"], raw_obs["gear"], raw_obs["lastLapTime"]])
opponents = raw_obs["opponents"]
obs3 = np.array(
[
raw_obs["racePos"],
raw_obs["rpm"],
raw_obs["speedX"],
raw_obs["speedY"],
raw_obs["speedZ"],
]
)
track = raw_obs["track"]
trackPos = np.array([raw_obs["trackPos"]])
wheelSpinVel = raw_obs["wheelSpinVel"]
z = np.array(raw_obs["z"])
observ = np.hstack(
[obs1, focus, obs2, opponents, obs3, track, trackPos, wheelSpinVel, z]
)
return observ
elif self.obsdim == 2:
return np.array([raw_obs["angle"], raw_obs["trackPos"]])
elif self.obsdim == 31:
obs1 = np.array(
[
raw_obs["angle"],
raw_obs["gear"],
raw_obs["rpm"],
raw_obs["speedX"],
raw_obs["speedY"],
raw_obs["speedZ"],
]
)
trackPos = np.array([raw_obs["trackPos"]])
z = np.array(raw_obs["z"])
observ = np.hstack(
[obs1, raw_obs["track"], trackPos, raw_obs["wheelSpinVel"], z]
)
return observ
else:
return None
| 35.410619 | 107 | 0.450992 |
import gym
from gym import spaces
from gym.utils import seeding
import numpy as np
import sys
sys.path.append("./gym_torcs_kai")
import snakeoil3_gym as snakeoil3
import os
import time
class TorcsKaiEnv(gym.Env):
terminal_judge_start = 500
termination_limit_progress = 5
initial_reset = True
def __init__(self, throttle=False, gear_change=False):
print("=== Hello, this is Gym-TORCS-Kai. ===")
0, 0, 0, 0,
-np.inf,
])
else:
low = None
high = None
self.observation_space = spaces.Box(low=low, high=high)
def testset(self, test):
self.testmode = test
def set_params(self, throttle, gear, dim, max_dist, targ_speed):
self.throttle = throttle
self.gear_change = gear
self.obsdim = dim
self.maximum_distance = max_dist
self.default_speed = targ_speed
def seed(self, seed=None):
self.np_random, seed = seeding.np_random(seed)
return [seed]
def step(self, u):
client = self.client
this_action = self.agent_to_torcs(u)
action_torcs = client.R.d
action_torcs["steer"] = this_action["steer"]
if self.throttle is False:
target_speed = self.default_speed
if client.S.d["speedX"] < target_speed - (client.R.d["steer"] * 50):
if client.R.d["accel"] + 0.1 <= 1:
client.R.d["accel"] += 0.1
else:
if client.R.d["accel"] - 0.1 >= 0:
client.R.d["accel"] -= 0.1
if client.S.d["speedX"] < 10:
if (client.S.d["speedX"] + 0.1) != 0:
client.R.d["accel"] += 1 / (client.S.d["speedX"] + 0.1)
if (client.S.d["wheelSpinVel"][2] + client.S.d["wheelSpinVel"][3]) - (
client.S.d["wheelSpinVel"][0] + client.S.d["wheelSpinVel"][1]
) > 5:
action_torcs["accel"] -= 0.2
else:
action_torcs["accel"] = this_action["accel"]
if self.gear_change is True:
action_torcs["gear"] = this_action["gear"]
else:
action_torcs["gear"] = 1
if client.S.d["speedX"] > 50:
action_torcs["gear"] = 2
if client.S.d["speedX"] > 80:
action_torcs["gear"] = 3
if client.S.d["speedX"] > 110:
action_torcs["gear"] = 4
if client.S.d["speedX"] > 140:
action_torcs["gear"] = 5
if client.S.d["speedX"] > 170:
action_torcs["gear"] = 6
250, 1)
"] = True
if np.cos(obs["angle"]) < 0 or obs["distRaced"] < 0:
reward = -10
client.R.d["meta"] = True
if obs["distRaced"] >= self.maximum_distance:
reward = 10
client.R.d["meta"] = True
if client.R.d["meta"] is True:
poshis = np.array(self.poshis)
anglehis = np.array(self.anglehis)
sphis = np.array(self.sphis)
Yachis = np.array(self.Yaclist)
if self.testmode == False:
print("---------------------------------------------------------")
print("---> raced: ", obs["distRaced"], " m <---")
print("--- maxYac: ", np.max(Yachis), " km/h/s ---")
print("--- minYac: ", np.min(Yachis), " km/h/s ---")
if abs(np.max(Yachis)) >= abs(np.min(Yachis)):
absmaxYac = abs(np.max(Yachis))
else:
absmaxYac = abs(np.min(Yachis))
print("--- absmaxYac: ", absmaxYac, " km/h/s ---")
print("--- meanYac: ", np.mean(Yachis), " km/h/s +- ", np.std(Yachis), "---")
print("--- medianYac: ", np.median(Yachis), " km/h/s ---")
print("--- trackPos_mean: ", np.mean(poshis), " +- ", np.std(poshis), " ---")
print("--- angle_mean : ", np.mean(anglehis), " rad +- ", np.std(anglehis), " ---")
print("--- speedX_mean: ", np.mean(sphis), " km/h +- ", np.std(sphis), " ---")
print("---------------------------------------------------------")
self.initial_run = False
client.respond_to_server()
self.time_step += 1
return self.get_obs(), reward, client.R.d["meta"], {}
def reset(self, relaunch=False):
self.time_step = 0
if self.initial_reset is not True:
self.client.R.d["meta"] = True
self.client.respond_to_server()
self.client = snakeoil3.Client(p=3101, vision=False)
self.client.MAX_STEPS = np.inf
client = self.client
client.get_servers_input()
obs = client.S.d
self.observation = self.make_observaton(obs)
self.speedY = obs["speedY"]
self.time = obs["curLapTime"]
self.Yaclist = []
self.poshis = []
self.anglehis = []
self.sphis = []
self.initial_reset = False
return self.get_obs()
def close(self):
os.system("pkill torcs")
def render(self, mode="human"):
pass
| true | true |
f71c466cbf91fb905a3d1819ad2004dbe30abd3a | 775 | py | Python | app/mixin/assets.py | swelanauguste/refactored-sniffle | 1c0ea2f4d07a74d694ae3409b8b2ea3d57b9db4f | [
"MIT"
] | null | null | null | app/mixin/assets.py | swelanauguste/refactored-sniffle | 1c0ea2f4d07a74d694ae3409b8b2ea3d57b9db4f | [
"MIT"
] | null | null | null | app/mixin/assets.py | swelanauguste/refactored-sniffle | 1c0ea2f4d07a74d694ae3409b8b2ea3d57b9db4f | [
"MIT"
] | null | null | null | from django.conf import settings
from django.db import models
from django.views.generic import TemplateView
User = settings.AUTH_USER_MODEL
class TimeStampMixin(models.Model):
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
created_by = models.ForeignKey(
User,
blank=True,
null=True,
related_name="%(class)s_created_by",
on_delete=models.SET_DEFAULT, default=1
)
updated_by = models.ForeignKey(
User,
blank=True,
null=True,
related_name="%(class)s_updated_by",
on_delete=models.SET_DEFAULT, default=1,
)
class Meta:
abstract = True
class IndexView(TemplateView):
template_name = "index.html"
| 22.794118 | 56 | 0.676129 | from django.conf import settings
from django.db import models
from django.views.generic import TemplateView
User = settings.AUTH_USER_MODEL
class TimeStampMixin(models.Model):
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
created_by = models.ForeignKey(
User,
blank=True,
null=True,
related_name="%(class)s_created_by",
on_delete=models.SET_DEFAULT, default=1
)
updated_by = models.ForeignKey(
User,
blank=True,
null=True,
related_name="%(class)s_updated_by",
on_delete=models.SET_DEFAULT, default=1,
)
class Meta:
abstract = True
class IndexView(TemplateView):
template_name = "index.html"
| true | true |
f71c4861f8de557647d8e90f974fff337027d1d9 | 14,233 | py | Python | mnist128.py | samgregoost/self_supervised_large | 9c0c33cf374a1d5112519939012a64bca98c5f8d | [
"MIT"
] | 1 | 2021-08-25T08:32:19.000Z | 2021-08-25T08:32:19.000Z | mnist128.py | samgregoost/self_supervised_large | 9c0c33cf374a1d5112519939012a64bca98c5f8d | [
"MIT"
] | null | null | null | mnist128.py | samgregoost/self_supervised_large | 9c0c33cf374a1d5112519939012a64bca98c5f8d | [
"MIT"
] | null | null | null | from __future__ import print_function
import tensorflow as tf
import numpy as np
import random
import TensorflowUtils as utils
import read_MITSceneParsingDataParis as scene_parsing
import datetime
import BatchDatsetReader as dataset
from six.moves import xrange
FLAGS = tf.flags.FLAGS
tf.flags.DEFINE_integer("batch_size", "50", "batch size for training")
tf.flags.DEFINE_string("logs_dir", "/scratch1/ram095/nips20/logs_mnist128/", "path to logs directory")
tf.flags.DEFINE_string("data_dir", "/scratch1/ram095/nips20/paris_street", "path to dataset")
tf.flags.DEFINE_float("learning_rate", "1e-4", "Learning rate for Adam Optimizer")
tf.flags.DEFINE_string("model_dir", "Model_zoo/", "Path to vgg model mat")
tf.flags.DEFINE_bool('debug', "False", "Debug mode: True/ False")
tf.flags.DEFINE_string('mode', "train", "Mode train/ test/ visualize")
MODEL_URL = 'http://www.vlfeat.org/matconvnet/models/beta16/imagenet-vgg-verydeep-19.mat'
MAX_ITERATION = int(1e5 + 1)
NUM_OF_CLASSESS = 3
IMAGE_SIZE = 128
def vgg_net(weights, image):
layers = (
'conv1_1', 'relu1_1', 'conv1_2', 'relu1_2', 'pool1',
'conv2_1', 'relu2_1', 'conv2_2', 'relu2_2', 'pool2',
'conv3_1', 'relu3_1', 'conv3_2', 'relu3_2', 'conv3_3',
'relu3_3', 'conv3_4', 'relu3_4', 'pool3',
'conv4_1', 'relu4_1', 'conv4_2', 'relu4_2', 'conv4_3',
'relu4_3', 'conv4_4', 'relu4_4', 'pool4',
'conv5_1', 'relu5_1', 'conv5_2', 'relu5_2', 'conv5_3',
'relu5_3', 'conv5_4', 'relu5_4'
)
net = {}
current = image
for i, name in enumerate(layers):
kind = name[:4]
if kind == 'conv':
kernels, bias = weights[i][0][0][0][0]
# matconvnet: weights are [width, height, in_channels, out_channels]
# tensorflow: weights are [height, width, in_channels, out_channels]
kernels = utils.get_variable(np.transpose(kernels, (1, 0, 2, 3)), name=name + "_w")
bias = utils.get_variable(bias.reshape(-1), name=name + "_b")
current = utils.conv2d_basic(current, kernels, bias)
elif kind == 'relu':
current = tf.nn.relu(current, name=name)
if FLAGS.debug:
utils.add_activation_summary(current)
elif kind == 'pool':
current = utils.avg_pool_2x2(current)
net[name] = current
return net
'''
def decoder(image):
model_data = utils.get_model_data(FLAGS.model_dir, MODEL_URL)
mean = model_data['normalization'][0][0][0]
mean_pixel = np.mean(mean, axis=(0, 1))
weights = np.squeeze(model_data['layers'])
processed_image = utils.process_image(image, mean_pixel)
with tf.variable_scope("decoder"):
image_net = vgg_net(weights, processed_image)
conv_final_layer = image_net["conv5_3"]
pool5 = utils.max_pool_2x2(conv_final_layer)
return pool5
'''
def inference(image, keep_prob,z):
"""
Semantic segmentation network definition
:param image: input image. Should have values in range 0-255
:param keep_prob:
:return:
"""
print("setting up vgg initialized conv layers ...")
model_data = utils.get_model_data(FLAGS.model_dir, MODEL_URL)
mean = model_data['normalization'][0][0][0]
mean_pixel = np.mean(mean, axis=(0, 1))
weights = np.squeeze(model_data['layers'])
processed_image = utils.process_image(image, mean_pixel)
with tf.variable_scope("inference"):
image_net = vgg_net(weights, processed_image)
conv_final_layer = image_net["conv5_3"]
pool5 = utils.max_pool_2x2(conv_final_layer)
W6 = utils.weight_variable([7, 7, 512, 4096], name="W6")
b6 = utils.bias_variable([4096], name="b6")
conv6 = utils.conv2d_basic(pool5, W6, b6)
relu6 = tf.nn.relu(conv6, name="relu6")
if FLAGS.debug:
utils.add_activation_summary(relu6)
relu_dropout6 = tf.nn.dropout(relu6, keep_prob=keep_prob)
W7 = utils.weight_variable([1, 1, 4096, 4096], name="W7")
b7 = utils.bias_variable([4096], name="b7")
conv7 = utils.conv2d_basic(relu_dropout6, W7, b7)
relu7 = tf.nn.relu(conv7, name="relu7")
if FLAGS.debug:
utils.add_activation_summary(relu7)
relu_dropout7 = tf.nn.dropout(relu7, keep_prob=keep_prob)
W8 = utils.weight_variable([1, 1, 4096, 150], name="W8")
b8 = utils.bias_variable([150], name="b8")
# W_h = utils.weight_variable([1, 7, 7, 4], name="Wh")
conv8 = tf.reshape(utils.conv2d_basic(relu_dropout7, W8, b8),[-1,4*4*150])
fc1 = tf.reshape(tf.layers.dense(conv8,4*4*150,activation = tf.nn.relu),[-1,4,4,150])
concat1 = tf.concat([fc1, z],axis = 3)
# annotation_pred1 = tf.argmax(conv8, dimension=3, name="prediction1")
print("###########################################################")
print(fc1)
# now to upscale to actual image size
deconv_shape1 = image_net["pool4"].get_shape()
W_t1 = utils.weight_variable([4, 4, deconv_shape1[3].value, 278], name="W_t1")
b_t1 = utils.bias_variable([deconv_shape1[3].value], name="b_t1")
conv_t1 = utils.conv2d_transpose_strided(concat1, W_t1, b_t1, output_shape=tf.shape(image_net["pool4"]))
fuse_1 = tf.add(conv_t1, image_net["pool4"], name="fuse_1")
deconv_shape2 = image_net["pool3"].get_shape()
W_t2 = utils.weight_variable([4, 4, deconv_shape2[3].value, deconv_shape1[3].value], name="W_t2")
b_t2 = utils.bias_variable([deconv_shape2[3].value], name="b_t2")
conv_t2 = utils.conv2d_transpose_strided(fuse_1, W_t2, b_t2, output_shape=tf.shape(image_net["pool3"]))
fuse_2 = tf.add(conv_t2, image_net["pool3"], name="fuse_2")
shape = tf.shape(image)
deconv_shape3 = tf.stack([shape[0], shape[1], shape[2], 3])
W_t3 = utils.weight_variable([16, 16, 3, deconv_shape2[3].value], name="W_t3")
b_t3 = utils.bias_variable([3], name="b_t3")
conv_t3 = tf.nn.relu(utils.conv2d_transpose_strided(fuse_2, W_t3, b_t3, output_shape=deconv_shape3, stride=8))
annotation_pred = tf.argmax(conv_t3, dimension=3, name="prediction")
return tf.expand_dims(annotation_pred, dim=3), conv_t3
def train(loss_val, var_list):
optimizer = tf.train.AdamOptimizer(FLAGS.learning_rate)
grads = optimizer.compute_gradients(loss_val, var_list=var_list)
if FLAGS.debug:
# print(len(var_list))
for grad, var in grads:
utils.add_gradient_summary(grad, var)
return optimizer.apply_gradients(grads)
def train_z(loss,Z):
return tf.gradients(ys = loss, xs = Z)
def main(argv=None):
keep_probability = tf.placeholder(tf.float32, name="keep_probabilty")
image = tf.placeholder(tf.float32, shape=[None, IMAGE_SIZE, IMAGE_SIZE, 3], name="input_image")
annotation = tf.placeholder(tf.float32, shape=[None, IMAGE_SIZE, IMAGE_SIZE, 3], name="annotation")
z = tf.placeholder(tf.float32, shape=[None, 4, 4, 128], name="z")
# pred_annotation, logits = inference(image, keep_probability,z)
# tf.summary.image("input_image", image, max_outputs=2)
# tf.summary.image("ground_truth", tf.cast(annotation, tf.uint8), max_outputs=2)
# tf.summary.image("pred_annotation", tf.cast(pred_annotation, tf.uint8), max_outputs=2)
# loss = tf.reduce_mean((tf.nn.sparse_softmax_cross_entropy_with_logits(logits=logits,
# labels=tf.squeeze(annotation, squeeze_dims=[3]),
# name="entropy")))
mask_ = tf.ones([FLAGS.batch_size,64,64,3])
mask = tf.pad(mask_, [[0,0],[32,32],[32,32],[0,0]])
mask2__ = tf.ones([FLAGS.batch_size,78,78,3])
mask2_ = tf.pad(mask2__, [[0,0],[25,25],[25,25],[0,0]])
mask2 = mask2_ - mask
pred_annotation, logits = inference((1-mask)*image + mask*255, keep_probability,z)
tf.summary.image("input_image", image, max_outputs=2)
tf.summary.image("ground_truth", tf.cast(annotation, tf.uint8), max_outputs=2)
tf.summary.image("pred_annotation", tf.cast(pred_annotation, tf.uint8), max_outputs=2)
# loss0 = tf.reduce_mean(tf.abs(z))
loss = tf.reduce_mean(tf.sqrt(tf.reduce_sum(tf.square((image - logits)),[1,2,3])))
# loss2 = tf.reduce_mean(tf.square((image - logits)*mask2))
# loss = loss1 + loss2 + loss0
# loss = tf.reduce_mean(tf.squared_difference(logits ,annotation ))
loss_summary = tf.summary.scalar("entropy", loss)
grads = train_z(loss,z)
trainable_var = tf.trainable_variables()
if FLAGS.debug:
for var in trainable_var:
utils.add_to_regularization_and_summary(var)
train_op = train(loss, trainable_var)
print("Setting up summary op...")
summary_op = tf.summary.merge_all()
print("Setting up image reader...")
train_records, valid_records = scene_parsing.read_dataset(FLAGS.data_dir)
print(len(train_records))
print(len(valid_records))
print("Setting up dataset reader")
image_options = {'resize': True, 'resize_size': IMAGE_SIZE}
if FLAGS.mode == 'train':
train_dataset_reader = dataset.BatchDatset(train_records, image_options)
validation_dataset_reader = dataset.BatchDatset(valid_records, image_options)
sess = tf.Session()
print("Setting up Saver...")
saver = tf.train.Saver()
# create two summary writers to show training loss and validation loss in the same graph
# need to create two folders 'train' and 'validation' inside FLAGS.logs_dir
train_writer = tf.summary.FileWriter(FLAGS.logs_dir + '/train', sess.graph)
validation_writer = tf.summary.FileWriter(FLAGS.logs_dir + '/validation')
sess.run(tf.global_variables_initializer())
ckpt = tf.train.get_checkpoint_state(FLAGS.logs_dir)
if ckpt and ckpt.model_checkpoint_path:
saver.restore(sess, ckpt.model_checkpoint_path)
print("Model restored...")
if FLAGS.mode == "train":
for itr in xrange(MAX_ITERATION):
train_images, train_annotations = train_dataset_reader.next_batch(FLAGS.batch_size)
z_ = np.random.uniform(low=-1.0, high=1.0, size=(FLAGS.batch_size,4,4,128))
# print(train_images)
feed_dict = {image: train_images, annotation: train_annotations, keep_probability: 0.85, z: z_}
#train_images[:,50:100,50:100,:] =0
v = 0
for p in range(10):
z_ol = np.copy(z_)
# print("666666666666666666666666666666666666666")
z_loss, summ = sess.run([loss,loss_summary], feed_dict=feed_dict)
print("Step: %d, z_step: %d, Train_loss:%g" % (itr,p,z_loss))
# print(z_)
g = sess.run([grads],feed_dict=feed_dict)
v_prev = np.copy(v)
# print(g[0][0].shape)
v = 0.001*v - 0.1*g[0][0]
z_ += 0.001 * v_prev + (1+0.001)*v
# z_ = np.clip(z_, -1.0, 1.0)
# print(v.shape)
# print(z_.shape)
feed_dict = {image: train_images, annotation: train_annotations, keep_probability: 0.85, z: z_}
sess.run(train_op, feed_dict=feed_dict)
if itr % 10 == 0:
train_loss, summary_str = sess.run([loss, loss_summary], feed_dict=feed_dict)
print("Step: %d, Train_loss:%g" % (itr, train_loss))
train_writer.add_summary(summary_str, itr)
if itr % 500 == 0:
valid_images, valid_annotations = validation_dataset_reader.next_batch(FLAGS.batch_size)
valid_loss, summary_sva = sess.run([loss, loss_summary], feed_dict={image: valid_images, annotation: valid_annotations,
keep_probability: 1.0, z: z_})
print("%s ---> Validation_loss: %g" % (datetime.datetime.now(), valid_loss))
# add validation loss to TensorBoard
validation_writer.add_summary(summary_sva, itr)
saver.save(sess, FLAGS.logs_dir + "model_z_center.ckpt", 500)
elif FLAGS.mode == "visualize":
valid_images, valid_annotations = validation_dataset_reader.get_random_batch(50)
z_ = np.random.uniform(low=-1.0, high=1.0, size=(FLAGS.batch_size,4,4,128))
feed_dict = {image: valid_images, annotation: valid_annotations, keep_probability: 0.85, z: z_}
v= 0
for p in range(50):
z_ol = np.copy(z_)
# print("666666666666666666666666666666666666666")
z_loss, summ = sess.run([loss,loss_summary], feed_dict=feed_dict)
print("z_step: %d, Train_loss:%g" % (p,z_loss))
# print(z_)
g = sess.run([grads],feed_dict=feed_dict)
v_prev = np.copy(v)
# print(g[0][0].shape)
v = 0.001*v - 0.1*g[0][0]
z_ += 0.001 * v_prev + (1+0.001)*v
# z_ = np.clip(z_, -1.0, 1.0)
pred = sess.run(logits, feed_dict={image: valid_images, annotation: valid_annotations,z:z_,
keep_probability: 1.0})
valid_images_masked = (1-sess.run(mask))*valid_images
predicted_patch = sess.run(mask) * pred
pred = valid_images_masked + predicted_patch
# valid_annotations = np.squeeze(valid_annotations, axis=3)
# pred = np.squeeze(pred, axis=3)
print(valid_images.shape)
print(valid_annotations.shape)
print(pred.shape)
for itr in range(FLAGS.batch_size):
utils.save_image(valid_images_masked[itr].astype(np.uint8), FLAGS.logs_dir, name="inp_" + str(5+itr))
utils.save_image(valid_annotations[itr].astype(np.uint8), FLAGS.logs_dir, name="gt_" + str(5+itr))
utils.save_image(pred[itr].astype(np.uint8), FLAGS.logs_dir, name="predz_" + str(5+itr))
print("Saved image: %d" % itr)
if __name__ == "__main__":
tf.app.run()
| 42.486567 | 135 | 0.622848 | from __future__ import print_function
import tensorflow as tf
import numpy as np
import random
import TensorflowUtils as utils
import read_MITSceneParsingDataParis as scene_parsing
import datetime
import BatchDatsetReader as dataset
from six.moves import xrange
FLAGS = tf.flags.FLAGS
tf.flags.DEFINE_integer("batch_size", "50", "batch size for training")
tf.flags.DEFINE_string("logs_dir", "/scratch1/ram095/nips20/logs_mnist128/", "path to logs directory")
tf.flags.DEFINE_string("data_dir", "/scratch1/ram095/nips20/paris_street", "path to dataset")
tf.flags.DEFINE_float("learning_rate", "1e-4", "Learning rate for Adam Optimizer")
tf.flags.DEFINE_string("model_dir", "Model_zoo/", "Path to vgg model mat")
tf.flags.DEFINE_bool('debug', "False", "Debug mode: True/ False")
tf.flags.DEFINE_string('mode', "train", "Mode train/ test/ visualize")
MODEL_URL = 'http://www.vlfeat.org/matconvnet/models/beta16/imagenet-vgg-verydeep-19.mat'
MAX_ITERATION = int(1e5 + 1)
NUM_OF_CLASSESS = 3
IMAGE_SIZE = 128
def vgg_net(weights, image):
layers = (
'conv1_1', 'relu1_1', 'conv1_2', 'relu1_2', 'pool1',
'conv2_1', 'relu2_1', 'conv2_2', 'relu2_2', 'pool2',
'conv3_1', 'relu3_1', 'conv3_2', 'relu3_2', 'conv3_3',
'relu3_3', 'conv3_4', 'relu3_4', 'pool3',
'conv4_1', 'relu4_1', 'conv4_2', 'relu4_2', 'conv4_3',
'relu4_3', 'conv4_4', 'relu4_4', 'pool4',
'conv5_1', 'relu5_1', 'conv5_2', 'relu5_2', 'conv5_3',
'relu5_3', 'conv5_4', 'relu5_4'
)
net = {}
current = image
for i, name in enumerate(layers):
kind = name[:4]
if kind == 'conv':
kernels, bias = weights[i][0][0][0][0]
kernels = utils.get_variable(np.transpose(kernels, (1, 0, 2, 3)), name=name + "_w")
bias = utils.get_variable(bias.reshape(-1), name=name + "_b")
current = utils.conv2d_basic(current, kernels, bias)
elif kind == 'relu':
current = tf.nn.relu(current, name=name)
if FLAGS.debug:
utils.add_activation_summary(current)
elif kind == 'pool':
current = utils.avg_pool_2x2(current)
net[name] = current
return net
def inference(image, keep_prob,z):
print("setting up vgg initialized conv layers ...")
model_data = utils.get_model_data(FLAGS.model_dir, MODEL_URL)
mean = model_data['normalization'][0][0][0]
mean_pixel = np.mean(mean, axis=(0, 1))
weights = np.squeeze(model_data['layers'])
processed_image = utils.process_image(image, mean_pixel)
with tf.variable_scope("inference"):
image_net = vgg_net(weights, processed_image)
conv_final_layer = image_net["conv5_3"]
pool5 = utils.max_pool_2x2(conv_final_layer)
W6 = utils.weight_variable([7, 7, 512, 4096], name="W6")
b6 = utils.bias_variable([4096], name="b6")
conv6 = utils.conv2d_basic(pool5, W6, b6)
relu6 = tf.nn.relu(conv6, name="relu6")
if FLAGS.debug:
utils.add_activation_summary(relu6)
relu_dropout6 = tf.nn.dropout(relu6, keep_prob=keep_prob)
W7 = utils.weight_variable([1, 1, 4096, 4096], name="W7")
b7 = utils.bias_variable([4096], name="b7")
conv7 = utils.conv2d_basic(relu_dropout6, W7, b7)
relu7 = tf.nn.relu(conv7, name="relu7")
if FLAGS.debug:
utils.add_activation_summary(relu7)
relu_dropout7 = tf.nn.dropout(relu7, keep_prob=keep_prob)
W8 = utils.weight_variable([1, 1, 4096, 150], name="W8")
b8 = utils.bias_variable([150], name="b8")
conv8 = tf.reshape(utils.conv2d_basic(relu_dropout7, W8, b8),[-1,4*4*150])
fc1 = tf.reshape(tf.layers.dense(conv8,4*4*150,activation = tf.nn.relu),[-1,4,4,150])
concat1 = tf.concat([fc1, z],axis = 3)
print("###########################################################")
print(fc1)
deconv_shape1 = image_net["pool4"].get_shape()
W_t1 = utils.weight_variable([4, 4, deconv_shape1[3].value, 278], name="W_t1")
b_t1 = utils.bias_variable([deconv_shape1[3].value], name="b_t1")
conv_t1 = utils.conv2d_transpose_strided(concat1, W_t1, b_t1, output_shape=tf.shape(image_net["pool4"]))
fuse_1 = tf.add(conv_t1, image_net["pool4"], name="fuse_1")
deconv_shape2 = image_net["pool3"].get_shape()
W_t2 = utils.weight_variable([4, 4, deconv_shape2[3].value, deconv_shape1[3].value], name="W_t2")
b_t2 = utils.bias_variable([deconv_shape2[3].value], name="b_t2")
conv_t2 = utils.conv2d_transpose_strided(fuse_1, W_t2, b_t2, output_shape=tf.shape(image_net["pool3"]))
fuse_2 = tf.add(conv_t2, image_net["pool3"], name="fuse_2")
shape = tf.shape(image)
deconv_shape3 = tf.stack([shape[0], shape[1], shape[2], 3])
W_t3 = utils.weight_variable([16, 16, 3, deconv_shape2[3].value], name="W_t3")
b_t3 = utils.bias_variable([3], name="b_t3")
conv_t3 = tf.nn.relu(utils.conv2d_transpose_strided(fuse_2, W_t3, b_t3, output_shape=deconv_shape3, stride=8))
annotation_pred = tf.argmax(conv_t3, dimension=3, name="prediction")
return tf.expand_dims(annotation_pred, dim=3), conv_t3
def train(loss_val, var_list):
optimizer = tf.train.AdamOptimizer(FLAGS.learning_rate)
grads = optimizer.compute_gradients(loss_val, var_list=var_list)
if FLAGS.debug:
for grad, var in grads:
utils.add_gradient_summary(grad, var)
return optimizer.apply_gradients(grads)
def train_z(loss,Z):
return tf.gradients(ys = loss, xs = Z)
def main(argv=None):
    """Build the inpainting graph and either train it or visualize results.

    ``FLAGS.mode`` selects the branch.  In both branches the latent code
    ``z`` is first optimised by momentum gradient descent before the network
    weights are updated (train) or outputs are sampled (visualize).
    """
    # Dropout keep-probability placeholder.  NOTE: the graph name keeps the
    # original "probabilty" typo, so existing checkpoints/feeds depend on it.
    keep_probability = tf.placeholder(tf.float32, name="keep_probabilty")
    image = tf.placeholder(tf.float32, shape=[None, IMAGE_SIZE, IMAGE_SIZE, 3], name="input_image")
    annotation = tf.placeholder(tf.float32, shape=[None, IMAGE_SIZE, IMAGE_SIZE, 3], name="annotation")
    # Latent code that conditions the generator.
    z = tf.placeholder(tf.float32, shape=[None, 4, 4, 128], name="z")
    # Central 64x64 hole mask padded with 32 on each side
    # (assumes IMAGE_SIZE == 128 -- TODO confirm).
    mask_ = tf.ones([FLAGS.batch_size, 64, 64, 3])
    mask = tf.pad(mask_, [[0, 0], [32, 32], [32, 32], [0, 0]])
    # 78x78 centred mask; mask2 is the ring around the hole (unused below).
    mask2__ = tf.ones([FLAGS.batch_size, 78, 78, 3])
    mask2_ = tf.pad(mask2__, [[0, 0], [25, 25], [25, 25], [0, 0]])
    mask2 = mask2_ - mask
    # The network sees the image with the hole filled with white (255).
    pred_annotation, logits = inference((1 - mask) * image + mask * 255, keep_probability, z)
    tf.summary.image("input_image", image, max_outputs=2)
    tf.summary.image("ground_truth", tf.cast(annotation, tf.uint8), max_outputs=2)
    tf.summary.image("pred_annotation", tf.cast(pred_annotation, tf.uint8), max_outputs=2)
    # Per-image L2 reconstruction error, averaged over the batch.
    loss = tf.reduce_mean(tf.sqrt(tf.reduce_sum(tf.square((image - logits)), [1, 2, 3])))
    loss_summary = tf.summary.scalar("entropy", loss)
    # Symbolic gradient of the loss w.r.t. the latent code z.
    grads = train_z(loss, z)
    trainable_var = tf.trainable_variables()
    if FLAGS.debug:
        for var in trainable_var:
            utils.add_to_regularization_and_summary(var)
    train_op = train(loss, trainable_var)

    print("Setting up summary op...")
    summary_op = tf.summary.merge_all()

    print("Setting up image reader...")
    train_records, valid_records = scene_parsing.read_dataset(FLAGS.data_dir)
    print(len(train_records))
    print(len(valid_records))

    print("Setting up dataset reader")
    image_options = {'resize': True, 'resize_size': IMAGE_SIZE}
    if FLAGS.mode == 'train':
        train_dataset_reader = dataset.BatchDatset(train_records, image_options)
    validation_dataset_reader = dataset.BatchDatset(valid_records, image_options)

    sess = tf.Session()

    print("Setting up Saver...")
    saver = tf.train.Saver()
    train_writer = tf.summary.FileWriter(FLAGS.logs_dir + '/train', sess.graph)
    validation_writer = tf.summary.FileWriter(FLAGS.logs_dir + '/validation')

    sess.run(tf.global_variables_initializer())
    # Resume from the latest checkpoint if one exists in the logs directory.
    ckpt = tf.train.get_checkpoint_state(FLAGS.logs_dir)
    if ckpt and ckpt.model_checkpoint_path:
        saver.restore(sess, ckpt.model_checkpoint_path)
        print("Model restored...")

    if FLAGS.mode == "train":
        # NOTE(review): ``xrange`` implies Python 2; use ``range`` on Python 3.
        for itr in xrange(MAX_ITERATION):
            train_images, train_annotations = train_dataset_reader.next_batch(FLAGS.batch_size)
            z_ = np.random.uniform(low=-1.0, high=1.0, size=(FLAGS.batch_size, 4, 4, 128))
            feed_dict = {image: train_images, annotation: train_annotations, keep_probability: 0.85, z: z_}
            # Inner loop: 10 steps of momentum gradient descent on z.
            v = 0
            for p in range(10):
                z_ol = np.copy(z_)
                z_loss, summ = sess.run([loss, loss_summary], feed_dict=feed_dict)
                print("Step: %d, z_step: %d, Train_loss:%g" % (itr, p, z_loss))
                g = sess.run([grads], feed_dict=feed_dict)
                v_prev = np.copy(v)
                # Momentum update (mu=0.001, lr=0.1) with a Nesterov-style
                # correction term on the next line.
                v = 0.001 * v - 0.1 * g[0][0]
                z_ += 0.001 * v_prev + (1 + 0.001) * v
                # z_ is also updated in place above, so the feed keeps seeing
                # the new values; this rebuild makes that explicit.
                feed_dict = {image: train_images, annotation: train_annotations, keep_probability: 0.85, z: z_}
            # One weight update with the optimised latent code.
            sess.run(train_op, feed_dict=feed_dict)

            if itr % 10 == 0:
                train_loss, summary_str = sess.run([loss, loss_summary], feed_dict=feed_dict)
                print("Step: %d, Train_loss:%g" % (itr, train_loss))
                train_writer.add_summary(summary_str, itr)

            if itr % 500 == 0:
                valid_images, valid_annotations = validation_dataset_reader.next_batch(FLAGS.batch_size)
                valid_loss, summary_sva = sess.run([loss, loss_summary], feed_dict={image: valid_images, annotation: valid_annotations,
                                                   keep_probability: 1.0, z: z_})
                print("%s ---> Validation_loss: %g" % (datetime.datetime.now(), valid_loss))
                validation_writer.add_summary(summary_sva, itr)
                saver.save(sess, FLAGS.logs_dir + "model_z_center.ckpt", 500)

    elif FLAGS.mode == "visualize":
        valid_images, valid_annotations = validation_dataset_reader.get_random_batch(50)
        z_ = np.random.uniform(low=-1.0, high=1.0, size=(FLAGS.batch_size, 4, 4, 128))
        feed_dict = {image: valid_images, annotation: valid_annotations, keep_probability: 0.85, z: z_}
        # Optimise z for 50 steps before sampling.  z_ is mutated in place,
        # so feed_dict keeps referencing the updated array.
        v = 0
        for p in range(50):
            z_ol = np.copy(z_)
            z_loss, summ = sess.run([loss, loss_summary], feed_dict=feed_dict)
            print("z_step: %d, Train_loss:%g" % (p, z_loss))
            g = sess.run([grads], feed_dict=feed_dict)
            v_prev = np.copy(v)
            v = 0.001 * v - 0.1 * g[0][0]
            z_ += 0.001 * v_prev + (1 + 0.001) * v
        pred = sess.run(logits, feed_dict={image: valid_images, annotation: valid_annotations, z: z_,
                                           keep_probability: 1.0})
        # Paste the predicted patch into the hole of the masked input.
        valid_images_masked = (1 - sess.run(mask)) * valid_images
        predicted_patch = sess.run(mask) * pred
        pred = valid_images_masked + predicted_patch
        print(valid_images.shape)
        print(valid_annotations.shape)
        print(pred.shape)
        for itr in range(FLAGS.batch_size):
            utils.save_image(valid_images_masked[itr].astype(np.uint8), FLAGS.logs_dir, name="inp_" + str(5 + itr))
            utils.save_image(valid_annotations[itr].astype(np.uint8), FLAGS.logs_dir, name="gt_" + str(5 + itr))
            utils.save_image(pred[itr].astype(np.uint8), FLAGS.logs_dir, name="predz_" + str(5 + itr))
            print("Saved image: %d" % itr)


if __name__ == "__main__":
    tf.app.run()
| true | true |
f71c490d7b16466aa2d3b6d909be2ecacb018f6b | 70 | py | Python | hashlib/hashlib/__init__.py | badgeteam/micropython-lib | fca0235c166ebbada489d88c42fc549267832797 | [
"PSF-2.0"
] | null | null | null | hashlib/hashlib/__init__.py | badgeteam/micropython-lib | fca0235c166ebbada489d88c42fc549267832797 | [
"PSF-2.0"
] | null | null | null | hashlib/hashlib/__init__.py | badgeteam/micropython-lib | fca0235c166ebbada489d88c42fc549267832797 | [
"PSF-2.0"
] | 2 | 2017-11-21T16:53:03.000Z | 2021-07-29T08:47:14.000Z | from .sha256 import sha224, sha256
from .sha512 import sha384, sha512
| 23.333333 | 34 | 0.8 | from .sha256 import sha224, sha256
from .sha512 import sha384, sha512
| true | true |
f71c4b2b7c7dee5107676ddd03075e0b5134b81f | 2,488 | py | Python | HDF5Saver.py | sizhky/carla-dataset-runner | a670d981d29de78460cd90b1d4949ee4b71d0ade | [
"MIT"
] | 48 | 2019-12-28T11:08:27.000Z | 2022-03-24T09:22:51.000Z | HDF5Saver.py | sizhky/carla-dataset-runner | a670d981d29de78460cd90b1d4949ee4b71d0ade | [
"MIT"
] | 6 | 2020-02-01T21:47:21.000Z | 2021-12-10T13:19:41.000Z | HDF5Saver.py | sizhky/carla-dataset-runner | a670d981d29de78460cd90b1d4949ee4b71d0ade | [
"MIT"
] | 19 | 2020-01-08T08:19:08.000Z | 2022-03-24T08:39:53.000Z | import h5py
import numpy as np
class HDF5Saver:
    """Accumulates synchronized CARLA sensor frames into one HDF5 file.

    Each ``record_data`` call stores one dataset per sensor group, keyed by
    the frame timestamp; file-level attributes describe the data layout.
    """

    def __init__(self, sensor_width, sensor_height, file_path_to_save="data/carla_dataset.hdf5"):
        self.sensor_width = sensor_width
        self.sensor_height = sensor_height
        # Mode "w" truncates any existing file at this path.
        self.file = h5py.File(file_path_to_save, "w")

        # Creating groups to store each type of data
        self.rgb_group = self.file.create_group("rgb")
        self.depth_group = self.file.create_group("depth")
        self.ego_speed_group = self.file.create_group("ego_speed")
        self.bounding_box_group = self.file.create_group("bounding_box")
        self.bb_vehicles_group = self.bounding_box_group.create_group("vehicles")
        self.bb_walkers_group = self.bounding_box_group.create_group("walkers")
        self.timestamp_group = self.file.create_group("timestamps")

        # Storing metadata
        self.file.attrs['sensor_width'] = sensor_width
        self.file.attrs['sensor_height'] = sensor_height
        self.file.attrs['simulation_synchronization_type'] = "syncd"
        self.rgb_group.attrs['channels'] = 'R,G,B'
        self.ego_speed_group.attrs['x,y,z_velocity'] = 'in m/s'
        self.bounding_box_group.attrs['data_description'] = 'Each 4 entries in the same row present one individual actor in the scene.'
        self.bounding_box_group.attrs['bbox_format'] = '[xmin, ymin, xmax, ymax] (top left coords; right bottom coords)' \
                                                       'the vector has been flattened; therefore the data must' \
                                                       'be captured in blocks of 4 elements'
        self.timestamp_group.attrs['time_format'] = "current time in MILISSECONDS since the unix epoch " \
                                                    "(time.time()*1000 in python3)"

    def record_data(self, rgb_array, depth_array, bounding_box, ego_speed, timestamp):
        """Store one frame under datasets named by the stringified timestamp.

        ``bounding_box`` is a pair: index 0 holds the vehicle boxes and
        index 1 the walker boxes (flattened per the ``bbox_format`` attr).
        """
        timestamp = str(timestamp)
        self.rgb_group.create_dataset(timestamp, data=rgb_array)
        self.depth_group.create_dataset(timestamp, data=depth_array)
        self.ego_speed_group.create_dataset(timestamp, data=ego_speed)
        self.bb_vehicles_group.create_dataset(timestamp, data=bounding_box[0])
        self.bb_walkers_group.create_dataset(timestamp, data=bounding_box[1])

    def record_all_timestamps(self, timestamps_list):
        """Write the full list of frame timestamps as one dataset."""
        self.timestamp_group.create_dataset("timestamps", data=np.array(timestamps_list))

    def close_HDF5(self):
        """Flush and close the underlying HDF5 file."""
        self.file.close()
| 54.086957 | 135 | 0.671624 | import h5py
import numpy as np
class HDF5Saver:
def __init__(self, sensor_width, sensor_height, file_path_to_save="data/carla_dataset.hdf5"):
self.sensor_width = sensor_width
self.sensor_height = sensor_height
self.file = h5py.File(file_path_to_save, "w")
self.rgb_group = self.file.create_group("rgb")
self.depth_group = self.file.create_group("depth")
self.ego_speed_group = self.file.create_group("ego_speed")
self.bounding_box_group = self.file.create_group("bounding_box")
self.bb_vehicles_group = self.bounding_box_group.create_group("vehicles")
self.bb_walkers_group = self.bounding_box_group.create_group("walkers")
self.timestamp_group = self.file.create_group("timestamps")
self.file.attrs['sensor_width'] = sensor_width
self.file.attrs['sensor_height'] = sensor_height
self.file.attrs['simulation_synchronization_type'] = "syncd"
self.rgb_group.attrs['channels'] = 'R,G,B'
self.ego_speed_group.attrs['x,y,z_velocity'] = 'in m/s'
self.bounding_box_group.attrs['data_description'] = 'Each 4 entries in the same row present one individual actor in the scene.'
self.bounding_box_group.attrs['bbox_format'] = '[xmin, ymin, xmax, ymax] (top left coords; right bottom coords)' \
'the vector has been flattened; therefore the data must' \
'be captured in blocks of 4 elements'
self.timestamp_group.attrs['time_format'] = "current time in MILISSECONDS since the unix epoch " \
"(time.time()*1000 in python3)"
def record_data(self, rgb_array, depth_array, bounding_box, ego_speed, timestamp):
timestamp = str(timestamp)
self.rgb_group.create_dataset(timestamp, data=rgb_array)
self.depth_group.create_dataset(timestamp, data=depth_array)
self.ego_speed_group.create_dataset(timestamp, data=ego_speed)
self.bb_vehicles_group.create_dataset(timestamp, data=bounding_box[0])
self.bb_walkers_group.create_dataset(timestamp, data=bounding_box[1])
def record_all_timestamps(self, timestamps_list):
self.timestamp_group.create_dataset("timestamps", data=np.array(timestamps_list))
def close_HDF5(self):
self.file.close()
| true | true |
f71c4b4a5eb25c7ff024fb30f9b4ce405c736e0b | 544 | py | Python | manage.py | almazkun/PythonDjangoMozilaTut | 39e7c4d1ab9fbfe85abe90d94585fc7315617d1d | [
"MIT"
] | 1 | 2019-01-16T05:39:42.000Z | 2019-01-16T05:39:42.000Z | manage.py | almazkun/PythonDjangoMozilaTut | 39e7c4d1ab9fbfe85abe90d94585fc7315617d1d | [
"MIT"
] | null | null | null | manage.py | almazkun/PythonDjangoMozilaTut | 39e7c4d1ab9fbfe85abe90d94585fc7315617d1d | [
"MIT"
] | null | null | null | #!/usr/bin/env python
import os
import sys

if __name__ == '__main__':
    # Point Django at this project's settings unless the environment has
    # already configured a settings module.
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'visitkoreakz.settings')
    try:
        from django.core.management import execute_from_command_line
    except ImportError as exc:
        # Chain the original error so the underlying import failure stays
        # visible alongside the friendlier hint.
        raise ImportError(
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        ) from exc
    # Hand the CLI arguments to Django's command dispatcher.
    execute_from_command_line(sys.argv)
| 34 | 76 | 0.689338 |
import os
import sys
if __name__ == '__main__':
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'visitkoreakz.settings')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
| true | true |
f71c4b6de69a49fef47431af49540a00c27168f7 | 622 | py | Python | src/libs/components/customsmarttile.py | loghinalexandru/blackboard-greenboard | 80332bf7709e602a4d5ada31b3cf95801c06190f | [
"MIT"
] | null | null | null | src/libs/components/customsmarttile.py | loghinalexandru/blackboard-greenboard | 80332bf7709e602a4d5ada31b3cf95801c06190f | [
"MIT"
] | null | null | null | src/libs/components/customsmarttile.py | loghinalexandru/blackboard-greenboard | 80332bf7709e602a4d5ada31b3cf95801c06190f | [
"MIT"
] | null | null | null | import kivy
from functools import partial
kivy.require('2.0.0')
from kivymd.uix.imagelist import SmartTile
from constants import Screen
class CustomSmartTile(SmartTile):
    """Image tile that opens a full-screen viewer when pressed."""

    def __init__(self, **kwargs):
        super(CustomSmartTile, self).__init__(**kwargs)
        # Fixed tile height; horizontal size follows the parent layout.
        self.height = '240dp'
        self.size_hint_y = None
        # Fully transparent overlay box (RGBA).
        self.box_color = [0, 0, 0, 0]
        # Pressing the tile opens this tile's own image source.
        self.on_press = partial(self._maximize, self.source)

    def _maximize(self, file):
        """Show *file* on the image-view screen and switch to it."""
        # NOTE(review): the .parent.parent.parent chain assumes a fixed
        # widget tree -- confirm against the kv layout.
        self.parent.parent.parent.manager.get_screen(Screen.ImageView.value).file_name = file
        self.parent.parent.parent.manager.current = Screen.ImageView.value
from functools import partial
kivy.require('2.0.0')
from kivymd.uix.imagelist import SmartTile
from constants import Screen
class CustomSmartTile(SmartTile):
def __init__(self, **kwargs):
super(CustomSmartTile, self).__init__(**kwargs)
self.height = '240dp'
self.size_hint_y = None
self.box_color = [0, 0, 0, 0]
self.on_press = partial(self._maximize, self.source)
def _maximize(self, file):
self.parent.parent.parent.manager.get_screen(Screen.ImageView.value).file_name = file
self.parent.parent.parent.manager.current = Screen.ImageView.value | true | true |
f71c4cb2928d71b7feb87e22f1a85505c3468626 | 1,416 | py | Python | processing/data_collection/gazette/spiders/es_associacao_municipios.py | marlesson/diario-oficial | 6c2b3e41d1d08a1fd47517ed55ac22ae888c88b3 | [
"MIT"
] | 3 | 2018-06-05T02:33:03.000Z | 2018-06-05T14:20:02.000Z | processing/data_collection/gazette/spiders/es_associacao_municipios.py | marlesson/diario-oficial | 6c2b3e41d1d08a1fd47517ed55ac22ae888c88b3 | [
"MIT"
] | 4 | 2018-06-07T14:55:53.000Z | 2018-06-29T12:37:12.000Z | processing/data_collection/gazette/spiders/es_associacao_municipios.py | marlesson/diario-oficial | 6c2b3e41d1d08a1fd47517ed55ac22ae888c88b3 | [
"MIT"
] | 1 | 2018-08-24T22:32:27.000Z | 2018-08-24T22:32:27.000Z | from dateparser import parse
import datetime as dt
import scrapy
from gazette.items import Gazette
from gazette.spiders.base import BaseGazetteSpider
class EsAssociacaoMunicipiosSpider(BaseGazetteSpider):
    """Collects official gazette editions published on the portal of the
    association of municipalities of Espírito Santo (diariomunicipales.org.br).
    """

    # NOTE(review): state-wide territory code placeholder -- confirm convention.
    TERRITORY_ID = '3200000'
    name = 'es_associacao_municipios'
    allowed_domains = ['diariomunicipales.org.br']
    start_urls = ['https://diariomunicipales.org.br/?r=site/edicoes&Edicao_page=1']

    def parse(self, response):
        """
        @url https://diariomunicipales.org.br/?r=site/edicoes&Edicao_page=1
        @returns items 15 15
        @returns requests 1 1
        @scrapes date file_urls is_extra_edition territory_id power scraped_at
        """
        # Each table row of the listing describes one gazette edition.
        for gazette_node in response.css('.items tbody tr'):
            url = gazette_node.css('[download]::attr(href)').extract_first()
            date = gazette_node.css('td::text')[1].extract()
            # The page renders dates in Portuguese.
            date = parse(date, languages=['pt']).date()
            yield Gazette(
                date=date,
                file_urls=[url],
                is_extra_edition=False,
                territory_id=self.TERRITORY_ID,
                power='executive',
                scraped_at=dt.datetime.utcnow(),
            )
        # Follow pagination while the "next" link is still enabled.
        css_path = '.pagination .next:not(.disabled) a::attr(href)'
        next_page_url = response.css(css_path).extract_first()
        if next_page_url:
            yield response.follow(next_page_url)
| 35.4 | 83 | 0.633475 | from dateparser import parse
import datetime as dt
import scrapy
from gazette.items import Gazette
from gazette.spiders.base import BaseGazetteSpider
class EsAssociacaoMunicipiosSpider(BaseGazetteSpider):
TERRITORY_ID = '3200000'
name = 'es_associacao_municipios'
allowed_domains = ['diariomunicipales.org.br']
start_urls = ['https://diariomunicipales.org.br/?r=site/edicoes&Edicao_page=1']
def parse(self, response):
for gazette_node in response.css('.items tbody tr'):
url = gazette_node.css('[download]::attr(href)').extract_first()
date = gazette_node.css('td::text')[1].extract()
date = parse(date, languages=['pt']).date()
yield Gazette(
date=date,
file_urls=[url],
is_extra_edition=False,
territory_id=self.TERRITORY_ID,
power='executive',
scraped_at=dt.datetime.utcnow(),
)
css_path = '.pagination .next:not(.disabled) a::attr(href)'
next_page_url = response.css(css_path).extract_first()
if next_page_url:
yield response.follow(next_page_url)
| true | true |
f71c4f6668cb080ef2ce8616c2f028e0b74e850d | 6,404 | py | Python | test/functional/mempool_persist.py | likloadm/arielcoin | bd26479189fbdbea6e6f783c9d898054ae8740b0 | [
"MIT"
] | 3 | 2022-02-24T01:44:26.000Z | 2022-03-04T12:13:08.000Z | test/functional/mempool_persist.py | likloadm/arielcoin | bd26479189fbdbea6e6f783c9d898054ae8740b0 | [
"MIT"
] | 2 | 2022-03-21T05:41:51.000Z | 2022-03-21T17:12:13.000Z | test/functional/mempool_persist.py | likloadm/arielcoin | bd26479189fbdbea6e6f783c9d898054ae8740b0 | [
"MIT"
] | 6 | 2022-02-23T10:54:43.000Z | 2022-03-24T09:05:45.000Z | #!/usr/bin/env python3
# Copyright (c) 2014-2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test mempool persistence.
By default, bitcoind will dump mempool on shutdown and
then reload it on startup. This can be overridden with
the -persistmempool=0 command line option.
Test is as follows:
- start node0, node1 and node2. node1 has -persistmempool=0
- create 5 transactions on node2 to its own address. Note that these
are not sent to node0 or node1 addresses because we don't want
them to be saved in the wallet.
- check that node0 and node1 have 5 transactions in their mempools
- shutdown all nodes.
- startup node0. Verify that it still has 5 transactions
in its mempool. Shutdown node0. This tests that by default the
mempool is persistent.
- startup node1. Verify that its mempool is empty. Shutdown node1.
This tests that with -persistmempool=0, the mempool is not
dumped to disk when the node is shut down.
- Restart node0 with -persistmempool=0. Verify that its mempool is
empty. Shutdown node0. This tests that with -persistmempool=0,
the mempool is not loaded from disk on start up.
- Restart node0 with -persistmempool. Verify that it has 5
transactions in its mempool. This tests that -persistmempool=0
does not overwrite a previously valid mempool stored on disk.
- Remove node0 mempool.dat and verify savemempool RPC recreates it
and verify that node1 can load it and has 5 transactions in its
mempool.
- Verify that savemempool throws when the RPC is called if
node1 can't write to disk.
"""
from decimal import Decimal
import os
import time
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal, assert_raises_rpc_error, wait_until
class MempoolPersistTest(BitcoinTestFramework):
    """Functional test for mempool.dat persistence (scenario in module docstring)."""

    def set_test_params(self):
        # node1 runs with persistence disabled; node0/node2 use the default.
        self.num_nodes = 3
        self.extra_args = [[], ["-persistmempool=0"], []]

    def skip_test_if_missing_module(self):
        self.skip_if_no_wallet()

    def run_test(self):
        chain_height = self.nodes[0].getblockcount()
        assert_equal(chain_height, 200)
        self.log.debug("Mine a single block to get out of IBD")
        self.nodes[0].generate(1)
        self.sync_all()

        self.log.debug("Send 5 transactions from node2 (to its own address)")
        for i in range(5):
            last_txid = self.nodes[2].sendtoaddress(self.nodes[2].getnewaddress(), Decimal("10"))
        node2_balance = self.nodes[2].getbalance()
        self.sync_all()

        self.log.debug("Verify that node0 and node1 have 5 transactions in their mempools")
        assert_equal(len(self.nodes[0].getrawmempool()), 5)
        assert_equal(len(self.nodes[1].getrawmempool()), 5)

        self.log.debug("Prioritize a transaction on node0")
        fees = self.nodes[0].getmempoolentry(txid=last_txid)['fees']
        assert_equal(fees['base'], fees['modified'])
        self.nodes[0].prioritisetransaction(txid=last_txid, fee_delta=1000)
        fees = self.nodes[0].getmempoolentry(txid=last_txid)['fees']
        # 1000 satoshi delta == 0.00001000 in coin units.
        assert_equal(fees['base'] + Decimal('0.00001000'), fees['modified'])

        self.log.debug("Stop-start the nodes. Verify that node0 has the transactions in its mempool and node1 does not. Verify that node2 calculates its balance correctly after loading wallet transactions.")
        self.stop_nodes()
        # Give this node a head-start, so we can be "extra-sure" that it didn't load anything later
        # Also don't store the mempool, to keep the datadir clean
        self.start_node(1, extra_args=["-persistmempool=0"])
        self.start_node(0)
        self.start_node(2)
        # Give bitcoind a second to reload the mempool
        wait_until(lambda: len(self.nodes[0].getrawmempool()) == 5, timeout=1)
        wait_until(lambda: len(self.nodes[2].getrawmempool()) == 5, timeout=1)
        # The others have loaded their mempool. If node_1 loaded anything, we'd probably notice by now:
        assert_equal(len(self.nodes[1].getrawmempool()), 0)

        self.log.debug('Verify prioritization is loaded correctly')
        fees = self.nodes[0].getmempoolentry(txid=last_txid)['fees']
        assert_equal(fees['base'] + Decimal('0.00001000'), fees['modified'])

        # Verify accounting of mempool transactions after restart is correct
        self.nodes[2].syncwithvalidationinterfacequeue()  # Flush mempool to wallet
        assert_equal(node2_balance, self.nodes[2].getbalance())

        self.log.debug("Stop-start node0 with -persistmempool=0. Verify that it doesn't load its mempool.dat file.")
        self.stop_nodes()
        self.start_node(0, extra_args=["-persistmempool=0"])
        # Give bitcoind a second to reload the mempool
        time.sleep(1)
        assert_equal(len(self.nodes[0].getrawmempool()), 0)

        self.log.debug("Stop-start node0. Verify that it has the transactions in its mempool.")
        self.stop_nodes()
        self.start_node(0)
        wait_until(lambda: len(self.nodes[0].getrawmempool()) == 5)

        mempooldat0 = os.path.join(self.nodes[0].datadir, 'regtest', 'mempool.dat')
        mempooldat1 = os.path.join(self.nodes[1].datadir, 'regtest', 'mempool.dat')
        self.log.debug("Remove the mempool.dat file. Verify that savemempool to disk via RPC re-creates it")
        os.remove(mempooldat0)
        self.nodes[0].savemempool()
        assert os.path.isfile(mempooldat0)

        self.log.debug("Stop nodes, make node1 use mempool.dat from node0. Verify it has 5 transactions")
        os.rename(mempooldat0, mempooldat1)
        self.stop_nodes()
        self.start_node(1, extra_args=[])
        wait_until(lambda: len(self.nodes[1].getrawmempool()) == 5)

        self.log.debug("Prevent arielcoind from writing mempool.dat to disk. Verify that `savemempool` fails")
        # to test the exception we are creating a tmp folder called mempool.dat.new
        # which is an implementation detail that could change and break this test
        mempooldotnew1 = mempooldat1 + '.new'
        os.mkdir(mempooldotnew1)
        assert_raises_rpc_error(-1, "Unable to dump mempool to disk", self.nodes[1].savemempool)
        os.rmdir(mempooldotnew1)


if __name__ == '__main__':
    MempoolPersistTest().main()
| 47.088235 | 207 | 0.698626 |
from decimal import Decimal
import os
import time
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal, assert_raises_rpc_error, wait_until
class MempoolPersistTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 3
self.extra_args = [[], ["-persistmempool=0"], []]
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def run_test(self):
chain_height = self.nodes[0].getblockcount()
assert_equal(chain_height, 200)
self.log.debug("Mine a single block to get out of IBD")
self.nodes[0].generate(1)
self.sync_all()
self.log.debug("Send 5 transactions from node2 (to its own address)")
for i in range(5):
last_txid = self.nodes[2].sendtoaddress(self.nodes[2].getnewaddress(), Decimal("10"))
node2_balance = self.nodes[2].getbalance()
self.sync_all()
self.log.debug("Verify that node0 and node1 have 5 transactions in their mempools")
assert_equal(len(self.nodes[0].getrawmempool()), 5)
assert_equal(len(self.nodes[1].getrawmempool()), 5)
self.log.debug("Prioritize a transaction on node0")
fees = self.nodes[0].getmempoolentry(txid=last_txid)['fees']
assert_equal(fees['base'], fees['modified'])
self.nodes[0].prioritisetransaction(txid=last_txid, fee_delta=1000)
fees = self.nodes[0].getmempoolentry(txid=last_txid)['fees']
assert_equal(fees['base'] + Decimal('0.00001000'), fees['modified'])
self.log.debug("Stop-start the nodes. Verify that node0 has the transactions in its mempool and node1 does not. Verify that node2 calculates its balance correctly after loading wallet transactions.")
self.stop_nodes()
# Also don't store the mempool, to keep the datadir clean
self.start_node(1, extra_args=["-persistmempool=0"])
self.start_node(0)
self.start_node(2)
wait_until(lambda: len(self.nodes[0].getrawmempool()) == 5, timeout=1)
wait_until(lambda: len(self.nodes[2].getrawmempool()) == 5, timeout=1)
assert_equal(len(self.nodes[1].getrawmempool()), 0)
self.log.debug('Verify prioritization is loaded correctly')
fees = self.nodes[0].getmempoolentry(txid=last_txid)['fees']
assert_equal(fees['base'] + Decimal('0.00001000'), fees['modified'])
# Verify accounting of mempool transactions after restart is correct
self.nodes[2].syncwithvalidationinterfacequeue() # Flush mempool to wallet
assert_equal(node2_balance, self.nodes[2].getbalance())
self.log.debug("Stop-start node0 with -persistmempool=0. Verify that it doesn't load its mempool.dat file.")
self.stop_nodes()
self.start_node(0, extra_args=["-persistmempool=0"])
time.sleep(1)
assert_equal(len(self.nodes[0].getrawmempool()), 0)
self.log.debug("Stop-start node0. Verify that it has the transactions in its mempool.")
self.stop_nodes()
self.start_node(0)
wait_until(lambda: len(self.nodes[0].getrawmempool()) == 5)
mempooldat0 = os.path.join(self.nodes[0].datadir, 'regtest', 'mempool.dat')
mempooldat1 = os.path.join(self.nodes[1].datadir, 'regtest', 'mempool.dat')
self.log.debug("Remove the mempool.dat file. Verify that savemempool to disk via RPC re-creates it")
os.remove(mempooldat0)
self.nodes[0].savemempool()
assert os.path.isfile(mempooldat0)
self.log.debug("Stop nodes, make node1 use mempool.dat from node0. Verify it has 5 transactions")
os.rename(mempooldat0, mempooldat1)
self.stop_nodes()
self.start_node(1, extra_args=[])
wait_until(lambda: len(self.nodes[1].getrawmempool()) == 5)
self.log.debug("Prevent arielcoind from writing mempool.dat to disk. Verify that `savemempool` fails")
mempooldotnew1 = mempooldat1 + '.new'
os.mkdir(mempooldotnew1)
assert_raises_rpc_error(-1, "Unable to dump mempool to disk", self.nodes[1].savemempool)
os.rmdir(mempooldotnew1)
if __name__ == '__main__':
MempoolPersistTest().main()
| true | true |
f71c4fd86e03f9140408817a88d200d10a2703a4 | 345 | py | Python | day07/day7_part1.py | briannamcdonald/advent-of-code-2021 | c67e3c02a84db1a0cceeefef34a8cddba311484e | [
"MIT"
] | 1 | 2021-12-05T15:04:23.000Z | 2021-12-05T15:04:23.000Z | day07/day7_part1.py | briannamcdonald/advent-of-code-2021 | c67e3c02a84db1a0cceeefef34a8cddba311484e | [
"MIT"
] | null | null | null | day07/day7_part1.py | briannamcdonald/advent-of-code-2021 | c67e3c02a84db1a0cceeefef34a8cddba311484e | [
"MIT"
] | null | null | null | def main():
data = open("day07/input.txt", "r")
lines = [line for line in data]
crabs = [int(fish.strip()) for fish in lines[0].split(",")]
median_val = sorted(crabs)[len(crabs) // 2]
fuel_sum = 0
for crab in crabs:
fuel_sum += abs(crab - median_val)
print(fuel_sum)
if __name__ == "__main__":
main()
| 21.5625 | 63 | 0.582609 | def main():
data = open("day07/input.txt", "r")
lines = [line for line in data]
crabs = [int(fish.strip()) for fish in lines[0].split(",")]
median_val = sorted(crabs)[len(crabs) // 2]
fuel_sum = 0
for crab in crabs:
fuel_sum += abs(crab - median_val)
print(fuel_sum)
if __name__ == "__main__":
main()
| true | true |
f71c503591cb93ca4d82cc8039bd0f901a19b722 | 578 | py | Python | utils/utility.py | qiyuangong/Relational_Transaction_Anon | 954cca4b073b4d0532814ac7cf77ab1ae8068c9c | [
"MIT"
] | 1 | 2020-04-16T13:40:39.000Z | 2020-04-16T13:40:39.000Z | utils/utility.py | qiyuangong/Relational_Transaction_Anon | 954cca4b073b4d0532814ac7cf77ab1ae8068c9c | [
"MIT"
] | null | null | null | utils/utility.py | qiyuangong/Relational_Transaction_Anon | 954cca4b073b4d0532814ac7cf77ab1ae8068c9c | [
"MIT"
] | 2 | 2015-12-17T01:20:39.000Z | 2019-03-10T13:51:21.000Z | """
shared functions
"""
#!/usr/bin/env python
#coding=utf-8
def cmp_str(element1, element2):
    """Compare two strings numerically when both parse as floats,
    falling back to plain string comparison otherwise.

    NOTE(review): relies on the Python 2 builtin ``cmp``; this module
    does not run under Python 3.
    """
    try:
        return cmp(float(element1), float(element2))
    except ValueError:
        return cmp(element1, element2)
def list_to_str(value_list, cmpfun=cmp, sep=';'):
    """Convert ``value_list`` to a single string: the values are sorted
    with ``cmpfun`` and joined with ``sep``.

    Value safe: a copy is sorted, so ``value_list`` itself is unchanged.

    NOTE(review): ``cmp`` and ``list.sort(cmp=...)`` are Python 2 only.
    """
    temp = value_list[:]
    temp.sort(cmp=cmpfun)
    return sep.join(temp)
| 21.407407 | 67 | 0.650519 |
def cmp_str(element1, element2):
try:
return cmp(float(element1), float(element2))
except ValueError:
return cmp(element1, element2)
def list_to_str(value_list, cmpfun=cmp, sep=';'):
temp = value_list[:]
temp.sort(cmp=cmpfun)
return sep.join(temp)
| true | true |
f71c5062942eba1d7faec207ed2e124ae3fb0e61 | 603 | py | Python | app.py | zachmerrill/pyrdle | 573035cecbe3ee5cae36562a2e3b53ea4f2950a0 | [
"MIT"
] | null | null | null | app.py | zachmerrill/pyrdle | 573035cecbe3ee5cae36562a2e3b53ea4f2950a0 | [
"MIT"
] | null | null | null | app.py | zachmerrill/pyrdle | 573035cecbe3ee5cae36562a2e3b53ea4f2950a0 | [
"MIT"
] | null | null | null | from game import Game
if __name__ == '__main__':
# Initialize the game
game = Game()
# Game loop
while True:
# Get the user's guess
guess = input('Guess a word: ').lower()
# Check the guess
game.check_guess(guess)
# Print the board
game.print_board()
# Check if the game is won
if game.is_won():
print('You won!\n')
game.print_share()
break
# Check if the game is lost
if game.is_lost():
print('You lost!\n')
game.print_share()
break
| 25.125 | 47 | 0.512438 | from game import Game
if __name__ == '__main__':
game = Game()
while True:
guess = input('Guess a word: ').lower()
# Check the guess
game.check_guess(guess)
# Print the board
game.print_board()
# Check if the game is won
if game.is_won():
print('You won!\n')
game.print_share()
break
# Check if the game is lost
if game.is_lost():
print('You lost!\n')
game.print_share()
break
| true | true |
f71c51f3a090afb75de8ec1e60a2bd9eb1fb35d1 | 935 | py | Python | nnsvs/logger.py | nicolalandro/nnsvs | 45da00218dd0a445c8483f11ac891c6ef00d3925 | [
"MIT"
] | 72 | 2020-04-19T16:14:09.000Z | 2020-05-02T04:02:05.000Z | nnsvs/logger.py | nicolalandro/nnsvs | 45da00218dd0a445c8483f11ac891c6ef00d3925 | [
"MIT"
] | 1 | 2020-04-19T16:28:03.000Z | 2020-05-02T13:49:13.000Z | nnsvs/logger.py | nicolalandro/nnsvs | 45da00218dd0a445c8483f11ac891c6ef00d3925 | [
"MIT"
] | 3 | 2020-04-20T02:34:31.000Z | 2020-04-26T01:04:35.000Z | # coding: utf-8
from __future__ import absolute_import, print_function, with_statement
import logging
import os
from os.path import dirname
format = "%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s"
def getLogger(verbose=0, filename=None, name="nnsvs"):
logger = logging.getLogger(name)
if verbose >= 100:
logger.setLevel(logging.DEBUG)
elif verbose > 0:
logger.setLevel(logging.INFO)
else:
logger.setLevel(logging.WARN)
stream_handler = logging.StreamHandler()
stream_handler.setFormatter(logging.Formatter(format))
# logger.addHandler(stream_handler)
if filename is not None:
os.makedirs(dirname(filename), exist_ok=True)
file_handler = logging.FileHandler(filename=filename)
file_handler.setLevel(logging.INFO)
file_handler.setFormatter(logging.Formatter(format))
logger.addHandler(file_handler)
return logger
| 29.21875 | 73 | 0.71123 |
from __future__ import absolute_import, print_function, with_statement
import logging
import os
from os.path import dirname
format = "%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s"
def getLogger(verbose=0, filename=None, name="nnsvs"):
logger = logging.getLogger(name)
if verbose >= 100:
logger.setLevel(logging.DEBUG)
elif verbose > 0:
logger.setLevel(logging.INFO)
else:
logger.setLevel(logging.WARN)
stream_handler = logging.StreamHandler()
stream_handler.setFormatter(logging.Formatter(format))
if filename is not None:
os.makedirs(dirname(filename), exist_ok=True)
file_handler = logging.FileHandler(filename=filename)
file_handler.setLevel(logging.INFO)
file_handler.setFormatter(logging.Formatter(format))
logger.addHandler(file_handler)
return logger
| true | true |
f71c525adccaf762f27c3b40fe128ecd416463bd | 6,309 | py | Python | dico/base/model.py | dico-api/dico | 0bb80e2bb8dd66bb5078e52c9e37c180b7c80319 | [
"MIT"
] | 37 | 2021-08-23T00:16:42.000Z | 2022-02-22T23:19:47.000Z | dico/base/model.py | dico-api/dico | 0bb80e2bb8dd66bb5078e52c9e37c180b7c80319 | [
"MIT"
] | 14 | 2021-08-07T09:01:29.000Z | 2022-02-27T15:19:36.000Z | dico/base/model.py | eunwoo1104/dico | 0bb80e2bb8dd66bb5078e52c9e37c180b7c80319 | [
"MIT"
] | 9 | 2021-08-25T04:14:05.000Z | 2022-02-27T15:08:49.000Z | import copy
import typing
from ..model.snowflake import Snowflake
if typing.TYPE_CHECKING:
from ..api import APIClient
class CopyableObject:
def copy(self):
return copy.deepcopy(self)
class EventBase:
def __init__(self, client: "APIClient", resp: dict):
self.raw: dict = resp
self.client: "APIClient" = client
self._dont_dispatch: bool = False
@classmethod
def create(cls, client, resp: dict):
return cls(client, resp)
class DiscordObjectBase(CopyableObject):
TYPING = typing.Union[
int, str, Snowflake, "DiscordObjectBase", typing.Type["DiscordObjectBase"]
]
RESPONSE = typing.Union["DiscordObjectBase", typing.Awaitable["DiscordObjectBase"]]
RESPONSE_AS_LIST = typing.Union[
typing.List["DiscordObjectBase"],
typing.Awaitable[typing.List["DiscordObjectBase"]],
]
_cache_type = None
def __init__(self, client: "APIClient", resp: dict, **kwargs: typing.Any):
resp.update(kwargs)
# self._cache_type = None
self.raw: dict = resp.copy()
self.id: Snowflake = Snowflake(resp["id"])
self.client: "APIClient" = client
def __int__(self) -> int:
return int(self.id)
def __eq__(self, other):
return int(self.id) == int(other)
def __ne__(self, other):
return int(self.id) != int(other)
def __hash__(self):
return hash(self.id)
def update(self, new_resp: dict, **kwargs: typing.Any):
orig = self.raw
for k, v in new_resp.items():
if orig.get(k) != v:
orig[k] = v
self.__init__(self.client, orig, **kwargs)
@classmethod
def create(cls, client: "APIClient", resp: dict, **kwargs: typing.Any):
ensure_cache_type = kwargs.pop("ensure_cache_type", cls._cache_type)
prevent_caching = kwargs.pop("prevent_caching", False)
maybe_exist = client.has_cache and client.cache.get(
resp["id"], ensure_cache_type
)
if maybe_exist:
if prevent_caching:
maybe_exist = maybe_exist.copy()
maybe_exist.update(resp, **kwargs)
"""
orig = maybe_exist.raw
for k, v in resp.items():
if orig.get(k) != v:
orig[k] = v
maybe_exist.__init__(client, orig, **kwargs)
"""
return maybe_exist
else:
ret = cls(client, resp, **kwargs)
if client.has_cache and not prevent_caching:
client.cache.add(ret.id, ret._cache_type, ret)
if hasattr(ret, "guild_id") and ret.guild_id:
client.cache.get_guild_container(ret.guild_id).add(
ret.id, ret._cache_type, ret
)
return ret
class AbstractObject(dict):
RESPONSE = typing.Union["AbstractObject", typing.Awaitable["AbstractObject"]]
RESPONSE_AS_LIST = typing.Union[
typing.List["AbstractObject"], typing.Awaitable[typing.List["AbstractObject"]]
]
def __init__(self, resp: dict):
super().__init__(**resp)
def __getattr__(self, item):
return self.get(item)
def __setattr__(self, key, value):
self[key] = value
class FlagBase:
def __init__(self, *args: str, **kwargs: bool):
self.values: typing.Dict[str, int] = {
x: getattr(self, x) for x in dir(self) if isinstance(getattr(self, x), int)
}
self.value: int = 0
for x in args:
if x.upper() not in self.values:
raise AttributeError(f"invalid name: `{x}`")
self.value |= self.values[x.upper()]
for k, v in kwargs.items():
if k.upper() not in self.values:
raise AttributeError(f"invalid name: `{k}`")
if v:
self.value |= self.values[k.upper()]
def __int__(self) -> int:
return self.value
def __getattr__(self, item):
if item.startswith("__"):
return self.__getattribute__(item)
return self.has(item)
def __iter__(self):
for k, v in self.values.items():
if self.has(k):
yield v
def has(self, name: str) -> bool:
if name.upper() not in self.values:
raise AttributeError(f"invalid name: `{name}`")
return (self.value & self.values[name.upper()]) == self.values[name.upper()]
def __setattr__(self, key, value):
orig = key
key = key.upper()
if orig in ["value", "values"] or key not in self.values.keys():
return super().__setattr__(orig, value)
if not isinstance(value, bool):
raise TypeError(f"only type `bool` is supported.")
has_value = self.has(key)
if value and not has_value:
self.value |= self.values[key]
elif not value and has_value:
self.value &= ~self.values[key]
def add(self, value: str):
return self.__setattr__(value, True)
def remove(self, value: str):
return self.__setattr__(value, False)
@classmethod
def from_value(cls, value: int):
ret = cls()
ret.value = value
return ret
class TypeBase:
def __init__(self, value):
self.values: typing.Dict[int, str] = {
getattr(self, x): x for x in dir(self) if isinstance(getattr(self, x), int)
}
self.value: int = value
if self.value not in self.values:
raise AttributeError(f"invalid value: {value}")
def __str__(self) -> str:
return self.values[self.value]
def __int__(self) -> int:
return self.value
def __getattr__(self, item):
if item.startswith("__"):
return self.__getattribute__(item)
return self.is_type(item)
def is_type(self, name: str) -> bool:
values = {y: x for x, y in self.values.items()}
if name.upper() not in values:
raise AttributeError(f"invalid name: `{name}`")
return self.value == values[name.upper()]
@classmethod
def to_string(cls, value: int) -> str:
values = {
getattr(cls, x): x for x in dir(cls) if isinstance(getattr(cls, x), int)
}
return values.get(value)
| 31.232673 | 87 | 0.579648 | import copy
import typing
from ..model.snowflake import Snowflake
if typing.TYPE_CHECKING:
from ..api import APIClient
class CopyableObject:
def copy(self):
return copy.deepcopy(self)
class EventBase:
def __init__(self, client: "APIClient", resp: dict):
self.raw: dict = resp
self.client: "APIClient" = client
self._dont_dispatch: bool = False
@classmethod
def create(cls, client, resp: dict):
return cls(client, resp)
class DiscordObjectBase(CopyableObject):
TYPING = typing.Union[
int, str, Snowflake, "DiscordObjectBase", typing.Type["DiscordObjectBase"]
]
RESPONSE = typing.Union["DiscordObjectBase", typing.Awaitable["DiscordObjectBase"]]
RESPONSE_AS_LIST = typing.Union[
typing.List["DiscordObjectBase"],
typing.Awaitable[typing.List["DiscordObjectBase"]],
]
_cache_type = None
def __init__(self, client: "APIClient", resp: dict, **kwargs: typing.Any):
resp.update(kwargs)
self.raw: dict = resp.copy()
self.id: Snowflake = Snowflake(resp["id"])
self.client: "APIClient" = client
def __int__(self) -> int:
return int(self.id)
def __eq__(self, other):
return int(self.id) == int(other)
def __ne__(self, other):
return int(self.id) != int(other)
def __hash__(self):
return hash(self.id)
def update(self, new_resp: dict, **kwargs: typing.Any):
orig = self.raw
for k, v in new_resp.items():
if orig.get(k) != v:
orig[k] = v
self.__init__(self.client, orig, **kwargs)
@classmethod
def create(cls, client: "APIClient", resp: dict, **kwargs: typing.Any):
ensure_cache_type = kwargs.pop("ensure_cache_type", cls._cache_type)
prevent_caching = kwargs.pop("prevent_caching", False)
maybe_exist = client.has_cache and client.cache.get(
resp["id"], ensure_cache_type
)
if maybe_exist:
if prevent_caching:
maybe_exist = maybe_exist.copy()
maybe_exist.update(resp, **kwargs)
return maybe_exist
else:
ret = cls(client, resp, **kwargs)
if client.has_cache and not prevent_caching:
client.cache.add(ret.id, ret._cache_type, ret)
if hasattr(ret, "guild_id") and ret.guild_id:
client.cache.get_guild_container(ret.guild_id).add(
ret.id, ret._cache_type, ret
)
return ret
class AbstractObject(dict):
RESPONSE = typing.Union["AbstractObject", typing.Awaitable["AbstractObject"]]
RESPONSE_AS_LIST = typing.Union[
typing.List["AbstractObject"], typing.Awaitable[typing.List["AbstractObject"]]
]
def __init__(self, resp: dict):
super().__init__(**resp)
def __getattr__(self, item):
return self.get(item)
def __setattr__(self, key, value):
self[key] = value
class FlagBase:
def __init__(self, *args: str, **kwargs: bool):
self.values: typing.Dict[str, int] = {
x: getattr(self, x) for x in dir(self) if isinstance(getattr(self, x), int)
}
self.value: int = 0
for x in args:
if x.upper() not in self.values:
raise AttributeError(f"invalid name: `{x}`")
self.value |= self.values[x.upper()]
for k, v in kwargs.items():
if k.upper() not in self.values:
raise AttributeError(f"invalid name: `{k}`")
if v:
self.value |= self.values[k.upper()]
def __int__(self) -> int:
return self.value
def __getattr__(self, item):
if item.startswith("__"):
return self.__getattribute__(item)
return self.has(item)
def __iter__(self):
for k, v in self.values.items():
if self.has(k):
yield v
def has(self, name: str) -> bool:
if name.upper() not in self.values:
raise AttributeError(f"invalid name: `{name}`")
return (self.value & self.values[name.upper()]) == self.values[name.upper()]
def __setattr__(self, key, value):
orig = key
key = key.upper()
if orig in ["value", "values"] or key not in self.values.keys():
return super().__setattr__(orig, value)
if not isinstance(value, bool):
raise TypeError(f"only type `bool` is supported.")
has_value = self.has(key)
if value and not has_value:
self.value |= self.values[key]
elif not value and has_value:
self.value &= ~self.values[key]
def add(self, value: str):
return self.__setattr__(value, True)
def remove(self, value: str):
return self.__setattr__(value, False)
@classmethod
def from_value(cls, value: int):
ret = cls()
ret.value = value
return ret
class TypeBase:
def __init__(self, value):
self.values: typing.Dict[int, str] = {
getattr(self, x): x for x in dir(self) if isinstance(getattr(self, x), int)
}
self.value: int = value
if self.value not in self.values:
raise AttributeError(f"invalid value: {value}")
def __str__(self) -> str:
return self.values[self.value]
def __int__(self) -> int:
return self.value
def __getattr__(self, item):
if item.startswith("__"):
return self.__getattribute__(item)
return self.is_type(item)
def is_type(self, name: str) -> bool:
values = {y: x for x, y in self.values.items()}
if name.upper() not in values:
raise AttributeError(f"invalid name: `{name}`")
return self.value == values[name.upper()]
@classmethod
def to_string(cls, value: int) -> str:
values = {
getattr(cls, x): x for x in dir(cls) if isinstance(getattr(cls, x), int)
}
return values.get(value)
| true | true |
f71c526a7dbf0c3e5e4a74e675dd8614cf5c1f83 | 4,367 | py | Python | src/python/gh_api_repo_metrics.py | pamelarussell/github-bioinformatics | 0e7184cae57426c25cfa0e838637d34adf0a59e7 | [
"MIT"
] | 32 | 2018-05-14T20:34:08.000Z | 2022-03-22T12:37:19.000Z | src/python/gh_api_repo_metrics.py | pamelarussell/github-bioinformatics | 0e7184cae57426c25cfa0e838637d34adf0a59e7 | [
"MIT"
] | null | null | null | src/python/gh_api_repo_metrics.py | pamelarussell/github-bioinformatics | 0e7184cae57426c25cfa0e838637d34adf0a59e7 | [
"MIT"
] | 6 | 2018-07-11T17:15:07.000Z | 2021-08-02T19:51:40.000Z | import argparse
from bigquery import get_client
from gh_api import curr_commit_master
from gh_api import repo
from util import create_bq_table, push_bq_records
from util import get_repo_names, curr_time_utc
from util import unique_vals
parser = argparse.ArgumentParser()
parser.add_argument('--proj', action = 'store', dest = 'proj', required = True,
help = 'BigQuery project name')
parser.add_argument('--json_key', action = 'store', dest = 'json_key', required = True,
help = 'JSON key file for BigQuery dataset')
parser.add_argument('--ds', action = 'store', dest = 'ds', required = True,
help = 'BigQuery dataset to write table to')
parser.add_argument('--table', action = 'store', dest = 'table', required = True,
help = 'BigQuery table to write to')
parser.add_argument('--sheet', action = 'store', dest = 'sheet', required = True,
help = 'Google Sheet with use_repo as a column')
parser.add_argument('--gh_user', action = 'store', dest = 'gh_username', required = True,
help = 'GitHub username for API')
parser.add_argument('--gh_oauth_key', action = 'store', dest = 'gh_oauth_key', required = True,
help = '(String) GitHub oauth key')
args = parser.parse_args()
proj = args.proj
json_key = args.json_key
dataset = args.ds
table = args.table
sheet = args.sheet
gh_username = args.gh_username
gh_oauth_key = args.gh_oauth_key
# Get repo names
print("\nGetting repo names from spreadsheet")
repos = get_repo_names(sheet, json_key)
print("There are %s repos with use_repo = 1.\n" % len(repos))
# Using BigQuery-Python https://github.com/tylertreat/BigQuery-Python
print('\nGetting BigQuery client\n')
client = get_client(json_key_file=json_key, readonly=False, swallow_results=True)
# Check which repos are already in the table
existing_repos = unique_vals(client, proj, dataset, table, "repo_name")
if len(existing_repos) > 0:
repos = [repo for repo in repos if repo not in existing_repos]
print("Only getting data for %s repos not yet analyzed" %len(repos))
# Create the output table if necessary
schema = [
{'name': 'repo_name', 'type': 'STRING', 'mode': 'NULLABLE'},
{'name': 'api_url', 'type': 'STRING', 'mode': 'NULLABLE'},
{'name': 'html_url', 'type': 'STRING', 'mode': 'NULLABLE'},
{'name': 'description', 'type': 'STRING', 'mode': 'NULLABLE'},
{'name': 'is_fork', 'type': 'BOOLEAN', 'mode': 'NULLABLE'},
{'name': 'stargazers_count', 'type': 'INTEGER', 'mode': 'NULLABLE'},
{'name': 'watchers_count', 'type': 'INTEGER', 'mode': 'NULLABLE'},
{'name': 'forks_count', 'type': 'INTEGER', 'mode': 'NULLABLE'},
{'name': 'open_issues_count', 'type': 'INTEGER', 'mode': 'NULLABLE'},
{'name': 'subscribers_count', 'type': 'INTEGER', 'mode': 'NULLABLE'},
{'name': 'curr_commit_master', 'type': 'STRING', 'mode': 'NULLABLE'},
{'name': 'time_accessed', 'type': 'STRING', 'mode': 'NULLABLE'}
]
if not client.check_table(dataset, table):
create_bq_table(client, dataset, table, schema)
def get_record(repo_name):
r = repo.Repo(repo_name, gh_username, gh_oauth_key)
curr_time = curr_time_utc()
curr_commit = curr_commit_master(repo_name, gh_username, gh_oauth_key)
return {'repo_name': r.get_repo_name(),
'api_url': r.get_gh_api_url(),
'html_url': r.get_html_url(),
'description': r.get_description(),
'is_fork': r.is_fork(),
'stargazers_count': r.get_stargazers_count(),
'watchers_count': r.get_watchers_count(),
'forks_count': r.get_forks_count(),
'open_issues_count': r.get_open_issues_count(),
'subscribers_count': r.get_subscribers_count(),
'curr_commit_master': curr_commit,
'time_accessed': curr_time}
print("Getting repo info from GitHub API")
records = []
num_done = 0
for repo_name in repos:
try:
records.append(get_record(repo_name))
except UnicodeEncodeError:
print("Skipping repo %s" % repo_name)
num_done = num_done + 1
if num_done % 100 == 0:
print("Finished %s repos. Pushing records." % num_done)
push_bq_records(client, dataset, table, records)
records.clear()
push_bq_records(client, dataset, table, records) # Last batch
| 41.590476 | 96 | 0.654912 | import argparse
from bigquery import get_client
from gh_api import curr_commit_master
from gh_api import repo
from util import create_bq_table, push_bq_records
from util import get_repo_names, curr_time_utc
from util import unique_vals
parser = argparse.ArgumentParser()
parser.add_argument('--proj', action = 'store', dest = 'proj', required = True,
help = 'BigQuery project name')
parser.add_argument('--json_key', action = 'store', dest = 'json_key', required = True,
help = 'JSON key file for BigQuery dataset')
parser.add_argument('--ds', action = 'store', dest = 'ds', required = True,
help = 'BigQuery dataset to write table to')
parser.add_argument('--table', action = 'store', dest = 'table', required = True,
help = 'BigQuery table to write to')
parser.add_argument('--sheet', action = 'store', dest = 'sheet', required = True,
help = 'Google Sheet with use_repo as a column')
parser.add_argument('--gh_user', action = 'store', dest = 'gh_username', required = True,
help = 'GitHub username for API')
parser.add_argument('--gh_oauth_key', action = 'store', dest = 'gh_oauth_key', required = True,
help = '(String) GitHub oauth key')
args = parser.parse_args()
proj = args.proj
json_key = args.json_key
dataset = args.ds
table = args.table
sheet = args.sheet
gh_username = args.gh_username
gh_oauth_key = args.gh_oauth_key
print("\nGetting repo names from spreadsheet")
repos = get_repo_names(sheet, json_key)
print("There are %s repos with use_repo = 1.\n" % len(repos))
print('\nGetting BigQuery client\n')
client = get_client(json_key_file=json_key, readonly=False, swallow_results=True)
existing_repos = unique_vals(client, proj, dataset, table, "repo_name")
if len(existing_repos) > 0:
repos = [repo for repo in repos if repo not in existing_repos]
print("Only getting data for %s repos not yet analyzed" %len(repos))
schema = [
{'name': 'repo_name', 'type': 'STRING', 'mode': 'NULLABLE'},
{'name': 'api_url', 'type': 'STRING', 'mode': 'NULLABLE'},
{'name': 'html_url', 'type': 'STRING', 'mode': 'NULLABLE'},
{'name': 'description', 'type': 'STRING', 'mode': 'NULLABLE'},
{'name': 'is_fork', 'type': 'BOOLEAN', 'mode': 'NULLABLE'},
{'name': 'stargazers_count', 'type': 'INTEGER', 'mode': 'NULLABLE'},
{'name': 'watchers_count', 'type': 'INTEGER', 'mode': 'NULLABLE'},
{'name': 'forks_count', 'type': 'INTEGER', 'mode': 'NULLABLE'},
{'name': 'open_issues_count', 'type': 'INTEGER', 'mode': 'NULLABLE'},
{'name': 'subscribers_count', 'type': 'INTEGER', 'mode': 'NULLABLE'},
{'name': 'curr_commit_master', 'type': 'STRING', 'mode': 'NULLABLE'},
{'name': 'time_accessed', 'type': 'STRING', 'mode': 'NULLABLE'}
]
if not client.check_table(dataset, table):
create_bq_table(client, dataset, table, schema)
def get_record(repo_name):
r = repo.Repo(repo_name, gh_username, gh_oauth_key)
curr_time = curr_time_utc()
curr_commit = curr_commit_master(repo_name, gh_username, gh_oauth_key)
return {'repo_name': r.get_repo_name(),
'api_url': r.get_gh_api_url(),
'html_url': r.get_html_url(),
'description': r.get_description(),
'is_fork': r.is_fork(),
'stargazers_count': r.get_stargazers_count(),
'watchers_count': r.get_watchers_count(),
'forks_count': r.get_forks_count(),
'open_issues_count': r.get_open_issues_count(),
'subscribers_count': r.get_subscribers_count(),
'curr_commit_master': curr_commit,
'time_accessed': curr_time}
print("Getting repo info from GitHub API")
records = []
num_done = 0
for repo_name in repos:
try:
records.append(get_record(repo_name))
except UnicodeEncodeError:
print("Skipping repo %s" % repo_name)
num_done = num_done + 1
if num_done % 100 == 0:
print("Finished %s repos. Pushing records." % num_done)
push_bq_records(client, dataset, table, records)
records.clear()
push_bq_records(client, dataset, table, records)
| true | true |
f71c529b8ce1931c25f8a957d2a88be00a3047b0 | 551 | py | Python | list/list10.py | liyuanyuan11/Python | d94cc7ab39e56c6e24bfc741a30da77590d1d220 | [
"MIT"
] | null | null | null | list/list10.py | liyuanyuan11/Python | d94cc7ab39e56c6e24bfc741a30da77590d1d220 | [
"MIT"
] | null | null | null | list/list10.py | liyuanyuan11/Python | d94cc7ab39e56c6e24bfc741a30da77590d1d220 | [
"MIT"
] | null | null | null | roadSign=[]
roadSign.append("Johnson's house")
roadSign.append("Fox streetlamp")
roadSign.append("Guang Hualu kindergarten")
roadSign.append("Dog rescue center")
roadSign.append("Samll street park")
roadSign.append("Ri Tan School")
print(roadSign)
nextRoadSign=roadSign.pop()
print(nextRoadSign)
nextRoadSign=roadSign.pop()
print(nextRoadSign)
nextRoadSign=roadSign.pop()
print(nextRoadSign)
nextRoadSign=roadSign.pop()
print(nextRoadSign)
nextRoadSign=roadSign.pop()
print(nextRoadSign)
nextRoadSign=roadSign.pop()
print(nextRoadSign)
print(roadSign) | 26.238095 | 43 | 0.811252 | roadSign=[]
roadSign.append("Johnson's house")
roadSign.append("Fox streetlamp")
roadSign.append("Guang Hualu kindergarten")
roadSign.append("Dog rescue center")
roadSign.append("Samll street park")
roadSign.append("Ri Tan School")
print(roadSign)
nextRoadSign=roadSign.pop()
print(nextRoadSign)
nextRoadSign=roadSign.pop()
print(nextRoadSign)
nextRoadSign=roadSign.pop()
print(nextRoadSign)
nextRoadSign=roadSign.pop()
print(nextRoadSign)
nextRoadSign=roadSign.pop()
print(nextRoadSign)
nextRoadSign=roadSign.pop()
print(nextRoadSign)
print(roadSign) | true | true |
f71c52fff7f80a35b0dd2c37b0c08e7e9f367495 | 2,489 | py | Python | src/pretalx/orga/management/commands/import_schedule.py | lili668668/pretalx | 5ba2185ffd7c5f95254aafe25ad3de340a86eadb | [
"Apache-2.0"
] | null | null | null | src/pretalx/orga/management/commands/import_schedule.py | lili668668/pretalx | 5ba2185ffd7c5f95254aafe25ad3de340a86eadb | [
"Apache-2.0"
] | null | null | null | src/pretalx/orga/management/commands/import_schedule.py | lili668668/pretalx | 5ba2185ffd7c5f95254aafe25ad3de340a86eadb | [
"Apache-2.0"
] | null | null | null | import datetime as dt
from xml.etree import ElementTree as ET
from django.core.management.base import BaseCommand
from django.db import transaction
from django_scopes import scopes_disabled
from pretalx.event.models import Event, Organiser, Team
from pretalx.person.models import User
class Command(BaseCommand):
help = "Imports a frab xml export"
def add_arguments(self, parser):
parser.add_argument("path", type=str)
@transaction.atomic
def handle(self, *args, **options):
from pretalx.schedule.utils import process_frab
path = options.get("path")
tree = ET.parse(path)
root = tree.getroot()
event_data = root.find("conference")
event = Event.objects.filter(
slug__iexact=event_data.find("acronym").text
).first()
with scopes_disabled():
if not event:
event = self.create_event(event_data)
team = event.organiser.teams.filter(
can_change_teams=True,
can_change_organiser_settings=True,
can_change_event_settings=True,
can_change_submissions=True,
).first() or self.create_team(
str(event.name) + " Organisers", event.organiser
)
for user in User.objects.filter(is_administrator=True):
team.members.add(user)
team.save()
self.stdout.write(self.style.SUCCESS(process_frab(root, event)))
def create_event(self, event_data):
name = event_data.find("title").text
organiser = Organiser.objects.create(
name=name, slug=event_data.find("acronym").text
)
event = Event(
name=name,
organiser=organiser,
slug=event_data.find("acronym").text,
date_from=dt.datetime.strptime(
event_data.find("start").text, "%Y-%m-%d"
).date(),
date_to=dt.datetime.strptime(
event_data.find("end").text, "%Y-%m-%d"
).date(),
)
event.save()
self.create_team(name + " Organisers", organiser)
return event
def create_team(self, name, organiser):
return Team.objects.create(
name=name,
organiser=organiser,
can_change_teams=True,
can_change_organiser_settings=True,
can_change_event_settings=True,
can_change_submissions=True,
)
| 32.324675 | 72 | 0.599839 | import datetime as dt
from xml.etree import ElementTree as ET
from django.core.management.base import BaseCommand
from django.db import transaction
from django_scopes import scopes_disabled
from pretalx.event.models import Event, Organiser, Team
from pretalx.person.models import User
class Command(BaseCommand):
help = "Imports a frab xml export"
def add_arguments(self, parser):
parser.add_argument("path", type=str)
@transaction.atomic
def handle(self, *args, **options):
from pretalx.schedule.utils import process_frab
path = options.get("path")
tree = ET.parse(path)
root = tree.getroot()
event_data = root.find("conference")
event = Event.objects.filter(
slug__iexact=event_data.find("acronym").text
).first()
with scopes_disabled():
if not event:
event = self.create_event(event_data)
team = event.organiser.teams.filter(
can_change_teams=True,
can_change_organiser_settings=True,
can_change_event_settings=True,
can_change_submissions=True,
).first() or self.create_team(
str(event.name) + " Organisers", event.organiser
)
for user in User.objects.filter(is_administrator=True):
team.members.add(user)
team.save()
self.stdout.write(self.style.SUCCESS(process_frab(root, event)))
def create_event(self, event_data):
name = event_data.find("title").text
organiser = Organiser.objects.create(
name=name, slug=event_data.find("acronym").text
)
event = Event(
name=name,
organiser=organiser,
slug=event_data.find("acronym").text,
date_from=dt.datetime.strptime(
event_data.find("start").text, "%Y-%m-%d"
).date(),
date_to=dt.datetime.strptime(
event_data.find("end").text, "%Y-%m-%d"
).date(),
)
event.save()
self.create_team(name + " Organisers", organiser)
return event
def create_team(self, name, organiser):
return Team.objects.create(
name=name,
organiser=organiser,
can_change_teams=True,
can_change_organiser_settings=True,
can_change_event_settings=True,
can_change_submissions=True,
)
| true | true |
f71c54fd629fe7b2eed4f7bb9d796bb0a3a708f2 | 17,077 | py | Python | ignite/contrib/engines/common.py | VinhLoiIT/ignite | 3b2b9655ea9f80ce49b8a9f1c2d72f80e2a95f56 | [
"BSD-3-Clause"
] | 1 | 2020-11-08T16:27:24.000Z | 2020-11-08T16:27:24.000Z | ignite/contrib/engines/common.py | VinhLoiIT/ignite | 3b2b9655ea9f80ce49b8a9f1c2d72f80e2a95f56 | [
"BSD-3-Clause"
] | null | null | null | ignite/contrib/engines/common.py | VinhLoiIT/ignite | 3b2b9655ea9f80ce49b8a9f1c2d72f80e2a95f56 | [
"BSD-3-Clause"
] | null | null | null | from functools import partial
import warnings
import numbers
from collections.abc import Sequence, Mapping
import torch
import torch.distributed as dist
from ignite.engine import Engine, Events
from ignite.metrics import RunningAverage
from ignite.handlers import TerminateOnNan, ModelCheckpoint, EarlyStopping
from ignite.contrib.metrics import GpuInfo
from ignite.contrib.handlers import ProgressBar
from ignite.contrib.handlers import VisdomLogger
from ignite.contrib.handlers import TensorboardLogger, global_step_from_engine
import ignite.contrib.handlers.tensorboard_logger as tb_logger_module
import ignite.contrib.handlers.visdom_logger as visdom_logger_module
from ignite.contrib.handlers import MLflowLogger
import ignite.contrib.handlers.mlflow_logger as mlflow_logger_module
from ignite.contrib.handlers import PolyaxonLogger
import ignite.contrib.handlers.polyaxon_logger as polyaxon_logger_module
def setup_common_training_handlers(
trainer,
train_sampler=None,
to_save=None,
save_every_iters=1000,
output_path=None,
lr_scheduler=None,
with_gpu_stats=False,
output_names=None,
with_pbars=True,
with_pbar_on_iters=True,
log_every_iters=100,
device="cuda",
):
"""Helper method to setup trainer with common handlers (it also supports distributed configuration):
- :class:`~ignite.handlers.TerminateOnNan`
- handler to setup learning rate scheduling
- :class:`~ignite.handlers.ModelCheckpoint`
- :class:`~ignite.metrics.RunningAverage` on `update_function` output
- Two progress bars on epochs and optionally on iterations
Args:
trainer (Engine): trainer engine. Output of trainer's `update_function` should be a dictionary
or sequence or a single tensor.
train_sampler (torch.utils.data.DistributedSampler, optional): Optional distributed sampler used to call
`set_epoch` method on epoch started event.
to_save (dict, optional): dictionary with objects to save in the checkpoint. This is used with
:class:`~ignite.handlers.ModelCheckpoint`.
save_every_iters (int, optional): saving interval. By default, `to_save` objects are stored
each 1000 iterations.
output_path (str, optional): output path to indicate where `to_save` objects are stored.
lr_scheduler (ParamScheduler or subclass of `torch.optim.lr_scheduler._LRScheduler`): learning rate scheduler
as native torch LRScheduler or ignite's parameter scheduler.
with_gpu_stats (bool, optional): if True, :class:`~ignite.contrib.metrics.handlers.GpuInfo` is attached to the
trainer. This requires `pynvml` package to be installed.
output_names (list/tuple): list of names associated with `update_function` output dictionary.
with_pbars (bool, optional): if True, two progress bars on epochs and optionally on iterations are attached
with_pbar_on_iters (bool, optional): if True, a progress bar on iterations is attached to the trainer.
log_every_iters (int, optional): logging interval for :class:`~ignite.contrib.metrics.handlers.GpuInfo` and for
epoch-wise progress bar.
device (str of torch.device, optional): Optional device specification in case of distributed computation usage.
"""
kwargs = dict(
to_save=to_save,
save_every_iters=save_every_iters,
output_path=output_path,
lr_scheduler=lr_scheduler,
with_gpu_stats=with_gpu_stats,
output_names=output_names,
with_pbars=with_pbars,
with_pbar_on_iters=with_pbar_on_iters,
log_every_iters=log_every_iters,
device=device,
)
if dist.is_available() and dist.is_initialized():
_setup_common_distrib_training_handlers(trainer, train_sampler=train_sampler, **kwargs)
else:
if train_sampler is not None:
warnings.warn(
"Argument train_sampler distributed sampler used to call `set_epoch` method on epoch "
"started event, but no distributed setting detected",
UserWarning,
)
_setup_common_training_handlers(trainer, **kwargs)
setup_common_distrib_training_handlers = setup_common_training_handlers
def _setup_common_training_handlers(
trainer,
to_save=None,
save_every_iters=1000,
output_path=None,
lr_scheduler=None,
with_gpu_stats=True,
output_names=None,
with_pbars=True,
with_pbar_on_iters=True,
log_every_iters=100,
device="cuda",
):
trainer.add_event_handler(Events.ITERATION_COMPLETED, TerminateOnNan())
if lr_scheduler is not None:
if isinstance(lr_scheduler, torch.optim.lr_scheduler._LRScheduler):
trainer.add_event_handler(Events.ITERATION_COMPLETED, lambda engine: lr_scheduler.step())
else:
trainer.add_event_handler(Events.ITERATION_STARTED, lr_scheduler)
trainer.add_event_handler(Events.EPOCH_COMPLETED, empty_cuda_cache)
if to_save is not None:
if output_path is None:
raise ValueError("If to_save argument is provided then output_path argument should be also defined")
checkpoint_handler = ModelCheckpoint(dirname=output_path, filename_prefix="training")
trainer.add_event_handler(Events.ITERATION_COMPLETED(every=save_every_iters), checkpoint_handler, to_save)
if with_gpu_stats:
GpuInfo().attach(trainer, name="gpu", event_name=Events.ITERATION_COMPLETED(every=log_every_iters))
if output_names is not None:
def output_transform(x, index, name):
if isinstance(x, Mapping):
return x[name]
elif isinstance(x, Sequence):
return x[index]
elif isinstance(x, (torch.Tensor, numbers.Number)):
return x
else:
raise ValueError(
"Unhandled type of update_function's output. "
"It should either mapping or sequence, but given {}".format(type(x))
)
for i, n in enumerate(output_names):
RunningAverage(
output_transform=partial(output_transform, index=i, name=n), epoch_bound=False, device=device
).attach(trainer, n)
if with_pbars:
if with_pbar_on_iters:
ProgressBar(persist=False).attach(
trainer, metric_names="all", event_name=Events.ITERATION_COMPLETED(every=log_every_iters)
)
ProgressBar(persist=True, bar_format="").attach(
trainer, event_name=Events.EPOCH_STARTED, closing_event_name=Events.COMPLETED
)
def _setup_common_distrib_training_handlers(
trainer,
train_sampler=None,
to_save=None,
save_every_iters=1000,
output_path=None,
lr_scheduler=None,
with_gpu_stats=True,
output_names=None,
with_pbars=True,
with_pbar_on_iters=True,
log_every_iters=100,
device="cuda",
):
if not (dist.is_available() and dist.is_initialized()):
raise RuntimeError("Distributed setting is not initialized, please call `dist.init_process_group` before.")
_setup_common_training_handlers(
trainer,
to_save=None,
lr_scheduler=lr_scheduler,
with_gpu_stats=with_gpu_stats,
output_names=output_names,
with_pbars=(dist.get_rank() == 0) and with_pbars,
with_pbar_on_iters=with_pbar_on_iters,
log_every_iters=log_every_iters,
device=device,
)
if train_sampler is not None:
if not callable(getattr(train_sampler, "set_epoch", None)):
raise TypeError("Train sampler should have `set_epoch` method")
@trainer.on(Events.EPOCH_STARTED)
def distrib_set_epoch(engine):
train_sampler.set_epoch(engine.state.epoch - 1)
if dist.get_rank() == 0:
if to_save is not None:
if output_path is None:
raise ValueError("If to_save argument is provided then output_path argument should be also defined")
checkpoint_handler = ModelCheckpoint(dirname=output_path, filename_prefix="training")
trainer.add_event_handler(Events.ITERATION_COMPLETED(every=save_every_iters), checkpoint_handler, to_save)
def empty_cuda_cache(_):
torch.cuda.empty_cache()
import gc
gc.collect()
def setup_any_logging(logger, logger_module, trainer, optimizers, evaluators, log_every_iters):
    """Attach a contrib logger (TensorBoard/Visdom/MLflow/Polyaxon) to the engines.

    Args:
        logger: instantiated logger (e.g. TensorboardLogger).
        logger_module: module providing OutputHandler / OptimizerParamsHandler.
        trainer (Engine): trainer whose running metrics are logged every
            `log_every_iters` iterations.
        optimizers: single torch optimizer or dict of optimizers (tags as keys), or None.
        evaluators: single evaluator Engine or dict of evaluators (tags as keys), or None.
        log_every_iters (int or None): iteration logging interval; None means 1.

    Raises:
        TypeError: if `optimizers` / `evaluators` have an unsupported type.
    """
    if optimizers is not None:
        from torch.optim.optimizer import Optimizer
        if not isinstance(optimizers, (Optimizer, Mapping)):
            raise TypeError("Argument optimizers should be either a single optimizer or a dictionary of optimizers")
    if evaluators is not None:
        if not isinstance(evaluators, (Engine, Mapping)):
            # Fixed: previous message was copy-pasted from the optimizers check.
            raise TypeError("Argument evaluators should be either a single engine or a dictionary of evaluators")
    if log_every_iters is None:
        log_every_iters = 1
    logger.attach(
        trainer,
        log_handler=logger_module.OutputHandler(tag="training", metric_names="all"),
        event_name=Events.ITERATION_COMPLETED(every=log_every_iters),
    )
    if optimizers is not None:
        # Log optimizer parameters (learning rate) per tag
        if isinstance(optimizers, Optimizer):
            optimizers = {None: optimizers}
        for k, optimizer in optimizers.items():
            logger.attach(
                trainer,
                log_handler=logger_module.OptimizerParamsHandler(optimizer, param_name="lr", tag=k),
                event_name=Events.ITERATION_STARTED(every=log_every_iters),
            )
    if evaluators is not None:
        # Log evaluation metrics, using the trainer's global step on the x-axis
        if isinstance(evaluators, Engine):
            evaluators = {"validation": evaluators}
        for k, evaluator in evaluators.items():
            gst = global_step_from_engine(trainer)
            logger.attach(
                evaluator,
                log_handler=logger_module.OutputHandler(tag=k, metric_names="all", global_step_transform=gst),
                event_name=Events.COMPLETED,
            )
def setup_tb_logging(output_path, trainer, optimizers=None, evaluators=None, log_every_iters=100):
    """Set up TensorBoard logging for a trainer and optional evaluators.

    Logs running training metrics, learning rate(s) and evaluation metrics.

    Args:
        output_path (str): directory for the TensorBoard event files.
        trainer (Engine): trainer engine.
        optimizers: single optimizer or dict of optimizers (keys become tags), optional.
        evaluators: single evaluator Engine or dict of evaluators (keys become tags), optional.
        log_every_iters (int, optional): iteration logging interval; 1 or None logs every iteration.

    Returns:
        TensorboardLogger
    """
    logger = TensorboardLogger(log_dir=output_path)
    setup_any_logging(
        logger, tb_logger_module, trainer, optimizers, evaluators, log_every_iters=log_every_iters
    )
    return logger
def setup_visdom_logging(trainer, optimizers=None, evaluators=None, log_every_iters=100, **kwargs):
    """Set up Visdom logging for a trainer and optional evaluators.

    Logs running training metrics, learning rate(s) and evaluation metrics.

    Args:
        trainer (Engine): trainer engine.
        optimizers: single optimizer or dict of optimizers (keys become tags), optional.
        evaluators: single evaluator Engine or dict of evaluators (keys become tags), optional.
        log_every_iters (int, optional): iteration logging interval; 1 or None logs every iteration.
        **kwargs: forwarded to the VisdomLogger constructor.

    Returns:
        VisdomLogger
    """
    logger = VisdomLogger(**kwargs)
    setup_any_logging(
        logger, visdom_logger_module, trainer, optimizers, evaluators, log_every_iters=log_every_iters
    )
    return logger
def setup_mlflow_logging(trainer, optimizers=None, evaluators=None, log_every_iters=100):
    """Set up MLflow logging for a trainer and optional evaluators.

    Logs running training metrics, learning rate(s) and evaluation metrics.

    Args:
        trainer (Engine): trainer engine.
        optimizers: single optimizer or dict of optimizers (keys become tags), optional.
        evaluators: single evaluator Engine or dict of evaluators (keys become tags), optional.
        log_every_iters (int, optional): iteration logging interval; 1 or None logs every iteration.

    Returns:
        MLflowLogger
    """
    logger = MLflowLogger()
    setup_any_logging(
        logger, mlflow_logger_module, trainer, optimizers, evaluators, log_every_iters=log_every_iters
    )
    return logger
def setup_plx_logging(trainer, optimizers=None, evaluators=None, log_every_iters=100):
    """Method to setup Polyaxon logging on trainer and a list of evaluators. Logged metrics are:
        - Training metrics, e.g. running average loss values
        - Learning rate(s)
        - Evaluation metrics

    (Fixed: docstring previously said "MLflow logging", copy-pasted from setup_mlflow_logging.)

    Args:
        trainer (Engine): trainer engine
        optimizers (torch.optim.Optimizer or dict of torch.optim.Optimizer, optional): single or dictionary of
            torch optimizers. If a dictionary, keys are used as tags arguments for logging.
        evaluators (Engine or dict of Engine, optional): single or dictionary of evaluators. If a dictionary,
            keys are used as tags arguments for logging.
        log_every_iters (int, optional): interval for loggers attached to iteration events. To log every iteration,
            value can be set to 1 or None.

    Returns:
        PolyaxonLogger
    """
    plx_logger = PolyaxonLogger()
    setup_any_logging(
        plx_logger, polyaxon_logger_module, trainer, optimizers, evaluators, log_every_iters=log_every_iters
    )
    return plx_logger
def get_default_score_fn(metric_name):
    """Return a score function that reads `metric_name` from an engine's metrics.

    The returned callable is suitable as `score_function` for ModelCheckpoint
    or EarlyStopping.
    """
    def score_fn(engine):
        return engine.state.metrics[metric_name]

    return score_fn
def save_best_model_by_val_score(output_path, evaluator, model, metric_name, n_saved=3, trainer=None, tag="val"):
    """Attach a ModelCheckpoint to `evaluator` that keeps the best models by score.

    The score is read from `evaluator.state.metrics[metric_name]` and the
    checkpoint files are named with the `{tag}_{metric_name}` score.

    Args:
        output_path (str): directory where best models are stored.
        evaluator (Engine): evaluation engine providing the score.
        model (nn.Module): model to store.
        metric_name (str): key into `evaluator.state.metrics`.
        n_saved (int, optional): how many best models to keep.
        trainer (Engine, optional): used to stamp the epoch into the filename.
        tag (str, optional): score name prefix, default "val".
    """
    # Use the trainer's global step (epoch) in filenames when a trainer is given.
    gst = global_step_from_engine(trainer) if trainer is not None else None
    handler = ModelCheckpoint(
        dirname=output_path,
        filename_prefix="best",
        n_saved=n_saved,
        global_step_transform=gst,
        score_name="{}_{}".format(tag, metric_name.lower()),
        score_function=get_default_score_fn(metric_name),
    )
    evaluator.add_event_handler(Events.COMPLETED, handler, {"model": model})
def add_early_stopping_by_val_score(patience, evaluator, trainer, metric_name):
    """Attach an EarlyStopping handler driven by a metric from `evaluator`.

    Args:
        patience (int): number of evaluations to wait without improvement
            before stopping the run.
        evaluator (Engine): evaluation engine providing the score.
        trainer (Engine): trainer engine that gets terminated.
        metric_name (str): key into `evaluator.state.metrics` used as the score.
    """
    score_fn = get_default_score_fn(metric_name)
    stopper = EarlyStopping(patience=patience, score_function=score_fn, trainer=trainer)
    evaluator.add_event_handler(Events.COMPLETED, stopper)
| 42.061576 | 119 | 0.701997 | from functools import partial
import warnings
import numbers
from collections.abc import Sequence, Mapping
import torch
import torch.distributed as dist
from ignite.engine import Engine, Events
from ignite.metrics import RunningAverage
from ignite.handlers import TerminateOnNan, ModelCheckpoint, EarlyStopping
from ignite.contrib.metrics import GpuInfo
from ignite.contrib.handlers import ProgressBar
from ignite.contrib.handlers import VisdomLogger
from ignite.contrib.handlers import TensorboardLogger, global_step_from_engine
import ignite.contrib.handlers.tensorboard_logger as tb_logger_module
import ignite.contrib.handlers.visdom_logger as visdom_logger_module
from ignite.contrib.handlers import MLflowLogger
import ignite.contrib.handlers.mlflow_logger as mlflow_logger_module
from ignite.contrib.handlers import PolyaxonLogger
import ignite.contrib.handlers.polyaxon_logger as polyaxon_logger_module
def setup_common_training_handlers(
    trainer,
    train_sampler=None,
    to_save=None,
    save_every_iters=1000,
    output_path=None,
    lr_scheduler=None,
    with_gpu_stats=False,
    output_names=None,
    with_pbars=True,
    with_pbar_on_iters=True,
    log_every_iters=100,
    device="cuda",
):
    """Attach common training handlers (NaN termination, LR scheduling,
    checkpointing, GPU stats, running-average metrics, progress bars) to
    `trainer`, dispatching to the distributed variant when torch.distributed
    is initialized.

    Args:
        trainer (Engine): trainer engine.
        train_sampler: distributed sampler with a `set_epoch` method; only used
            in a distributed setting, otherwise a warning is emitted.
        to_save (dict, optional): objects to checkpoint; requires `output_path`.
        save_every_iters (int, optional): checkpointing interval in iterations.
        output_path (str, optional): checkpoint directory.
        lr_scheduler: torch LR scheduler or ignite param scheduler, optional.
        with_gpu_stats (bool, optional): attach GpuInfo metrics.
        output_names (list of str, optional): names for running averages of
            the trainer's output.
        with_pbars (bool, optional): attach progress bars.
        with_pbar_on_iters (bool, optional): also show a per-iteration bar.
        log_every_iters (int, optional): interval for iteration-level logging.
        device (str, optional): device for the running-average metrics.
    """
    kwargs = dict(
        to_save=to_save,
        save_every_iters=save_every_iters,
        output_path=output_path,
        lr_scheduler=lr_scheduler,
        with_gpu_stats=with_gpu_stats,
        output_names=output_names,
        with_pbars=with_pbars,
        with_pbar_on_iters=with_pbar_on_iters,
        log_every_iters=log_every_iters,
        device=device,
    )
    if dist.is_available() and dist.is_initialized():
        _setup_common_distrib_training_handlers(trainer, train_sampler=train_sampler, **kwargs)
    else:
        if train_sampler is not None:
            warnings.warn(
                "Argument train_sampler distributed sampler used to call `set_epoch` method on epoch "
                "started event, but no distributed setting detected",
                UserWarning,
            )
        _setup_common_training_handlers(trainer, **kwargs)
# Backward-compatible alias: the same entry point handles the distributed case.
setup_common_distrib_training_handlers = setup_common_training_handlers
def _setup_common_training_handlers(
    trainer,
    to_save=None,
    save_every_iters=1000,
    output_path=None,
    lr_scheduler=None,
    with_gpu_stats=True,
    output_names=None,
    with_pbars=True,
    with_pbar_on_iters=True,
    log_every_iters=100,
    device="cuda",
):
    """Non-distributed implementation of the common-handler setup.

    Registers, in order: TerminateOnNan, LR scheduler stepping, CUDA cache
    cleanup on epoch end, checkpointing, GPU stats, running-average output
    metrics, and progress bars. See `setup_common_training_handlers` for
    argument documentation.
    """
    # Stop training as soon as the trainer output contains NaN/inf.
    trainer.add_event_handler(Events.ITERATION_COMPLETED, TerminateOnNan())
    if lr_scheduler is not None:
        # Torch schedulers are stepped after each iteration; ignite param
        # schedulers are themselves event handlers fired at iteration start.
        if isinstance(lr_scheduler, torch.optim.lr_scheduler._LRScheduler):
            trainer.add_event_handler(Events.ITERATION_COMPLETED, lambda engine: lr_scheduler.step())
        else:
            trainer.add_event_handler(Events.ITERATION_STARTED, lr_scheduler)
    trainer.add_event_handler(Events.EPOCH_COMPLETED, empty_cuda_cache)
    if to_save is not None:
        if output_path is None:
            raise ValueError("If to_save argument is provided then output_path argument should be also defined")
        checkpoint_handler = ModelCheckpoint(dirname=output_path, filename_prefix="training")
        trainer.add_event_handler(Events.ITERATION_COMPLETED(every=save_every_iters), checkpoint_handler, to_save)
    if with_gpu_stats:
        GpuInfo().attach(trainer, name="gpu", event_name=Events.ITERATION_COMPLETED(every=log_every_iters))
    if output_names is not None:
        # Pull the named value out of the trainer output, which may be a
        # mapping (by name), a sequence (by position), or a bare scalar/tensor.
        def output_transform(x, index, name):
            if isinstance(x, Mapping):
                return x[name]
            elif isinstance(x, Sequence):
                return x[index]
            elif isinstance(x, (torch.Tensor, numbers.Number)):
                return x
            else:
                raise ValueError(
                    "Unhandled type of update_function's output. "
                    "It should either mapping or sequence, but given {}".format(type(x))
                )
        for i, n in enumerate(output_names):
            RunningAverage(
                output_transform=partial(output_transform, index=i, name=n), epoch_bound=False, device=device
            ).attach(trainer, n)
    if with_pbars:
        # Optional per-iteration bar showing metrics, plus a persistent
        # epoch-level bar spanning the whole run.
        if with_pbar_on_iters:
            ProgressBar(persist=False).attach(
                trainer, metric_names="all", event_name=Events.ITERATION_COMPLETED(every=log_every_iters)
            )
        ProgressBar(persist=True, bar_format="").attach(
            trainer, event_name=Events.EPOCH_STARTED, closing_event_name=Events.COMPLETED
        )
def _setup_common_distrib_training_handlers(
    trainer,
    train_sampler=None,
    to_save=None,
    save_every_iters=1000,
    output_path=None,
    lr_scheduler=None,
    with_gpu_stats=True,
    output_names=None,
    with_pbars=True,
    with_pbar_on_iters=True,
    log_every_iters=100,
    device="cuda",
):
    """Distributed implementation of the common-handler setup.

    Requires an initialized torch.distributed process group. Delegates to
    `_setup_common_training_handlers` and adds rank-aware behavior: the
    sampler epoch hook on every rank, checkpointing and progress bars on
    rank 0 only.
    """
    if not (dist.is_available() and dist.is_initialized()):
        raise RuntimeError("Distributed setting is not initialized, please call `dist.init_process_group` before.")
    # to_save is deliberately None here; checkpointing is registered below on
    # rank 0 only so that a single process writes checkpoint files.
    _setup_common_training_handlers(
        trainer,
        to_save=None,
        lr_scheduler=lr_scheduler,
        with_gpu_stats=with_gpu_stats,
        output_names=output_names,
        with_pbars=(dist.get_rank() == 0) and with_pbars,
        with_pbar_on_iters=with_pbar_on_iters,
        log_every_iters=log_every_iters,
        device=device,
    )
    if train_sampler is not None:
        if not callable(getattr(train_sampler, "set_epoch", None)):
            raise TypeError("Train sampler should have `set_epoch` method")
        # Ignite epochs are 1-based, sampler epochs 0-based, hence the -1.
        @trainer.on(Events.EPOCH_STARTED)
        def distrib_set_epoch(engine):
            train_sampler.set_epoch(engine.state.epoch - 1)
    if dist.get_rank() == 0:
        if to_save is not None:
            if output_path is None:
                raise ValueError("If to_save argument is provided then output_path argument should be also defined")
            checkpoint_handler = ModelCheckpoint(dirname=output_path, filename_prefix="training")
            trainer.add_event_handler(Events.ITERATION_COMPLETED(every=save_every_iters), checkpoint_handler, to_save)
def empty_cuda_cache(_):
    """Release cached CUDA memory and run the Python garbage collector.

    Intended as an EPOCH_COMPLETED handler; the engine argument is unused.
    """
    import gc

    torch.cuda.empty_cache()
    gc.collect()
def setup_any_logging(logger, logger_module, trainer, optimizers, evaluators, log_every_iters):
    """Attach a contrib logger (TensorBoard/Visdom/MLflow/Polyaxon) to the engines.

    Args:
        logger: instantiated logger (e.g. TensorboardLogger).
        logger_module: module providing OutputHandler / OptimizerParamsHandler.
        trainer (Engine): trainer whose running metrics are logged every
            `log_every_iters` iterations.
        optimizers: single torch optimizer or dict of optimizers (tags as keys), or None.
        evaluators: single evaluator Engine or dict of evaluators (tags as keys), or None.
        log_every_iters (int or None): iteration logging interval; None means 1.

    Raises:
        TypeError: if `optimizers` / `evaluators` have an unsupported type.
    """
    if optimizers is not None:
        from torch.optim.optimizer import Optimizer
        if not isinstance(optimizers, (Optimizer, Mapping)):
            raise TypeError("Argument optimizers should be either a single optimizer or a dictionary of optimizers")
    if evaluators is not None:
        if not isinstance(evaluators, (Engine, Mapping)):
            # Fixed: previous message was copy-pasted from the optimizers check.
            raise TypeError("Argument evaluators should be either a single engine or a dictionary of evaluators")
    if log_every_iters is None:
        log_every_iters = 1
    logger.attach(
        trainer,
        log_handler=logger_module.OutputHandler(tag="training", metric_names="all"),
        event_name=Events.ITERATION_COMPLETED(every=log_every_iters),
    )
    if optimizers is not None:
        # Log optimizer parameters (learning rate) per tag
        if isinstance(optimizers, Optimizer):
            optimizers = {None: optimizers}
        for k, optimizer in optimizers.items():
            logger.attach(
                trainer,
                log_handler=logger_module.OptimizerParamsHandler(optimizer, param_name="lr", tag=k),
                event_name=Events.ITERATION_STARTED(every=log_every_iters),
            )
    if evaluators is not None:
        # Log evaluation metrics, using the trainer's global step on the x-axis
        if isinstance(evaluators, Engine):
            evaluators = {"validation": evaluators}
        for k, evaluator in evaluators.items():
            gst = global_step_from_engine(trainer)
            logger.attach(
                evaluator,
                log_handler=logger_module.OutputHandler(tag=k, metric_names="all", global_step_transform=gst),
                event_name=Events.COMPLETED,
            )
def setup_tb_logging(output_path, trainer, optimizers=None, evaluators=None, log_every_iters=100):
    """Attach TensorBoard logging (training metrics, LRs, evaluation metrics).

    Args:
        output_path (str): directory for event files.
        trainer (Engine): trainer engine.
        optimizers: single optimizer or dict of optimizers, optional.
        evaluators: single evaluator Engine or dict of evaluators, optional.
        log_every_iters (int, optional): iteration logging interval.

    Returns:
        TensorboardLogger
    """
    tb_logger = TensorboardLogger(log_dir=output_path)
    setup_any_logging(tb_logger, tb_logger_module, trainer, optimizers, evaluators, log_every_iters=log_every_iters)
    return tb_logger
def setup_visdom_logging(trainer, optimizers=None, evaluators=None, log_every_iters=100, **kwargs):
    """Attach Visdom logging (training metrics, LRs, evaluation metrics).

    Args:
        trainer (Engine): trainer engine.
        optimizers: single optimizer or dict of optimizers, optional.
        evaluators: single evaluator Engine or dict of evaluators, optional.
        log_every_iters (int, optional): iteration logging interval.
        **kwargs: forwarded to the VisdomLogger constructor.

    Returns:
        VisdomLogger
    """
    vis_logger = VisdomLogger(**kwargs)
    setup_any_logging(
        vis_logger, visdom_logger_module, trainer, optimizers, evaluators, log_every_iters=log_every_iters
    )
    return vis_logger
def setup_mlflow_logging(trainer, optimizers=None, evaluators=None, log_every_iters=100):
    """Attach MLflow logging (training metrics, LRs, evaluation metrics).

    Args:
        trainer (Engine): trainer engine.
        optimizers: single optimizer or dict of optimizers, optional.
        evaluators: single evaluator Engine or dict of evaluators, optional.
        log_every_iters (int, optional): iteration logging interval.

    Returns:
        MLflowLogger
    """
    mlflow_logger = MLflowLogger()
    setup_any_logging(
        mlflow_logger, mlflow_logger_module, trainer, optimizers, evaluators, log_every_iters=log_every_iters
    )
    return mlflow_logger
def setup_plx_logging(trainer, optimizers=None, evaluators=None, log_every_iters=100):
    """Attach Polyaxon logging (training metrics, LRs, evaluation metrics).

    Args:
        trainer (Engine): trainer engine.
        optimizers: single optimizer or dict of optimizers, optional.
        evaluators: single evaluator Engine or dict of evaluators, optional.
        log_every_iters (int, optional): iteration logging interval.

    Returns:
        PolyaxonLogger
    """
    plx_logger = PolyaxonLogger()
    setup_any_logging(
        plx_logger, polyaxon_logger_module, trainer, optimizers, evaluators, log_every_iters=log_every_iters
    )
    return plx_logger
def get_default_score_fn(metric_name):
    """Build a score function that extracts `metric_name` from engine.state.metrics."""
    return lambda engine: engine.state.metrics[metric_name]
def save_best_model_by_val_score(output_path, evaluator, model, metric_name, n_saved=3, trainer=None, tag="val"):
    """Attach a ModelCheckpoint to `evaluator` keeping the best models by
    `evaluator.state.metrics[metric_name]`, named with the `{tag}_{metric_name}` score.

    Args:
        output_path (str): directory where best models are stored.
        evaluator (Engine): evaluation engine providing the score.
        model (nn.Module): model to store.
        metric_name (str): key into `evaluator.state.metrics`.
        n_saved (int, optional): number of best models to keep.
        trainer (Engine, optional): used to stamp the epoch into the filename.
        tag (str, optional): score name prefix, default "val".
    """
    global_step_transform = None
    if trainer is not None:
        global_step_transform = global_step_from_engine(trainer)
    best_model_handler = ModelCheckpoint(
        dirname=output_path,
        filename_prefix="best",
        n_saved=n_saved,
        global_step_transform=global_step_transform,
        score_name="{}_{}".format(tag, metric_name.lower()),
        score_function=get_default_score_fn(metric_name),
    )
    evaluator.add_event_handler(Events.COMPLETED, best_model_handler, {"model": model,})
def add_early_stopping_by_val_score(patience, evaluator, trainer, metric_name):
    """Attach an EarlyStopping handler to `evaluator` that stops `trainer` when
    `evaluator.state.metrics[metric_name]` stops improving for `patience` evaluations.
    """
    es_handler = EarlyStopping(patience=patience, score_function=get_default_score_fn(metric_name), trainer=trainer)
    evaluator.add_event_handler(Events.COMPLETED, es_handler)
| true | true |
f71c550e53e9a2417f70c36c35d7b793ff32d391 | 30,900 | py | Python | mindspore/python/mindspore/common/parameter.py | zhz44/mindspore | 6044d34074c8505dd4b02c0a05419cbc32a43f86 | [
"Apache-2.0"
] | null | null | null | mindspore/python/mindspore/common/parameter.py | zhz44/mindspore | 6044d34074c8505dd4b02c0a05419cbc32a43f86 | [
"Apache-2.0"
] | null | null | null | mindspore/python/mindspore/common/parameter.py | zhz44/mindspore | 6044d34074c8505dd4b02c0a05419cbc32a43f86 | [
"Apache-2.0"
] | null | null | null | # Copyright 2020-2022 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Parameter for cell."""
from copy import copy
import numbers
import numpy as np
from mindspore import log as logger
from .._c_expression import ParamInfo
from . import dtype as mstype
from .. import context
from ..parallel._utils import _get_parallel_mode
from .initializer import initializer
from .tensor import Tensor
from .._checkparam import Validator
from .._c_expression import Tensor as Tensor_
from ..parallel._tensor import _get_slice_index
from ..parallel._auto_parallel_context import auto_parallel_context
from ..parallel._ps_context import _is_role_worker, _is_role_pserver, _is_role_sched, _clone_hash_table, _is_fl_mode
from ..parallel._ps_context import _reinsert_hash_table_size
from ..parallel._ps_context import _insert_weight_init_info, _insert_accumu_init_info
from .seed import _get_global_and_op_seed
# Public API of this module.
__all__ = ['Parameter', 'ParameterTuple']
# Fallback name used when a Parameter is constructed without an explicit name.
PARAMETER_NAME_DEFAULT = "Parameter"
# Upper bound on the length of a parameter name.
PARAMETER_NAME_PREFIX_MAX_LEN = 1024
def _is_in_parallel_mode():
    """Return True when the auto-parallel context is in (semi-)auto parallel mode."""
    mode = auto_parallel_context().get_parallel_mode()
    return mode in ("semi_auto_parallel", "auto_parallel")
def init_to_value(init):
    """
    Map an initializer spec to a scalar value.

    'zeros' -> 0.0, 'ones' -> 1.0; any number -> float(number).

    Returns:
        Value of the initializer.

    Raises:
        ValueError: if `init` is an unsupported string, or neither a number nor a string.
    """
    if isinstance(init, str):
        named = {'zeros': 0.0, 'ones': 1.0}
        if init in named:
            return named[init]
        raise ValueError("The argument 'init' should be one of values in ['zeros', 'ones'].")
    if isinstance(init, numbers.Number):
        return float(init)
    raise ValueError("The argument 'init' should be number or string, but got {}.".format(type(init)))
class Parameter(Tensor_):
"""
`Parameter` is a `Tensor` subclass, when they are assigned as Cell attributes they are automatically added to
the list of its parameters, and will appear e.g. in `cell.get_parameters()` iterator.
Note:
In auto_parallel mode of "semi_auto_parallel" and "auto_parallel", if init `Parameter` by
a `Tensor`, the type of Parameter will be `Tensor`. `Tensor`
will save the shape and type info of a tensor with no memory usage. The shape can be changed while
compiling for auto-parallel. Call `init_data` will return a Tensor Parameter with initialized data.
If there is an operator in the network that requires part of the inputs to be Parameter,
then the Parameters as this part of the inputs are not allowed to be cast.
Give each `Parameter` a unique name to facilitate subsequent operations and updates.
If there are two or more `Parameter` objects with the same name in a network,
will be prompted to set a unique name when defining.
Args:
default_input (Union[Tensor, int, float, numpy.ndarray, list]): Parameter data,
to initialize the parameter data.
name (str): Name of the parameter. Default: None.
1) If the parameter is not given a name, the default name is its variable name. For example, the name of
param_a below is name_a, and the name of param_b is the variable name param_b.
.. code-block::
self.param_a = Parameter(Tensor([1], ms.float32), name="name_a")
self.param_b = Parameter(Tensor([2], ms.float32))
2) If parameter in list or tuple is not given a name, will give it a unique name. For example, the names of
parameters below are **Parameter$1** and **Parameter$2**.
.. code-block::
self.param_list = [Parameter(Tensor([3], ms.float32)),
Parameter(Tensor([4], ms.float32))]
3) If the parameter is given a name, and the same name exists between different parameters, an exception
will be thrown. For example, "its name 'name_a' already exists." will be thrown.
.. code-block::
self.param_a = Parameter(Tensor([1], ms.float32), name="name_a")
self.param_tuple = (Parameter(Tensor([5], ms.float32), name="name_a"),
Parameter(Tensor([6], ms.float32)))
4) If a parameter appear multiple times in list or tuple, check the name of the object only once. For
example, the following example will not throw an exception.
.. code-block::
self.param_a = Parameter(Tensor([1], ms.float32), name="name_a")
self.param_tuple = (self.param_a, self.param_a)
requires_grad (bool): True if the parameter requires gradient. Default: True.
layerwise_parallel (bool): When layerwise_parallel is true in data/hybrid parallel mode,
broadcast and gradients communication would not be applied to parameters. Default: False.
parallel_optimizer (bool): It is used to filter the weight shard operation in semi auto or auto parallel
mode. It works only when enable parallel optimizer in `mindspore.context.set_auto_parallel_context()`.
Default: True.
Examples:
>>> import numpy as np
>>> from mindspore import Parameter, Tensor
>>> import mindspore.ops as ops
>>> import mindspore.nn as nn
>>> import mindspore
>>>
>>> class Net(nn.Cell):
... def __init__(self):
... super(Net, self).__init__()
... self.matmul = ops.MatMul()
... self.weight = Parameter(Tensor(np.ones((1, 2)), mindspore.float32), name="w", requires_grad=True)
...
... def construct(self, x):
... out = self.matmul(self.weight, x)
... return out
>>> net = Net()
>>> x = Tensor(np.ones((2, 1)), mindspore.float32)
>>> print(net(x))
[[2.]]
>>> net.weight.set_data(Tensor(np.zeros((1, 2)), mindspore.float32))
>>> print(net(x))
[[0.]]
"""
__base_type__ = {}
    def __new__(cls, default_input, *args, **kwargs):
        """Create the instance on a dynamically generated subclass combining
        Parameter with the concrete data class (Tensor), and record whether the
        input carried a lazy initializer.
        """
        init_data_flag = bool(isinstance(default_input, Tensor) and default_input.has_init)
        input_class, *class_init_args = Parameter._get_parameter_new_args(default_input)
        new_type = Parameter._get_base_class(input_class)
        obj = input_class.__new__(new_type)
        input_class.__init__(obj, *class_init_args)
        # it's better to make the Initializer a kind of tensor.
        obj.init_mode = None
        obj.is_default_input_init = init_data_flag
        # Keep the original (uninitialized) input around so init_data can be deferred.
        if obj.has_init:
            obj.init_mode = default_input
        return obj
def __reduce_ex__(self, _):
data = self
if self.init_mode is not None:
data = self.init_mode
else:
# cast to break deep infinite loop while deepcopy
data = Tensor(self)
return (
Parameter, (data, self.name, self.requires_grad, self.layerwise_parallel))
    def __init__(self, default_input, name=None, requires_grad=True, layerwise_parallel=False, parallel_optimizer=True):
        """Initialize parameter metadata and the underlying tensor storage.

        Args:
            default_input: Tensor / int / float / numpy.ndarray / list data.
            name (str): parameter name; None falls back to a default name.
            requires_grad (bool): whether gradients are computed for this parameter.
            layerwise_parallel (bool): skip broadcast/gradient communication in
                data/hybrid parallel mode.
            parallel_optimizer (bool): allow weight sharding by the parallel optimizer.

        Raises:
            TypeError: if `default_input` has an unsupported type.
        """
        self.param_info = ParamInfo()
        self.init_in_server = False
        self.cache_enable = False
        self.name = name
        self.requires_grad = requires_grad
        self.layerwise_parallel = layerwise_parallel
        self.parallel_optimizer = parallel_optimizer
        # this flag for tensor copy data.
        self.init_flag = False
        # this flag is for ge variable copy data.
        self.is_init = False
        self._inited_param = None
        self._sliced = False
        self.is_param_ps = False
        self.push_weight_to_server = False
        self.pull_weight_from_server = False
        self.requires_aggr = True
        self._cast_type = None
        self._unique = False
        self.is_in_parallel = _is_in_parallel_mode()
        self._pipeline_stage_list = []
        # Initialize the underlying Tensor_ storage according to the input type;
        # scalars become 0-d tensors (int64 / float32 respectively).
        if isinstance(default_input, (Tensor_, Tensor)):
            Tensor_.__init__(self, default_input.dtype, default_input.shape)
        elif isinstance(default_input, int):
            Tensor_.__init__(self, mstype.int64, ())
        elif isinstance(default_input, float):
            Tensor_.__init__(self, mstype.float32, ())
        elif isinstance(default_input, (np.ndarray, list)):
            Tensor_.__init__(self, default_input)
        else:
            raise TypeError(f"The type of the argument 'default_input' must be in ['Tensor', 'int', 'float',"
                            f" 'numpy.ndarray', 'list']. But got type {type(default_input)}.")
        self.param_info.parameter_shape = self.shape
    def __deepcopy__(self, memodict):
        """Deep-copy by constructing a fresh Parameter from self, preserving
        the name and the inited-param bookkeeping.
        """
        new_obj = Parameter(self)
        new_obj.name = self.name
        new_obj._inited_param = self._inited_param
        return new_obj
@staticmethod
def _get_base_class(input_class):
input_class_name = Parameter.__name__
if input_class_name in Parameter.__base_type__:
new_type = Parameter.__base_type__[input_class_name]
else:
new_type = type(input_class_name, (Parameter, input_class), {})
Parameter.__base_type__[input_class_name] = new_type
return new_type
    @staticmethod
    def _get_parameter_new_args(data):
        """Normalize `data` into (class, *constructor_args) used by __new__.

        Lazily-initialized tensors are kept uninitialized in parallel /
        parameter-server roles (shape may change during compilation); otherwise
        data is materialized. Scalars are mapped to int32 / float32 tensors.

        Raises:
            ValueError: if `data` is a bool.
        """
        if isinstance(data, bool):
            raise ValueError('Parameter data can not be `bool`')
        if isinstance(data, Tensor) and data.has_init:
            if not _is_fl_mode():
                if _is_in_parallel_mode() or _is_role_worker() or _is_role_sched() or _is_role_pserver():
                    # do not init data while in auto parallel.
                    return (Tensor, None, data.dtype, data.shape, data.init)
                data = data.init_data().asnumpy()
        elif isinstance(data, Tensor):
            # make a copy of Tensor to init the parameter
            return (Tensor, data.asnumpy(),)
        if isinstance(data, int):
            return (Tensor, data, mstype.int32)
        if isinstance(data, float):
            return (Tensor, data, mstype.float32)
        return (Tensor, data)
def __str__(self):
return f'Parameter (name={self.name}, shape={self.shape}, dtype={self.dtype}, ' \
f'requires_grad={self.requires_grad})'
def __repr__(self):
return self.__str__()
    def __parameter__(self):
        """Marker method used by the parser to recognize Parameter objects; no body."""
    def set_param_ps(self, init_in_server=False):
        """
        Set whether the trainable parameter is updated by parameter server and whether the
        trainable parameter is initialized on server.

        Note:
            It only works when a running task is in the parameter server mode
            (context.set_ps_context(enable_ps=True) and MS_ROLE exported).

        Args:
            init_in_server (bool): Whether trainable parameter updated by parameter server is
                initialized on server. Default: False.

        Raises:
            RuntimeError: if the process has no parameter-server role configured.
        """
        if not(_is_role_worker() or _is_role_pserver() or _is_role_sched()):
            raise RuntimeError("Must complete following two steps before calling set_param_ps: \n"
                               "1. context.set_ps_context(enable_ps=True) \n"
                               "2. export MS_ROLE environment variable \n"
                               "Please refer to the official website for detailed usage.")
        self.is_param_ps = True
        self.init_in_server = init_in_server
        self.param_info.init_in_server = init_in_server
    def set_param_fl(self, push_to_server=False, pull_from_server=False, requires_aggr=True):
        """
        Set the way of parameter and server interaction.

        Args:
            push_to_server (bool): Whether the parameter should be pushed to server. Default: False.
            pull_from_server (bool): Whether the parameter should be pulled from server. Default: False.
            requires_aggr (bool): Whether the parameter should be aggregated in the server. Default: True.
        """
        # Flags are only ever raised/lowered one-way here: False arguments do
        # not reset a previously set flag.
        if push_to_server:
            self.push_weight_to_server = True
        if pull_from_server:
            self.pull_weight_from_server = True
        if not requires_aggr:
            self.requires_aggr = False
            self.param_info.requires_aggr = False
    @property
    def inited_param(self):
        """
        Get the new parameter after call the init_data.

        Default is a None, If `self` is a Parameter without data, after call the
        `init_data` the initialized Parameter with data will be recorded here.
        """
        return self._inited_param
    @property
    def name(self):
        """Get the name of the parameter."""
        return self.param_info.name
    @name.setter
    def name(self, name_):
        """
        Define a name for the parameter.

        Args:
            name_ (`str` or `None`): The name of the parameter. When the parameter is None or an empty string,
                the default value `PARAMETER_NAME_DEFAULT` is used.

        Raises:
            ValueError: if the name is too long or not a string/None.
            RuntimeError: if the embedding cache is enabled but the parameter is not 2-D.
        """
        if name_ is None:
            name_ = PARAMETER_NAME_DEFAULT
        elif isinstance(name_, str):
            name_ = name_.strip()
            if name_ == '':
                name_ = PARAMETER_NAME_DEFAULT
            if len(name_) > PARAMETER_NAME_PREFIX_MAX_LEN:
                raise ValueError("The length of the '{}' name should be less than {}.".
                                 format(name_, PARAMETER_NAME_PREFIX_MAX_LEN))
        else:
            raise ValueError("The type of the Parameter's name should be 'string' or 'None', "
                             "but got {}.".format(type(name_)))
        # With the embedding cache enabled on a worker, renaming must also
        # re-key the server-side hash table from the old name to the new one.
        if _is_role_worker() and self.cache_enable:
            if len(self.shape) != 2:
                raise RuntimeError("The dims of parameter '{}' must be 2, but got {}."
                                   .format(self.name, len(self.shape)))
            _reinsert_hash_table_size(name_, self.param_info.name, self.shape[0], self.shape[1])
        self.param_info.name = name_
    @property
    def sliced(self):
        """Get slice status of the parameter."""
        return self._sliced
    @sliced.setter
    def sliced(self, sliced_):
        # Marks whether this parameter's data has been sliced for parallelism.
        self._sliced = sliced_
    @property
    def comm_fusion(self):
        """
        Get the fusion type (int) for communication operators corresponding to this parameter.

        In `AUTO_PARALLEL` and `SEMI_AUTO_PARALLEL` mode, some communication operators used for parameters or
        gradients aggregation are inserted automatically. The value of fusion must be greater than or equal to 0.
        When the value of fusion is 0, operators will not be fused together.
        """
        return self.param_info.comm_fusion
    @comm_fusion.setter
    def comm_fusion(self, comm_fusion_):
        # PyNative + auto-parallel cannot fuse communication ops for parameters.
        if context.get_context("mode") == context.PYNATIVE_MODE and "auto_parallel" in _get_parallel_mode():
            raise RuntimeError(
                "`comm_fusion` does not support PYNATIVE_MODE in AUTO_PARALLEL and SEMI_AUTO_PARALLEL mode.")
        Validator.check_non_negative_int(comm_fusion_)
        self.param_info.comm_fusion = comm_fusion_
    @property
    def parallel_optimizer_comm_recompute(self):
        """
        Get the communication recompute status(bool) of optimizer parallel for the parameter.

        In `AUTO_PARALLEL` and `SEMI_AUTO_PARALLEL` mode, when applying parallel optimizer, some AllGather operators
        used for parameters gathering are inserted automatically. It is used to control the recompute attr for those
        AllGather operators.

        Note:
            - Only `Graph` mode is supported.
            - It is recommended to use cell.recompute(parallel_optimizer_comm_recompute=True/False) to configure
              the AllGather operators introducing by parallel optimizer rather than using this interface directly.
        """
        return self.param_info.parallel_optimizer_comm_recompute
    @parallel_optimizer_comm_recompute.setter
    def parallel_optimizer_comm_recompute(self, parallel_optimizer_comm_recompute_):
        # Validated to be a bool before being stored on the C++ ParamInfo.
        Validator.check_bool(parallel_optimizer_comm_recompute_)
        self.param_info.parallel_optimizer_comm_recompute = parallel_optimizer_comm_recompute_
    @property
    def unique(self):
        """Whether the parameter is already unique or not."""
        return self._unique
    @unique.setter
    def unique(self, unique_):
        # Plain flag; no validation is performed on assignment.
        self._unique = unique_
    def clone(self, init='same'):
        """
        Clone the parameter.

        Args:
            init (Union[Tensor, str, numbers.Number]): Initialize the shape and dtype of the parameter.
                If `init` is a `Tensor` or `numbers.Number`, clone a new parameter with the same shape
                and dtype, and the data of the new parameter will be set according to `init`. If `init`
                is a `str`, the `init` should be the alias of the class inheriting from `Initializer`.
                For example, if `init` is 'same', clone a new parameter with the same data, shape, and
                dtype. Default: 'same'.

        Returns:
            Parameter, a new parameter.
        """
        x = copy(self)
        # param_info is cloned so the copy gets independent metadata.
        x.param_info = self.param_info.clone()
        x.is_init = False
        x.init = self.init
        x.is_param_ps = self.is_param_ps
        x.init_in_server = self.init_in_server
        x.cache_enable = self.cache_enable
        x.requires_aggr = self.requires_aggr
        if self.cache_shape:
            x.cache_shape = self.cache_shape
        if init != 'same':
            # Re-initialize the clone's data, keeping this parameter's shape/dtype.
            shape = self.shape
            dtype = self.dtype
            x.set_data(initializer(init, shape=shape, dtype=dtype))
        return x
@property
def layerwise_parallel(self):
"""
Get the layerwise parallel status(bool) of the parameter.
When layerwise_parallel is true in `DATA_PARALLEL` and `HYBRID_PARALLEL` parallel mode, broadcast and gradients
communication would not be applied to parameters.
"""
return self.param_info.layerwise_parallel
@layerwise_parallel.setter
def layerwise_parallel(self, value=True):
    # Strict bool check: truthy non-bools are rejected.
    if not isinstance(value, bool):
        raise TypeError("The argument `layerwise_parallel` must be bool type.")
    self.param_info.layerwise_parallel = value
@property
def parallel_optimizer(self):
    """
    Get the optimizer parallel status (bool) of the parameter.

    Used to filter the weight-shard operation in `AUTO_PARALLEL` and `SEMI_AUTO_PARALLEL`
    mode; it only takes effect when the parallel optimizer is enabled in
    `mindspore.context.set_auto_parallel_context()`.
    """
    return self.param_info.parallel_optimizer
@parallel_optimizer.setter
def parallel_optimizer(self, value=True):
    # Strict bool check, consistent with the other boolean setters.
    if not isinstance(value, bool):
        raise TypeError("The argument `parallel_optimizer` must be bool type.")
    self.param_info.parallel_optimizer = value
@property
def cache_enable(self):
    """Return whether the parameter is cache enable (bool)."""
    return self.param_info.cache_enable
@cache_enable.setter
def cache_enable(self, value=True):
    # Strict bool check, consistent with the other boolean setters.
    if not isinstance(value, bool):
        raise TypeError("The argument `cache_enable` must be bool type.")
    self.param_info.cache_enable = value
@property
def cache_shape(self):
    """Return the cache shape corresponding to the parameter if the cache is used."""
    return self.param_info.cache_shape
@cache_shape.setter
def cache_shape(self, value):
    # Accept any sequence-like shape expressed as tuple or list.
    if not isinstance(value, (tuple, list)):
        raise TypeError("The argument `cache_shape` must be tuple or list type.")
    self.param_info.cache_shape = value
@property
def requires_grad(self):
    """
    Return whether the parameter requires gradient.

    When requires_grad is True, auto grad records operations performed on this
    parameter so that gradients can be computed for it; when it is False, the
    parameter is excluded from gradient computation.
    """
    return self.param_info.requires_grad
@requires_grad.setter
def requires_grad(self, value=True):
    # Strict bool check, consistent with the other boolean setters.
    if not isinstance(value, bool):
        raise TypeError("The argument `requires_grad` must be bool type")
    self.param_info.requires_grad = value
@property
def data(self):
    """Return the parameter object itself (a Parameter is its own data Tensor)."""
    return self
def _update_tensor_data(self, data):
    """Update the parameter in place from `data` when possible, else build a replacement Parameter."""
    if isinstance(self, Tensor):
        # In-place update: clear init bookkeeping and adopt the new value.
        self.init_flag = False
        self.init = None
        return self.assign_value(data)
    # Not a Tensor instance: create a new Parameter that shares this param_info.
    new_param = Parameter(data, self.name, self.requires_grad)
    new_param.param_info = self.param_info
    return new_param
def add_pipeline_stage(self, stage):
    """Append a pipeline stage index to this parameter's stage list.

    Args:
        stage (int): Non-negative pipeline stage index.

    Raises:
        TypeError: If `stage` is not an int or is negative.
    """
    # Plain string: the original used an f-string with no placeholders.
    logger.warning("This interface may be deleted in the future.")
    # Bug fix: the check accepts 0 (stage < 0), so the message must say
    # "non-negative", not "positive".
    if not isinstance(stage, int) or stage < 0:
        raise TypeError("`stage` must be a non-negative number of int type")
    self._pipeline_stage_list.append(stage)
def _raise_type_error(self, incoming):
    """Raise a TypeError describing a forbidden implicit dtype conversion."""
    raise TypeError(f"Incoming Parameter dtype can not be converted to current dtype implicitly. "
                    f"Current dtype is {self.dtype}, and incoming is {incoming}. "
                    f"Use .set_dtype(xxx) to change the dtype.")
@staticmethod
def _set_data_check_input_valid(current_shape, data_shape, current_tensor_is_init,
                                incoming_tensor_is_init, slice_shape=False):
    """Validate that `data` may replace the current tensor.

    Rejects replacing initialized data with uninitialized data, and rejects a
    shape change unless `slice_shape` explicitly allows it (Slice may resize).
    """
    if incoming_tensor_is_init and not current_tensor_is_init:
        raise TypeError("The original tensor data is initialized, but the argument 'data' is not initialized."
                        "Please initialize 'data' before call this method.")
    shapes_differ = tuple(current_shape) != tuple(data_shape)
    if shapes_differ and not slice_shape:
        raise ValueError(f"Can not change the shape of Parameter which has been initialized."
                         f" Current shape is {current_shape}, and incoming is {data_shape}.")
def set_data(self, data, slice_shape=False):
    """
    Set Parameter's data.

    Args:
        data (Union[Tensor, int, float]): New data.
        slice_shape (bool): If slice the parameter is set to true, the shape is not checked for consistency.
            Default: False.

    Returns:
        Parameter, the parameter after set data.
    """
    if not isinstance(data, (Tensor, int, float)):
        raise TypeError(f"Parameter data must be [`Tensor`, `int`, `float`] or a kind of `Tensor` "
                        f"(like `Tensor`). But with type {type(data)}.")
    if isinstance(data, (int, float)):
        # A Python float may not be stored into an integer parameter implicitly.
        if self.dtype in mstype.int_type and isinstance(data, float):
            self._raise_type_error(mstype.float_)
        data = Tensor(data, self.dtype)
    # both not init.
    incoming_tensor_is_init = isinstance(data, Tensor) and not data.has_init
    current_tensor_is_init = isinstance(self, Tensor) and not self.has_init
    Parameter._set_data_check_input_valid(self.shape, data.shape, current_tensor_is_init, incoming_tensor_is_init,
                                          slice_shape)
    if self.dtype != data.dtype:
        # Implicit conversion is only allowed "upwards" along the conversion sequence.
        if mstype.implicit_conversion_seq[self.dtype] < mstype.implicit_conversion_seq[data.dtype]:
            self._raise_type_error(data.dtype)
        else:
            from mindspore.ops import functional as F
            if isinstance(data, Tensor) and data.init is not None:
                data.init_data()
            data = F.cast(data, self.dtype)
    if isinstance(data, Tensor) and data.has_init:
        # The parameter has been initialized, directly update by the data
        if current_tensor_is_init:
            self._update_tensor_data(data.init_data())
        else:
            # also update the related inited parameter data
            if self.inited_param is not None:
                self.inited_param.set_data(data)
            self.init_mode = data
    elif incoming_tensor_is_init or current_tensor_is_init:
        self._update_tensor_data(data)
    self.sliced = slice_shape
    return self
def _get_init_data_args(self, layout=None):
    """Build the positional args for Tensor.init_data from a parallel `layout` tuple."""
    init_data_args = ()
    if layout:
        if not isinstance(layout, tuple):
            raise TypeError("The argument 'layout' should be tuple, but got {}.".format(type(layout)))
        if len(layout) < 6:
            raise ValueError("The length of 'layout' must be larger than 5, but got {}.".format(len(layout)))
        # layout = [dev_mat, tensor_map, slice_shape, field_size, uniform_split, opt_shard_group]
        slice_index = int(_get_slice_index(layout[0], layout[1]))
        init_data_args += (slice_index, layout[2], layout[5])
    return init_data_args
def init_data(self, layout=None, set_sliced=False):
    """
    Initialize the parameter's data.

    Args:
        layout (Union[None, tuple]): The parameter's layout info.
            layout [dev_mat, tensor_map, slice_shape, field_size, uniform_split, opt_shard_group]. Default: None.
            It's not None only in 'SEMI_AUTO_PARALLEL' or 'AUTO_PARALLEL' mode.

            - dev_mat (list(int)): The parameter's device matrix.
            - tensor_map (list(int)): The parameter's tensor map.
            - slice_shape (list(int)): The parameter's slice shape.
            - field_size (int): The parameter's field size.
            - uniform_split (bool): Whether the parameter is split evenly.
            - opt_shard_group (str): The group of the parameter while running optimizer parallel.

        set_sliced (bool): True if the parameter is set sliced after initializing the data.
            Default: False.

    Raises:
        RuntimeError: If it is from Initializer, and parallel mode has changed after the Initializer created.
        ValueError: If the length of the layout is less than 6.
        TypeError: If `layout` is not tuple.

    Returns:
        Parameter, the `Parameter` after initializing data. If current `Parameter` was already initialized before,
        returns the same initialized `Parameter`.
    """
    if self.is_default_input_init and self.is_in_parallel != _is_in_parallel_mode():
        raise RuntimeError("Must set or change parallel mode before any Tensor created.")
    if self.init_mode is None:
        # Data was materialized eagerly; nothing to initialize.
        return self
    if self.inited_param is not None:
        # Already initialized by a previous call; reuse the result.
        return self.inited_param
    if _is_role_worker() and self.cache_enable:
        global_seed, op_seed = _get_global_and_op_seed()
        _insert_weight_init_info(self.name, global_seed, op_seed)
    init_data_args = self._get_init_data_args(layout)
    if _is_role_pserver():
        return self
    if self.init_in_server and self.is_param_ps and isinstance(self.init_mode, Tensor) and \
            self.init_mode.init is not None and (_is_role_worker() or _is_role_sched()):
        if self.cache_enable:
            data = self.init_mode.init_data(*init_data_args)
        else:
            # Server-side init: the worker only needs a placeholder value.
            data = self.init_mode.init_data(0, [1])
    else:
        data = self.init_mode.init_data(*init_data_args)
    obj = self._update_tensor_data(data)
    if id(obj) != id(self):
        # A new Parameter object was produced; remember it so later calls reuse it.
        self._inited_param = obj
    obj.init_mode = None
    obj.sliced = set_sliced
    return obj
class ParameterTuple(tuple):
    """
    Inherited from tuple, ParameterTuple is used to save multiple parameters.

    Note:
        It is used to store the parameters of the network into the parameter tuple collection.
    """

    def __new__(cls, iterable):
        """Create instance object of ParameterTuple, validating element types and name uniqueness."""
        params = tuple(iterable)
        seen_ids = set()
        seen_names = set()
        for param in params:
            if not isinstance(param, Parameter):
                raise TypeError(f"ParameterTuple input should be `Parameter` collection."
                                f"But got a {type(iterable)}, {iterable}")
            if id(param) in seen_ids:
                # The very same object occurring twice is tolerated.
                continue
            if param.name in seen_names:
                raise ValueError("The value {} , its name '{}' already exists. "
                                 "Please set a unique name for the parameter.".format(param, param.name))
            seen_names.add(param.name)
            seen_ids.add(id(param))
        return tuple.__new__(ParameterTuple, tuple(params))

    def clone(self, prefix, init='same'):
        """
        Clone the parameters in ParameterTuple element-wisely to generate a new ParameterTuple.

        Args:
            prefix (str): Namespace prefix added to each cloned parameter's name.
            init (Union[Tensor, str, numbers.Number]): How each clone's data is filled;
                'same' (default) copies the original values, a Tensor/number sets them
                directly, and a string selects an `Initializer` by name.

        Returns:
            Tuple, the new Parameter tuple.
        """
        Validator.check_str_by_regular(prefix)
        cloned = []
        for original in self:
            duplicate = original.clone(init)
            duplicate.name = prefix + "." + duplicate.name
            cloned.append(duplicate)
            if not duplicate.cache_enable:
                continue
            # Cache-enabled parameters also need worker-side cache state cloned.
            if _is_role_worker():
                _clone_hash_table(original.name, duplicate.name)
                _insert_accumu_init_info(duplicate.name, init_to_value(init))
        return ParameterTuple(cloned)

    def __parameter_tuple__(self):
        """For parse check."""
| 43.036212 | 120 | 0.631489 |
from copy import copy
import numbers
import numpy as np
from mindspore import log as logger
from .._c_expression import ParamInfo
from . import dtype as mstype
from .. import context
from ..parallel._utils import _get_parallel_mode
from .initializer import initializer
from .tensor import Tensor
from .._checkparam import Validator
from .._c_expression import Tensor as Tensor_
from ..parallel._tensor import _get_slice_index
from ..parallel._auto_parallel_context import auto_parallel_context
from ..parallel._ps_context import _is_role_worker, _is_role_pserver, _is_role_sched, _clone_hash_table, _is_fl_mode
from ..parallel._ps_context import _reinsert_hash_table_size
from ..parallel._ps_context import _insert_weight_init_info, _insert_accumu_init_info
from .seed import _get_global_and_op_seed
__all__ = ['Parameter', 'ParameterTuple']
PARAMETER_NAME_DEFAULT = "Parameter"
PARAMETER_NAME_PREFIX_MAX_LEN = 1024
def _is_in_parallel_mode():
    """Return True when running under semi-auto or auto parallel mode."""
    return auto_parallel_context().get_parallel_mode() in ["semi_auto_parallel", "auto_parallel"]
def init_to_value(init):
    """Convert an initializer specification to a float value.

    Args:
        init (Union[str, numbers.Number]): Either 'zeros'/'ones' or a number.

    Returns:
        float, 0.0 for 'zeros', 1.0 for 'ones', or float(init) for a number.

    Raises:
        ValueError: If `init` is an unknown string or not a string/number.
    """
    named_values = {'zeros': 0.0, 'ones': 1.0}
    if isinstance(init, str):
        if init not in named_values:
            raise ValueError("The argument 'init' should be one of values in ['zeros', 'ones'].")
        return named_values[init]
    if isinstance(init, numbers.Number):
        return float(init)
    raise ValueError("The argument 'init' should be number or string, but got {}.".format(type(init)))
class Parameter(Tensor_):
__base_type__ = {}
def __new__(cls, default_input, *args, **kwargs):
    """Create the Parameter instance, deferring data creation when `default_input` carries an initializer."""
    init_data_flag = bool(isinstance(default_input, Tensor) and default_input.has_init)
    input_class, *class_init_args = Parameter._get_parameter_new_args(default_input)
    # Build (and memoize) the dynamic subclass combining Parameter with the concrete input class.
    new_type = Parameter._get_base_class(input_class)
    obj = input_class.__new__(new_type)
    input_class.__init__(obj, *class_init_args)
    obj.init_mode = None
    obj.is_default_input_init = init_data_flag
    if obj.has_init:
        # Keep the initializer around so init_data() can realize the data later.
        obj.init_mode = default_input
    return obj
def __reduce_ex__(self, _):
    """Support pickling/deepcopy by reducing to (Parameter, constructor args)."""
    data = self
    if self.init_mode is not None:
        data = self.init_mode
    else:
        # cast to break deep infinite loop while deepcopy
        data = Tensor(self)
    return (
        Parameter, (data, self.name, self.requires_grad, self.layerwise_parallel))
def __init__(self, default_input, name=None, requires_grad=True, layerwise_parallel=False, parallel_optimizer=True):
    """Initialize Parameter state and the underlying tensor storage.

    Args:
        default_input (Union[Tensor, int, float, numpy.ndarray, list]): Initial data.
        name (str): Parameter name; None selects the default name.
        requires_grad (bool): Whether auto grad tracks this parameter.
        layerwise_parallel (bool): Layerwise-parallel flag (see property).
        parallel_optimizer (bool): Optimizer-parallel flag (see property).
    """
    self.param_info = ParamInfo()
    self.init_in_server = False
    self.cache_enable = False
    self.name = name
    self.requires_grad = requires_grad
    self.layerwise_parallel = layerwise_parallel
    self.parallel_optimizer = parallel_optimizer
    # this flag for tensor copy data.
    self.init_flag = False
    # this flag is for ge variable copy data.
    self.is_init = False
    self._inited_param = None
    self._sliced = False
    self.is_param_ps = False
    self.push_weight_to_server = False
    self.pull_weight_from_server = False
    self.requires_aggr = True
    self._cast_type = None
    self._unique = False
    self.is_in_parallel = _is_in_parallel_mode()
    self._pipeline_stage_list = []
    # Initialize the backing tensor according to the input's Python type.
    # NOTE(review): a plain int maps to int64 here but to int32 in
    # _get_parameter_new_args -- confirm this asymmetry is intended.
    if isinstance(default_input, (Tensor_, Tensor)):
        Tensor_.__init__(self, default_input.dtype, default_input.shape)
    elif isinstance(default_input, int):
        Tensor_.__init__(self, mstype.int64, ())
    elif isinstance(default_input, float):
        Tensor_.__init__(self, mstype.float32, ())
    elif isinstance(default_input, (np.ndarray, list)):
        Tensor_.__init__(self, default_input)
    else:
        raise TypeError(f"The type of the argument 'default_input' must be in ['Tensor', 'int', 'float',"
                        f" 'numpy.ndarray', 'list']. But got type {type(default_input)}.")
    self.param_info.parameter_shape = self.shape
def __deepcopy__(self, memodict):
    """Deep copy: build a fresh Parameter carrying over the name and inited-param link."""
    new_obj = Parameter(self)
    new_obj.name = self.name
    new_obj._inited_param = self._inited_param
    return new_obj
@staticmethod
def _get_base_class(input_class):
    """Return (creating and caching on first use) the dynamic Parameter subclass for `input_class`."""
    input_class_name = Parameter.__name__
    if input_class_name in Parameter.__base_type__:
        new_type = Parameter.__base_type__[input_class_name]
    else:
        # Dynamically derive from both Parameter and the concrete tensor class.
        new_type = type(input_class_name, (Parameter, input_class), {})
        Parameter.__base_type__[input_class_name] = new_type
    return new_type
@staticmethod
def _get_parameter_new_args(data):
    """Return (class, *init args) used by __new__ to construct the underlying tensor."""
    if isinstance(data, bool):
        raise ValueError('Parameter data can not be `bool`')
    if isinstance(data, Tensor) and data.has_init:
        if not _is_fl_mode():
            if _is_in_parallel_mode() or _is_role_worker() or _is_role_sched() or _is_role_pserver():
                # do not init data while in auto parallel.
                return (Tensor, None, data.dtype, data.shape, data.init)
            data = data.init_data().asnumpy()
    elif isinstance(data, Tensor):
        # make a copy of Tensor to init the parameter
        return (Tensor, data.asnumpy(),)
    if isinstance(data, int):
        # NOTE(review): __init__ maps a plain int to int64 while this path
        # uses int32 -- confirm the asymmetry is intended.
        return (Tensor, data, mstype.int32)
    if isinstance(data, float):
        return (Tensor, data, mstype.float32)
    return (Tensor, data)
def __str__(self):
    """Readable summary showing name, shape, dtype and requires_grad."""
    return f'Parameter (name={self.name}, shape={self.shape}, dtype={self.dtype}, ' \
           f'requires_grad={self.requires_grad})'
def __repr__(self):
    """repr mirrors __str__."""
    return self.__str__()
def __parameter__(self):
    """For parse check."""
def set_param_ps(self, init_in_server=False):
    """Mark this parameter as stored on a parameter server.

    Args:
        init_in_server (bool): Whether the parameter is initialized on the server side. Default: False.

    Raises:
        RuntimeError: If the parameter-server context has not been set up.
    """
    if not(_is_role_worker() or _is_role_pserver() or _is_role_sched()):
        raise RuntimeError("Must complete following two steps before calling set_param_ps: \n"
                           "1. context.set_ps_context(enable_ps=True) \n"
                           "2. export MS_ROLE environment variable \n"
                           "Please refer to the official website for detailed usage.")
    self.is_param_ps = True
    self.init_in_server = init_in_server
    self.param_info.init_in_server = init_in_server
def set_param_fl(self, push_to_server=False, pull_from_server=False, requires_aggr=True):
    """Configure federated-learning behaviour: weight push/pull and aggregation.

    Note: flags are only ever switched on/off in one direction here; repeated
    calls cannot reset push/pull back to False.
    """
    if push_to_server:
        self.push_weight_to_server = True
    if pull_from_server:
        self.pull_weight_from_server = True
    if not requires_aggr:
        self.requires_aggr = False
        self.param_info.requires_aggr = False
@property
def inited_param(self):
return self._inited_param
@property
def name(self):
return self.param_info.name
@name.setter
def name(self, name_):
if name_ is None:
name_ = PARAMETER_NAME_DEFAULT
elif isinstance(name_, str):
name_ = name_.strip()
if name_ == '':
name_ = PARAMETER_NAME_DEFAULT
if len(name_) > PARAMETER_NAME_PREFIX_MAX_LEN:
raise ValueError("The length of the '{}' name should be less than {}.".
format(name_, PARAMETER_NAME_PREFIX_MAX_LEN))
else:
raise ValueError("The type of the Parameter's name should be 'string' or 'None', "
"but got {}.".format(type(name_)))
if _is_role_worker() and self.cache_enable:
if len(self.shape) != 2:
raise RuntimeError("The dims of parameter '{}' must be 2, but got {}."
.format(self.name, len(self.shape)))
_reinsert_hash_table_size(name_, self.param_info.name, self.shape[0], self.shape[1])
self.param_info.name = name_
@property
def sliced(self):
return self._sliced
@sliced.setter
def sliced(self, sliced_):
self._sliced = sliced_
@property
def comm_fusion(self):
return self.param_info.comm_fusion
@comm_fusion.setter
def comm_fusion(self, comm_fusion_):
if context.get_context("mode") == context.PYNATIVE_MODE and "auto_parallel" in _get_parallel_mode():
raise RuntimeError(
"`comm_fusion` does not support PYNATIVE_MODE in AUTO_PARALLEL and SEMI_AUTO_PARALLEL mode.")
Validator.check_non_negative_int(comm_fusion_)
self.param_info.comm_fusion = comm_fusion_
@property
def parallel_optimizer_comm_recompute(self):
return self.param_info.parallel_optimizer_comm_recompute
@parallel_optimizer_comm_recompute.setter
def parallel_optimizer_comm_recompute(self, parallel_optimizer_comm_recompute_):
Validator.check_bool(parallel_optimizer_comm_recompute_)
self.param_info.parallel_optimizer_comm_recompute = parallel_optimizer_comm_recompute_
@property
def unique(self):
return self._unique
@unique.setter
def unique(self, unique_):
self._unique = unique_
def clone(self, init='same'):
x = copy(self)
x.param_info = self.param_info.clone()
x.is_init = False
x.init = self.init
x.is_param_ps = self.is_param_ps
x.init_in_server = self.init_in_server
x.cache_enable = self.cache_enable
x.requires_aggr = self.requires_aggr
if self.cache_shape:
x.cache_shape = self.cache_shape
if init != 'same':
shape = self.shape
dtype = self.dtype
x.set_data(initializer(init, shape=shape, dtype=dtype))
return x
@property
def layerwise_parallel(self):
return self.param_info.layerwise_parallel
@layerwise_parallel.setter
def layerwise_parallel(self, value=True):
if not isinstance(value, bool):
raise TypeError("The argument `layerwise_parallel` must be bool type.")
self.param_info.layerwise_parallel = value
@property
def parallel_optimizer(self):
return self.param_info.parallel_optimizer
@parallel_optimizer.setter
def parallel_optimizer(self, value=True):
if not isinstance(value, bool):
raise TypeError("The argument `parallel_optimizer` must be bool type.")
self.param_info.parallel_optimizer = value
@property
def cache_enable(self):
return self.param_info.cache_enable
@cache_enable.setter
def cache_enable(self, value=True):
if not isinstance(value, bool):
raise TypeError("The argument `cache_enable` must be bool type.")
self.param_info.cache_enable = value
@property
def cache_shape(self):
return self.param_info.cache_shape
@cache_shape.setter
def cache_shape(self, value):
if not isinstance(value, (tuple, list)):
raise TypeError("The argument `cache_shape` must be tuple or list type.")
self.param_info.cache_shape = value
@property
def requires_grad(self):
return self.param_info.requires_grad
@requires_grad.setter
def requires_grad(self, value=True):
if not isinstance(value, bool):
raise TypeError("The argument `requires_grad` must be bool type")
self.param_info.requires_grad = value
@property
def data(self):
return self
def _update_tensor_data(self, data):
if isinstance(self, Tensor):
self.init_flag = False
self.init = None
return self.assign_value(data)
new_param = Parameter(data, self.name, self.requires_grad)
new_param.param_info = self.param_info
return new_param
def add_pipeline_stage(self, stage):
    """Append a pipeline stage index to this parameter's stage list.

    Args:
        stage (int): Non-negative pipeline stage index.

    Raises:
        TypeError: If `stage` is not an int or is negative.
    """
    # Plain string: the original used an f-string with no placeholders.
    logger.warning("This interface may be deleted in the future.")
    # Bug fix: the check accepts 0 (stage < 0), so the message must say
    # "non-negative", not "positive".
    if not isinstance(stage, int) or stage < 0:
        raise TypeError("`stage` must be a non-negative number of int type")
    self._pipeline_stage_list.append(stage)
def _raise_type_error(self, incoming):
raise TypeError(f"Incoming Parameter dtype can not be converted to current dtype implicitly. "
f"Current dtype is {self.dtype}, and incoming is {incoming}. "
f"Use .set_dtype(xxx) to change the dtype.")
@staticmethod
def _set_data_check_input_valid(current_shape, data_shape, current_tensor_is_init,
incoming_tensor_is_init, slice_shape=False):
if incoming_tensor_is_init and not current_tensor_is_init:
raise TypeError("The original tensor data is initialized, but the argument 'data' is not initialized."
"Please initialize 'data' before call this method.")
if tuple(current_shape) != tuple(data_shape):
if not slice_shape:
raise ValueError(f"Can not change the shape of Parameter which has been initialized."
f" Current shape is {current_shape}, and incoming is {data_shape}.")
def set_data(self, data, slice_shape=False):
if not isinstance(data, (Tensor, int, float)):
raise TypeError(f"Parameter data must be [`Tensor`, `int`, `float`] or a kind of `Tensor` "
f"(like `Tensor`). But with type {type(data)}.")
if isinstance(data, (int, float)):
if self.dtype in mstype.int_type and isinstance(data, float):
self._raise_type_error(mstype.float_)
data = Tensor(data, self.dtype)
incoming_tensor_is_init = isinstance(data, Tensor) and not data.has_init
current_tensor_is_init = isinstance(self, Tensor) and not self.has_init
Parameter._set_data_check_input_valid(self.shape, data.shape, current_tensor_is_init, incoming_tensor_is_init,
slice_shape)
if self.dtype != data.dtype:
if mstype.implicit_conversion_seq[self.dtype] < mstype.implicit_conversion_seq[data.dtype]:
self._raise_type_error(data.dtype)
else:
from mindspore.ops import functional as F
if isinstance(data, Tensor) and data.init is not None:
data.init_data()
data = F.cast(data, self.dtype)
if isinstance(data, Tensor) and data.has_init:
if current_tensor_is_init:
self._update_tensor_data(data.init_data())
else:
if self.inited_param is not None:
self.inited_param.set_data(data)
self.init_mode = data
elif incoming_tensor_is_init or current_tensor_is_init:
self._update_tensor_data(data)
self.sliced = slice_shape
return self
def _get_init_data_args(self, layout=None):
init_data_args = ()
if layout:
if not isinstance(layout, tuple):
raise TypeError("The argument 'layout' should be tuple, but got {}.".format(type(layout)))
if len(layout) < 6:
raise ValueError("The length of 'layout' must be larger than 5, but got {}.".format(len(layout)))
slice_index = int(_get_slice_index(layout[0], layout[1]))
init_data_args += (slice_index, layout[2], layout[5])
return init_data_args
def init_data(self, layout=None, set_sliced=False):
if self.is_default_input_init and self.is_in_parallel != _is_in_parallel_mode():
raise RuntimeError("Must set or change parallel mode before any Tensor created.")
if self.init_mode is None:
return self
if self.inited_param is not None:
return self.inited_param
if _is_role_worker() and self.cache_enable:
global_seed, op_seed = _get_global_and_op_seed()
_insert_weight_init_info(self.name, global_seed, op_seed)
init_data_args = self._get_init_data_args(layout)
if _is_role_pserver():
return self
if self.init_in_server and self.is_param_ps and isinstance(self.init_mode, Tensor) and \
self.init_mode.init is not None and (_is_role_worker() or _is_role_sched()):
if self.cache_enable:
data = self.init_mode.init_data(*init_data_args)
else:
data = self.init_mode.init_data(0, [1])
else:
data = self.init_mode.init_data(*init_data_args)
obj = self._update_tensor_data(data)
if id(obj) != id(self):
self._inited_param = obj
obj.init_mode = None
obj.sliced = set_sliced
return obj
class ParameterTuple(tuple):
    """Inherited from tuple; stores multiple Parameter objects, enforcing unique names."""
    def __new__(cls, iterable):
        """Create instance object of ParameterTuple, validating element types and name uniqueness."""
        data = tuple(iterable)
        ids = set()
        names = set()
        for x in data:
            if not isinstance(x, Parameter):
                raise TypeError(f"ParameterTuple input should be `Parameter` collection."
                                f"But got a {type(iterable)}, {iterable}")
            if id(x) not in ids:
                if x.name in names:
                    raise ValueError("The value {} , its name '{}' already exists. "
                                     "Please set a unique name for the parameter.".format(x, x.name))
                names.add(x.name)
                ids.add(id(x))
        return tuple.__new__(ParameterTuple, tuple(data))
    def clone(self, prefix, init='same'):
        """Clone every parameter, prefixing each name with `prefix` and filling data per `init`."""
        Validator.check_str_by_regular(prefix)
        new = []
        for x in self:
            x1 = x.clone(init)
            x1.name = prefix + "." + x1.name
            new.append(x1)
            if not x1.cache_enable:
                continue
            if _is_role_worker():
                # Clone worker-side cache bookkeeping for cache-enabled parameters.
                _clone_hash_table(x.name, x1.name)
                _insert_accumu_init_info(x1.name, init_to_value(init))
        return ParameterTuple(new)
    def __parameter_tuple__(self):
        """For parse check."""
f71c553ec28422c6f3e48889698abd8e2489bc38 | 6,512 | py | Python | rootpy/logger/__init__.py | masonproffitt/rootpy | 3926935e1f2100d8ba68070c2ab44055d4800f73 | [
"BSD-3-Clause"
] | 146 | 2015-01-04T15:16:44.000Z | 2022-01-27T11:29:31.000Z | rootpy/logger/__init__.py | masonproffitt/rootpy | 3926935e1f2100d8ba68070c2ab44055d4800f73 | [
"BSD-3-Clause"
] | 143 | 2015-01-07T00:20:42.000Z | 2021-11-04T07:48:26.000Z | rootpy/logger/__init__.py | masonproffitt/rootpy | 3926935e1f2100d8ba68070c2ab44055d4800f73 | [
"BSD-3-Clause"
] | 56 | 2015-01-30T11:11:07.000Z | 2022-03-28T09:42:06.000Z | """
:py:mod:`rootpy` overrides the default logging class, inserting a check that
there exists a default logging handler. If there is not, it adds one.
In addition, this can be used to intercept ROOT's log messages and redirect them
through python's logging subsystem
Example use:
.. sourcecode:: python
# Disable colored logging (not needed if writing into a file,
# this is automatic).
# Must be done before :py:mod:`rootpy` logs any messages.
import logging; logging.basicConfig(level=logging.DEBUG)
from rootpy import log; log = log["/myapp"]
log.debug("Hello") # Results in "DEBUG:myapp] Hello"
# Suppress all myapp debug and info messages
log.setLevel(log.WARNING)
log.debug("Hello") # No effect
mymod = log["mymod"]
mymod.warning("Hello") # Results in "WARNING:myapp.mymod] Hello"
# Suppress all rootpy debug and info messages
log["/rootpy"].setLevel(log.WARNING)
# Suppress messages coming from TCanvas like
# INFO:ROOT.TCanvas.Print] png file /path/to/file.png has been created
log["/ROOT.TCanvas.Print"].setLevel(log.WARNING)
# Suppress warning messages coming the ``TClass`` constructor:
log["/ROOT.TClass.TClass"].setLevel(log.ERROR)
# Precisely remove messages containing the text "no dictionary for class"
# (doesn't work when attached to parent logger)
import logging
class NoDictMessagesFilter(logging.Filter):
def filter(self, record):
return "no dictionary for class" not in record.msg
log["/ROOT.TClass.TClass"].addFilter(NoDictMessagesFilter())
# Turn ROOT errors into exceptions
from rootpy.logger.magic import DANGER
DANGER.enable = True
import ROOT
ROOT.Error("test", "Test fatal")
# Result:
# ERROR:ROOT.test] Test fatal
# Traceback (most recent call last):
# File "test.py", line 36, in <module>
# ROOT.Fatal("test", "Test fatal")
# File "test.py", line 36, in <module>
# ROOT.Fatal("test", "Test fatal")
# File "rootpy/logger/roothandler.py", line 40, in python_logging_error_handler
# raise ROOTError(level, location, msg)
# rootpy.ROOTError: level=6000, loc='test', msg='Test fatal'
# Primitive function tracing:
@log.trace()
def salut():
return
@log.trace()
def hello(what):
salut()
return "42"
hello("world")
# Result:
# DEBUG:myapp.trace.hello] > ('world',) {}
# DEBUG:myapp.trace.salut] > () {}
# DEBUG:myapp.trace.salut] < return None [0.00 sec]
# DEBUG:myapp.trace.hello] < return 42 [0.00 sec]
"""
from __future__ import absolute_import
import logging
import os
import re
import sys
import threading
from functools import wraps
from time import time
from .utils import check_tty
from .extended_logger import ExtendedLogger
logging.setLoggerClass(ExtendedLogger)
log = logging.getLogger("rootpy")
if not os.environ.get("DEBUG", False):
log.setLevel(log.INFO)
from .formatter import CustomFormatter, CustomColoredFormatter
def check_tty_handler(handler):
    """Return True if *handler* writes to a stream connected to a TTY."""
    # Handlers without a stream attribute (e.g. NullHandler) are never TTYs.
    return hasattr(handler, "stream") and check_tty(handler.stream)
log_root = logging.getLogger()
if not log_root.handlers:
# Add a handler to the top-level logger if it doesn't already have one
handler = logging.StreamHandler()
if check_tty_handler(handler):
handler.setFormatter(CustomColoredFormatter())
else:
handler.setFormatter(CustomFormatter())
log_root.addHandler(handler)
# Make the top-level logger as verbose as possible.
# Log messages that make it to the screen are controlled by the handler
log_root.setLevel(logging.DEBUG)
l = logging.getLogger("rootpy.logger")
l.debug("Adding rootpy's default logging handler to the root logger")
from .magic import set_error_handler
from .roothandler import python_logging_error_handler
__all__ = [
'log_trace',
'set_error_handler',
'python_logging_error_handler',
'LogFilter',
'LiteralFilter',
]
class TraceDepth(threading.local):
    """Thread-local nesting depth for @log.trace decorated calls (-1 = not tracing)."""
    value = -1
trace_depth = TraceDepth()
def log_trace(logger, level=logging.DEBUG, show_enter=True, show_exit=True):
    """
    log a statement on function entry and exit

    Args:
        logger: parent logger; a child logger named after the function is used.
        level: logging level for the trace lines.
        show_enter: log the "> (args) {kwargs}" line on entry.
        show_exit: log the "< return value [time]" line on exit.
    """
    def wrap(function):
        l = logger.getChild(function.__name__).log
        @wraps(function)
        def thunk(*args, **kwargs):
            global trace_depth
            # Indentation of the trace lines follows the per-thread call depth.
            trace_depth.value += 1
            try:
                start = time()
                if show_enter:
                    l(level, "{0}> {1} {2}".format(" "*trace_depth.value,
                                                   args, kwargs))
                try:
                    result = function(*args, **kwargs)
                except:
                    # Capture the exception so the exit line reports it, then re-raise.
                    _, result, _ = sys.exc_info()
                    raise
                finally:
                    if show_exit:
                        l(level, "{0}< return {1} [{2:.2f} sec]".format(
                            " "*trace_depth.value, result, time() - start))
            finally:
                # Restore the depth even if the function raised.
                trace_depth.value -= 1
            return result
        return thunk
    return wrap
class LogFilter(logging.Filter):
    """Filter that suppresses records whose message matches a regex.

    Also usable as a context manager: entering installs the filter on the
    configured logger, leaving removes it again.
    """

    def __init__(self, logger, message_regex):
        logging.Filter.__init__(self)
        self.logger = logger
        self.message_regex = re.compile(message_regex)

    def __enter__(self):
        self.logger.addFilter(self)
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.logger.removeFilter(self)

    def filter(self, record):
        # Keep the record only when the pattern does not match at the start.
        return self.message_regex.match(record.getMessage()) is None
class LiteralFilter(logging.Filter):
    """Filter that drops records whose complete message equals one of *literals*."""

    def __init__(self, literals):
        logging.Filter.__init__(self)
        self.literals = literals

    def filter(self, record):
        message = record.getMessage()
        return message not in self.literals
# filter superfluous ROOT warnings
for histtype in 'CSIFD':
for dimen in '123':
log["/ROOT.TH{0}{1}.Add".format(dimen, histtype)].addFilter(
LiteralFilter([
"Attempt to add histograms with different axis limits",]))
log["/ROOT.TH{0}{1}.Divide".format(dimen, histtype)].addFilter(
LiteralFilter([
"Attempt to divide histograms with different axis limits",]))
log["/ROOT.TH{0}{1}.Multiply".format(dimen, histtype)].addFilter(
LiteralFilter([
"Attempt to multiply histograms with different axis limits",]))
| 31.157895 | 85 | 0.640971 | from __future__ import absolute_import
import logging
import os
import re
import sys
import threading
from functools import wraps
from time import time
from .utils import check_tty
from .extended_logger import ExtendedLogger
logging.setLoggerClass(ExtendedLogger)
log = logging.getLogger("rootpy")
if not os.environ.get("DEBUG", False):
log.setLevel(log.INFO)
from .formatter import CustomFormatter, CustomColoredFormatter
def check_tty_handler(handler):
    """Return True if `handler` writes to a stream connected to a TTY."""
    # Handlers without a stream attribute (e.g. NullHandler) are never TTYs.
    if not hasattr(handler, "stream"):
        return False
    return check_tty(handler.stream)
log_root = logging.getLogger()
if not log_root.handlers:
handler = logging.StreamHandler()
if check_tty_handler(handler):
handler.setFormatter(CustomColoredFormatter())
else:
handler.setFormatter(CustomFormatter())
log_root.addHandler(handler)
# Make the top-level logger as verbose as possible.
# Log messages that make it to the screen are controlled by the handler
log_root.setLevel(logging.DEBUG)
l = logging.getLogger("rootpy.logger")
l.debug("Adding rootpy's default logging handler to the root logger")
from .magic import set_error_handler
from .roothandler import python_logging_error_handler
__all__ = [
'log_trace',
'set_error_handler',
'python_logging_error_handler',
'LogFilter',
'LiteralFilter',
]
class TraceDepth(threading.local):
    """Thread-local nesting depth for @log.trace decorated calls (-1 = not tracing)."""
    value = -1
trace_depth = TraceDepth()
def log_trace(logger, level=logging.DEBUG, show_enter=True, show_exit=True):
    """Decorator factory that logs a statement on function entry and exit, with timing."""
    def wrap(function):
        l = logger.getChild(function.__name__).log
        @wraps(function)
        def thunk(*args, **kwargs):
            global trace_depth
            # Indentation of the trace lines follows the per-thread call depth.
            trace_depth.value += 1
            try:
                start = time()
                if show_enter:
                    l(level, "{0}> {1} {2}".format(" "*trace_depth.value,
                                                   args, kwargs))
                try:
                    result = function(*args, **kwargs)
                except:
                    # Capture the exception so the exit line reports it, then re-raise.
                    _, result, _ = sys.exc_info()
                    raise
                finally:
                    if show_exit:
                        l(level, "{0}< return {1} [{2:.2f} sec]".format(
                            " "*trace_depth.value, result, time() - start))
            finally:
                # Restore the depth even if the function raised.
                trace_depth.value -= 1
            return result
        return thunk
    return wrap
class LogFilter(logging.Filter):
    """Filter (and context manager) that drops records whose message
    matches *message_regex* while attached to *logger*."""

    def __init__(self, logger, message_regex):
        super(LogFilter, self).__init__()
        self.logger = logger
        self.message_regex = re.compile(message_regex)

    def __enter__(self):
        # Attach the filter for the duration of the `with` block.
        self.logger.addFilter(self)
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.logger.removeFilter(self)

    def filter(self, record):
        # False (drop) when the message matches the pattern.
        matched = self.message_regex.match(record.getMessage())
        return not matched
class LiteralFilter(logging.Filter):
    """Filter dropping records whose message is exactly one of *literals*."""

    def __init__(self, literals):
        super(LiteralFilter, self).__init__()
        self.literals = literals

    def filter(self, record):
        # False (drop) when the rendered message equals any known literal.
        message = record.getMessage()
        for literal in self.literals:
            if message == literal:
                return False
        return True
# Silence well-known, harmless ROOT warnings emitted by the arithmetic
# methods of every TH{1,2,3}{C,S,I,F,D} histogram class by attaching a
# LiteralFilter matching the exact warning text.
# NOTE(review): log[...] presumably returns the named child logger
# (ExtendedLogger item access) -- confirm against extended_logger.
for histtype in 'CSIFD':
    for dimen in '123':
        log["/ROOT.TH{0}{1}.Add".format(dimen, histtype)].addFilter(
            LiteralFilter([
                "Attempt to add histograms with different axis limits",]))
        log["/ROOT.TH{0}{1}.Divide".format(dimen, histtype)].addFilter(
            LiteralFilter([
                "Attempt to divide histograms with different axis limits",]))
        log["/ROOT.TH{0}{1}.Multiply".format(dimen, histtype)].addFilter(
            LiteralFilter([
                "Attempt to multiply histograms with different axis limits",]))
| true | true |
f71c5653c53151aeb46ae97e28196d989957f8df | 586 | py | Python | data/scripts/templates/object/mobile/shared_swirl_prong_hue.py | obi-two/GameServer | 7d37024e2291a97d49522610cd8f1dbe5666afc2 | [
"MIT"
] | 20 | 2015-02-23T15:11:56.000Z | 2022-03-18T20:56:48.000Z | data/scripts/templates/object/mobile/shared_swirl_prong_hue.py | apathyboy/swganh | 665128efe9154611dec4cb5efc61d246dd095984 | [
"MIT"
] | null | null | null | data/scripts/templates/object/mobile/shared_swirl_prong_hue.py | apathyboy/swganh | 665128efe9154611dec4cb5efc61d246dd095984 | [
"MIT"
] | 20 | 2015-04-04T16:35:59.000Z | 2022-03-24T14:54:37.000Z | #### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
result = Creature()
result.template = "object/mobile/shared_swirl_prong_hue.iff"
result.attribute_template_id = 9
result.stfName("monster_name","swirl_prong")
#### BEGIN MODIFICATIONS ####
result.setStringAttribute("radial_filename", "radials/player_pet.py")
result.options_mask = 0x100
result.pvp_status = PVPSTATUS.PvPStatus_None
#### END MODIFICATIONS ####
return result | 29.3 | 70 | 0.750853 | true | true | |
f71c568fa66d427f4b77e6e61c5dc1c0fa4fdaf1 | 815 | py | Python | src/lists/tests/test_home_page.py | dmitricus/django-docker | 46e99dc4f3d902e7fda56f85260358f80f505297 | [
"MIT"
] | null | null | null | src/lists/tests/test_home_page.py | dmitricus/django-docker | 46e99dc4f3d902e7fda56f85260358f80f505297 | [
"MIT"
] | null | null | null | src/lists/tests/test_home_page.py | dmitricus/django-docker | 46e99dc4f3d902e7fda56f85260358f80f505297 | [
"MIT"
] | null | null | null | from django.urls import resolve
from django.test import TestCase
from django.http import HttpRequest
from lists.views import home_page
class HomePageTest(TestCase):
    """Home page tests."""
    def test_root_url_resolve_to_home_page_view(self):
        """Test: the root url resolves to the home page view."""
        found = resolve('/lists/')
        self.assertEqual(found.func, home_page)
    def test_home_page_returns_correct_html(self):
        """Test: the home page returns the correct html."""
        request = HttpRequest()
        response = home_page(request)
        html = response.content.decode('utf-8')
        self.assertTrue(html.startswith('<html>'))
        self.assertIn('<title>To-Do lists</title>', html)
        self.assertTrue(html.endswith('</html>'))
| 33.958333 | 80 | 0.696933 | from django.urls import resolve
from django.test import TestCase
from django.http import HttpRequest
from lists.views import home_page
class HomePageTest(TestCase):
    """Tests for the home page view."""
    def test_root_url_resolve_to_home_page_view(self):
        """The /lists/ URL should resolve to the home_page view function."""
        found = resolve('/lists/')
        self.assertEqual(found.func, home_page)
    def test_home_page_returns_correct_html(self):
        """home_page should render a complete HTML document with the expected title."""
        request = HttpRequest()
        response = home_page(request)
        html = response.content.decode('utf-8')
        self.assertTrue(html.startswith('<html>'))
        self.assertIn('<title>To-Do lists</title>', html)
        self.assertTrue(html.endswith('</html>'))
| true | true |
f71c5890e794a661a56497593ddf0dfcf0ad6fc7 | 9,686 | py | Python | main.py | trueleo/python-teletootbot | d033a79b05a13d10bd2d4f0bd68d4ecaa47f3cb3 | [
"MIT"
] | 4 | 2019-04-19T12:58:51.000Z | 2021-07-27T01:12:41.000Z | main.py | trueleo/python-teletootbot | d033a79b05a13d10bd2d4f0bd68d4ecaa47f3cb3 | [
"MIT"
] | null | null | null | main.py | trueleo/python-teletootbot | d033a79b05a13d10bd2d4f0bd68d4ecaa47f3cb3 | [
"MIT"
] | null | null | null | from telegram.ext import MessageHandler, Filters, CommandHandler, Updater
from mastodon import MastodonIllegalArgumentError, MastodonUnauthorizedError
import DataHandler
import threading
import os
import sys
import logging
import certifi
import urllib3
import re
# Telegram bot API token placeholder.
# NOTE(review): never commit a real token; the commented lines below show
# the intended way of loading it from a local 'secretbot' file.
bot_token = '<your bot token here>'
# secretfile = open('secretbot', 'r')
# secret = secretfile.readline().rstrip('\n')
# bot_token = secret
logging.basicConfig(
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s', level=logging.INFO)
# Visibility applied to every toot posted by the bot.
test_visibility = 'public'
# Pending Telegram media groups: "<chat_id><group_id>" -> list of queued files.
group_media_queue = {}
# Cache of loaded Mastodon account objects, keyed by Telegram chat id.
lookup_dict = {}
# Short alias for the toot container class.
tootObject = DataHandler.mastodonapi.TootObject
def geturl(url_string):
    """Fetch *url_string* over HTTPS (following redirects) and return the
    leading URL portion matched by the character-class regex -- for an
    instance root URL this is scheme plus host with a trailing slash."""
    pool = urllib3.PoolManager(cert_reqs='CERT_REQUIRED', ca_certs=certifi.where(), num_pools=1)
    final_url = pool.urlopen('GET', url_string).geturl()
    match = re.search(r'([://a-z.0-9]+/)', final_url, re.I)
    return match.group(0)
def load_account(chat_id, force_reload=False):
    """Return the Mastodon account object for *chat_id*, using the module
    cache.

    The original implementation raised KeyError on force_reload to reuse
    the cache-miss branch; a plain membership test expresses the same
    logic without exceptions-as-control-flow.
    """
    # Serve from the cache unless the caller explicitly asks for a refresh.
    if not force_reload and chat_id in lookup_dict:
        return lookup_dict[chat_id]
    account = DataHandler.account_object(chat_id)
    lookup_dict[chat_id] = account
    return account
def download(file_id, telegram_file_object):
    """Download a Telegram media file to the working directory and return
    the local file name ('media-<file_id><ext>')."""
    # Reuse the extension of the remote file path.
    extension = re.search(r'\.[0-9a-z]+$', telegram_file_object.file_path).group()
    local_name = 'media-{0}{1}'.format(file_id, extension)
    telegram_file_object.download(local_name)
    return local_name
def process_group_media(chat_id, key):
    """Flush a queued Telegram media group: download every file, post them
    all as one toot, then delete the local copies."""
    # Remove the group entry so any late arrivals start a fresh queue.
    files = group_media_queue.pop(key)
    toot_object = tootObject()
    for file_tuple in files:
        file_id = file_tuple[0]
        telegram_file_object = file_tuple[1]
        caption = file_tuple[2]
        media_name = download(file_id, telegram_file_object)
        toot_object.append(text=caption, media=media_name)
    tooting(chat_id, toot_object, test_visibility)
    # Clean up the downloaded media files once the toot has been sent.
    for media in toot_object.medias:
        os.remove(media)
def add_to_group_media_queue(chat_id, group_id, file_id, telegram_file_object, caption):
    """Queue one file of a Telegram media group (album).

    The first file of a group schedules process_group_media to flush the
    whole group after 20 seconds; subsequent files just join the queue.

    The original try/except KeyError/finally was create-on-first-use via
    exception control flow, and its ``finally`` would raise NameError if
    the lookup failed for any other reason; a membership test is both
    clearer and safer.
    """
    key = str(chat_id) + str(group_id)
    if key not in group_media_queue:
        # First file of this group: schedule the flush and create the entry.
        threading.Timer(20, process_group_media, [chat_id, key]).start()
        group_media_queue[key] = []
    group_media_queue[key].append((file_id, telegram_file_object, caption))
def tooting(chat_id, tootobject, visibility):
    # Post the toot via the Mastodon account cached for this Telegram chat.
    load_account(chat_id).toot(tootobject, visibility)
def reply(context, chat_id, text):
    # Send a markdown-formatted reply back to the Telegram chat.
    context.bot.send_message(chat_id=chat_id, text=text, parse_mode='markdown')
def start(update, context):
    # /start command handler: greet the user and point at /help.
    context.bot.send_message(chat_id=update.message.chat_id,
                             text="Toot to Mastodon using this bot. See /help")
def add(update, context):
    """/add handler: register a Mastodon account (email, password,
    instance URL) for this Telegram chat."""
    chat_id = update.message.chat_id
    try:
        # Exactly three arguments required: email, password, instance URL.
        assert len(context.args) == 3
    except AssertionError:
        reply(context, chat_id, 'usage:`\n/add <user_email> <password> <full_instance_url>`\nexample: `/add john@doe.com cyberpunk277 https://mastodon.social/`')
        return
    else:
        username = context.args[0]
        # Normalise the instance URL by following redirects (see geturl).
        instance = geturl(context.args[2])
        password = context.args[1]
        try:
            new_account = DataHandler.insert_account( chat_id,
                                                        username,
                                                        instance,
                                                        password)
            reply(context, chat_id, 'Account added successfully')
        except MastodonIllegalArgumentError:
            reply(context, chat_id, 'Authentication failed')
            reply(context, chat_id, 'usage:`\n/add <user_email> <password> <full_instance_url>`\nexample: `\add john@doe.com cyberpunk277 https://mastodon.social/`')
        except MastodonUnauthorizedError:
            reply(context, chat_id, 'Authentication failed')
        except DataHandler.InsertError:
            reply(context, chat_id, 'Account already registered')
        except:
            # NOTE(review): bare except hides unexpected failures; consider
            # narrowing it and logging the caught exception.
            reply(context, chat_id, 'Oops!, Something gone wrong. Check and try again')
        else:
            # Cache the object when this is the chat's first (and thus
            # default) account; the default slot is always reset to 1.
            if isinstance(new_account, DataHandler.mastodonapi.MastodonAccount) and (DataHandler.number_of_accounts(chat_id) == 1):
                lookup_dict[chat_id] = new_account
            DataHandler.upsert_user(chat_id, 1)
            reply(context, chat_id, 'Great!, You can use /listall to list your currently registered accounts')
def setdefault(update, context):
    """/setdefault handler: make account number N (from /listall) the
    active account for this chat."""
    chat_id = update.message.chat_id
    number_of_accounts = DataHandler.number_of_accounts(chat_id)
    if number_of_accounts == 0:
        reply(context, chat_id, 'You have not registered any mastodon account yet')
        return
    if number_of_accounts == 1:
        # Only one account: nothing to switch, just report it.
        acc = DataHandler.account_info(chat_id)
        reply(context, chat_id, "Your only registered account is `{}` at `{}`".format(acc[0], acc[1]))
        return
    try:
        newDefault = int(context.args[0])
        if newDefault <= number_of_accounts:
            DataHandler.upsert_user(chat_id, newDefault)
            # Refresh the cached account so subsequent toots use it.
            accountObj = load_account(chat_id, force_reload=True)
            reply(context, chat_id,
                  "Now you can toot to your account `{}` at `{}`".format(
                      accountObj.user,
                      accountObj.instance))
        else:
            reply(context, chat_id,
                  "You need to specify right account number as given in /listall")
    except:
        # NOTE(review): bare except doubles as "bad/missing argument"
        # handling (ValueError/IndexError) but also hides real errors.
        reply(context, chat_id, "`/setdefault` <number>")
def delete(update, context):
    """/delete handler: remove account number N (from /listall) of this
    chat; with a single registered account, removes the whole user."""
    chat_id = update.message.chat_id
    number_of_accounts = DataHandler.number_of_accounts(chat_id)
    if number_of_accounts == 0:
        reply(context, chat_id,
              'You don\'t have any registered account(s) to delete')
    elif number_of_accounts == 1:
        # Single account: drop the user record and its cached object.
        DataHandler.delete_user(chat_id)
        lookup_dict.pop(chat_id)
    else:
        try:
            acc_num = int(context.args[0])
            if acc_num > number_of_accounts:
                reply(context, chat_id, "You need to specify right account number as given in /listall")
                return
            current_default = DataHandler.get_default_acc(chat_id)
            id_to_delete = DataHandler.account_id(chat_id, acc_num)
            DataHandler.delete_account(id_to_delete)
            # Deleting the default account: fall back to account 1 and
            # tell the user (no confirmation is sent otherwise).
            if id_to_delete == current_default:
                DataHandler.upsert_user(chat_id, 1)
                load_account(chat_id, force_reload=True)
                account_info_tuple = DataHandler.account_info(chat_id)
                reply(context, chat_id, 'Your current default account is now set to {username} @ {instance}'.format(
                    username=account_info_tuple[0],
                    instance=account_info_tuple[1]))
        except:
            # NOTE(review): bare except hides real errors; consider
            # catching ValueError/IndexError explicitly.
            reply(context, chat_id, '`usage:`\n`/delete <number>`')
def deleteall(update, context):
    """/deleteall handler: remove every registered account for this chat,
    requiring an explicit 'yes' confirmation argument.

    The original used ``assert`` for input validation (stripped under
    ``python -O``) inside a bare except; an explicit check keeps the same
    behavior for missing/empty/wrong arguments without either pitfall.
    """
    chat_id = update.message.chat_id
    # Require the literal confirmation word; otherwise show usage.
    if not context.args or context.args[0] != 'yes':
        reply(context, chat_id, '`NOTE: delete all registered accounts \nusage:\n/deleteall yes`')
        return
    DataHandler.delete_user(chat_id)
    # Drop the cached account object if present (pop with default replaces
    # the old try/except KeyError: pass).
    lookup_dict.pop(chat_id, None)
def listall(update, context):
    """/listall handler: show every Mastodon account registered for this
    chat.

    Fixes the user-facing typo "currenly" -> "currently".
    """
    chat_id = update.message.chat_id
    text = DataHandler.all_accounts(chat_id)
    reply(context, chat_id, "currently registered accounts\n" + text)
def media(update, context):
    """Handler for photo messages: toot a single photo immediately, or
    queue it when it is part of a Telegram media group (album)."""
    chat_id = update.message.chat_id
    # photo[-1] is the largest available resolution of the picture.
    file_id = update.message.photo[-1].file_id
    new_file = context.bot.get_file(file_id)
    if update.message.media_group_id:
        # Album member: collect it; the 20 s timer posts the whole batch.
        add_to_group_media_queue(chat_id, update.message.media_group_id,
                             file_id, new_file, update.message.caption)
    else:
        try:
            media_name = download(file_id, new_file)
            tooting(chat_id, tootObject(update.message.caption, media_name), test_visibility)
        except DataHandler.NoDataError:
            reply(context, chat_id, 'Please add an account first using /add')
def text(update, context):
    """Handler for plain text messages: toot the text with the default
    visibility, or ask the user to register an account first."""
    chat_id = update.message.chat_id
    try:
        message_toot = tootObject(update.message.text)
        tooting(chat_id, message_toot, test_visibility)
    except DataHandler.NoDataError:
        reply(context, chat_id, 'Please add an account first using /add')
def helpcommand(update, context):
    """/help handler: describe the bot and list the available commands."""
    # NOTE(review): the help text contains typos ("can to post",
    # "availible") that a behavior-changing edit could fix.
    chat_id = update.message.chat_id
    reply(context, chat_id, "With TeleToot Bot you can to post on any Mastodon account's public timeline. Currently you can only post on one account at a time although you can authenticate various accounts and switch between them\n`availible commands:\n`/add\n/listall\n/setdefault\n/delete\n/deleteall")
    reply(context, chat_id, "To start tooting using your mastodon account send `/add <registered email> <password> <instance_url>`. See /add for more detail")
# Wire up the python-telegram-bot updater/dispatcher pair.
updater = Updater(bot_token, use_context=True)
dispatcher = updater.dispatcher
# Functions registered as /commands named after themselves (see load_commands).
list_of_commands = [start, add, listall, setdefault, delete, deleteall]
def load_commands(commands):
    """Register each function as a /command named after the function itself."""
    for handler_func in commands:
        dispatcher.add_handler(CommandHandler(handler_func.__name__, handler_func))
media_handler = MessageHandler(Filters.photo | (Filters.text & Filters.photo),
media, pass_job_queue=True)
text_handler = MessageHandler(Filters.text, text)
dispatcher.add_handler(media_handler)
dispatcher.add_handler(text_handler)
dispatcher.add_handler(CommandHandler('help', helpcommand))
updater.start_polling(poll_interval=1.0, timeout=60)
updater.idle()
| 40.024793 | 305 | 0.661986 | from telegram.ext import MessageHandler, Filters, CommandHandler, Updater
from mastodon import MastodonIllegalArgumentError, MastodonUnauthorizedError
import DataHandler
import threading
import os
import sys
import logging
import certifi
import urllib3
import re
bot_token = '<your bot token here>'
logging.basicConfig(
format='%(asctime)s - %(name)s - %(levelname)s - %(message)s', level=logging.INFO)
test_visibility = 'public'
group_media_queue = {}
lookup_dict = {}
tootObject = DataHandler.mastodonapi.TootObject
def geturl(url_string):
man = urllib3.PoolManager(cert_reqs='CERT_REQUIRED', ca_certs=certifi.where() ,num_pools=1)
response = man.urlopen('GET', url_string)
rurl = response.geturl()
return re.search(r'([://a-z.0-9]+/)', rurl, re.I).group(0)
def load_account(chat_id, force_reload=False):
try:
if force_reload:
raise KeyError
return lookup_dict[chat_id]
except KeyError:
account = DataHandler.account_object(chat_id)
lookup_dict[chat_id] = account
return account
def download(file_id, telegram_file_object):
file_url = telegram_file_object.file_path
file_ext = re.search(r'\.[0-9a-z]+$', file_url).group()
media_name = 'media-' + str(file_id) + file_ext
telegram_file_object.download(media_name)
return media_name
def process_group_media(chat_id, key):
files = group_media_queue.pop(key)
toot_object = tootObject()
for file_tuple in files:
file_id = file_tuple[0]
telegram_file_object = file_tuple[1]
caption = file_tuple[2]
media_name = download(file_id, telegram_file_object)
toot_object.append(text=caption, media=media_name)
tooting(chat_id, toot_object, test_visibility)
for media in toot_object.medias:
os.remove(media)
def add_to_group_media_queue(chat_id, group_id, file_id, telegram_file_object, caption):
key = str(chat_id) + str(group_id)
try:
media_container = group_media_queue[key]
except KeyError:
threading.Timer(20, process_group_media, [chat_id, key]).start()
media_container = []
group_media_queue[key] = media_container
finally:
media_container.append( (file_id, telegram_file_object, caption) )
def tooting(chat_id, tootobject, visibility):
load_account(chat_id).toot(tootobject, visibility)
def reply(context, chat_id, text):
context.bot.send_message(chat_id=chat_id, text=text, parse_mode='markdown')
def start(update, context):
context.bot.send_message(chat_id=update.message.chat_id,
text="Toot to Mastodon using this bot. See /help")
def add(update, context):
chat_id = update.message.chat_id
try:
assert len(context.args) == 3
except AssertionError:
reply(context, chat_id, 'usage:`\n/add <user_email> <password> <full_instance_url>`\nexample: `/add john@doe.com cyberpunk277 https://mastodon.social/`')
return
else:
username = context.args[0]
instance = geturl(context.args[2])
password = context.args[1]
try:
new_account = DataHandler.insert_account( chat_id,
username,
instance,
password)
reply(context, chat_id, 'Account added successfully')
except MastodonIllegalArgumentError:
reply(context, chat_id, 'Authentication failed')
reply(context, chat_id, 'usage:`\n/add <user_email> <password> <full_instance_url>`\nexample: `\add john@doe.com cyberpunk277 https://mastodon.social/`')
except MastodonUnauthorizedError:
reply(context, chat_id, 'Authentication failed')
except DataHandler.InsertError:
reply(context, chat_id, 'Account already registered')
except:
reply(context, chat_id, 'Oops!, Something gone wrong. Check and try again')
else:
if isinstance(new_account, DataHandler.mastodonapi.MastodonAccount) and (DataHandler.number_of_accounts(chat_id) == 1):
lookup_dict[chat_id] = new_account
DataHandler.upsert_user(chat_id, 1)
reply(context, chat_id, 'Great!, You can use /listall to list your currently registered accounts')
def setdefault(update, context):
chat_id = update.message.chat_id
number_of_accounts = DataHandler.number_of_accounts(chat_id)
if number_of_accounts == 0:
reply(context, chat_id, 'You have not registered any mastodon account yet')
return
if number_of_accounts == 1:
acc = DataHandler.account_info(chat_id)
reply(context, chat_id, "Your only registered account is `{}` at `{}`".format(acc[0], acc[1]))
return
try:
newDefault = int(context.args[0])
if newDefault <= number_of_accounts:
DataHandler.upsert_user(chat_id, newDefault)
accountObj = load_account(chat_id, force_reload=True)
reply(context, chat_id,
"Now you can toot to your account `{}` at `{}`".format(
accountObj.user,
accountObj.instance))
else:
reply(context, chat_id,
"You need to specify right account number as given in /listall")
except:
reply(context, chat_id, "`/setdefault` <number>")
def delete(update, context):
chat_id = update.message.chat_id
number_of_accounts = DataHandler.number_of_accounts(chat_id)
if number_of_accounts == 0:
reply(context, chat_id,
'You don\'t have any registered account(s) to delete')
elif number_of_accounts == 1:
DataHandler.delete_user(chat_id)
lookup_dict.pop(chat_id)
else:
try:
acc_num = int(context.args[0])
if acc_num > number_of_accounts:
reply(context, chat_id, "You need to specify right account number as given in /listall")
return
current_default = DataHandler.get_default_acc(chat_id)
id_to_delete = DataHandler.account_id(chat_id, acc_num)
DataHandler.delete_account(id_to_delete)
if id_to_delete == current_default:
DataHandler.upsert_user(chat_id, 1)
load_account(chat_id, force_reload=True)
account_info_tuple = DataHandler.account_info(chat_id)
reply(context, chat_id, 'Your current default account is now set to {username} @ {instance}'.format(
username=account_info_tuple[0],
instance=account_info_tuple[1]))
except:
reply(context, chat_id, '`usage:`\n`/delete <number>`')
def deleteall(update, context):
chat_id = update.message.chat_id
try:
assert (context.args[0] == 'yes')
except:
reply(context, chat_id, '`NOTE: delete all registered accounts \nusage:\n/deleteall yes`')
else:
DataHandler.delete_user(chat_id)
try:
lookup_dict.pop(chat_id)
except KeyError:
pass
def listall(update, context):
chat_id = update.message.chat_id
text = DataHandler.all_accounts(chat_id)
reply(context, chat_id, "currenly registered accounts\n" + text)
def media(update, context):
chat_id = update.message.chat_id
file_id = update.message.photo[-1].file_id
new_file = context.bot.get_file(file_id)
if update.message.media_group_id:
add_to_group_media_queue(chat_id, update.message.media_group_id,
file_id, new_file, update.message.caption)
else:
try:
media_name = download(file_id, new_file)
tooting(chat_id, tootObject(update.message.caption, media_name), test_visibility)
except DataHandler.NoDataError:
reply(context, chat_id, 'Please add an account first using /add')
def text(update, context):
chat_id = update.message.chat_id
try:
tooting(chat_id, tootObject(update.message.text), test_visibility)
except DataHandler.NoDataError:
reply(context, chat_id, 'Please add an account first using /add')
def helpcommand(update, context):
chat_id = update.message.chat_id
reply(context, chat_id, "With TeleToot Bot you can to post on any Mastodon account's public timeline. Currently you can only post on one account at a time although you can authenticate various accounts and switch between them\n`availible commands:\n`/add\n/listall\n/setdefault\n/delete\n/deleteall")
reply(context, chat_id, "To start tooting using your mastodon account send `/add <registered email> <password> <instance_url>`. See /add for more detail")
updater = Updater(bot_token, use_context=True)
dispatcher = updater.dispatcher
list_of_commands = [start, add, listall, setdefault, delete, deleteall]
def load_commands(commands):
for command in commands:
dispatcher.add_handler(CommandHandler(command.__name__, command))
load_commands(list_of_commands)
media_handler = MessageHandler(Filters.photo | (Filters.text & Filters.photo),
media, pass_job_queue=True)
text_handler = MessageHandler(Filters.text, text)
dispatcher.add_handler(media_handler)
dispatcher.add_handler(text_handler)
dispatcher.add_handler(CommandHandler('help', helpcommand))
updater.start_polling(poll_interval=1.0, timeout=60)
updater.idle()
| true | true |
f71c58960b4d96b75911c82859c3b22907774f53 | 864 | py | Python | texteditor.py | p10rahulm/python-basics | d8f6172d42c465382d672a6813dccfbe6dff45a7 | [
"MIT"
] | null | null | null | texteditor.py | p10rahulm/python-basics | d8f6172d42c465382d672a6813dccfbe6dff45a7 | [
"MIT"
] | null | null | null | texteditor.py | p10rahulm/python-basics | d8f6172d42c465382d672a6813dccfbe6dff45a7 | [
"MIT"
] | null | null | null |
from tkinter import *
import tkinter.filedialog as tkFileDialog
root = Tk("Text Editor")
text = Text(root)
text.grid()
def saveas():
    """Ask the user for a target path and save the editor contents there.

    Fixes two defects: asksaveasfilename returns an empty string when the
    dialog is cancelled (the old code then crashed on open('')), and the
    file handle was leaked if write() raised.
    """
    global text
    contents = text.get("1.0", "end-1c")
    savelocation = tkFileDialog.asksaveasfilename()
    if not savelocation:
        # Dialog cancelled: nothing to save.
        return
    # `with` guarantees the file is closed even if the write fails.
    with open(savelocation, "w+") as out_file:
        out_file.write(contents)
button=Button(root, text="Save", command=saveas)
button.grid()
def FontHelvetica():
    # Menu command: switch the text widget to the Helvetica font.
    global text
    text.config(font="Helvetica")
def FontCourier():
    # Menu command: switch the text widget to the Courier font.
    global text
    text.config(font="Courier")
# Build the "Font" menubutton with one checkbutton per available font.
font=Menubutton(root, text="Font")
font.grid()
font.menu=Menu(font, tearoff=0)
font["menu"]=font.menu
# NOTE(review): `arial` and `times` are created but never wired to a
# checkbutton -- presumably leftovers from planned menu entries.
Helvetica=IntVar()
arial=IntVar()
times=IntVar()
Courier=IntVar()
font.menu.add_checkbutton(label="Courier", variable=Courier,command=FontCourier)
font.menu.add_checkbutton(label="Helvetica", variable=Helvetica,command=FontHelvetica)
root.mainloop() | 23.351351 | 86 | 0.725694 |
from tkinter import *
import tkinter.filedialog as tkFileDialog
root = Tk("Text Editor")
text = Text(root)
text.grid()
def saveas():
global text
t = text.get("1.0", "end-1c")
savelocation= tkFileDialog.asksaveasfilename()
file1=open(savelocation, "w+")
file1.write(t)
file1.close()
button=Button(root, text="Save", command=saveas)
button.grid()
def FontHelvetica():
global text
text.config(font="Helvetica")
def FontCourier():
global text
text.config(font="Courier")
font=Menubutton(root, text="Font")
font.grid()
font.menu=Menu(font, tearoff=0)
font["menu"]=font.menu
Helvetica=IntVar()
arial=IntVar()
times=IntVar()
Courier=IntVar()
font.menu.add_checkbutton(label="Courier", variable=Courier,command=FontCourier)
font.menu.add_checkbutton(label="Helvetica", variable=Helvetica,command=FontHelvetica)
root.mainloop() | true | true |
f71c58990649e0b8588522179070c5aec9ae9d99 | 93 | py | Python | moopy/comments/admin.py | qrizan/moopy | 10459351727710c77279f24f224786622abc91b8 | [
"MIT"
] | 1 | 2017-01-15T21:58:06.000Z | 2017-01-15T21:58:06.000Z | moopy/comments/admin.py | qrizan/moopy | 10459351727710c77279f24f224786622abc91b8 | [
"MIT"
] | null | null | null | moopy/comments/admin.py | qrizan/moopy | 10459351727710c77279f24f224786622abc91b8 | [
"MIT"
] | null | null | null | from django.contrib import admin
from .models import Comment
# Expose the Comment model in the Django admin interface.
admin.site.register(Comment)
| 13.285714 | 32 | 0.806452 | from django.contrib import admin
from .models import Comment
admin.site.register(Comment)
| true | true |
f71c58e4c78d5b8915f7be9b2f64999c3533da13 | 3,659 | py | Python | Python Data Structure/A1Q1_Letian Xu.py | XULetian/Python_Fundamental | b901014a3257649cf1b3205ee6c17ba35184cad7 | [
"MIT"
] | null | null | null | Python Data Structure/A1Q1_Letian Xu.py | XULetian/Python_Fundamental | b901014a3257649cf1b3205ee6c17ba35184cad7 | [
"MIT"
] | null | null | null | Python Data Structure/A1Q1_Letian Xu.py | XULetian/Python_Fundamental | b901014a3257649cf1b3205ee6c17ba35184cad7 | [
"MIT"
] | null | null | null | # Letian Xu
# 01/10/2019
# I have not given or received any unauthorized assistance on this assignment.
def overlap(s1,s2):
    '''
    Arguments: s1 and s2 are [x, y, side] lists, each describing a square
        whose lower-left corner is (x, y) and whose side length is side.
    Return: The area of the overlap of the two squares; 0 if the squares
        do not overlap.
    '''
    # Overlap length along one axis is the distance between the rightmost
    # left edge and the leftmost right edge, clamped at zero when the
    # intervals are disjoint.  This is O(1) arithmetic instead of the
    # original O(side) range()/set() intersection, and it also works for
    # non-integer coordinates (the old version only handled integers).
    x = max(0, min(s1[0] + s1[2], s2[0] + s2[2]) - max(s1[0], s2[0]))
    y = max(0, min(s1[1] + s1[2], s2[1] + s2[2]) - max(s1[1], s2[1]))
    return x * y
totalScore = 0
S1 = [1,5,3]
S2 = [5,6,2]
S3 = [2,1,2]
S4 = [9,6,2]
S5 = [7,2,3]
S6 = [3,2,5]
S7 = [5,3,1]
# Data-driven replacement for seven copy-pasted test blocks: each case is
# (square tested against S6, expected overlap area).  The printed output
# is byte-identical to the original unrolled version.
test_cases = [(S1, 2), (S2, 2), (S3, 1), (S4, 0), (S5, 3), (S6, 25), (S7, 1)]
for test_number, (square, answer) in enumerate(test_cases, start=1):
    print( "Test " + str(test_number) + ": " + str(square) + str(S6) )
    print( "Correct Answer: " + str(answer) )
    # Each case is checked in both argument orders (overlap is symmetric).
    r1 = overlap(square, S6)
    r2 = overlap(S6, square)
    print( "Result 1: " + str(r1) )
    print( "Result 2: " + str(r2) )
    s1 = 0
    if r1 == answer:
        s1 = s1 + 1
    if r2 == answer:
        s1 = s1 + 1
    print( "Score: " + str(s1) )
    print()
    totalScore = totalScore + s1
print ( "Total Score: " + str(totalScore) )
print ( "Percentage: " + str(100*totalScore/14) )
| 22.447853 | 96 | 0.496037 |
def overlap(s1,s2):
    """Return the overlap area of two squares given as [x, y, side]
    (lower-left corner plus side length); 0 when they do not overlap."""
    # Clamped interval arithmetic: O(1) instead of the original O(side)
    # range()/set() intersections, and it also accepts non-integer
    # coordinates (the old version only handled integers).
    x = max(0, min(s1[0] + s1[2], s2[0] + s2[2]) - max(s1[0], s2[0]))
    # and this is the lenth of two squares' overlap on y-axis
    y = max(0, min(s1[1] + s1[2], s2[1] + s2[2]) - max(s1[1], s2[1]))
    return x * y
totalScore = 0
S1 = [1,5,3]
S2 = [5,6,2]
S3 = [2,1,2]
S4 = [9,6,2]
S5 = [7,2,3]
S6 = [3,2,5]
S7 = [5,3,1]
print( "Test 1: " + str(S1) + str(S6) )
print( "Correct Answer: 2" )
r1 = overlap(S1,S6)
r2 = overlap(S6,S1)
print( "Result 1: " + str(r1) )
print( "Result 2: " + str(r2) )
s1 = 0
if r1 == 2:
s1 = s1 + 1
if r2 == 2:
s1 = s1 + 1
print( "Score: " + str(s1) )
print()
totalScore = totalScore + s1
print( "Test 2: " + str(S2) + str(S6) )
print( "Correct Answer: 2" )
r1 = overlap(S2,S6)
r2 = overlap(S6,S2)
print( "Result 1: " + str(r1) )
print( "Result 2: " + str(r2) )
s1 = 0
if r1 == 2:
s1 = s1 + 1
if r2 == 2:
s1 = s1 + 1
print( "Score: " + str(s1) )
print()
totalScore = totalScore + s1
print( "Test 3: " + str(S3) + str(S6) )
print( "Correct Answer: 1" )
r1 = overlap(S3,S6)
r2 = overlap(S6,S3)
print( "Result 1: " + str(r1) )
print( "Result 2: " + str(r2) )
s1 = 0
if r1 == 1:
s1 = s1 + 1
if r2 == 1:
s1 = s1 + 1
print( "Score: " + str(s1) )
print()
totalScore = totalScore + s1
print( "Test 4: " + str(S4) + str(S6) )
print( "Correct Answer: 0" )
r1 = overlap(S4,S6)
r2 = overlap(S6,S4)
print( "Result 1: " + str(r1) )
print( "Result 2: " + str(r2) )
s1 = 0
if r1 == 0:
s1 = s1 + 1
if r2 == 0:
s1 = s1 + 1
print( "Score: " + str(s1) )
print()
totalScore = totalScore + s1
print( "Test 5: " + str(S5) + str(S6) )
print( "Correct Answer: 3" )
r1 = overlap(S5,S6)
r2 = overlap(S6,S5)
print( "Result 1: " + str(r1) )
print( "Result 2: " + str(r2) )
s1 = 0
if r1 == 3:
s1 = s1 + 1
if r2 == 3:
s1 = s1 + 1
print( "Score: " + str(s1) )
print()
totalScore = totalScore + s1
print( "Test 6: " + str(S6) + str(S6) )
print( "Correct Answer: 25" )
r1 = overlap(S6,S6)
r2 = overlap(S6,S6)
print( "Result 1: " + str(r1) )
print( "Result 2: " + str(r2) )
s1 = 0
if r1 == 25:
s1 = s1 + 1
if r2 == 25:
s1 = s1 + 1
print( "Score: " + str(s1) )
print()
totalScore = totalScore + s1
print( "Test 7: " + str(S7) + str(S6) )
print( "Correct Answer: 1" )
r1 = overlap(S7,S6)
r2 = overlap(S6,S7)
print( "Result 1: " + str(r1) )
print( "Result 2: " + str(r2) )
s1 = 0
if r1 == 1:
s1 = s1 + 1
if r2 == 1:
s1 = s1 + 1
print( "Score: " + str(s1) )
print()
totalScore = totalScore + s1
print ( "Total Score: " + str(totalScore) )
print ( "Percentage: " + str(100*totalScore/14) )
| true | true |
f71c5a07ef97232bc9ba249f594bbcbea596ebe9 | 747 | py | Python | pywhatkit/wwd.py | apoorvthedude/PyWhatKit | edc60ad44c11d4700b531be1bb86639ff3fce270 | [
"MIT"
] | 2 | 2022-01-15T00:53:30.000Z | 2022-01-16T22:56:23.000Z | pywhatkit/wwd.py | apoorvthedude/PyWhatKit | edc60ad44c11d4700b531be1bb86639ff3fce270 | [
"MIT"
] | null | null | null | pywhatkit/wwd.py | apoorvthedude/PyWhatKit | edc60ad44c11d4700b531be1bb86639ff3fce270 | [
"MIT"
] | null | null | null | import webbrowser as web
def tutorial_hindi() -> None:
    """Open the Hindi-language YouTube tutorial for this library."""
    video_url = "https://youtu.be/o6WV9zFJg1o"
    web.open(video_url)
def tutorial_english() -> None:
    """Open the English-language YouTube tutorial for this library."""
    video_url = "https://youtu.be/vpfrwpO_HKY"
    web.open(video_url)
def developer_contact() -> None:
    """Print where to reach the developers with feedback (GitHub repo)."""
    link = "https://github.com/Ankit404butfound/PyWhatKit"
    message = f"You can reach out to us on GitHub {link} for help regarding any issues related to the module."
    print(message)
def join_discord() -> None:
    """Open the invite link for the project's Discord server."""
    invite_url = "https://discord.gg/62Yf5mushu"
    web.open(invite_url)
| 27.666667 | 108 | 0.670683 | import webbrowser as web
def tutorial_hindi() -> None:
web.open("https://youtu.be/o6WV9zFJg1o")
def tutorial_english() -> None:
web.open("https://youtu.be/vpfrwpO_HKY")
def developer_contact() -> None:
link = "https://github.com/Ankit404butfound/PyWhatKit"
print(f"You can reach out to us on GitHub {link} for help regarding any issues related to the module.")
def join_discord() -> None:
web.open("https://discord.gg/62Yf5mushu")
| true | true |
f71c5aa86d59a64428303d427cd50c450b530ad6 | 6,559 | py | Python | tilde/parsers/__init__.py | fossabot/tilde-1 | 143810a711f00dc1c64a6eb10573986dddadfcef | [
"MIT"
] | null | null | null | tilde/parsers/__init__.py | fossabot/tilde-1 | 143810a711f00dc1c64a6eb10573986dddadfcef | [
"MIT"
] | null | null | null | tilde/parsers/__init__.py | fossabot/tilde-1 | 143810a711f00dc1c64a6eb10573986dddadfcef | [
"MIT"
] | null | null | null |
# Generic parser schema
# with the default values
# Author: Evgeny Blokhin
import os, sys
import re
import time
import math
import random
import hashlib
import base64
from ase.data import chemical_symbols
class Output:
    """Holds one parsed calculation (or a calculation set).

    Collects the structures, electronic / phonon / elastic data and the
    classification metadata produced by the parsers, and derives a stable,
    content-based checksum that serves as the calculation identity.
    """
    def __init__(self, filename='', calcset=False):
        self._filename = filename  # for quick and cheap checksums (NB never generate checksum from the entire calc file, which may be huge)
        self.data = ''             # file contents holder; may be empty for some parsers!
        self._checksum = None      # lazy cache filled by get_checksum(); NB do not use directly
        self._calcset = calcset
        self._nested_depth = 0
        self.download_size = 0
        self.related_files = []

        if self._calcset:
            # a calculation set only aggregates info of its members
            self.info = {}
            return

        self._starttime = time.time()

        self.structures = []   # list of ASE objects with additional properties
        self.convergence = []  # zero-point energy convergence (I)
        self.tresholds = []    # optimization convergence, list of 5 lists (II); name kept as-is (sic) for compatibility
        self.ncycles = []      # number of cycles at each optimisation step

        self.electrons = {
            #'rgkmax': None,
            'basis_set': None, # format depends on ansatz:
                               # LCAO Gaussians: {'bs': {}, 'ps': {}}
                               # PWs and LAPW: [atom1, ...]
            'eigvals': {},     # raw eigenvalues {k:{alpha:[], beta:[]},}
            'projected': [],   # raw eigenvalues [..., ...] for total DOS smearing
            'dos': {},         # in advance pre-computed DOS
            'bands': {}        # in advance pre-computed band structure
        }
        # NB own properties for CRYSTAL: impacts, proj_eigv_impacts, e_proj_eigvals (TODO)

        self.phonons = {
            'modes': {},
            'irreps': {},
            'ir_active': {},
            'raman_active': {},
            'ph_eigvecs': {},
            'ph_k_degeneracy': {},
            'dfp_disps': [],
            'dfp_magnitude': None,
            'dielectric_tensor': False,
            'zpe': None,
            'td': None
        }

        self.elastic = {}

        # modules output object
        self.apps = {}

        # classification and technical info object
        # NB API call *classify* extends it with the new items
        self.info = {
            'warns': [],
            'framework': 0x0, # code name
            'prog': 'unknown version', # code version
            'perf': None, # benchmarking
            'location': filename,
            'finished': 0x0,
            'duration': None,
            'input': None,
            'energy': None, # in eV
            'standard': '',
            'formula': '',
            'dims': False, # cell volume
            'periodicity': 0x0,
            'natom': 0,
            'elements': [],
            'contents': [],
            'lack': False,
            'expanded': False,
            'tags': [],
            'etype': 0x0,
            'bandgap': None, # in eV
            'bandgaptype': 0x0,
            'optgeom': False,
            'calctypes': [],
            'H': None,
            'H_types': [],
            'tol': None,
            'k': None,
            'kshift': None,
            'smear': None, # in a.u.
            'smeartype': None,
            'spin': 0x0,
            'lockstate': None,
            'ansatz': 0x0,
            'techs': [],
            'dtype': 0x0
        }

    @classmethod
    def iparse(cls, filename):
        """Default parser entry point: one calculation per file."""
        return [cls(filename)]

    def __getitem__(self, key):
        """Dictionary-style attribute read access."""
        return getattr(self, key)

    def __setitem__(self, key, value):
        """Dictionary-style attribute write access."""
        return setattr(self, key, value)

    def __repr__(self):
        out = ''
        # loop variable renamed from 'repr' so the builtin is not shadowed
        for attr in dir(self):
            if not hasattr(getattr(self, attr), '__call__') and attr != '__doc__':
                if attr == 'structures' and len(getattr(self, attr)):
                    if len(getattr(self, attr)) > 1:
                        out += attr + " ->\nINITIAL:\n" + str( getattr(self, attr)[0] ) + "\nFINAL:\n" + str( getattr(self, attr)[-1] ) + "\n\n"
                    else:
                        out += attr + " -> " + str( getattr(self, attr)[-1] ) + "\n\n"
                else:
                    str_repr = str( getattr(self, attr) )
                    if len(str_repr) < 2000:
                        out += attr + ' -> ' + str_repr + "\n\n"
                    else:
                        out += attr + ' -> ' + str_repr[:1000] + '...\n\n'
        return out

    def warning(self, msg):
        """Append a parser warning to the calculation metadata."""
        self.info['warns'].append(msg)

    def get_checksum(self):
        '''
        Retrieve unique hash in a cross-platform manner:
        this is how calculation identity is determined.

        Derived from the structures, the energy, the code version, the
        input and the calculation types. Computed once, then cached.
        '''
        if self._checksum:
            return self._checksum

        if not self._filename:
            raise RuntimeError('Source calc file is required in order to properly save the data!')

        calc_checksum = hashlib.sha224()
        struc_repr = ""
        for ase_obj in self.structures:
            struc_repr += "%3.6f %3.6f %3.6f %3.6f %3.6f %3.6f %3.6f %3.6f %3.6f " % tuple(map(abs, [ase_obj.cell[0][0], ase_obj.cell[0][1], ase_obj.cell[0][2], ase_obj.cell[1][0], ase_obj.cell[1][1], ase_obj.cell[1][2], ase_obj.cell[2][0], ase_obj.cell[2][1], ase_obj.cell[2][2]])) # NB beware of length & minus zeros

            for atom in ase_obj:
                struc_repr += "%s %3.6f %3.6f %3.6f " % tuple(map(abs, [chemical_symbols.index(atom.symbol), atom.x, atom.y, atom.z])) # NB beware of length & minus zeros

        if self.info["energy"] is None:
            energy = str(None)
        else:
            energy = str(round(self.info['energy'], 11 - int(math.log10(math.fabs(self.info['energy'])))))

        calc_checksum.update((
            struc_repr + "\n" +
            energy + "\n" +
            self.info['prog'] + "\n" +
            str(self.info['input']) + "\n" +
            str(sum([2**x for x in self.info['calctypes']]))
        ).encode('ascii')) # NB this is fixed and should not be changed

        result = base64.b32encode(calc_checksum.digest()).decode('ascii')
        result = result[:result.index('=')] + 'CI'
        # FIX: cache the computed value so the early-return above takes effect
        # (previously the hash was silently recomputed on every call)
        self._checksum = result
        return result

    def benchmark(self):
        """Record elapsed wall-clock time since parsing started."""
        self.info['perf'] = "%1.2f" % (time.time() - self._starttime)
| 35.646739 | 318 | 0.479646 |
import os, sys
import re
import time
import math
import random
import hashlib
import base64
from ase.data import chemical_symbols
class Output:
def __init__(self, filename='', calcset=False):
self._filename = filename
self.data = ''
self._checksum = None
self._calcset = calcset
self._nested_depth = 0
self.download_size = 0
self.related_files = []
if self._calcset:
self.info = {}
return
self._starttime = time.time()
self.structures = []
self.convergence = []
self.tresholds = []
self.ncycles = []
self.electrons = {
'basis_set': None,
'eigvals': {},
'projected': [],
'dos': {},
'bands': {}
}
self.phonons = {
'modes': {},
'irreps': {},
'ir_active': {},
'raman_active': {},
'ph_eigvecs': {},
'ph_k_degeneracy': {},
'dfp_disps': [],
'dfp_magnitude': None,
'dielectric_tensor':False,
'zpe': None,
'td': None
}
self.elastic = {}
self.apps = {}
self.info = {
'warns': [],
'framework': 0x0,
'prog': 'unknown version',
'perf': None,
'location': filename,
'finished': 0x0,
'duration': None,
'input': None,
'energy': None,
'standard': '',
'formula': '',
'dims': False,
'periodicity':0x0,
'natom': 0,
'elements': [],
'contents': [],
'lack': False,
'expanded': False,
'tags': [],
'etype': 0x0,
'bandgap': None,
'bandgaptype':0x0,
'optgeom': False,
'calctypes': [],
'H': None,
'H_types': [],
'tol': None,
'k': None,
'kshift': None,
'smear': None,
'smeartype': None,
'spin': 0x0,
'lockstate': None,
'ansatz': 0x0,
'techs': [],
'dtype': 0x0
}
@classmethod
def iparse(cls, filename):
return [cls(filename)]
def __getitem__(self, key):
return getattr(self, key)
def __setitem__(self, key, value):
return setattr(self, key, value)
def __repr__(self):
out = ''
for repr in dir(self):
if not hasattr(getattr(self, repr), '__call__') and repr != '__doc__':
if repr == 'structures' and len(getattr(self, repr)):
if len(getattr(self, repr)) > 1:
out += repr + " ->\nINITIAL:\n" + str( getattr(self, repr)[0] ) + "\nFINAL:\n" + str( getattr(self, repr)[-1] ) + "\n\n"
else:
out += repr + " -> " + str( getattr(self, repr)[-1] ) + "\n\n"
else:
str_repr = str( getattr(self, repr) )
if len(str_repr) < 2000:
out += repr + ' -> ' + str_repr + "\n\n"
else:
out += repr + ' -> ' + str_repr[:1000] + '...\n\n'
return out
def warning(self, msg):
self.info['warns'].append(msg)
def get_checksum(self):
if self._checksum:
return self._checksum
if not self._filename:
raise RuntimeError('Source calc file is required in order to properly save the data!')
calc_checksum = hashlib.sha224()
struc_repr = ""
for ase_obj in self.structures:
struc_repr += "%3.6f %3.6f %3.6f %3.6f %3.6f %3.6f %3.6f %3.6f %3.6f " % tuple(map(abs, [ase_obj.cell[0][0], ase_obj.cell[0][1], ase_obj.cell[0][2], ase_obj.cell[1][0], ase_obj.cell[1][1], ase_obj.cell[1][2], ase_obj.cell[2][0], ase_obj.cell[2][1], ase_obj.cell[2][2]]))
for atom in ase_obj:
struc_repr += "%s %3.6f %3.6f %3.6f " % tuple(map(abs, [chemical_symbols.index(atom.symbol), atom.x, atom.y, atom.z]))
if self.info["energy"] is None:
energy = str(None)
else:
energy = str(round(self.info['energy'], 11 - int(math.log10(math.fabs(self.info['energy'])))))
calc_checksum.update((
struc_repr + "\n" +
energy + "\n" +
self.info['prog'] + "\n" +
str(self.info['input']) + "\n" +
str(sum([2**x for x in self.info['calctypes']]))
).encode('ascii'))
result = base64.b32encode(calc_checksum.digest()).decode('ascii')
result = result[:result.index('=')] + 'CI'
return result
def benchmark(self):
self.info['perf'] = "%1.2f" % (time.time() - self._starttime)
| true | true |
f71c5c0fa48035e54718849c2b4a1ba58fa91295 | 4,789 | py | Python | Models/opt_torch.py | tarkantemizoz/Cost-Sensitive-Learning | 083f8dfd2950b7e3874df34bf61c2ca1e4a91fbb | [
"Apache-2.0"
] | 2 | 2021-03-05T08:06:17.000Z | 2021-04-13T21:03:12.000Z | Models/opt_torch.py | tarkantemizoz/Cost-Sensitive-Learning | 083f8dfd2950b7e3874df34bf61c2ca1e4a91fbb | [
"Apache-2.0"
] | null | null | null | Models/opt_torch.py | tarkantemizoz/Cost-Sensitive-Learning | 083f8dfd2950b7e3874df34bf61c2ca1e4a91fbb | [
"Apache-2.0"
] | 1 | 2021-03-10T18:10:30.000Z | 2021-03-10T18:10:30.000Z | # coding: utf-8
# Copyright 2020 Tarkan Temizoz
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import time
import torch
import matplotlib.pyplot as plt
from sklearn.utils import shuffle
from Models.linearnet import LinearNet
class Optimization:
    """Training / evaluation driver for Cost-Sensitive Logistic Regression.

    Wraps a CSLR model and its optimizer, runs mini-batch gradient steps
    that maximise the total return, and keeps per-epoch return histories
    for the train and (optional) validation sets.
    """

    def __init__(self, model, optimizer, config):
        """Store the model, optimizer and network configuration.

        Args:
            model: CSLR model returning (decisions, probabilities, _).
            optimizer: torch optimizer bound to the model parameters.
            config: dict with optional keys batch_size, n_epochs, n_steps.
        """
        self.model = model
        self.optimizer = optimizer
        self.train_return = []
        self.val_return = []
        self.validation = False
        self.batch_size = config.get("batch_size", 32)
        self.n_epochs = config.get("n_epochs", 1000)
        self.n_steps = config.get("n_steps", self.n_epochs)

    @staticmethod
    def batch(iterable, n):
        """Yield successive slices of *iterable* of length at most *n*."""
        size = len(iterable)
        for start in range(0, size, n):
            yield iterable[start:min(start + n, size)]

    def train(self, x_train, r_train, x_val=None, r_val=None):
        """Fit the model with mini-batch gradient updates.

        Args:
            x_train: train features.
            r_train: train returns.
            x_val: validation features (optional).
            r_val: validation returns (optional).
        """
        if x_val is not None or r_val is not None:
            self.validation = True
        tick = time.time()

        for epoch in range(self.n_epochs):
            shuffled_x, shuffled_r = shuffle(x_train, r_train)
            self.model.train()

            for idx in self.batch(range(0, len(shuffled_x)), self.batch_size):
                # skip degenerate (single-sample) tail batches
                if len(idx) < 2:
                    break
                self.optimizer.zero_grad()
                decisions, _, _ = self.model(shuffled_x[idx])
                # maximising total return == minimising its negation
                batch_loss = -torch.mul(decisions, shuffled_r[idx]).sum()
                batch_loss.backward()
                self.optimizer.step()

            epoch_return, _, _ = self.evaluate(x_train, r_train)
            self.train_return.append(epoch_return)
            if self.validation is True:
                val_return, _, _ = self.evaluate(x_val, r_val)
                self.val_return.append(val_return)

            if ((epoch + 1) % self.n_steps == 0):
                elapsed = time.time() - tick
                head = ("Epoch %d Train Return: %.3f.") % (epoch + 1, self.train_return[-1])
                if self.validation is True:
                    tail = (" Validation Return: %.3f. Elapsed time: %.3fs.") % (self.val_return[-1], elapsed)
                else:
                    tail = " Elapsed time: %.3fs." % elapsed
                print(head, tail)
                tick = time.time()

    def evaluate(self, x_test, r_test):
        """Run the model without gradients and total up the returns.

        Args:
            x_test: features of the evaluated data.
            r_test: returns of the evaluated data.

        Returns:
            Triple of (total return, decision variables, probabilities).
        """
        with torch.no_grad():
            decisions, probabilities, _ = self.model(x_test)
            total = torch.mul(decisions, r_test).sum()
        return total, decisions, probabilities

    def plot_return(self):
        """Plot the recorded train returns against the test returns."""
        plt.plot(self.train_return, label="Train Return")
        plt.plot(self.val_return, label="Test Return")
        plt.legend()
        plt.title("Returns")
| 35.738806 | 91 | 0.554187 |
import time
import torch
import matplotlib.pyplot as plt
from sklearn.utils import shuffle
from Models.linearnet import LinearNet
class Optimization:
def __init__(self, model, optimizer, config):
self.model = model
self.optimizer = optimizer
self.train_return = []
self.val_return = []
self.validation = False
self.batch_size = config.get("batch_size",32)
self.n_epochs = config.get("n_epochs", 1000)
self.n_steps = config.get("n_steps", self.n_epochs)
@staticmethod
def batch(iterable, n):
l = len(iterable)
for ndx in range(0, l, n):
yield iterable[ndx:min(ndx + n, l)]
def train(self, x_train, r_train, x_val=None, r_val=None):
if x_val is not None or r_val is not None:
self.validation = True
start_time = time.time()
for epoch in range(self.n_epochs):
x_shuff, r_shuff = shuffle(x_train, r_train)
self.model.train()
for j in self.batch(range(0, len(x_shuff)),self.batch_size):
if len(j) < 2:
break
x_batch = x_shuff[j]
r_batch = r_shuff[j]
self.optimizer.zero_grad()
outputs, _, _ = self.model(x_batch)
loss = -torch.mul(outputs, r_batch).sum()
loss.backward()
self.optimizer.step()
returns_train, _, _ = self.evaluate(x_train, r_train)
self.train_return.append(returns_train)
if self.validation is True:
returns_val, _, _ = self.evaluate(x_val, r_val)
self.val_return.append(returns_val)
if ((epoch+1) % self.n_steps == 0):
elapsed = time.time() - start_time
print(
("Epoch %d Train Return: %.3f.") % (epoch + 1, self.train_return[-1]),
((" Validation Return: %.3f. Elapsed time: %.3fs.")
% (self.val_return[-1], elapsed)
if self.validation is True else
" Elapsed time: %.3fs."
% elapsed)
)
start_time = time.time()
def evaluate(self, x_test, r_test):
with torch.no_grad():
outputs, probs, _ = self.model(x_test)
returns = torch.mul(outputs, r_test).sum()
return returns, outputs, probs
def plot_return(self):
plt.plot(self.train_return, label="Train Return")
plt.plot(self.val_return, label="Test Return")
plt.legend()
plt.title("Returns")
| true | true |
f71c5d5f8d7d8019ce24e7f8738fe2a3db585bd9 | 12,026 | py | Python | grabcut.py | nong-fu/grabcut | 19a43eed7597ffae456349e4f0568da2f8f1f25c | [
"Apache-2.0"
] | null | null | null | grabcut.py | nong-fu/grabcut | 19a43eed7597ffae456349e4f0568da2f8f1f25c | [
"Apache-2.0"
] | null | null | null | grabcut.py | nong-fu/grabcut | 19a43eed7597ffae456349e4f0568da2f8f1f25c | [
"Apache-2.0"
] | null | null | null | # coding=utf-8
import sys
from pathlib import Path
import webbrowser
import numpy as np
import cv2
from PIL import Image
from PyQt5.QtCore import QDir, Qt, pyqtSlot, pyqtSignal
from PyQt5.QtGui import QImage, QPixmap, QColor
from PyQt5.QtWidgets import (
QApplication, QMainWindow, QWidget,
QMessageBox, QFileDialog, QLabel, QSpinBox, QPushButton,
QActionGroup, QAction, QSizePolicy, QHBoxLayout,
)
from ui_grabcut import Ui_MainWindow
class Canvas(QLabel):
    """Drawing surface layered over the image.

    Emits mousePressed when a stroke starts and mouseMoved with the
    previous and current cursor positions while the mouse is dragged,
    so the owner can paint mask line segments.
    """

    mousePressed = pyqtSignal()
    mouseMoved = pyqtSignal(int, int, int, int)

    def __init__(self, parent):
        super().__init__(parent)
        self.parent = parent
        # previous cursor position; None means no stroke in progress
        self.last_x = None
        self.last_y = None

    def mousePressEvent(self, e):
        self.mousePressed.emit()

    def mouseMoveEvent(self, e):
        cur_x, cur_y = e.x(), e.y()
        # only emit once a previous point exists; always remember the
        # current point for the next segment
        if self.last_x is not None:
            self.mouseMoved.emit(self.last_x, self.last_y, cur_x, cur_y)
        self.last_x, self.last_y = cur_x, cur_y

    def mouseReleaseEvent(self, e):
        # stroke finished: forget the last position
        self.last_x = None
        self.last_y = None
class MainWindow(QMainWindow):
    """Main application window: image loading, mask painting and grabcut."""

    def __init__(self, *args, **kwargs):
        super(MainWindow, self).__init__(*args, **kwargs)

        # orign image data (OpenCV BGR order)
        self.img = None
        # mask layer for grabcut
        self.mask = None
        # history masks for undo
        self.masks = []
        # grabcut algorithm param iterCount
        self.iterCount = 5
        # canvas image cache
        self.imgWithMask = None

        # mask mode to color, don't use dict, too slow!
        self.mode2color = (
            # cv2.GC_BGD == 0
            np.array([0, 0, 255], dtype=np.uint8),
            # cv2.GC_FGD == 1
            np.array([0, 255, 0], dtype=np.uint8),
            # cv2.GC_PR_BGD == 2
            np.array([0, 0, 120], dtype=np.uint8),
            # cv2.GC_PR_FGD == 3
            np.array([0, 120, 0], dtype=np.uint8),
        )
        # NONE mean none of (BGD/FGD/PR_BGD/PR_FGD)
        self.GC_NONE = 255
        # mask layer alpha
        self.alpha = 0.3

        self.imgPath = Path.cwd()
        self.penSize = 40

        # init ui order matter
        self.initUI()

    def grabCut(self, iterCount):
        """Run the grabcut algorithm on the current image/mask pair."""
        if self.img is None:
            self.showMessage("No image")
            return
        # avoid grabCut crash
        if not np.any((self.mask == cv2.GC_FGD) | (self.mask == cv2.GC_PR_FGD)):
            self.showMessage("no GC_FGD or GC_PR_FGD")
            return

        # before grabcut, save mask to stack
        self.pushMask()
        bgdModel = np.zeros((1, 65), np.float64)
        fgdModel = np.zeros((1, 65), np.float64)
        _ = cv2.grabCut(self.img, self.mask, None, bgdModel,
                        fgdModel, iterCount, cv2.GC_INIT_WITH_MASK)
        self.drawPartialImgWithMask(self.masks[-1], self.mask)

        # display result
        self.ui.displayResultAction.setChecked(True)
        self.repaint()

    def drawingMask(self, x1, y1, x2, y2):
        """drawing an small partial of the mask layer,
        which is a small line segment.
        """
        if self.img is None:
            return
        # when hidden mask or display result, don't draw mask
        if self.ui.hiddenMaskAction.isChecked() or \
                self.ui.displayResultAction.isChecked():
            return

        if self.ui.prFgdAction.isChecked():
            mode = cv2.GC_PR_FGD
        elif self.ui.prBgdAction.isChecked():
            mode = cv2.GC_PR_BGD
        elif self.ui.fgdAction.isChecked():
            mode = cv2.GC_FGD
        else:  # bgdAction
            mode = cv2.GC_BGD

        cv2.line(self.mask, (x1, y1), (x2, y2), mode, self.penSize)

        partialMask = np.zeros(self.mask.shape, np.uint8)
        # GC_BGD is 0, can't use 0 as default
        partialMask.fill(self.GC_NONE)
        cv2.line(partialMask, (x1, y1), (x2, y2), mode, self.penSize)

        indices = np.where(partialMask != self.GC_NONE)
        if indices[0].size == 0:
            # nothing new in partialMask
            return

        self.imgWithMask[indices] = (1 - self.alpha)*self.img[indices] + \
            self.alpha*self.mode2color[mode]
        self.repaint()

    def pushMask(self):
        """push a mask to history list masks for undo.
        """
        # if mask hasn't changed
        if len(self.masks) > 0 and np.array_equal(self.masks[-1], self.mask):
            return

        self.masks.append(self.mask.copy())

    def drawPartialImgWithMask(self, curMask, newMask):
        """draw partial imgWithMask.

        mask changed from curMask to newMask, only draw the changed part.
        """
        # redraw partial imgWithMask
        indices = np.where(curMask != newMask)
        if indices[0].size == 0:
            # two masks are equal
            return
        self.imgWithMask[indices] = (1-self.alpha)*self.img[indices] + \
            self.alpha*np.array([self.mode2color[m] for m in newMask[indices]])

    def getResult(self):
        """use mask cuf off forground area as final result.

        NB: returned array is in OpenCV BGR channel order.
        """
        result_mask = np.where((self.mask == 2) | (
            self.mask == 0), 0, 1).astype('uint8')
        return self.img*result_mask[:, :, np.newaxis]

    @pyqtSlot(name="on_displayResultAction_triggered")
    @pyqtSlot(name="on_hiddenMaskAction_triggered")
    def repaint(self):
        """repaint cavans.
        """
        if self.img is None:
            self.showMessage("No image")
            return

        if self.ui.displayResultAction.isChecked():
            img = self.getResult()
        elif self.ui.hiddenMaskAction.isChecked():
            img = self.img
        else:
            img = self.imgWithMask

        # convert opencv image to qt image
        height, width, _ = img.shape
        bytesOfLine = 3*width
        image = QImage(img.tobytes(), width, height,
                       bytesOfLine, QImage.Format_RGB888).rgbSwapped()
        self.canvas.setPixmap(QPixmap.fromImage(image))

    def initUI(self):
        """Build the window: toolbar widgets, canvas and signal wiring."""
        # merge designer ui
        self.ui = Ui_MainWindow()
        self.ui.setupUi(self)

        # right box on toolbar
        rightBox = QWidget(self.ui.toolBar)
        boxLayout = QHBoxLayout()

        # grabcut iterCount spinbox
        boxLayout.addWidget(QLabel("iterCount"))
        self.iterCountSpinBox = QSpinBox(self)
        self.iterCountSpinBox.setRange(1, 100)
        self.iterCountSpinBox.setValue(5)
        boxLayout.addWidget(self.iterCountSpinBox)

        boxLayout.addStretch(1)

        # pen size spinbox
        boxLayout.addWidget(QLabel("pen"))
        self.penSizeSpinBox = QSpinBox(self)
        self.penSizeSpinBox.setRange(1, 500)
        self.penSizeSpinBox.setSingleStep(5)
        self.penSizeSpinBox.setValue(40)
        boxLayout.addWidget(self.penSizeSpinBox)

        rightBox.setLayout(boxLayout)
        self.ui.toolBar.addWidget(rightBox)

        self.canvas = Canvas(self)
        self.ui.scrollArea.setWidget(self.canvas)
        # canvas align center in scroll area
        self.ui.scrollArea.setAlignment(Qt.AlignCenter)
        # fixed canvas that make it easier to select mask layer
        self.canvas.setSizePolicy(QSizePolicy.Fixed, QSizePolicy.Fixed)

        # 4 types of mask layer flags
        actionGroup = QActionGroup(self)
        actionGroup.addAction(self.ui.fgdAction)
        actionGroup.addAction(self.ui.bgdAction)
        actionGroup.addAction(self.ui.prFgdAction)
        actionGroup.addAction(self.ui.prBgdAction)

        # handle events
        self.ui.exitAction.triggered.connect(self.close)
        self.penSizeSpinBox.valueChanged.connect(self.setPenSize)
        self.iterCountSpinBox.valueChanged.connect(self.setIterCount)
        self.ui.opencvAction.triggered.connect(lambda: webbrowser.open(
            'https://opencv-python-tutroals.readthedocs.io/en/'
            'latest/py_tutorials/py_imgproc/py_grabcut/py_grabcut.html'
        ))

        self.canvas.mousePressed.connect(self.pushMask)
        self.canvas.mouseMoved.connect(self.drawingMask)

        self.resetUiToDrawMaskMode()

    def resetUiToDrawMaskMode(self):
        """reset ui to draw mask mode.
        """
        self.ui.prFgdAction.setChecked(True)
        self.ui.displayResultAction.setChecked(False)
        self.ui.hiddenMaskAction.setChecked(False)

    def setPenSize(self, v):
        """Slot: update the brush diameter used when painting the mask."""
        self.penSize = v

    def setIterCount(self, v):
        """Slot: update the grabcut iteration count."""
        self.iterCount = v

    def showMessage(self, msg):
        """Show a transient message in the status bar."""
        self.ui.statusbar.showMessage(msg)

    @pyqtSlot(name="on_openAction_triggered")
    def openImage(self):
        """Load an image from disk and reset the mask state."""
        fileName, _ = QFileDialog.getOpenFileName(
            self, "Open File", str(self.imgPath))
        if not fileName:
            return

        imgFile = Path(fileName)
        self.imgPath = imgFile.parent

        # cv2.imread can't read image that path contain chinese characters,
        # so this is a workaround.
        # self.img = cv2.imread(fileName)
        data = np.fromfile(fileName, dtype=np.uint8)
        self.img = cv2.imdecode(data, cv2.IMREAD_UNCHANGED)
        # FIX: imdecode returns None on an unreadable file; the old code
        # would crash on the slicing below
        if self.img is None:
            self.showMessage("failed to decode image: " + fileName)
            return
        if self.img.ndim == 2:
            # FIX: grayscale images are 2-D; expand to 3 channels so the
            # rest of the pipeline (which assumes BGR) keeps working
            self.img = cv2.cvtColor(self.img, cv2.COLOR_GRAY2BGR)
        else:
            # discarding alpha channel
            self.img = self.img[:, :, :3]
        self.reset()

    @pyqtSlot(name="on_saveAction_triggered")
    def saveResult(self):
        """Save the cut-out foreground to disk (default .png)."""
        if self.img is None:
            self.showMessage("no result to save")
            return

        fileName, _ = QFileDialog.getSaveFileName(
            self, "Save File", str(self.imgPath))
        if not fileName:
            return

        imgFile = Path(fileName)
        self.imgPath = imgFile.parent
        # default save as png
        if not imgFile.suffix:
            imgFile = imgFile.with_suffix('.png')

        result = self.getResult()
        # cv2.imwrite can't write image that path contain chinese characters.
        # FIX: getResult() is BGR (see repaint's rgbSwapped); convert to RGB
        # before handing to PIL, otherwise saved colors are channel-swapped.
        im = Image.fromarray(cv2.cvtColor(result, cv2.COLOR_BGR2RGB))
        im.save(imgFile.as_posix())

    @pyqtSlot(name="on_exportMaskAction_triggered")
    def exportMask(self):
        """Export the raw grabcut mask (single channel) to disk."""
        if self.mask is None or not self.mask.any():
            self.showMessage("no mask")
            return

        fileName, _ = QFileDialog.getSaveFileName(
            self, "Save Mask", str(self.imgPath))
        if not fileName:
            return

        imgFile = Path(fileName)
        self.imgPath = imgFile.parent
        # default save as png
        if not imgFile.suffix:
            imgFile = imgFile.with_suffix('.png')

        im = Image.fromarray(self.mask)
        im.save(imgFile.as_posix())

    @pyqtSlot(name="on_undoAction_triggered")
    def undo(self):
        """Revert to the previous mask from the history stack."""
        if len(self.masks) == 0:
            self.showMessage("undo stack is empty")
            return

        prevMask = self.masks.pop()
        self.drawPartialImgWithMask(self.mask, prevMask)
        self.mask = prevMask
        # after undo, uncheck display result and hidden mask
        self.resetUiToDrawMaskMode()
        self.repaint()

    @pyqtSlot(name="on_resetAction_triggered")
    def reset(self):
        """Discard all mask edits and rebuild the overlay from scratch."""
        if self.img is None:
            self.showMessage("No image")
            return

        self.mask = np.zeros(self.img.shape[:2], np.uint8)
        self.mask.fill(cv2.GC_PR_BGD)
        self.masks = []

        # re-create imgWidthMask
        self.imgWithMask = np.zeros(self.img.shape, np.uint8)
        self.imgWithMask[...] = (1-self.alpha)*self.img + \
            self.alpha*self.mode2color[cv2.GC_PR_BGD]

        self.resetUiToDrawMaskMode()
        self.repaint()

    @pyqtSlot(name="on_grabCutAction_triggered")
    def runGrabCut(self):
        """Slot: run grabcut with the configured iteration count."""
        self.grabCut(self.iterCount)

    @pyqtSlot(name="on_singleStepAction_triggered")
    def runGrabCutSingleStep(self):
        """Slot: run a single grabcut iteration."""
        self.grabCut(1)

    def closeEvent(self, evt):
        # maybe popup a dialog to ask user accept or ignore
        evt.accept()
def main():
    """Launch the grabcut annotation GUI."""
    app = QApplication(sys.argv)
    window = MainWindow()
    window.show()
    app.exec_()


if __name__ == '__main__':
    main()
| 31.564304 | 80 | 0.606187 |
import sys
from pathlib import Path
import webbrowser
import numpy as np
import cv2
from PIL import Image
from PyQt5.QtCore import QDir, Qt, pyqtSlot, pyqtSignal
from PyQt5.QtGui import QImage, QPixmap, QColor
from PyQt5.QtWidgets import (
QApplication, QMainWindow, QWidget,
QMessageBox, QFileDialog, QLabel, QSpinBox, QPushButton,
QActionGroup, QAction, QSizePolicy, QHBoxLayout,
)
from ui_grabcut import Ui_MainWindow
class Canvas(QLabel):
mousePressed = pyqtSignal()
mouseMoved = pyqtSignal(int, int, int, int)
def __init__(self, parent):
super().__init__(parent)
self.parent = parent
self.last_x, self.last_y = None, None
def mousePressEvent(self, e):
self.mousePressed.emit()
def mouseMoveEvent(self, e):
x, y = e.x(), e.y()
if self.last_x is None:
self.last_x, self.last_y = x, y
return
self.mouseMoved.emit(self.last_x, self.last_y, x, y)
self.last_x, self.last_y = x, y
def mouseReleaseEvent(self, e):
self.last_x, self.last_y = None, None
class MainWindow(QMainWindow):
def __init__(self, *args, **kwargs):
super(MainWindow, self).__init__(*args, **kwargs)
self.img = None
self.mask = None
self.masks = []
self.iterCount = 5
self.imgWithMask = None
self.mode2color = (
# cv2.GC_BGD == 0
np.array([0, 0, 255], dtype=np.uint8),
# cv2.GC_FGD == 1
np.array([0, 255, 0], dtype=np.uint8),
# cv2.GC_PR_BGD == 2
np.array([0, 0, 120], dtype=np.uint8),
# cv2.GC_PR_FGD == 3
np.array([0, 120, 0], dtype=np.uint8),
)
# NONE mean none of (BGD/FGD/PR_BGD/PR_FGD)
self.GC_NONE = 255
# mask layer alpha
self.alpha = 0.3
self.imgPath = Path.cwd()
self.penSize = 40
# init ui order matter
self.initUI()
def grabCut(self, iterCount):
if self.img is None:
self.showMessage("No image")
return
# avoid grabCut crash
if not np.any((self.mask == cv2.GC_FGD) | (self.mask == cv2.GC_PR_FGD)):
self.showMessage("no GC_FGD or GC_PR_FGD")
return
# before grabcut, save mask to stack
self.pushMask()
bgdModel = np.zeros((1, 65), np.float64)
fgdModel = np.zeros((1, 65), np.float64)
_ = cv2.grabCut(self.img, self.mask, None, bgdModel,
fgdModel, iterCount, cv2.GC_INIT_WITH_MASK)
self.drawPartialImgWithMask(self.masks[-1], self.mask)
# display result
self.ui.displayResultAction.setChecked(True)
self.repaint()
def drawingMask(self, x1, y1, x2, y2):
if self.img is None:
return
# when hidden mask or display result, don't draw mask
if self.ui.hiddenMaskAction.isChecked() or \
self.ui.displayResultAction.isChecked():
return
if self.ui.prFgdAction.isChecked():
mode = cv2.GC_PR_FGD
elif self.ui.prBgdAction.isChecked():
mode = cv2.GC_PR_BGD
elif self.ui.fgdAction.isChecked():
mode = cv2.GC_FGD
else:
mode = cv2.GC_BGD
cv2.line(self.mask, (x1, y1), (x2, y2), mode, self.penSize)
partialMask = np.zeros(self.mask.shape, np.uint8)
partialMask.fill(self.GC_NONE)
cv2.line(partialMask, (x1, y1), (x2, y2), mode, self.penSize)
indices = np.where(partialMask != self.GC_NONE)
if indices[0].size == 0:
# nothing new in partialMask
return
self.imgWithMask[indices] = (1 - self.alpha)*self.img[indices] + \
self.alpha*self.mode2color[mode]
self.repaint()
def pushMask(self):
# if mask hasn't changed
if len(self.masks) > 0 and np.array_equal(self.masks[-1], self.mask):
return
self.masks.append(self.mask.copy())
def drawPartialImgWithMask(self, curMask, newMask):
indices = np.where(curMask != newMask)
if indices[0].size == 0:
return
self.imgWithMask[indices] = (1-self.alpha)*self.img[indices] + \
self.alpha*np.array([self.mode2color[m] for m in newMask[indices]])
def getResult(self):
result_mask = np.where((self.mask == 2) | (
self.mask == 0), 0, 1).astype('uint8')
return self.img*result_mask[:, :, np.newaxis]
@pyqtSlot(name="on_displayResultAction_triggered")
@pyqtSlot(name="on_hiddenMaskAction_triggered")
def repaint(self):
if self.img is None:
self.showMessage("No image")
return
if self.ui.displayResultAction.isChecked():
img = self.getResult()
elif self.ui.hiddenMaskAction.isChecked():
img = self.img
else:
img = self.imgWithMask
height, width, _ = img.shape
bytesOfLine = 3*width
image = QImage(img.tobytes(), width, height,
bytesOfLine, QImage.Format_RGB888).rgbSwapped()
self.canvas.setPixmap(QPixmap.fromImage(image))
def initUI(self):
self.ui = Ui_MainWindow()
self.ui.setupUi(self)
rightBox = QWidget(self.ui.toolBar)
boxLayout = QHBoxLayout()
boxLayout.addWidget(QLabel("iterCount"))
self.iterCountSpinBox = QSpinBox(self)
self.iterCountSpinBox.setRange(1, 100)
self.iterCountSpinBox.setValue(5)
boxLayout.addWidget(self.iterCountSpinBox)
boxLayout.addStretch(1)
boxLayout.addWidget(QLabel("pen"))
self.penSizeSpinBox = QSpinBox(self)
self.penSizeSpinBox.setRange(1, 500)
self.penSizeSpinBox.setSingleStep(5)
self.penSizeSpinBox.setValue(40)
boxLayout.addWidget(self.penSizeSpinBox)
rightBox.setLayout(boxLayout)
self.ui.toolBar.addWidget(rightBox)
self.canvas = Canvas(self)
self.ui.scrollArea.setWidget(self.canvas)
self.ui.scrollArea.setAlignment(Qt.AlignCenter)
self.canvas.setSizePolicy(QSizePolicy.Fixed, QSizePolicy.Fixed)
actionGroup = QActionGroup(self)
actionGroup.addAction(self.ui.fgdAction)
actionGroup.addAction(self.ui.bgdAction)
actionGroup.addAction(self.ui.prFgdAction)
actionGroup.addAction(self.ui.prBgdAction)
self.ui.exitAction.triggered.connect(self.close)
self.penSizeSpinBox.valueChanged.connect(self.setPenSize)
self.iterCountSpinBox.valueChanged.connect(self.setIterCount)
self.ui.opencvAction.triggered.connect(lambda: webbrowser.open(
'https://opencv-python-tutroals.readthedocs.io/en/'
'latest/py_tutorials/py_imgproc/py_grabcut/py_grabcut.html'
))
self.canvas.mousePressed.connect(self.pushMask)
self.canvas.mouseMoved.connect(self.drawingMask)
self.resetUiToDrawMaskMode()
def resetUiToDrawMaskMode(self):
self.ui.prFgdAction.setChecked(True)
self.ui.displayResultAction.setChecked(False)
self.ui.hiddenMaskAction.setChecked(False)
def setPenSize(self, v):
self.penSize = v
def setIterCount(self, v):
self.iterCount = v
def showMessage(self, msg):
self.ui.statusbar.showMessage(msg)
@pyqtSlot(name="on_openAction_triggered")
def openImage(self):
fileName, _ = QFileDialog.getOpenFileName(
self, "Open File", str(self.imgPath))
if not fileName:
return
imgFile = Path(fileName)
self.imgPath = imgFile.parent
# so this is a workaround.
# self.img = cv2.imread(fileName)
data = np.fromfile(fileName, dtype=np.uint8)
self.img = cv2.imdecode(data, cv2.IMREAD_UNCHANGED)
# discarding alpha channel
self.img = self.img[:,:,:3]
self.reset()
@pyqtSlot(name="on_saveAction_triggered")
def saveResult(self):
if self.img is None:
self.showMessage("no result to save")
return
fileName, _ = QFileDialog.getSaveFileName(
self, "Save File", str(self.imgPath))
if not fileName:
return
imgFile = Path(fileName)
self.imgPath = imgFile.parent
# default save as png
if not imgFile.suffix:
imgFile = imgFile.with_suffix('.png')
result = self.getResult()
# cv2.imwrite can't write image that path contain chinese characters.
im = Image.fromarray(result)
im.save(imgFile.as_posix())
@pyqtSlot(name="on_exportMaskAction_triggered")
def exportMask(self):
if self.mask is None or not self.mask.any():
self.showMessage("no mask")
return
fileName, _ = QFileDialog.getSaveFileName(
self, "Save Mask", str(self.imgPath))
if not fileName:
return
imgFile = Path(fileName)
self.imgPath = imgFile.parent
if not imgFile.suffix:
imgFile = imgFile.with_suffix('.png')
im = Image.fromarray(self.mask)
im.save(imgFile.as_posix())
@pyqtSlot(name="on_undoAction_triggered")
def undo(self):
if len(self.masks) == 0:
self.showMessage("undo stack is empty")
return
prevMask = self.masks.pop()
self.drawPartialImgWithMask(self.mask, prevMask)
self.mask = prevMask
self.resetUiToDrawMaskMode()
self.repaint()
@pyqtSlot(name="on_resetAction_triggered")
def reset(self):
if self.img is None:
self.showMessage("No image")
return
self.mask = np.zeros(self.img.shape[:2], np.uint8)
self.mask.fill(cv2.GC_PR_BGD)
self.masks = []
self.imgWithMask = np.zeros(self.img.shape, np.uint8)
self.imgWithMask[...] = (1-self.alpha)*self.img + \
self.alpha*self.mode2color[cv2.GC_PR_BGD]
self.resetUiToDrawMaskMode()
self.repaint()
@pyqtSlot(name="on_grabCutAction_triggered")
def runGrabCut(self):
self.grabCut(self.iterCount)
@pyqtSlot(name="on_singleStepAction_triggered")
def runGrabCutSingleStep(self):
self.grabCut(1)
def closeEvent(self, evt):
evt.accept()
if __name__ == '__main__':
app = QApplication(sys.argv)
window = MainWindow()
window.show()
app.exec_()
| true | true |
f71c5d93e6fff0721c0bfffe7881cdff8bdc9c08 | 952 | py | Python | scripts/runAll.py | acatwithacomputer/proteus | 80dfad95da6ab4d18a88a035f55c26b03540a864 | [
"MIT"
] | null | null | null | scripts/runAll.py | acatwithacomputer/proteus | 80dfad95da6ab4d18a88a035f55c26b03540a864 | [
"MIT"
] | 13 | 2018-02-08T23:22:59.000Z | 2020-12-06T19:40:32.000Z | scripts/runAll.py | acatwithacomputer/proteus | 80dfad95da6ab4d18a88a035f55c26b03540a864 | [
"MIT"
] | 1 | 2020-02-17T03:25:34.000Z | 2020-02-17T03:25:34.000Z | #! /usr/bin/env python
from __future__ import print_function
import sys
import os
import glob
pFiles = glob.glob('*_p.py')
caseDict = {}
for pf in pFiles:
caseDict[pf] = set(glob.glob(pf[:-5]+'*_n.py'))
#fix cases were problem name is a subset of some other problem name
for pf1 in pFiles:
for pf2 in pFiles:
if pf2.find(pf1[:-4]):
nf1Set=set(glob.glob(pf1[:-5]+'*_n.py'))
caseDict[pf2] -= nf1Set
for pf in pFiles:
print(pf)
print(caseDict[pf])
for p,nList in caseDict.items():
if len(nList) == 0:
sys.stdout.write("\n----------------Skipping "+p+". No n file----------------------\n")
sys.stdout.flush()
else:
for n in nList:
args = ('proteusRun.py',p,n,'-l 4','-b','runAllBatch.py')
sys.stdout.write("\n----------------Running "+p+"---"+n+"\n")
sys.stdout.flush()
os.spawnvpe(os.P_WAIT,'proteusRun.py',args,os.environ)
| 31.733333 | 96 | 0.553571 |
from __future__ import print_function
import sys
import os
import glob
pFiles = glob.glob('*_p.py')
caseDict = {}
for pf in pFiles:
caseDict[pf] = set(glob.glob(pf[:-5]+'*_n.py'))
for pf1 in pFiles:
for pf2 in pFiles:
if pf2.find(pf1[:-4]):
nf1Set=set(glob.glob(pf1[:-5]+'*_n.py'))
caseDict[pf2] -= nf1Set
for pf in pFiles:
print(pf)
print(caseDict[pf])
for p,nList in caseDict.items():
if len(nList) == 0:
sys.stdout.write("\n----------------Skipping "+p+". No n file----------------------\n")
sys.stdout.flush()
else:
for n in nList:
args = ('proteusRun.py',p,n,'-l 4','-b','runAllBatch.py')
sys.stdout.write("\n----------------Running "+p+"---"+n+"\n")
sys.stdout.flush()
os.spawnvpe(os.P_WAIT,'proteusRun.py',args,os.environ)
| true | true |
f71c5df90825c721c8d73769c1b51879fc9c7df2 | 2,706 | py | Python | runloop/adminx.py | luqin/firefly | 2e5ab17f2d20deb3c68c927f6208ea89db7c639d | [
"MIT"
] | null | null | null | runloop/adminx.py | luqin/firefly | 2e5ab17f2d20deb3c68c927f6208ea89db7c639d | [
"MIT"
] | 9 | 2020-03-24T16:45:25.000Z | 2022-03-11T23:40:51.000Z | runloop/adminx.py | luqin/firefly | 2e5ab17f2d20deb3c68c927f6208ea89db7c639d | [
"MIT"
] | null | null | null | from __future__ import absolute_import
from django.forms import ModelMultipleChoiceField
from django.utils.translation import ugettext as _
import xadmin
from .xadmin_action import RunloopAction
from .models import RunLoopGroup, Orders
ACTION_NAME = {
'add': _('Can add %s'),
'change': _('Can change %s'),
'edit': _('Can edit %s'),
'delete': _('Can delete %s'),
'view': _('Can view %s'),
}
def get_stock_name(p):
action = p.codename.split('_')[0]
if action in ACTION_NAME:
return ACTION_NAME[action] % str(p.content_type)
else:
return p.co_name
class StockModelMultipleChoiceField(ModelMultipleChoiceField):
def label_from_instance(self, p):
return get_stock_name(p)
@xadmin.sites.register(RunLoopGroup)
class RunLoopGroupAdmin(object):
list_display = ("name", "start", "end", "status", "description", 'link',)
list_display_links = ("name",)
# readony_fields = ("status", )
exclude = ['status']
list_quick_filter = [{"field": "name", "limit": 10}]
search_fields = ["name"]
reversion_enable = True
style_fields = {"factor_buys": "checkbox-inline", "factor_sells": "checkbox-inline", "positions": "radio-inline",
"stocks": "m2m_transfer"}
# def get_field_attrs(self, db_field, **kwargs):
# print("db_field", db_field)
# attrs = super(RunLoopGroupAdmin, self).get_field_attrs(db_field, **kwargs)
# if db_field.name == 'stocks':
# attrs['form_class'] = StockModelMultipleChoiceField
# return attrs
actions = [RunloopAction]
def link(self, instance):
if instance.status == 'done':
return "<a href='%s/k' target='_blank'>%s</a>" % (
instance.id, '买卖点') + " <a href='%s/returns' target='_blank'>%s</a>" % (instance.id, '收益')
else:
return ""
link.short_description = '<div style="width: 100px;">报表</div>'
link.allow_tags = True
link.is_column = False
@xadmin.sites.register(Orders)
class OrdersAdmin(object):
list_display = (
"run_loop_group", "stock", "profit", "profit_cg_hunder", "buy_date", "buy_price", "buy_cnt", "buy_factor",
"sell_date", "sell_price", "sell_type_extra", "sell_type")
list_display_links = ("stock",)
# readony_fields = ("status", )
# exclude = ['status']
list_quick_filter = [{"field": "stock", "limit": 10}]
search_fields = ["stock"]
reversion_enable = True
# xadmin.sites.site.register(HostGroup, HostGroupAdmin)
# xadmin.sites.site.register(MaintainLog, MaintainLogAdmin)
# xadmin.sites.site.register(IDC, IDCAdmin)
# xadmin.sites.site.register(AccessRecord, AccessRecordAdmin)
| 30.404494 | 117 | 0.64745 | from __future__ import absolute_import
from django.forms import ModelMultipleChoiceField
from django.utils.translation import ugettext as _
import xadmin
from .xadmin_action import RunloopAction
from .models import RunLoopGroup, Orders
ACTION_NAME = {
'add': _('Can add %s'),
'change': _('Can change %s'),
'edit': _('Can edit %s'),
'delete': _('Can delete %s'),
'view': _('Can view %s'),
}
def get_stock_name(p):
action = p.codename.split('_')[0]
if action in ACTION_NAME:
return ACTION_NAME[action] % str(p.content_type)
else:
return p.co_name
class StockModelMultipleChoiceField(ModelMultipleChoiceField):
def label_from_instance(self, p):
return get_stock_name(p)
@xadmin.sites.register(RunLoopGroup)
class RunLoopGroupAdmin(object):
list_display = ("name", "start", "end", "status", "description", 'link',)
list_display_links = ("name",)
exclude = ['status']
list_quick_filter = [{"field": "name", "limit": 10}]
search_fields = ["name"]
reversion_enable = True
style_fields = {"factor_buys": "checkbox-inline", "factor_sells": "checkbox-inline", "positions": "radio-inline",
"stocks": "m2m_transfer"}
actions = [RunloopAction]
def link(self, instance):
if instance.status == 'done':
return "<a href='%s/k' target='_blank'>%s</a>" % (
instance.id, '买卖点') + " <a href='%s/returns' target='_blank'>%s</a>" % (instance.id, '收益')
else:
return ""
link.short_description = '<div style="width: 100px;">报表</div>'
link.allow_tags = True
link.is_column = False
@xadmin.sites.register(Orders)
class OrdersAdmin(object):
list_display = (
"run_loop_group", "stock", "profit", "profit_cg_hunder", "buy_date", "buy_price", "buy_cnt", "buy_factor",
"sell_date", "sell_price", "sell_type_extra", "sell_type")
list_display_links = ("stock",)
list_quick_filter = [{"field": "stock", "limit": 10}]
search_fields = ["stock"]
reversion_enable = True
| true | true |
f71c5ef0b1d632892fa3fd528d707dc54828ea6e | 16,431 | py | Python | pychron/mv/focus/autofocus_manager.py | ael-noblegas/pychron | 6ebbbb1f66a614972b62b7a9be4c784ae61b5d62 | [
"Apache-2.0"
] | 1 | 2019-02-27T21:57:44.000Z | 2019-02-27T21:57:44.000Z | pychron/mv/focus/autofocus_manager.py | ael-noblegas/pychron | 6ebbbb1f66a614972b62b7a9be4c784ae61b5d62 | [
"Apache-2.0"
] | 80 | 2018-07-17T20:10:20.000Z | 2021-08-17T15:38:24.000Z | pychron/mv/focus/autofocus_manager.py | AGESLDEO/pychron | 1a81e05d9fba43b797f335ceff6837c016633bcf | [
"Apache-2.0"
] | null | null | null | # ===============================================================================
# Copyright 2011 Jake Ross
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
# =============enthought library imports=======================
from __future__ import absolute_import
from __future__ import print_function
import six.moves.cPickle as pickle
from traits.api import Bool, Any, Instance, Button, Property, Event, on_trait_change
from traitsui.api import View, Item, Handler, HGroup
# ============= standard library imports ========================
# from threading import Thread
from threading import Event as TEvent
from numpy import linspace, argmin, argmax, random, asarray
import time
import os
# ============= local library imports ==========================
from pychron.core.time_series.time_series import smooth
from pychron.image.cv_wrapper import grayspace, crop, get_focus_measure
# from pychron.image.cvwrapper import grayspace, get_focus_measure, crop, resize
from scipy.ndimage.measurements import variance
from scipy.ndimage.filters import generic_gradient_magnitude, sobel
from scipy.ndimage import sum as ndsum
from pychron.paths import paths
from pychron.managers.manager import Manager
from pychron.image.image import Image
# from pychron.machine_vision.focus_parameters import FocusParameters
# from pychron.image.image_editor import ImageEditor
from pychron.graph.graph import Graph
from pychron.mv.focus.focus_parameters import FocusParameters
from pychron.core.ui.image_editor import ImageEditor
from pychron.core.ui.gui import invoke_in_main_thread
from pychron.core.ui.thread import Thread
class ConfigureHandler(Handler):
def closed(self, info, isok):
if isok:
info.object.dump_parameters()
class AutoFocusManager(Manager):
"""
currently uses passive focus techniques
see
http://en.wikipedia.org/wiki/Autofocus
"""
video = Any
laser_manager = Any
stage_controller = Any
canvas = Any
parameters = Instance(FocusParameters)
configure_button = Button('configure')
autofocus_button = Event
autofocus_label = Property(depends_on='autofocusing')
autofocusing = Bool
# threading event for cancel signal
_evt_autofocusing = None
image = Instance(Image, ())
graph = None
def dump_parameters(self):
p = os.path.join(paths.hidden_dir, 'autofocus_configure')
self.info('dumping parameters to {}'.format(p))
with open(p, 'wb') as f:
pickle.dump(self.parameters, f)
def load_parameter(self):
p = os.path.join(paths.hidden_dir, 'autofocus_configure')
if os.path.isfile(p):
with open(p, 'rb') as f:
try:
params = pickle.load(f)
self.info('loading parameters from {}'.format(p))
if not isinstance(params, FocusParameters):
self.info('out of date parameters file. using default')
params = FocusParameters()
return params
except Exception as e:
print('autofocus load parameter', e)
return FocusParameters()
else:
return FocusParameters()
def passive_focus(self, block=False, **kw):
self._evt_autofocusing = TEvent()
self._evt_autofocusing.clear()
# manager = self.laser_manager
oper = self.parameters.operator
self.info('passive focus. operator = {}'.format(oper))
g = self.graph
if not g:
g = Graph(plotcontainer_dict=dict(padding=10),
window_x=0.70,
window_y=20,
window_width=325,
window_height=325,
window_title='Autofocus'
)
self.graph = g
g.clear()
g.new_plot(padding=[40, 10, 10, 40],
xtitle='Z (mm)',
ytitle='Focus Measure ({})'.format(oper)
)
g.new_series()
g.new_series()
invoke_in_main_thread(self._open_graph)
target = self._passive_focus
self._passive_focus_thread = Thread(name='autofocus', target=target,
args=(self._evt_autofocusing,
),
kwargs=kw
)
self._passive_focus_thread.start()
if block:
# while 1:
# if not self._passive_focus_thread.isRunning():
# break
# time.sleep(0.25)
self._passive_focus_thread.join()
def _open_graph(self):
ui = self.graph.edit_traits()
self.add_window(ui)
def stop_focus(self):
if self.stage_controller:
self.stage_controller.stop()
self.info('autofocusing stopped by user')
def _passive_focus(self, stop_signal, set_zoom=True):
'''
sweep z looking for max focus measure
FMgrad= roberts or sobel (sobel removes noise)
FMvar = intensity variance
'''
self.autofocusing = True
manager = self.laser_manager
fstart = self.parameters.fstart
fend = self.parameters.fend
step_scalar = self.parameters.step_scalar
zoom = self.parameters.zoom
operator = self.parameters.operator
steps = step_scalar * (max(fend, fstart) - min(fend, fstart)) + 1
prev_zoom = None
if set_zoom and \
manager is not None and \
zoom:
motor = manager.get_motor('zoom')
if motor:
prev_zoom = motor.data_position
self.info('setting zoom: {}'.format(zoom))
manager.set_motor('zoom', zoom, block=True)
time.sleep(1.5)
args = self._do_focusing(fstart, fend, steps, operator)
if manager is not None:
if prev_zoom is not None:
self.info('returning to previous zoom: {}'.format(prev_zoom))
manager.set_motor('zoom', prev_zoom, block=True)
if args:
mi, fmi, ma, fma = args
self.info('''passive focus results:Operator={}
ImageGradmin={} (z={})
ImageGradmax={}, (z={})'''.format(operator, mi, fmi, ma, fma))
focus_pos = fma
self.graph.add_vertical_rule(focus_pos)
self.graph.redraw()
# self.graph.add_vertical_rule(fma)
self.info('calculated focus z= {}'.format(focus_pos))
# if set_z:
controller = self.stage_controller
if controller is not None:
if not stop_signal.isSet():
controller.single_axis_move('z', focus_pos, block=True)
controller._z_position = focus_pos
controller.z_progress = focus_pos
self.autofocusing = False
def _cancel_sweep(self, vo):
if self._evt_autofocusing.isSet():
# return to original velocity
self.autofocusing = False
self._reset_velocity(vo)
return True
def _reset_velocity(self, vo):
if self.stage_controller:
pdict = dict(velocity=vo, key='z')
self.stage_controller.set_single_axis_motion_parameters(pdict=pdict)
def _do_focusing(self, start, end, steps, operator):
screen_roi = self._get_roi()
self._add_focus_area_rect(*screen_roi)
src = self._load_source()
src = asarray(src)
h, w, _d = src.shape
cx = w / 2.
cy = h / 2.
cw = self.parameters.crop_width
ch = self.parameters.crop_height
roi = cx, cy, cw, ch
'''
start the z in motion and take pictures as you go
query stage_controller to get current z
'''
self.info('focus sweep start={} end={}'.format(start, end))
# move to start position
controller = self.stage_controller
if controller:
vo = controller.axes['z'].velocity
if self._cancel_sweep(vo):
return
self.graph.set_x_limits(min(start, end), max(start, end), pad=2)
# sweep 1 and velocity 1
self._do_sweep(start, end, velocity=self.parameters.velocity_scalar1)
fms, focussteps = self._collect_focus_measures(operator, roi)
if not (fms and focussteps):
return
# reached end of sweep
# calculate a nominal focal point
args = self._calculate_nominal_focal_point(fms, focussteps)
if not args:
return
nfocal = args[3]
nwin = self.parameters.negative_window
pwin = self.parameters.positive_window
if self._cancel_sweep(vo):
return
nstart, nend = max(0, nfocal - nwin), nfocal + pwin
# mi = min(min(nstart, nend), min(start, end))
# ma = max(max(nstart, nend), max(start, end))
# self.graph.set_x_limits(mi, ma, pad=2)
time.sleep(1)
# do a slow tight sweep around the nominal focal point
self._do_sweep(nstart, nend, velocity=self.parameters.velocity_scalar2)
fms, focussteps = self._collect_focus_measures(operator, roi, series=1)
self._reset_velocity(vo)
else:
focussteps = linspace(0, 10, 11)
fms = -(focussteps - 5) ** 2 + 10 + random.random(11)
self.info('frames analyzed {}'.format(len(fms)))
# self.canvas.markupcontainer.pop('croprect')
return self._calculate_nominal_focal_point(fms, focussteps)
def _do_sweep(self, start, end, velocity=None):
controller = self.stage_controller
controller.single_axis_move('z', start, block=True)
# time.sleep(0.1)
# explicitly check for motion
# controller.block(axis='z')
if velocity:
vo = controller.axes['z'].velocity
controller.set_single_axis_motion_parameters(pdict=dict(velocity=vo * velocity,
key='z'))
self.info('starting sweep from {}'.format(controller.z_progress))
# pause before moving to end
time.sleep(0.25)
controller.single_axis_move('z', end, update=100, immediate=True)
def _collect_focus_measures(self, operator, roi, series=0):
controller = self.stage_controller
focussteps = []
fms = []
if controller.timer:
p = controller.timer.get_interval()
self.debug('controller timer period {}'.format(p))
pz = controller.z_progress
while 1:
src = self._load_source()
x = controller.z_progress
if x != pz:
y = self._calculate_focus_measure(src, operator, roi)
self.graph.add_datum((x, y), series=series)
focussteps.append(x)
fms.append(y)
pz = x
if not (controller.timer.isActive() and \
not self._evt_autofocusing.isSet()):
break
time.sleep(p)
self.debug('sweep finished')
return fms, focussteps
def _calculate_nominal_focal_point(self, fms, focussteps):
if fms:
sfms = smooth(fms)
if sfms is not None:
self.graph.new_series(focussteps, sfms)
self.graph.redraw()
fmi = focussteps[argmin(sfms)]
fma = focussteps[argmax(sfms)]
mi = min(sfms)
ma = max(sfms)
return mi, fmi, ma, fma
def _calculate_focus_measure(self, src, operator, roi):
'''
see
IMPLEMENTATION OF A PASSIVE AUTOMATIC FOCUSING ALGORITHM
FOR DIGITAL STILL CAMERA
DOI 10.1109/30.468047
and
http://cybertron.cg.tu-berlin.de/pdci10/frankencam/#autofocus
'''
# need to resize to 640,480. this is the space the roi is in
# s = resize(grayspace(pychron), 640, 480)
src = grayspace(src)
v = crop(src, *roi)
di = dict(var=lambda x:variance(x),
laplace=lambda x: get_focus_measure(x, 'laplace'),
sobel=lambda x: ndsum(generic_gradient_magnitude(x, sobel, mode='nearest'))
)
func = di[operator]
return func(v)
def image_view(self):
v = View(Item('image', show_label=False, editor=ImageEditor(),
width=640,
height=480,
style='custom'))
return v
def traits_view(self):
v = View(
HGroup(self._button_factory('autofocus_button', 'autofocus_label'),
Item('configure_button', show_label=False),
show_border=True,
label='Autofocus'
)
)
return v
def configure_view(self):
v = View(Item('parameters', style='custom', show_label=False),
handler=ConfigureHandler,
buttons=['OK', 'Cancel'],
kind='livemodal',
title='Configure Autofocus',
x=0.80,
y=0.05
)
return v
def _load_source(self):
src = self.video.get_frame()
return src
# if pychron:
# return Image.new_frame(pychron)
# self.image.load(pychron)
# return self.image.source_frame
def _get_roi(self):
w = self.parameters.crop_width
h = self.parameters.crop_height
cx, cy = self.canvas.get_center_rect_position(w, h)
# cw, ch = self.canvas.outer_bounds
# print w, h, cw, ch
# cx = cw / 2. - w / 2.
# cy = ch / 2. - h / 2.
# cx = (cw - w) / 2.
# cy = (ch - h) / 2.
# cx = (640 * self.canvas.scaling - w) / 2
# cy = (480 * self.canvas.scaling - h) / 2
roi = cx, cy, w, h
return roi
def _add_focus_area_rect(self, cx, cy, w, h):
# pl = self.canvas.padding_left
# pb = self.canvas.padding_bottom
self.canvas.remove_item('croprect')
self.canvas.add_markup_rect(cx, cy, w, h, identifier='croprect')
def _autofocus_button_fired(self):
if not self.autofocusing:
self.autofocusing = True
self.passive_focus()
else:
self.autofocusing = False
self._evt_autofocusing.set()
self.stop_focus()
def _configure_button_fired(self):
self._crop_rect_update()
self.edit_traits(view='configure_view', kind='livemodal')
self.canvas.remove_item('croprect')
# try:
# self.canvas.markupcontainer.pop('croprect')
# except KeyError:
# pass
@on_trait_change('parameters:[_crop_width,_crop_height]')
def _crop_rect_update(self):
roi = self._get_roi()
self._add_focus_area_rect(*roi)
def _get_autofocus_label(self):
return 'Autofocus' if not self.autofocusing else 'Stop'
def _parameters_default(self):
return self.load_parameter()
def _autofocusing_changed(self, new):
if not new:
self.canvas.remove_item('croprect')
# ===============================================================================
# Deprecated
# ===============================================================================
# ============= EOF =====================================
| 33.261134 | 93 | 0.559369 |
from __future__ import absolute_import
from __future__ import print_function
import six.moves.cPickle as pickle
from traits.api import Bool, Any, Instance, Button, Property, Event, on_trait_change
from traitsui.api import View, Item, Handler, HGroup
from threading import Event as TEvent
from numpy import linspace, argmin, argmax, random, asarray
import time
import os
from pychron.core.time_series.time_series import smooth
from pychron.image.cv_wrapper import grayspace, crop, get_focus_measure
from scipy.ndimage.measurements import variance
from scipy.ndimage.filters import generic_gradient_magnitude, sobel
from scipy.ndimage import sum as ndsum
from pychron.paths import paths
from pychron.managers.manager import Manager
from pychron.image.image import Image
from pychron.graph.graph import Graph
from pychron.mv.focus.focus_parameters import FocusParameters
from pychron.core.ui.image_editor import ImageEditor
from pychron.core.ui.gui import invoke_in_main_thread
from pychron.core.ui.thread import Thread
class ConfigureHandler(Handler):
def closed(self, info, isok):
if isok:
info.object.dump_parameters()
class AutoFocusManager(Manager):
video = Any
laser_manager = Any
stage_controller = Any
canvas = Any
parameters = Instance(FocusParameters)
configure_button = Button('configure')
autofocus_button = Event
autofocus_label = Property(depends_on='autofocusing')
autofocusing = Bool
_evt_autofocusing = None
image = Instance(Image, ())
graph = None
def dump_parameters(self):
p = os.path.join(paths.hidden_dir, 'autofocus_configure')
self.info('dumping parameters to {}'.format(p))
with open(p, 'wb') as f:
pickle.dump(self.parameters, f)
def load_parameter(self):
p = os.path.join(paths.hidden_dir, 'autofocus_configure')
if os.path.isfile(p):
with open(p, 'rb') as f:
try:
params = pickle.load(f)
self.info('loading parameters from {}'.format(p))
if not isinstance(params, FocusParameters):
self.info('out of date parameters file. using default')
params = FocusParameters()
return params
except Exception as e:
print('autofocus load parameter', e)
return FocusParameters()
else:
return FocusParameters()
def passive_focus(self, block=False, **kw):
self._evt_autofocusing = TEvent()
self._evt_autofocusing.clear()
oper = self.parameters.operator
self.info('passive focus. operator = {}'.format(oper))
g = self.graph
if not g:
g = Graph(plotcontainer_dict=dict(padding=10),
window_x=0.70,
window_y=20,
window_width=325,
window_height=325,
window_title='Autofocus'
)
self.graph = g
g.clear()
g.new_plot(padding=[40, 10, 10, 40],
xtitle='Z (mm)',
ytitle='Focus Measure ({})'.format(oper)
)
g.new_series()
g.new_series()
invoke_in_main_thread(self._open_graph)
target = self._passive_focus
self._passive_focus_thread = Thread(name='autofocus', target=target,
args=(self._evt_autofocusing,
),
kwargs=kw
)
self._passive_focus_thread.start()
if block:
self._passive_focus_thread.join()
def _open_graph(self):
ui = self.graph.edit_traits()
self.add_window(ui)
def stop_focus(self):
if self.stage_controller:
self.stage_controller.stop()
self.info('autofocusing stopped by user')
def _passive_focus(self, stop_signal, set_zoom=True):
self.autofocusing = True
manager = self.laser_manager
fstart = self.parameters.fstart
fend = self.parameters.fend
step_scalar = self.parameters.step_scalar
zoom = self.parameters.zoom
operator = self.parameters.operator
steps = step_scalar * (max(fend, fstart) - min(fend, fstart)) + 1
prev_zoom = None
if set_zoom and \
manager is not None and \
zoom:
motor = manager.get_motor('zoom')
if motor:
prev_zoom = motor.data_position
self.info('setting zoom: {}'.format(zoom))
manager.set_motor('zoom', zoom, block=True)
time.sleep(1.5)
args = self._do_focusing(fstart, fend, steps, operator)
if manager is not None:
if prev_zoom is not None:
self.info('returning to previous zoom: {}'.format(prev_zoom))
manager.set_motor('zoom', prev_zoom, block=True)
if args:
mi, fmi, ma, fma = args
self.info('''passive focus results:Operator={}
ImageGradmin={} (z={})
ImageGradmax={}, (z={})'''.format(operator, mi, fmi, ma, fma))
focus_pos = fma
self.graph.add_vertical_rule(focus_pos)
self.graph.redraw()
self.info('calculated focus z= {}'.format(focus_pos))
controller = self.stage_controller
if controller is not None:
if not stop_signal.isSet():
controller.single_axis_move('z', focus_pos, block=True)
controller._z_position = focus_pos
controller.z_progress = focus_pos
self.autofocusing = False
def _cancel_sweep(self, vo):
if self._evt_autofocusing.isSet():
self.autofocusing = False
self._reset_velocity(vo)
return True
def _reset_velocity(self, vo):
if self.stage_controller:
pdict = dict(velocity=vo, key='z')
self.stage_controller.set_single_axis_motion_parameters(pdict=pdict)
def _do_focusing(self, start, end, steps, operator):
screen_roi = self._get_roi()
self._add_focus_area_rect(*screen_roi)
src = self._load_source()
src = asarray(src)
h, w, _d = src.shape
cx = w / 2.
cy = h / 2.
cw = self.parameters.crop_width
ch = self.parameters.crop_height
roi = cx, cy, cw, ch
self.info('focus sweep start={} end={}'.format(start, end))
controller = self.stage_controller
if controller:
vo = controller.axes['z'].velocity
if self._cancel_sweep(vo):
return
self.graph.set_x_limits(min(start, end), max(start, end), pad=2)
self._do_sweep(start, end, velocity=self.parameters.velocity_scalar1)
fms, focussteps = self._collect_focus_measures(operator, roi)
if not (fms and focussteps):
return
args = self._calculate_nominal_focal_point(fms, focussteps)
if not args:
return
nfocal = args[3]
nwin = self.parameters.negative_window
pwin = self.parameters.positive_window
if self._cancel_sweep(vo):
return
nstart, nend = max(0, nfocal - nwin), nfocal + pwin
time.sleep(1)
self._do_sweep(nstart, nend, velocity=self.parameters.velocity_scalar2)
fms, focussteps = self._collect_focus_measures(operator, roi, series=1)
self._reset_velocity(vo)
else:
focussteps = linspace(0, 10, 11)
fms = -(focussteps - 5) ** 2 + 10 + random.random(11)
self.info('frames analyzed {}'.format(len(fms)))
return self._calculate_nominal_focal_point(fms, focussteps)
def _do_sweep(self, start, end, velocity=None):
controller = self.stage_controller
controller.single_axis_move('z', start, block=True)
if velocity:
vo = controller.axes['z'].velocity
controller.set_single_axis_motion_parameters(pdict=dict(velocity=vo * velocity,
key='z'))
self.info('starting sweep from {}'.format(controller.z_progress))
time.sleep(0.25)
controller.single_axis_move('z', end, update=100, immediate=True)
def _collect_focus_measures(self, operator, roi, series=0):
controller = self.stage_controller
focussteps = []
fms = []
if controller.timer:
p = controller.timer.get_interval()
self.debug('controller timer period {}'.format(p))
pz = controller.z_progress
while 1:
src = self._load_source()
x = controller.z_progress
if x != pz:
y = self._calculate_focus_measure(src, operator, roi)
self.graph.add_datum((x, y), series=series)
focussteps.append(x)
fms.append(y)
pz = x
if not (controller.timer.isActive() and \
not self._evt_autofocusing.isSet()):
break
time.sleep(p)
self.debug('sweep finished')
return fms, focussteps
def _calculate_nominal_focal_point(self, fms, focussteps):
if fms:
sfms = smooth(fms)
if sfms is not None:
self.graph.new_series(focussteps, sfms)
self.graph.redraw()
fmi = focussteps[argmin(sfms)]
fma = focussteps[argmax(sfms)]
mi = min(sfms)
ma = max(sfms)
return mi, fmi, ma, fma
def _calculate_focus_measure(self, src, operator, roi):
src = grayspace(src)
v = crop(src, *roi)
di = dict(var=lambda x:variance(x),
laplace=lambda x: get_focus_measure(x, 'laplace'),
sobel=lambda x: ndsum(generic_gradient_magnitude(x, sobel, mode='nearest'))
)
func = di[operator]
return func(v)
def image_view(self):
v = View(Item('image', show_label=False, editor=ImageEditor(),
width=640,
height=480,
style='custom'))
return v
def traits_view(self):
v = View(
HGroup(self._button_factory('autofocus_button', 'autofocus_label'),
Item('configure_button', show_label=False),
show_border=True,
label='Autofocus'
)
)
return v
def configure_view(self):
v = View(Item('parameters', style='custom', show_label=False),
handler=ConfigureHandler,
buttons=['OK', 'Cancel'],
kind='livemodal',
title='Configure Autofocus',
x=0.80,
y=0.05
)
return v
def _load_source(self):
src = self.video.get_frame()
return src
def _get_roi(self):
w = self.parameters.crop_width
h = self.parameters.crop_height
cx, cy = self.canvas.get_center_rect_position(w, h)
roi = cx, cy, w, h
return roi
def _add_focus_area_rect(self, cx, cy, w, h):
self.canvas.remove_item('croprect')
self.canvas.add_markup_rect(cx, cy, w, h, identifier='croprect')
def _autofocus_button_fired(self):
if not self.autofocusing:
self.autofocusing = True
self.passive_focus()
else:
self.autofocusing = False
self._evt_autofocusing.set()
self.stop_focus()
def _configure_button_fired(self):
self._crop_rect_update()
self.edit_traits(view='configure_view', kind='livemodal')
self.canvas.remove_item('croprect')
@on_trait_change('parameters:[_crop_width,_crop_height]')
def _crop_rect_update(self):
roi = self._get_roi()
self._add_focus_area_rect(*roi)
def _get_autofocus_label(self):
return 'Autofocus' if not self.autofocusing else 'Stop'
def _parameters_default(self):
return self.load_parameter()
def _autofocusing_changed(self, new):
if not new:
self.canvas.remove_item('croprect')
| true | true |
f71c5f4fe3f324377d2b741bc0a1bb84b3949735 | 602 | py | Python | gdrive_sync/migrations/0002_drivefile_video.py | mitodl/ocw-studio | 949f96ec0647064f8d495ebdd22d66eea7d024a5 | [
"BSD-3-Clause"
] | 2 | 2020-08-07T15:55:41.000Z | 2021-08-16T18:28:09.000Z | gdrive_sync/migrations/0002_drivefile_video.py | mitodl/ocw-studio | 949f96ec0647064f8d495ebdd22d66eea7d024a5 | [
"BSD-3-Clause"
] | 924 | 2020-08-10T17:54:19.000Z | 2022-03-31T21:15:17.000Z | gdrive_sync/migrations/0002_drivefile_video.py | mitodl/ocw-studio | 949f96ec0647064f8d495ebdd22d66eea7d024a5 | [
"BSD-3-Clause"
] | null | null | null | # Generated by Django 3.1.12 on 2021-08-09 17:27
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("videos", "0001_initial"),
("gdrive_sync", "0001_initial"),
]
operations = [
migrations.AddField(
model_name="drivefile",
name="video",
field=models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.SET_NULL,
to="videos.video",
),
),
]
| 23.153846 | 61 | 0.546512 |
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("videos", "0001_initial"),
("gdrive_sync", "0001_initial"),
]
operations = [
migrations.AddField(
model_name="drivefile",
name="video",
field=models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.SET_NULL,
to="videos.video",
),
),
]
| true | true |
f71c5f5935f04be92603bb1df6e9e04ca6fbc926 | 9,829 | py | Python | main.py | GautamGunecha/KBC-Quiz | 9e78435aaf2a88c6dc2bd38324d9e0b807ac909d | [
"MIT"
] | null | null | null | main.py | GautamGunecha/KBC-Quiz | 9e78435aaf2a88c6dc2bd38324d9e0b807ac909d | [
"MIT"
] | null | null | null | main.py | GautamGunecha/KBC-Quiz | 9e78435aaf2a88c6dc2bd38324d9e0b807ac909d | [
"MIT"
] | null | null | null | # Python Quiz Game
"""Kaun Banega Crorepati (KBC) style console quiz.

Walks the player through 16 multiple-choice questions with theme music.
A wrong answer ends the game after showing the correct option.

Fixes over the original copy-paste version:
- Q8 showed "Correct Answer: C) 13" although the accepted letter was D.
- Q14 showed "Correct Answer: C) Arunachal Pradesh" although the
  accepted letter was A.
- "Better Luck Next Time" capitalization and the "locked kiya jey"
  spacing were inconsistent between questions; now uniform.
- Removed the unused ``import time`` and unified ``exit()``/``quit()``.
"""
from pygame import mixer

mixer.init()


def play_track(path, volume):
    """Load *path* and start playback at *volume* (0.0-1.0)."""
    mixer.music.load(path)
    mixer.music.set_volume(volume)
    mixer.music.play()


# (question text, options block, accepted letter, correct-answer label, prize)
QUESTIONS = [
    ("Ques 1: International Literacy Day is observed on?",
     """
    A) Sep 8                B) Nov 28
    C) May 2                D) Sep 22
    """,
     "a", "A) Sep 8", "Rs1000/-"),
    ("Ques 2: In which group of places the Kumbha Mela is held every twelve years?",
     """
    A) Ujjain, Puri, Prayag, Haridwar           B) Prayag, Haridwar, Ujjain, Nasik
    C) Rameshwaram, Puri, Badrinath, Dwarika    D) Chittakoot, Ujjain, Prayad, Haridwar
    """,
     "b", "B) Prayag, Haridwar, Ujjain, Nasik", "Rs2000/-"),
    ("Ques 3) Which day is observed as the World Standards Day",
     """
    A) June 26              B) Oct 14
    C) Nov 15               D) Dec 2
    """,
     "b", "B) Oct 14", "Rs3000/-"),
    ("Ques 4) Where did Perseverance rover successfully land in 2021",
     """
    A) Mars                 B) Venus
    C) Jupiter              D) Ganymede
    """,
     "a", "A) Mars", "Rs5000/-"),
    ("Ques 5) Name the person who was also known as Deshbandhu?",
     """
    A) S.Radhakrishnan      B) G.K. Gokhale
    C) Chittaranjan Das     D) Madan Mohan Malviya
    """,
     "c", "C) Chittaranjan Das", "Rs10000"),
    ("Ques 6) The capital of Uttarakhand is",
     """
    A) Masoorie             B) Dehra Dun
    C) Nainital             D) Mumbai
    """,
     "b", "B) Dehra Dun", "Rs20,000/-"),
    ("Ques 7) Geet Govind is a famous creation of",
     """
    A) Bana Bhatt           B) Jayadev
    C) Kalidas              D) Bharat Muni
    """,
     "c", "C) Kalidas", "Rs40,000/-"),
    ("Ques 8) Which of the followin represents the Finance Commissions that have been set-up so far",
     """
    A) 10                   B) 11
    C) 12                   D) 13
    """,
     "d", "D) 13", "Rs80,000/-"),
    ("Ques 9) According to the Constitution of India, which of the following is NOT one of the main organs of the Goverment",
     """
    A) Legislature          B) Bureaucracy
    C) Executive            D) Judiciar
    """,
     "b", "B) Bureaucracy", "Rs1,60,000/-"),
    ("Ques 10) Panchayati Raj comes under?",
     """
    A) Residual list        B) Concurrent list
    C) State list           D) Union list
    """,
     "c", "C) State List", "Rs3,20,000/-"),
    ("Ques 11) Harshcharita and KadamBari are the works of",
     """
    A) Kalhan               B) Bana Bhatta
    C) Panini               D) Patanjali
    """,
     "b", "B) Bana Bhatta", "Rs6,40,000/-"),
    ("Ques 12) When did the war of Americans Independence take place?",
     """
    A) 1770                 B) 1772
    C) 1774                 D) 1776
    """,
     "d", "D) 1776", "Rs12,50,000/-"),
    ("Ques 13) The river was also called as the Ganges of the South, Name the river from the given options",
     """
    A) Godavari             B) Krishna
    C) Cauvery              D) Yamuna
    """,
     "c", "C) Cauvery", "Rs25,00,000/-"),
    ("Ques 14) Which Indian state is inhabited by 'Jaintiya tribes",
     """
    A) Arunachal Pradesh    B) Manipur
    C) Mizoram              D) Meghalaya
    """,
     "a", "A) Arunachal Pradesh", "Rs50,00,000/-"),
    ("Ques 15) In the World Boxing Championships 2017, who won India its first medal?",
     """
    A) Gautav Bidhuri       B) Alexzander
    C) Tarzan               D) Mitsuda
    """,
     "a", "A) Gaurav Bidhuri", "Rs1,00,00,000"),
    ("Ques 16) What is the height of Siachen Glacier at eastern Karakoram range in the Himalya Mountains?",
     """
    A) 5400 Metre           B) 6400 Metre
    C) 5600 Metre           D) 8500 Metre
    """,
     "a", "A) 5400 Metre", "Rs7,00,00,000"),
]

play_track("Audio/KBCMAIN.mp3", 0.2)
print("Let's Play Kaun Banega Crorepati")
name = input("Please Enter your Name: ")
print("Welcome", name)
decision = input("Do you want to play KBC(Yes/No): ").lower()
if decision != "yes":
    print(name, "Have a nice Day")
    quit()
print("Let's Begin :)")
print("On Screen Your 1st Question")
print("")

for question, options, correct_letter, correct_label, prize in QUESTIONS:
    # Theme sting before every question, exactly as the original did.
    play_track("Audio/KBC-T.mp3", 0.3)
    print(question)
    print("Choose correct Option: ")
    print(options)
    answer = input("Answer: ")
    print("Option", answer, "locked kiya jey")
    if answer.lower() != correct_letter:
        print("Better Luck Next Time")
        print("Correct Answer:", correct_label)
        quit()
    print("Sahi Javaab", name)
    print("Price Money Earned:", prize)

play_track("Audio/AmitabhSir.mp3", 0.3)
print("Congratulations! You are now crorepati", name)
| 29.428144 | 126 | 0.630786 |
"""Kaun Banega Crorepati (KBC) style console quiz.

Walks the player through 16 multiple-choice questions with theme music.
A wrong answer ends the game after showing the correct option.

Fixes over the original copy-paste version:
- Q8 showed "Correct Answer: C) 13" although the accepted letter was D.
- Q14 showed "Correct Answer: C) Arunachal Pradesh" although the
  accepted letter was A.
- "Better Luck Next Time" capitalization and the "locked kiya jey"
  spacing were inconsistent between questions; now uniform.
- Removed the unused ``import time`` and unified ``exit()``/``quit()``.
"""
from pygame import mixer

mixer.init()


def play_track(path, volume):
    """Load *path* and start playback at *volume* (0.0-1.0)."""
    mixer.music.load(path)
    mixer.music.set_volume(volume)
    mixer.music.play()


# (question text, options block, accepted letter, correct-answer label, prize)
QUESTIONS = [
    ("Ques 1: International Literacy Day is observed on?",
     """
    A) Sep 8                B) Nov 28
    C) May 2                D) Sep 22
    """,
     "a", "A) Sep 8", "Rs1000/-"),
    ("Ques 2: In which group of places the Kumbha Mela is held every twelve years?",
     """
    A) Ujjain, Puri, Prayag, Haridwar           B) Prayag, Haridwar, Ujjain, Nasik
    C) Rameshwaram, Puri, Badrinath, Dwarika    D) Chittakoot, Ujjain, Prayad, Haridwar
    """,
     "b", "B) Prayag, Haridwar, Ujjain, Nasik", "Rs2000/-"),
    ("Ques 3) Which day is observed as the World Standards Day",
     """
    A) June 26              B) Oct 14
    C) Nov 15               D) Dec 2
    """,
     "b", "B) Oct 14", "Rs3000/-"),
    ("Ques 4) Where did Perseverance rover successfully land in 2021",
     """
    A) Mars                 B) Venus
    C) Jupiter              D) Ganymede
    """,
     "a", "A) Mars", "Rs5000/-"),
    ("Ques 5) Name the person who was also known as Deshbandhu?",
     """
    A) S.Radhakrishnan      B) G.K. Gokhale
    C) Chittaranjan Das     D) Madan Mohan Malviya
    """,
     "c", "C) Chittaranjan Das", "Rs10000"),
    ("Ques 6) The capital of Uttarakhand is",
     """
    A) Masoorie             B) Dehra Dun
    C) Nainital             D) Mumbai
    """,
     "b", "B) Dehra Dun", "Rs20,000/-"),
    ("Ques 7) Geet Govind is a famous creation of",
     """
    A) Bana Bhatt           B) Jayadev
    C) Kalidas              D) Bharat Muni
    """,
     "c", "C) Kalidas", "Rs40,000/-"),
    ("Ques 8) Which of the followin represents the Finance Commissions that have been set-up so far",
     """
    A) 10                   B) 11
    C) 12                   D) 13
    """,
     "d", "D) 13", "Rs80,000/-"),
    ("Ques 9) According to the Constitution of India, which of the following is NOT one of the main organs of the Goverment",
     """
    A) Legislature          B) Bureaucracy
    C) Executive            D) Judiciar
    """,
     "b", "B) Bureaucracy", "Rs1,60,000/-"),
    ("Ques 10) Panchayati Raj comes under?",
     """
    A) Residual list        B) Concurrent list
    C) State list           D) Union list
    """,
     "c", "C) State List", "Rs3,20,000/-"),
    ("Ques 11) Harshcharita and KadamBari are the works of",
     """
    A) Kalhan               B) Bana Bhatta
    C) Panini               D) Patanjali
    """,
     "b", "B) Bana Bhatta", "Rs6,40,000/-"),
    ("Ques 12) When did the war of Americans Independence take place?",
     """
    A) 1770                 B) 1772
    C) 1774                 D) 1776
    """,
     "d", "D) 1776", "Rs12,50,000/-"),
    ("Ques 13) The river was also called as the Ganges of the South, Name the river from the given options",
     """
    A) Godavari             B) Krishna
    C) Cauvery              D) Yamuna
    """,
     "c", "C) Cauvery", "Rs25,00,000/-"),
    ("Ques 14) Which Indian state is inhabited by 'Jaintiya tribes",
     """
    A) Arunachal Pradesh    B) Manipur
    C) Mizoram              D) Meghalaya
    """,
     "a", "A) Arunachal Pradesh", "Rs50,00,000/-"),
    ("Ques 15) In the World Boxing Championships 2017, who won India its first medal?",
     """
    A) Gautav Bidhuri       B) Alexzander
    C) Tarzan               D) Mitsuda
    """,
     "a", "A) Gaurav Bidhuri", "Rs1,00,00,000"),
    ("Ques 16) What is the height of Siachen Glacier at eastern Karakoram range in the Himalya Mountains?",
     """
    A) 5400 Metre           B) 6400 Metre
    C) 5600 Metre           D) 8500 Metre
    """,
     "a", "A) 5400 Metre", "Rs7,00,00,000"),
]

play_track("Audio/KBCMAIN.mp3", 0.2)
print("Let's Play Kaun Banega Crorepati")
name = input("Please Enter your Name: ")
print("Welcome", name)
decision = input("Do you want to play KBC(Yes/No): ").lower()
if decision != "yes":
    print(name, "Have a nice Day")
    quit()
print("Let's Begin :)")
print("On Screen Your 1st Question")
print("")

for question, options, correct_letter, correct_label, prize in QUESTIONS:
    # Theme sting before every question, exactly as the original did.
    play_track("Audio/KBC-T.mp3", 0.3)
    print(question)
    print("Choose correct Option: ")
    print(options)
    answer = input("Answer: ")
    print("Option", answer, "locked kiya jey")
    if answer.lower() != correct_letter:
        print("Better Luck Next Time")
        print("Correct Answer:", correct_label)
        quit()
    print("Sahi Javaab", name)
    print("Price Money Earned:", prize)

play_track("Audio/AmitabhSir.mp3", 0.3)
print("Congratulations! You are now crorepati", name)
| true | true |
f71c608e5b1cc741cd572c1921ad56281e09b9eb | 2,400 | py | Python | contrib/python/Jinja2/tests/test_utils.py | HeyLey/catboost | f472aed90604ebe727537d9d4a37147985e10ec2 | [
"Apache-2.0"
] | 1 | 2019-01-26T02:58:50.000Z | 2019-01-26T02:58:50.000Z | contrib/python/Jinja2/tests/test_utils.py | HeyLey/catboost | f472aed90604ebe727537d9d4a37147985e10ec2 | [
"Apache-2.0"
] | 1 | 2019-07-29T12:25:53.000Z | 2019-07-29T12:25:53.000Z | contrib/python/Jinja2/tests/test_utils.py | HeyLey/catboost | f472aed90604ebe727537d9d4a37147985e10ec2 | [
"Apache-2.0"
] | 1 | 2020-11-11T16:56:19.000Z | 2020-11-11T16:56:19.000Z | # -*- coding: utf-8 -*-
"""
jinja2.testsuite.utils
~~~~~~~~~~~~~~~~~~~~~~
Tests utilities jinja uses.
:copyright: (c) 2017 by the Jinja Team.
:license: BSD, see LICENSE for more details.
"""
import gc
import pytest
import pickle
from jinja2.utils import LRUCache, escape, object_type_repr, urlize, \
select_autoescape
@pytest.mark.utils
@pytest.mark.lrucache
class TestLRUCache(object):
    """Exercise the LRU cache used by Jinja's template caching."""

    def test_simple(self):
        # Fill a 3-slot cache, touch "a" to refresh it, then overflow:
        # "b" becomes the least recently used entry and must be evicted.
        cache = LRUCache(3)
        for key, value in (("a", 1), ("b", 2), ("c", 3)):
            cache[key] = value
        cache["a"]
        cache["d"] = 4
        assert len(cache) == 3
        assert "b" not in cache
        assert "a" in cache and "c" in cache and "d" in cache

    def test_pickleable(self):
        cache = LRUCache(2)
        cache["foo"] = 42
        cache["bar"] = 23
        cache["foo"]
        # Round-trip through the classic pickle protocols; capacity,
        # contents, and usage order must all survive.
        for protocol in (0, 1, 2):
            clone = pickle.loads(pickle.dumps(cache, protocol))
            assert clone.capacity == cache.capacity
            assert clone._mapping == cache._mapping
            assert clone._queue == cache._queue
@pytest.mark.utils
@pytest.mark.helpers
class TestHelpers(object):
    """Checks for assorted helpers in jinja2.utils."""

    def test_object_type_repr(self):
        class X(object):
            pass

        # Table of (object, expected human-readable type description).
        expectations = [
            (42, 'int object'),
            ([], 'list object'),
            (X(), 'test_utils.X object'),
            (None, 'None'),
            (Ellipsis, 'Ellipsis'),
        ]
        for obj, expected in expectations:
            assert object_type_repr(obj) == expected

    def test_autoescape_select(self):
        # HTML-ish extensions escape, txt never does; plain strings fall
        # back to the string default and unknown extensions to the
        # general default.
        func = select_autoescape(
            enabled_extensions=('html', '.htm'),
            disabled_extensions=('txt',),
            default_for_string='STRING',
            default='NONE',
        )
        cases = [
            (None, 'STRING'),
            ('unknown.foo', 'NONE'),
            ('foo.html', True),
            ('foo.htm', True),
            ('foo.txt', False),
            ('FOO.HTML', True),
            ('FOO.TXT', False),
        ]
        for filename, expected in cases:
            assert func(filename) == expected
@pytest.mark.utils
@pytest.mark.escapeUrlizeTarget
class TestEscapeUrlizeTarget(object):
    """urlize() must HTML-escape a caller-supplied link target."""

    def test_escape_urlize_target(self):
        url = "http://example.org"
        expected = (
            '<a href="http://example.org" target="&lt;script&gt;">'
            'http://example.org</a>'
        )
        assert urlize(url, target="<script>") == expected
| 27.586207 | 76 | 0.558333 |
import gc
import pytest
import pickle
from jinja2.utils import LRUCache, escape, object_type_repr, urlize, \
select_autoescape
@pytest.mark.utils
@pytest.mark.lrucache
class TestLRUCache(object):
    """Exercise the LRU cache used by Jinja's template caching."""

    def test_simple(self):
        # Fill a 3-slot cache, touch "a" to refresh it, then overflow:
        # "b" becomes the least recently used entry and must be evicted.
        cache = LRUCache(3)
        for key, value in (("a", 1), ("b", 2), ("c", 3)):
            cache[key] = value
        cache["a"]
        cache["d"] = 4
        assert len(cache) == 3
        assert "b" not in cache
        assert "a" in cache and "c" in cache and "d" in cache

    def test_pickleable(self):
        cache = LRUCache(2)
        cache["foo"] = 42
        cache["bar"] = 23
        cache["foo"]
        # Round-trip through the classic pickle protocols; capacity,
        # contents, and usage order must all survive.
        for protocol in (0, 1, 2):
            clone = pickle.loads(pickle.dumps(cache, protocol))
            assert clone.capacity == cache.capacity
            assert clone._mapping == cache._mapping
            assert clone._queue == cache._queue
@pytest.mark.utils
@pytest.mark.helpers
class TestHelpers(object):
    """Checks for assorted helpers in jinja2.utils."""

    def test_object_type_repr(self):
        class X(object):
            pass

        # Table of (object, expected human-readable type description).
        expectations = [
            (42, 'int object'),
            ([], 'list object'),
            (X(), 'test_utils.X object'),
            (None, 'None'),
            (Ellipsis, 'Ellipsis'),
        ]
        for obj, expected in expectations:
            assert object_type_repr(obj) == expected

    def test_autoescape_select(self):
        # HTML-ish extensions escape, txt never does; plain strings fall
        # back to the string default and unknown extensions to the
        # general default.
        func = select_autoescape(
            enabled_extensions=('html', '.htm'),
            disabled_extensions=('txt',),
            default_for_string='STRING',
            default='NONE',
        )
        cases = [
            (None, 'STRING'),
            ('unknown.foo', 'NONE'),
            ('foo.html', True),
            ('foo.htm', True),
            ('foo.txt', False),
            ('FOO.HTML', True),
            ('FOO.TXT', False),
        ]
        for filename, expected in cases:
            assert func(filename) == expected
assert func('FOO.TXT') == False
@pytest.mark.utils
@pytest.mark.escapeUrlizeTarget
class TestEscapeUrlizeTarget(object):
def test_escape_urlize_target(self):
url = "http://example.org"
target = "<script>"
assert urlize(url, target=target) == ('<a href="http://example.org"'
' target="<script>">'
'http://example.org</a>')
| true | true |
f71c6158b5131e8416bc7f571ce69e11b023703b | 52,287 | py | Python | dendropy/test/test_datamodel_tree_list.py | EnjoyLifeFund/macHighSierra-py36-pkgs | 5668b5785296b314ea1321057420bcd077dba9ea | [
"BSD-3-Clause",
"BSD-2-Clause",
"MIT"
] | null | null | null | dendropy/test/test_datamodel_tree_list.py | EnjoyLifeFund/macHighSierra-py36-pkgs | 5668b5785296b314ea1321057420bcd077dba9ea | [
"BSD-3-Clause",
"BSD-2-Clause",
"MIT"
] | null | null | null | dendropy/test/test_datamodel_tree_list.py | EnjoyLifeFund/macHighSierra-py36-pkgs | 5668b5785296b314ea1321057420bcd077dba9ea | [
"BSD-3-Clause",
"BSD-2-Clause",
"MIT"
] | null | null | null | # !/usr/bin/env python
##############################################################################
## DendroPy Phylogenetic Computing Library.
##
## Copyright 2010-2015 Jeet Sukumaran and Mark T. Holder.
## All rights reserved.
##
## See "LICENSE.rst" for terms and conditions of usage.
##
## If you use this work or any portion thereof in published work,
## please cite it as:
##
## Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
## for phylogenetic computing. Bioinformatics 26: 1569-1571.
##
##############################################################################
"""
Tests for dendropy.TreeList.
"""
import copy
import sys
import unittest
import collections
import dendropy
import random
from dendropy.test.support import dendropytest
from dendropy.test.support import curated_test_tree
from dendropy.test.support import curated_test_tree_list
from dendropy.test.support import compare_and_validate
class TestTreeListBasicOperations(dendropytest.ExtendedTestCase):
    def test_insert_simple_list_foreign_namespace(self):
        """Inserting a tree built under a foreign namespace migrates it.

        For every insertion position, a tree created with its own taxon
        namespace is inserted; the list must keep its original namespace
        while the inserted tree and all node taxa end up bound to it.
        """
        for idx in range(6):
            tlist = curated_test_tree_list.get_tree_list(5)
            self.assertEqual(len(tlist), 5)
            self.assertEqual(len(tlist._trees), 5)
            original_tns = tlist.taxon_namespace
            # Tree deliberately created with its own (foreign) namespace.
            tree = curated_test_tree_list.get_tree()
            tlist.insert(idx, tree)
            self.assertEqual(len(tlist), 6)
            self.assertEqual(len(tlist._trees), 6)
            # The list keeps its namespace; the tree is remapped into it.
            self.assertIs(tlist.taxon_namespace, original_tns)
            self.assertIn(tree, tlist)
            self.assertIs(tree.taxon_namespace, tlist.taxon_namespace)
            self.assertEqual(len(tlist.taxon_namespace), 7)
            for t1 in tlist:
                self.assertIs(t1.taxon_namespace, tlist.taxon_namespace)
                for nd in t1:
                    self.assertIn(nd.taxon, tlist.taxon_namespace)
    def test_insert_simple_list_native_namespace(self):
        """Inserting a tree that already shares the list's namespace.

        Same checks as the foreign-namespace case, but here the tree is
        created against the very namespace the list uses, so nothing
        needs to be remapped.
        """
        for idx in range(6):
            tns = dendropy.TaxonNamespace()
            tlist = curated_test_tree_list.get_tree_list(5, taxon_namespace=tns)
            self.assertEqual(len(tlist), 5)
            self.assertEqual(len(tlist._trees), 5)
            original_tns = tlist.taxon_namespace
            tree = curated_test_tree_list.get_tree(taxon_namespace=tns)
            tlist.insert(idx, tree)
            self.assertEqual(len(tlist), 6)
            self.assertEqual(len(tlist._trees), 6)
            self.assertIs(tlist.taxon_namespace, original_tns)
            self.assertIn(tree, tlist)
            self.assertIs(tree.taxon_namespace, tlist.taxon_namespace)
            self.assertEqual(len(tlist.taxon_namespace), 7)
            for t1 in tlist:
                self.assertIs(t1.taxon_namespace, tlist.taxon_namespace)
                for nd in t1:
                    self.assertIn(nd.taxon, tlist.taxon_namespace)
    def test_append_simple_list_foreign_namespace(self):
        """append() keeps the tree objects and rebinds their taxa.

        Trees built under a different namespace are appended one by one;
        the very same objects must appear in order (``assertIs``) with
        their node taxa migrated into the list's namespace.
        """
        tlist, trees = curated_test_tree_list.get_tree_list_and_list_of_trees(num_trees=curated_test_tree_list.DEFAULT_NUM_TREES)
        original_tns = tlist.taxon_namespace
        for t in trees:
            tlist.append(t)
        self.assertEqual(len(tlist), curated_test_tree_list.DEFAULT_NUM_TREES)
        self.assertIs(tlist.taxon_namespace, original_tns)
        # self.assertEqual(len(tlist.taxon_namespace), len(tlist[0].tax_labels))
        self.assertEqual(len(tlist.taxon_namespace), 7)
        for t1, t2 in zip(tlist, trees):
            self.assertIs(t1, t2)
            self.assertIs(t1.taxon_namespace, tlist.taxon_namespace)
            for nd in t1:
                self.assertIn(nd.taxon, tlist.taxon_namespace)
    def test_append_simple_list_same_namespace(self):
        """append() when source trees already share the list's namespace.

        Both the list and the appended trees are created against one
        shared TaxonNamespace, so appending must neither replace the
        namespace nor grow it beyond the fixture's 7 taxa.
        """
        tns = dendropy.TaxonNamespace()
        tlist, trees = curated_test_tree_list.get_tree_list_and_list_of_trees(
            num_trees=curated_test_tree_list.DEFAULT_NUM_TREES,
            tree_list_taxon_namespace=tns,
            list_of_trees_taxon_namespace=tns)
        original_tns = tlist.taxon_namespace
        for t in trees:
            tlist.append(t)
        self.assertEqual(len(tlist), curated_test_tree_list.DEFAULT_NUM_TREES)
        self.assertIs(tlist.taxon_namespace, original_tns)
        # self.assertEqual(len(tlist.taxon_namespace), len(tlist[0].tax_labels))
        self.assertEqual(len(tlist.taxon_namespace), 7)
        for t1, t2 in zip(tlist, trees):
            self.assertIs(t1, t2)
            self.assertIs(t1.taxon_namespace, tlist.taxon_namespace)
            for nd in t1:
                self.assertIn(nd.taxon, tlist.taxon_namespace)
    def test_iadd_from_another_tree_list_different_namespace(self):
        """``+=`` with another TreeList appends (copied) trees in order.

        After the operation the target's length is the sum of both
        lists, labels follow original-then-source order, every tree is
        bound to the target's unchanged namespace, and none of the
        resulting trees is an object still held by the source list
        (``assertNotIn`` below), i.e. the source trees are not shared.
        """
        tlist = curated_test_tree_list.get_tree_list(num_trees=3)
        original_tns = tlist.taxon_namespace
        original_tlist_len = len(tlist)
        original_tree_labels = [t.label for t in tlist]
        self.assertEqual(len(original_tree_labels), len(tlist))
        self.assertEqual(original_tlist_len, 3)
        tlist_source = curated_test_tree_list.get_tree_list(num_trees=5)
        self.assertEqual(len(tlist_source), 5)
        source_tree_labels = [t.label for t in tlist_source]
        self.assertEqual(len(source_tree_labels), len(tlist_source))
        tlist += tlist_source
        self.assertEqual(len(tlist), original_tlist_len + len(tlist_source))
        self.assertIs(tlist.taxon_namespace, original_tns)
        # self.assertEqual(len(tlist.taxon_namespace), len(tlist[0].tax_labels))
        self.assertEqual(len(tlist.taxon_namespace), 7)
        expected_tree_labels = original_tree_labels + source_tree_labels
        self.assertEqual(len(tlist), len(expected_tree_labels))
        for t1, tlabel in zip(tlist, expected_tree_labels):
            self.assertIn(t1, tlist)
            self.assertNotIn(t1, tlist_source)
            self.assertIs(t1.taxon_namespace, tlist.taxon_namespace)
            self.assertEqual(t1.label, tlabel)
            for nd in t1:
                self.assertIn(nd.taxon, tlist.taxon_namespace)
    def test_iadd_from_list_of_trees_different_namespace(self):
        """``+=`` with a plain Python list adds the trees by reference.

        Unlike the TreeList-source case, trees coming from a bare list
        remain the same objects (trees with source labels are asserted
        to still be members of ``source_trees``); they are rebound to
        the target's namespace.
        """
        tlist = curated_test_tree_list.get_tree_list(num_trees=3)
        original_tns = tlist.taxon_namespace
        original_tlist_len = len(tlist)
        original_tree_labels = [t.label for t in tlist]
        self.assertEqual(len(original_tree_labels), len(tlist))
        self.assertEqual(original_tlist_len, 3)
        source_trees = curated_test_tree_list.get_trees(
            num_trees=5,
            taxon_namespace=None,
            label=None,
            suppress_internal_node_taxa=False,
            suppress_leaf_node_taxa=False)
        self.assertEqual(len(source_trees), 5)
        source_tree_labels = [t.label for t in source_trees]
        self.assertEqual(len(source_tree_labels), len(source_trees))
        tlist += source_trees
        self.assertEqual(len(tlist), original_tlist_len + len(source_trees))
        self.assertIs(tlist.taxon_namespace, original_tns)
        # self.assertEqual(len(tlist.taxon_namespace), len(tlist[0].tax_labels))
        self.assertEqual(len(tlist.taxon_namespace), 7)
        expected_tree_labels = original_tree_labels + source_tree_labels
        self.assertEqual(len(tlist), len(expected_tree_labels))
        for t1, tlabel in zip(tlist, expected_tree_labels):
            self.assertIn(t1, tlist)
            if tlabel in source_tree_labels:
                self.assertIn(t1, source_trees)
            else:
                self.assertNotIn(t1, source_trees)
            self.assertIs(t1.taxon_namespace, tlist.taxon_namespace)
            self.assertEqual(t1.label, tlabel)
            for nd in t1:
                self.assertIn(nd.taxon, tlist.taxon_namespace)
    def test_add_from_another_tree_list_different_namespace(self):
        """``TreeList + list`` builds a new list; sources are untouched.

        The result uses the left operand's namespace. Trees whose labels
        come from the left TreeList are copies (members of neither
        source), while trees from the plain-list right operand are the
        same objects (still members of ``tlist_source2``).
        """
        tlist_source1 = curated_test_tree_list.get_tree_list(num_trees=3)
        original_tns = tlist_source1.taxon_namespace
        source1_tree_labels = [t.label for t in tlist_source1]
        self.assertEqual(len(source1_tree_labels), len(tlist_source1))
        self.assertEqual(len(tlist_source1), 3)
        tlist_source2 = curated_test_tree_list.get_trees(num_trees=5)
        self.assertEqual(len(tlist_source2), 5)
        source2_tree_labels = [t.label for t in tlist_source2]
        self.assertEqual(len(source2_tree_labels), len(tlist_source2))
        tlist = tlist_source1 + tlist_source2
        # Neither operand was mutated by the addition.
        self.assertEqual(len(tlist_source1), 3)
        self.assertEqual(len(tlist_source2), 5)
        self.assertEqual(len(tlist), len(tlist_source1) + len(tlist_source2))
        self.assertIs(tlist.taxon_namespace, original_tns)
        self.assertEqual(len(tlist.taxon_namespace), 7)
        expected_tree_labels = source1_tree_labels + source2_tree_labels
        self.assertEqual(len(tlist), len(expected_tree_labels))
        for t1, tlabel in zip(tlist, expected_tree_labels):
            self.assertIn(t1, tlist)
            self.assertIs(t1.taxon_namespace, tlist.taxon_namespace)
            self.assertEqual(t1.label, tlabel)
            if t1.label in source1_tree_labels:
                self.assertNotIn(t1, tlist_source1)
                self.assertNotIn(t1, tlist_source2)
            else:
                self.assertNotIn(t1, tlist_source1)
                self.assertIn(t1, tlist_source2)
            for nd in t1:
                self.assertIn(nd.taxon, tlist.taxon_namespace)
def test_contains(self):
tlist = curated_test_tree_list.get_tree_list(5)
self.assertEqual(len(tlist._trees), len(tlist))
self.assertEqual(len(tlist), 5)
trees = curated_test_tree_list.get_trees(5)
self.assertEqual(len(trees), 5)
for t in tlist:
self.assertTrue(t in tlist._trees)
self.assertTrue(t in tlist)
for t in trees:
self.assertFalse(t in tlist._trees)
self.assertFalse(t in tlist)
tlist += trees
for t in trees:
self.assertTrue(t in tlist._trees)
self.assertTrue(t in tlist)
def test_delitem(self):
tsize = 5
for del_idx in range(-tsize, tsize):
tlist = curated_test_tree_list.get_tree_list(tsize)
original_trees = list(tlist._trees)
self.assertIn(original_trees[del_idx], tlist._trees)
del tlist[del_idx]
self.assertNotIn(original_trees[del_idx], tlist._trees)
self.assertEqual(len(tlist), tsize - 1)
del original_trees[del_idx]
self.assertEqual(tlist._trees, original_trees)
def test_iter(self):
tlist = curated_test_tree_list.get_tree_list(5)
self.assertEqual(len(tlist), 5)
self.assertEqual(len(tlist._trees), len(tlist))
for t1, t2 in zip(tlist, tlist._trees):
self.assertIs(t1, t2)
def test_reversed(self):
tlist = curated_test_tree_list.get_tree_list(5)
self.assertEqual(len(tlist), 5)
self.assertEqual(len(tlist._trees), len(tlist))
for t1, t2 in zip(reversed(tlist), reversed(tlist._trees)):
self.assertIs(t1, t2)
def test_getitem_simple(self):
tsize = 5
tlist = curated_test_tree_list.get_tree_list(tsize)
self.assertEqual(len(tlist), tsize)
self.assertEqual(len(tlist._trees), len(tlist))
for idx in range(-tsize, tsize):
self.assertIs(tlist[idx], tlist._trees[idx])
self.assertTrue(isinstance(tlist[idx], dendropy.Tree))
def test_getitem_slice(self):
tsize = 5
tlist = curated_test_tree_list.get_tree_list(tsize)
self.assertEqual(len(tlist), tsize)
self.assertEqual(len(tlist._trees), len(tlist))
for a in range(-tsize, tsize):
for b in range(-tsize, tsize):
for step in range(-tsize, tsize):
if step == 0:
continue
tt = tlist[a:b:step]
k = tlist._trees[a:b:step]
self.assertEqual(len(tt), len(k))
for t1, t2 in zip(tt, k):
self.assertIn(t1, tlist)
self.assertIn(t1, tlist._trees)
self.assertIs(t1.taxon_namespace, tlist.taxon_namespace)
def test_setitem_simple(self):
tsize = 5
for idx in range(-tsize, tsize):
tlist = curated_test_tree_list.get_tree_list(tsize)
self.assertEqual(len(tlist), tsize)
self.assertEqual(len(tlist._trees), len(tlist))
old_tree = tlist[idx]
new_tree = curated_test_tree_list.get_tree()
tlist[idx] = new_tree
self.assertIs(tlist[idx], new_tree)
self.assertIsNot(tlist[idx], old_tree)
self.assertIn(new_tree, tlist)
self.assertNotIn(old_tree, tlist)
self.assertIs(new_tree.taxon_namespace,
tlist.taxon_namespace)
self.assertEqual(len(tlist.taxon_namespace), 7)
for tree in tlist:
for nd in tree:
self.assertIn(nd.taxon, tlist.taxon_namespace)
    def test_setitem_slice_from_list(self):
        """Slice assignment from a plain list mirrors list slice-assignment.

        For every (a, b, step) combination yielding a non-empty slice,
        assigning fresh trees into ``tlist[a:b:step]`` must leave the
        TreeList element-for-element identical to the same assignment on a
        plain-list copy, with all trees sharing the list's taxon namespace.
        """
        tsize = 5
        for a in range(-tsize, tsize):
            for b in range(-tsize, tsize):
                for step in range(-tsize, tsize):
                    if step == 0:
                        continue
                    # Skip (a, b, step) combinations that select nothing.
                    slice_obj = slice(a, b, step)
                    slice_len = len(range(*slice_obj.indices(tsize)))
                    if slice_len <= 0:
                        continue
                    tlist = curated_test_tree_list.get_tree_list(tsize)
                    self.assertEqual(len(tlist), tsize)
                    self.assertEqual(len(tlist._trees), len(tlist))
                    copy_list = list(tlist._trees)
                    source = curated_test_tree_list.get_trees(slice_len)
                    # Apply the identical slice assignment to both containers.
                    tlist[a:b:step] = source
                    copy_list[a:b:step] = source
                    expected_tree_labels = [t.label for t in copy_list]
                    self.assertEqual(len(tlist), len(copy_list))
                    self.assertEqual(len(tlist), len(tlist._trees))
                    self.assertEqual(len(tlist.taxon_namespace), 7)
                    for t1, t2, tlabel in zip(tlist, copy_list, expected_tree_labels):
                        self.assertIs(t1, t2)
                        self.assertIn(t1, tlist)
                        self.assertIn(t1, tlist._trees)
                        self.assertEqual(t1.label, tlabel)
                        self.assertIs(t1.taxon_namespace, tlist.taxon_namespace)
                        for nd in t1:
                            self.assertIn(nd.taxon, tlist.taxon_namespace)
    def test_setitem_slice_from_tree_list(self):
        """Slice assignment from another TreeList does not share Tree objects.

        Positions covered by the slice end up holding trees distinct from
        the source TreeList's trees (identity differs), while untouched
        positions keep their original tree objects.
        """
        tsize = 5
        for a in range(-tsize, tsize):
            for b in range(-tsize, tsize):
                for step in range(-tsize, tsize):
                    if step == 0:
                        continue
                    # Skip (a, b, step) combinations that select nothing.
                    slice_obj = slice(a, b, step)
                    slice_indexes = list(range(*slice_obj.indices(tsize)))
                    slice_len = len(slice_indexes)
                    if slice_len <= 0:
                        continue
                    tlist = curated_test_tree_list.get_tree_list(tsize)
                    self.assertEqual(len(tlist), tsize)
                    self.assertEqual(len(tlist._trees), len(tlist))
                    copy_list = list(tlist._trees)
                    source = curated_test_tree_list.get_tree_list(slice_len)
                    # The plain-list mirror gets the source's raw trees;
                    # the TreeList gets the source TreeList itself.
                    copy_list[a:b:step] = source._trees
                    tlist[a:b:step] = source
                    expected_tree_labels = [t.label for t in copy_list]
                    self.assertEqual(len(tlist), len(copy_list))
                    self.assertEqual(len(tlist), len(tlist._trees))
                    self.assertEqual(len(tlist.taxon_namespace), 7)
                    for idx, (t1, t2, tlabel) in enumerate(zip(tlist, copy_list, expected_tree_labels)):
                        self.assertIn(t1, tlist)
                        self.assertIn(t1, tlist._trees)
                        self.assertEqual(t1.label, tlabel)
                        self.assertIs(t1.taxon_namespace, tlist.taxon_namespace)
                        if idx in slice_indexes:
                            # Slots written by the slice hold trees distinct
                            # from the source TreeList's own trees.
                            self.assertIsNot(t1, t2)
                            self.assertIn(t1, tlist)
                            self.assertIn(t1, tlist._trees)
                            self.assertNotIn(t2, tlist)
                            self.assertNotIn(t2, tlist._trees)
                            self.assertNotIn(t1, source)
                            self.assertNotIn(t1, source._trees)
                        else:
                            # Slots outside the slice are untouched.
                            self.assertIs(t1, t2)
                        for nd in t1:
                            self.assertIn(nd.taxon, tlist.taxon_namespace)
def test_clear(self):
tlist = curated_test_tree_list.get_tree_list(5)
self.assertEqual(len(tlist._trees), 5)
tlist.clear()
self.assertEqual(len(tlist), 0)
self.assertEqual(len(tlist._trees), 0)
    def test_extend_from_another_tree_list_different_namespace(self):
        """extend() from a TreeList with a foreign namespace copies trees in.

        The extending list keeps its own namespace; the appended trees are
        not members of the source list afterwards (identity differs) and
        their node taxa all resolve to the target's namespace.
        """
        tlist = curated_test_tree_list.get_tree_list(num_trees=3)
        original_tns = tlist.taxon_namespace
        original_tlist_len = len(tlist)
        original_tree_labels = [t.label for t in tlist]
        self.assertEqual(len(original_tree_labels), len(tlist))
        self.assertEqual(original_tlist_len, 3)
        tlist_source = curated_test_tree_list.get_tree_list(num_trees=5)
        self.assertEqual(len(tlist_source), 5)
        source_tree_labels = [t.label for t in tlist_source]
        self.assertEqual(len(source_tree_labels), len(tlist_source))
        tlist.extend(tlist_source)
        self.assertEqual(len(tlist), original_tlist_len + len(tlist_source))
        self.assertIs(tlist.taxon_namespace, original_tns)
        # self.assertEqual(len(tlist.taxon_namespace), len(tlist[0].tax_labels))
        self.assertEqual(len(tlist.taxon_namespace), 7)
        expected_tree_labels = original_tree_labels + source_tree_labels
        self.assertEqual(len(tlist), len(expected_tree_labels))
        for t1, tlabel in zip(tlist, expected_tree_labels):
            self.assertIn(t1, tlist)
            self.assertNotIn(t1, tlist_source)
            self.assertIs(t1.taxon_namespace, tlist.taxon_namespace)
            self.assertEqual(t1.label, tlabel)
            for nd in t1:
                self.assertIn(nd.taxon, tlist.taxon_namespace)
    def test_extend_from_list_of_trees_different_namespace(self):
        """extend() from a plain list of foreign trees adopts the trees directly.

        Unlike extending from another TreeList, Tree objects coming from a
        plain list keep their identity (they remain members of the source
        list) while being migrated into the target's namespace.
        """
        tlist = curated_test_tree_list.get_tree_list(num_trees=3)
        original_tns = tlist.taxon_namespace
        original_tlist_len = len(tlist)
        original_tree_labels = [t.label for t in tlist]
        self.assertEqual(len(original_tree_labels), len(tlist))
        self.assertEqual(original_tlist_len, 3)
        source_trees = curated_test_tree_list.get_trees(
                num_trees=5,
                taxon_namespace=None,
                label=None,
                suppress_internal_node_taxa=False,
                suppress_leaf_node_taxa=False)
        self.assertEqual(len(source_trees), 5)
        source_tree_labels = [t.label for t in source_trees]
        self.assertEqual(len(source_tree_labels), len(source_trees))
        tlist.extend(source_trees)
        self.assertEqual(len(tlist), original_tlist_len + len(source_trees))
        self.assertIs(tlist.taxon_namespace, original_tns)
        # self.assertEqual(len(tlist.taxon_namespace), len(tlist[0].tax_labels))
        self.assertEqual(len(tlist.taxon_namespace), 7)
        expected_tree_labels = original_tree_labels + source_tree_labels
        self.assertEqual(len(tlist), len(expected_tree_labels))
        for t1, tlabel in zip(tlist, expected_tree_labels):
            self.assertIn(t1, tlist)
            if tlabel in source_tree_labels:
                # Trees taken from the plain list keep their identity.
                self.assertIn(t1, source_trees)
            else:
                self.assertNotIn(t1, source_trees)
            self.assertIs(t1.taxon_namespace, tlist.taxon_namespace)
            self.assertEqual(t1.label, tlabel)
            for nd in t1:
                self.assertIn(nd.taxon, tlist.taxon_namespace)
def test_index(self):
tlist = curated_test_tree_list.get_tree_list(5)
for idx, t in enumerate(tlist):
self.assertIs(t, tlist[idx])
self.assertEqual(tlist.index(t), idx)
def test_pop1(self):
tlist = curated_test_tree_list.get_tree_list(5)
k = tlist[-1]
t = tlist.pop()
self.assertIs(t, k)
self.assertEqual(len(tlist), 4)
self.assertNotIn(t, tlist)
def test_pop2(self):
for idx in range(5):
tlist = curated_test_tree_list.get_tree_list(5)
k = tlist[idx]
t = tlist.pop(idx)
self.assertIs(t, k)
self.assertEqual(len(tlist), 4)
self.assertNotIn(t, tlist)
def test_remove(self):
tlist = curated_test_tree_list.get_tree_list(5)
t = tlist[0]
tlist.remove(t)
self.assertEqual(len(tlist), 4)
self.assertNotIn(t, tlist)
def test_remove(self):
tlist = curated_test_tree_list.get_tree_list(5)
clist = list(tlist._trees)
tlist.reverse()
clist.reverse()
for t1, t2 in zip(tlist, clist):
self.assertIs(t1, t2)
def test_sort(self):
for r in (True, False):
tlist = curated_test_tree_list.get_tree_list(5)
clist = list(tlist._trees)
tlist.sort(key=lambda x: x.label, reverse=r)
clist.sort(key=lambda x: x.label, reverse=r)
for t1, t2 in zip(tlist, clist):
self.assertIs(t1, t2)
class TreeListCreatingAndCloning(
        curated_test_tree.CuratedTestTree,
        compare_and_validate.Comparator,
        unittest.TestCase):
    """Copy semantics of TreeList: shallow copy, taxon-namespace-scoped copy,
    and full deep copy, with annotations attached at every level so the
    comparator can verify they are carried across."""
    def add_tree_annotations(self, tree):
        """Attach value, bound-attribute, and nested annotations to every
        node and edge of ``tree``, plus annotations on the tree itself."""
        for idx, nd in enumerate(tree):
            if idx % 2 == 0:
                # Label/length only alternating edges, so copies must handle
                # both annotated and unannotated edges.
                nd.edge.label = "E{}".format(idx)
                nd.edge.length = idx
            an1 = nd.annotations.add_new("a{}".format(idx),
                    "{}{}{}".format(nd.label, nd.taxon, idx))
            an2 = nd.annotations.add_bound_attribute("label")
            an3 = an1.annotations.add_bound_attribute("name")
            ae1 = nd.edge.annotations.add_new("a{}".format(idx),
                    "{}{}".format(nd.edge.label, idx))
            ae2 = nd.edge.annotations.add_bound_attribute("label")
            ae3 = ae1.annotations.add_bound_attribute("name")
        tree.annotations.add_new("a", 0)
        tree.label = "hello"
        b = tree.annotations.add_bound_attribute("label")
        b.annotations.add_new("c", 3)
    def add_tree_list_annotations(self, tree_list):
        """Attach plain and bound-attribute annotations to the TreeList."""
        tree_list.annotations.add_new("a", 0)
        tree_list.label = "hello"
        b = tree_list.annotations.add_bound_attribute("label")
        b.annotations.add_new("c", 3)
    def add_taxon_namespace_annotations(self, tns):
        """Attach nested annotations to every taxon in namespace ``tns``."""
        for idx, taxon in enumerate(tns):
            a = taxon.annotations.add_new("!color", str(idx))
            a.annotations.add_new("setbytest", "a")
    def setUp(self):
        # Record the reference tree's taxon labels for later comparison.
        self.num_trees = 5
        tree1, anodes1, lnodes1, inodes1 = self.get_tree(
                suppress_internal_node_taxa=False,
                suppress_leaf_node_taxa=False)
        self.original_taxon_labels = [t.label for t in tree1.taxon_namespace]
        assert len(self.original_taxon_labels) == len(anodes1)
    def get_tree_list(self):
        """Build a fresh, fully annotated TreeList of ``self.num_trees``
        trees sharing one taxon namespace."""
        tlist1 = dendropy.TreeList()
        self.num_trees = 5
        for idx in range(self.num_trees):
            tree1, anodes1, lnodes1, inodes1 = self.get_tree(
                    suppress_internal_node_taxa=False,
                    suppress_leaf_node_taxa=False,
                    taxon_namespace=tlist1.taxon_namespace)
            self.add_tree_annotations(tree1)
            tlist1.append(tree1)
        self.add_tree_list_annotations(tlist1)
        self.add_taxon_namespace_annotations(tlist1.taxon_namespace)
        return tlist1
    def test_shallow_copy_with_initializer_list(self):
        """Constructing from a plain list of trees preserves tree identity."""
        tlist1 = self.get_tree_list()
        trees = tlist1._trees
        tlist2 = dendropy.TreeList(trees)
        self.assertEqual(len(tlist2), self.num_trees)
        for tcopy, toriginal in zip(tlist2, trees):
            self.assertIs(tcopy, toriginal)
            self.assertIs(tcopy.taxon_namespace, tlist2.taxon_namespace)
    def test_clone0(self):
        """``clone(0)`` is a shallow copy: same trees, same namespace."""
        tlist1 = self.get_tree_list()
        for tlist2 in (
                tlist1.clone(0),
                ):
            self.assertIs(tlist2.taxon_namespace, tlist1.taxon_namespace)
            self.assertEqual(len(tlist2), self.num_trees)
            for tcopy, toriginal in zip(tlist2, tlist1):
                self.assertIs(tcopy, toriginal)
                self.assertIs(tcopy.taxon_namespace, tlist2.taxon_namespace)
    def test_taxon_namespace_scoped_copy(self):
        """``clone(1)``, copy-construction, and
        ``taxon_namespace_scoped_copy()`` give distinct trees that still
        share the original taxon namespace."""
        tlist1 = self.get_tree_list()
        for tlist2 in (
                tlist1.clone(1),
                dendropy.TreeList(tlist1),
                tlist1.taxon_namespace_scoped_copy(),):
            self.compare_distinct_tree_list(tlist2, tlist1,
                    taxon_namespace_scoped=True,
                    compare_tree_annotations=True,
                    compare_taxon_annotations=True)
    def test_deepcopy_including_namespace(self):
        """``clone(2)`` and ``copy.deepcopy`` also copy the namespace."""
        tlist1 = self.get_tree_list()
        for idx, tlist2 in enumerate((
                tlist1.clone(2),
                copy.deepcopy(tlist1),
                )):
            self.compare_distinct_tree_list(tlist2, tlist1,
                    taxon_namespace_scoped=False,
                    compare_tree_annotations=True,
                    compare_taxon_annotations=True)
    def test_deepcopy_excluding_namespace(self):
        """Copy-constructing into an explicit new namespace deep-copies the
        trees but does not carry over taxon annotations."""
        tlist1 = self.get_tree_list()
        tlist2 = dendropy.TreeList(tlist1,
                taxon_namespace=dendropy.TaxonNamespace())
        self.compare_distinct_tree_list(tlist2, tlist1,
                taxon_namespace_scoped=False,
                compare_tree_annotations=True,
                compare_taxon_annotations=False)
class TreeListIdentity(unittest.TestCase):
    """Equality vs. identity semantics of TreeList: ``==`` compares
    namespace and tree membership, while hashing is identity-based."""
    def setUp(self):
        # t1 and t2 share one namespace; t3 gets its own default namespace.
        self.tns = dendropy.TaxonNamespace()
        self.t1 = dendropy.TreeList(label="a", taxon_namespace=self.tns)
        self.t2 = dendropy.TreeList(label="a", taxon_namespace=self.tns)
        self.t3 = dendropy.TreeList(label="a")
    def test_equal(self):
        # two distinct |TreeList| objects are equal
        # if they have the same namespace and trees
        trees = [dendropy.Tree() for i in range(5)]
        for tree in trees:
            self.t1._trees.append(tree)
            self.t2._trees.append(tree)
        self.assertEqual(self.t1, self.t2)
    def test_unequal1(self):
        # two distinct |TreeList| objects are unequal
        # if they do not hold the same trees (same namespace here)
        trees1 = [dendropy.Tree() for i in range(5)]
        for tree in trees1:
            self.t1._trees.append(tree)
        trees2 = [dendropy.Tree() for i in range(5)]
        for tree in trees2:
            self.t2._trees.append(tree)
        self.assertNotEqual(self.t1, self.t2)
    def test_unequal2(self):
        # two distinct |TreeList| objects are unequal
        # if they do not share the same namespace (same trees here)
        trees1 = [dendropy.Tree() for i in range(5)]
        for tree in trees1:
            self.t1._trees.append(tree)
            self.t3._trees.append(tree)
        self.assertNotEqual(self.t1, self.t3)
    def test_hash_dict_membership(self):
        # Hashing is identity-based: equal-by-value lists still occupy
        # distinct dict slots.
        k = {}
        k[self.t1] = 1
        k[self.t2] = 2
        self.assertEqual(len(k), 2)
        self.assertEqual(k[self.t1], 1)
        self.assertEqual(k[self.t2], 2)
        self.assertIn(self.t1, k)
        self.assertIn(self.t2, k)
        del k[self.t1]
        self.assertNotIn(self.t1, k)
        self.assertIn(self.t2, k)
        self.assertEqual(len(k), 1)
        k1 = {self.t1: 1}
        k2 = {self.t2: 1}
        self.assertIn(self.t1, k1)
        self.assertIn(self.t2, k2)
        self.assertNotIn(self.t2, k1)
        self.assertNotIn(self.t1, k2)
    def test_hash_set_membership(self):
        # Same identity-based hashing behavior, exercised through sets.
        k = set()
        k.add(self.t1)
        k.add(self.t2)
        self.assertEqual(len(k), 2)
        self.assertIn(self.t1, k)
        self.assertIn(self.t2, k)
        k.discard(self.t1)
        self.assertNotIn(self.t1, k)
        self.assertIn(self.t2, k)
        self.assertEqual(len(k), 1)
        k1 = {self.t1: 1}
        k2 = {self.t2: 1}
        self.assertIn(self.t1, k1)
        self.assertIn(self.t2, k2)
        self.assertNotIn(self.t2, k1)
        self.assertNotIn(self.t1, k2)
class TestTreeListUpdateTaxonNamespace(
        curated_test_tree.CuratedTestTree,
        dendropytest.ExtendedTestCase):
    """Tests ``TreeList.update_taxon_namespace()``: taxa wired directly onto
    nodes (bypassing any TaxonNamespace) get registered in the list's
    namespace."""
    def setUp(self):
        # Build five taxon-less trees, then manually attach Taxon objects to
        # labeled nodes, recording the taxa/labels that an update should
        # discover. The last tree's node labels are remapped so taxa with
        # ``None`` labels are exercised too.
        trees = []
        for idx in range(5):
            tree1, anodes1, lnodes1, inodes1 = self.get_tree(
                    suppress_internal_node_taxa=True,
                    suppress_leaf_node_taxa=True)
            trees.append(tree1)
        self.expected_labels = set()
        self.expected_taxa = set()
        # "<NONE>" marks node labels that should map to a Taxon with a
        # ``None`` label; plain ``None`` entries keep the node label as-is.
        node_label_to_taxon_label_map = {
            "a" : "z01",
            "b" : "<NONE>",
            "c" : "z03",
            "e" : "z04",
            "f" : "z05",
            "g" : "z06",
            "h" : None,
            "i" : None,
            "j" : "z09",
            "k" : "z10",
            "l" : "z11",
            "m" : "<NONE>",
            "n" : None,
            "o" : "z14",
            "p" : "z15",
        }
        registry = {}
        for tree_idx, tree in enumerate(trees):
            for nd in tree:
                if nd.label is not None:
                    if tree_idx > 3:
                        # Only the final tree gets remapped labels.
                        nd.label = node_label_to_taxon_label_map[nd.label]
                    if nd.label == "<NONE>":
                        try:
                            t = registry[None]
                        except KeyError:
                            t = dendropy.Taxon(label=None)
                            registry[None] = t
                            self.expected_labels.add(None)
                    else:
                        try:
                            t = registry[nd.label]
                        except KeyError:
                            t = dendropy.Taxon(label=nd.label)
                            registry[nd.label] = t
                            self.expected_labels.add(nd.label)
                    nd.taxon = t
                    self.expected_taxa.add(nd.taxon)
        self.tree_list = dendropy.TreeList()
        self.tree_list._trees = trees
    def test_noop_update_with_no_taxa(self):
        """Updating when the trees carry no taxa leaves the namespace empty."""
        trees = []
        tns = dendropy.TaxonNamespace()
        for idx in range(5):
            tree1, anodes1, lnodes1, inodes1 = self.get_tree(
                    suppress_internal_node_taxa=True,
                    suppress_leaf_node_taxa=True,
                    taxon_namespace=tns)
            trees.append(tree1)
        tlst = dendropy.TreeList(taxon_namespace=tns)
        tlst._trees = trees
        original_tns = tlst.taxon_namespace
        self.assertEqual(len(original_tns), 0)
        tlst.update_taxon_namespace()
        self.assertIs(tlst.taxon_namespace, original_tns)
        for tree in tlst:
            self.assertIs(tree.taxon_namespace, tlst.taxon_namespace)
        self.assertEqual(len(original_tns), 0)
    def test_update(self):
        """``update_taxon_namespace()`` is idempotent and collects exactly
        the taxa and labels attached to the nodes in setUp."""
        original_tns = self.tree_list.taxon_namespace
        self.assertEqual(len(original_tns), 0)
        # Repeated calls must not duplicate entries.
        self.tree_list.update_taxon_namespace()
        self.tree_list.update_taxon_namespace()
        self.tree_list.update_taxon_namespace()
        for tree in self.tree_list:
            self.assertIs(tree.taxon_namespace, self.tree_list.taxon_namespace)
        self.assertIs(self.tree_list.taxon_namespace, original_tns)
        new_taxa = [t for t in original_tns]
        new_labels = [t.label for t in original_tns]
        self.assertCountEqual(new_taxa, self.expected_taxa)
        self.assertCountEqual(new_labels, self.expected_labels)
class TestTreeListMigrateAndReconstructTaxonNamespace(
        curated_test_tree.CuratedTestTree,
        dendropytest.ExtendedTestCase):
    """Tests ``migrate_taxon_namespace()`` and
    ``reconstruct_taxon_namespace()``, with and without unifying taxa that
    share a label, and with case-sensitive vs. case-insensitive matching."""
    def setUp(self):
        # Eight trees whose node taxa are created through a shared registry
        # so that several nodes deliberately reference the same Taxon, and
        # several distinct taxa share labels differing only in case.
        tns = dendropy.TaxonNamespace()
        trees = []
        for idx in range(8):
            tree, anodes, lnodes, inodes = self.get_tree(
                    suppress_internal_node_taxa=True,
                    suppress_leaf_node_taxa=True,
                    taxon_namespace=tns)
            trees.append(tree)
        # Duplicate and case-varying labels ("a"/"a", "b"/"B", "h"/"H", ...)
        # exercise label-unification; None labels exercise unlabeled taxa.
        self.node_label_to_taxon_label_map = {
            "a" : "a",
            "b" : "a",
            "c" : "2",
            "e" : "2",
            "f" : "b",
            "g" : "B",
            "h" : "B",
            "i" : "h",
            "j" : "H",
            "k" : "h",
            "l" : None,
            "m" : None,
            "n" : "H",
            "o" : "J",
            "p" : "j",
        }
        self.original_taxa = []
        registry = {}
        for tree in trees:
            for idx, nd in enumerate(tree):
                try:
                    t = registry[nd.label]
                except KeyError:
                    taxon_label = self.node_label_to_taxon_label_map[nd.label]
                    t = dendropy.Taxon(label=taxon_label)
                    registry[nd.label] = t
                    self.original_taxa.append(t)
                    tree.taxon_namespace.add_taxon(t)
                nd.taxon = t
                # Remember the pre-migration taxon for later comparison.
                nd.original_taxon = t
            assert len(tree.taxon_namespace) == len(self.node_label_to_taxon_label_map)
            assert len(tree.taxon_namespace) == len(self.original_taxa)
        self.tree_list = dendropy.TreeList(taxon_namespace=tns)
        self.tree_list._trees = trees
    def verify_taxon_namespace_reconstruction(self,
            unify_taxa_by_label=False,
            case_sensitive_label_mapping=True,
            original_tns=None,
            redundant_taxa=False):
        """Check that every node's taxon was replaced by a new Taxon in the
        list's (new) namespace, with labels mapped per the requested
        unification and case-sensitivity settings."""
        # Derive the label multiset the new namespace should end up with.
        if unify_taxa_by_label:
            if not case_sensitive_label_mapping:
                expected_labels = []
                for label in self.node_label_to_taxon_label_map.values():
                    if label is None:
                        expected_labels.append(label)
                    else:
                        label = label.upper()
                        if label not in expected_labels:
                            expected_labels.append(label)
            else:
                expected_labels = list(set(label for label in self.node_label_to_taxon_label_map.values()))
        else:
            expected_labels = [label for label in self.node_label_to_taxon_label_map.values()]
        for tree in self.tree_list:
            seen_taxa = []
            self.assertIs(tree.taxon_namespace, self.tree_list.taxon_namespace)
            for nd in tree:
                # Migration/reconstruction always creates new Taxon objects.
                self.assertIsNot(nd.taxon, nd.original_taxon)
                if not case_sensitive_label_mapping and nd.taxon.label is not None:
                    self.assertEqual(nd.taxon.label.upper(), nd.original_taxon.label.upper())
                    self.assertEqual(self.node_label_to_taxon_label_map[nd.label].upper(), nd.taxon.label.upper())
                else:
                    self.assertEqual(nd.taxon.label, nd.original_taxon.label)
                    self.assertEqual(self.node_label_to_taxon_label_map[nd.label], nd.taxon.label)
                self.assertNotIn(nd.original_taxon, tree.taxon_namespace)
                self.assertIn(nd.original_taxon, self.original_taxa)
                self.assertIn(nd.taxon, tree.taxon_namespace)
                self.assertNotIn(nd.taxon, self.original_taxa)
                if original_tns is not None:
                    self.assertNotIn(nd.taxon, original_tns)
                if nd.taxon not in seen_taxa:
                    seen_taxa.append(nd.taxon)
                else:
                    # A repeated Taxon is only legitimate if taxa were
                    # unified by label (or were redundant to begin with).
                    self.assertTrue(unify_taxa_by_label or redundant_taxa)
                    if not case_sensitive_label_mapping:
                        self.assertIn(nd.taxon.label, [t.label for t in seen_taxa])
                    else:
                        if nd.taxon.label is None:
                            self.assertIs(nd.original_taxon.label, None)
                            self.assertEqual([t.label for t in seen_taxa].count(None), 1)
                        else:
                            x1 = [t.label.upper() for t in seen_taxa if t.label is not None]
                            self.assertIn(nd.taxon.label.upper(), x1)
            self.assertEqual(len(seen_taxa), len(tree.taxon_namespace))
            if not case_sensitive_label_mapping:
                seen_labels = [(t.label.upper() if t.label is not None else None) for t in seen_taxa]
            else:
                seen_labels = [t.label for t in seen_taxa]
            # Compare label multisets, not just sets, to catch duplicates.
            c1 = collections.Counter(expected_labels)
            c2 = collections.Counter(seen_labels)
            self.assertEqual(c1, c2)
            self.assertEqual(len(tree.taxon_namespace), len(expected_labels))
            if not unify_taxa_by_label and not redundant_taxa:
                self.assertEqual(len(tree.taxon_namespace), len(self.node_label_to_taxon_label_map))
    def test_basic_reconstruction(self):
        """``reconstruct_taxon_namespace()`` repopulates a new namespace
        with equivalently labeled, but distinct, taxa."""
        tns = dendropy.TaxonNamespace()
        trees = []
        for idx in range(5):
            tree, anodes, lnodes, inodes = self.get_tree(
                    suppress_internal_node_taxa=False,
                    suppress_leaf_node_taxa=False,
                    taxon_namespace=tns)
            trees.append(tree)
        tree_list = dendropy.TreeList(taxon_namespace=tns)
        tree_list._trees = trees
        new_tns = dendropy.TaxonNamespace()
        new_tns.is_case_sensitive = True
        tree_list.taxon_namespace = new_tns
        tree_list.reconstruct_taxon_namespace(unify_taxa_by_label=False)
        self.assertIsNot(tree_list.taxon_namespace, tns)
        self.assertIs(tree_list.taxon_namespace, new_tns)
        self.assertEqual(len(tree_list.taxon_namespace), len(tns))
        original_labels = [t.label for t in tns]
        new_labels = [t.label for t in new_tns]
        self.assertCountEqual(new_labels, original_labels)
        for tree in tree_list:
            self.assertIs(tree.taxon_namespace, tree_list.taxon_namespace)
            for nd in tree:
                if nd.taxon is not None:
                    self.assertIn(nd.taxon, tree.taxon_namespace)
                    self.assertNotIn(nd.taxon, tns)
    def test_reconstruct_taxon_namespace_non_unifying(self):
        """Reconstruction without unification keeps one taxon per original."""
        original_tns = self.tree_list.taxon_namespace
        new_tns = dendropy.TaxonNamespace()
        new_tns.is_case_sensitive = True
        self.tree_list._taxon_namespace = new_tns
        self.assertEqual(len(self.tree_list.taxon_namespace), 0)
        self.tree_list.reconstruct_taxon_namespace(unify_taxa_by_label=False)
        self.assertIsNot(self.tree_list.taxon_namespace, original_tns)
        self.assertIs(self.tree_list.taxon_namespace, new_tns)
        self.verify_taxon_namespace_reconstruction(
                unify_taxa_by_label=False,
                case_sensitive_label_mapping=True)
    def test_reconstruct_taxon_namespace_unifying_case_sensitive(self):
        """Unifying reconstruction merges taxa with identical labels only."""
        original_tns = self.tree_list.taxon_namespace
        new_tns = dendropy.TaxonNamespace()
        new_tns.is_case_sensitive = True
        self.tree_list._taxon_namespace = new_tns
        self.assertEqual(len(self.tree_list.taxon_namespace), 0)
        self.tree_list.reconstruct_taxon_namespace(unify_taxa_by_label=True)
        self.assertIsNot(self.tree_list.taxon_namespace, original_tns)
        self.assertIs(self.tree_list.taxon_namespace, new_tns)
        self.verify_taxon_namespace_reconstruction(
                unify_taxa_by_label=True,
                case_sensitive_label_mapping=True,
                original_tns=original_tns)
    def test_reconstruct_taxon_namespace_unifying_case_insensitive(self):
        """Unifying reconstruction also merges labels differing by case."""
        original_tns = self.tree_list.taxon_namespace
        new_tns = dendropy.TaxonNamespace()
        new_tns.is_case_sensitive = False
        self.tree_list._taxon_namespace = new_tns
        self.assertEqual(len(self.tree_list.taxon_namespace), 0)
        self.tree_list.reconstruct_taxon_namespace(unify_taxa_by_label=True)
        self.assertIsNot(self.tree_list.taxon_namespace, original_tns)
        self.assertIs(self.tree_list.taxon_namespace, new_tns)
        self.verify_taxon_namespace_reconstruction(
                unify_taxa_by_label=True,
                case_sensitive_label_mapping=False,
                original_tns=original_tns)
    def test_basic_migration(self):
        """``migrate_taxon_namespace()`` rebinds the list and its trees to a
        new namespace containing equivalently labeled, distinct taxa."""
        tns = dendropy.TaxonNamespace()
        trees = []
        for idx in range(5):
            tree, anodes, lnodes, inodes = self.get_tree(
                    suppress_internal_node_taxa=False,
                    suppress_leaf_node_taxa=False,
                    taxon_namespace=tns)
            trees.append(tree)
        tree_list = dendropy.TreeList(taxon_namespace=tns)
        tree_list._trees = trees
        new_tns = dendropy.TaxonNamespace()
        new_tns.is_case_sensitive = True
        tree_list.taxon_namespace = new_tns
        tree_list.migrate_taxon_namespace(
                new_tns,
                unify_taxa_by_label=False)
        self.assertIsNot(tree_list.taxon_namespace, tns)
        self.assertIs(tree_list.taxon_namespace, new_tns)
        self.assertEqual(len(tree_list.taxon_namespace), len(tns))
        original_labels = [t.label for t in tns]
        new_labels = [t.label for t in new_tns]
        self.assertCountEqual(new_labels, original_labels)
        for tree in tree_list:
            self.assertIs(tree.taxon_namespace, tree_list.taxon_namespace)
            for nd in tree:
                if nd.taxon is not None:
                    self.assertIn(nd.taxon, tree.taxon_namespace)
                    self.assertNotIn(nd.taxon, tns)
    def test_migrate_taxon_namespace_non_unifying(self):
        """Migration without unification keeps one taxon per original."""
        original_tns = self.tree_list.taxon_namespace
        new_tns = dendropy.TaxonNamespace()
        new_tns.is_case_sensitive = True
        self.tree_list.migrate_taxon_namespace(
                new_tns,
                unify_taxa_by_label=False)
        self.assertIsNot(self.tree_list.taxon_namespace, original_tns)
        self.assertIs(self.tree_list.taxon_namespace, new_tns)
        self.verify_taxon_namespace_reconstruction(
                unify_taxa_by_label=False,
                case_sensitive_label_mapping=True,
                original_tns=original_tns)
    def test_migrate_taxon_namespace_unifying_case_sensitive(self):
        """Unifying migration merges taxa with identical labels only."""
        original_tns = self.tree_list.taxon_namespace
        new_tns = dendropy.TaxonNamespace()
        new_tns.is_case_sensitive = True
        self.tree_list.migrate_taxon_namespace(
                new_tns,
                unify_taxa_by_label=True)
        self.assertIsNot(self.tree_list.taxon_namespace, original_tns)
        self.assertIs(self.tree_list.taxon_namespace, new_tns)
        self.verify_taxon_namespace_reconstruction(
                unify_taxa_by_label=True,
                case_sensitive_label_mapping=True,
                original_tns=original_tns)
    def test_migrate_taxon_namespace_unifying_case_insensitive(self):
        """Unifying migration also merges labels differing by case."""
        original_tns = self.tree_list.taxon_namespace
        new_tns = dendropy.TaxonNamespace()
        new_tns.is_case_sensitive = False
        self.tree_list.migrate_taxon_namespace(
                new_tns,
                unify_taxa_by_label=True)
        self.assertIsNot(self.tree_list.taxon_namespace, original_tns)
        self.assertIs(self.tree_list.taxon_namespace, new_tns)
        self.verify_taxon_namespace_reconstruction(
                unify_taxa_by_label=True,
                case_sensitive_label_mapping=False,
                original_tns=original_tns)
class TestTreeListAppend(
        curated_test_tree.CuratedTestTree,
        unittest.TestCase):
    """Tests ``TreeList.append()`` taxon-handling strategies
    ("migrate" — the default — vs. "add")."""
    def setUp(self):
        # A TreeList with its own ("native") namespace, plus a tree built in
        # a separate ("foreign") namespace that will be appended.
        self.native_tns = dendropy.TaxonNamespace()
        self.tree_list = dendropy.TreeList(taxon_namespace=self.native_tns)
        self.foreign_tns = dendropy.TaxonNamespace()
        self.foreign_tree, anodes, lnodes, inodes = self.get_tree(
                suppress_internal_node_taxa=False,
                suppress_leaf_node_taxa=False,
                taxon_namespace=self.foreign_tns)
        for nd in self.foreign_tree:
            # Remember the pre-append taxon for later comparison.
            nd.original_taxon = nd.taxon
        self.check_tns = dendropy.TaxonNamespace()
        self.check_tree, anodes, lnodes, inodes = self.get_tree(
                suppress_internal_node_taxa=False,
                suppress_leaf_node_taxa=False,
                taxon_namespace=self.check_tns)
    def test_append_default(self):
        """Default append migrates: node taxa are remapped to new Taxon
        objects (same labels) in the native namespace."""
        self.assertIsNot(self.tree_list.taxon_namespace, self.foreign_tree.taxon_namespace)
        self.tree_list.append(self.foreign_tree)
        self.assertEqual(len(self.tree_list), 1)
        self.assertIn(self.foreign_tree, self.tree_list)
        self.assertIs(self.foreign_tree, self.tree_list[0])
        self.assertIs(self.tree_list.taxon_namespace, self.native_tns)
        self.assertIs(self.foreign_tree.taxon_namespace, self.tree_list.taxon_namespace)
        self.assertEqual(len(self.tree_list.taxon_namespace), len(self.foreign_tns))
        for nd in self.foreign_tree:
            if nd.taxon:
                self.assertIn(nd.taxon, self.tree_list.taxon_namespace)
                self.assertIsNot(nd.taxon, nd.original_taxon)
                self.assertIn(nd.original_taxon, self.foreign_tns)
                self.assertNotIn(nd.original_taxon, self.tree_list.taxon_namespace)
                self.assertEqual(nd.taxon.label, nd.original_taxon.label)
    def test_append_migrate_matching_labels(self):
        """Appending when the native namespace already holds same-labeled
        taxa: only migrate+unify reuses them; otherwise taxa are added."""
        kwargs_groups = [
                {"taxon_import_strategy": "migrate", "unify_taxa_by_label": True},
                {"taxon_import_strategy": "migrate", "unify_taxa_by_label": False},
                {"taxon_import_strategy": "add", },
        ]
        for kwargs in kwargs_groups:
            # Rebuild the fixtures fresh for each strategy.
            self.setUp()
            self.assertEqual(len(self.tree_list.taxon_namespace), 0)
            native_tree, anodes, lnodes, inodes = self.get_tree(
                    suppress_internal_node_taxa=False,
                    suppress_leaf_node_taxa=False,
                    taxon_namespace=self.native_tns)
            self.assertEqual(len(self.tree_list.taxon_namespace), len(self.postorder_sequence))
            self.assertEqual(len(self.tree_list.taxon_namespace), len(self.foreign_tns))
            original_tns_len = len(self.tree_list.taxon_namespace)
            self.tree_list.append(self.foreign_tree, **kwargs)
            self.assertEqual(len(self.tree_list), 1)
            self.assertIn(self.foreign_tree, self.tree_list)
            self.assertIs(self.foreign_tree, self.tree_list[0])
            self.assertIs(self.foreign_tree.taxon_namespace, self.tree_list.taxon_namespace)
            if kwargs["taxon_import_strategy"] == "add":
                # "add" registers the foreign taxa as-is: namespace doubles.
                self.assertEqual(len(self.tree_list.taxon_namespace),
                        original_tns_len + len(self.foreign_tns))
                for nd in self.foreign_tree:
                    self.assertIn(nd.taxon, self.foreign_tns)
                    self.assertIn(nd.taxon, self.tree_list.taxon_namespace)
            else:
                if "unify_taxa_by_label" not in kwargs or not kwargs["unify_taxa_by_label"]:
                    # migrate without unification: new taxa created anyway.
                    self.assertEqual(len(self.tree_list.taxon_namespace),
                            original_tns_len + len(self.foreign_tns))
                else:
                    # migrate with unification: existing taxa are reused.
                    self.assertEqual(len(self.tree_list.taxon_namespace), original_tns_len)
                for nd in self.foreign_tree:
                    self.assertNotIn(nd.taxon, self.foreign_tns)
                    self.assertIn(nd.taxon, self.tree_list.taxon_namespace)
    def test_append_add(self):
        """``taxon_import_strategy="add"`` keeps the foreign Taxon objects
        and simply registers them in the native namespace."""
        self.assertIsNot(self.tree_list.taxon_namespace, self.foreign_tree.taxon_namespace)
        self.tree_list.append(self.foreign_tree,
                taxon_import_strategy="add")
        self.assertEqual(len(self.tree_list), 1)
        self.assertIn(self.foreign_tree, self.tree_list)
        self.assertIs(self.foreign_tree, self.tree_list[0])
        self.assertIs(self.tree_list.taxon_namespace, self.native_tns)
        self.assertIs(self.foreign_tree.taxon_namespace, self.tree_list.taxon_namespace)
        self.assertEqual(len(self.tree_list.taxon_namespace), len(self.foreign_tns))
        for nd in self.foreign_tree:
            if nd.taxon:
                self.assertIn(nd.taxon, self.tree_list.taxon_namespace)
                self.assertIs(nd.taxon, nd.original_taxon)
                self.assertIn(nd.original_taxon, self.foreign_tns)
                self.assertIn(nd.original_taxon, self.tree_list.taxon_namespace)
class TestTreeListTaxa(
        curated_test_tree.CuratedTestTree,
        dendropytest.ExtendedTestCase):
    """Checks that poll_taxa() reports exactly the taxa used on member trees."""
    def setUp(self):
        # Ten structurally identical trees sharing one namespace; the taxa
        # of the first tree serve as the expected poll result.
        self.tree_list = dendropy.TreeList()
        self.expected_taxa = None
        for _ in range(10):
            tree, all_nodes, leaf_nodes, internal_nodes = self.get_tree(
                    taxon_namespace=self.tree_list.taxon_namespace,
                    suppress_internal_node_taxa=False,
                    suppress_leaf_node_taxa=False)
            self.tree_list.append(tree)
            if self.expected_taxa is None:
                self.expected_taxa = {nd.taxon for nd in all_nodes if nd.taxon is not None}
    def test_basic_taxa(self):
        """poll_taxa() returns the set of taxa actually referenced by trees."""
        self.assertEqual(self.tree_list.poll_taxa(), self.expected_taxa)
class TestTreeListPurgeTaxonNamespace(
        curated_test_tree.CuratedTestTree,
        dendropytest.ExtendedTestCase):
    """Checks purge_taxon_namespace() drops taxa referenced by no tree."""
    def setUp(self):
        # Ten structurally identical trees sharing one namespace; the taxa
        # of the first tree are the set that must survive a purge.
        self.tree_list = dendropy.TreeList()
        self.expected_taxa = None
        for _ in range(10):
            tree, all_nodes, leaf_nodes, internal_nodes = self.get_tree(
                    taxon_namespace=self.tree_list.taxon_namespace,
                    suppress_internal_node_taxa=False,
                    suppress_leaf_node_taxa=False)
            self.tree_list.append(tree)
            if self.expected_taxa is None:
                self.expected_taxa = {nd.taxon for nd in all_nodes if nd.taxon is not None}
    def test_noop_purge(self):
        """Purging when every taxon is in use changes nothing."""
        self.assertEqual(set(self.tree_list.taxon_namespace), self.expected_taxa)
        self.tree_list.purge_taxon_namespace()
        self.assertEqual(set(self.tree_list.taxon_namespace), self.expected_taxa)
    def test_basic_purge(self):
        """Purging removes namespace taxa that no tree references."""
        self.assertEqual(set(self.tree_list.taxon_namespace), self.expected_taxa)
        padded_taxa = set(self.expected_taxa)
        # Inject unused taxa directly into the namespace ...
        for label in ("z1", "z2", "z3", "z4"):
            padded_taxa.add(self.tree_list.taxon_namespace.new_taxon(label=label))
        self.assertEqual(set(self.tree_list.taxon_namespace), padded_taxa)
        # ... and verify the purge evicts exactly those.
        self.tree_list.purge_taxon_namespace()
        self.assertEqual(set(self.tree_list.taxon_namespace), self.expected_taxa)
class TreeListCreation(unittest.TestCase):
    """Construction-time keyword arguments are honored."""
    def test_create_with_taxon_namespace(self):
        """A new TreeList adopts both the label and the namespace given."""
        namespace = dendropy.TaxonNamespace()
        tree_list = dendropy.TreeList(label="a", taxon_namespace=namespace)
        self.assertEqual(tree_list.label, "a")
        self.assertIs(tree_list.taxon_namespace, namespace)
class TestSpecialTreeListConstruction(
        unittest.TestCase):
    """Copy-construction corner cases."""
    def test_construction_from_another_tree_list_different_label(self):
        """``label=`` on copy-construction overrides, and does not touch,
        the source list's label."""
        source = dendropy.TreeList()
        source.label = "tlist1"
        self.assertEqual(source.label, "tlist1")
        duplicate = dendropy.TreeList(source, label="tlist2")
        self.assertEqual(duplicate.label, "tlist2")
        self.assertNotEqual(source.label, "tlist2")
        self.assertNotEqual(source.label, duplicate.label)
if __name__ == "__main__":
    # Allow this test module to be run directly as a script.
    unittest.main()
| 44.499574 | 129 | 0.613785 |
ls))
for t1, tlabel in zip(tlist, expected_tree_labels):
self.assertIn(t1, tlist)
if tlabel in source_tree_labels:
self.assertIn(t1, source_trees)
else:
self.assertNotIn(t1, source_trees)
self.assertIs(t1.taxon_namespace, tlist.taxon_namespace)
self.assertEqual(t1.label, tlabel)
for nd in t1:
self.assertIn(nd.taxon, tlist.taxon_namespace)
def test_add_from_another_tree_list_different_namespace(self):
tlist_source1 = curated_test_tree_list.get_tree_list(num_trees=3)
original_tns = tlist_source1.taxon_namespace
source1_tree_labels = [t.label for t in tlist_source1]
self.assertEqual(len(source1_tree_labels), len(tlist_source1))
self.assertEqual(len(tlist_source1), 3)
tlist_source2 = curated_test_tree_list.get_trees(num_trees=5)
self.assertEqual(len(tlist_source2), 5)
source2_tree_labels = [t.label for t in tlist_source2]
self.assertEqual(len(source2_tree_labels), len(tlist_source2))
tlist = tlist_source1 + tlist_source2
self.assertEqual(len(tlist_source1), 3)
self.assertEqual(len(tlist_source2), 5)
self.assertEqual(len(tlist), len(tlist_source1) + len(tlist_source2))
self.assertIs(tlist.taxon_namespace, original_tns)
self.assertEqual(len(tlist.taxon_namespace), 7)
expected_tree_labels = source1_tree_labels + source2_tree_labels
self.assertEqual(len(tlist), len(expected_tree_labels))
for t1, tlabel in zip(tlist, expected_tree_labels):
self.assertIn(t1, tlist)
self.assertIs(t1.taxon_namespace, tlist.taxon_namespace)
self.assertEqual(t1.label, tlabel)
if t1.label in source1_tree_labels:
self.assertNotIn(t1, tlist_source1)
self.assertNotIn(t1, tlist_source2)
else:
self.assertNotIn(t1, tlist_source1)
self.assertIn(t1, tlist_source2)
for nd in t1:
self.assertIn(nd.taxon, tlist.taxon_namespace)
def test_contains(self):
tlist = curated_test_tree_list.get_tree_list(5)
self.assertEqual(len(tlist._trees), len(tlist))
self.assertEqual(len(tlist), 5)
trees = curated_test_tree_list.get_trees(5)
self.assertEqual(len(trees), 5)
for t in tlist:
self.assertTrue(t in tlist._trees)
self.assertTrue(t in tlist)
for t in trees:
self.assertFalse(t in tlist._trees)
self.assertFalse(t in tlist)
tlist += trees
for t in trees:
self.assertTrue(t in tlist._trees)
self.assertTrue(t in tlist)
def test_delitem(self):
tsize = 5
for del_idx in range(-tsize, tsize):
tlist = curated_test_tree_list.get_tree_list(tsize)
original_trees = list(tlist._trees)
self.assertIn(original_trees[del_idx], tlist._trees)
del tlist[del_idx]
self.assertNotIn(original_trees[del_idx], tlist._trees)
self.assertEqual(len(tlist), tsize - 1)
del original_trees[del_idx]
self.assertEqual(tlist._trees, original_trees)
def test_iter(self):
tlist = curated_test_tree_list.get_tree_list(5)
self.assertEqual(len(tlist), 5)
self.assertEqual(len(tlist._trees), len(tlist))
for t1, t2 in zip(tlist, tlist._trees):
self.assertIs(t1, t2)
def test_reversed(self):
tlist = curated_test_tree_list.get_tree_list(5)
self.assertEqual(len(tlist), 5)
self.assertEqual(len(tlist._trees), len(tlist))
for t1, t2 in zip(reversed(tlist), reversed(tlist._trees)):
self.assertIs(t1, t2)
def test_getitem_simple(self):
tsize = 5
tlist = curated_test_tree_list.get_tree_list(tsize)
self.assertEqual(len(tlist), tsize)
self.assertEqual(len(tlist._trees), len(tlist))
for idx in range(-tsize, tsize):
self.assertIs(tlist[idx], tlist._trees[idx])
self.assertTrue(isinstance(tlist[idx], dendropy.Tree))
    def test_getitem_slice(self):
        """Slicing matches plain-list slicing for every (start, stop, step) combo.

        Exhaustively iterates all start/stop values in [-5, 5) and every
        non-zero step in that range, comparing against ``_trees`` slicing.
        """
        tsize = 5
        tlist = curated_test_tree_list.get_tree_list(tsize)
        self.assertEqual(len(tlist), tsize)
        self.assertEqual(len(tlist._trees), len(tlist))
        for a in range(-tsize, tsize):
            for b in range(-tsize, tsize):
                for step in range(-tsize, tsize):
                    if step == 0:
                        # Zero step is invalid for slicing; skip.
                        continue
                    tt = tlist[a:b:step]
                    k = tlist._trees[a:b:step]
                    self.assertEqual(len(tt), len(k))
                    for t1, t2 in zip(tt, k):
                        self.assertIn(t1, tlist)
                        self.assertIn(t1, tlist._trees)
                        self.assertIs(t1.taxon_namespace, tlist.taxon_namespace)
    def test_setitem_simple(self):
        """``tlist[i] = tree`` replaces the member and migrates its taxa.

        For every valid index, the assigned tree must displace the old one
        and have its taxon namespace rebound to the list's namespace.
        """
        tsize = 5
        for idx in range(-tsize, tsize):
            tlist = curated_test_tree_list.get_tree_list(tsize)
            self.assertEqual(len(tlist), tsize)
            self.assertEqual(len(tlist._trees), len(tlist))
            old_tree = tlist[idx]
            new_tree = curated_test_tree_list.get_tree()
            tlist[idx] = new_tree
            self.assertIs(tlist[idx], new_tree)
            self.assertIsNot(tlist[idx], old_tree)
            self.assertIn(new_tree, tlist)
            self.assertNotIn(old_tree, tlist)
            self.assertIs(new_tree.taxon_namespace,
                    tlist.taxon_namespace)
            # 7 taxa: presumably the curated fixture's full taxon set —
            # confirm against curated_test_tree_list.
            self.assertEqual(len(tlist.taxon_namespace), 7)
            for tree in tlist:
                for nd in tree:
                    self.assertIn(nd.taxon, tlist.taxon_namespace)
    def test_setitem_slice_from_list(self):
        """Slice assignment from a plain list of Trees matches list semantics.

        For every valid extended slice of positive length, assigning a
        same-length list of fresh trees must produce the same ordering a
        plain list would, with the assigned trees adopted into the
        TreeList's namespace while keeping their identity.
        """
        tsize = 5
        for a in range(-tsize, tsize):
            for b in range(-tsize, tsize):
                for step in range(-tsize, tsize):
                    if step == 0:
                        continue
                    slice_obj = slice(a, b, step)
                    # Extended-slice assignment requires a matching length;
                    # skip empty selections.
                    slice_len = len(range(*slice_obj.indices(tsize)))
                    if slice_len <= 0:
                        continue
                    tlist = curated_test_tree_list.get_tree_list(tsize)
                    self.assertEqual(len(tlist), tsize)
                    self.assertEqual(len(tlist._trees), len(tlist))
                    copy_list = list(tlist._trees)
                    source = curated_test_tree_list.get_trees(slice_len)
                    tlist[a:b:step] = source
                    # Mirror the assignment on a plain-list reference.
                    copy_list[a:b:step] = source
                    expected_tree_labels = [t.label for t in copy_list]
                    self.assertEqual(len(tlist), len(copy_list))
                    self.assertEqual(len(tlist), len(tlist._trees))
                    self.assertEqual(len(tlist.taxon_namespace), 7)
                    for t1, t2, tlabel in zip(tlist, copy_list, expected_tree_labels):
                        self.assertIs(t1, t2)
                        self.assertIn(t1, tlist)
                        self.assertIn(t1, tlist._trees)
                        self.assertEqual(t1.label, tlabel)
                        self.assertIs(t1.taxon_namespace, tlist.taxon_namespace)
                        for nd in t1:
                            self.assertIn(nd.taxon, tlist.taxon_namespace)
    def test_setitem_slice_from_tree_list(self):
        """Slice assignment from another TreeList copies the assigned trees.

        Unlike assignment from a plain list, trees sourced from another
        TreeList are copied (identity changes) at the assigned positions,
        while untouched positions keep their original objects.
        """
        tsize = 5
        for a in range(-tsize, tsize):
            for b in range(-tsize, tsize):
                for step in range(-tsize, tsize):
                    if step == 0:
                        continue
                    slice_obj = slice(a, b, step)
                    slice_indexes = list(range(*slice_obj.indices(tsize)))
                    slice_len = len(slice_indexes)
                    if slice_len <= 0:
                        continue
                    tlist = curated_test_tree_list.get_tree_list(tsize)
                    self.assertEqual(len(tlist), tsize)
                    self.assertEqual(len(tlist._trees), len(tlist))
                    copy_list = list(tlist._trees)
                    source = curated_test_tree_list.get_tree_list(slice_len)
                    # Reference gets the *source* objects; tlist is expected
                    # to store copies of them with the same labels.
                    copy_list[a:b:step] = source._trees
                    tlist[a:b:step] = source
                    expected_tree_labels = [t.label for t in copy_list]
                    self.assertEqual(len(tlist), len(copy_list))
                    self.assertEqual(len(tlist), len(tlist._trees))
                    self.assertEqual(len(tlist.taxon_namespace), 7)
                    for idx, (t1, t2, tlabel) in enumerate(zip(tlist, copy_list, expected_tree_labels)):
                        self.assertIn(t1, tlist)
                        self.assertIn(t1, tlist._trees)
                        self.assertEqual(t1.label, tlabel)
                        self.assertIs(t1.taxon_namespace, tlist.taxon_namespace)
                        if idx in slice_indexes:
                            # Assigned position: copy, not the source object.
                            self.assertIsNot(t1, t2)
                            self.assertIn(t1, tlist)
                            self.assertIn(t1, tlist._trees)
                            self.assertNotIn(t2, tlist)
                            self.assertNotIn(t2, tlist._trees)
                            self.assertNotIn(t1, source)
                            self.assertNotIn(t1, source._trees)
                        else:
                            # Untouched position: original object retained.
                            self.assertIs(t1, t2)
                        for nd in t1:
                            self.assertIn(nd.taxon, tlist.taxon_namespace)
def test_clear(self):
tlist = curated_test_tree_list.get_tree_list(5)
self.assertEqual(len(tlist._trees), 5)
tlist.clear()
self.assertEqual(len(tlist), 0)
self.assertEqual(len(tlist._trees), 0)
    def test_extend_from_another_tree_list_different_namespace(self):
        """``extend()`` with another TreeList copies the incoming trees.

        The receiving list keeps its own namespace; extended-in trees are
        copies (not shared with the source list) whose taxa are remapped
        into the receiver's namespace.
        """
        tlist = curated_test_tree_list.get_tree_list(num_trees=3)
        original_tns = tlist.taxon_namespace
        original_tlist_len = len(tlist)
        original_tree_labels = [t.label for t in tlist]
        self.assertEqual(len(original_tree_labels), len(tlist))
        self.assertEqual(original_tlist_len, 3)
        tlist_source = curated_test_tree_list.get_tree_list(num_trees=5)
        self.assertEqual(len(tlist_source), 5)
        source_tree_labels = [t.label for t in tlist_source]
        self.assertEqual(len(source_tree_labels), len(tlist_source))
        tlist.extend(tlist_source)
        self.assertEqual(len(tlist), original_tlist_len + len(tlist_source))
        self.assertIs(tlist.taxon_namespace, original_tns)
        self.assertEqual(len(tlist.taxon_namespace), 7)
        expected_tree_labels = original_tree_labels + source_tree_labels
        self.assertEqual(len(tlist), len(expected_tree_labels))
        for t1, tlabel in zip(tlist, expected_tree_labels):
            self.assertIn(t1, tlist)
            # Copies, never the source list's own objects.
            self.assertNotIn(t1, tlist_source)
            self.assertIs(t1.taxon_namespace, tlist.taxon_namespace)
            self.assertEqual(t1.label, tlabel)
            for nd in t1:
                self.assertIn(nd.taxon, tlist.taxon_namespace)
    def test_extend_from_list_of_trees_different_namespace(self):
        """``extend()`` with a plain list of Trees takes the trees over as-is.

        In contrast to extending from another TreeList, trees from a plain
        list keep their identity; their taxa are still remapped into the
        receiver's namespace.
        """
        tlist = curated_test_tree_list.get_tree_list(num_trees=3)
        original_tns = tlist.taxon_namespace
        original_tlist_len = len(tlist)
        original_tree_labels = [t.label for t in tlist]
        self.assertEqual(len(original_tree_labels), len(tlist))
        self.assertEqual(original_tlist_len, 3)
        source_trees = curated_test_tree_list.get_trees(
                num_trees=5,
                taxon_namespace=None,
                label=None,
                suppress_internal_node_taxa=False,
                suppress_leaf_node_taxa=False)
        self.assertEqual(len(source_trees), 5)
        source_tree_labels = [t.label for t in source_trees]
        self.assertEqual(len(source_tree_labels), len(source_trees))
        tlist.extend(source_trees)
        self.assertEqual(len(tlist), original_tlist_len + len(source_trees))
        self.assertIs(tlist.taxon_namespace, original_tns)
        self.assertEqual(len(tlist.taxon_namespace), 7)
        expected_tree_labels = original_tree_labels + source_tree_labels
        self.assertEqual(len(tlist), len(expected_tree_labels))
        for t1, tlabel in zip(tlist, expected_tree_labels):
            self.assertIn(t1, tlist)
            if tlabel in source_tree_labels:
                # Plain-list trees are adopted with identity preserved.
                self.assertIn(t1, source_trees)
            else:
                self.assertNotIn(t1, source_trees)
            self.assertIs(t1.taxon_namespace, tlist.taxon_namespace)
            self.assertEqual(t1.label, tlabel)
            for nd in t1:
                self.assertIn(nd.taxon, tlist.taxon_namespace)
def test_index(self):
tlist = curated_test_tree_list.get_tree_list(5)
for idx, t in enumerate(tlist):
self.assertIs(t, tlist[idx])
self.assertEqual(tlist.index(t), idx)
def test_pop1(self):
tlist = curated_test_tree_list.get_tree_list(5)
k = tlist[-1]
t = tlist.pop()
self.assertIs(t, k)
self.assertEqual(len(tlist), 4)
self.assertNotIn(t, tlist)
def test_pop2(self):
for idx in range(5):
tlist = curated_test_tree_list.get_tree_list(5)
k = tlist[idx]
t = tlist.pop(idx)
self.assertIs(t, k)
self.assertEqual(len(tlist), 4)
self.assertNotIn(t, tlist)
    def test_remove(self):
        """``remove(tree)`` deletes the given tree from the list."""
        tlist = curated_test_tree_list.get_tree_list(5)
        t = tlist[0]
        tlist.remove(t)
        self.assertEqual(len(tlist), 4)
        self.assertNotIn(t, tlist)
def test_remove(self):
tlist = curated_test_tree_list.get_tree_list(5)
clist = list(tlist._trees)
tlist.reverse()
clist.reverse()
for t1, t2 in zip(tlist, clist):
self.assertIs(t1, t2)
def test_sort(self):
for r in (True, False):
tlist = curated_test_tree_list.get_tree_list(5)
clist = list(tlist._trees)
tlist.sort(key=lambda x: x.label, reverse=r)
clist.sort(key=lambda x: x.label, reverse=r)
for t1, t2 in zip(tlist, clist):
self.assertIs(t1, t2)
class TreeListCreatingAndCloning(
        curated_test_tree.CuratedTestTree,
        compare_and_validate.Comparator,
        unittest.TestCase):
    """Exercises TreeList copy semantics at every depth.

    Builds a heavily annotated TreeList fixture, then checks shallow
    copies, ``clone(0/1/2)``, taxon-namespace-scoped copies, and full deep
    copies via the Comparator mixin's ``compare_distinct_tree_list``.
    """

    def add_tree_annotations(self, tree):
        # Decorate every node/edge with value annotations plus bound
        # attributes so copy operations have non-trivial metadata to clone.
        for idx, nd in enumerate(tree):
            if idx % 2 == 0:
                nd.edge.label = "E{}".format(idx)
                nd.edge.length = idx
            an1 = nd.annotations.add_new("a{}".format(idx),
                    "{}{}{}".format(nd.label, nd.taxon, idx))
            an2 = nd.annotations.add_bound_attribute("label")
            an3 = an1.annotations.add_bound_attribute("name")
            ae1 = nd.edge.annotations.add_new("a{}".format(idx),
                    "{}{}".format(nd.edge.label, idx))
            ae2 = nd.edge.annotations.add_bound_attribute("label")
            ae3 = ae1.annotations.add_bound_attribute("name")
        tree.annotations.add_new("a", 0)
        tree.label = "hello"
        b = tree.annotations.add_bound_attribute("label")
        b.annotations.add_new("c", 3)

    def add_tree_list_annotations(self, tree_list):
        # Annotations on the TreeList object itself (including a bound one).
        tree_list.annotations.add_new("a", 0)
        tree_list.label = "hello"
        b = tree_list.annotations.add_bound_attribute("label")
        b.annotations.add_new("c", 3)

    def add_taxon_namespace_annotations(self, tns):
        # Per-taxon annotations, with a nested annotation on each.
        for idx, taxon in enumerate(tns):
            a = taxon.annotations.add_new("!color", str(idx))
            a.annotations.add_new("setbytest", "a")

    def setUp(self):
        self.num_trees = 5
        tree1, anodes1, lnodes1, inodes1 = self.get_tree(
                suppress_internal_node_taxa=False,
                suppress_leaf_node_taxa=False)
        self.original_taxon_labels = [t.label for t in tree1.taxon_namespace]
        assert len(self.original_taxon_labels) == len(anodes1)

    def get_tree_list(self):
        """Build the fully annotated five-tree fixture used by each test."""
        tlist1 = dendropy.TreeList()
        self.num_trees = 5
        for idx in range(self.num_trees):
            tree1, anodes1, lnodes1, inodes1 = self.get_tree(
                    suppress_internal_node_taxa=False,
                    suppress_leaf_node_taxa=False,
                    taxon_namespace=tlist1.taxon_namespace)
            self.add_tree_annotations(tree1)
            tlist1.append(tree1)
        self.add_tree_list_annotations(tlist1)
        self.add_taxon_namespace_annotations(tlist1.taxon_namespace)
        return tlist1

    def test_shallow_copy_with_initializer_list(self):
        """Constructing from a plain list of trees shares the tree objects."""
        tlist1 = self.get_tree_list()
        trees = tlist1._trees
        tlist2 = dendropy.TreeList(trees)
        self.assertEqual(len(tlist2), self.num_trees)
        for tcopy, toriginal in zip(tlist2, trees):
            self.assertIs(tcopy, toriginal)
            self.assertIs(tcopy.taxon_namespace, tlist2.taxon_namespace)

    def test_clone0(self):
        """``clone(0)`` is a shallow copy: shared trees and shared namespace."""
        tlist1 = self.get_tree_list()
        for tlist2 in (
                tlist1.clone(0),
                ):
            self.assertIs(tlist2.taxon_namespace, tlist1.taxon_namespace)
            self.assertEqual(len(tlist2), self.num_trees)
            for tcopy, toriginal in zip(tlist2, tlist1):
                self.assertIs(tcopy, toriginal)
                self.assertIs(tcopy.taxon_namespace, tlist2.taxon_namespace)

    def test_taxon_namespace_scoped_copy(self):
        """``clone(1)`` / copy-construction / scoped copy: new trees, shared taxa."""
        tlist1 = self.get_tree_list()
        for tlist2 in (
                tlist1.clone(1),
                dendropy.TreeList(tlist1),
                tlist1.taxon_namespace_scoped_copy(),):
            self.compare_distinct_tree_list(tlist2, tlist1,
                    taxon_namespace_scoped=True,
                    compare_tree_annotations=True,
                    compare_taxon_annotations=True)

    def test_deepcopy_including_namespace(self):
        """``clone(2)`` and ``copy.deepcopy`` duplicate the namespace too."""
        tlist1 = self.get_tree_list()
        for idx, tlist2 in enumerate((
                tlist1.clone(2),
                copy.deepcopy(tlist1),
                )):
            self.compare_distinct_tree_list(tlist2, tlist1,
                    taxon_namespace_scoped=False,
                    compare_tree_annotations=True,
                    compare_taxon_annotations=True)

    def test_deepcopy_excluding_namespace(self):
        """Copy-constructing into a fresh namespace deep-copies the trees."""
        tlist1 = self.get_tree_list()
        tlist2 = dendropy.TreeList(tlist1,
                taxon_namespace=dendropy.TaxonNamespace())
        self.compare_distinct_tree_list(tlist2, tlist1,
                taxon_namespace_scoped=False,
                compare_tree_annotations=True,
                compare_taxon_annotations=False)
class TreeListIdentity(unittest.TestCase):
    """Equality and hashing semantics of TreeList.

    Two TreeLists compare equal only when they hold the same tree objects;
    hashing is identity-based, so equal-content lists are still distinct
    dict keys and set members.
    """

    def setUp(self):
        self.tns = dendropy.TaxonNamespace()
        self.t1 = dendropy.TreeList(label="a", taxon_namespace=self.tns)
        self.t2 = dendropy.TreeList(label="a", taxon_namespace=self.tns)
        # t3 deliberately gets its own (default) namespace.
        self.t3 = dendropy.TreeList(label="a")

    def test_equal(self):
        # Same tree objects in both lists -> equal.
        trees = [dendropy.Tree() for i in range(5)]
        for tree in trees:
            self.t1._trees.append(tree)
            self.t2._trees.append(tree)
        self.assertEqual(self.t1, self.t2)

    def test_unequal1(self):
        # Distinct tree objects -> unequal, even with matching labels/namespace.
        trees1 = [dendropy.Tree() for i in range(5)]
        for tree in trees1:
            self.t1._trees.append(tree)
        trees2 = [dendropy.Tree() for i in range(5)]
        for tree in trees2:
            self.t2._trees.append(tree)
        self.assertNotEqual(self.t1, self.t2)

    def test_unequal2(self):
        # Same trees but different taxon namespaces -> unequal.
        trees1 = [dendropy.Tree() for i in range(5)]
        for tree in trees1:
            self.t1._trees.append(tree)
            self.t3._trees.append(tree)
        self.assertNotEqual(self.t1, self.t3)

    def test_hash_dict_membership(self):
        # Identity hashing: t1 and t2 are distinct dict keys.
        k = {}
        k[self.t1] = 1
        k[self.t2] = 2
        self.assertEqual(len(k), 2)
        self.assertEqual(k[self.t1], 1)
        self.assertEqual(k[self.t2], 2)
        self.assertIn(self.t1, k)
        self.assertIn(self.t2, k)
        del k[self.t1]
        self.assertNotIn(self.t1, k)
        self.assertIn(self.t2, k)
        self.assertEqual(len(k), 1)
        k1 = {self.t1: 1}
        k2 = {self.t2: 1}
        self.assertIn(self.t1, k1)
        self.assertIn(self.t2, k2)
        self.assertNotIn(self.t2, k1)
        self.assertNotIn(self.t1, k2)

    def test_hash_set_membership(self):
        # Identity hashing: t1 and t2 are distinct set members.
        k = set()
        k.add(self.t1)
        k.add(self.t2)
        self.assertEqual(len(k), 2)
        self.assertIn(self.t1, k)
        self.assertIn(self.t2, k)
        k.discard(self.t1)
        self.assertNotIn(self.t1, k)
        self.assertIn(self.t2, k)
        self.assertEqual(len(k), 1)
        # NOTE(review): the dict checks below are copy-pasted from
        # test_hash_dict_membership and re-test dict, not set, membership.
        k1 = {self.t1: 1}
        k2 = {self.t2: 1}
        self.assertIn(self.t1, k1)
        self.assertIn(self.t2, k2)
        self.assertNotIn(self.t2, k1)
        self.assertNotIn(self.t1, k2)
class TestTreeListUpdateTaxonNamespace(
        curated_test_tree.CuratedTestTree,
        dendropytest.ExtendedTestCase):
    """``TreeList.update_taxon_namespace()`` population behavior.

    The fixture hand-assigns Taxon objects to nodes *without* registering
    them in the list's namespace, so an update must pull exactly the
    expected taxa/labels in, and must be idempotent.
    """

    def setUp(self):
        trees = []
        for idx in range(5):
            tree1, anodes1, lnodes1, inodes1 = self.get_tree(
                    suppress_internal_node_taxa=True,
                    suppress_leaf_node_taxa=True)
            trees.append(tree1)
        self.expected_labels = set()
        self.expected_taxa = set()
        # Maps curated node labels to taxon labels; "<NONE>" means "share
        # the single None-labeled taxon", a literal None means "keep the
        # node label as the taxon label".
        node_label_to_taxon_label_map = {
                "a" : "z01",
                "b" : "<NONE>",
                "c" : "z03",
                "e" : "z04",
                "f" : "z05",
                "g" : "z06",
                "h" : None,
                "i" : None,
                "j" : "z09",
                "k" : "z10",
                "l" : "z11",
                "m" : "<NONE>",
                "n" : None,
                "o" : "z14",
                "p" : "z15",
                }
        registry = {}
        for tree_idx, tree in enumerate(trees):
            for nd in tree:
                if nd.label is not None:
                    # Only the last tree gets relabeled, so some taxa are
                    # shared across trees under different node labels.
                    if tree_idx > 3:
                        nd.label = node_label_to_taxon_label_map[nd.label]
                    if nd.label == "<NONE>":
                        try:
                            t = registry[None]
                        except KeyError:
                            t = dendropy.Taxon(label=None)
                            registry[None] = t
                        self.expected_labels.add(None)
                    else:
                        try:
                            t = registry[nd.label]
                        except KeyError:
                            t = dendropy.Taxon(label=nd.label)
                            registry[nd.label] = t
                        self.expected_labels.add(nd.label)
                    nd.taxon = t
                    self.expected_taxa.add(nd.taxon)
        self.tree_list = dendropy.TreeList()
        # Bypass append() so the namespace starts out empty.
        self.tree_list._trees = trees

    def test_noop_update_with_no_taxa(self):
        """Updating a taxon-free list leaves the (empty) namespace untouched."""
        trees = []
        tns = dendropy.TaxonNamespace()
        for idx in range(5):
            tree1, anodes1, lnodes1, inodes1 = self.get_tree(
                    suppress_internal_node_taxa=True,
                    suppress_leaf_node_taxa=True,
                    taxon_namespace=tns)
            trees.append(tree1)
        tlst = dendropy.TreeList(taxon_namespace=tns)
        tlst._trees = trees
        original_tns = tlst.taxon_namespace
        self.assertEqual(len(original_tns), 0)
        tlst.update_taxon_namespace()
        self.assertIs(tlst.taxon_namespace, original_tns)
        for tree in tlst:
            self.assertIs(tree.taxon_namespace, tlst.taxon_namespace)
        self.assertEqual(len(original_tns), 0)

    def test_update(self):
        """Update pulls in exactly the expected taxa and is idempotent."""
        original_tns = self.tree_list.taxon_namespace
        self.assertEqual(len(original_tns), 0)
        # Repeated updates must not duplicate entries.
        self.tree_list.update_taxon_namespace()
        self.tree_list.update_taxon_namespace()
        self.tree_list.update_taxon_namespace()
        for tree in self.tree_list:
            self.assertIs(tree.taxon_namespace, self.tree_list.taxon_namespace)
        self.assertIs(self.tree_list.taxon_namespace, original_tns)
        new_taxa = [t for t in original_tns]
        new_labels = [t.label for t in original_tns]
        self.assertCountEqual(new_taxa, self.expected_taxa)
        self.assertCountEqual(new_labels, self.expected_labels)
class TestTreeListMigrateAndReconstructTaxonNamespace(
        curated_test_tree.CuratedTestTree,
        dendropytest.ExtendedTestCase):
    """``reconstruct_taxon_namespace()`` / ``migrate_taxon_namespace()``.

    The fixture deliberately creates taxa with duplicate and
    case-conflicting labels ("a"/"a", "b"/"B", "h"/"H", None/None, ...) so
    the unify-by-label and case-sensitivity options can be distinguished.
    """

    def setUp(self):
        tns = dendropy.TaxonNamespace()
        trees = []
        for idx in range(8):
            tree, anodes, lnodes, inodes = self.get_tree(
                    suppress_internal_node_taxa=True,
                    suppress_leaf_node_taxa=True,
                    taxon_namespace=tns)
            trees.append(tree)
        # Node label -> taxon label; values intentionally collide by
        # identity ("a","a"), by case ("b"/"B", "h"/"H", "j"/"J") and as None.
        self.node_label_to_taxon_label_map = {
                "a" : "a",
                "b" : "a",
                "c" : "2",
                "e" : "2",
                "f" : "b",
                "g" : "B",
                "h" : "B",
                "i" : "h",
                "j" : "H",
                "k" : "h",
                "l" : None,
                "m" : None,
                "n" : "H",
                "o" : "J",
                "p" : "j",
                }
        self.original_taxa = []
        registry = {}
        for tree in trees:
            for idx, nd in enumerate(tree):
                # One Taxon per node label, shared across all eight trees;
                # each node also remembers it as `original_taxon` so the
                # verifier can prove replacement happened.
                try:
                    t = registry[nd.label]
                except KeyError:
                    taxon_label = self.node_label_to_taxon_label_map[nd.label]
                    t = dendropy.Taxon(label=taxon_label)
                    registry[nd.label] = t
                    self.original_taxa.append(t)
                tree.taxon_namespace.add_taxon(t)
                nd.taxon = t
                nd.original_taxon = t
            assert len(tree.taxon_namespace) == len(self.node_label_to_taxon_label_map)
            assert len(tree.taxon_namespace) == len(self.original_taxa)
        self.tree_list = dendropy.TreeList(taxon_namespace=tns)
        self.tree_list._trees = trees

    def verify_taxon_namespace_reconstruction(self,
            unify_taxa_by_label=False,
            case_sensitive_label_mapping=True,
            original_tns=None,
            redundant_taxa=False):
        """Check that every node taxon was replaced and labels map correctly.

        Builds the expected label multiset for the given options, then
        walks every node asserting (a) a *new* Taxon object, (b) a label
        equal (or case-insensitively equal) to the original, and (c) exact
        label-multiset agreement per tree.
        """
        # Derive the expected post-reconstruction label population.
        if unify_taxa_by_label:
            if not case_sensitive_label_mapping:
                expected_labels = []
                for label in self.node_label_to_taxon_label_map.values():
                    if label is None:
                        expected_labels.append(label)
                    else:
                        # Case-insensitive unification collapses "b"/"B" etc.
                        label = label.upper()
                        if label not in expected_labels:
                            expected_labels.append(label)
            else:
                expected_labels = list(set(label for label in self.node_label_to_taxon_label_map.values()))
        else:
            expected_labels = [label for label in self.node_label_to_taxon_label_map.values()]
        for tree in self.tree_list:
            seen_taxa = []
            self.assertIs(tree.taxon_namespace, self.tree_list.taxon_namespace)
            for nd in tree:
                # Reconstruction always replaces the Taxon object itself.
                self.assertIsNot(nd.taxon, nd.original_taxon)
                if not case_sensitive_label_mapping and nd.taxon.label is not None:
                    self.assertEqual(nd.taxon.label.upper(), nd.original_taxon.label.upper())
                    self.assertEqual(self.node_label_to_taxon_label_map[nd.label].upper(), nd.taxon.label.upper())
                else:
                    self.assertEqual(nd.taxon.label, nd.original_taxon.label)
                    self.assertEqual(self.node_label_to_taxon_label_map[nd.label], nd.taxon.label)
                self.assertNotIn(nd.original_taxon, tree.taxon_namespace)
                self.assertIn(nd.original_taxon, self.original_taxa)
                self.assertIn(nd.taxon, tree.taxon_namespace)
                self.assertNotIn(nd.taxon, self.original_taxa)
                if original_tns is not None:
                    self.assertNotIn(nd.taxon, original_tns)
                if nd.taxon not in seen_taxa:
                    seen_taxa.append(nd.taxon)
                else:
                    # A taxon may only repeat when unification (or declared
                    # redundancy) makes sharing legitimate.
                    self.assertTrue(unify_taxa_by_label or redundant_taxa)
                    if not case_sensitive_label_mapping:
                        self.assertIn(nd.taxon.label, [t.label for t in seen_taxa])
                    else:
                        if nd.taxon.label is None:
                            self.assertIs(nd.original_taxon.label, None)
                            self.assertEqual([t.label for t in seen_taxa].count(None), 1)
                        else:
                            x1 = [t.label.upper() for t in seen_taxa if t.label is not None]
                            self.assertIn(nd.taxon.label.upper(), x1)
            self.assertEqual(len(seen_taxa), len(tree.taxon_namespace))
            if not case_sensitive_label_mapping:
                seen_labels = [(t.label.upper() if t.label is not None else None) for t in seen_taxa]
            else:
                seen_labels = [t.label for t in seen_taxa]
            # Compare as multisets: both counts and membership must match.
            c1 = collections.Counter(expected_labels)
            c2 = collections.Counter(seen_labels)
            self.assertEqual(c1, c2)
            self.assertEqual(len(tree.taxon_namespace), len(expected_labels))
            if not unify_taxa_by_label and not redundant_taxa:
                self.assertEqual(len(tree.taxon_namespace), len(self.node_label_to_taxon_label_map))

    def test_basic_reconstruction(self):
        """Non-unifying reconstruction into a fresh namespace keeps all labels."""
        tns = dendropy.TaxonNamespace()
        trees = []
        for idx in range(5):
            tree, anodes, lnodes, inodes = self.get_tree(
                    suppress_internal_node_taxa=False,
                    suppress_leaf_node_taxa=False,
                    taxon_namespace=tns)
            trees.append(tree)
        tree_list = dendropy.TreeList(taxon_namespace=tns)
        tree_list._trees = trees
        new_tns = dendropy.TaxonNamespace()
        new_tns.is_case_sensitive = True
        tree_list.taxon_namespace = new_tns
        tree_list.reconstruct_taxon_namespace(unify_taxa_by_label=False)
        self.assertIsNot(tree_list.taxon_namespace, tns)
        self.assertIs(tree_list.taxon_namespace, new_tns)
        self.assertEqual(len(tree_list.taxon_namespace), len(tns))
        original_labels = [t.label for t in tns]
        new_labels = [t.label for t in new_tns]
        self.assertCountEqual(new_labels, original_labels)
        for tree in tree_list:
            self.assertIs(tree.taxon_namespace, tree_list.taxon_namespace)
            for nd in tree:
                if nd.taxon is not None:
                    self.assertIn(nd.taxon, tree.taxon_namespace)
                    self.assertNotIn(nd.taxon, tns)

    def test_reconstruct_taxon_namespace_non_unifying(self):
        original_tns = self.tree_list.taxon_namespace
        new_tns = dendropy.TaxonNamespace()
        new_tns.is_case_sensitive = True
        # Swap via the private attribute so no migration is triggered.
        self.tree_list._taxon_namespace = new_tns
        self.assertEqual(len(self.tree_list.taxon_namespace), 0)
        self.tree_list.reconstruct_taxon_namespace(unify_taxa_by_label=False)
        self.assertIsNot(self.tree_list.taxon_namespace, original_tns)
        self.assertIs(self.tree_list.taxon_namespace, new_tns)
        self.verify_taxon_namespace_reconstruction(
                unify_taxa_by_label=False,
                case_sensitive_label_mapping=True)

    def test_reconstruct_taxon_namespace_unifying_case_sensitive(self):
        original_tns = self.tree_list.taxon_namespace
        new_tns = dendropy.TaxonNamespace()
        new_tns.is_case_sensitive = True
        self.tree_list._taxon_namespace = new_tns
        self.assertEqual(len(self.tree_list.taxon_namespace), 0)
        self.tree_list.reconstruct_taxon_namespace(unify_taxa_by_label=True)
        self.assertIsNot(self.tree_list.taxon_namespace, original_tns)
        self.assertIs(self.tree_list.taxon_namespace, new_tns)
        self.verify_taxon_namespace_reconstruction(
                unify_taxa_by_label=True,
                case_sensitive_label_mapping=True,
                original_tns=original_tns)

    def test_reconstruct_taxon_namespace_unifying_case_insensitive(self):
        original_tns = self.tree_list.taxon_namespace
        new_tns = dendropy.TaxonNamespace()
        new_tns.is_case_sensitive = False
        self.tree_list._taxon_namespace = new_tns
        self.assertEqual(len(self.tree_list.taxon_namespace), 0)
        self.tree_list.reconstruct_taxon_namespace(unify_taxa_by_label=True)
        self.assertIsNot(self.tree_list.taxon_namespace, original_tns)
        self.assertIs(self.tree_list.taxon_namespace, new_tns)
        self.verify_taxon_namespace_reconstruction(
                unify_taxa_by_label=True,
                case_sensitive_label_mapping=False,
                original_tns=original_tns)

    def test_basic_migration(self):
        """Non-unifying migrate() into a fresh namespace keeps all labels."""
        tns = dendropy.TaxonNamespace()
        trees = []
        for idx in range(5):
            tree, anodes, lnodes, inodes = self.get_tree(
                    suppress_internal_node_taxa=False,
                    suppress_leaf_node_taxa=False,
                    taxon_namespace=tns)
            trees.append(tree)
        tree_list = dendropy.TreeList(taxon_namespace=tns)
        tree_list._trees = trees
        new_tns = dendropy.TaxonNamespace()
        new_tns.is_case_sensitive = True
        # NOTE(review): this assignment looks redundant — migrate() below
        # installs new_tns itself; confirm before simplifying.
        tree_list.taxon_namespace = new_tns
        tree_list.migrate_taxon_namespace(
                new_tns,
                unify_taxa_by_label=False)
        self.assertIsNot(tree_list.taxon_namespace, tns)
        self.assertIs(tree_list.taxon_namespace, new_tns)
        self.assertEqual(len(tree_list.taxon_namespace), len(tns))
        original_labels = [t.label for t in tns]
        new_labels = [t.label for t in new_tns]
        self.assertCountEqual(new_labels, original_labels)
        for tree in tree_list:
            self.assertIs(tree.taxon_namespace, tree_list.taxon_namespace)
            for nd in tree:
                if nd.taxon is not None:
                    self.assertIn(nd.taxon, tree.taxon_namespace)
                    self.assertNotIn(nd.taxon, tns)

    def test_migrate_taxon_namespace_non_unifying(self):
        original_tns = self.tree_list.taxon_namespace
        new_tns = dendropy.TaxonNamespace()
        new_tns.is_case_sensitive = True
        self.tree_list.migrate_taxon_namespace(
                new_tns,
                unify_taxa_by_label=False)
        self.assertIsNot(self.tree_list.taxon_namespace, original_tns)
        self.assertIs(self.tree_list.taxon_namespace, new_tns)
        self.verify_taxon_namespace_reconstruction(
                unify_taxa_by_label=False,
                case_sensitive_label_mapping=True,
                original_tns=original_tns)

    def test_migrate_taxon_namespace_unifying_case_sensitive(self):
        original_tns = self.tree_list.taxon_namespace
        new_tns = dendropy.TaxonNamespace()
        new_tns.is_case_sensitive = True
        self.tree_list.migrate_taxon_namespace(
                new_tns,
                unify_taxa_by_label=True)
        self.assertIsNot(self.tree_list.taxon_namespace, original_tns)
        self.assertIs(self.tree_list.taxon_namespace, new_tns)
        self.verify_taxon_namespace_reconstruction(
                unify_taxa_by_label=True,
                case_sensitive_label_mapping=True,
                original_tns=original_tns)

    def test_migrate_taxon_namespace_unifying_case_insensitive(self):
        original_tns = self.tree_list.taxon_namespace
        new_tns = dendropy.TaxonNamespace()
        new_tns.is_case_sensitive = False
        self.tree_list.migrate_taxon_namespace(
                new_tns,
                unify_taxa_by_label=True)
        self.assertIsNot(self.tree_list.taxon_namespace, original_tns)
        self.assertIs(self.tree_list.taxon_namespace, new_tns)
        self.verify_taxon_namespace_reconstruction(
                unify_taxa_by_label=True,
                case_sensitive_label_mapping=False,
                original_tns=original_tns)
class TestTreeListAppend(
        curated_test_tree.CuratedTestTree,
        unittest.TestCase):
    """``TreeList.append()`` taxon-import strategies ("migrate" vs "add")."""

    def setUp(self):
        self.native_tns = dendropy.TaxonNamespace()
        self.tree_list = dendropy.TreeList(taxon_namespace=self.native_tns)
        # A tree whose taxa live in a *different* ("foreign") namespace;
        # each node remembers its pre-append taxon for later comparison.
        self.foreign_tns = dendropy.TaxonNamespace()
        self.foreign_tree, anodes, lnodes, inodes = self.get_tree(
                suppress_internal_node_taxa=False,
                suppress_leaf_node_taxa=False,
                taxon_namespace=self.foreign_tns)
        for nd in self.foreign_tree:
            nd.original_taxon = nd.taxon
        self.check_tns = dendropy.TaxonNamespace()
        self.check_tree, anodes, lnodes, inodes = self.get_tree(
                suppress_internal_node_taxa=False,
                suppress_leaf_node_taxa=False,
                taxon_namespace=self.check_tns)

    def test_append_default(self):
        """Default append migrates foreign taxa into the list's namespace."""
        self.assertIsNot(self.tree_list.taxon_namespace, self.foreign_tree.taxon_namespace)
        self.tree_list.append(self.foreign_tree)
        self.assertEqual(len(self.tree_list), 1)
        self.assertIn(self.foreign_tree, self.tree_list)
        self.assertIs(self.foreign_tree, self.tree_list[0])
        self.assertIs(self.tree_list.taxon_namespace, self.native_tns)
        self.assertIs(self.foreign_tree.taxon_namespace, self.tree_list.taxon_namespace)
        self.assertEqual(len(self.tree_list.taxon_namespace), len(self.foreign_tns))
        for nd in self.foreign_tree:
            if nd.taxon:
                # Migration mints new Taxon objects with matching labels.
                self.assertIn(nd.taxon, self.tree_list.taxon_namespace)
                self.assertIsNot(nd.taxon, nd.original_taxon)
                self.assertIn(nd.original_taxon, self.foreign_tns)
                self.assertNotIn(nd.original_taxon, self.tree_list.taxon_namespace)
                self.assertEqual(nd.taxon.label, nd.original_taxon.label)

    def test_append_migrate_matching_labels(self):
        """Appending when the receiver already holds matching labels.

        - migrate + unify: foreign taxa collapse onto the existing ones.
        - migrate without unify: new taxa are added alongside.
        - add: foreign Taxon objects are inserted as-is.
        """
        kwargs_groups = [
                {"taxon_import_strategy": "migrate", "unify_taxa_by_label": True},
                {"taxon_import_strategy": "migrate", "unify_taxa_by_label": False},
                {"taxon_import_strategy": "add", },
        ]
        for kwargs in kwargs_groups:
            # NOTE(review): manual setUp() call resets fixtures per
            # iteration; a subTest-per-kwargs would be the cleaner idiom.
            self.setUp()
            self.assertEqual(len(self.tree_list.taxon_namespace), 0)
            native_tree, anodes, lnodes, inodes = self.get_tree(
                    suppress_internal_node_taxa=False,
                    suppress_leaf_node_taxa=False,
                    taxon_namespace=self.native_tns)
            self.assertEqual(len(self.tree_list.taxon_namespace), len(self.postorder_sequence))
            self.assertEqual(len(self.tree_list.taxon_namespace), len(self.foreign_tns))
            original_tns_len = len(self.tree_list.taxon_namespace)
            self.tree_list.append(self.foreign_tree, **kwargs)
            self.assertEqual(len(self.tree_list), 1)
            self.assertIn(self.foreign_tree, self.tree_list)
            self.assertIs(self.foreign_tree, self.tree_list[0])
            self.assertIs(self.foreign_tree.taxon_namespace, self.tree_list.taxon_namespace)
            if kwargs["taxon_import_strategy"] == "add":
                self.assertEqual(len(self.tree_list.taxon_namespace),
                        original_tns_len + len(self.foreign_tns))
                for nd in self.foreign_tree:
                    self.assertIn(nd.taxon, self.foreign_tns)
                    self.assertIn(nd.taxon, self.tree_list.taxon_namespace)
            else:
                if "unify_taxa_by_label" not in kwargs or not kwargs["unify_taxa_by_label"]:
                    self.assertEqual(len(self.tree_list.taxon_namespace),
                            original_tns_len + len(self.foreign_tns))
                else:
                    self.assertEqual(len(self.tree_list.taxon_namespace), original_tns_len)
                for nd in self.foreign_tree:
                    self.assertNotIn(nd.taxon, self.foreign_tns)
                    self.assertIn(nd.taxon, self.tree_list.taxon_namespace)

    def test_append_add(self):
        """'add' strategy inserts the foreign Taxon objects without copying."""
        self.assertIsNot(self.tree_list.taxon_namespace, self.foreign_tree.taxon_namespace)
        self.tree_list.append(self.foreign_tree,
                taxon_import_strategy="add")
        self.assertEqual(len(self.tree_list), 1)
        self.assertIn(self.foreign_tree, self.tree_list)
        self.assertIs(self.foreign_tree, self.tree_list[0])
        self.assertIs(self.tree_list.taxon_namespace, self.native_tns)
        self.assertIs(self.foreign_tree.taxon_namespace, self.tree_list.taxon_namespace)
        self.assertEqual(len(self.tree_list.taxon_namespace), len(self.foreign_tns))
        for nd in self.foreign_tree:
            if nd.taxon:
                # Same Taxon object now registered in both namespaces.
                self.assertIn(nd.taxon, self.tree_list.taxon_namespace)
                self.assertIs(nd.taxon, nd.original_taxon)
                self.assertIn(nd.original_taxon, self.foreign_tns)
                self.assertIn(nd.original_taxon, self.tree_list.taxon_namespace)
class TestTreeListTaxa(
        curated_test_tree.CuratedTestTree,
        dendropytest.ExtendedTestCase):
    """``TreeList.poll_taxa()`` returns the set of taxa actually in use."""

    def setUp(self):
        self.tree_list = dendropy.TreeList()
        self.expected_taxa = None
        for i in range(10):
            tree1, anodes1, lnodes1, inodes1 = self.get_tree(
                    taxon_namespace=self.tree_list.taxon_namespace,
                    suppress_internal_node_taxa=False,
                    suppress_leaf_node_taxa=False)
            self.tree_list.append(tree1)
            # All ten trees share one namespace, so the first tree's taxa
            # already form the full expected set.
            if self.expected_taxa is None:
                self.expected_taxa = set([nd.taxon for nd in anodes1 if nd.taxon is not None])

    def test_basic_taxa(self):
        self.assertEqual(self.tree_list.poll_taxa(), self.expected_taxa)
class TestTreeListPurgeTaxonNamespace(
        curated_test_tree.CuratedTestTree,
        dendropytest.ExtendedTestCase):
    """``TreeList.purge_taxon_namespace()`` drops unused taxa only."""

    def setUp(self):
        self.tree_list = dendropy.TreeList()
        self.expected_taxa = None
        for i in range(10):
            tree1, anodes1, lnodes1, inodes1 = self.get_tree(
                    taxon_namespace=self.tree_list.taxon_namespace,
                    suppress_internal_node_taxa=False,
                    suppress_leaf_node_taxa=False)
            self.tree_list.append(tree1)
            if self.expected_taxa is None:
                self.expected_taxa = set([nd.taxon for nd in anodes1 if nd.taxon is not None])

    def test_noop_purge(self):
        """Purging when every taxon is referenced changes nothing."""
        self.assertEqual(set(self.tree_list.taxon_namespace), self.expected_taxa)
        self.tree_list.purge_taxon_namespace()
        self.assertEqual(set(self.tree_list.taxon_namespace), self.expected_taxa)

    def test_basic_purge(self):
        """Purging removes taxa that no tree references."""
        self.assertEqual(set(self.tree_list.taxon_namespace), self.expected_taxa)
        added_taxa = set(self.expected_taxa)
        # Register extra taxa that no node points at.
        for label in ("z1", "z2", "z3", "z4"):
            t = self.tree_list.taxon_namespace.new_taxon(label=label)
            added_taxa.add(t)
        self.assertEqual(set(self.tree_list.taxon_namespace), added_taxa)
        self.tree_list.purge_taxon_namespace()
        self.assertEqual(set(self.tree_list.taxon_namespace), self.expected_taxa)
class TreeListCreation(unittest.TestCase):
    """Construction-time keyword handling for TreeList."""

    def test_create_with_taxon_namespace(self):
        """A new TreeList adopts both the label and the namespace it is given."""
        namespace = dendropy.TaxonNamespace()
        tree_list = dendropy.TreeList(label="a", taxon_namespace=namespace)
        self.assertEqual(tree_list.label, "a")
        self.assertIs(tree_list.taxon_namespace, namespace)
class TestSpecialTreeListConstruction(unittest.TestCase):
    """Copy-construction corner cases for TreeList."""

    def test_construction_from_another_tree_different_label(self):
        """``label=`` on a copy overrides the source label without mutating it."""
        source = dendropy.TreeList()
        source.label = "tlist1"
        self.assertEqual(source.label, "tlist1")
        duplicate = dendropy.TreeList(source, label="tlist2")
        self.assertEqual(duplicate.label, "tlist2")
        self.assertNotEqual(source.label, "tlist2")
        self.assertNotEqual(source.label, duplicate.label)
# Run the full suite when this module is executed as a script.
if __name__ == "__main__":
    unittest.main()
| true | true |
f71c6192795695fce398b118465ead2235f638d3 | 263 | py | Python | services/movies_billing/subscription_service/src/models/customer.py | fuodorov/yacinema | 43ad869575fbaab7c7056229538638666aa87110 | [
"MIT"
] | null | null | null | services/movies_billing/subscription_service/src/models/customer.py | fuodorov/yacinema | 43ad869575fbaab7c7056229538638666aa87110 | [
"MIT"
] | null | null | null | services/movies_billing/subscription_service/src/models/customer.py | fuodorov/yacinema | 43ad869575fbaab7c7056229538638666aa87110 | [
"MIT"
] | 1 | 2021-09-30T09:49:40.000Z | 2021-09-30T09:49:40.000Z | from sqlalchemy import String, ForeignKey, Integer, Enum
from .base import AbstractModel, RequiredColumn
class Customer(AbstractModel):
    """Maps an application user to their corresponding Stripe customer record."""

    __tablename__ = "customer"
    # Application-side user identifier stored as text — assumed to reference the
    # user service's ID; TODO confirm against the calling code.
    user_id = RequiredColumn(String(50))
    # Identifier of the matching customer object on the Stripe side.
    stripe_customer_id = RequiredColumn(String(50))
| 26.3 | 56 | 0.775665 | from sqlalchemy import String, ForeignKey, Integer, Enum
from .base import AbstractModel, RequiredColumn
class Customer(AbstractModel):
    """Maps an application user to their corresponding Stripe customer record."""

    __tablename__ = "customer"
    # Application-side user identifier stored as text — assumed to reference the
    # user service's ID; TODO confirm against the calling code.
    user_id = RequiredColumn(String(50))
    # Identifier of the matching customer object on the Stripe side.
    stripe_customer_id = RequiredColumn(String(50))
| true | true |
f71c61dd6a6ae22677f0c87104c88f4220333ae8 | 234 | py | Python | physionet-django/project/fileviews/image.py | Lucas-Mc/physionet-build | 77da5da6273cf3f5f2afce95dc5d0ce3302741ca | [
"BSD-3-Clause"
] | 36 | 2019-02-14T18:10:39.000Z | 2022-01-21T12:48:52.000Z | physionet-django/project/fileviews/image.py | Lucas-Mc/physionet-build | 77da5da6273cf3f5f2afce95dc5d0ce3302741ca | [
"BSD-3-Clause"
] | 1,051 | 2019-01-31T18:03:14.000Z | 2022-03-31T20:53:04.000Z | physionet-django/project/fileviews/image.py | Lucas-Mc/physionet-build | 77da5da6273cf3f5f2afce95dc5d0ce3302741ca | [
"BSD-3-Clause"
] | 13 | 2019-03-26T11:02:32.000Z | 2022-03-17T11:39:49.000Z | from project.fileviews.base import FileView
class ImageFileView(FileView):
    """File view that renders image files using the image-specific template."""

    def render(self, request):
        # Delegate to the base view, selecting the template for images.
        template_name = 'project/file_view_image.html'
        return super().render(request, template_name)
| 21.272727 | 70 | 0.692308 | from project.fileviews.base import FileView
class ImageFileView(FileView):
    """Displays an image file by rendering the dedicated image template."""

    def render(self, request):
        # All behavior lives in the base class; only the template differs.
        return super().render(request, 'project/file_view_image.html')
| true | true |
f71c63d30ec047c7337fa37ae62317af4f4cdf92 | 2,398 | py | Python | model_zoo/official/cv/alexnet/export.py | dongkcs/mindspore | cd7df6dbf463ff3128e9181e9d0c779cecb81320 | [
"Apache-2.0"
] | 2 | 2020-11-23T13:46:37.000Z | 2020-12-20T02:02:38.000Z | model_zoo/official/cv/alexnet/export.py | dongkcs/mindspore | cd7df6dbf463ff3128e9181e9d0c779cecb81320 | [
"Apache-2.0"
] | null | null | null | model_zoo/official/cv/alexnet/export.py | dongkcs/mindspore | cd7df6dbf463ff3128e9181e9d0c779cecb81320 | [
"Apache-2.0"
] | 1 | 2021-01-01T08:35:01.000Z | 2021-01-01T08:35:01.000Z | # Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""
##############export checkpoint file into air and onnx models#################
python export.py
"""
import argparse
import numpy as np
import mindspore as ms
from mindspore import Tensor
from mindspore import context
from mindspore.train.serialization import load_checkpoint, load_param_into_net, export
from src.config import alexnet_cifar10_cfg, alexnet_imagenet_cfg
from src.alexnet import AlexNet
if __name__ == '__main__':
    # Export a trained AlexNet checkpoint to a MindSpore AIR model file.
    parser = argparse.ArgumentParser(description='Classification')
    parser.add_argument('--dataset_name', type=str, default='cifar10', choices=['imagenet', 'cifar10'],
                        help='please choose dataset: imagenet or cifar10.')
    parser.add_argument('--device_target', type=str, default="Ascend",
                        choices=['Ascend', 'GPU'],
                        help='device where the code will be implemented (default: Ascend)')
    # FIX: the original help string used a backslash line continuation inside the
    # literal, which embedded a run of indentation spaces into the displayed help.
    parser.add_argument('--ckpt_path', type=str, default="./ckpt",
                        help='path to the trained checkpoint file')
    args_opt = parser.parse_args()

    context.set_context(mode=context.GRAPH_MODE, device_target=args_opt.device_target)

    # Select the per-dataset hyperparameter bundle.  argparse `choices` already
    # restricts the value, but keep the explicit error for defensive parity.
    dataset_configs = {
        'cifar10': alexnet_cifar10_cfg,
        'imagenet': alexnet_imagenet_cfg,
    }
    if args_opt.dataset_name not in dataset_configs:
        raise ValueError("dataset is not support.")
    cfg = dataset_configs[args_opt.dataset_name]

    # Rebuild the network and load the trained weights from the checkpoint.
    net = AlexNet(num_classes=cfg.num_classes)
    param_dict = load_checkpoint(args_opt.ckpt_path)
    load_param_into_net(net, param_dict)

    # A random dummy input fixes the exported graph's input shape (NCHW, batch=1).
    input_arr = Tensor(np.random.uniform(0.0, 1.0, size=[1, 3, cfg.image_height, cfg.image_width]), ms.float32)
    export(net, input_arr, file_name=cfg.air_name, file_format="AIR")
| 42.821429 | 111 | 0.685988 |
import argparse
import numpy as np
import mindspore as ms
from mindspore import Tensor
from mindspore import context
from mindspore.train.serialization import load_checkpoint, load_param_into_net, export
from src.config import alexnet_cifar10_cfg, alexnet_imagenet_cfg
from src.alexnet import AlexNet
if __name__ == '__main__':
    # Export a trained AlexNet checkpoint to a MindSpore AIR model file.
    parser = argparse.ArgumentParser(description='Classification')
    parser.add_argument('--dataset_name', type=str, default='cifar10', choices=['imagenet', 'cifar10'],
                        help='please choose dataset: imagenet or cifar10.')
    parser.add_argument('--device_target', type=str, default="Ascend",
                        choices=['Ascend', 'GPU'],
                        help='device where the code will be implemented (default: Ascend)')
    # FIX: the original help string used a backslash line continuation inside the
    # literal, which embedded a run of indentation spaces into the displayed help.
    parser.add_argument('--ckpt_path', type=str, default="./ckpt",
                        help='path to the trained checkpoint file')
    args_opt = parser.parse_args()

    context.set_context(mode=context.GRAPH_MODE, device_target=args_opt.device_target)

    # Select the per-dataset hyperparameter bundle.  argparse `choices` already
    # restricts the value, but keep the explicit error for defensive parity.
    dataset_configs = {
        'cifar10': alexnet_cifar10_cfg,
        'imagenet': alexnet_imagenet_cfg,
    }
    if args_opt.dataset_name not in dataset_configs:
        raise ValueError("dataset is not support.")
    cfg = dataset_configs[args_opt.dataset_name]

    # Rebuild the network and load the trained weights from the checkpoint.
    net = AlexNet(num_classes=cfg.num_classes)
    param_dict = load_checkpoint(args_opt.ckpt_path)
    load_param_into_net(net, param_dict)

    # A random dummy input fixes the exported graph's input shape (NCHW, batch=1).
    input_arr = Tensor(np.random.uniform(0.0, 1.0, size=[1, 3, cfg.image_height, cfg.image_width]), ms.float32)
    export(net, input_arr, file_name=cfg.air_name, file_format="AIR")
| true | true |
f71c6605bc3c94744764c205c3291d67c5416f2f | 624 | py | Python | setup.py | tianjianjiang/bigscience-metadata | 3460c8d2bca2c818646feb3b49c50b351b51ad70 | [
"Apache-2.0"
] | null | null | null | setup.py | tianjianjiang/bigscience-metadata | 3460c8d2bca2c818646feb3b49c50b351b51ad70 | [
"Apache-2.0"
] | null | null | null | setup.py | tianjianjiang/bigscience-metadata | 3460c8d2bca2c818646feb3b49c50b351b51ad70 | [
"Apache-2.0"
] | null | null | null | from setuptools import find_packages, setup
def req_file(filename):
    """Return every line of *filename* with surrounding whitespace stripped."""
    with open(filename) as handle:
        return [line.strip() for line in handle]
# Runtime dependencies are read from the adjacent requirements file at build time.
install_requires = req_file("requirements.txt")
# Package metadata; installed as `bsmetadata` from the repository layout.
setup(
    name="bsmetadata",
    python_requires=">=3.7.11, <3.10",
    version="0.1.0",
    url="https://github.com/bigscience-workshop/metadata.git",
    author="Multiple Authors",
    author_email="xxx",
    description="Codebase for including metadata (e.g., URLs, timestamps, HTML tags) during language model pretraining.",
    packages=find_packages(),
    install_requires=install_requires,
)
| 27.130435 | 121 | 0.698718 | from setuptools import find_packages, setup
def req_file(filename):
    """Read a requirements file and return its lines, whitespace-stripped."""
    with open(filename) as fp:
        raw_lines = fp.readlines()
    return [entry.strip() for entry in raw_lines]
# Runtime dependencies are read from the adjacent requirements file at build time.
install_requires = req_file("requirements.txt")
# Package metadata; installed as `bsmetadata` from the repository layout.
setup(
    name="bsmetadata",
    python_requires=">=3.7.11, <3.10",
    version="0.1.0",
    url="https://github.com/bigscience-workshop/metadata.git",
    author="Multiple Authors",
    author_email="xxx",
    description="Codebase for including metadata (e.g., URLs, timestamps, HTML tags) during language model pretraining.",
    packages=find_packages(),
    install_requires=install_requires,
)
| true | true |
f71c66238339944e28e835fcee02bfdd0f475eda | 1,171 | py | Python | setup.py | judoscale/judoscale-python | 3a2c82921ec5feb81d2691037b9bf13e94f7e5c9 | [
"MIT"
] | null | null | null | setup.py | judoscale/judoscale-python | 3a2c82921ec5feb81d2691037b9bf13e94f7e5c9 | [
"MIT"
] | null | null | null | setup.py | judoscale/judoscale-python | 3a2c82921ec5feb81d2691037b9bf13e94f7e5c9 | [
"MIT"
] | null | null | null | import setuptools
# To publish:
#
# - Update VERSION constant below
# - python3 -m pip install --upgrade build twine
# - rm -rf dist && python3 -m build
# - python3 -m twine upload dist/*
# - Username is __token__, password is token value
# Package version; bump this before publishing a new release.
VERSION = "1.0.0rc1"
# Runtime dependencies pinned to a major-version range.
INSTALL_REQUIRES = ["requests<3.0.0"]
# The long description shown on PyPI is the project README.
with open("README.md", "r", encoding="utf-8") as fh:
    long_description = fh.read()
setuptools.setup(
    name="judoscale-python",
    version=VERSION,
    author="Adam McCrea",
    author_email="adam@adamlogic.com",
    description="Official Python adapter for Judoscale—the advanced autoscaler for Heroku",
    long_description=long_description,
    long_description_content_type="text/markdown",
    url="https://github.com/judoscale/judoscale-python",
    project_urls={
        "Issue Tracker": "https://github.com/judoscale/judoscale-python/issues",
    },
    classifiers=[
        "Programming Language :: Python :: 3",
        "License :: OSI Approved :: MIT License",
    ],
    # Sources live under src/ (src-layout), so point setuptools there.
    package_dir={"": "src"},
    packages=setuptools.find_packages(where="src"),
    install_requires=INSTALL_REQUIRES,
)
| 30.815789 | 91 | 0.674637 | import setuptools
# Package version; bump this before publishing a new release.
VERSION = "1.0.0rc1"
# Runtime dependencies pinned to a major-version range.
INSTALL_REQUIRES = ["requests<3.0.0"]
# The long description shown on PyPI is the project README.
with open("README.md", "r", encoding="utf-8") as fh:
    long_description = fh.read()
setuptools.setup(
    name="judoscale-python",
    version=VERSION,
    author="Adam McCrea",
    author_email="adam@adamlogic.com",
    description="Official Python adapter for Judoscale—the advanced autoscaler for Heroku",
    long_description=long_description,
    long_description_content_type="text/markdown",
    url="https://github.com/judoscale/judoscale-python",
    project_urls={
        "Issue Tracker": "https://github.com/judoscale/judoscale-python/issues",
    },
    classifiers=[
        "Programming Language :: Python :: 3",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
    ],
    # Sources live under src/ (src-layout), so point setuptools there.
    package_dir={"": "src"},
    packages=setuptools.find_packages(where="src"),
    install_requires=INSTALL_REQUIRES,
)
| true | true |
f71c68260a4ae6f5fd97179a6c4e00a92144ea7f | 36,156 | py | Python | zerver/migrations/0001_initial.py | N-Shar-ma/zulip | 95303a9929424b55a1f7c7cce9313c4619a9533b | [
"Apache-2.0"
] | 4 | 2021-09-16T16:46:55.000Z | 2022-02-06T13:00:21.000Z | zerver/migrations/0001_initial.py | jai2201/zulip | 95303a9929424b55a1f7c7cce9313c4619a9533b | [
"Apache-2.0"
] | null | null | null | zerver/migrations/0001_initial.py | jai2201/zulip | 95303a9929424b55a1f7c7cce9313c4619a9533b | [
"Apache-2.0"
] | null | null | null | # Generated by Django 1.11.2 on 2017-06-22 10:22
import bitfield.models
import django.contrib.auth.models
import django.core.validators
import django.db.models.deletion
import django.utils.timezone
from django.conf import settings
from django.contrib.postgres.indexes import GinIndex
from django.contrib.postgres.search import SearchVectorField
from django.db import migrations, models
from django.db.backends.postgresql.schema import BaseDatabaseSchemaEditor
from django.db.migrations.state import StateApps
from django.db.models.functions import Upper
from zerver.models import generate_email_token_for_stream
def migrate_existing_attachment_data(
    apps: StateApps, schema_editor: BaseDatabaseSchemaEditor
) -> None:
    """Backfill Attachment.realm and Attachment.is_realm_public.

    The realm is copied from the attachment's owner.  An attachment becomes
    realm-public if its owner sent it to a stream that is not invite-only in
    a realm that is not a Zephyr mirror realm.
    """
    Attachment = apps.get_model("zerver", "Attachment")
    Recipient = apps.get_model("zerver", "Recipient")
    Stream = apps.get_model("zerver", "Stream")
    for attachment in Attachment.objects.all():
        owner = attachment.owner
        attachment.realm = owner.realm
        for message in attachment.messages.all():
            # Only messages the owner themselves sent to a stream are relevant.
            if message.sender != owner or message.recipient.type != Recipient.STREAM:
                continue
            stream = Stream.objects.get(id=message.recipient.type_id)
            publicly_visible = (
                not stream.realm.is_zephyr_mirror_realm and not stream.invite_only
            )
            attachment.is_realm_public = attachment.is_realm_public or publicly_visible
        attachment.save()
class Migration(migrations.Migration):
    """Squashed initial schema for the zerver app.

    Creates the core models (UserProfile, Realm, Stream, Message, ...), installs
    the PostgreSQL full-text-search configuration and triggers via raw SQL, and
    then applies a long tail of schema tweaks that were squashed into this
    single initial migration.
    """

    initial = True
    dependencies = [
        ("auth", "0001_initial"),
    ]
    # Build the raw SQL that configures full-text search.  The first branch is
    # used when the hunspell dictionaries are not installed in PostgreSQL.
    if settings.POSTGRESQL_MISSING_DICTIONARIES:
        fts_sql = """
CREATE TEXT SEARCH CONFIGURATION zulip.english_us_search (COPY=pg_catalog.english);
"""
    else:
        fts_sql = """
CREATE TEXT SEARCH DICTIONARY english_us_hunspell
  (template = ispell, DictFile = en_us, AffFile = en_us, StopWords = zulip_english);
CREATE TEXT SEARCH CONFIGURATION zulip.english_us_search (COPY=pg_catalog.english);
ALTER TEXT SEARCH CONFIGURATION zulip.english_us_search
  ALTER MAPPING FOR asciiword, asciihword, hword_asciipart, word, hword, hword_part
  WITH english_us_hunspell, english_stem;
"""
    # Shared tail: HTML-escaping helper plus the trigger/queue machinery that
    # records message IDs needing asynchronous tsvector updates.
    fts_sql += """
CREATE FUNCTION escape_html(text) RETURNS text IMMUTABLE LANGUAGE 'sql' AS $$
  SELECT replace(replace(replace(replace(replace($1, '&', '&amp;'), '<', '&lt;'),
                                 '>', '&gt;'), '"', '&quot;'), '''', '&#39;');
$$ ;

CREATE TABLE fts_update_log (id SERIAL PRIMARY KEY, message_id INTEGER NOT NULL);
CREATE FUNCTION do_notify_fts_update_log() RETURNS trigger LANGUAGE plpgsql AS
  $$ BEGIN NOTIFY fts_update_log; RETURN NEW; END $$;
CREATE TRIGGER fts_update_log_notify AFTER INSERT ON fts_update_log
  FOR EACH STATEMENT EXECUTE PROCEDURE do_notify_fts_update_log();
CREATE FUNCTION append_to_fts_update_log() RETURNS trigger LANGUAGE plpgsql AS
  $$ BEGIN INSERT INTO fts_update_log (message_id) VALUES (NEW.id); RETURN NEW; END $$;
CREATE TRIGGER zerver_message_update_search_tsvector_async
  BEFORE INSERT OR UPDATE OF subject, rendered_content ON zerver_message
  FOR EACH ROW EXECUTE PROCEDURE append_to_fts_update_log();
"""
    # Model creations first, followed by constraints/indexes, the raw FTS SQL,
    # a data backfill (RunPython), and squashed-in later alterations.
    operations = [
        migrations.CreateModel(
            name="UserProfile",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
                    ),
                ),
                ("password", models.CharField(max_length=128, verbose_name="password")),
                (
                    "last_login",
                    models.DateTimeField(
                        default=django.utils.timezone.now, verbose_name="last login"
                    ),
                ),
                (
                    "is_superuser",
                    models.BooleanField(
                        default=False,
                        help_text="Designates that this user has all permissions without explicitly assigning them.",
                        verbose_name="superuser status",
                    ),
                ),
                ("email", models.EmailField(db_index=True, max_length=75, unique=True)),
                ("is_staff", models.BooleanField(default=False)),
                ("is_active", models.BooleanField(default=True)),
                ("is_bot", models.BooleanField(default=False)),
                ("date_joined", models.DateTimeField(default=django.utils.timezone.now)),
                ("is_mirror_dummy", models.BooleanField(default=False)),
                ("full_name", models.CharField(max_length=100)),
                ("short_name", models.CharField(max_length=100)),
                ("pointer", models.IntegerField()),
                ("last_pointer_updater", models.CharField(max_length=64)),
                ("api_key", models.CharField(max_length=32)),
                ("enable_stream_desktop_notifications", models.BooleanField(default=True)),
                ("enable_stream_sounds", models.BooleanField(default=True)),
                ("enable_desktop_notifications", models.BooleanField(default=True)),
                ("enable_sounds", models.BooleanField(default=True)),
                ("enable_offline_email_notifications", models.BooleanField(default=True)),
                ("enable_offline_push_notifications", models.BooleanField(default=True)),
                ("enable_digest_emails", models.BooleanField(default=True)),
                ("default_desktop_notifications", models.BooleanField(default=True)),
                (
                    "last_reminder",
                    models.DateTimeField(default=django.utils.timezone.now, null=True),
                ),
                ("rate_limits", models.CharField(default="", max_length=100)),
                ("default_all_public_streams", models.BooleanField(default=False)),
                ("enter_sends", models.NullBooleanField(default=True)),
                ("autoscroll_forever", models.BooleanField(default=False)),
                ("twenty_four_hour_time", models.BooleanField(default=False)),
                (
                    "avatar_source",
                    models.CharField(
                        choices=[
                            ("G", "Hosted by Gravatar"),
                            ("U", "Uploaded by user"),
                            ("S", "System generated"),
                        ],
                        default="G",
                        max_length=1,
                    ),
                ),
                (
                    "tutorial_status",
                    models.CharField(
                        choices=[("W", "Waiting"), ("S", "Started"), ("F", "Finished")],
                        default="W",
                        max_length=1,
                    ),
                ),
                ("onboarding_steps", models.TextField(default="[]")),
                ("invites_granted", models.IntegerField(default=0)),
                ("invites_used", models.IntegerField(default=0)),
                ("alert_words", models.TextField(default="[]")),
                ("muted_topics", models.TextField(default="[]")),
                (
                    "bot_owner",
                    models.ForeignKey(
                        null=True,
                        on_delete=django.db.models.deletion.SET_NULL,
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
            ],
            options={
                "abstract": False,
            },
        ),
        migrations.CreateModel(
            name="Client",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
                    ),
                ),
                ("name", models.CharField(db_index=True, max_length=30, unique=True)),
            ],
        ),
        migrations.CreateModel(
            name="DefaultStream",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
                    ),
                ),
            ],
        ),
        migrations.CreateModel(
            name="Huddle",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
                    ),
                ),
                ("huddle_hash", models.CharField(db_index=True, max_length=40, unique=True)),
            ],
        ),
        migrations.CreateModel(
            name="Message",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
                    ),
                ),
                ("subject", models.CharField(db_index=True, max_length=60)),
                ("content", models.TextField()),
                ("rendered_content", models.TextField(null=True)),
                ("rendered_content_version", models.IntegerField(null=True)),
                ("pub_date", models.DateTimeField(db_index=True, verbose_name="date published")),
                ("last_edit_time", models.DateTimeField(null=True)),
                ("edit_history", models.TextField(null=True)),
                ("has_attachment", models.BooleanField(db_index=True, default=False)),
                ("has_image", models.BooleanField(db_index=True, default=False)),
                ("has_link", models.BooleanField(db_index=True, default=False)),
            ],
        ),
        migrations.CreateModel(
            name="PreregistrationUser",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
                    ),
                ),
                ("email", models.EmailField(max_length=75)),
                ("invited_at", models.DateTimeField(auto_now=True)),
                ("status", models.IntegerField(default=0)),
            ],
        ),
        migrations.CreateModel(
            name="PushDeviceToken",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
                    ),
                ),
                ("kind", models.PositiveSmallIntegerField(choices=[(1, "apns"), (2, "gcm")])),
                ("token", models.CharField(max_length=4096, unique=True)),
                (
                    "last_updated",
                    models.DateTimeField(auto_now=True, default=django.utils.timezone.now),
                ),
                ("ios_app_id", models.TextField(null=True)),
                (
                    "user",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
                    ),
                ),
            ],
        ),
        migrations.CreateModel(
            name="Realm",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
                    ),
                ),
                ("domain", models.CharField(db_index=True, max_length=40, unique=True)),
                ("name", models.CharField(max_length=40, null=True)),
                ("restricted_to_domain", models.BooleanField(default=True)),
                ("invite_required", models.BooleanField(default=False)),
                ("invite_by_admins_only", models.BooleanField(default=False)),
                ("mandatory_topics", models.BooleanField(default=False)),
                ("show_digest_email", models.BooleanField(default=True)),
                ("name_changes_disabled", models.BooleanField(default=False)),
                ("date_created", models.DateTimeField(default=django.utils.timezone.now)),
                ("deactivated", models.BooleanField(default=False)),
            ],
            options={
                "permissions": (("administer", "Administer a realm"),),
            },
        ),
        migrations.CreateModel(
            name="RealmAlias",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
                    ),
                ),
                ("domain", models.CharField(db_index=True, max_length=80, unique=True)),
                (
                    "realm",
                    models.ForeignKey(
                        null=True, on_delete=django.db.models.deletion.CASCADE, to="zerver.Realm"
                    ),
                ),
            ],
        ),
        migrations.CreateModel(
            name="RealmEmoji",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
                    ),
                ),
                ("name", models.TextField()),
                ("img_url", models.TextField()),
                (
                    "realm",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to="zerver.Realm"
                    ),
                ),
            ],
        ),
        migrations.CreateModel(
            name="RealmFilter",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
                    ),
                ),
                ("pattern", models.TextField()),
                ("url_format_string", models.TextField()),
                (
                    "realm",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to="zerver.Realm"
                    ),
                ),
            ],
        ),
        migrations.CreateModel(
            name="Recipient",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
                    ),
                ),
                ("type_id", models.IntegerField(db_index=True)),
                ("type", models.PositiveSmallIntegerField(db_index=True)),
            ],
        ),
        migrations.CreateModel(
            name="Referral",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
                    ),
                ),
                ("email", models.EmailField(max_length=75)),
                ("timestamp", models.DateTimeField(auto_now_add=True)),
                (
                    "user_profile",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
                    ),
                ),
            ],
        ),
        migrations.CreateModel(
            name="ScheduledJob",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
                    ),
                ),
                ("scheduled_timestamp", models.DateTimeField()),
                ("type", models.PositiveSmallIntegerField()),
                ("data", models.TextField()),
                ("filter_id", models.IntegerField(null=True)),
                ("filter_string", models.CharField(max_length=100)),
            ],
        ),
        migrations.CreateModel(
            name="Stream",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
                    ),
                ),
                ("name", models.CharField(db_index=True, max_length=60)),
                ("invite_only", models.NullBooleanField(default=False)),
                (
                    "email_token",
                    models.CharField(default=generate_email_token_for_stream, max_length=32),
                ),
                ("description", models.CharField(default="", max_length=1024)),
                ("date_created", models.DateTimeField(default=django.utils.timezone.now)),
                ("deactivated", models.BooleanField(default=False)),
                (
                    "realm",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to="zerver.Realm"
                    ),
                ),
            ],
        ),
        migrations.CreateModel(
            name="Subscription",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
                    ),
                ),
                ("active", models.BooleanField(default=True)),
                ("in_home_view", models.NullBooleanField(default=True)),
                ("color", models.CharField(default="#c2c2c2", max_length=10)),
                ("desktop_notifications", models.BooleanField(default=True)),
                ("audible_notifications", models.BooleanField(default=True)),
                ("notifications", models.BooleanField(default=False)),
                (
                    "recipient",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to="zerver.Recipient"
                    ),
                ),
                (
                    "user_profile",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
                    ),
                ),
            ],
        ),
        migrations.CreateModel(
            name="UserActivity",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
                    ),
                ),
                ("query", models.CharField(db_index=True, max_length=50)),
                ("count", models.IntegerField()),
                ("last_visit", models.DateTimeField(verbose_name="last visit")),
                (
                    "client",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to="zerver.Client"
                    ),
                ),
                (
                    "user_profile",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
                    ),
                ),
            ],
        ),
        migrations.CreateModel(
            name="UserActivityInterval",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
                    ),
                ),
                ("start", models.DateTimeField(db_index=True, verbose_name="start time")),
                ("end", models.DateTimeField(db_index=True, verbose_name="end time")),
                (
                    "user_profile",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
                    ),
                ),
            ],
        ),
        migrations.CreateModel(
            name="UserMessage",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
                    ),
                ),
                (
                    "flags",
                    bitfield.models.BitField(
                        [
                            "read",
                            "starred",
                            "collapsed",
                            "mentioned",
                            "wildcard_mentioned",
                            "summarize_in_home",
                            "summarize_in_stream",
                            "force_expand",
                            "force_collapse",
                            "has_alert_word",
                            "historical",
                            "is_me_message",
                        ],
                        default=0,
                    ),
                ),
                (
                    "message",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to="zerver.Message"
                    ),
                ),
                (
                    "user_profile",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
                    ),
                ),
            ],
        ),
        migrations.CreateModel(
            name="UserPresence",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
                    ),
                ),
                ("timestamp", models.DateTimeField(verbose_name="presence changed")),
                ("status", models.PositiveSmallIntegerField(default=1)),
                (
                    "client",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to="zerver.Client"
                    ),
                ),
                (
                    "user_profile",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
                    ),
                ),
            ],
        ),
        migrations.AlterUniqueTogether(
            name="userpresence",
            unique_together={("user_profile", "client")},
        ),
        migrations.AlterUniqueTogether(
            name="usermessage",
            unique_together={("user_profile", "message")},
        ),
        migrations.AlterUniqueTogether(
            name="useractivity",
            unique_together={("user_profile", "client", "query")},
        ),
        migrations.AlterUniqueTogether(
            name="subscription",
            unique_together={("user_profile", "recipient")},
        ),
        migrations.AlterUniqueTogether(
            name="stream",
            unique_together={("name", "realm")},
        ),
        migrations.AlterUniqueTogether(
            name="recipient",
            unique_together={("type", "type_id")},
        ),
        migrations.AlterUniqueTogether(
            name="realmfilter",
            unique_together={("realm", "pattern")},
        ),
        migrations.AlterUniqueTogether(
            name="realmemoji",
            unique_together={("realm", "name")},
        ),
        migrations.AddField(
            model_name="realm",
            name="notifications_stream",
            field=models.ForeignKey(
                blank=True,
                null=True,
                on_delete=django.db.models.deletion.CASCADE,
                related_name="+",
                to="zerver.Stream",
            ),
        ),
        migrations.AddField(
            model_name="preregistrationuser",
            name="realm",
            field=models.ForeignKey(
                null=True, on_delete=django.db.models.deletion.CASCADE, to="zerver.Realm"
            ),
        ),
        migrations.AddField(
            model_name="preregistrationuser",
            name="referred_by",
            field=models.ForeignKey(
                null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
            ),
        ),
        migrations.AddField(
            model_name="preregistrationuser",
            name="streams",
            field=models.ManyToManyField(null=True, to="zerver.Stream"),
        ),
        migrations.AddField(
            model_name="message",
            name="recipient",
            field=models.ForeignKey(
                on_delete=django.db.models.deletion.CASCADE, to="zerver.Recipient"
            ),
        ),
        migrations.AddField(
            model_name="message",
            name="sender",
            field=models.ForeignKey(
                on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
            ),
        ),
        migrations.AddField(
            model_name="message",
            name="sending_client",
            field=models.ForeignKey(
                on_delete=django.db.models.deletion.CASCADE, to="zerver.Client"
            ),
        ),
        migrations.AddField(
            model_name="defaultstream",
            name="realm",
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="zerver.Realm"),
        ),
        migrations.AddField(
            model_name="defaultstream",
            name="stream",
            field=models.ForeignKey(
                on_delete=django.db.models.deletion.CASCADE, to="zerver.Stream"
            ),
        ),
        migrations.AlterUniqueTogether(
            name="defaultstream",
            unique_together={("realm", "stream")},
        ),
        migrations.AddField(
            model_name="userprofile",
            name="default_events_register_stream",
            field=models.ForeignKey(
                null=True,
                on_delete=django.db.models.deletion.CASCADE,
                related_name="+",
                to="zerver.Stream",
            ),
        ),
        migrations.AddField(
            model_name="userprofile",
            name="default_sending_stream",
            field=models.ForeignKey(
                null=True,
                on_delete=django.db.models.deletion.CASCADE,
                related_name="+",
                to="zerver.Stream",
            ),
        ),
        migrations.AddField(
            model_name="userprofile",
            name="groups",
            field=models.ManyToManyField(
                blank=True,
                help_text="The groups this user belongs to. A user will get all permissions granted to each of their groups.",
                related_name="user_set",
                related_query_name="user",
                to="auth.Group",
                verbose_name="groups",
            ),
        ),
        migrations.AddField(
            model_name="userprofile",
            name="realm",
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="zerver.Realm"),
        ),
        migrations.AddField(
            model_name="userprofile",
            name="user_permissions",
            field=models.ManyToManyField(
                blank=True,
                help_text="Specific permissions for this user.",
                related_name="user_set",
                related_query_name="user",
                to="auth.Permission",
                verbose_name="user permissions",
            ),
        ),
        migrations.AddField(
            model_name="message",
            name="search_tsvector",
            field=SearchVectorField(null=True),
        ),
        migrations.AddIndex(
            model_name="message",
            index=GinIndex(
                "search_tsvector", fastupdate=False, name="zerver_message_search_tsvector"
            ),
        ),
        # Raw SQL installing the full-text-search configuration and triggers
        # assembled in fts_sql above.
        migrations.RunSQL(
            sql=fts_sql,
        ),
        migrations.AlterModelManagers(
            name="userprofile",
            managers=[
                ("objects", django.contrib.auth.models.UserManager()),
            ],
        ),
        migrations.AlterField(
            model_name="preregistrationuser",
            name="email",
            field=models.EmailField(max_length=254),
        ),
        migrations.AlterField(
            model_name="preregistrationuser",
            name="streams",
            field=models.ManyToManyField(to="zerver.Stream"),
        ),
        migrations.AlterField(
            model_name="pushdevicetoken",
            name="last_updated",
            field=models.DateTimeField(auto_now=True),
        ),
        migrations.AlterField(
            model_name="referral",
            name="email",
            field=models.EmailField(max_length=254),
        ),
        migrations.AlterField(
            model_name="userprofile",
            name="email",
            field=models.EmailField(db_index=True, max_length=254, unique=True),
        ),
        migrations.AlterField(
            model_name="userprofile",
            name="last_login",
            field=models.DateTimeField(blank=True, null=True, verbose_name="last login"),
        ),
        migrations.AddIndex(
            model_name="message",
            index=models.Index(Upper("subject"), name="upper_subject_idx"),
        ),
        migrations.AddIndex(
            model_name="stream",
            index=models.Index(Upper("name"), name="upper_stream_name_idx"),
        ),
        migrations.AddField(
            model_name="userprofile",
            name="left_side_userlist",
            field=models.BooleanField(default=False),
        ),
        migrations.AlterModelOptions(
            name="realm",
            options={
                "permissions": (
                    ("administer", "Administer a realm"),
                    ("api_super_user", "Can send messages as other users for mirroring"),
                )
            },
        ),
        migrations.AddIndex(
            model_name="userprofile",
            index=models.Index(Upper("email"), name="upper_userprofile_email_idx"),
        ),
        migrations.AlterField(
            model_name="userprofile",
            name="is_active",
            field=models.BooleanField(db_index=True, default=True),
        ),
        migrations.AlterField(
            model_name="userprofile",
            name="is_bot",
            field=models.BooleanField(db_index=True, default=False),
        ),
        migrations.AddIndex(
            model_name="preregistrationuser",
            index=models.Index(Upper("email"), name="upper_preregistration_email_idx"),
        ),
        migrations.AlterField(
            model_name="userprofile",
            name="enable_stream_desktop_notifications",
            field=models.BooleanField(default=False),
        ),
        migrations.AlterField(
            model_name="userprofile",
            name="enable_stream_sounds",
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name="userprofile",
            name="is_api_super_user",
            field=models.BooleanField(db_index=True, default=False),
        ),
        migrations.AddField(
            model_name="userprofile",
            name="is_realm_admin",
            field=models.BooleanField(db_index=True, default=False),
        ),
        migrations.AlterField(
            model_name="realmemoji",
            name="img_url",
            field=models.URLField(),
        ),
        migrations.AlterField(
            model_name="realmemoji",
            name="name",
            field=models.TextField(
                validators=[
                    django.core.validators.MinLengthValidator(1),
                    django.core.validators.RegexValidator(regex="^[0-9a-zA-Z.\\-_]+(?<![.\\-_])$"),
                ]
            ),
        ),
        migrations.AlterField(
            model_name="realmemoji",
            name="img_url",
            field=models.URLField(max_length=1000),
        ),
        migrations.CreateModel(
            name="Attachment",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
                    ),
                ),
                ("file_name", models.CharField(db_index=True, max_length=100)),
                ("path_id", models.TextField(db_index=True)),
                (
                    "create_time",
                    models.DateTimeField(db_index=True, default=django.utils.timezone.now),
                ),
                ("messages", models.ManyToManyField(to="zerver.Message")),
                (
                    "owner",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
                    ),
                ),
                ("is_realm_public", models.BooleanField(default=False)),
            ],
        ),
        migrations.AddField(
            model_name="realm",
            name="create_stream_by_admins_only",
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name="userprofile",
            name="bot_type",
            field=models.PositiveSmallIntegerField(db_index=True, null=True),
        ),
        migrations.AlterField(
            model_name="realmemoji",
            name="name",
            field=models.TextField(
                validators=[
                    django.core.validators.MinLengthValidator(1),
                    django.core.validators.RegexValidator(
                        message="Invalid characters in emoji name",
                        regex="^[0-9a-zA-Z.\\-_]+(?<![.\\-_])$",
                    ),
                ]
            ),
        ),
        migrations.AddField(
            model_name="preregistrationuser",
            name="realm_creation",
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name="attachment",
            name="realm",
            field=models.ForeignKey(
                blank=True,
                null=True,
                on_delete=django.db.models.deletion.CASCADE,
                to="zerver.Realm",
            ),
        ),
        # Data backfill: populate Attachment.realm/is_realm_public via the
        # migrate_existing_attachment_data function defined in this module.
        migrations.RunPython(
            code=migrate_existing_attachment_data,
            elidable=True,
        ),
        migrations.AddField(
            model_name="subscription",
            name="pin_to_top",
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name="userprofile",
            name="default_language",
            field=models.CharField(default="en", max_length=50),
        ),
        migrations.AddField(
            model_name="realm",
            name="allow_message_editing",
            field=models.BooleanField(default=True),
        ),
        migrations.AddField(
            model_name="realm",
            name="message_content_edit_limit_seconds",
            field=models.IntegerField(default=600),
        ),
        migrations.AddField(
            model_name="realm",
            name="default_language",
            field=models.CharField(default="en", max_length=50),
        ),
        migrations.AddField(
            model_name="userprofile",
            name="tos_version",
            field=models.CharField(max_length=10, null=True),
        ),
    ]
| 38.545842 | 126 | 0.491841 |
import bitfield.models
import django.contrib.auth.models
import django.core.validators
import django.db.models.deletion
import django.utils.timezone
from django.conf import settings
from django.contrib.postgres.indexes import GinIndex
from django.contrib.postgres.search import SearchVectorField
from django.db import migrations, models
from django.db.backends.postgresql.schema import BaseDatabaseSchemaEditor
from django.db.migrations.state import StateApps
from django.db.models.functions import Upper
from zerver.models import generate_email_token_for_stream
def migrate_existing_attachment_data(
    apps: StateApps, schema_editor: BaseDatabaseSchemaEditor
) -> None:
    """Backfill the denormalized realm / is_realm_public fields on Attachment.

    Each attachment inherits its owner's realm, and is flagged realm-public
    when its owner sent it in a message to a public stream of a
    non-Zephyr-mirror realm.
    """
    Attachment = apps.get_model("zerver", "Attachment")
    Recipient = apps.get_model("zerver", "Recipient")
    Stream = apps.get_model("zerver", "Stream")

    for attachment in Attachment.objects.all():
        uploader = attachment.owner
        attachment.realm = uploader.realm
        for message in attachment.messages.all():
            # Only messages the uploader themself sent can make the file public.
            if message.sender != uploader:
                continue
            if message.recipient.type != Recipient.STREAM:
                continue
            stream = Stream.objects.get(id=message.recipient.type_id)
            public = not stream.realm.is_zephyr_mirror_realm and not stream.invite_only
            attachment.is_realm_public = attachment.is_realm_public or public
        attachment.save()
class Migration(migrations.Migration):
initial = True
dependencies = [
("auth", "0001_initial"),
]
if settings.POSTGRESQL_MISSING_DICTIONARIES:
fts_sql = """
CREATE TEXT SEARCH CONFIGURATION zulip.english_us_search (COPY=pg_catalog.english);
"""
else:
fts_sql = """
CREATE TEXT SEARCH DICTIONARY english_us_hunspell
(template = ispell, DictFile = en_us, AffFile = en_us, StopWords = zulip_english);
CREATE TEXT SEARCH CONFIGURATION zulip.english_us_search (COPY=pg_catalog.english);
ALTER TEXT SEARCH CONFIGURATION zulip.english_us_search
ALTER MAPPING FOR asciiword, asciihword, hword_asciipart, word, hword, hword_part
WITH english_us_hunspell, english_stem;
"""
fts_sql += """
CREATE FUNCTION escape_html(text) RETURNS text IMMUTABLE LANGUAGE 'sql' AS $$
SELECT replace(replace(replace(replace(replace($1, '&', '&'), '<', '<'),
'>', '>'), '"', '"'), '''', ''');
$$ ;
CREATE TABLE fts_update_log (id SERIAL PRIMARY KEY, message_id INTEGER NOT NULL);
CREATE FUNCTION do_notify_fts_update_log() RETURNS trigger LANGUAGE plpgsql AS
$$ BEGIN NOTIFY fts_update_log; RETURN NEW; END $$;
CREATE TRIGGER fts_update_log_notify AFTER INSERT ON fts_update_log
FOR EACH STATEMENT EXECUTE PROCEDURE do_notify_fts_update_log();
CREATE FUNCTION append_to_fts_update_log() RETURNS trigger LANGUAGE plpgsql AS
$$ BEGIN INSERT INTO fts_update_log (message_id) VALUES (NEW.id); RETURN NEW; END $$;
CREATE TRIGGER zerver_message_update_search_tsvector_async
BEFORE INSERT OR UPDATE OF subject, rendered_content ON zerver_message
FOR EACH ROW EXECUTE PROCEDURE append_to_fts_update_log();
"""
operations = [
migrations.CreateModel(
name="UserProfile",
fields=[
(
"id",
models.AutoField(
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
),
),
("password", models.CharField(max_length=128, verbose_name="password")),
(
"last_login",
models.DateTimeField(
default=django.utils.timezone.now, verbose_name="last login"
),
),
(
"is_superuser",
models.BooleanField(
default=False,
help_text="Designates that this user has all permissions without explicitly assigning them.",
verbose_name="superuser status",
),
),
("email", models.EmailField(db_index=True, max_length=75, unique=True)),
("is_staff", models.BooleanField(default=False)),
("is_active", models.BooleanField(default=True)),
("is_bot", models.BooleanField(default=False)),
("date_joined", models.DateTimeField(default=django.utils.timezone.now)),
("is_mirror_dummy", models.BooleanField(default=False)),
("full_name", models.CharField(max_length=100)),
("short_name", models.CharField(max_length=100)),
("pointer", models.IntegerField()),
("last_pointer_updater", models.CharField(max_length=64)),
("api_key", models.CharField(max_length=32)),
("enable_stream_desktop_notifications", models.BooleanField(default=True)),
("enable_stream_sounds", models.BooleanField(default=True)),
("enable_desktop_notifications", models.BooleanField(default=True)),
("enable_sounds", models.BooleanField(default=True)),
("enable_offline_email_notifications", models.BooleanField(default=True)),
("enable_offline_push_notifications", models.BooleanField(default=True)),
("enable_digest_emails", models.BooleanField(default=True)),
("default_desktop_notifications", models.BooleanField(default=True)),
(
"last_reminder",
models.DateTimeField(default=django.utils.timezone.now, null=True),
),
("rate_limits", models.CharField(default="", max_length=100)),
("default_all_public_streams", models.BooleanField(default=False)),
("enter_sends", models.NullBooleanField(default=True)),
("autoscroll_forever", models.BooleanField(default=False)),
("twenty_four_hour_time", models.BooleanField(default=False)),
(
"avatar_source",
models.CharField(
choices=[
("G", "Hosted by Gravatar"),
("U", "Uploaded by user"),
("S", "System generated"),
],
default="G",
max_length=1,
),
),
(
"tutorial_status",
models.CharField(
choices=[("W", "Waiting"), ("S", "Started"), ("F", "Finished")],
default="W",
max_length=1,
),
),
("onboarding_steps", models.TextField(default="[]")),
("invites_granted", models.IntegerField(default=0)),
("invites_used", models.IntegerField(default=0)),
("alert_words", models.TextField(default="[]")),
("muted_topics", models.TextField(default="[]")),
(
"bot_owner",
models.ForeignKey(
null=True,
on_delete=django.db.models.deletion.SET_NULL,
to=settings.AUTH_USER_MODEL,
),
),
],
options={
"abstract": False,
},
),
migrations.CreateModel(
name="Client",
fields=[
(
"id",
models.AutoField(
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
),
),
("name", models.CharField(db_index=True, max_length=30, unique=True)),
],
),
migrations.CreateModel(
name="DefaultStream",
fields=[
(
"id",
models.AutoField(
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
),
),
],
),
migrations.CreateModel(
name="Huddle",
fields=[
(
"id",
models.AutoField(
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
),
),
("huddle_hash", models.CharField(db_index=True, max_length=40, unique=True)),
],
),
migrations.CreateModel(
name="Message",
fields=[
(
"id",
models.AutoField(
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
),
),
("subject", models.CharField(db_index=True, max_length=60)),
("content", models.TextField()),
("rendered_content", models.TextField(null=True)),
("rendered_content_version", models.IntegerField(null=True)),
("pub_date", models.DateTimeField(db_index=True, verbose_name="date published")),
("last_edit_time", models.DateTimeField(null=True)),
("edit_history", models.TextField(null=True)),
("has_attachment", models.BooleanField(db_index=True, default=False)),
("has_image", models.BooleanField(db_index=True, default=False)),
("has_link", models.BooleanField(db_index=True, default=False)),
],
),
migrations.CreateModel(
name="PreregistrationUser",
fields=[
(
"id",
models.AutoField(
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
),
),
("email", models.EmailField(max_length=75)),
("invited_at", models.DateTimeField(auto_now=True)),
("status", models.IntegerField(default=0)),
],
),
migrations.CreateModel(
name="PushDeviceToken",
fields=[
(
"id",
models.AutoField(
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
),
),
("kind", models.PositiveSmallIntegerField(choices=[(1, "apns"), (2, "gcm")])),
("token", models.CharField(max_length=4096, unique=True)),
(
"last_updated",
models.DateTimeField(auto_now=True, default=django.utils.timezone.now),
),
("ios_app_id", models.TextField(null=True)),
(
"user",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
),
),
],
),
migrations.CreateModel(
name="Realm",
fields=[
(
"id",
models.AutoField(
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
),
),
("domain", models.CharField(db_index=True, max_length=40, unique=True)),
("name", models.CharField(max_length=40, null=True)),
("restricted_to_domain", models.BooleanField(default=True)),
("invite_required", models.BooleanField(default=False)),
("invite_by_admins_only", models.BooleanField(default=False)),
("mandatory_topics", models.BooleanField(default=False)),
("show_digest_email", models.BooleanField(default=True)),
("name_changes_disabled", models.BooleanField(default=False)),
("date_created", models.DateTimeField(default=django.utils.timezone.now)),
("deactivated", models.BooleanField(default=False)),
],
options={
"permissions": (("administer", "Administer a realm"),),
},
),
migrations.CreateModel(
name="RealmAlias",
fields=[
(
"id",
models.AutoField(
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
),
),
("domain", models.CharField(db_index=True, max_length=80, unique=True)),
(
"realm",
models.ForeignKey(
null=True, on_delete=django.db.models.deletion.CASCADE, to="zerver.Realm"
),
),
],
),
migrations.CreateModel(
name="RealmEmoji",
fields=[
(
"id",
models.AutoField(
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
),
),
("name", models.TextField()),
("img_url", models.TextField()),
(
"realm",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="zerver.Realm"
),
),
],
),
migrations.CreateModel(
name="RealmFilter",
fields=[
(
"id",
models.AutoField(
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
),
),
("pattern", models.TextField()),
("url_format_string", models.TextField()),
(
"realm",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="zerver.Realm"
),
),
],
),
migrations.CreateModel(
name="Recipient",
fields=[
(
"id",
models.AutoField(
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
),
),
("type_id", models.IntegerField(db_index=True)),
("type", models.PositiveSmallIntegerField(db_index=True)),
],
),
migrations.CreateModel(
name="Referral",
fields=[
(
"id",
models.AutoField(
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
),
),
("email", models.EmailField(max_length=75)),
("timestamp", models.DateTimeField(auto_now_add=True)),
(
"user_profile",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
),
),
],
),
migrations.CreateModel(
name="ScheduledJob",
fields=[
(
"id",
models.AutoField(
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
),
),
("scheduled_timestamp", models.DateTimeField()),
("type", models.PositiveSmallIntegerField()),
("data", models.TextField()),
("filter_id", models.IntegerField(null=True)),
("filter_string", models.CharField(max_length=100)),
],
),
migrations.CreateModel(
name="Stream",
fields=[
(
"id",
models.AutoField(
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
),
),
("name", models.CharField(db_index=True, max_length=60)),
("invite_only", models.NullBooleanField(default=False)),
(
"email_token",
models.CharField(default=generate_email_token_for_stream, max_length=32),
),
("description", models.CharField(default="", max_length=1024)),
("date_created", models.DateTimeField(default=django.utils.timezone.now)),
("deactivated", models.BooleanField(default=False)),
(
"realm",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="zerver.Realm"
),
),
],
),
migrations.CreateModel(
name="Subscription",
fields=[
(
"id",
models.AutoField(
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
),
),
("active", models.BooleanField(default=True)),
("in_home_view", models.NullBooleanField(default=True)),
("color", models.CharField(default="#c2c2c2", max_length=10)),
("desktop_notifications", models.BooleanField(default=True)),
("audible_notifications", models.BooleanField(default=True)),
("notifications", models.BooleanField(default=False)),
(
"recipient",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="zerver.Recipient"
),
),
(
"user_profile",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
),
),
],
),
migrations.CreateModel(
name="UserActivity",
fields=[
(
"id",
models.AutoField(
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
),
),
("query", models.CharField(db_index=True, max_length=50)),
("count", models.IntegerField()),
("last_visit", models.DateTimeField(verbose_name="last visit")),
(
"client",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="zerver.Client"
),
),
(
"user_profile",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
),
),
],
),
migrations.CreateModel(
name="UserActivityInterval",
fields=[
(
"id",
models.AutoField(
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
),
),
("start", models.DateTimeField(db_index=True, verbose_name="start time")),
("end", models.DateTimeField(db_index=True, verbose_name="end time")),
(
"user_profile",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
),
),
],
),
migrations.CreateModel(
name="UserMessage",
fields=[
(
"id",
models.AutoField(
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
),
),
(
"flags",
bitfield.models.BitField(
[
"read",
"starred",
"collapsed",
"mentioned",
"wildcard_mentioned",
"summarize_in_home",
"summarize_in_stream",
"force_expand",
"force_collapse",
"has_alert_word",
"historical",
"is_me_message",
],
default=0,
),
),
(
"message",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="zerver.Message"
),
),
(
"user_profile",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
),
),
],
),
migrations.CreateModel(
name="UserPresence",
fields=[
(
"id",
models.AutoField(
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
),
),
("timestamp", models.DateTimeField(verbose_name="presence changed")),
("status", models.PositiveSmallIntegerField(default=1)),
(
"client",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="zerver.Client"
),
),
(
"user_profile",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
),
),
],
),
migrations.AlterUniqueTogether(
name="userpresence",
unique_together={("user_profile", "client")},
),
migrations.AlterUniqueTogether(
name="usermessage",
unique_together={("user_profile", "message")},
),
migrations.AlterUniqueTogether(
name="useractivity",
unique_together={("user_profile", "client", "query")},
),
migrations.AlterUniqueTogether(
name="subscription",
unique_together={("user_profile", "recipient")},
),
migrations.AlterUniqueTogether(
name="stream",
unique_together={("name", "realm")},
),
migrations.AlterUniqueTogether(
name="recipient",
unique_together={("type", "type_id")},
),
migrations.AlterUniqueTogether(
name="realmfilter",
unique_together={("realm", "pattern")},
),
migrations.AlterUniqueTogether(
name="realmemoji",
unique_together={("realm", "name")},
),
migrations.AddField(
model_name="realm",
name="notifications_stream",
field=models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.CASCADE,
related_name="+",
to="zerver.Stream",
),
),
migrations.AddField(
model_name="preregistrationuser",
name="realm",
field=models.ForeignKey(
null=True, on_delete=django.db.models.deletion.CASCADE, to="zerver.Realm"
),
),
migrations.AddField(
model_name="preregistrationuser",
name="referred_by",
field=models.ForeignKey(
null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
),
),
migrations.AddField(
model_name="preregistrationuser",
name="streams",
field=models.ManyToManyField(null=True, to="zerver.Stream"),
),
migrations.AddField(
model_name="message",
name="recipient",
field=models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="zerver.Recipient"
),
),
migrations.AddField(
model_name="message",
name="sender",
field=models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
),
),
migrations.AddField(
model_name="message",
name="sending_client",
field=models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="zerver.Client"
),
),
migrations.AddField(
model_name="defaultstream",
name="realm",
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="zerver.Realm"),
),
migrations.AddField(
model_name="defaultstream",
name="stream",
field=models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="zerver.Stream"
),
),
migrations.AlterUniqueTogether(
name="defaultstream",
unique_together={("realm", "stream")},
),
migrations.AddField(
model_name="userprofile",
name="default_events_register_stream",
field=models.ForeignKey(
null=True,
on_delete=django.db.models.deletion.CASCADE,
related_name="+",
to="zerver.Stream",
),
),
migrations.AddField(
model_name="userprofile",
name="default_sending_stream",
field=models.ForeignKey(
null=True,
on_delete=django.db.models.deletion.CASCADE,
related_name="+",
to="zerver.Stream",
),
),
migrations.AddField(
model_name="userprofile",
name="groups",
field=models.ManyToManyField(
blank=True,
help_text="The groups this user belongs to. A user will get all permissions granted to each of their groups.",
related_name="user_set",
related_query_name="user",
to="auth.Group",
verbose_name="groups",
),
),
migrations.AddField(
model_name="userprofile",
name="realm",
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="zerver.Realm"),
),
migrations.AddField(
model_name="userprofile",
name="user_permissions",
field=models.ManyToManyField(
blank=True,
help_text="Specific permissions for this user.",
related_name="user_set",
related_query_name="user",
to="auth.Permission",
verbose_name="user permissions",
),
),
migrations.AddField(
model_name="message",
name="search_tsvector",
field=SearchVectorField(null=True),
),
migrations.AddIndex(
model_name="message",
index=GinIndex(
"search_tsvector", fastupdate=False, name="zerver_message_search_tsvector"
),
),
migrations.RunSQL(
sql=fts_sql,
),
migrations.AlterModelManagers(
name="userprofile",
managers=[
("objects", django.contrib.auth.models.UserManager()),
],
),
migrations.AlterField(
model_name="preregistrationuser",
name="email",
field=models.EmailField(max_length=254),
),
migrations.AlterField(
model_name="preregistrationuser",
name="streams",
field=models.ManyToManyField(to="zerver.Stream"),
),
migrations.AlterField(
model_name="pushdevicetoken",
name="last_updated",
field=models.DateTimeField(auto_now=True),
),
migrations.AlterField(
model_name="referral",
name="email",
field=models.EmailField(max_length=254),
),
migrations.AlterField(
model_name="userprofile",
name="email",
field=models.EmailField(db_index=True, max_length=254, unique=True),
),
migrations.AlterField(
model_name="userprofile",
name="last_login",
field=models.DateTimeField(blank=True, null=True, verbose_name="last login"),
),
migrations.AddIndex(
model_name="message",
index=models.Index(Upper("subject"), name="upper_subject_idx"),
),
migrations.AddIndex(
model_name="stream",
index=models.Index(Upper("name"), name="upper_stream_name_idx"),
),
migrations.AddField(
model_name="userprofile",
name="left_side_userlist",
field=models.BooleanField(default=False),
),
migrations.AlterModelOptions(
name="realm",
options={
"permissions": (
("administer", "Administer a realm"),
("api_super_user", "Can send messages as other users for mirroring"),
)
},
),
migrations.AddIndex(
model_name="userprofile",
index=models.Index(Upper("email"), name="upper_userprofile_email_idx"),
),
migrations.AlterField(
model_name="userprofile",
name="is_active",
field=models.BooleanField(db_index=True, default=True),
),
migrations.AlterField(
model_name="userprofile",
name="is_bot",
field=models.BooleanField(db_index=True, default=False),
),
migrations.AddIndex(
model_name="preregistrationuser",
index=models.Index(Upper("email"), name="upper_preregistration_email_idx"),
),
migrations.AlterField(
model_name="userprofile",
name="enable_stream_desktop_notifications",
field=models.BooleanField(default=False),
),
migrations.AlterField(
model_name="userprofile",
name="enable_stream_sounds",
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name="userprofile",
name="is_api_super_user",
field=models.BooleanField(db_index=True, default=False),
),
migrations.AddField(
model_name="userprofile",
name="is_realm_admin",
field=models.BooleanField(db_index=True, default=False),
),
migrations.AlterField(
model_name="realmemoji",
name="img_url",
field=models.URLField(),
),
migrations.AlterField(
model_name="realmemoji",
name="name",
field=models.TextField(
validators=[
django.core.validators.MinLengthValidator(1),
django.core.validators.RegexValidator(regex="^[0-9a-zA-Z.\\-_]+(?<![.\\-_])$"),
]
),
),
migrations.AlterField(
model_name="realmemoji",
name="img_url",
field=models.URLField(max_length=1000),
),
migrations.CreateModel(
name="Attachment",
fields=[
(
"id",
models.AutoField(
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
),
),
("file_name", models.CharField(db_index=True, max_length=100)),
("path_id", models.TextField(db_index=True)),
(
"create_time",
models.DateTimeField(db_index=True, default=django.utils.timezone.now),
),
("messages", models.ManyToManyField(to="zerver.Message")),
(
"owner",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
),
),
("is_realm_public", models.BooleanField(default=False)),
],
),
migrations.AddField(
model_name="realm",
name="create_stream_by_admins_only",
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name="userprofile",
name="bot_type",
field=models.PositiveSmallIntegerField(db_index=True, null=True),
),
migrations.AlterField(
model_name="realmemoji",
name="name",
field=models.TextField(
validators=[
django.core.validators.MinLengthValidator(1),
django.core.validators.RegexValidator(
message="Invalid characters in emoji name",
regex="^[0-9a-zA-Z.\\-_]+(?<![.\\-_])$",
),
]
),
),
migrations.AddField(
model_name="preregistrationuser",
name="realm_creation",
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name="attachment",
name="realm",
field=models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.CASCADE,
to="zerver.Realm",
),
),
migrations.RunPython(
code=migrate_existing_attachment_data,
elidable=True,
),
migrations.AddField(
model_name="subscription",
name="pin_to_top",
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name="userprofile",
name="default_language",
field=models.CharField(default="en", max_length=50),
),
migrations.AddField(
model_name="realm",
name="allow_message_editing",
field=models.BooleanField(default=True),
),
migrations.AddField(
model_name="realm",
name="message_content_edit_limit_seconds",
field=models.IntegerField(default=600),
),
migrations.AddField(
model_name="realm",
name="default_language",
field=models.CharField(default="en", max_length=50),
),
migrations.AddField(
model_name="userprofile",
name="tos_version",
field=models.CharField(max_length=10, null=True),
),
]
| true | true |
f71c69a2524bdf3501491590fb98b0dba43692e4 | 3,863 | py | Python | img_filter/img_advanced_filter.py | Gretacyh/images-downloader-fliter | ffe070026a45c741013a575a6a985d97e28d6fd7 | [
"ICU",
"MIT"
] | null | null | null | img_filter/img_advanced_filter.py | Gretacyh/images-downloader-fliter | ffe070026a45c741013a575a6a985d97e28d6fd7 | [
"ICU",
"MIT"
] | null | null | null | img_filter/img_advanced_filter.py | Gretacyh/images-downloader-fliter | ffe070026a45c741013a575a6a985d97e28d6fd7 | [
"ICU",
"MIT"
] | null | null | null | import os
import re
import cv2
import umap
import torch
import torch.nn as nn
from torchvision import models
import torch.nn.functional as F
import numpy as np
from sklearn.manifold import TSNE
import matplotlib.pyplot as plt
def global_std_pool2d(x):
    """Global standard-deviation pooling over all spatial positions.

    Flattens the H*W grid of a (N, C, H, W) tensor and returns the per-channel
    standard deviation with shape (N, C, 1, 1).
    """
    batch, channels = x.size(0), x.size(1)
    flat = x.view(batch, channels, -1, 1)
    return torch.std(flat, dim=2, keepdim=True)
class ResNet50(torch.nn.Module):
    """Truncated ResNet-50 backbone used as a frozen feature extractor.

    forward() returns (mean, std): the global average pool and global
    standard-deviation pool of the final residual stage's activations.
    """

    def __init__(self):
        super(ResNet50, self).__init__()
        backbone = models.resnet50(pretrained=True)
        # Drop the avgpool + fc head; keep only the convolutional stages.
        self.features = nn.Sequential(*list(backbone.children())[:-2])
        # Freeze the backbone: it is used purely for feature extraction.
        for param in self.features.parameters():
            param.requires_grad = False
        # Prefer the GPU when one is available.
        self.device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
        self.to(self.device)

    def forward(self, x):
        # Run layers in order and stop after child index 7 (the res5 stage),
        # pooling its activations into mean and std feature vectors.
        for idx, layer in enumerate(self.features):
            x = layer(x)
            if idx == 7:
                features_mean = nn.functional.adaptive_avg_pool2d(x, 1)
                features_std = global_std_pool2d(x)
                return features_mean, features_std
def get_img_feature(model, img_path):
    """Load an image from disk and return the model's (mean, std) features.

    Reads the file as BGR with OpenCV, moves it to the model's device and
    reorders it from HWC to a 1xCxHxW float batch before the forward pass.
    """
    image = cv2.imread(img_path, flags=cv2.IMREAD_COLOR)
    tensor = torch.from_numpy(image).to(model.device).float()
    tensor = torch.unsqueeze(tensor, 0)  # add a batch dimension of 1
    tensor = tensor.permute(0, 3, 1, 2)  # NHWC -> NCHW
    return model(tensor)
def do_umap(features, channel=2, random_state=None):
    """Reduce features to `channel` dimensions with UMAP.

    Returns (embedding, fitted_umap_model).
    """
    reducer = umap.UMAP(n_components=channel, random_state=random_state)
    embedding = reducer.fit_transform(features)
    return embedding, reducer
def do_tsne(data, random_state=0):
    """Reduce data to 2-D with t-SNE (PCA initialization).

    Returns (embedding, fitted_tsne_model).
    """
    reducer = TSNE(n_components=2, init='pca', random_state=random_state)
    embedding = reducer.fit_transform(data)
    return embedding, reducer
def plot_embedding(data, type=None, text=None, title="", colors=None):
    """Plot a 2-D embedding, colouring points by label.

    Args:
        data: (n, 2) array of embedded coordinates.
        type: optional per-point class index used to look up ``colors``.
        text: optional per-point labels; when given, labels are drawn as text
            instead of scatter dots.
        title: figure title.
        colors: optional sequence of colours indexed by ``type``.

    Returns:
        The matplotlib figure.
    """
    if type is None:
        type = np.zeros_like(data[:, 0])
    # Normalize coordinates into the unit square for display.
    x_min, x_max = np.min(data, 0), np.max(data, 0)
    data = (data - x_min) / (x_max - x_min)

    fig = plt.figure()
    plt.subplot(111)
    for i in range(data.shape[0]):
        if colors is not None:
            color = colors[type[i]]
        elif text is not None:
            color = plt.cm.Set1((text[i] + 1) / 10.)
        else:
            # BUG FIX: the original dereferenced text[i] here even when
            # text is None, crashing; fall back to the type label instead.
            color = plt.cm.Set1((type[i] + 1) / 10.)
        if text is not None:
            plt.text(data[i, 0], data[i, 1], str(text[i]), color=color,
                     fontdict={'weight': 'bold', 'size': 8})
        else:
            plt.scatter(data[i, 0], data[i, 1], s=3, color=color)
    plt.xticks([])
    plt.yticks([])
    plt.title(title)
    plt.show()
    return fig
if __name__ == '__main__':
    # Directory of images to analyse; presumably populated beforehand — TODO confirm.
    root_dir = "/root/yanghan/cat"
    # Regex alternation of accepted file extensions.
    file_suffix = "jpeg|jpg|png"
    # NOTE(review): remove_dir is created but never used below.
    remove_dir = root_dir + "/remove"
    if not os.path.exists(remove_dir):
        os.makedirs(remove_dir)
    # Initialize the feature-extraction model.
    model = ResNet50()
    # Extract per-image features.
    feature_list = []
    name_list = []
    for img_name in os.listdir(root_dir)[:]:
        # Filter out files whose name does not match the accepted extensions.
        if re.search(file_suffix, img_name) is None:
            continue
        img_path = root_dir + "/" + img_name
        mean, std = get_img_feature(model, img_path)
        mean = mean.to('cpu').numpy().reshape(-1)
        std = std.to('cpu').numpy().reshape(-1)
        # Concatenate mean- and std-pooled features into one vector.
        feature = np.concatenate((mean, std), 0)
        print(feature.shape)
        feature_list.append(feature)
        # Characters 7-9 of the filename are used as the label — TODO confirm naming scheme.
        name_list.append(img_name[7:10])
    # Visualize the features with both t-SNE and UMAP projections.
    feature_list = np.array(feature_list)
    name_list = np.array(name_list)
    feature_list_tsne, _ = do_tsne(feature_list)
    plot_embedding(feature_list_tsne, title="tsne", text=name_list)
    feature_list_umap, _ = do_umap(feature_list)
    plot_embedding(feature_list_umap, title="umap", text=name_list)
    cv2.waitKey()
| 30.904 | 102 | 0.619467 | import os
import re
import cv2
import umap
import torch
import torch.nn as nn
from torchvision import models
import torch.nn.functional as F
import numpy as np
from sklearn.manifold import TSNE
import matplotlib.pyplot as plt
def global_std_pool2d(x):
return torch.std(x.view(x.size()[0], x.size()[1], -1, 1), dim=2, keepdim=True)
class ResNet50(torch.nn.Module):
def __init__(self):
super(ResNet50, self).__init__()
self.features = nn.Sequential(*list(models.resnet50(pretrained=True).children())[:-2])
for p in self.features.parameters():
p.requires_grad = False
if torch.cuda.is_available():
self.device = torch.device("cuda")
else:
self.device = torch.device("cpu")
self.to(self.device)
def forward(self, x):
for ii, model in enumerate(self.features):
x = model(x)
if ii == 7:
features_mean = nn.functional.adaptive_avg_pool2d(x, 1)
features_std = global_std_pool2d(x)
return features_mean, features_std
def get_img_feature(model, img_path):
img = cv2.imread(img_path, flags=cv2.IMREAD_COLOR)
img = torch.from_numpy(img)
img = img.to(model.device).float()
img = torch.unsqueeze(img, 0)
img = img.permute(0, 3, 1, 2)
feature = model(img)
return feature
def do_umap(features, channel=2, random_state=None):
model = umap.UMAP(n_components=channel, random_state=random_state)
return model.fit_transform(features), model
def do_tsne(data, random_state=0):
tsne = TSNE(n_components=2, init='pca', random_state=random_state)
return tsne.fit_transform(data), tsne
def plot_embedding(data, type=None, text=None, title="", colors=None):
if type is None:
type = np.zeros_like(data[:, 0])
x_min, x_max = np.min(data, 0), np.max(data, 0)
data = (data - x_min) / (x_max - x_min)
fig = plt.figure()
ax = plt.subplot(111)
for i in range(data.shape[0]):
if text is not None:
plt.text(data[i, 0], data[i, 1], str(text[i]),
color=plt.cm.Set1((text[i] + 1) / 10.) if colors is None else colors[type[i]],
fontdict={'weight': 'bold', 'size': 8})
else:
plt.scatter(data[i, 0], data[i, 1], s=3,
color=plt.cm.Set1((text[i] + 1) / 10.) if colors is None else colors[type[i]])
plt.xticks([])
plt.yticks([])
plt.title(title)
plt.show()
return fig
if __name__ == '__main__':
root_dir = "/root/yanghan/cat"
file_suffix = "jpeg|jpg|png"
remove_dir = root_dir + "/remove"
if not os.path.exists(remove_dir):
os.makedirs(remove_dir)
model = ResNet50()
feature_list = []
name_list = []
for img_name in os.listdir(root_dir)[:]:
if re.search(file_suffix, img_name) is None:
continue
img_path = root_dir + "/" + img_name
mean, std = get_img_feature(model, img_path)
mean = mean.to('cpu').numpy().reshape(-1)
std = std.to('cpu').numpy().reshape(-1)
feature = np.concatenate((mean, std), 0)
print(feature.shape)
feature_list.append(feature)
name_list.append(img_name[7:10])
feature_list = np.array(feature_list)
name_list = np.array(name_list)
feature_list_tsne, _ = do_tsne(feature_list)
plot_embedding(feature_list_tsne, title="tsne", text=name_list)
feature_list_umap, _ = do_umap(feature_list)
plot_embedding(feature_list_umap, title="umap", text=name_list)
cv2.waitKey()
| true | true |
f71c6a899f50afd65c97f6d10559da3bf89ef5b8 | 162 | py | Python | querybuilder_rules/compat.py | Apkawa/django-querybuilder-rules | 430488f6e50be7ac74b5b757c9bef0d09e49301b | [
"MIT"
] | null | null | null | querybuilder_rules/compat.py | Apkawa/django-querybuilder-rules | 430488f6e50be7ac74b5b757c9bef0d09e49301b | [
"MIT"
] | null | null | null | querybuilder_rules/compat.py | Apkawa/django-querybuilder-rules | 430488f6e50be7ac74b5b757c9bef0d09e49301b | [
"MIT"
] | null | null | null | try:
from django.utils.encoding import smart_text, smart_str
except ImportError:
from django.utils.encoding import smart_unicode as smart_text, smart_str
| 32.4 | 76 | 0.808642 | try:
from django.utils.encoding import smart_text, smart_str
except ImportError:
from django.utils.encoding import smart_unicode as smart_text, smart_str
| true | true |
f71c6b46358edba34d7bcd2375000838bace4301 | 2,623 | py | Python | acme/tf/networks/__init__.py | RaoulDrake/acme | 16ad2f284ad7c038081454a9b820d8f424b3ad1f | [
"Apache-2.0"
] | 1 | 2021-06-17T16:11:43.000Z | 2021-06-17T16:11:43.000Z | acme/tf/networks/__init__.py | RaoulDrake/acme | 16ad2f284ad7c038081454a9b820d8f424b3ad1f | [
"Apache-2.0"
] | null | null | null | acme/tf/networks/__init__.py | RaoulDrake/acme | 16ad2f284ad7c038081454a9b820d8f424b3ad1f | [
"Apache-2.0"
] | null | null | null | # python3
# Copyright 2018 DeepMind Technologies Limited. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Useful network definitions."""
from acme.tf.networks.atari import AtariTorso
from acme.tf.networks.atari import DeepIMPALAAtariNetwork
from acme.tf.networks.atari import DQNAtariNetwork
from acme.tf.networks.atari import IMPALAAtariNetwork
from acme.tf.networks.atari import R2D2AtariNetwork
from acme.tf.networks.base import DistributionalModule
from acme.tf.networks.base import Module
from acme.tf.networks.base import RNNCore
from acme.tf.networks.continuous import LayerNormAndResidualMLP
from acme.tf.networks.continuous import LayerNormMLP
from acme.tf.networks.continuous import NearZeroInitializedLinear
from acme.tf.networks.discrete import DiscreteFilteredQNetwork
from acme.tf.networks.distributional import ApproximateMode
from acme.tf.networks.distributional import DiscreteValuedHead
from acme.tf.networks.distributional import MultivariateGaussianMixture
from acme.tf.networks.distributional import MultivariateNormalDiagHead
from acme.tf.networks.distributional import UnivariateGaussianMixture
from acme.tf.networks.distributions import DiscreteValuedDistribution
from acme.tf.networks.duelling import DuellingMLP
from acme.tf.networks.multihead import Multihead
from acme.tf.networks.multiplexers import CriticMultiplexer
from acme.tf.networks.noise import ClippedGaussian
from acme.tf.networks.policy_value import PolicyValueHead
from acme.tf.networks.recurrence import CriticDeepRNN
from acme.tf.networks.recurrence import RecurrentExpQWeightedPolicy
from acme.tf.networks.rescaling import ClipToSpec
from acme.tf.networks.rescaling import RescaleToSpec
from acme.tf.networks.rescaling import TanhToSpec
from acme.tf.networks.stochastic import ExpQWeightedPolicy
from acme.tf.networks.stochastic import StochasticMeanHead
from acme.tf.networks.stochastic import StochasticModeHead
from acme.tf.networks.stochastic import StochasticSamplingHead
from acme.tf.networks.vision import ResNetTorso
# For backwards compatibility.
GaussianMixtureHead = UnivariateGaussianMixture
| 48.574074 | 74 | 0.847884 |
from acme.tf.networks.atari import AtariTorso
from acme.tf.networks.atari import DeepIMPALAAtariNetwork
from acme.tf.networks.atari import DQNAtariNetwork
from acme.tf.networks.atari import IMPALAAtariNetwork
from acme.tf.networks.atari import R2D2AtariNetwork
from acme.tf.networks.base import DistributionalModule
from acme.tf.networks.base import Module
from acme.tf.networks.base import RNNCore
from acme.tf.networks.continuous import LayerNormAndResidualMLP
from acme.tf.networks.continuous import LayerNormMLP
from acme.tf.networks.continuous import NearZeroInitializedLinear
from acme.tf.networks.discrete import DiscreteFilteredQNetwork
from acme.tf.networks.distributional import ApproximateMode
from acme.tf.networks.distributional import DiscreteValuedHead
from acme.tf.networks.distributional import MultivariateGaussianMixture
from acme.tf.networks.distributional import MultivariateNormalDiagHead
from acme.tf.networks.distributional import UnivariateGaussianMixture
from acme.tf.networks.distributions import DiscreteValuedDistribution
from acme.tf.networks.duelling import DuellingMLP
from acme.tf.networks.multihead import Multihead
from acme.tf.networks.multiplexers import CriticMultiplexer
from acme.tf.networks.noise import ClippedGaussian
from acme.tf.networks.policy_value import PolicyValueHead
from acme.tf.networks.recurrence import CriticDeepRNN
from acme.tf.networks.recurrence import RecurrentExpQWeightedPolicy
from acme.tf.networks.rescaling import ClipToSpec
from acme.tf.networks.rescaling import RescaleToSpec
from acme.tf.networks.rescaling import TanhToSpec
from acme.tf.networks.stochastic import ExpQWeightedPolicy
from acme.tf.networks.stochastic import StochasticMeanHead
from acme.tf.networks.stochastic import StochasticModeHead
from acme.tf.networks.stochastic import StochasticSamplingHead
from acme.tf.networks.vision import ResNetTorso
GaussianMixtureHead = UnivariateGaussianMixture
| true | true |
f71c6bd2042d817dbc78da43dc684306860b7f69 | 1,029 | py | Python | pyquilted/pdf_printer.py | cocoroutine/pyquilted | dd8644043deec17608e00f46e3ac4562b8879603 | [
"MIT"
] | 1 | 2019-02-21T20:10:37.000Z | 2019-02-21T20:10:37.000Z | pyquilted/pdf_printer.py | cocoroutine/pyquilted | dd8644043deec17608e00f46e3ac4562b8879603 | [
"MIT"
] | null | null | null | pyquilted/pdf_printer.py | cocoroutine/pyquilted | dd8644043deec17608e00f46e3ac4562b8879603 | [
"MIT"
] | null | null | null | import pdfkit
class PdfPrinter:
"""A wrapper class around pdfkit functionality to print html to pdfs"""
@staticmethod
def from_file(infile, outfile):
options = {
"page-size": "Letter",
"dpi": "96",
"margin-top": "1in",
"margin-right": "1.25in",
"margin-bottom": "1in",
"margin-left": "1.25in",
"disable-smart-shrinking": None,
"zoom": 1,
}
pdfkit.from_file(infile, outfile, options=options)
@staticmethod
def from_string(document, outfile):
options = {
"page-size": "Letter",
"dpi": "96",
"margin-top": "1in",
"margin-right": "1.25in",
"margin-bottom": "1in",
"margin-left": "1.25in",
"disable-smart-shrinking": None,
"zoom": 1,
}
pdfkit.from_string(document, outfile, options=options)
| 31.181818 | 75 | 0.462585 | import pdfkit
class PdfPrinter:
@staticmethod
def from_file(infile, outfile):
options = {
"page-size": "Letter",
"dpi": "96",
"margin-top": "1in",
"margin-right": "1.25in",
"margin-bottom": "1in",
"margin-left": "1.25in",
"disable-smart-shrinking": None,
"zoom": 1,
}
pdfkit.from_file(infile, outfile, options=options)
@staticmethod
def from_string(document, outfile):
options = {
"page-size": "Letter",
"dpi": "96",
"margin-top": "1in",
"margin-right": "1.25in",
"margin-bottom": "1in",
"margin-left": "1.25in",
"disable-smart-shrinking": None,
"zoom": 1,
}
pdfkit.from_string(document, outfile, options=options)
| true | true |
f71c6bda42c3ee94832851be22cdd754e1af0b13 | 1,164 | py | Python | example_app/app.py | quanpower/flask_jsondash | 274c41bcbc754fd217b7dc4679c377bac912b88d | [
"MIT"
] | 1 | 2017-09-01T16:08:49.000Z | 2017-09-01T16:08:49.000Z | example_app/app.py | quanpower/flask_jsondash | 274c41bcbc754fd217b7dc4679c377bac912b88d | [
"MIT"
] | null | null | null | example_app/app.py | quanpower/flask_jsondash | 274c41bcbc754fd217b7dc4679c377bac912b88d | [
"MIT"
] | null | null | null | """This is an example app, demonstrating usage."""
import os
from flask import Flask
from flask_jsondash.charts_builder import charts
app = Flask(__name__)
app.config['SECRET_KEY'] = 'NOTSECURELOL'
app.config.update(
JSONDASH_FILTERUSERS=False,
JSONDASH_GLOBALDASH=True,
JSONDASH_GLOBAL_USER='global',
)
app.debug = True
app.register_blueprint(charts)
def _can_edit_global():
return True
def _can_delete():
return True
def _can_clone():
return True
def _get_username():
return 'anonymous'
# Config examples.
app.config['JSONDASH'] = dict(
metadata=dict(
created_by=_get_username,
username=_get_username,
),
static=dict(
js_path='js/vendor/',
css_path='css/vendor/',
),
auth=dict(
edit_global=_can_edit_global,
clone=_can_clone,
delete=_can_delete,
)
)
@app.route('/', methods=['GET'])
def index():
"""Sample index."""
return '<a href="/charts">Visit the charts blueprint.</a>'
if __name__ == '__main__':
PORT = int(os.getenv('PORT', 7777))
HOST = os.getenv('HOST', '0.0.0.0')
app.run(debug=True, host=HOST, port=PORT)
| 18.1875 | 62 | 0.652921 |
import os
from flask import Flask
from flask_jsondash.charts_builder import charts
app = Flask(__name__)
app.config['SECRET_KEY'] = 'NOTSECURELOL'
app.config.update(
JSONDASH_FILTERUSERS=False,
JSONDASH_GLOBALDASH=True,
JSONDASH_GLOBAL_USER='global',
)
app.debug = True
app.register_blueprint(charts)
def _can_edit_global():
return True
def _can_delete():
return True
def _can_clone():
return True
def _get_username():
return 'anonymous'
app.config['JSONDASH'] = dict(
metadata=dict(
created_by=_get_username,
username=_get_username,
),
static=dict(
js_path='js/vendor/',
css_path='css/vendor/',
),
auth=dict(
edit_global=_can_edit_global,
clone=_can_clone,
delete=_can_delete,
)
)
@app.route('/', methods=['GET'])
def index():
return '<a href="/charts">Visit the charts blueprint.</a>'
if __name__ == '__main__':
PORT = int(os.getenv('PORT', 7777))
HOST = os.getenv('HOST', '0.0.0.0')
app.run(debug=True, host=HOST, port=PORT)
| true | true |
f71c6d6149e0b6acd0b94e3af246e4aa23a4b08d | 10,495 | py | Python | grr/client/grr_response_client/client_actions/admin.py | tsehori/grr | 048506f22f74642bfe61749069a45ddf496fdab3 | [
"Apache-2.0"
] | 1 | 2021-07-01T01:43:06.000Z | 2021-07-01T01:43:06.000Z | grr/client/grr_response_client/client_actions/admin.py | tsehori/grr | 048506f22f74642bfe61749069a45ddf496fdab3 | [
"Apache-2.0"
] | 44 | 2021-05-14T22:49:24.000Z | 2022-03-13T21:54:02.000Z | grr/client/grr_response_client/client_actions/admin.py | tsehori/grr | 048506f22f74642bfe61749069a45ddf496fdab3 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# Lint as: python3
"""Client actions related to administrating the client and its configuration."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import logging
import os
import platform
import socket
import traceback
import cryptography
from cryptography.hazmat.backends import openssl
import pkg_resources
import psutil
import pytsk3
import yara
from grr_response_client import actions
from grr_response_client import communicator
from grr_response_client.client_actions import tempfiles
from grr_response_client.client_actions import timeline
from grr_response_core import config
from grr_response_core.lib import config_lib
from grr_response_core.lib import queues
from grr_response_core.lib import rdfvalue
from grr_response_core.lib import utils
from grr_response_core.lib.rdfvalues import client as rdf_client
from grr_response_core.lib.rdfvalues import client_action as rdf_client_action
from grr_response_core.lib.rdfvalues import client_stats as rdf_client_stats
from grr_response_core.lib.rdfvalues import flows as rdf_flows
from grr_response_core.lib.rdfvalues import protodict as rdf_protodict
class Echo(actions.ActionPlugin):
"""Returns a message to the server."""
in_rdfvalue = rdf_client_action.EchoRequest
out_rdfvalues = [rdf_client_action.EchoRequest]
def Run(self, args):
self.SendReply(args)
def GetHostnameFromClient(args):
del args # Unused.
yield rdf_protodict.DataBlob(string=socket.gethostname())
class GetHostname(actions.ActionPlugin):
"""Retrieves the host name of the client."""
out_rdfvalues = [rdf_protodict.DataBlob]
def Run(self, args):
for res in GetHostnameFromClient(args):
self.SendReply(res)
class GetPlatformInfo(actions.ActionPlugin):
"""Retrieves platform information."""
out_rdfvalues = [rdf_client.Uname]
def Run(self, unused_args):
"""Populate platform information into a Uname response."""
self.SendReply(rdf_client.Uname.FromCurrentSystem())
class Kill(actions.ActionPlugin):
"""A client action for terminating (killing) the client.
Used for testing process respawn.
"""
out_rdfvalues = [rdf_flows.GrrMessage]
def Run(self, unused_arg):
"""Run the kill."""
# Send a message back to the service to say that we are about to shutdown.
reply = rdf_flows.GrrStatus(status=rdf_flows.GrrStatus.ReturnedStatus.OK)
# Queue up the response message, jump the queue.
self.SendReply(reply, message_type=rdf_flows.GrrMessage.Type.STATUS)
# Give the http thread some time to send the reply.
self.grr_worker.Sleep(10)
# Die ourselves.
logging.info("Dying on request.")
os._exit(242) # pylint: disable=protected-access
class GetConfiguration(actions.ActionPlugin):
"""Retrieves the running configuration parameters."""
in_rdfvalue = None
out_rdfvalues = [rdf_protodict.Dict]
BLOCKED_PARAMETERS = ["Client.private_key"]
def Run(self, unused_arg):
"""Retrieve the configuration except for the blocked parameters."""
out = self.out_rdfvalues[0]()
for descriptor in config.CONFIG.type_infos:
if descriptor.name in self.BLOCKED_PARAMETERS:
value = "[Redacted]"
else:
try:
value = config.CONFIG.Get(descriptor.name, default=None)
except (config_lib.Error, KeyError, AttributeError, ValueError) as e:
logging.info("Config reading error: %s", e)
continue
if value is not None:
out[descriptor.name] = value
self.SendReply(out)
class GetLibraryVersions(actions.ActionPlugin):
"""Retrieves version information for installed libraries."""
in_rdfvalue = None
out_rdfvalues = [rdf_protodict.Dict]
def GetSSLVersion(self):
return openssl.backend.openssl_version_text()
def GetCryptographyVersion(self):
return cryptography.__version__
def GetPSUtilVersion(self):
return ".".join(map(utils.SmartUnicode, psutil.version_info))
def GetProtoVersion(self):
return pkg_resources.get_distribution("protobuf").version
def GetTSKVersion(self):
return pytsk3.TSK_VERSION_STR
def GetPyTSKVersion(self):
return pytsk3.get_version()
def GetYaraVersion(self):
return yara.YARA_VERSION
library_map = {
"pytsk": GetPyTSKVersion,
"TSK": GetTSKVersion,
"cryptography": GetCryptographyVersion,
"SSL": GetSSLVersion,
"psutil": GetPSUtilVersion,
"yara": GetYaraVersion,
}
error_str = "Unable to determine library version: %s"
def Run(self, unused_arg):
result = self.out_rdfvalues[0]()
for lib, f in self.library_map.items():
try:
result[lib] = f(self)
except Exception: # pylint: disable=broad-except
result[lib] = self.error_str % traceback.format_exc()
self.SendReply(result)
class UpdateConfiguration(actions.ActionPlugin):
"""Updates configuration parameters on the client."""
in_rdfvalue = rdf_protodict.Dict
UPDATABLE_FIELDS = {"Client.foreman_check_frequency",
"Client.server_urls",
"Client.max_post_size",
"Client.max_out_queue",
"Client.poll_min",
"Client.poll_max",
"Client.rss_max"} # pyformat: disable
def _UpdateConfig(self, filtered_arg, config_obj):
for field, value in filtered_arg.items():
config_obj.Set(field, value)
try:
config_obj.Write()
except (IOError, OSError):
pass
def Run(self, arg):
"""Does the actual work."""
try:
if self.grr_worker.client.FleetspeakEnabled():
raise ValueError("Not supported on Fleetspeak enabled clients.")
except AttributeError:
pass
smart_arg = {str(field): value for field, value in arg.items()}
disallowed_fields = [
field for field in smart_arg
if field not in UpdateConfiguration.UPDATABLE_FIELDS
]
if disallowed_fields:
raise ValueError("Received an update request for restricted field(s) %s."
% ",".join(disallowed_fields))
if platform.system() != "Windows":
# Check config validity before really applying the changes. This isn't
# implemented for our Windows clients though, whose configs are stored in
# the registry, as opposed to in the filesystem.
canary_config = config.CONFIG.CopyConfig()
# Prepare a temporary file we'll write changes to.
with tempfiles.CreateGRRTempFile(mode="w+") as temp_fd:
temp_filename = temp_fd.name
# Write canary_config changes to temp_filename.
canary_config.SetWriteBack(temp_filename)
self._UpdateConfig(smart_arg, canary_config)
try:
# Assert temp_filename is usable by loading it.
canary_config.SetWriteBack(temp_filename)
# Wide exception handling passed here from config_lib.py...
except Exception: # pylint: disable=broad-except
logging.warning("Updated config file %s is not usable.", temp_filename)
raise
# If temp_filename works, remove it (if not, it's useful for debugging).
os.unlink(temp_filename)
# The changes seem to work, so push them to the real config.
self._UpdateConfig(smart_arg, config.CONFIG)
def GetClientInformation() -> rdf_client.ClientInformation:
return rdf_client.ClientInformation(
client_name=config.CONFIG["Client.name"],
client_binary_name=psutil.Process().name(),
client_description=config.CONFIG["Client.description"],
client_version=int(config.CONFIG["Source.version_numeric"]),
build_time=config.CONFIG["Client.build_time"],
labels=config.CONFIG.Get("Client.labels", default=None),
timeline_btime_support=timeline.BTIME_SUPPORT)
class GetClientInfo(actions.ActionPlugin):
"""Obtains information about the GRR client installed."""
out_rdfvalues = [rdf_client.ClientInformation]
def Run(self, unused_args):
self.SendReply(GetClientInformation())
class GetClientStats(actions.ActionPlugin):
"""This retrieves some stats about the GRR process."""
in_rdfvalue = rdf_client_action.GetClientStatsRequest
out_rdfvalues = [rdf_client_stats.ClientStats]
def Run(self, arg):
"""Returns the client stats."""
if arg is None:
arg = rdf_client_action.GetClientStatsRequest()
proc = psutil.Process(os.getpid())
meminfo = proc.memory_info()
boot_time = rdfvalue.RDFDatetime.FromSecondsSinceEpoch(psutil.boot_time())
create_time = rdfvalue.RDFDatetime.FromSecondsSinceEpoch(proc.create_time())
response = rdf_client_stats.ClientStats(
RSS_size=meminfo.rss,
VMS_size=meminfo.vms,
memory_percent=proc.memory_percent(),
bytes_received=communicator.GRR_CLIENT_RECEIVED_BYTES.GetValue(),
bytes_sent=communicator.GRR_CLIENT_SENT_BYTES.GetValue(),
create_time=create_time,
boot_time=boot_time)
response.cpu_samples = self.grr_worker.stats_collector.CpuSamplesBetween(
start_time=arg.start_time, end_time=arg.end_time)
response.io_samples = self.grr_worker.stats_collector.IOSamplesBetween(
start_time=arg.start_time, end_time=arg.end_time)
self.Send(response)
def Send(self, response):
self.SendReply(response)
class GetClientStatsAuto(GetClientStats):
"""This class is used to send the reply to a well known flow on the server."""
def Send(self, response):
self.grr_worker.SendReply(
rdf_client_stats.ClientStats.Downsampled(response),
session_id=rdfvalue.SessionID(queue=queues.STATS, flow_name="Stats"),
response_id=0,
request_id=0,
message_type=rdf_flows.GrrMessage.Type.MESSAGE,
require_fastpoll=False)
class SendStartupInfo(actions.ActionPlugin):
in_rdfvalue = None
out_rdfvalues = [rdf_client.StartupInfo]
well_known_session_id = rdfvalue.SessionID(flow_name="Startup")
def Run(self, unused_arg, ttl=None):
"""Returns the startup information."""
logging.debug("Sending startup information.")
boot_time = rdfvalue.RDFDatetime.FromSecondsSinceEpoch(psutil.boot_time())
response = rdf_client.StartupInfo(
boot_time=boot_time, client_info=GetClientInformation())
self.grr_worker.SendReply(
response,
session_id=self.well_known_session_id,
response_id=0,
request_id=0,
message_type=rdf_flows.GrrMessage.Type.MESSAGE,
require_fastpoll=False,
ttl=ttl)
| 32.292308 | 80 | 0.723106 |
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import logging
import os
import platform
import socket
import traceback
import cryptography
from cryptography.hazmat.backends import openssl
import pkg_resources
import psutil
import pytsk3
import yara
from grr_response_client import actions
from grr_response_client import communicator
from grr_response_client.client_actions import tempfiles
from grr_response_client.client_actions import timeline
from grr_response_core import config
from grr_response_core.lib import config_lib
from grr_response_core.lib import queues
from grr_response_core.lib import rdfvalue
from grr_response_core.lib import utils
from grr_response_core.lib.rdfvalues import client as rdf_client
from grr_response_core.lib.rdfvalues import client_action as rdf_client_action
from grr_response_core.lib.rdfvalues import client_stats as rdf_client_stats
from grr_response_core.lib.rdfvalues import flows as rdf_flows
from grr_response_core.lib.rdfvalues import protodict as rdf_protodict
class Echo(actions.ActionPlugin):
in_rdfvalue = rdf_client_action.EchoRequest
out_rdfvalues = [rdf_client_action.EchoRequest]
def Run(self, args):
self.SendReply(args)
def GetHostnameFromClient(args):
del args
yield rdf_protodict.DataBlob(string=socket.gethostname())
class GetHostname(actions.ActionPlugin):
out_rdfvalues = [rdf_protodict.DataBlob]
def Run(self, args):
for res in GetHostnameFromClient(args):
self.SendReply(res)
class GetPlatformInfo(actions.ActionPlugin):
out_rdfvalues = [rdf_client.Uname]
def Run(self, unused_args):
self.SendReply(rdf_client.Uname.FromCurrentSystem())
class Kill(actions.ActionPlugin):
out_rdfvalues = [rdf_flows.GrrMessage]
def Run(self, unused_arg):
reply = rdf_flows.GrrStatus(status=rdf_flows.GrrStatus.ReturnedStatus.OK)
self.SendReply(reply, message_type=rdf_flows.GrrMessage.Type.STATUS)
self.grr_worker.Sleep(10)
logging.info("Dying on request.")
os._exit(242)
class GetConfiguration(actions.ActionPlugin):
in_rdfvalue = None
out_rdfvalues = [rdf_protodict.Dict]
BLOCKED_PARAMETERS = ["Client.private_key"]
def Run(self, unused_arg):
out = self.out_rdfvalues[0]()
for descriptor in config.CONFIG.type_infos:
if descriptor.name in self.BLOCKED_PARAMETERS:
value = "[Redacted]"
else:
try:
value = config.CONFIG.Get(descriptor.name, default=None)
except (config_lib.Error, KeyError, AttributeError, ValueError) as e:
logging.info("Config reading error: %s", e)
continue
if value is not None:
out[descriptor.name] = value
self.SendReply(out)
class GetLibraryVersions(actions.ActionPlugin):
in_rdfvalue = None
out_rdfvalues = [rdf_protodict.Dict]
def GetSSLVersion(self):
return openssl.backend.openssl_version_text()
def GetCryptographyVersion(self):
return cryptography.__version__
def GetPSUtilVersion(self):
return ".".join(map(utils.SmartUnicode, psutil.version_info))
def GetProtoVersion(self):
return pkg_resources.get_distribution("protobuf").version
def GetTSKVersion(self):
return pytsk3.TSK_VERSION_STR
def GetPyTSKVersion(self):
return pytsk3.get_version()
def GetYaraVersion(self):
return yara.YARA_VERSION
library_map = {
"pytsk": GetPyTSKVersion,
"TSK": GetTSKVersion,
"cryptography": GetCryptographyVersion,
"SSL": GetSSLVersion,
"psutil": GetPSUtilVersion,
"yara": GetYaraVersion,
}
error_str = "Unable to determine library version: %s"
def Run(self, unused_arg):
result = self.out_rdfvalues[0]()
for lib, f in self.library_map.items():
try:
result[lib] = f(self)
except Exception:
result[lib] = self.error_str % traceback.format_exc()
self.SendReply(result)
class UpdateConfiguration(actions.ActionPlugin):
in_rdfvalue = rdf_protodict.Dict
UPDATABLE_FIELDS = {"Client.foreman_check_frequency",
"Client.server_urls",
"Client.max_post_size",
"Client.max_out_queue",
"Client.poll_min",
"Client.poll_max",
"Client.rss_max"}
def _UpdateConfig(self, filtered_arg, config_obj):
for field, value in filtered_arg.items():
config_obj.Set(field, value)
try:
config_obj.Write()
except (IOError, OSError):
pass
def Run(self, arg):
try:
if self.grr_worker.client.FleetspeakEnabled():
raise ValueError("Not supported on Fleetspeak enabled clients.")
except AttributeError:
pass
smart_arg = {str(field): value for field, value in arg.items()}
disallowed_fields = [
field for field in smart_arg
if field not in UpdateConfiguration.UPDATABLE_FIELDS
]
if disallowed_fields:
raise ValueError("Received an update request for restricted field(s) %s."
% ",".join(disallowed_fields))
if platform.system() != "Windows":
# implemented for our Windows clients though, whose configs are stored in
# the registry, as opposed to in the filesystem.
canary_config = config.CONFIG.CopyConfig()
# Prepare a temporary file we'll write changes to.
with tempfiles.CreateGRRTempFile(mode="w+") as temp_fd:
temp_filename = temp_fd.name
canary_config.SetWriteBack(temp_filename)
self._UpdateConfig(smart_arg, canary_config)
try:
canary_config.SetWriteBack(temp_filename)
except Exception:
logging.warning("Updated config file %s is not usable.", temp_filename)
raise
os.unlink(temp_filename)
# The changes seem to work, so push them to the real config.
self._UpdateConfig(smart_arg, config.CONFIG)
def GetClientInformation() -> rdf_client.ClientInformation:
return rdf_client.ClientInformation(
client_name=config.CONFIG["Client.name"],
client_binary_name=psutil.Process().name(),
client_description=config.CONFIG["Client.description"],
client_version=int(config.CONFIG["Source.version_numeric"]),
build_time=config.CONFIG["Client.build_time"],
labels=config.CONFIG.Get("Client.labels", default=None),
timeline_btime_support=timeline.BTIME_SUPPORT)
class GetClientInfo(actions.ActionPlugin):
out_rdfvalues = [rdf_client.ClientInformation]
def Run(self, unused_args):
self.SendReply(GetClientInformation())
class GetClientStats(actions.ActionPlugin):
in_rdfvalue = rdf_client_action.GetClientStatsRequest
out_rdfvalues = [rdf_client_stats.ClientStats]
def Run(self, arg):
if arg is None:
arg = rdf_client_action.GetClientStatsRequest()
proc = psutil.Process(os.getpid())
meminfo = proc.memory_info()
boot_time = rdfvalue.RDFDatetime.FromSecondsSinceEpoch(psutil.boot_time())
create_time = rdfvalue.RDFDatetime.FromSecondsSinceEpoch(proc.create_time())
response = rdf_client_stats.ClientStats(
RSS_size=meminfo.rss,
VMS_size=meminfo.vms,
memory_percent=proc.memory_percent(),
bytes_received=communicator.GRR_CLIENT_RECEIVED_BYTES.GetValue(),
bytes_sent=communicator.GRR_CLIENT_SENT_BYTES.GetValue(),
create_time=create_time,
boot_time=boot_time)
response.cpu_samples = self.grr_worker.stats_collector.CpuSamplesBetween(
start_time=arg.start_time, end_time=arg.end_time)
response.io_samples = self.grr_worker.stats_collector.IOSamplesBetween(
start_time=arg.start_time, end_time=arg.end_time)
self.Send(response)
def Send(self, response):
self.SendReply(response)
class GetClientStatsAuto(GetClientStats):
def Send(self, response):
self.grr_worker.SendReply(
rdf_client_stats.ClientStats.Downsampled(response),
session_id=rdfvalue.SessionID(queue=queues.STATS, flow_name="Stats"),
response_id=0,
request_id=0,
message_type=rdf_flows.GrrMessage.Type.MESSAGE,
require_fastpoll=False)
class SendStartupInfo(actions.ActionPlugin):
in_rdfvalue = None
out_rdfvalues = [rdf_client.StartupInfo]
well_known_session_id = rdfvalue.SessionID(flow_name="Startup")
def Run(self, unused_arg, ttl=None):
logging.debug("Sending startup information.")
boot_time = rdfvalue.RDFDatetime.FromSecondsSinceEpoch(psutil.boot_time())
response = rdf_client.StartupInfo(
boot_time=boot_time, client_info=GetClientInformation())
self.grr_worker.SendReply(
response,
session_id=self.well_known_session_id,
response_id=0,
request_id=0,
message_type=rdf_flows.GrrMessage.Type.MESSAGE,
require_fastpoll=False,
ttl=ttl)
| true | true |
f71c6da0c8beda1979477eff278dde12b8d91849 | 1,317 | py | Python | social_fabric/network_template_processor.py | social-fabric/social-fabric | 5b6adacf4717865a262bf4364fac62f945c52f41 | [
"Apache-2.0"
] | null | null | null | social_fabric/network_template_processor.py | social-fabric/social-fabric | 5b6adacf4717865a262bf4364fac62f945c52f41 | [
"Apache-2.0"
] | null | null | null | social_fabric/network_template_processor.py | social-fabric/social-fabric | 5b6adacf4717865a262bf4364fac62f945c52f41 | [
"Apache-2.0"
] | null | null | null | #
# Copyright (c) 2020 - Neptunium Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
from jinja2 import Environment, FileSystemLoader, StrictUndefined
from social_fabric.config_repo import ConfigRepo
class NetworkTemplateProcessor:
def __init__(self):
self.file_loader = FileSystemLoader(ConfigRepo.TEMPLATE_SRC_DIR)
self.env = Environment(loader=self.file_loader, undefined=StrictUndefined)
def process(self, filename, *args, **kwargs):
template = self.env.get_template(filename)
return template.render(*args, **kwargs)
if __name__ == '__main__':
config = {}
net_template_processor = NetworkTemplateProcessor()
output = net_template_processor.process('docker-compose-ca.yaml',
BCC_NETWORK_DOMAIN='orsnet',
BCC_CA_ADDR='ca.theobjects.com',
BCC_CA_PORT='7055',
BCC_CA_PUBLIC_CERT='ca.theobjects.com.cert.pem',
BCC_CA_PRIVATE_KEY='ca.theobjects.com.priv.key',
BCC_CA_ADMIN_NAME='admin', BCC_CA_ADMIN_PASSWORD='adminpw')
with open('/tmp/docker-compose.yaml', 'w') as f:
f.write(output)
| 38.735294 | 103 | 0.593014 |
from jinja2 import Environment, FileSystemLoader, StrictUndefined
from social_fabric.config_repo import ConfigRepo
class NetworkTemplateProcessor:
def __init__(self):
self.file_loader = FileSystemLoader(ConfigRepo.TEMPLATE_SRC_DIR)
self.env = Environment(loader=self.file_loader, undefined=StrictUndefined)
def process(self, filename, *args, **kwargs):
template = self.env.get_template(filename)
return template.render(*args, **kwargs)
if __name__ == '__main__':
config = {}
net_template_processor = NetworkTemplateProcessor()
output = net_template_processor.process('docker-compose-ca.yaml',
BCC_NETWORK_DOMAIN='orsnet',
BCC_CA_ADDR='ca.theobjects.com',
BCC_CA_PORT='7055',
BCC_CA_PUBLIC_CERT='ca.theobjects.com.cert.pem',
BCC_CA_PRIVATE_KEY='ca.theobjects.com.priv.key',
BCC_CA_ADMIN_NAME='admin', BCC_CA_ADMIN_PASSWORD='adminpw')
with open('/tmp/docker-compose.yaml', 'w') as f:
f.write(output)
| true | true |
f71c6e32018acdc987579657fa46ba5d3cf922de | 6,231 | py | Python | crabageprediction/venv/Lib/site-packages/matplotlib/tests/test_sphinxext.py | 13rianlucero/CrabAgePrediction | 92bc7fbe1040f49e820473e33cc3902a5a7177c7 | [
"MIT"
] | 7 | 2022-01-16T12:28:16.000Z | 2022-03-04T15:31:45.000Z | crabageprediction/venv/Lib/site-packages/matplotlib/tests/test_sphinxext.py | 13rianlucero/CrabAgePrediction | 92bc7fbe1040f49e820473e33cc3902a5a7177c7 | [
"MIT"
] | 5 | 2022-01-16T10:08:41.000Z | 2022-01-20T05:34:09.000Z | crabageprediction/venv/Lib/site-packages/matplotlib/tests/test_sphinxext.py | 13rianlucero/CrabAgePrediction | 92bc7fbe1040f49e820473e33cc3902a5a7177c7 | [
"MIT"
] | 4 | 2022-02-04T22:58:27.000Z | 2022-02-14T19:29:18.000Z | """Tests for tinypages build using sphinx extensions."""
import filecmp
import os
from pathlib import Path
import shutil
from subprocess import Popen, PIPE
import sys
import pytest
pytest.importorskip('sphinx',
minversion=None if sys.version_info < (3, 10) else '4.1.3')
def test_tinypages(tmpdir):
source_dir = Path(tmpdir) / 'src'
shutil.copytree(Path(__file__).parent / 'tinypages', source_dir)
html_dir = source_dir / '_build' / 'html'
doctree_dir = source_dir / 'doctrees'
# Build the pages with warnings turned into errors
cmd = [sys.executable, '-msphinx', '-W', '-b', 'html',
'-d', str(doctree_dir),
str(Path(__file__).parent / 'tinypages'), str(html_dir)]
# On CI, gcov emits warnings (due to agg headers being included with the
# same name in multiple extension modules -- but we don't care about their
# coverage anyways); hide them using GCOV_ERROR_FILE.
proc = Popen(
cmd, stdout=PIPE, stderr=PIPE, universal_newlines=True,
env={**os.environ, "MPLBACKEND": "", "GCOV_ERROR_FILE": os.devnull})
out, err = proc.communicate()
# Build the pages with warnings turned into errors
build_sphinx_html(source_dir, doctree_dir, html_dir)
def plot_file(num):
return html_dir / f'some_plots-{num}.png'
def plot_directive_file(num):
# This is always next to the doctree dir.
return doctree_dir.parent / 'plot_directive' / f'some_plots-{num}.png'
range_10, range_6, range_4 = [plot_file(i) for i in range(1, 4)]
# Plot 5 is range(6) plot
assert filecmp.cmp(range_6, plot_file(5))
# Plot 7 is range(4) plot
assert filecmp.cmp(range_4, plot_file(7))
# Plot 11 is range(10) plot
assert filecmp.cmp(range_10, plot_file(11))
# Plot 12 uses the old range(10) figure and the new range(6) figure
assert filecmp.cmp(range_10, plot_file('12_00'))
assert filecmp.cmp(range_6, plot_file('12_01'))
# Plot 13 shows close-figs in action
assert filecmp.cmp(range_4, plot_file(13))
# Plot 14 has included source
html_contents = (html_dir / 'some_plots.html').read_bytes()
assert b'# Only a comment' in html_contents
# check plot defined in external file.
assert filecmp.cmp(range_4, html_dir / 'range4.png')
assert filecmp.cmp(range_6, html_dir / 'range6.png')
# check if figure caption made it into html file
assert b'This is the caption for plot 15.' in html_contents
# check if figure caption using :caption: made it into html file
assert b'Plot 17 uses the caption option.' in html_contents
# check if figure caption made it into html file
assert b'This is the caption for plot 18.' in html_contents
# check if the custom classes made it into the html file
assert b'plot-directive my-class my-other-class' in html_contents
# check that the multi-image caption is applied twice
assert html_contents.count(b'This caption applies to both plots.') == 2
# Plot 21 is range(6) plot via an include directive. But because some of
# the previous plots are repeated, the argument to plot_file() is only 17.
assert filecmp.cmp(range_6, plot_file(17))
# Modify the included plot
contents = (source_dir / 'included_plot_21.rst').read_text()
contents = contents.replace('plt.plot(range(6))', 'plt.plot(range(4))')
(source_dir / 'included_plot_21.rst').write_text(contents)
# Build the pages again and check that the modified file was updated
modification_times = [plot_directive_file(i).stat().st_mtime
for i in (1, 2, 3, 5)]
build_sphinx_html(source_dir, doctree_dir, html_dir)
assert filecmp.cmp(range_4, plot_file(17))
# Check that the plots in the plot_directive folder weren't changed.
# (plot_directive_file(1) won't be modified, but it will be copied to html/
# upon compilation, so plot_file(1) will be modified)
assert plot_directive_file(1).stat().st_mtime == modification_times[0]
assert plot_directive_file(2).stat().st_mtime == modification_times[1]
assert plot_directive_file(3).stat().st_mtime == modification_times[2]
assert filecmp.cmp(range_10, plot_file(1))
assert filecmp.cmp(range_6, plot_file(2))
assert filecmp.cmp(range_4, plot_file(3))
# Make sure that figures marked with context are re-created (but that the
# contents are the same)
assert plot_directive_file(5).stat().st_mtime > modification_times[3]
assert filecmp.cmp(range_6, plot_file(5))
def test_plot_html_show_source_link(tmpdir):
source_dir = Path(tmpdir) / 'src'
source_dir.mkdir()
parent = Path(__file__).parent
shutil.copyfile(parent / 'tinypages/conf.py', source_dir / 'conf.py')
shutil.copytree(parent / 'tinypages/_static', source_dir / '_static')
doctree_dir = source_dir / 'doctrees'
(source_dir / 'index.rst').write_text("""
.. plot::
plt.plot(range(2))
""")
# Make sure source scripts are created by default
html_dir1 = source_dir / '_build' / 'html1'
build_sphinx_html(source_dir, doctree_dir, html_dir1)
assert "index-1.py" in [p.name for p in html_dir1.iterdir()]
# Make sure source scripts are NOT created when
# plot_html_show_source_link` is False
html_dir2 = source_dir / '_build' / 'html2'
build_sphinx_html(source_dir, doctree_dir, html_dir2,
extra_args=['-D', 'plot_html_show_source_link=0'])
assert "index-1.py" not in [p.name for p in html_dir2.iterdir()]
def build_sphinx_html(source_dir, doctree_dir, html_dir, extra_args=None):
    """Run a Sphinx HTML build of *source_dir* and fail the calling test on error.

    ``-W`` promotes Sphinx warnings to build errors; any stderr output that
    still slips through is reported via ``pytest.fail`` as a safety net.
    """
    # Build the pages with warnings turned into errors
    extra_args = [] if extra_args is None else extra_args
    cmd = [sys.executable, '-msphinx', '-W', '-b', 'html',
           '-d', str(doctree_dir), str(source_dir), str(html_dir), *extra_args]
    # MPLBACKEND is blanked so the docs build falls back to the default backend.
    proc = Popen(cmd, stdout=PIPE, stderr=PIPE, universal_newlines=True,
                 env={**os.environ, "MPLBACKEND": ""})
    out, err = proc.communicate()
    assert proc.returncode == 0, \
        f"sphinx build failed with stdout:\n{out}\nstderr:\n{err}\n"
    if err:
        pytest.fail(f"sphinx build emitted the following warnings:\n{err}")
    assert html_dir.is_dir()
| 44.507143 | 79 | 0.69074 |
import filecmp
import os
from pathlib import Path
import shutil
from subprocess import Popen, PIPE
import sys
import pytest
pytest.importorskip('sphinx',
minversion=None if sys.version_info < (3, 10) else '4.1.3')
def test_tinypages(tmpdir):
source_dir = Path(tmpdir) / 'src'
shutil.copytree(Path(__file__).parent / 'tinypages', source_dir)
html_dir = source_dir / '_build' / 'html'
doctree_dir = source_dir / 'doctrees'
cmd = [sys.executable, '-msphinx', '-W', '-b', 'html',
'-d', str(doctree_dir),
str(Path(__file__).parent / 'tinypages'), str(html_dir)]
# coverage anyways); hide them using GCOV_ERROR_FILE.
proc = Popen(
cmd, stdout=PIPE, stderr=PIPE, universal_newlines=True,
env={**os.environ, "MPLBACKEND": "", "GCOV_ERROR_FILE": os.devnull})
out, err = proc.communicate()
# Build the pages with warnings turned into errors
build_sphinx_html(source_dir, doctree_dir, html_dir)
def plot_file(num):
return html_dir / f'some_plots-{num}.png'
def plot_directive_file(num):
# This is always next to the doctree dir.
return doctree_dir.parent / 'plot_directive' / f'some_plots-{num}.png'
range_10, range_6, range_4 = [plot_file(i) for i in range(1, 4)]
# Plot 5 is range(6) plot
assert filecmp.cmp(range_6, plot_file(5))
# Plot 7 is range(4) plot
assert filecmp.cmp(range_4, plot_file(7))
# Plot 11 is range(10) plot
assert filecmp.cmp(range_10, plot_file(11))
# Plot 12 uses the old range(10) figure and the new range(6) figure
assert filecmp.cmp(range_10, plot_file('12_00'))
assert filecmp.cmp(range_6, plot_file('12_01'))
# Plot 13 shows close-figs in action
assert filecmp.cmp(range_4, plot_file(13))
# Plot 14 has included source
html_contents = (html_dir / 'some_plots.html').read_bytes()
assert b'
# check plot defined in external file.
assert filecmp.cmp(range_4, html_dir / 'range4.png')
assert filecmp.cmp(range_6, html_dir / 'range6.png')
# check if figure caption made it into html file
assert b'This is the caption for plot 15.' in html_contents
# check if figure caption using :caption: made it into html file
assert b'Plot 17 uses the caption option.' in html_contents
# check if figure caption made it into html file
assert b'This is the caption for plot 18.' in html_contents
# check if the custom classes made it into the html file
assert b'plot-directive my-class my-other-class' in html_contents
# check that the multi-image caption is applied twice
assert html_contents.count(b'This caption applies to both plots.') == 2
# Plot 21 is range(6) plot via an include directive. But because some of
# the previous plots are repeated, the argument to plot_file() is only 17.
assert filecmp.cmp(range_6, plot_file(17))
# Modify the included plot
contents = (source_dir / 'included_plot_21.rst').read_text()
contents = contents.replace('plt.plot(range(6))', 'plt.plot(range(4))')
(source_dir / 'included_plot_21.rst').write_text(contents)
# Build the pages again and check that the modified file was updated
modification_times = [plot_directive_file(i).stat().st_mtime
for i in (1, 2, 3, 5)]
build_sphinx_html(source_dir, doctree_dir, html_dir)
assert filecmp.cmp(range_4, plot_file(17))
# Check that the plots in the plot_directive folder weren't changed.
# upon compilation, so plot_file(1) will be modified)
assert plot_directive_file(1).stat().st_mtime == modification_times[0]
assert plot_directive_file(2).stat().st_mtime == modification_times[1]
assert plot_directive_file(3).stat().st_mtime == modification_times[2]
assert filecmp.cmp(range_10, plot_file(1))
assert filecmp.cmp(range_6, plot_file(2))
assert filecmp.cmp(range_4, plot_file(3))
# Make sure that figures marked with context are re-created (but that the
# contents are the same)
assert plot_directive_file(5).stat().st_mtime > modification_times[3]
assert filecmp.cmp(range_6, plot_file(5))
def test_plot_html_show_source_link(tmpdir):
source_dir = Path(tmpdir) / 'src'
source_dir.mkdir()
parent = Path(__file__).parent
shutil.copyfile(parent / 'tinypages/conf.py', source_dir / 'conf.py')
shutil.copytree(parent / 'tinypages/_static', source_dir / '_static')
doctree_dir = source_dir / 'doctrees'
(source_dir / 'index.rst').write_text("""
.. plot::
plt.plot(range(2))
""")
# Make sure source scripts are created by default
html_dir1 = source_dir / '_build' / 'html1'
build_sphinx_html(source_dir, doctree_dir, html_dir1)
assert "index-1.py" in [p.name for p in html_dir1.iterdir()]
# Make sure source scripts are NOT created when
# plot_html_show_source_link` is False
html_dir2 = source_dir / '_build' / 'html2'
build_sphinx_html(source_dir, doctree_dir, html_dir2,
extra_args=['-D', 'plot_html_show_source_link=0'])
assert "index-1.py" not in [p.name for p in html_dir2.iterdir()]
def build_sphinx_html(source_dir, doctree_dir, html_dir, extra_args=None):
# Build the pages with warnings turned into errors
extra_args = [] if extra_args is None else extra_args
cmd = [sys.executable, '-msphinx', '-W', '-b', 'html',
'-d', str(doctree_dir), str(source_dir), str(html_dir), *extra_args]
proc = Popen(cmd, stdout=PIPE, stderr=PIPE, universal_newlines=True,
env={**os.environ, "MPLBACKEND": ""})
out, err = proc.communicate()
assert proc.returncode == 0, \
f"sphinx build failed with stdout:\n{out}\nstderr:\n{err}\n"
if err:
pytest.fail(f"sphinx build emitted the following warnings:\n{err}")
assert html_dir.is_dir()
| true | true |
f71c6f7492e390d017ea6f6fa7414b737b27c660 | 7,147 | py | Python | distributed/tests/test_as_completed.py | abduhbm/distributed | d99752e030e2534c1f064865e2241289fedfe5a9 | [
"BSD-3-Clause"
] | null | null | null | distributed/tests/test_as_completed.py | abduhbm/distributed | d99752e030e2534c1f064865e2241289fedfe5a9 | [
"BSD-3-Clause"
] | null | null | null | distributed/tests/test_as_completed.py | abduhbm/distributed | d99752e030e2534c1f064865e2241289fedfe5a9 | [
"BSD-3-Clause"
] | null | null | null | import asyncio
from collections.abc import Iterator
from operator import add
import queue
import random
from time import sleep
import pytest
from tornado import gen
from distributed.client import _as_completed, as_completed, _first_completed, wait
from distributed.metrics import time
from distributed.utils import CancelledError
from distributed.utils_test import gen_cluster, inc, throws
from distributed.utils_test import client, cluster_fixture, loop # noqa: F401
@gen_cluster(client=True)
def test__as_completed(c, s, a, b):
x = c.submit(inc, 1)
y = c.submit(inc, 1)
z = c.submit(inc, 2)
q = queue.Queue()
yield _as_completed([x, y, z], q)
assert q.qsize() == 3
assert {q.get(), q.get(), q.get()} == {x, y, z}
result = yield _first_completed([x, y, z])
assert result in [x, y, z]
def test_as_completed(client):
x = client.submit(inc, 1)
y = client.submit(inc, 2)
z = client.submit(inc, 1)
seq = as_completed([x, y, z])
assert seq.count() == 3
assert isinstance(seq, Iterator)
assert set(seq) == {x, y, z}
assert seq.count() == 0
assert list(as_completed([])) == []
def test_as_completed_with_non_futures(client):
with pytest.raises(TypeError):
list(as_completed([1, 2, 3]))
def test_as_completed_add(client):
total = 0
expected = sum(map(inc, range(10)))
futures = client.map(inc, range(10))
ac = as_completed(futures)
for future in ac:
result = future.result()
total += result
if random.random() < 0.5:
future = client.submit(add, future, 10)
ac.add(future)
expected += result + 10
assert total == expected
def test_as_completed_update(client):
total = 0
todo = list(range(10))
expected = sum(map(inc, todo))
ac = as_completed([])
while todo or not ac.is_empty():
if todo:
work, todo = todo[:4], todo[4:]
ac.update(client.map(inc, work))
batch = ac.next_batch(block=True)
total += sum(r.result() for r in batch)
assert total == expected
def test_as_completed_repeats(client):
ac = as_completed()
x = client.submit(inc, 1)
ac.add(x)
ac.add(x)
assert next(ac) is x
assert next(ac) is x
with pytest.raises(StopIteration):
next(ac)
ac.add(x)
assert next(ac) is x
def test_as_completed_is_empty(client):
ac = as_completed()
assert ac.is_empty()
x = client.submit(inc, 1)
ac.add(x)
assert not ac.is_empty()
assert next(ac) is x
assert ac.is_empty()
def test_as_completed_cancel(client):
x = client.submit(inc, 1)
y = client.submit(inc, 1)
ac = as_completed([x, y])
x.cancel()
assert next(ac) is x or y
assert next(ac) is y or x
with pytest.raises(queue.Empty):
ac.queue.get(timeout=0.1)
res = list(as_completed([x, y, x]))
assert len(res) == 3
assert set(res) == {x, y}
assert res.count(x) == 2
def test_as_completed_cancel_last(client):
w = client.submit(inc, 0.3)
x = client.submit(inc, 1)
y = client.submit(inc, 0.3)
@gen.coroutine
def _():
yield gen.sleep(0.1)
yield w.cancel(asynchronous=True)
yield y.cancel(asynchronous=True)
client.loop.add_callback(_)
ac = as_completed([x, y])
result = set(ac)
assert result == {x, y}
@gen_cluster(client=True)
def test_async_for_py2_equivalent(c, s, a, b):
futures = c.map(sleep, [0.01] * 3, pure=False)
seq = as_completed(futures)
x = yield seq.__anext__()
y = yield seq.__anext__()
z = yield seq.__anext__()
assert x.done()
assert y.done()
assert z.done()
assert x.key != y.key
with pytest.raises(StopAsyncIteration):
yield seq.__anext__()
@gen_cluster(client=True)
def test_as_completed_error_async(c, s, a, b):
x = c.submit(throws, 1)
y = c.submit(inc, 1)
ac = as_completed([x, y])
first = yield ac.__anext__()
second = yield ac.__anext__()
result = {first, second}
assert result == {x, y}
assert x.status == "error"
assert y.status == "finished"
def test_as_completed_error(client):
x = client.submit(throws, 1)
y = client.submit(inc, 1)
ac = as_completed([x, y])
result = set(ac)
assert result == {x, y}
assert x.status == "error"
assert y.status == "finished"
def test_as_completed_with_results(client):
x = client.submit(throws, 1)
y = client.submit(inc, 5)
z = client.submit(inc, 1)
ac = as_completed([x, y, z], with_results=True)
y.cancel()
with pytest.raises(RuntimeError) as exc:
res = list(ac)
assert str(exc.value) == "hello!"
@gen_cluster(client=True)
def test_as_completed_with_results_async(c, s, a, b):
x = c.submit(throws, 1)
y = c.submit(inc, 5)
z = c.submit(inc, 1)
ac = as_completed([x, y, z], with_results=True)
yield y.cancel()
with pytest.raises(RuntimeError) as exc:
first = yield ac.__anext__()
second = yield ac.__anext__()
third = yield ac.__anext__()
assert str(exc.value) == "hello!"
def test_as_completed_with_results_no_raise(client):
x = client.submit(throws, 1)
y = client.submit(inc, 5)
z = client.submit(inc, 1)
ac = as_completed([x, y, z], with_results=True, raise_errors=False)
y.cancel()
res = list(ac)
dd = {r[0]: r[1:] for r in res}
assert set(dd.keys()) == {y, x, z}
assert x.status == "error"
assert y.status == "cancelled"
assert z.status == "finished"
assert isinstance(dd[y][0], CancelledError) or dd[y][0] == 6
assert isinstance(dd[x][0][1], RuntimeError)
assert dd[z][0] == 2
@gen_cluster(client=True)
async def test_str(c, s, a, b):
futures = c.map(inc, range(3))
ac = as_completed(futures)
assert "waiting=3" in str(ac)
assert "waiting=3" in repr(ac)
assert "done=0" in str(ac)
assert "done=0" in repr(ac)
await ac.__anext__()
start = time()
while "done=2" not in str(ac):
await asyncio.sleep(0.01)
assert time() < start + 2
@gen_cluster(client=True)
def test_as_completed_with_results_no_raise_async(c, s, a, b):
x = c.submit(throws, 1)
y = c.submit(inc, 5)
z = c.submit(inc, 1)
ac = as_completed([x, y, z], with_results=True, raise_errors=False)
c.loop.add_callback(y.cancel)
first = yield ac.__anext__()
second = yield ac.__anext__()
third = yield ac.__anext__()
res = [first, second, third]
dd = {r[0]: r[1:] for r in res}
assert set(dd.keys()) == {y, x, z}
assert x.status == "error"
assert y.status == "cancelled"
assert z.status == "finished"
assert isinstance(dd[y][0], CancelledError)
assert isinstance(dd[x][0][1], RuntimeError)
assert dd[z][0] == 2
@gen_cluster(client=True, timeout=None)
async def test_clear(c, s, a, b):
futures = c.map(inc, range(3))
ac = as_completed(futures)
await wait(futures)
ac.clear()
with pytest.raises(StopAsyncIteration):
await ac.__anext__()
del futures
while s.tasks:
await asyncio.sleep(0.3)
| 24.644828 | 82 | 0.622219 | import asyncio
from collections.abc import Iterator
from operator import add
import queue
import random
from time import sleep
import pytest
from tornado import gen
from distributed.client import _as_completed, as_completed, _first_completed, wait
from distributed.metrics import time
from distributed.utils import CancelledError
from distributed.utils_test import gen_cluster, inc, throws
from distributed.utils_test import client, cluster_fixture, loop
@gen_cluster(client=True)
def test__as_completed(c, s, a, b):
x = c.submit(inc, 1)
y = c.submit(inc, 1)
z = c.submit(inc, 2)
q = queue.Queue()
yield _as_completed([x, y, z], q)
assert q.qsize() == 3
assert {q.get(), q.get(), q.get()} == {x, y, z}
result = yield _first_completed([x, y, z])
assert result in [x, y, z]
def test_as_completed(client):
x = client.submit(inc, 1)
y = client.submit(inc, 2)
z = client.submit(inc, 1)
seq = as_completed([x, y, z])
assert seq.count() == 3
assert isinstance(seq, Iterator)
assert set(seq) == {x, y, z}
assert seq.count() == 0
assert list(as_completed([])) == []
def test_as_completed_with_non_futures(client):
with pytest.raises(TypeError):
list(as_completed([1, 2, 3]))
def test_as_completed_add(client):
total = 0
expected = sum(map(inc, range(10)))
futures = client.map(inc, range(10))
ac = as_completed(futures)
for future in ac:
result = future.result()
total += result
if random.random() < 0.5:
future = client.submit(add, future, 10)
ac.add(future)
expected += result + 10
assert total == expected
def test_as_completed_update(client):
total = 0
todo = list(range(10))
expected = sum(map(inc, todo))
ac = as_completed([])
while todo or not ac.is_empty():
if todo:
work, todo = todo[:4], todo[4:]
ac.update(client.map(inc, work))
batch = ac.next_batch(block=True)
total += sum(r.result() for r in batch)
assert total == expected
def test_as_completed_repeats(client):
ac = as_completed()
x = client.submit(inc, 1)
ac.add(x)
ac.add(x)
assert next(ac) is x
assert next(ac) is x
with pytest.raises(StopIteration):
next(ac)
ac.add(x)
assert next(ac) is x
def test_as_completed_is_empty(client):
ac = as_completed()
assert ac.is_empty()
x = client.submit(inc, 1)
ac.add(x)
assert not ac.is_empty()
assert next(ac) is x
assert ac.is_empty()
def test_as_completed_cancel(client):
x = client.submit(inc, 1)
y = client.submit(inc, 1)
ac = as_completed([x, y])
x.cancel()
assert next(ac) is x or y
assert next(ac) is y or x
with pytest.raises(queue.Empty):
ac.queue.get(timeout=0.1)
res = list(as_completed([x, y, x]))
assert len(res) == 3
assert set(res) == {x, y}
assert res.count(x) == 2
def test_as_completed_cancel_last(client):
w = client.submit(inc, 0.3)
x = client.submit(inc, 1)
y = client.submit(inc, 0.3)
@gen.coroutine
def _():
yield gen.sleep(0.1)
yield w.cancel(asynchronous=True)
yield y.cancel(asynchronous=True)
client.loop.add_callback(_)
ac = as_completed([x, y])
result = set(ac)
assert result == {x, y}
@gen_cluster(client=True)
def test_async_for_py2_equivalent(c, s, a, b):
futures = c.map(sleep, [0.01] * 3, pure=False)
seq = as_completed(futures)
x = yield seq.__anext__()
y = yield seq.__anext__()
z = yield seq.__anext__()
assert x.done()
assert y.done()
assert z.done()
assert x.key != y.key
with pytest.raises(StopAsyncIteration):
yield seq.__anext__()
@gen_cluster(client=True)
def test_as_completed_error_async(c, s, a, b):
x = c.submit(throws, 1)
y = c.submit(inc, 1)
ac = as_completed([x, y])
first = yield ac.__anext__()
second = yield ac.__anext__()
result = {first, second}
assert result == {x, y}
assert x.status == "error"
assert y.status == "finished"
def test_as_completed_error(client):
x = client.submit(throws, 1)
y = client.submit(inc, 1)
ac = as_completed([x, y])
result = set(ac)
assert result == {x, y}
assert x.status == "error"
assert y.status == "finished"
def test_as_completed_with_results(client):
x = client.submit(throws, 1)
y = client.submit(inc, 5)
z = client.submit(inc, 1)
ac = as_completed([x, y, z], with_results=True)
y.cancel()
with pytest.raises(RuntimeError) as exc:
res = list(ac)
assert str(exc.value) == "hello!"
@gen_cluster(client=True)
def test_as_completed_with_results_async(c, s, a, b):
x = c.submit(throws, 1)
y = c.submit(inc, 5)
z = c.submit(inc, 1)
ac = as_completed([x, y, z], with_results=True)
yield y.cancel()
with pytest.raises(RuntimeError) as exc:
first = yield ac.__anext__()
second = yield ac.__anext__()
third = yield ac.__anext__()
assert str(exc.value) == "hello!"
def test_as_completed_with_results_no_raise(client):
x = client.submit(throws, 1)
y = client.submit(inc, 5)
z = client.submit(inc, 1)
ac = as_completed([x, y, z], with_results=True, raise_errors=False)
y.cancel()
res = list(ac)
dd = {r[0]: r[1:] for r in res}
assert set(dd.keys()) == {y, x, z}
assert x.status == "error"
assert y.status == "cancelled"
assert z.status == "finished"
assert isinstance(dd[y][0], CancelledError) or dd[y][0] == 6
assert isinstance(dd[x][0][1], RuntimeError)
assert dd[z][0] == 2
@gen_cluster(client=True)
async def test_str(c, s, a, b):
futures = c.map(inc, range(3))
ac = as_completed(futures)
assert "waiting=3" in str(ac)
assert "waiting=3" in repr(ac)
assert "done=0" in str(ac)
assert "done=0" in repr(ac)
await ac.__anext__()
start = time()
while "done=2" not in str(ac):
await asyncio.sleep(0.01)
assert time() < start + 2
@gen_cluster(client=True)
def test_as_completed_with_results_no_raise_async(c, s, a, b):
x = c.submit(throws, 1)
y = c.submit(inc, 5)
z = c.submit(inc, 1)
ac = as_completed([x, y, z], with_results=True, raise_errors=False)
c.loop.add_callback(y.cancel)
first = yield ac.__anext__()
second = yield ac.__anext__()
third = yield ac.__anext__()
res = [first, second, third]
dd = {r[0]: r[1:] for r in res}
assert set(dd.keys()) == {y, x, z}
assert x.status == "error"
assert y.status == "cancelled"
assert z.status == "finished"
assert isinstance(dd[y][0], CancelledError)
assert isinstance(dd[x][0][1], RuntimeError)
assert dd[z][0] == 2
@gen_cluster(client=True, timeout=None)
async def test_clear(c, s, a, b):
futures = c.map(inc, range(3))
ac = as_completed(futures)
await wait(futures)
ac.clear()
with pytest.raises(StopAsyncIteration):
await ac.__anext__()
del futures
while s.tasks:
await asyncio.sleep(0.3)
| true | true |
f71c6fe7ef5eb499ee506fe71c00dd30ce728f85 | 14,768 | py | Python | pdf2image/pdf2image.py | ldevandiere/pdf2image | ba11f6a931337c889ac739f6b41a7e78690d6d32 | [
"MIT"
] | null | null | null | pdf2image/pdf2image.py | ldevandiere/pdf2image | ba11f6a931337c889ac739f6b41a7e78690d6d32 | [
"MIT"
] | null | null | null | pdf2image/pdf2image.py | ldevandiere/pdf2image | ba11f6a931337c889ac739f6b41a7e78690d6d32 | [
"MIT"
] | null | null | null | """
pdf2image is a light wrapper for the poppler-utils tools that can convert your
PDFs into Pillow images.
"""
import os
import platform
import tempfile
import types
import shutil
import pathlib
from subprocess import Popen, PIPE
from PIL import Image
from .generators import uuid_generator, counter_generator, ThreadSafeGenerator
from .parsers import (
parse_buffer_to_pgm,
parse_buffer_to_ppm,
parse_buffer_to_jpeg,
parse_buffer_to_png,
)
from .exceptions import (
PopplerNotInstalledError,
PDFInfoNotInstalledError,
PDFPageCountError,
PDFSyntaxError,
)
TRANSPARENT_FILE_TYPES = ["png", "tiff"]
PDFINFO_CONVERT_TO_INT = ["Pages"]
def convert_from_path(
    pdf_path,
    dpi=200,
    output_folder=None,
    first_page=None,
    last_page=None,
    fmt="ppm",
    jpegopt=None,
    thread_count=1,
    userpw=None,
    use_cropbox=False,
    strict=False,
    transparent=False,
    single_file=False,
    # NOTE(review): this default generator is created once at import time and is
    # shared by every call that omits output_file — confirm intended.
    output_file=uuid_generator(),
    poppler_path=None,
    grayscale=False,
    size=None,
    paths_only=False,
    use_pdftocairo=False,
):
    """
    Description: Convert PDF to Image will throw whenever one of the condition is reached
    Parameters:
        pdf_path -> Path to the PDF that you want to convert
        dpi -> Image quality in DPI (default 200)
        output_folder -> Write the resulting images to a folder (instead of directly in memory)
        first_page -> First page to process
        last_page -> Last page to process before stopping
        fmt -> Output image format
        jpegopt -> jpeg options `quality`, `progressive`, and `optimize` (only for jpeg format)
        thread_count -> How many threads we are allowed to spawn for processing
        userpw -> PDF's password
        use_cropbox -> Use cropbox instead of mediabox
        strict -> When a Syntax Error is thrown, it will be raised as an Exception
        transparent -> Output with a transparent background instead of a white one.
        single_file -> Uses the -singlefile option from pdftoppm/pdftocairo
        output_file -> What is the output filename or generator
        poppler_path -> Path to look for poppler binaries
        grayscale -> Output grayscale image(s)
        size -> Size of the resulting image(s), uses the Pillow (width, height) standard
        paths_only -> Don't load image(s), return paths instead (requires output_folder)
        use_pdftocairo -> Use pdftocairo instead of pdftoppm, may help performance
    Returns:
        A list of PIL images, or a list of file paths when paths_only is set.
    """
    # pdftocairo has no ppm output mode, so upgrade the default format to png.
    if use_pdftocairo and fmt == "ppm":
        fmt = "png"
    # We make sure that if passed arguments are Path objects, they're converted to strings
    if isinstance(pdf_path, pathlib.PurePath):
        pdf_path = pdf_path.as_posix()
    if isinstance(output_folder, pathlib.PurePath):
        output_folder = output_folder.as_posix()
    if isinstance(poppler_path, pathlib.PurePath):
        poppler_path = poppler_path.as_posix()
    page_count = pdfinfo_from_path(pdf_path, userpw, poppler_path=poppler_path)["Pages"]
    # We start by getting the output format, the buffer processing function and if we need pdftocairo
    parsed_fmt, final_extension, parse_buffer_func, use_pdfcairo_format = _parse_format(
        fmt, grayscale
    )
    # We use pdftocairo is the format requires it OR we need a transparent output
    use_pdfcairo = (
        use_pdftocairo
        or use_pdfcairo_format
        or (transparent and parsed_fmt in TRANSPARENT_FILE_TYPES)
    )
    poppler_version = _get_poppler_version(
        "pdftocairo" if use_pdfcairo else "pdftoppm", poppler_path=poppler_path
    )
    # -jpegopt is only supported on poppler minor versions above 57.
    if poppler_version <= 57:
        jpegopt = None
    # If output_file isn't a generator, it will be turned into one
    if not isinstance(output_file, types.GeneratorType) and not isinstance(
        output_file, ThreadSafeGenerator
    ):
        if single_file:
            output_file = iter([output_file])
        else:
            output_file = counter_generator(output_file)
    if thread_count < 1:
        thread_count = 1
    if first_page is None:
        first_page = 1
    if last_page is None or last_page > page_count:
        last_page = page_count
    # Empty page range: nothing to convert.
    if first_page > last_page:
        return []
    auto_temp_dir = False
    # pdftocairo always writes to disk, so fabricate a folder when none given.
    if output_folder is None and use_pdfcairo:
        auto_temp_dir = True
        output_folder = tempfile.mkdtemp()
    # Recalculate page count based on first and last page
    page_count = last_page - first_page + 1
    if thread_count > page_count:
        thread_count = page_count
    # ``reminder`` pages are spread one-per-thread across the first threads.
    reminder = page_count % thread_count
    current_page = first_page
    processes = []
    for _ in range(thread_count):
        thread_output_file = next(output_file)
        # Get the number of pages the thread will be processing
        thread_page_count = page_count // thread_count + int(reminder > 0)
        # Build the command accordingly
        args = _build_command(
            ["-r", str(dpi), pdf_path],
            output_folder,
            current_page,
            current_page + thread_page_count - 1,
            parsed_fmt,
            jpegopt,
            thread_output_file,
            userpw,
            use_cropbox,
            transparent,
            single_file,
            grayscale,
            size,
        )
        if use_pdfcairo:
            args = [_get_command_path("pdftocairo", poppler_path)] + args
        else:
            args = [_get_command_path("pdftoppm", poppler_path)] + args
        # Update page values
        current_page = current_page + thread_page_count
        reminder -= int(reminder > 0)
        # Add poppler path to LD_LIBRARY_PATH
        env = os.environ.copy()
        if poppler_path is not None:
            env["LD_LIBRARY_PATH"] = poppler_path + ":" + env.get("LD_LIBRARY_PATH", "")
        # Spawn the process and save its uuid
        processes.append(
            (thread_output_file, Popen(args, env=env, stdout=PIPE, stderr=PIPE))
        )
    images = []
    # Collect each worker's output in spawn order so pages stay ordered.
    for uid, proc in processes:
        data, err = proc.communicate()
        if b"Syntax Error" in err and strict:
            raise PDFSyntaxError(err.decode("utf8", "ignore"))
        if output_folder is not None:
            images += _load_from_output_folder(
                output_folder, uid, final_extension, paths_only, in_memory=auto_temp_dir
            )
        else:
            images += parse_buffer_func(data)
    # Drop the folder we created ourselves; images were force-loaded above.
    if auto_temp_dir:
        shutil.rmtree(output_folder)
    return images
def convert_from_bytes(
    pdf_file,
    dpi=200,
    output_folder=None,
    first_page=None,
    last_page=None,
    fmt="ppm",
    jpegopt=None,
    thread_count=1,
    userpw=None,
    use_cropbox=False,
    strict=False,
    transparent=False,
    single_file=False,
    output_file=uuid_generator(),
    poppler_path=None,
    grayscale=False,
    size=None,
    paths_only=False,
    use_pdftocairo=False,
):
    """Convert an in-memory PDF to images by spilling it to a temporary file.

    ``pdf_file`` is the raw bytes of the PDF document; every other argument
    mirrors :func:`convert_from_path`, which performs the actual conversion
    once the bytes have been written to disk. The temporary file is removed
    again whether or not the conversion succeeds.
    """
    fd, tmp_path = tempfile.mkstemp()
    try:
        with open(tmp_path, "wb") as handle:
            handle.write(pdf_file)
            handle.flush()
            # Convert while the handle is still open; cleanup happens below.
            return convert_from_path(
                handle.name,
                dpi=dpi,
                output_folder=output_folder,
                first_page=first_page,
                last_page=last_page,
                fmt=fmt,
                jpegopt=jpegopt,
                thread_count=thread_count,
                userpw=userpw,
                use_cropbox=use_cropbox,
                strict=strict,
                transparent=transparent,
                single_file=single_file,
                output_file=output_file,
                poppler_path=poppler_path,
                grayscale=grayscale,
                size=size,
                paths_only=paths_only,
                use_pdftocairo=use_pdftocairo,
            )
    finally:
        os.close(fd)
        os.remove(tmp_path)
def _build_command(
    args,
    output_folder,
    first_page,
    last_page,
    fmt,
    jpegopt,
    output_file,
    userpw,
    use_cropbox,
    transparent,
    single_file,
    grayscale,
    size,
):
    """Append pdftoppm/pdftocairo command-line options to ``args``.

    ``args`` is mutated in place and also returned. ``size`` may be None,
    an int/float (uniform -scale-to), a 1-tuple, or a (width, height)
    2-tuple where None means "keep aspect ratio" (-1 sentinel).

    Raises:
        ValueError: when ``size`` is none of the accepted forms.
    """
    if use_cropbox:
        args.append("-cropbox")
    # Transparency only applies to formats with an alpha channel (png/tiff).
    if transparent and fmt in TRANSPARENT_FILE_TYPES:
        args.append("-transp")
    if first_page is not None:
        args.extend(["-f", str(first_page)])
    if last_page is not None:
        args.extend(["-l", str(last_page)])
    # pgm/ppm are pdftoppm's defaults; every other format needs its own flag.
    if fmt not in ["pgm", "ppm"]:
        args.append("-" + fmt)
    if fmt in ["jpeg", "jpg"] and jpegopt:
        args.extend(["-jpegopt", _parse_jpegopt(jpegopt)])
    if single_file:
        args.append("-singlefile")
    if output_folder is not None:
        args.append(os.path.join(output_folder, output_file))
    if userpw is not None:
        args.extend(["-upw", userpw])
    if grayscale:
        args.append("-gray")
    if size is None:
        pass
    elif isinstance(size, tuple) and len(size) == 2:
        # -1 tells poppler to derive the missing dimension from the aspect ratio.
        if size[0] is not None:
            args.extend(["-scale-to-x", str(int(size[0]))])
        else:
            args.extend(["-scale-to-x", str(-1)])
        if size[1] is not None:
            args.extend(["-scale-to-y", str(int(size[1]))])
        else:
            args.extend(["-scale-to-y", str(-1)])
    elif isinstance(size, tuple) and len(size) == 1:
        args.extend(["-scale-to", str(int(size[0]))])
    elif isinstance(size, int) or isinstance(size, float):
        args.extend(["-scale-to", str(int(size))])
    else:
        # Bug fix: the placeholder was never filled in the original message.
        raise ValueError("Size {} is not a tuple or an integer".format(size))
    return args
def _parse_format(fmt, grayscale=False):
    """Normalize a user-supplied format name.

    Returns a ``(parsed_fmt, final_extension, parse_buffer_func,
    use_pdfcairo)`` tuple. ``parse_buffer_func`` is None for tiff because
    pdftocairo writes tiff files itself (nothing to parse from a pipe).
    Unknown formats silently fall back to ppm, pdftoppm's default.
    """
    fmt = fmt.lower()
    # Bug fix: fmt[0] raised IndexError on an empty string; startswith is safe.
    if fmt.startswith("."):
        fmt = fmt[1:]
    if fmt in ("jpeg", "jpg"):
        return "jpeg", "jpg", parse_buffer_to_jpeg, False
    if fmt == "png":
        return "png", "png", parse_buffer_to_png, False
    if fmt in ("tif", "tiff"):
        return "tiff", "tif", None, True
    if fmt == "ppm" and grayscale:
        return "pgm", "pgm", parse_buffer_to_pgm, False
    # Unable to parse the format so we'll use the default
    return "ppm", "ppm", parse_buffer_to_ppm, False
def _parse_jpegopt(jpegopt):
    """Serialize a jpeg-option dict into pdftoppm's ``key=value,...`` syntax.

    Boolean values are rendered as the ``y``/``n`` flags poppler expects;
    everything else is formatted as-is.
    """
    def render(value):
        if value is True:
            return "y"
        if value is False:
            return "n"
        return value

    return ",".join(
        "{}={}".format(key, render(value)) for key, value in jpegopt.items()
    )
def _get_command_path(command, poppler_path=None):
    """Return the invocation path for a poppler tool on this platform.

    Appends ``.exe`` on Windows and prefixes ``poppler_path`` when given.
    """
    executable = command + ".exe" if platform.system() == "Windows" else command
    if poppler_path is None:
        return executable
    return os.path.join(poppler_path, executable)
def _get_poppler_version(command, poppler_path=None):
    """Return the minor version number of the given poppler tool.

    The tool prints its version banner on stderr; the second dotted
    component (e.g. 86 for "0.86.1") is extracted from the first line.
    Falls back to 17 — the lowest release shipping pdftocairo (2011) —
    when the banner cannot be parsed.
    """
    command = [_get_command_path(command, poppler_path), "-v"]
    env = os.environ.copy()
    if poppler_path is not None:
        # Poppler's shared libraries may live next to its binaries.
        env["LD_LIBRARY_PATH"] = poppler_path + ":" + env.get("LD_LIBRARY_PATH", "")
    proc = Popen(command, env=env, stdout=PIPE, stderr=PIPE)
    out, err = proc.communicate()
    try:
        # TODO: Make this more robust
        return int(
            err.decode("utf8", "ignore").split("\n")[0].split(" ")[-1].split(".")[1]
        )
    # Bug fix: the bare ``except:`` also swallowed SystemExit/KeyboardInterrupt.
    except Exception:
        # Lowest version that includes pdftocairo (2011)
        return 17
def pdfinfo_from_path(pdf_path, userpw=None, poppler_path=None):
    """Return the key/value metadata reported by poppler's ``pdfinfo``.

    Keys listed in PDFINFO_CONVERT_TO_INT (currently only "Pages") are
    converted to int; every other value stays a stripped string.

    Raises:
        PDFInfoNotInstalledError: pdfinfo could not be executed.
        PDFPageCountError: the output contained no page count.
    """
    try:
        command = [_get_command_path("pdfinfo", poppler_path), pdf_path]
        if userpw is not None:
            command.extend(["-upw", userpw])
        # Add poppler path to LD_LIBRARY_PATH
        env = os.environ.copy()
        if poppler_path is not None:
            env["LD_LIBRARY_PATH"] = poppler_path + ":" + env.get("LD_LIBRARY_PATH", "")
        proc = Popen(command, env=env, stdout=PIPE, stderr=PIPE)
        out, err = proc.communicate()
        d = {}
        for field in out.decode("utf8", "ignore").split("\n"):
            sf = field.split(":")
            # Re-join the tail so values containing ':' (e.g. dates) survive.
            key, value = sf[0], ":".join(sf[1:])
            if key != "":
                d[key] = (
                    int(value.strip())
                    if key in PDFINFO_CONVERT_TO_INT
                    else value.strip()
                )
        if "Pages" not in d:
            raise ValueError
        return d
    except OSError:
        raise PDFInfoNotInstalledError(
            "Unable to get page count. Is poppler installed and in PATH?"
        )
    except ValueError:
        # ``err`` is bound here because every ValueError path (int() or the
        # explicit raise above) runs after communicate().
        raise PDFPageCountError(
            "Unable to get page count.\n%s" % err.decode("utf8", "ignore")
        )
def pdfinfo_from_bytes(pdf_file):
    """Run ``pdfinfo`` on in-memory PDF bytes via a temporary spill file."""
    fd, tmp_path = tempfile.mkstemp()
    try:
        with open(tmp_path, "wb") as handle:
            handle.write(pdf_file)
            handle.flush()
            # Query while the handle is still open; cleanup happens below.
            return pdfinfo_from_path(tmp_path)
    finally:
        # Always remove the spill file, success or not.
        os.close(fd)
        os.remove(tmp_path)
def _load_from_output_folder(
    output_folder, output_file, ext, paths_only, in_memory=False
):
    """Collect the images a poppler subprocess wrote into ``output_folder``.

    Files are matched by the ``output_file`` name prefix and the ``ext``
    extension, and returned in sorted (page) order. With ``paths_only`` the
    file paths are returned instead of opened PIL images.
    """
    results = []
    for entry in sorted(os.listdir(output_folder)):
        if not entry.startswith(output_file) or entry.split(".")[-1] != ext:
            continue
        full_path = os.path.join(output_folder, entry)
        if paths_only:
            results.append(full_path)
        else:
            image = Image.open(full_path)
            if in_memory:
                # The folder is temporary: force pixel data into memory
                # before the caller deletes it.
                image.load()
            results.append(image)
    return results
| 31.090526 | 101 | 0.609223 |
import os
import platform
import tempfile
import types
import shutil
import pathlib
from subprocess import Popen, PIPE
from PIL import Image
from .generators import uuid_generator, counter_generator, ThreadSafeGenerator
from .parsers import (
parse_buffer_to_pgm,
parse_buffer_to_ppm,
parse_buffer_to_jpeg,
parse_buffer_to_png,
)
from .exceptions import (
PopplerNotInstalledError,
PDFInfoNotInstalledError,
PDFPageCountError,
PDFSyntaxError,
)
TRANSPARENT_FILE_TYPES = ["png", "tiff"]
PDFINFO_CONVERT_TO_INT = ["Pages"]
def convert_from_path(
    pdf_path,
    dpi=200,
    output_folder=None,
    first_page=None,
    last_page=None,
    fmt="ppm",
    jpegopt=None,
    thread_count=1,
    userpw=None,
    use_cropbox=False,
    strict=False,
    transparent=False,
    single_file=False,
    output_file=uuid_generator(),
    poppler_path=None,
    grayscale=False,
    size=None,
    paths_only=False,
    use_pdftocairo=False,
):
    """Convert a PDF on disk to a list of PIL images (or file paths).

    Pages are rendered by spawning poppler's pdftoppm/pdftocairo, with the
    page range optionally split across ``thread_count`` parallel processes.

    :param pdf_path: path to the PDF (str or pathlib path)
    :param dpi: render resolution in dots per inch
    :param output_folder: write images here instead of keeping them in memory
    :param first_page: first page to convert (1-based; default: first)
    :param last_page: last page to convert (default: last)
    :param fmt: output format ("ppm", "jpeg", "png", "tiff")
    :param jpegopt: dict of jpeg options (quality/progressive/optimize)
    :param thread_count: number of poppler processes run in parallel
    :param userpw: PDF user password
    :param use_cropbox: render using the crop box instead of the media box
    :param strict: raise PDFSyntaxError if poppler reports a syntax error
    :param single_file: produce a single output file
    :param output_file: base name (or generator of names) for output files.
        NOTE: the default is evaluated once at import time, so every call
        relying on it shares one generator.
    :param poppler_path: directory containing the poppler binaries
    :param grayscale: render in grayscale
    :param size: int, 1-tuple or (x, y) tuple controlling the output size
    :param paths_only: with output_folder, return file paths, not images
    :param use_pdftocairo: force pdftocairo even when pdftoppm would do
    :return: list of PIL images, or of file paths when paths_only is set
    """
    # pdftocairo has no ppm output; silently upgrade to png in that case.
    if use_pdftocairo and fmt == "ppm":
        fmt = "png"
    # Accept pathlib objects for all path-like arguments.
    if isinstance(pdf_path, pathlib.PurePath):
        pdf_path = pdf_path.as_posix()
    if isinstance(output_folder, pathlib.PurePath):
        output_folder = output_folder.as_posix()
    if isinstance(poppler_path, pathlib.PurePath):
        poppler_path = poppler_path.as_posix()
    page_count = pdfinfo_from_path(pdf_path, userpw, poppler_path=poppler_path)["Pages"]
    # We start by getting the output format, the buffer processing function and if we need pdftocairo
    parsed_fmt, final_extension, parse_buffer_func, use_pdfcairo_format = _parse_format(
        fmt, grayscale
    )
    # We use pdftocairo if the format requires it OR we need a transparent output
    use_pdfcairo = (
        use_pdftocairo
        or use_pdfcairo_format
        or (transparent and parsed_fmt in TRANSPARENT_FILE_TYPES)
    )
    poppler_version = _get_poppler_version(
        "pdftocairo" if use_pdfcairo else "pdftoppm", poppler_path=poppler_path
    )
    # -jpegopt is not understood by old popplers (<= 0.57); drop it there.
    if poppler_version <= 57:
        jpegopt = None
    # If output_file isn't a generator, it will be turned into one
    if not isinstance(output_file, types.GeneratorType) and not isinstance(
        output_file, ThreadSafeGenerator
    ):
        if single_file:
            output_file = iter([output_file])
        else:
            output_file = counter_generator(output_file)
    if thread_count < 1:
        thread_count = 1
    if first_page is None:
        first_page = 1
    if last_page is None or last_page > page_count:
        last_page = page_count
    # Empty/inverted page range: nothing to do.
    if first_page > last_page:
        return []
    auto_temp_dir = False
    # NOTE(review): pdfcairo output apparently always goes through a folder
    # here, so a throwaway one is created when none was given — confirm.
    if output_folder is None and use_pdfcairo:
        auto_temp_dir = True
        output_folder = tempfile.mkdtemp()
    # From here on, page_count is the number of pages actually converted.
    page_count = last_page - first_page + 1
    if thread_count > page_count:
        thread_count = page_count
    # Spread the division remainder over the first `reminder` processes.
    reminder = page_count % thread_count
    current_page = first_page
    processes = []
    for _ in range(thread_count):
        thread_output_file = next(output_file)
        thread_page_count = page_count // thread_count + int(reminder > 0)
        args = _build_command(
            ["-r", str(dpi), pdf_path],
            output_folder,
            current_page,
            current_page + thread_page_count - 1,
            parsed_fmt,
            jpegopt,
            thread_output_file,
            userpw,
            use_cropbox,
            transparent,
            single_file,
            grayscale,
            size,
        )
        if use_pdfcairo:
            args = [_get_command_path("pdftocairo", poppler_path)] + args
        else:
            args = [_get_command_path("pdftoppm", poppler_path)] + args
        current_page = current_page + thread_page_count
        reminder -= int(reminder > 0)
        # Make sure the bundled shared libraries are found by the child.
        env = os.environ.copy()
        if poppler_path is not None:
            env["LD_LIBRARY_PATH"] = poppler_path + ":" + env.get("LD_LIBRARY_PATH", "")
        processes.append(
            (thread_output_file, Popen(args, env=env, stdout=PIPE, stderr=PIPE))
        )
    images = []
    # Collect results in launch order so pages come back sorted.
    for uid, proc in processes:
        data, err = proc.communicate()
        if b"Syntax Error" in err and strict:
            raise PDFSyntaxError(err.decode("utf8", "ignore"))
        if output_folder is not None:
            images += _load_from_output_folder(
                output_folder, uid, final_extension, paths_only, in_memory=auto_temp_dir
            )
        else:
            images += parse_buffer_func(data)
    if auto_temp_dir:
        shutil.rmtree(output_folder)
    return images
def convert_from_bytes(
    pdf_file,
    dpi=200,
    output_folder=None,
    first_page=None,
    last_page=None,
    fmt="ppm",
    jpegopt=None,
    thread_count=1,
    userpw=None,
    use_cropbox=False,
    strict=False,
    transparent=False,
    single_file=False,
    output_file=uuid_generator(),
    poppler_path=None,
    grayscale=False,
    size=None,
    paths_only=False,
    use_pdftocairo=False,
):
    """Convert an in-memory PDF (bytes) to images.

    Poppler only reads from disk, so the bytes are written to a temporary
    file which is then handed to convert_from_path; all keyword arguments
    are forwarded unchanged (see convert_from_path for their meaning).

    NOTE: the ``output_file=uuid_generator()`` default is evaluated once at
    import time, so every call relying on it shares one generator.
    """
    fh, temp_filename = tempfile.mkstemp()
    try:
        with open(temp_filename, "wb") as f:
            f.write(pdf_file)
            f.flush()
            # Conversion runs while the temp file handle is still open;
            # poppler opens it independently by path.
            return convert_from_path(
                f.name,
                dpi=dpi,
                output_folder=output_folder,
                first_page=first_page,
                last_page=last_page,
                fmt=fmt,
                jpegopt=jpegopt,
                thread_count=thread_count,
                userpw=userpw,
                use_cropbox=use_cropbox,
                strict=strict,
                transparent=transparent,
                single_file=single_file,
                output_file=output_file,
                poppler_path=poppler_path,
                grayscale=grayscale,
                size=size,
                paths_only=paths_only,
                use_pdftocairo=use_pdftocairo,
            )
    finally:
        # Clean up the temp file even if the conversion raised.
        os.close(fh)
        os.remove(temp_filename)
def _build_command(
args,
output_folder,
first_page,
last_page,
fmt,
jpegopt,
output_file,
userpw,
use_cropbox,
transparent,
single_file,
grayscale,
size,
):
if use_cropbox:
args.append("-cropbox")
if transparent and fmt in TRANSPARENT_FILE_TYPES:
args.append("-transp")
if first_page is not None:
args.extend(["-f", str(first_page)])
if last_page is not None:
args.extend(["-l", str(last_page)])
if fmt not in ["pgm", "ppm"]:
args.append("-" + fmt)
if fmt in ["jpeg", "jpg"] and jpegopt:
args.extend(["-jpegopt", _parse_jpegopt(jpegopt)])
if single_file:
args.append("-singlefile")
if output_folder is not None:
args.append(os.path.join(output_folder, output_file))
if userpw is not None:
args.extend(["-upw", userpw])
if grayscale:
args.append("-gray")
if size is None:
pass
elif isinstance(size, tuple) and len(size) == 2:
if size[0] is not None:
args.extend(["-scale-to-x", str(int(size[0]))])
else:
args.extend(["-scale-to-x", str(-1)])
if size[1] is not None:
args.extend(["-scale-to-y", str(int(size[1]))])
else:
args.extend(["-scale-to-y", str(-1)])
elif isinstance(size, tuple) and len(size) == 1:
args.extend(["-scale-to", str(int(size[0]))])
elif isinstance(size, int) or isinstance(size, float):
args.extend(["-scale-to", str(int(size))])
else:
raise ValueError("Size {} is not a tuple or an integer")
return args
def _parse_format(fmt, grayscale=False):
fmt = fmt.lower()
if fmt[0] == ".":
fmt = fmt[1:]
if fmt in ("jpeg", "jpg"):
return "jpeg", "jpg", parse_buffer_to_jpeg, False
if fmt == "png":
return "png", "png", parse_buffer_to_png, False
if fmt in ("tif", "tiff"):
return "tiff", "tif", None, True
if fmt == "ppm" and grayscale:
return "pgm", "pgm", parse_buffer_to_pgm, False
return "ppm", "ppm", parse_buffer_to_ppm, False
def _parse_jpegopt(jpegopt):
parts = []
for k, v in jpegopt.items():
if v is True:
v = "y"
if v is False:
v = "n"
parts.append("{}={}".format(k, v))
return ",".join(parts)
def _get_command_path(command, poppler_path=None):
if platform.system() == "Windows":
command = command + ".exe"
if poppler_path is not None:
command = os.path.join(poppler_path, command)
return command
def _get_poppler_version(command, poppler_path=None):
    """Return poppler's minor version number (e.g. 85 for "0.85.0").

    Poppler prints its version banner on stderr; the second dotted
    component of the last word on the first line is parsed out.

    :param command: poppler binary to probe ("pdftoppm" or "pdftocairo")
    :param poppler_path: optional directory holding the poppler binaries
    :return: minor version as int, or 17 when the banner is unparsable
    """
    command = [_get_command_path(command, poppler_path), "-v"]
    env = os.environ.copy()
    if poppler_path is not None:
        # Make sure the bundled shared libraries are found by the child.
        env["LD_LIBRARY_PATH"] = poppler_path + ":" + env.get("LD_LIBRARY_PATH", "")
    proc = Popen(command, env=env, stdout=PIPE, stderr=PIPE)
    out, err = proc.communicate()
    try:
        # TODO: Make this more robust
        return int(
            err.decode("utf8", "ignore").split("\n")[0].split(" ")[-1].split(".")[1]
        )
    except (IndexError, ValueError):
        # BUG FIX: narrowed from a bare ``except`` so KeyboardInterrupt,
        # SystemExit and genuine bugs are no longer silently swallowed.
        # Lowest version that includes pdftocairo (2011)
        return 17
def pdfinfo_from_path(pdf_path, userpw=None, poppler_path=None):
    """Parse the output of poppler's ``pdfinfo`` for *pdf_path* into a dict.

    :param pdf_path: path of the PDF file on disk
    :param userpw: optional PDF user password (forwarded as ``-upw``)
    :param poppler_path: optional directory holding the poppler binaries
    :return: dict of pdfinfo fields; keys in PDFINFO_CONVERT_TO_INT
        ("Pages") are returned as int
    :raises PDFInfoNotInstalledError: if the binary cannot be spawned
    :raises PDFPageCountError: if no "Pages" entry is found or parsing fails
    """
    try:
        command = [_get_command_path("pdfinfo", poppler_path), pdf_path]
        if userpw is not None:
            command.extend(["-upw", userpw])
        # Add poppler path to LD_LIBRARY_PATH
        env = os.environ.copy()
        if poppler_path is not None:
            env["LD_LIBRARY_PATH"] = poppler_path + ":" + env.get("LD_LIBRARY_PATH", "")
        proc = Popen(command, env=env, stdout=PIPE, stderr=PIPE)
        out, err = proc.communicate()
        d = {}
        # Each line is "Key: value"; values containing ":" are re-joined.
        for field in out.decode("utf8", "ignore").split("\n"):
            sf = field.split(":")
            key, value = sf[0], ":".join(sf[1:])
            if key != "":
                d[key] = (
                    int(value.strip())
                    if key in PDFINFO_CONVERT_TO_INT
                    else value.strip()
                )
        if "Pages" not in d:
            # Route the "no page count" case into the ValueError handler.
            raise ValueError
        return d
    except OSError:
        # Spawning the process failed: poppler is absent or not executable.
        raise PDFInfoNotInstalledError(
            "Unable to get page count. Is poppler installed and in PATH?"
        )
    except ValueError:
        # ``err`` is guaranteed bound: ValueError can only arise after
        # communicate() has returned.
        raise PDFPageCountError(
            "Unable to get page count.\n%s" % err.decode("utf8", "ignore")
        )
def pdfinfo_from_bytes(pdf_file, userpw=None, poppler_path=None):
    """Get the pdfinfo fields of an in-memory PDF.

    Writes the bytes to a temporary file (pdfinfo only reads from disk) and
    delegates to :func:`pdfinfo_from_path`.

    :param pdf_file: raw bytes of the PDF document
    :param userpw: optional PDF user password (new, default None keeps the
        old behavior)
    :param poppler_path: optional directory holding the poppler binaries
        (new, default None keeps the old behavior)
    :return: dict of pdfinfo fields; "Pages" is converted to int
    """
    fh, temp_filename = tempfile.mkstemp()
    try:
        # Close the file before pdfinfo opens it; holding it open during the
        # subprocess read is unnecessary (and fragile on Windows).
        with open(temp_filename, "wb") as f:
            f.write(pdf_file)
            f.flush()
        return pdfinfo_from_path(
            temp_filename, userpw=userpw, poppler_path=poppler_path
        )
    finally:
        # Always release the handle and remove the temp file.
        os.close(fh)
        os.remove(temp_filename)
def _load_from_output_folder(
output_folder, output_file, ext, paths_only, in_memory=False
):
images = []
for f in sorted(os.listdir(output_folder)):
if f.startswith(output_file) and f.split(".")[-1] == ext:
if paths_only:
images.append(os.path.join(output_folder, f))
else:
images.append(Image.open(os.path.join(output_folder, f)))
if in_memory:
images[-1].load()
return images
| true | true |
f71c70ebaffb4cb67ea8865b24dfc0fdb55a9000 | 2,986 | py | Python | resources/test_data/honeycomb/pbb/pbb.py | preym17/csit | 3151c98618c78e3782e48bbe4d9c8f906c126f69 | [
"Apache-2.0"
] | null | null | null | resources/test_data/honeycomb/pbb/pbb.py | preym17/csit | 3151c98618c78e3782e48bbe4d9c8f906c126f69 | [
"Apache-2.0"
] | null | null | null | resources/test_data/honeycomb/pbb/pbb.py | preym17/csit | 3151c98618c78e3782e48bbe4d9c8f906c126f69 | [
"Apache-2.0"
] | null | null | null | # Copyright (c) 2016 Cisco and/or its affiliates.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test variables for provider backbone bridge test suite."""
# pylint: disable=invalid-name
# Add pbb sub interface
# Configuration data
cfg_pbb_sub_if_1 = {
"pbb-rewrite": {
"source-address": "aa:aa:aa:aa:aa:ab",
"destination-address": "bb:bb:bb:bb:bb:bc",
"b-vlan-tag-vlan-id": "2223",
"outer-tag": "16",
"i-tag-isid": "12",
"interface-operation": "translate-2-1"
}
}
# Modify pbb sub interface
# Configuration data
cfg_pbb_sub_if_1_mod = {
"pbb-rewrite": {
"source-address": "aa:aa:aa:aa:aa:ac",
"destination-address": "bb:bb:bb:bb:bb:bd",
"b-vlan-tag-vlan-id": "2224",
"outer-tag": "17",
"i-tag-isid": "13",
"interface-operation": "push-2"
}
}
# Wrong configuration data
# Wrong source-address
cfg_pbb_sub_if_ID = '5'
cfg_pbb_sub_if_wrong_src_addr = {
"pbb-rewrite": {
"source-address": "aa:aa:aa:aa:aa:ag",
"destination-address": "bb:bb:bb:bb:bb:ce",
"b-vlan-tag-vlan-id": "2226",
"outer-tag": "19",
"i-tag-isid": "15",
"interface-operation": "pop-2"
}
}
# Wrong destination-address
cfg_pbb_sub_if_wrong_dst_addr = {
"pbb-rewrite": {
"source-address": "aa:aa:aa:aa:aa:ae",
"destination-address": "bb:bb:bb:bb:bb:cg",
"b-vlan-tag-vlan-id": "2226",
"outer-tag": "19",
"i-tag-isid": "15",
"interface-operation": "pop-2"
}
}
# Wrong b-vlan-tag-vlan-id
cfg_pbb_sub_if_wrong_vlan_tag = {
"pbb-rewrite": {
"source-address": "aa:aa:aa:aa:aa:ae",
"destination-address": "bb:bb:bb:bb:bb:ce",
"b-vlan-tag-vlan-id": "123456789",
"outer-tag": "19",
"i-tag-isid": "15",
"interface-operation": "pop-2"
}
}
# Wrong i-tag-isid
cfg_pbb_sub_if_wrong_i_tag = {
"pbb-rewrite": {
"source-address": "aa:aa:aa:aa:aa:ae",
"destination-address": "bb:bb:bb:bb:bb:ce",
"b-vlan-tag-vlan-id": "2226",
"outer-tag": "19",
"i-tag-isid": "167772152345",
"interface-operation": "pop-2"
}
}
# b-vlan-tag-vlan-id is missing
cfg_pbb_sub_if_no_vlan_tag = {
"pbb-rewrite": {
"source-address": "aa:aa:aa:aa:aa:ae",
"destination-address": "bb:bb:bb:bb:bb:ce",
"outer-tag": "19",
"i-tag-isid": "15",
"interface-operation": "pop-2"
}
}
| 28.711538 | 74 | 0.598794 |
cfg_pbb_sub_if_1 = {
"pbb-rewrite": {
"source-address": "aa:aa:aa:aa:aa:ab",
"destination-address": "bb:bb:bb:bb:bb:bc",
"b-vlan-tag-vlan-id": "2223",
"outer-tag": "16",
"i-tag-isid": "12",
"interface-operation": "translate-2-1"
}
}
cfg_pbb_sub_if_1_mod = {
"pbb-rewrite": {
"source-address": "aa:aa:aa:aa:aa:ac",
"destination-address": "bb:bb:bb:bb:bb:bd",
"b-vlan-tag-vlan-id": "2224",
"outer-tag": "17",
"i-tag-isid": "13",
"interface-operation": "push-2"
}
}
cfg_pbb_sub_if_ID = '5'
cfg_pbb_sub_if_wrong_src_addr = {
"pbb-rewrite": {
"source-address": "aa:aa:aa:aa:aa:ag",
"destination-address": "bb:bb:bb:bb:bb:ce",
"b-vlan-tag-vlan-id": "2226",
"outer-tag": "19",
"i-tag-isid": "15",
"interface-operation": "pop-2"
}
}
cfg_pbb_sub_if_wrong_dst_addr = {
"pbb-rewrite": {
"source-address": "aa:aa:aa:aa:aa:ae",
"destination-address": "bb:bb:bb:bb:bb:cg",
"b-vlan-tag-vlan-id": "2226",
"outer-tag": "19",
"i-tag-isid": "15",
"interface-operation": "pop-2"
}
}
cfg_pbb_sub_if_wrong_vlan_tag = {
"pbb-rewrite": {
"source-address": "aa:aa:aa:aa:aa:ae",
"destination-address": "bb:bb:bb:bb:bb:ce",
"b-vlan-tag-vlan-id": "123456789",
"outer-tag": "19",
"i-tag-isid": "15",
"interface-operation": "pop-2"
}
}
cfg_pbb_sub_if_wrong_i_tag = {
"pbb-rewrite": {
"source-address": "aa:aa:aa:aa:aa:ae",
"destination-address": "bb:bb:bb:bb:bb:ce",
"b-vlan-tag-vlan-id": "2226",
"outer-tag": "19",
"i-tag-isid": "167772152345",
"interface-operation": "pop-2"
}
}
cfg_pbb_sub_if_no_vlan_tag = {
"pbb-rewrite": {
"source-address": "aa:aa:aa:aa:aa:ae",
"destination-address": "bb:bb:bb:bb:bb:ce",
"outer-tag": "19",
"i-tag-isid": "15",
"interface-operation": "pop-2"
}
}
| true | true |
f71c717d6fee0d1489bbe83962abdd173aae3304 | 668 | py | Python | setup.py | ishine/fastHan | 09550a750bb06b89b81769b8786a7eb3f8ca5713 | [
"Apache-2.0"
] | 635 | 2020-06-11T12:32:53.000Z | 2022-03-31T09:31:32.000Z | setup.py | ishine/fastHan | 09550a750bb06b89b81769b8786a7eb3f8ca5713 | [
"Apache-2.0"
] | 37 | 2020-06-12T10:07:47.000Z | 2022-03-10T02:46:52.000Z | setup.py | ishine/fastHan | 09550a750bb06b89b81769b8786a7eb3f8ca5713 | [
"Apache-2.0"
] | 77 | 2020-06-11T17:08:17.000Z | 2022-03-30T05:40:10.000Z | #!/usr/bin/env python
# coding=utf-8
from setuptools import setup, find_packages
# Read the long description and the dependency pins from the repository
# root; both files must sit next to setup.py.
with open('README.md', encoding='utf-8') as f:
    readme = f.read()
with open('requirements.txt', encoding='utf-8') as f:
    reqs = f.read()
# Discover every shippable package; printed for build-log visibility.
pkgs = [p for p in find_packages()]
print(pkgs)
setup(
    name='fastHan',
    version='1.7',
    url='https://github.com/fastnlp/fastHan',
    description=(
        '使用深度学习联合模型,解决中文分词、词性标注、依存分析、命名实体识别任务。'
    ),
    long_description=readme,
    long_description_content_type='text/markdown',
    author='耿志超',
    license='Apache License',
    python_requires='>=3.6',
    packages=pkgs,
    # One requirement per line in requirements.txt.
    install_requires=reqs.strip().split('\n'),
)
| 21.548387 | 53 | 0.652695 |
from setuptools import setup, find_packages
with open('README.md', encoding='utf-8') as f:
readme = f.read()
with open('requirements.txt', encoding='utf-8') as f:
reqs = f.read()
pkgs = [p for p in find_packages()]
print(pkgs)
setup(
name='fastHan',
version='1.7',
url='https://github.com/fastnlp/fastHan',
description=(
'使用深度学习联合模型,解决中文分词、词性标注、依存分析、命名实体识别任务。'
),
long_description=readme,
long_description_content_type='text/markdown',
author='耿志超',
license='Apache License',
python_requires='>=3.6',
packages=pkgs,
install_requires=reqs.strip().split('\n'),
)
| true | true |
f71c719a80b561157aaf6bc9411436cc6e44d60f | 3,425 | py | Python | minemeld/ft/vt.py | zul126/minemeld-core | 2eb9b9bfd7654aee57aabd5fb280d4e89a438daf | [
"Apache-2.0"
] | 147 | 2016-07-22T18:15:49.000Z | 2022-03-26T23:32:44.000Z | minemeld/ft/vt.py | zul126/minemeld-core | 2eb9b9bfd7654aee57aabd5fb280d4e89a438daf | [
"Apache-2.0"
] | 167 | 2016-07-27T07:02:25.000Z | 2021-12-16T16:26:52.000Z | minemeld/ft/vt.py | zul126/minemeld-core | 2eb9b9bfd7654aee57aabd5fb280d4e89a438daf | [
"Apache-2.0"
] | 112 | 2016-07-22T07:14:29.000Z | 2022-03-24T18:43:12.000Z | # Copyright 2016 Palo Alto Networks, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
This module implements:
- minemeld.ft.vt.Notifications, the Miner node for VirusTotal Notifications
feed
"""
import logging
import os
import yaml
from . import json
LOG = logging.getLogger(__name__)
_VT_NOTIFICATIONS = 'https://www.virustotal.com/intelligence/hunting/notifications-feed/?key='
class Notifications(json.SimpleJSON):
    """Miner node polling the VirusTotal Intelligence notifications feed.

    Extends minemeld's SimpleJSON miner with a side-config file holding the
    VT API key; the feed URL is only set once a key is available.
    NOTE: ``item.iteritems()`` below means this class targets Python 2.
    """
    def __init__(self, name, chassis, config):
        super(Notifications, self).__init__(name, chassis, config)
        # Loaded from the side config; None disables polling.
        self.api_key = None
    def configure(self):
        """Force the SimpleJSON settings this feed needs, then load the side config."""
        # url stays None until an API key is known (see _load_side_config).
        self.config['url'] = None
        self.config['extractor'] = 'notifications'
        self.config['prefix'] = 'vt'
        super(Notifications, self).configure()
        self.side_config_path = self.config.get('side_config', None)
        if self.side_config_path is None:
            self.side_config_path = os.path.join(
                os.environ['MM_CONFIG_DIR'],
                '%s_side_config.yml' % self.name
            )
        self._load_side_config()
    def _load_side_config(self):
        """Read the YAML side config and derive the feed URL from the API key."""
        try:
            with open(self.side_config_path, 'r') as f:
                sconfig = yaml.safe_load(f)
        except Exception as e:
            # Best effort: a missing or broken side config only logs an error.
            LOG.error('%s - Error loading side config: %s', self.name, str(e))
            return
        self.api_key = sconfig.get('api_key', None)
        if self.api_key is not None:
            LOG.info('%s - api key set', self.name)
            self.url = _VT_NOTIFICATIONS + self.api_key
    def _process_item(self, item):
        """Split one notification into (indicator, attributes) pairs, one per hash type."""
        result = []
        for htype in ['md5', 'sha256', 'sha1']:
            # Prefix every attribute with "vt_", then pull out the hash that
            # becomes the indicator itself.
            value = {self.prefix+'_'+k: v for k, v in item.iteritems()}
            indicator = value.pop(self.prefix+'_'+htype, None)
            value['type'] = htype
            if indicator is not None:
                result.append([indicator, value])
        return result
    def _build_iterator(self, now):
        """Refuse to poll until an API key has been configured."""
        if self.api_key is None:
            LOG.info('%s - API key not set', self.name)
            raise RuntimeError(
                '%s - API Key not set' % self.name
            )
        return super(Notifications, self)._build_iterator(now)
    def hup(self, source=None):
        """SIGHUP handler: re-read the side config before delegating."""
        LOG.info('%s - hup received, reload side config', self.name)
        self._load_side_config()
        super(Notifications, self).hup(source=source)
    @staticmethod
    def gc(name, config=None):
        """Garbage-collect node state, including the node's side config file."""
        json.SimpleJSON.gc(name, config=config)
        side_config_path = None
        if config is not None:
            side_config_path = config.get('side_config', None)
        if side_config_path is None:
            side_config_path = os.path.join(
                os.environ['MM_CONFIG_DIR'],
                '{}_side_config.yml'.format(name)
            )
        try:
            os.remove(side_config_path)
        except:
            # Intentionally best-effort: the file may simply not exist.
            pass
| 30.580357 | 94 | 0.616058 |
import logging
import os
import yaml
from . import json
LOG = logging.getLogger(__name__)
_VT_NOTIFICATIONS = 'https://www.virustotal.com/intelligence/hunting/notifications-feed/?key='
class Notifications(json.SimpleJSON):
def __init__(self, name, chassis, config):
super(Notifications, self).__init__(name, chassis, config)
self.api_key = None
def configure(self):
self.config['url'] = None
self.config['extractor'] = 'notifications'
self.config['prefix'] = 'vt'
super(Notifications, self).configure()
self.side_config_path = self.config.get('side_config', None)
if self.side_config_path is None:
self.side_config_path = os.path.join(
os.environ['MM_CONFIG_DIR'],
'%s_side_config.yml' % self.name
)
self._load_side_config()
def _load_side_config(self):
try:
with open(self.side_config_path, 'r') as f:
sconfig = yaml.safe_load(f)
except Exception as e:
LOG.error('%s - Error loading side config: %s', self.name, str(e))
return
self.api_key = sconfig.get('api_key', None)
if self.api_key is not None:
LOG.info('%s - api key set', self.name)
self.url = _VT_NOTIFICATIONS + self.api_key
def _process_item(self, item):
result = []
for htype in ['md5', 'sha256', 'sha1']:
value = {self.prefix+'_'+k: v for k, v in item.iteritems()}
indicator = value.pop(self.prefix+'_'+htype, None)
value['type'] = htype
if indicator is not None:
result.append([indicator, value])
return result
def _build_iterator(self, now):
if self.api_key is None:
LOG.info('%s - API key not set', self.name)
raise RuntimeError(
'%s - API Key not set' % self.name
)
return super(Notifications, self)._build_iterator(now)
def hup(self, source=None):
LOG.info('%s - hup received, reload side config', self.name)
self._load_side_config()
super(Notifications, self).hup(source=source)
@staticmethod
def gc(name, config=None):
json.SimpleJSON.gc(name, config=config)
side_config_path = None
if config is not None:
side_config_path = config.get('side_config', None)
if side_config_path is None:
side_config_path = os.path.join(
os.environ['MM_CONFIG_DIR'],
'{}_side_config.yml'.format(name)
)
try:
os.remove(side_config_path)
except:
pass
| true | true |
f71c722935297413c44452739b1c5efe80dcce1c | 5,736 | py | Python | tests/test_ac.py | knovichikhin/pyemv | 4a07cb550f27618822b530c6aa954e0820020ae7 | [
"MIT"
] | 14 | 2020-11-01T11:44:41.000Z | 2022-03-24T15:53:23.000Z | tests/test_ac.py | manoutoftime/pyemv | 4a07cb550f27618822b530c6aa954e0820020ae7 | [
"MIT"
] | 3 | 2021-06-18T01:36:00.000Z | 2021-10-17T02:09:50.000Z | tests/test_ac.py | manoutoftime/pyemv | 4a07cb550f27618822b530c6aa954e0820020ae7 | [
"MIT"
] | 8 | 2020-10-09T20:23:39.000Z | 2022-03-31T00:56:47.000Z | import pytest
from pyemv import ac
def test_generate_ac_exception() -> None:
    """Session-key length and padding-type validation in generate_ac."""
    data = bytes.fromhex("12345678901214")
    # Single- and triple-length DES keys must both be rejected.
    for bad_key_hex in (
        "AAAAAAAAAAAAAAAA",
        "AAAAAAAAAAAAAAAABBBBBBBBBBBBBBBBCCCCCCCCCCCCCCCC",
    ):
        with pytest.raises(
            ValueError,
            match="Session Key must be a double length DES key",
        ):
            ac.generate_ac(sk_ac=bytes.fromhex(bad_key_hex), data=data)
    # padding_type must be a PaddingType enum member, not an arbitrary object.
    with pytest.raises(
        TypeError,
        match="Padding type must be PaddingType Enum, not dict",
    ):
        ac.generate_ac(
            sk_ac=bytes.fromhex("AAAAAAAAAAAAAAAABBBBBBBBBBBBBBBB"),
            data=data,
            padding_type={},  # type: ignore
        )
def test_generate_arpc_1_exception() -> None:
    """Input-length validation in generate_arpc_1."""
    sk = bytes.fromhex("AAAAAAAAAAAAAAAABBBBBBBBBBBBBBBB")
    arqc = bytes.fromhex("1234567890123456")
    rc = bytes.fromhex("0000")
    # Session key must be exactly double length (16 bytes).
    for bad_sk_hex in (
        "AAAAAAAAAAAAAAAA",
        "AAAAAAAAAAAAAAAABBBBBBBBBBBBBBBBCCCCCCCCCCCCCCCC",
    ):
        with pytest.raises(
            ValueError,
            match="Session Key must be a double length DES key",
        ):
            ac.generate_arpc_1(
                sk_ac=bytes.fromhex(bad_sk_hex),
                arqc=bytes.fromhex("12345678"),
                arpc_rc=rc,
            )
    # ARQC must be exactly 8 bytes (shorter and longer both fail).
    for bad_arqc_hex in ("12345678", "1234567890ABCDEF12"):
        with pytest.raises(
            ValueError,
            match="ARQC must be 8 bytes long",
        ):
            ac.generate_arpc_1(sk_ac=sk, arqc=bytes.fromhex(bad_arqc_hex), arpc_rc=rc)
    # ARPC response code must be exactly 2 bytes.
    for bad_rc_hex in ("00", "001122"):
        with pytest.raises(
            ValueError,
            match="ARPC-RC must be 2 bytes long",
        ):
            ac.generate_arpc_1(sk_ac=sk, arqc=arqc, arpc_rc=bytes.fromhex(bad_rc_hex))
def test_generate_arpc_2_exception() -> None:
    """Input-length validation in generate_arpc_2."""
    sk = bytes.fromhex("AAAAAAAAAAAAAAAABBBBBBBBBBBBBBBB")
    csu = bytes.fromhex("12345678")
    pad = bytes.fromhex("1234567890123456")
    # Session key must be exactly double length (16 bytes).
    for bad_sk_hex in (
        "AAAAAAAAAAAAAAAA",
        "AAAAAAAAAAAAAAAABBBBBBBBBBBBBBBBCCCCCCCCCCCCCCCC",
    ):
        with pytest.raises(
            ValueError,
            match="Session Key must be a double length DES key",
        ):
            ac.generate_arpc_2(
                sk_ac=bytes.fromhex(bad_sk_hex),
                arqc=bytes.fromhex("12345678901214"),
                csu=csu,
                prop_auth_data=pad,
            )
    # ARQC must be exactly 8 bytes (shorter and longer both fail).
    for bad_arqc_hex in ("12345678901214", "1234567890ABCDEF12"):
        with pytest.raises(
            ValueError,
            match="ARQC must be 8 bytes long",
        ):
            ac.generate_arpc_2(
                sk_ac=sk,
                arqc=bytes.fromhex(bad_arqc_hex),
                csu=csu,
                prop_auth_data=pad,
            )
    # CSU must be exactly 4 bytes.
    for bad_csu_hex in ("123456", "1234567890"):
        with pytest.raises(
            ValueError,
            match="CSU must be 4 bytes long",
        ):
            ac.generate_arpc_2(
                sk_ac=sk,
                arqc=bytes.fromhex("1234567890121456"),
                csu=bytes.fromhex(bad_csu_hex),
                prop_auth_data=pad,
            )
    # Proprietary authentication data may be at most 8 bytes.
    with pytest.raises(
        ValueError,
        match="Proprietary Authentication Data must be 0-8 bytes long",
    ):
        ac.generate_arpc_2(
            sk_ac=sk,
            arqc=bytes.fromhex("1234567890121456"),
            csu=csu,
            prop_auth_data=bytes.fromhex("123456789012345678"),
        )
| 30.031414 | 84 | 0.591702 | import pytest
from pyemv import ac
def test_generate_ac_exception() -> None:
with pytest.raises(
ValueError,
match="Session Key must be a double length DES key",
):
ac.generate_ac(
sk_ac=bytes.fromhex("AAAAAAAAAAAAAAAA"),
data=bytes.fromhex("12345678901214"),
)
with pytest.raises(
ValueError,
match="Session Key must be a double length DES key",
):
ac.generate_ac(
sk_ac=bytes.fromhex("AAAAAAAAAAAAAAAABBBBBBBBBBBBBBBBCCCCCCCCCCCCCCCC"),
data=bytes.fromhex("12345678901214"),
)
with pytest.raises(
TypeError,
match="Padding type must be PaddingType Enum, not dict",
):
ac.generate_ac(
sk_ac=bytes.fromhex("AAAAAAAAAAAAAAAABBBBBBBBBBBBBBBB"),
data=bytes.fromhex("12345678901214"),
padding_type={},
)
def test_generate_arpc_1_exception() -> None:
with pytest.raises(
ValueError,
match="Session Key must be a double length DES key",
):
ac.generate_arpc_1(
sk_ac=bytes.fromhex("AAAAAAAAAAAAAAAA"),
arqc=bytes.fromhex("12345678"),
arpc_rc=bytes.fromhex("0000"),
)
with pytest.raises(
ValueError,
match="Session Key must be a double length DES key",
):
ac.generate_arpc_1(
sk_ac=bytes.fromhex("AAAAAAAAAAAAAAAABBBBBBBBBBBBBBBBCCCCCCCCCCCCCCCC"),
arqc=bytes.fromhex("12345678"),
arpc_rc=bytes.fromhex("0000"),
)
with pytest.raises(
ValueError,
match="ARQC must be 8 bytes long",
):
ac.generate_arpc_1(
sk_ac=bytes.fromhex("AAAAAAAAAAAAAAAABBBBBBBBBBBBBBBB"),
arqc=bytes.fromhex("12345678"),
arpc_rc=bytes.fromhex("0000"),
)
with pytest.raises(
ValueError,
match="ARQC must be 8 bytes long",
):
ac.generate_arpc_1(
sk_ac=bytes.fromhex("AAAAAAAAAAAAAAAABBBBBBBBBBBBBBBB"),
arqc=bytes.fromhex("1234567890ABCDEF12"),
arpc_rc=bytes.fromhex("0000"),
)
with pytest.raises(
ValueError,
match="ARPC-RC must be 2 bytes long",
):
ac.generate_arpc_1(
sk_ac=bytes.fromhex("AAAAAAAAAAAAAAAABBBBBBBBBBBBBBBB"),
arqc=bytes.fromhex("1234567890123456"),
arpc_rc=bytes.fromhex("00"),
)
with pytest.raises(
ValueError,
match="ARPC-RC must be 2 bytes long",
):
ac.generate_arpc_1(
sk_ac=bytes.fromhex("AAAAAAAAAAAAAAAABBBBBBBBBBBBBBBB"),
arqc=bytes.fromhex("1234567890123456"),
arpc_rc=bytes.fromhex("001122"),
)
def test_generate_arpc_2_exception() -> None:
with pytest.raises(
ValueError,
match="Session Key must be a double length DES key",
):
ac.generate_arpc_2(
sk_ac=bytes.fromhex("AAAAAAAAAAAAAAAA"),
arqc=bytes.fromhex("12345678901214"),
csu=bytes.fromhex("12345678"),
prop_auth_data=bytes.fromhex("1234567890123456"),
)
with pytest.raises(
ValueError,
match="Session Key must be a double length DES key",
):
ac.generate_arpc_2(
sk_ac=bytes.fromhex("AAAAAAAAAAAAAAAABBBBBBBBBBBBBBBBCCCCCCCCCCCCCCCC"),
arqc=bytes.fromhex("12345678901214"),
csu=bytes.fromhex("12345678"),
prop_auth_data=bytes.fromhex("1234567890123456"),
)
with pytest.raises(
ValueError,
match="ARQC must be 8 bytes long",
):
ac.generate_arpc_2(
sk_ac=bytes.fromhex("AAAAAAAAAAAAAAAABBBBBBBBBBBBBBBB"),
arqc=bytes.fromhex("12345678901214"),
csu=bytes.fromhex("12345678"),
prop_auth_data=bytes.fromhex("1234567890123456"),
)
with pytest.raises(
ValueError,
match="ARQC must be 8 bytes long",
):
ac.generate_arpc_2(
sk_ac=bytes.fromhex("AAAAAAAAAAAAAAAABBBBBBBBBBBBBBBB"),
arqc=bytes.fromhex("1234567890ABCDEF12"),
csu=bytes.fromhex("12345678"),
prop_auth_data=bytes.fromhex("1234567890123456"),
)
with pytest.raises(
ValueError,
match="CSU must be 4 bytes long",
):
ac.generate_arpc_2(
sk_ac=bytes.fromhex("AAAAAAAAAAAAAAAABBBBBBBBBBBBBBBB"),
arqc=bytes.fromhex("1234567890121456"),
csu=bytes.fromhex("123456"),
prop_auth_data=bytes.fromhex("1234567890123456"),
)
with pytest.raises(
ValueError,
match="CSU must be 4 bytes long",
):
ac.generate_arpc_2(
sk_ac=bytes.fromhex("AAAAAAAAAAAAAAAABBBBBBBBBBBBBBBB"),
arqc=bytes.fromhex("1234567890121456"),
csu=bytes.fromhex("1234567890"),
prop_auth_data=bytes.fromhex("1234567890123456"),
)
with pytest.raises(
ValueError,
match="Proprietary Authentication Data must be 0-8 bytes long",
):
ac.generate_arpc_2(
sk_ac=bytes.fromhex("AAAAAAAAAAAAAAAABBBBBBBBBBBBBBBB"),
arqc=bytes.fromhex("1234567890121456"),
csu=bytes.fromhex("12345678"),
prop_auth_data=bytes.fromhex("123456789012345678"),
)
| true | true |
f71c735f5bbe363994606cb49a6742b243a951ed | 2,203 | py | Python | DeepJIT/train.py | ZZR0/ISSTA21-JIT-DP | c2916f7c3b1d235ff2858220886d6a7da068bf8a | [
"MIT"
] | 14 | 2021-07-12T07:29:57.000Z | 2022-01-18T07:01:46.000Z | DeepJIT/train.py | ZZR0/ISSTA21-JIT-DP | c2916f7c3b1d235ff2858220886d6a7da068bf8a | [
"MIT"
] | null | null | null | DeepJIT/train.py | ZZR0/ISSTA21-JIT-DP | c2916f7c3b1d235ff2858220886d6a7da068bf8a | [
"MIT"
] | 7 | 2021-05-19T21:51:36.000Z | 2022-03-29T13:57:54.000Z | from model import DeepJIT
import torch
from tqdm import tqdm
from utils import mini_batches_train, save
import torch.nn as nn
import os, datetime
def train_model(data, params):
    """Train the DeepJIT defect-prediction model and checkpoint every epoch.

    :param data: tuple of (padded commit msgs, padded code, labels,
        msg vocabulary dict, code vocabulary dict)
    :param params: argparse-style namespace with hyper-parameters; it is
        mutated in place (cuda flag, filter_sizes, vocab sizes, class_num,
        device).
    """
    data_pad_msg, data_pad_code, data_labels, dict_msg, dict_code = data
    # Set up parameters derived from the data / environment.
    params.cuda = (not params.no_cuda) and torch.cuda.is_available()
    del params.no_cuda
    params.filter_sizes = [int(k) for k in params.filter_sizes.split(',')]
    params.vocab_msg, params.vocab_code = len(dict_msg), len(dict_code)
    # Flat label vector -> binary classification, otherwise multi-label.
    if len(data_labels.shape) == 1:
        params.class_num = 1
    else:
        params.class_num = data_labels.shape[1]
    params.device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
    # Create and train the defect model.
    model = DeepJIT(args=params)
    if torch.cuda.is_available():
        model = model.cuda()
    optimizer = torch.optim.Adam(model.parameters(), lr=params.l2_reg_lambda)
    criterion = nn.BCELoss()
    for epoch in range(1, params.num_epochs + 1):
        total_loss = 0
        # Build the mini-batches for this epoch.
        batches = mini_batches_train(X_msg=data_pad_msg, X_code=data_pad_code, Y=data_labels, mini_batch_size=params.batch_size)
        for batch in tqdm(batches):
            pad_msg, pad_code, labels = batch
            if torch.cuda.is_available():
                pad_msg, pad_code, labels = torch.tensor(pad_msg).cuda(), torch.tensor(
                    pad_code).cuda(), torch.cuda.FloatTensor(labels)
            else:
                pad_msg, pad_code, labels = torch.tensor(pad_msg).long(), torch.tensor(pad_code).long(), torch.tensor(
                    labels).float()
            optimizer.zero_grad()
            predict = model.forward(pad_msg, pad_code)
            loss = criterion(predict, labels)
            # BUG FIX: accumulate the Python float, not the tensor itself —
            # summing loss tensors keeps every batch's autograd graph alive
            # and leaks GPU/CPU memory over the epoch.
            total_loss += loss.item()
            loss.backward()
            optimizer.step()
        print('Epoch %i / %i -- Total loss: %f' % (epoch, params.num_epochs, total_loss))
        # Checkpoint after every epoch.
        save(model, params.save_dir, 'epoch', epoch)
| 40.054545 | 128 | 0.635951 | from model import DeepJIT
import torch
from tqdm import tqdm
from utils import mini_batches_train, save
import torch.nn as nn
import os, datetime
def train_model(data, params):
    """Train a DeepJIT defect-prediction model on padded message/code batches.

    data:   tuple of (padded commit messages, padded code changes, labels,
            message vocabulary dict, code vocabulary dict).
    params: argparse-style namespace, mutated in place (cuda flag, filter
            sizes, vocab sizes, class count, device) before model creation.
            A checkpoint is written to ``params.save_dir`` after every epoch.
    """
    data_pad_msg, data_pad_code, data_labels, dict_msg, dict_code = data

    # Set up parameters on the shared namespace.
    params.cuda = (not params.no_cuda) and torch.cuda.is_available()
    del params.no_cuda
    params.filter_sizes = [int(k) for k in params.filter_sizes.split(',')]
    params.vocab_msg, params.vocab_code = len(dict_msg), len(dict_code)

    # Binary task when labels are a flat vector, otherwise one output unit
    # per label column.
    if len(data_labels.shape) == 1:
        params.class_num = 1
    else:
        params.class_num = data_labels.shape[1]
    params.device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')

    # Create and train the defect model.
    model = DeepJIT(args=params)
    if torch.cuda.is_available():
        model = model.cuda()
    # NOTE(review): the learning rate comes from ``l2_reg_lambda`` -- looks
    # like a misnamed CLI flag; confirm against the argument parser.
    optimizer = torch.optim.Adam(model.parameters(), lr=params.l2_reg_lambda)
    criterion = nn.BCELoss()

    for epoch in range(1, params.num_epochs + 1):
        total_loss = 0
        # Build mini-batches for this epoch.
        batches = mini_batches_train(X_msg=data_pad_msg, X_code=data_pad_code,
                                     Y=data_labels, mini_batch_size=params.batch_size)
        for i, batch in enumerate(tqdm(batches)):
            pad_msg, pad_code, labels = batch
            if torch.cuda.is_available():
                pad_msg, pad_code, labels = torch.tensor(pad_msg).cuda(), torch.tensor(
                    pad_code).cuda(), torch.cuda.FloatTensor(labels)
            else:
                pad_msg, pad_code, labels = torch.tensor(pad_msg).long(), torch.tensor(pad_code).long(), torch.tensor(
                    labels).float()
            optimizer.zero_grad()
            # Call the model, not .forward(), so registered hooks run.
            predict = model(pad_msg, pad_code)
            loss = criterion(predict, labels)
            # .item() detaches the scalar; accumulating the tensor itself
            # would keep every batch's autograd graph alive all epoch.
            total_loss += loss.item()
            loss.backward()
            optimizer.step()

        print('Epoch %i / %i -- Total loss: %f' % (epoch, params.num_epochs, total_loss))
        save(model, params.save_dir, 'epoch', epoch)
| true | true |
f71c73b92c22513c6d322953955170aa4fd2838a | 524 | py | Python | BackEnd/testModel/mymodel/migrations/0012_auto_20180603_2214.py | WindyMen/BackEnd | d3e33b1b57734fcefda494793ed940e5b079c36b | [
"Apache-2.0"
] | 1 | 2018-06-28T02:35:15.000Z | 2018-06-28T02:35:15.000Z | BackEnd/testModel/mymodel/migrations/0012_auto_20180603_2214.py | WindyMen/BackEnd | d3e33b1b57734fcefda494793ed940e5b079c36b | [
"Apache-2.0"
] | null | null | null | BackEnd/testModel/mymodel/migrations/0012_auto_20180603_2214.py | WindyMen/BackEnd | d3e33b1b57734fcefda494793ed940e5b079c36b | [
"Apache-2.0"
] | null | null | null | # Generated by Django 2.0.3 on 2018-06-03 14:14
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated schema migration: alters Room.owner so the column is
    # nullable and its reverse accessor is 'owner_rooms'.
    dependencies = [
        # Must be applied after the previous mymodel migration.
        ('mymodel', '0011_auto_20180603_2208'),
    ]
    operations = [
        migrations.AlterField(
            model_name='room',
            name='owner',
            # null=True: a room may exist without an owner; deleting the
            # referenced User cascades to the row (CASCADE).
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='owner_rooms', to='mymodel.User'),
        ),
    ]
| 26.2 | 140 | 0.624046 |
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated schema migration: alters Room.owner so the column is
    # nullable and its reverse accessor is 'owner_rooms'.
    dependencies = [
        # Must be applied after the previous mymodel migration.
        ('mymodel', '0011_auto_20180603_2208'),
    ]
    operations = [
        migrations.AlterField(
            model_name='room',
            name='owner',
            # null=True: a room may exist without an owner; deleting the
            # referenced User cascades to the row (CASCADE).
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='owner_rooms', to='mymodel.User'),
        ),
    ]
| true | true |
f71c73d0a50c3eaf71f966a8af555fc36e37ec03 | 1,026 | py | Python | sortosm.py | TimSC/osm-to-gps-map | cedae6752b16e6f3e02a75f0a0ef784fd70298bf | [
"Unlicense"
] | 1 | 2020-06-15T02:22:08.000Z | 2020-06-15T02:22:08.000Z | sortosm.py | TimSC/osm-to-gps-map | cedae6752b16e6f3e02a75f0a0ef784fd70298bf | [
"Unlicense"
] | null | null | null | sortosm.py | TimSC/osm-to-gps-map | cedae6752b16e6f3e02a75f0a0ef784fd70298bf | [
"Unlicense"
] | null | null | null | import xml.etree.ElementTree as ET
import bz2, sys
def SortOsm(inFina, outFina):
    """Sort the elements of a bzip2-compressed OSM XML file by numeric id.

    Reads *inFina*, groups its child elements by tag, and writes *outFina*
    with nodes first, then ways, then relations, each ordered by id.
    Elements without an 'id' attribute are dropped, as in the original.
    """
    with bz2.BZ2File(inFina) as fi:
        root = ET.fromstring(fi.read())

    # Group elements by tag, indexed by integer id.
    objDict = {}
    for obj in root:
        if 'id' in obj.attrib:
            objDict.setdefault(obj.tag, {})[int(obj.attrib['id'])] = obj

    outRoot = ET.Element("osm")
    outRoot.attrib = root.attrib
    outTree = ET.ElementTree(outRoot)

    # sorted() works on both Python 2 and 3; dict views have no .sort(),
    # so the original keys()/.sort() idiom crashed on Python 3.
    for tag in ('node', 'way', 'relation'):
        byId = objDict.get(tag, {})
        for i in sorted(byId):
            outRoot.append(byId[i])

    # 'with' guarantees the BZ2 stream is flushed and closed (the original
    # never closed the output file).
    with bz2.BZ2File(outFina, "w") as fiOut:
        outTree.write(fiOut, "utf-8")

if __name__ == "__main__":
    SortOsm(sys.argv[1], sys.argv[2])
| 20.52 | 49 | 0.645224 | import xml.etree.ElementTree as ET
import bz2, sys
def SortOsm(inFina, outFina):
fi = bz2.BZ2File(inFina)
root = ET.fromstring(fi.read())
fi.close()
objDict = {}
for obj in root:
if 'id' in obj.attrib:
i = int(obj.attrib['id'])
if obj.tag not in objDict:
objDict[obj.tag] = {}
objDict[obj.tag][i] = obj
outRoot = ET.Element("osm")
outTree = ET.ElementTree(outRoot)
outRoot.attrib = root.attrib
if 'node' in objDict:
keys = objDict['node'].keys()
keys.sort()
for i in keys:
outRoot.append(objDict['node'][i])
if 'way' in objDict:
keys = objDict['way'].keys()
keys.sort()
for i in keys:
outRoot.append(objDict['way'][i])
if 'relation' in objDict:
keys = objDict['relation'].keys()
keys.sort()
for i in keys:
outRoot.append(objDict['relation'][i])
fiOut = bz2.BZ2File(outFina,"w")
outTree.write(fiOut,"utf-8")
if __name__=="__main__":
SortOsm(sys.argv[1], sys.argv[2])
| true | true |
f71c7496c320b6938b1d6b9ee7d678add526adf7 | 4,780 | py | Python | salt/pillar/libvirt.py | hvnsweeting/salt | abc9d3a0b51e6f5c4738cf71c221daf8b46fddcf | [
"Apache-2.0"
] | 2 | 2015-09-21T14:13:30.000Z | 2016-02-12T11:33:46.000Z | salt/pillar/libvirt.py | hvnsweeting/salt | abc9d3a0b51e6f5c4738cf71c221daf8b46fddcf | [
"Apache-2.0"
] | null | null | null | salt/pillar/libvirt.py | hvnsweeting/salt | abc9d3a0b51e6f5c4738cf71c221daf8b46fddcf | [
"Apache-2.0"
] | 2 | 2017-01-05T16:14:59.000Z | 2019-01-31T23:15:25.000Z | # -*- coding: utf-8 -*-
'''
Load up the libvirt keys into Pillar for a given minion if said keys have been generated using the libvirt key runner
'''
from __future__ import absolute_import
# Don't "fix" the above docstring to put it on two lines, as the sphinx
# autosummary pulls only the first line for its description.
# Import python libs
import os
import subprocess
# Import salt libs
import salt.utils
def __virtual__():
    # Only load this external pillar when the certtool binary exists.
    has_certtool = salt.utils.which('certtool')
    return has_certtool is not None
def ext_pillar(minion_id,
               pillar,  # pylint: disable=W0613
               command):  # pylint: disable=W0613
    '''
    Read in the generated libvirt keys
    '''
    key_dir = os.path.join(__opts__['pki_dir'], 'libvirt', minion_id)
    cacert = os.path.join(__opts__['pki_dir'], 'libvirt', 'cacert.pem')
    if not os.path.isdir(key_dir):
        # No keys have been generated for this minion yet; create them now.
        gen_hyper_keys(minion_id)
    pillar_data = {}
    for fname in os.listdir(key_dir):
        if fname.endswith('.pem'):
            with salt.utils.fopen(os.path.join(key_dir, fname), 'r') as fp_:
                pillar_data['libvirt.{0}'.format(fname)] = fp_.read()
    with salt.utils.fopen(cacert, 'r') as fp_:
        pillar_data['libvirt.cacert.pem'] = fp_.read()
    return pillar_data
def _gen_privkey(path):
    # Helper: generate a private key with certtool unless one already exists.
    # NOTE(review): shell=True with interpolated paths -- the paths are
    # derived from pki_dir config, but quoting would still be safer.
    if not os.path.isfile(path):
        subprocess.call(
            'certtool --generate-privkey > {0}'.format(path),
            shell=True)


def _gen_signed_cert(priv, cacert, cakey, info, cert):
    # Helper: issue a certificate for *priv*, signed by the CA, from the
    # certtool template *info*, unless the certificate already exists.
    if not os.path.isfile(cert):
        cmd = ('certtool --generate-certificate --load-privkey {0} '
               '--load-ca-certificate {1} --load-ca-privkey {2} '
               '--template {3} --outfile {4}'
               ).format(priv, cacert, cakey, info, cert)
        subprocess.call(cmd, shell=True)


def gen_hyper_keys(minion_id,
                   country='US',
                   state='Utah',
                   locality='Salt Lake City',
                   organization='Salted'):
    '''
    Generate the keys to be used by libvirt hypervisors, this routine gens
    the keys and applies them to the pillar for the hypervisor minions
    '''
    key_dir = os.path.join(__opts__['pki_dir'], 'libvirt')
    if not os.path.isdir(key_dir):
        os.makedirs(key_dir)
    cakey = os.path.join(key_dir, 'cakey.pem')
    cacert = os.path.join(key_dir, 'cacert.pem')
    cainfo = os.path.join(key_dir, 'ca.info')
    # CA template, key, and self-signed certificate: created once and
    # shared by every minion's server/client certificates.
    if not os.path.isfile(cainfo):
        with salt.utils.fopen(cainfo, 'w+') as fp_:
            fp_.write('cn = salted\nca\ncert_signing_key')
    _gen_privkey(cakey)
    if not os.path.isfile(cacert):
        cmd = ('certtool --generate-self-signed --load-privkey {0} '
               '--template {1} --outfile {2}').format(cakey, cainfo, cacert)
        subprocess.call(cmd, shell=True)
    # Per-minion directory holding the server and client key pairs.
    sub_dir = os.path.join(key_dir, minion_id)
    if not os.path.isdir(sub_dir):
        os.makedirs(sub_dir)
    priv = os.path.join(sub_dir, 'serverkey.pem')
    cert = os.path.join(sub_dir, 'servercert.pem')
    srvinfo = os.path.join(sub_dir, 'server.info')
    cpriv = os.path.join(sub_dir, 'clientkey.pem')
    ccert = os.path.join(sub_dir, 'clientcert.pem')
    clientinfo = os.path.join(sub_dir, 'client.info')
    if not os.path.isfile(srvinfo):
        with salt.utils.fopen(srvinfo, 'w+') as fp_:
            infodat = ('organization = salted\ncn = {0}\ntls_www_server'
                       '\nencryption_key\nsigning_key'
                       '\ndigitalSignature').format(
                           __grains__['fqdn'])
            fp_.write(infodat)
    _gen_privkey(priv)
    _gen_signed_cert(priv, cacert, cakey, srvinfo, cert)
    if not os.path.isfile(clientinfo):
        with salt.utils.fopen(clientinfo, 'w+') as fp_:
            infodat = ('country = {0}\nstate = {1}\nlocality = '
                       '{2}\norganization = {3}\ncn = {4}\n'
                       'tls_www_client\nencryption_key\nsigning_key\n'
                       'digitalSignature'
                       ).format(
                           country,
                           state,
                           locality,
                           organization,
                           __grains__['fqdn'])
            fp_.write(infodat)
    _gen_privkey(cpriv)
    _gen_signed_cert(cpriv, cacert, cakey, clientinfo, ccert)
| 37.637795 | 117 | 0.561715 |
from __future__ import absolute_import
# autosummary pulls only the first line for its description.
# Import python libs
import os
import subprocess
# Import salt libs
import salt.utils
def __virtual__():
    # Only load this external pillar when the certtool binary exists.
    has_certtool = salt.utils.which('certtool')
    return has_certtool is not None
def ext_pillar(minion_id,
               pillar,  # pylint: disable=W0613
               command):  # pylint: disable=W0613
    '''
    Read in the generated libvirt keys
    '''
    key_dir = os.path.join(__opts__['pki_dir'], 'libvirt', minion_id)
    cacert = os.path.join(__opts__['pki_dir'], 'libvirt', 'cacert.pem')
    if not os.path.isdir(key_dir):
        # No keys have been generated for this minion yet; create them now.
        gen_hyper_keys(minion_id)
    pillar_data = {}
    for fname in os.listdir(key_dir):
        if fname.endswith('.pem'):
            with salt.utils.fopen(os.path.join(key_dir, fname), 'r') as fp_:
                pillar_data['libvirt.{0}'.format(fname)] = fp_.read()
    with salt.utils.fopen(cacert, 'r') as fp_:
        pillar_data['libvirt.cacert.pem'] = fp_.read()
    return pillar_data
def _gen_privkey(path):
    # Helper: generate a private key with certtool unless one already exists.
    # NOTE(review): shell=True with interpolated paths -- the paths are
    # derived from pki_dir config, but quoting would still be safer.
    if not os.path.isfile(path):
        subprocess.call(
            'certtool --generate-privkey > {0}'.format(path),
            shell=True)


def _gen_signed_cert(priv, cacert, cakey, info, cert):
    # Helper: issue a certificate for *priv*, signed by the CA, from the
    # certtool template *info*, unless the certificate already exists.
    if not os.path.isfile(cert):
        cmd = ('certtool --generate-certificate --load-privkey {0} '
               '--load-ca-certificate {1} --load-ca-privkey {2} '
               '--template {3} --outfile {4}'
               ).format(priv, cacert, cakey, info, cert)
        subprocess.call(cmd, shell=True)


def gen_hyper_keys(minion_id,
                   country='US',
                   state='Utah',
                   locality='Salt Lake City',
                   organization='Salted'):
    '''
    Generate the keys to be used by libvirt hypervisors, this routine gens
    the keys and applies them to the pillar for the hypervisor minions
    '''
    key_dir = os.path.join(__opts__['pki_dir'], 'libvirt')
    if not os.path.isdir(key_dir):
        os.makedirs(key_dir)
    cakey = os.path.join(key_dir, 'cakey.pem')
    cacert = os.path.join(key_dir, 'cacert.pem')
    cainfo = os.path.join(key_dir, 'ca.info')
    # CA template, key, and self-signed certificate: created once and
    # shared by every minion's server/client certificates.
    if not os.path.isfile(cainfo):
        with salt.utils.fopen(cainfo, 'w+') as fp_:
            fp_.write('cn = salted\nca\ncert_signing_key')
    _gen_privkey(cakey)
    if not os.path.isfile(cacert):
        cmd = ('certtool --generate-self-signed --load-privkey {0} '
               '--template {1} --outfile {2}').format(cakey, cainfo, cacert)
        subprocess.call(cmd, shell=True)
    # Per-minion directory holding the server and client key pairs.
    sub_dir = os.path.join(key_dir, minion_id)
    if not os.path.isdir(sub_dir):
        os.makedirs(sub_dir)
    priv = os.path.join(sub_dir, 'serverkey.pem')
    cert = os.path.join(sub_dir, 'servercert.pem')
    srvinfo = os.path.join(sub_dir, 'server.info')
    cpriv = os.path.join(sub_dir, 'clientkey.pem')
    ccert = os.path.join(sub_dir, 'clientcert.pem')
    clientinfo = os.path.join(sub_dir, 'client.info')
    if not os.path.isfile(srvinfo):
        with salt.utils.fopen(srvinfo, 'w+') as fp_:
            infodat = ('organization = salted\ncn = {0}\ntls_www_server'
                       '\nencryption_key\nsigning_key'
                       '\ndigitalSignature').format(
                           __grains__['fqdn'])
            fp_.write(infodat)
    _gen_privkey(priv)
    _gen_signed_cert(priv, cacert, cakey, srvinfo, cert)
    if not os.path.isfile(clientinfo):
        with salt.utils.fopen(clientinfo, 'w+') as fp_:
            infodat = ('country = {0}\nstate = {1}\nlocality = '
                       '{2}\norganization = {3}\ncn = {4}\n'
                       'tls_www_client\nencryption_key\nsigning_key\n'
                       'digitalSignature'
                       ).format(
                           country,
                           state,
                           locality,
                           organization,
                           __grains__['fqdn'])
            fp_.write(infodat)
    _gen_privkey(cpriv)
    _gen_signed_cert(cpriv, cacert, cakey, clientinfo, ccert)
| true | true |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.