hexsha stringlengths 40 40 | size int64 4 996k | ext stringclasses 8
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 4 245 | max_stars_repo_name stringlengths 6 130 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 245 | max_issues_repo_name stringlengths 6 130 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 245 | max_forks_repo_name stringlengths 6 130 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 4 996k | avg_line_length float64 1.33 58.2k | max_line_length int64 2 323k | alphanum_fraction float64 0 0.97 | content_no_comment stringlengths 0 946k | is_comment_constant_removed bool 2
classes | is_sharp_comment_removed bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
f71bf52008d28c422bd88b5ecd34e9e1fab1fa11 | 2,202 | py | Python | models/ModelUtil/util.py | Easonyesheng/StereoCameraToolk | 660e43019d0687e96e6da3aca48c1c423ae5abff | [
"MIT"
] | 27 | 2020-10-16T07:21:35.000Z | 2022-03-11T02:56:13.000Z | models/ModelUtil/util.py | Easonyesheng/StereoCamera | 9319b7f4e5ce36833de722a15e1074e82b8b4f84 | [
"MIT"
] | null | null | null | models/ModelUtil/util.py | Easonyesheng/StereoCamera | 9319b7f4e5ce36833de722a15e1074e82b8b4f84 | [
"MIT"
] | 6 | 2021-02-01T09:54:40.000Z | 2022-03-11T03:16:39.000Z | """Utility """
import numpy as np
import cv2
import os
import logging
def check_string_is_empty(string):
    """Return True if *string* is the empty string, False otherwise."""
    return string == ''
def check_numpy_array(array):
    """Return True when *array* behaves like a numpy array, False otherwise.

    Anything without a callable ``all`` attribute (``None``, a plain list,
    a scalar, ...) raises AttributeError internally and yields False.
    """
    try:
        array.all()
    except AttributeError:
        return False
    else:
        return True
def after_cv_imshow():
    """Block until a key press; close all OpenCV windows when Esc is hit.

    Call immediately after ``cv2.imshow()`` so the window stays on screen.
    """
    key = cv2.waitKey(0)
    if key == 27:  # 27 == Esc key code
        cv2.destroyAllWindows()
def save_img_with_prefix(img, path, name):
    """Write *img* to ``<path>/<name>.jpg``."""
    target = os.path.join(path, name + '.jpg')
    cv2.imwrite(target, img)
def img_show(img, name):
    """Normalize *img* by its peak value and display it in a window titled *name*.

    Args:
        img: image array to display (any numeric numpy array).
        name: window title.
    """
    cv2.startWindowThread()
    peak = np.max(img)
    # Guard against an all-zero image: dividing by a zero peak would fill
    # the display buffer with NaNs.
    if peak != 0:
        img = img / peak
    cv2.imshow(name, img)
    after_cv_imshow()
def test_dir_if_not_create(path):
"""name
save as 'path/name.jpg'
Args:
Returns:
"""
if os.path.isdir(path):
return True
else:
print('Create New Folder:', path)
os.makedirs(path)
return True
def log_init(logfilename):
    """Configure the root logger to emit DEBUG+ records to a file and the console.

    Args:
        logfilename: path of the log file; opened in ``'w'`` mode, so any
            previous contents are truncated.
    """
    logger = logging.getLogger()  # no name -> configure the root logger
    logger.setLevel(logging.DEBUG)
    # Remove handlers installed by any previous call so that repeated
    # initialisation does not duplicate every log line.
    for old_handler in list(logger.handlers):
        logger.removeHandler(old_handler)
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s: - %(message)s',
        datefmt='%Y-%m-%d %H:%M:%S')
    # FileHandler: persist records to the log file.
    fh = logging.FileHandler(logfilename, 'w')
    fh.setLevel(logging.DEBUG)
    fh.setFormatter(formatter)
    # StreamHandler: echo records to the console.
    ch = logging.StreamHandler()
    ch.setLevel(logging.DEBUG)
    ch.setFormatter(formatter)
    # Register both handlers on the root logger.
    logger.addHandler(ch)
    logger.addHandler(fh)
import numpy as np
import cv2
import os
import logging
def check_string_is_empty(string):
if string == '':
return True
return False
def check_numpy_array(array):
try:
array.all()
except AttributeError:
return False
return True
def after_cv_imshow():
k = cv2.waitKey(0)
if k == 27:
cv2.destroyAllWindows()
def save_img_with_prefix(img, path, name):
cv2.imwrite(os.path.join(path,name+'.jpg'), img)
def img_show(img, name):
cv2.startWindowThread()
img = img / np.max(img)
cv2.imshow(name, img)
after_cv_imshow()
def test_dir_if_not_create(path):
if os.path.isdir(path):
return True
else:
print('Create New Folder:', path)
os.makedirs(path)
return True
def log_init(logfilename):
logger = logging.getLogger()
logger.setLevel(logging.DEBUG)
formatter = logging.Formatter(
'%(asctime)s - %(name)s - %(levelname)s: - %(message)s',
datefmt='%Y-%m-%d %H:%M:%S')
fh = logging.FileHandler(logfilename, 'w')
fh.setLevel(logging.DEBUG)
fh.setFormatter(formatter)
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
ch.setFormatter(formatter)
logger.addHandler(ch)
logger.addHandler(fh) | true | true |
f71bf5a6846601e2c447085faeecf596137b1330 | 4,935 | py | Python | airflow/api/common/experimental/trigger_dag.py | IGIT-CN/airflow | a6e5bcd59198afe5716813e84ebc4c59eade532c | [
"Apache-2.0"
] | 2 | 2020-03-24T14:47:18.000Z | 2020-03-24T14:48:17.000Z | airflow/api/common/experimental/trigger_dag.py | IGIT-CN/airflow | a6e5bcd59198afe5716813e84ebc4c59eade532c | [
"Apache-2.0"
] | 1 | 2021-09-29T17:37:13.000Z | 2021-09-29T17:37:13.000Z | airflow/api/common/experimental/trigger_dag.py | IGIT-CN/airflow | a6e5bcd59198afe5716813e84ebc4c59eade532c | [
"Apache-2.0"
] | null | null | null | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Triggering DAG runs APIs."""
import json
from datetime import datetime
from typing import List, Optional, Union
from airflow.exceptions import DagNotFound, DagRunAlreadyExists
from airflow.models import DagBag, DagModel, DagRun
from airflow.utils import timezone
from airflow.utils.state import State
from airflow.utils.types import DagRunType
def _trigger_dag(
    dag_id: str,
    dag_bag: DagBag,
    dag_run: DagRun,
    run_id: Optional[str],
    conf: Optional[Union[dict, str]],
    execution_date: Optional[datetime],
    replace_microseconds: bool,
) -> List[DagRun]:  # pylint: disable=too-many-arguments
    """Triggers DAG run.
    :param dag_id: DAG ID
    :param dag_bag: DAG Bag model holding the parsed DAGs
    :param dag_run: DAG Run model, used only to query for pre-existing runs
    :param run_id: ID of the dag_run; autogenerated from the manual run type
        and the execution date when omitted
    :param conf: configuration, either an already-parsed dict or a JSON string
    :param execution_date: date of execution; defaults to the current UTC time
    :param replace_microseconds: whether microseconds should be zeroed
    :raises DagNotFound: if ``dag_id`` is not present in ``dag_bag``
    :raises DagRunAlreadyExists: if a run with the same ``run_id`` exists
    :raises ValueError: if ``execution_date`` is naive, or precedes the DAG's
        ``start_date`` from ``default_args``
    :return: list of triggered dag runs (one for the DAG plus one per subdag)
    """
    dag = dag_bag.get_dag(dag_id)  # prefetch dag if it is stored serialized
    if dag_id not in dag_bag.dags:
        raise DagNotFound("Dag id {} not found".format(dag_id))
    # Default to "now" and insist on a timezone-aware execution date.
    execution_date = execution_date if execution_date else timezone.utcnow()
    if not timezone.is_localized(execution_date):
        raise ValueError("The execution_date should be localized")
    if replace_microseconds:
        execution_date = execution_date.replace(microsecond=0)
    # A manually triggered run may not be dated before the DAG's start_date.
    if dag.default_args and 'start_date' in dag.default_args:
        min_dag_start_date = dag.default_args["start_date"]
        if min_dag_start_date and execution_date < min_dag_start_date:
            raise ValueError(
                "The execution_date [{0}] should be >= start_date [{1}] from DAG's default_args".format(
                    execution_date.isoformat(),
                    min_dag_start_date.isoformat()))
    if not run_id:
        run_id = "{}{}".format(DagRunType.MANUAL.value, execution_date.isoformat())
    # Refuse to trigger twice under the same run id.
    dag_run_id = dag_run.find(dag_id=dag_id, run_id=run_id)
    if dag_run_id:
        raise DagRunAlreadyExists("Run id {} already exists for dag id {}".format(
            run_id,
            dag_id
        ))
    # ``conf`` may arrive as an already-parsed dict or as a JSON string.
    run_conf = None
    if conf:
        if isinstance(conf, dict):
            run_conf = conf
        else:
            run_conf = json.loads(conf)
    # Create a run for the DAG itself and, transitively, for every subdag.
    triggers = []
    dags_to_trigger = [dag]
    while dags_to_trigger:
        dag = dags_to_trigger.pop()
        trigger = dag.create_dagrun(
            run_id=run_id,
            execution_date=execution_date,
            state=State.RUNNING,
            conf=run_conf,
            external_trigger=True,
        )
        triggers.append(trigger)
        if dag.subdags:
            dags_to_trigger.extend(dag.subdags)
    return triggers
def trigger_dag(
    dag_id: str,
    run_id: Optional[str] = None,
    conf: Optional[Union[dict, str]] = None,
    execution_date: Optional[datetime] = None,
    replace_microseconds: bool = True,
) -> Optional[DagRun]:
    """Trigger execution of the DAG identified by ``dag_id``.

    :param dag_id: DAG ID
    :param run_id: ID of the dag_run
    :param conf: configuration (dict or JSON string)
    :param execution_date: date of execution
    :param replace_microseconds: whether microseconds should be zeroed
    :raises DagNotFound: if no DAG with ``dag_id`` is registered in DagModel
    :return: the first triggered DAG run (even if several were triggered), or None
    """
    dag_model = DagModel.get_current(dag_id)
    if dag_model is None:
        raise DagNotFound("Dag id {} not found in DagModel".format(dag_id))
    # Import under an alias so the Airflow settings object cannot shadow
    # this function's ``conf`` parameter.
    from airflow.configuration import conf as airflow_config
    store_serialized = airflow_config.getboolean('core', 'store_serialized_dags')
    dagbag = DagBag(
        dag_folder=dag_model.fileloc,
        store_serialized_dags=store_serialized,
    )
    triggers = _trigger_dag(
        dag_id=dag_id,
        dag_run=DagRun(),
        dag_bag=dagbag,
        run_id=run_id,
        conf=conf,
        execution_date=execution_date,
        replace_microseconds=replace_microseconds,
    )
    return triggers[0] if triggers else None
| 34.270833 | 104 | 0.680446 |
import json
from datetime import datetime
from typing import List, Optional, Union
from airflow.exceptions import DagNotFound, DagRunAlreadyExists
from airflow.models import DagBag, DagModel, DagRun
from airflow.utils import timezone
from airflow.utils.state import State
from airflow.utils.types import DagRunType
def _trigger_dag(
dag_id: str,
dag_bag: DagBag,
dag_run: DagModel,
run_id: Optional[str],
conf: Optional[Union[dict, str]],
execution_date: Optional[datetime],
replace_microseconds: bool,
) -> List[DagRun]:
dag = dag_bag.get_dag(dag_id)
if dag_id not in dag_bag.dags:
raise DagNotFound("Dag id {} not found".format(dag_id))
execution_date = execution_date if execution_date else timezone.utcnow()
if not timezone.is_localized(execution_date):
raise ValueError("The execution_date should be localized")
if replace_microseconds:
execution_date = execution_date.replace(microsecond=0)
if dag.default_args and 'start_date' in dag.default_args:
min_dag_start_date = dag.default_args["start_date"]
if min_dag_start_date and execution_date < min_dag_start_date:
raise ValueError(
"The execution_date [{0}] should be >= start_date [{1}] from DAG's default_args".format(
execution_date.isoformat(),
min_dag_start_date.isoformat()))
if not run_id:
run_id = "{}{}".format(DagRunType.MANUAL.value, execution_date.isoformat())
dag_run_id = dag_run.find(dag_id=dag_id, run_id=run_id)
if dag_run_id:
raise DagRunAlreadyExists("Run id {} already exists for dag id {}".format(
run_id,
dag_id
))
run_conf = None
if conf:
if isinstance(conf, dict):
run_conf = conf
else:
run_conf = json.loads(conf)
triggers = []
dags_to_trigger = [dag]
while dags_to_trigger:
dag = dags_to_trigger.pop()
trigger = dag.create_dagrun(
run_id=run_id,
execution_date=execution_date,
state=State.RUNNING,
conf=run_conf,
external_trigger=True,
)
triggers.append(trigger)
if dag.subdags:
dags_to_trigger.extend(dag.subdags)
return triggers
def trigger_dag(
dag_id: str,
run_id: Optional[str] = None,
conf: Optional[Union[dict, str]] = None,
execution_date: Optional[datetime] = None,
replace_microseconds: bool = True,
) -> Optional[DagRun]:
dag_model = DagModel.get_current(dag_id)
if dag_model is None:
raise DagNotFound("Dag id {} not found in DagModel".format(dag_id))
def read_store_serialized_dags():
from airflow.configuration import conf
return conf.getboolean('core', 'store_serialized_dags')
dagbag = DagBag(
dag_folder=dag_model.fileloc,
store_serialized_dags=read_store_serialized_dags()
)
dag_run = DagRun()
triggers = _trigger_dag(
dag_id=dag_id,
dag_run=dag_run,
dag_bag=dagbag,
run_id=run_id,
conf=conf,
execution_date=execution_date,
replace_microseconds=replace_microseconds,
)
return triggers[0] if triggers else None
| true | true |
f71bf6d7e4ee3f8699dd771f06d36619668ab2db | 8,225 | py | Python | authors/apps/notify/tests/test_mail_list_model.py | andela/Ah-backend-valkyrie | f0eb64c27e1fe37d5c81e4b9a8762dcf3c336a79 | [
"BSD-3-Clause"
] | null | null | null | authors/apps/notify/tests/test_mail_list_model.py | andela/Ah-backend-valkyrie | f0eb64c27e1fe37d5c81e4b9a8762dcf3c336a79 | [
"BSD-3-Clause"
] | 46 | 2019-01-08T13:16:41.000Z | 2021-04-30T20:47:08.000Z | authors/apps/notify/tests/test_mail_list_model.py | andela/Ah-backend-valkyrie | f0eb64c27e1fe37d5c81e4b9a8762dcf3c336a79 | [
"BSD-3-Clause"
] | 3 | 2019-01-07T08:21:59.000Z | 2019-09-20T06:43:18.000Z | import json
from authors.apps.authentication.tests.base import BaseTestMethods
from authors.apps.authentication.models import User
from rest_framework.reverse import reverse
from rest_framework import status
class NotificationTests(BaseTestMethods):
    """Integration tests for mail-list preferences and user notifications."""

    def _follow_user2(self):
        """Register two users and have user 1 follow user 2.

        The follow action generates exactly one notification for user 2.

        Returns:
            tuple: ``(user_1_token, user_2_token)`` JWT tokens.
        """
        user_1 = self.register_and_loginUser()
        user_1_token = user_1.data['token']
        user_2 = self.register_and_login_user2()
        user_2_token = user_2.data['token']
        this_user_2 = User.objects.get(email=user_2.data['email'])
        response = self.client.post(
            '/api/v1/users/{}/profile/follow'.format(this_user_2.username),
            HTTP_AUTHORIZATION=f'Bearer {user_1_token}')
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        self.assertEqual(
            json.loads(response.content)[
                'profile']['following'], True
        )
        return user_1_token, user_2_token

    def test_create_mail_list(self):
        """A logged-in user can fetch their own mail-list status."""
        user = self.register_and_loginUser()
        auth = f"Bearer {user.data['token']}"
        url = reverse('mail-list-status')
        response = self.client.get(url, HTTP_AUTHORIZATION=auth)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertIsInstance(response.data, dict)
        self.assertEqual(
            response.data['user']['email'], 'testuser@andela.com')

    def test_create_update_notification_status(self):
        """A user can switch off e-mail notifications."""
        user = self.register_and_loginUser()
        auth = f"Bearer {user.data['token']}"
        url = reverse('mail-list-status')
        data = {'recieve_email_notifications': 'false'}
        response = self.client.put(url, data=data, HTTP_AUTHORIZATION=auth)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(
            response.data['recieve_email_notifications'], False)

    def test_fetch_all_user_notifications(self):
        """The followed user sees the follow notification listed."""
        _, user_2_token = self._follow_user2()
        url = reverse('all-notifications')
        notifications = self.client.get(
            url, format='json', HTTP_AUTHORIZATION=f'Bearer {user_2_token}')
        self.assertEqual(notifications.status_code, status.HTTP_200_OK)
        self.assertEqual(
            notifications.data['count'], 1)

    def test_cant_fetch_notifications_for_different_user(self):
        """The follower must not see the followed user's notification."""
        user_1_token, _ = self._follow_user2()
        url = reverse('all-notifications')
        notifications = self.client.get(
            url, format='json', HTTP_AUTHORIZATION=f'Bearer {user_1_token}')
        self.assertEqual(
            notifications.data['message'],
            'You currently dont have any notifications')

    def test_fetch_all_user_unread_notifications(self):
        """A fresh follow notification is reported as unread."""
        _, user_2_token = self._follow_user2()
        url = reverse('unread-notifications')
        notifications = self.client.get(
            url, format='json', HTTP_AUTHORIZATION=f'Bearer {user_2_token}')
        self.assertEqual(notifications.status_code, status.HTTP_200_OK)
        self.assertEqual(
            notifications.data['count'], 1)

    def test_failed_fetch_all_user_unread_notifications(self):
        """Without any follow, the unread list is empty."""
        self.register_and_loginUser()
        user_2 = self.register_and_login_user2()
        user_2_token = user_2.data['token']
        url = reverse('unread-notifications')
        notifications = self.client.get(
            url, format='json', HTTP_AUTHORIZATION=f'Bearer {user_2_token}')
        self.assertEqual(
            notifications.data['message'],
            'You currently dont have any unread notifications')

    def test_fetch_all_user_read_notofications(self):
        """Notifications marked as read show up in the read list."""
        _, user_2_token = self._follow_user2()
        # Mark every notification as read first.
        url = reverse('mark-all-as-read')
        self.client.get(
            url, format='json', HTTP_AUTHORIZATION=f'Bearer {user_2_token}')
        url = reverse('read-notifications')
        notifications = self.client.get(
            url, format='json', HTTP_AUTHORIZATION=f'Bearer {user_2_token}')
        self.assertEqual(
            notifications.data['count'], 1)

    def test_failed_fetch_all_user_read_notifications(self):
        """An unread notification is not listed in the read list."""
        _, user_2_token = self._follow_user2()
        url = reverse('read-notifications')
        notifications = self.client.get(
            url, format='json', HTTP_AUTHORIZATION=f'Bearer {user_2_token}')
        self.assertEqual(
            notifications.data['message'], 'You currently dont have any read notifications')

    def test_mark_all_notofications_as_read(self):
        """Marking all notifications as read reports success."""
        _, user_2_token = self._follow_user2()
        url = reverse('mark-all-as-read')
        notifications = self.client.get(
            url, format='json', HTTP_AUTHORIZATION=f'Bearer {user_2_token}')
        self.assertEqual(
            notifications.data['message'], 'All notifications marked as read')
| 39.927184 | 92 | 0.646565 | import json
from authors.apps.authentication.tests.base import BaseTestMethods
from authors.apps.authentication.models import User
from rest_framework.reverse import reverse
from rest_framework import status
class NotificationTests(BaseTestMethods):
def test_create_mail_list(self):
user = self.register_and_loginUser()
token = user.data['token']
auth = f'Bearer {token}'
url = reverse('mail-list-status')
response = self.client.get(url, HTTP_AUTHORIZATION=auth)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertIsInstance(response.data, dict)
self.assertEqual(
response.data['user']['email'], 'testuser@andela.com')
def test_create_update_notification_status(self):
user = self.register_and_loginUser()
token = user.data['token']
auth = f'Bearer {token}'
url = reverse('mail-list-status')
data = {'recieve_email_notifications': 'false'}
response = self.client.put(url, data=data, HTTP_AUTHORIZATION=auth)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(
response.data['recieve_email_notifications'], False)
def test_fetch_all_user_notifications(self):
user_1 = self.register_and_loginUser()
user_1_token = user_1.data['token']
user_2 = self.register_and_login_user2()
user_2_token = user_2.data['token']
this_user_2 = User.objects.get(email=user_2.data['email'])
response = self.client.post(
'/api/v1/users/{}/profile/follow'.format(this_user_2.username),
HTTP_AUTHORIZATION=f'Bearer {user_1_token}')
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
self.assertEqual(
json.loads(response.content)[
'profile']['following'], True
)
url = reverse('all-notifications')
notifications = self.client.get(
url, format='json', HTTP_AUTHORIZATION=f'Bearer {user_2_token}')
self.assertEqual(notifications.status_code, status.HTTP_200_OK)
self.assertEqual(
notifications.data['count'], 1)
def test_cant_fetch_notifications_for_different_user(self):
user_1 = self.register_and_loginUser()
user_1_token = user_1.data['token']
user_2 = self.register_and_login_user2()
this_user_2 = User.objects.get(email=user_2.data['email'])
response = self.client.post(
'/api/v1/users/{}/profile/follow'.format(this_user_2.username),
HTTP_AUTHORIZATION=f'Bearer {user_1_token}')
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
self.assertEqual(
json.loads(response.content)[
'profile']['following'], True
)
url = reverse('all-notifications')
notifications = self.client.get(
url, format='json', HTTP_AUTHORIZATION=f'Bearer {user_1_token}')
self.assertEqual(
notifications.data['message'],
'You currently dont have any notifications')
def test_fetch_all_user_unread_notifications(self):
user_1 = self.register_and_loginUser()
user_1_token = user_1.data['token']
user_2 = self.register_and_login_user2()
user_2_token = user_2.data['token']
this_user_2 = User.objects.get(email=user_2.data['email'])
response = self.client.post(
'/api/v1/users/{}/profile/follow'.format(this_user_2.username),
HTTP_AUTHORIZATION=f'Bearer {user_1_token}')
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
self.assertEqual(
json.loads(response.content)[
'profile']['following'], True
)
url = reverse('unread-notifications')
notifications = self.client.get(
url, format='json', HTTP_AUTHORIZATION=f'Bearer {user_2_token}')
self.assertEqual(notifications.status_code, status.HTTP_200_OK)
self.assertEqual(
notifications.data['count'], 1)
def test_failed_fetch_all_user_unread_notifications(self):
self.register_and_loginUser()
user_2 = self.register_and_login_user2()
user_2_token = user_2.data['token']
url = reverse('unread-notifications')
notifications = self.client.get(
url, format='json', HTTP_AUTHORIZATION=f'Bearer {user_2_token}')
self.assertEqual(
notifications.data['message'],
'You currently dont have any unread notifications')
def test_fetch_all_user_read_notofications(self):
user_1 = self.register_and_loginUser()
user_1_token = user_1.data['token']
user_2 = self.register_and_login_user2()
user_2_token = user_2.data['token']
this_user_2 = User.objects.get(email=user_2.data['email'])
response = self.client.post(
'/api/v1/users/{}/profile/follow'.format(this_user_2.username),
HTTP_AUTHORIZATION=f'Bearer {user_1_token}')
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
self.assertEqual(
json.loads(response.content)[
'profile']['following'], True
)
url = reverse('mark-all-as-read')
self.client.get(
url, format='json', HTTP_AUTHORIZATION=f'Bearer {user_2_token}')
url = reverse('read-notifications')
notifications = self.client.get(
url, format='json', HTTP_AUTHORIZATION=f'Bearer {user_2_token}')
self.assertEqual(
notifications.data['count'], 1)
def test_failed_fetch_all_user_read_notifications(self):
user_1 = self.register_and_loginUser()
user_1_token = user_1.data['token']
user_2 = self.register_and_login_user2()
user_2_token = user_2.data['token']
this_user_2 = User.objects.get(email=user_2.data['email'])
response = self.client.post(
'/api/v1/users/{}/profile/follow'.format(this_user_2.username),
HTTP_AUTHORIZATION=f'Bearer {user_1_token}')
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
self.assertEqual(
json.loads(response.content)[
'profile']['following'], True
)
url = reverse('read-notifications')
notifications = self.client.get(
url, format='json', HTTP_AUTHORIZATION=f'Bearer {user_2_token}')
self.assertEqual(
notifications.data['message'], 'You currently dont have any read notifications')
def test_mark_all_notofications_as_read(self):
user_1 = self.register_and_loginUser()
user_1_token = user_1.data['token']
user_2 = self.register_and_login_user2()
user_2_token = user_2.data['token']
this_user_2 = User.objects.get(email=user_2.data['email'])
response = self.client.post(
'/api/v1/users/{}/profile/follow'.format(this_user_2.username),
HTTP_AUTHORIZATION=f'Bearer {user_1_token}')
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
self.assertEqual(
json.loads(response.content)[
'profile']['following'], True
)
url = reverse('mark-all-as-read')
notifications = self.client.get(
url, format='json', HTTP_AUTHORIZATION=f'Bearer {user_2_token}')
self.assertEqual(
notifications.data['message'], 'All notifications marked as read')
| true | true |
f71bf861822822818626a562f2930b021abda1d2 | 596 | py | Python | x2py/__init__.py | jaykang920/x2py | b8bd473f94ff4b9576e984cc384f4159ab71278d | [
"MIT"
] | null | null | null | x2py/__init__.py | jaykang920/x2py | b8bd473f94ff4b9576e984cc384f4159ab71278d | [
"MIT"
] | 1 | 2019-06-05T09:35:09.000Z | 2020-07-02T09:46:46.000Z | x2py/__init__.py | jaykang920/x2py | b8bd473f94ff4b9576e984cc384f4159ab71278d | [
"MIT"
] | null | null | null | # Copyright (c) 2017, 2018 Jae-jun Kang
# See the file LICENSE for details.
"""Import core names of x2py."""
__version__ = '0.4.3'
from x2py.buffer_transform import BufferTransform
from x2py.builtin_events import *
from x2py.case import Case
from x2py.config import Config
from x2py.coroutine import Coroutine, CoroutineHandler
from x2py.event import Event
from x2py.event_factory import EventFactory
from x2py.event_sink import EventSink
from x2py.flow import Flow
from x2py.hub import Hub
from x2py.link import Link
from x2py.flows import *
from x2py.util import *
from x2py.yields import *
| 27.090909 | 54 | 0.798658 |
__version__ = '0.4.3'
from x2py.buffer_transform import BufferTransform
from x2py.builtin_events import *
from x2py.case import Case
from x2py.config import Config
from x2py.coroutine import Coroutine, CoroutineHandler
from x2py.event import Event
from x2py.event_factory import EventFactory
from x2py.event_sink import EventSink
from x2py.flow import Flow
from x2py.hub import Hub
from x2py.link import Link
from x2py.flows import *
from x2py.util import *
from x2py.yields import *
| true | true |
f71bf97329b821d2dc2e07b818ab784fb1ed22a7 | 2,815 | py | Python | test/lint/check-doc.py | BakedInside/beanscore | daa9b2ddbfd3305881749bda7f32146738154260 | [
"MIT"
] | null | null | null | test/lint/check-doc.py | BakedInside/beanscore | daa9b2ddbfd3305881749bda7f32146738154260 | [
"MIT"
] | null | null | null | test/lint/check-doc.py | BakedInside/beanscore | daa9b2ddbfd3305881749bda7f32146738154260 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# Copyright (c) 2015-2020 The Beans Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
'''
This checks if all command line args are documented.
Return value is 0 to indicate no error.
Author: @MarcoFalke
'''
from subprocess import check_output
import re
# Scan everything under src/, but exclude the unit tests from the usage scan.
FOLDER_GREP = 'src'
FOLDER_TEST = 'src/test/'
# Matches code that *reads* an argument, e.g. GetArg("-foo"); group 1 = "-foo".
REGEX_ARG = r'(?:ForceSet|SoftSet|Get|Is)(?:Bool)?Args?(?:Set)?\("(-[^"]+)"'
# Matches code that *documents* an argument, e.g. AddArg("-foo=..."); group 1 = "-foo".
REGEX_DOC = r'AddArg\("(-[^"=]+?)(?:=|")'
CMD_ROOT_DIR = '$(git rev-parse --show-toplevel)/{}'.format(FOLDER_GREP)
# git grep commands executed through the shell by the lint functions below.
CMD_GREP_ARGS = r"git grep --perl-regexp '{}' -- {} ':(exclude){}'".format(REGEX_ARG, CMD_ROOT_DIR, FOLDER_TEST)
CMD_GREP_WALLET_ARGS = r"git grep --function-context 'void WalletInit::AddWalletOptions' -- {} | grep AddArg".format(CMD_ROOT_DIR)
CMD_GREP_WALLET_HIDDEN_ARGS = r"git grep --function-context 'void DummyWalletInit::AddWalletOptions' -- {}".format(CMD_ROOT_DIR)
CMD_GREP_DOCS = r"git grep --perl-regexp '{}' {}".format(REGEX_DOC, CMD_ROOT_DIR)
# list unsupported, deprecated and duplicate args as they need no documentation
SET_DOC_OPTIONAL = set(['-h', '-help', '-dbcrashratio', '-forcecompactdb', '-zapwallettxes'])
def lint_missing_argument_documentation():
    """Assert that every command-line argument read by the code is documented.

    Greps the source tree for argument reads and AddArg() registrations,
    prints a summary, and fails if any read argument lacks documentation.
    """
    used = check_output(CMD_GREP_ARGS, shell=True).decode('utf8').strip()
    docd = check_output(CMD_GREP_DOCS, shell=True).decode('utf8').strip()

    args_used = set(re.findall(re.compile(REGEX_ARG), used))
    args_docd = set(re.findall(re.compile(REGEX_DOC), docd)) | SET_DOC_OPTIONAL
    args_need_doc = args_used - args_docd
    args_unknown = args_docd - args_used

    print("Args used : {}".format(len(args_used)))
    print("Args documented : {}".format(len(args_docd)))
    print("Args undocumented: {}".format(len(args_need_doc)))
    print(args_need_doc)
    print("Args unknown : {}".format(len(args_unknown)))
    print(args_unknown)

    assert 0 == len(args_need_doc), "Please document the following arguments: {}".format(args_need_doc)
def lint_missing_hidden_wallet_args():
    """Assert every wallet option is mirrored as a hidden arg in DummyWalletInit.

    Compares the args registered in WalletInit::AddWalletOptions against the
    hidden placeholders in DummyWalletInit::AddWalletOptions.
    """
    declared = check_output(CMD_GREP_WALLET_ARGS, shell=True).decode('utf8').strip()
    hidden = check_output(CMD_GREP_WALLET_HIDDEN_ARGS, shell=True).decode('utf8').strip()

    declared_args = set(re.findall(re.compile(REGEX_DOC), declared))
    hidden_args = set(re.findall(re.compile(r' "([^"=]+)'), hidden))
    hidden_missing = declared_args - hidden_args
    if hidden_missing:
        assert 0, "Please add {} to the hidden args in DummyWalletInit::AddWalletOptions".format(hidden_missing)
def main():
    """Run all lint checks; raises AssertionError on the first failure."""
    lint_missing_argument_documentation()
    lint_missing_hidden_wallet_args()
if __name__ == "__main__":
    main()
| 42.014925 | 130 | 0.722913 |
from subprocess import check_output
import re
FOLDER_GREP = 'src'
FOLDER_TEST = 'src/test/'
REGEX_ARG = r'(?:ForceSet|SoftSet|Get|Is)(?:Bool)?Args?(?:Set)?\("(-[^"]+)"'
REGEX_DOC = r'AddArg\("(-[^"=]+?)(?:=|")'
CMD_ROOT_DIR = '$(git rev-parse --show-toplevel)/{}'.format(FOLDER_GREP)
CMD_GREP_ARGS = r"git grep --perl-regexp '{}' -- {} ':(exclude){}'".format(REGEX_ARG, CMD_ROOT_DIR, FOLDER_TEST)
CMD_GREP_WALLET_ARGS = r"git grep --function-context 'void WalletInit::AddWalletOptions' -- {} | grep AddArg".format(CMD_ROOT_DIR)
CMD_GREP_WALLET_HIDDEN_ARGS = r"git grep --function-context 'void DummyWalletInit::AddWalletOptions' -- {}".format(CMD_ROOT_DIR)
CMD_GREP_DOCS = r"git grep --perl-regexp '{}' {}".format(REGEX_DOC, CMD_ROOT_DIR)
SET_DOC_OPTIONAL = set(['-h', '-help', '-dbcrashratio', '-forcecompactdb', '-zapwallettxes'])
def lint_missing_argument_documentation():
    """Assert that every argument referenced in the sources is documented.

    Compares the set of argument names matched by REGEX_ARG against the set
    documented through AddArg (plus SET_DOC_OPTIONAL), printing a summary of
    both directions of the difference before asserting.
    """
    used_output = check_output(CMD_GREP_ARGS, shell=True).decode('utf8').strip()
    doc_output = check_output(CMD_GREP_DOCS, shell=True).decode('utf8').strip()
    used = set(re.compile(REGEX_ARG).findall(used_output))
    documented = set(re.compile(REGEX_DOC).findall(doc_output)) | SET_DOC_OPTIONAL
    undocumented = used - documented
    unknown = documented - used
    print("Args used : {}".format(len(used)))
    print("Args documented : {}".format(len(documented)))
    print("Args undocumented: {}".format(len(undocumented)))
    print(undocumented)
    print("Args unknown : {}".format(len(unknown)))
    print(unknown)
    assert 0 == len(undocumented), "Please document the following arguments: {}".format(undocumented)
def lint_missing_hidden_wallet_args():
    """Assert that every wallet option has a matching hidden-arg entry.

    Every argument added via AddArg in WalletInit::AddWalletOptions must also
    appear in DummyWalletInit::AddWalletOptions.
    """
    visible_output = check_output(CMD_GREP_WALLET_ARGS, shell=True).decode('utf8').strip()
    hidden_output = check_output(CMD_GREP_WALLET_HIDDEN_ARGS, shell=True).decode('utf8').strip()
    visible = set(re.compile(REGEX_DOC).findall(visible_output))
    hidden = set(re.compile(r' "([^"=]+)').findall(hidden_output))
    missing = visible - hidden
    assert not missing, "Please add {} to the hidden args in DummyWalletInit::AddWalletOptions".format(missing)
def main():
    """Run every lint check in sequence."""
    for check in (lint_missing_argument_documentation, lint_missing_hidden_wallet_args):
        check()


if __name__ == "__main__":
    main()
f71bf97c41d24bf4383061ccf726c43fc54c6be9 | 4,880 | py | Python | py3status/modules/file_status.py | JackDoan/py3status | e7f56fc0bec8c1a625328c3549b88f66ee8b41ab | [
"BSD-3-Clause"
] | null | null | null | py3status/modules/file_status.py | JackDoan/py3status | e7f56fc0bec8c1a625328c3549b88f66ee8b41ab | [
"BSD-3-Clause"
] | null | null | null | py3status/modules/file_status.py | JackDoan/py3status | e7f56fc0bec8c1a625328c3549b88f66ee8b41ab | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Display if files or directories exists.
Configuration parameters:
cache_timeout: refresh interval for this module (default 10)
format: display format for this module
(default '\?color=path [\?if=path ●|■]')
format_path: format for paths (default '{basename}')
format_path_separator: show separator if more than one (default ' ')
paths: specify a string or a list of paths to check (default None)
thresholds: specify color thresholds to use
(default [(0, 'bad'), (1, 'good')])
Format placeholders:
{format_path} format for paths
{path} number of paths, eg 1, 2, 3
format_path placeholders:
{basename} basename of pathname
{pathname} pathname
Color options:
color_bad: files or directories does not exist
color_good: files or directories exists
Color thresholds:
format:
path: print a color based on the number of paths
Examples:
# add multiple paths with wildcard or with pathnames
```
file_status {
paths = ['/tmp/test*', '~user/test1', '~/Videos/*.mp4']
}
# colorize basenames
file_status {
paths = ['~/.config/i3/modules/*.py']
format = '{format_path}'
format_path = '\?color=good {basename}'
format_path_separator = ', '
}
```
@author obb, Moritz Lüdecke, Cyril Levis (@cyrinux)
SAMPLE OUTPUT
{'color': '#00FF00', 'full_text': u'\u25cf'}
missing
{'color': '#FF0000', 'full_text': u'\u25a0'}
"""
from glob import glob
from os.path import basename, expanduser
STRING_NO_PATHS = "missing paths"
class Py3status:
    """
    """

    # available configuration parameters
    cache_timeout = 10
    format = u"\?color=path [\?if=path \u25cf|\u25a0]"
    format_path = u"{basename}"
    format_path_separator = u" "
    paths = None
    thresholds = [(0, "bad"), (1, "good")]

    class Meta:
        deprecated = {
            "rename": [
                {
                    "param": "format_available",
                    "new": "icon_available",
                    "msg": "obsolete parameter use `icon_available`",
                },
                {
                    "param": "format_unavailable",
                    "new": "icon_unavailable",
                    "msg": "obsolete parameter use `icon_unavailable`",
                },
                {
                    "param": "path",
                    "new": "paths",
                    "msg": "obsolete parameter use `paths`",
                },
            ],
            "rename_placeholder": [
                {"placeholder": "paths", "new": "path", "format_strings": ["format"]}
            ],
        }

    def post_config_hook(self):
        """Validate the configuration and pre-compute static data."""
        if not self.paths:
            raise Exception(STRING_NO_PATHS)

        # Fold the deprecated icon_* parameters into the format string in
        # place of the "{icon}" placeholder.
        icon_on = getattr(self, "icon_available", u"\u25cf")
        icon_off = getattr(self, "icon_unavailable", u"\u25a0")
        self.format = self.format.replace(
            "{icon}", u"\?color=path [\?if=path {}|{}]".format(icon_on, icon_off)
        )

        # Accept a single path as well as a list, and expand any '~' prefix.
        if not isinstance(self.paths, list):
            self.paths = [self.paths]
        self.paths = [expanduser(path) for path in self.paths]

        # Only extract format_path placeholders when the format uses them.
        placeholders = []
        if self.py3.format_contains(self.format, "format_path"):
            placeholders = self.py3.get_placeholders_list(self.format_path)
        self.init = {"format_path": placeholders}

    def file_status(self):
        """Glob the configured paths and build the py3status response."""
        matches = sorted(found for pattern in self.paths for found in glob(pattern))
        count = len(matches)

        composite = None
        if self.init["format_path"]:
            separator = self.py3.safe_format(self.format_path_separator)
            rendered = []
            for pathname in matches:
                # Only the two known placeholders are filled in; any other
                # placeholder in format_path is simply skipped.
                data = {
                    key: self.py3.safe_format(
                        basename(pathname) if key == "basename" else pathname
                    )
                    for key in self.init["format_path"]
                    if key in ("basename", "pathname")
                }
                rendered.append(self.py3.safe_format(self.format_path, data))
            composite = self.py3.composite_join(separator, rendered)

        if self.thresholds:
            # Color both the singular and plural placeholder names.
            self.py3.threshold_get_color(count, "path")
            self.py3.threshold_get_color(count, "paths")

        return {
            "cached_until": self.py3.time_in(self.cache_timeout),
            "full_text": self.py3.safe_format(
                self.format,
                {"path": count, "paths": count, "format_path": composite},
            ),
        }
if __name__ == "__main__":
    """
    Run module in test mode.
    """
    # Hand the module class to py3status's standalone test runner.
    from py3status.module_test import module_test

    module_test(Py3status)
| 30.310559 | 87 | 0.565369 |
from glob import glob
from os.path import basename, expanduser
STRING_NO_PATHS = "missing paths"
class Py3status:
    """Display whether files or directories matching the configured paths exist."""
    # Available configuration parameters:
    #   cache_timeout: refresh interval for this module (seconds)
    #   format / format_path / format_path_separator: display templates
    #   paths: a string or a list of paths/globs to check
    #   thresholds: color thresholds keyed off the number of matching paths
    cache_timeout = 10
    format = u"\?color=path [\?if=path \u25cf|\u25a0]"
    format_path = u"{basename}"
    format_path_separator = u" "
    paths = None
    thresholds = [(0, "bad"), (1, "good")]
    class Meta:
        # Map obsolete configuration names onto their replacements so old
        # user configurations keep working.
        deprecated = {
            "rename": [
                {
                    "param": "format_available",
                    "new": "icon_available",
                    "msg": "obsolete parameter use `icon_available`",
                },
                {
                    "param": "format_unavailable",
                    "new": "icon_unavailable",
                    "msg": "obsolete parameter use `icon_unavailable`",
                },
                {
                    "param": "path",
                    "new": "paths",
                    "msg": "obsolete parameter use `paths`",
                },
            ],
            "rename_placeholder": [
                {"placeholder": "paths", "new": "path", "format_strings": ["format"]}
            ],
        }
    def post_config_hook(self):
        """Validate configuration and normalize ``paths``; runs once at startup."""
        if not self.paths:
            raise Exception(STRING_NO_PATHS)
        # Deprecated icon_available/icon_unavailable parameters are folded
        # into the format string in place of the "{icon}" placeholder.
        on = getattr(self, "icon_available", u"\u25cf")
        off = getattr(self, "icon_unavailable", u"\u25a0")
        new_icon = u"\?color=path [\?if=path {}|{}]".format(on, off)
        self.format = self.format.replace("{icon}", new_icon)
        # Accept a bare string as well as a list, and expand any '~' prefix.
        if not isinstance(self.paths, list):
            self.paths = [self.paths]
        self.paths = list(map(expanduser, self.paths))
        # Pre-compute the placeholders used by format_path (if any).
        self.init = {"format_path": []}
        if self.py3.format_contains(self.format, "format_path"):
            self.init["format_path"] = self.py3.get_placeholders_list(self.format_path)
    def file_status(self):
        """Glob the configured paths and return the py3status response dict."""
        # All existing files matching any configured glob, sorted.
        paths = sorted([files for path in self.paths for files in glob(path)])
        count_path = len(paths)
        format_path = None
        if self.init["format_path"]:
            new_data = []
            format_path_separator = self.py3.safe_format(self.format_path_separator)
            for pathname in paths:
                path = {}
                for key in self.init["format_path"]:
                    # Only {basename} and {pathname} are supported here;
                    # other placeholders are skipped.
                    if key == "basename":
                        value = basename(pathname)
                    elif key == "pathname":
                        value = pathname
                    else:
                        continue
                    path[key] = self.py3.safe_format(value)
                new_data.append(self.py3.safe_format(self.format_path, path))
            format_path = self.py3.composite_join(format_path_separator, new_data)
        if self.thresholds:
            # Color both the singular and plural placeholder names.
            self.py3.threshold_get_color(count_path, "path")
            self.py3.threshold_get_color(count_path, "paths")
        return {
            "cached_until": self.py3.time_in(self.cache_timeout),
            "full_text": self.py3.safe_format(
                self.format,
                {"path": count_path, "paths": count_path, "format_path": format_path},
            ),
        }
if __name__ == "__main__":
    # Run the module through py3status's standalone test harness.
    from py3status.module_test import module_test
    module_test(Py3status)
| true | true |
f71bfb7e6138d20826c27d50c937e36b4fb9c103 | 454 | py | Python | lesson_4_transformation/lesson_4_affine_transformation.py | DewMaple/opencv-learning | 51991a5b9badf24cda740c1377f6be30dea91e1d | [
"MIT"
] | null | null | null | lesson_4_transformation/lesson_4_affine_transformation.py | DewMaple/opencv-learning | 51991a5b9badf24cda740c1377f6be30dea91e1d | [
"MIT"
] | null | null | null | lesson_4_transformation/lesson_4_affine_transformation.py | DewMaple/opencv-learning | 51991a5b9badf24cda740c1377f6be30dea91e1d | [
"MIT"
] | 1 | 2018-09-10T15:51:23.000Z | 2018-09-10T15:51:23.000Z | import cv2
import numpy as np
from utils import find_image
image_path = find_image('girls_01.jpg')
img = cv2.imread(image_path)
rows, cols, channel = img.shape
pts_src = np.float32([[50, 50], [200, 50], [50, 200]])
pts_dst = np.float32([[10, 100], [200, 80], [100, 650]])
M = cv2.getAffineTransform(pts_src, pts_dst)
res = cv2.warpAffine(img, M, (cols, rows))
cv2.imshow('transformation by three points', res)
cv2.waitKey(0)
cv2.destroyAllWindows()
| 23.894737 | 56 | 0.707048 | import cv2
import numpy as np
from utils import find_image
image_path = find_image('girls_01.jpg')
img = cv2.imread(image_path)
rows, cols, channel = img.shape
pts_src = np.float32([[50, 50], [200, 50], [50, 200]])
pts_dst = np.float32([[10, 100], [200, 80], [100, 650]])
M = cv2.getAffineTransform(pts_src, pts_dst)
res = cv2.warpAffine(img, M, (cols, rows))
cv2.imshow('transformation by three points', res)
cv2.waitKey(0)
cv2.destroyAllWindows()
| true | true |
f71bfbbb90ec970a284d4548547ab11e37b35557 | 21,393 | py | Python | androidtv/constants.py | deviant-aut/python-androidtv | 4bd5421e107949784b292a5f4a0397875a18e908 | [
"MIT"
] | 10 | 2022-01-17T14:46:04.000Z | 2022-03-19T16:19:06.000Z | androidtv/constants.py | deviant-aut/python-androidtv | 4bd5421e107949784b292a5f4a0397875a18e908 | [
"MIT"
] | 5 | 2022-01-18T20:33:18.000Z | 2022-03-30T15:57:24.000Z | androidtv/constants.py | deviant-aut/python-androidtv | 4bd5421e107949784b292a5f4a0397875a18e908 | [
"MIT"
] | 7 | 2022-01-25T01:26:47.000Z | 2022-03-13T05:54:53.000Z | """Constants used throughout the code.
**Links**
* `ADB key event codes <https://developer.android.com/reference/android/view/KeyEvent>`_
* `MediaSession PlaybackState property <https://developer.android.com/reference/android/media/session/PlaybackState.html>`_
"""
import re
import sys

# Use the real ``enum`` module on Python 3.5+, otherwise fall back to no-op
# stand-ins so the class definitions below stay syntactically valid.
# Tuple comparison also handles hypothetical major versions > 3, which the
# original ``sys.version_info[0] == 3`` check would wrongly send to the
# fallback branch.
if sys.version_info >= (3, 5):
    from enum import IntEnum, unique
else:  # pragma: no cover
    # Plain ``object`` base: DeviceEnum below degrades to an ordinary class.
    IntEnum = object

    def unique(cls):
        """A class decorator that does nothing."""
        return cls
# ``@unique`` guarantees that no two members share the same integer value.
@unique
class DeviceEnum(IntEnum):
    """An enum for the various device types."""

    BASETV = 0
    ANDROIDTV = 1
    FIRETV = 2
# Intents
INTENT_LAUNCH = "android.intent.category.LEANBACK_LAUNCHER"
INTENT_LAUNCH_FIRETV = "android.intent.category.LAUNCHER"
INTENT_HOME = "android.intent.category.HOME"
# Customizable commands
CUSTOM_AUDIO_STATE = "audio_state"
CUSTOM_CURRENT_APP = "current_app"
CUSTOM_CURRENT_APP_MEDIA_SESSION_STATE = "current_app_media_session_state"
CUSTOM_HDMI_INPUT = "hdmi_input"
CUSTOM_LAUNCH_APP = "launch_app"
CUSTOM_RUNNING_APPS = "running_apps"
CUSTOM_TURN_OFF = "turn_off"
CUSTOM_TURN_ON = "turn_on"
CUSTOMIZABLE_COMMANDS = {
CUSTOM_AUDIO_STATE,
CUSTOM_CURRENT_APP,
CUSTOM_CURRENT_APP_MEDIA_SESSION_STATE,
CUSTOM_HDMI_INPUT,
CUSTOM_LAUNCH_APP,
CUSTOM_RUNNING_APPS,
CUSTOM_TURN_OFF,
CUSTOM_TURN_ON,
}
#: The subset of `CUSTOMIZABLE_COMMANDS` that is potentially used in the ``update()`` method
HA_CUSTOMIZABLE_COMMANDS = (
CUSTOM_AUDIO_STATE,
CUSTOM_CURRENT_APP_MEDIA_SESSION_STATE,
CUSTOM_HDMI_INPUT,
CUSTOM_LAUNCH_APP,
CUSTOM_RUNNING_APPS,
CUSTOM_TURN_OFF,
CUSTOM_TURN_ON,
)
# echo '1' if the previous shell command was successful
CMD_SUCCESS1 = r" && echo -e '1\c'"
# echo '1' if the previous shell command was successful, echo '0' if it was not
CMD_SUCCESS1_FAILURE0 = r" && echo -e '1\c' || echo -e '0\c'"
#: Get the audio state
CMD_AUDIO_STATE = r"dumpsys audio | grep paused | grep -qv 'Buffer Queue' && echo -e '1\c' || (dumpsys audio | grep started | grep -qv 'Buffer Queue' && echo '2\c' || echo '0\c')"
#: Get the audio state for an Android 11 device
CMD_AUDIO_STATE11 = (
"CURRENT_AUDIO_STATE=$(dumpsys audio | sed -r -n '/[0-9]{2}-[0-9]{2}.*player piid:.*state:.*$/h; ${x;p;}') && "
+ r"echo $CURRENT_AUDIO_STATE | grep -q paused && echo -e '1\c' || { echo $CURRENT_AUDIO_STATE | grep -q started && echo '2\c' || echo '0\c' ; }"
)
#: Determine whether the device is awake
CMD_AWAKE = "dumpsys power | grep mWakefulness | grep -q Awake"
#: Parse current application identifier from dumpsys output and assign it to ``CURRENT_APP`` variable (assumes dumpsys output is momentarily set to ``CURRENT_APP`` variable)
CMD_PARSE_CURRENT_APP = "CURRENT_APP=${CURRENT_APP#*ActivityRecord{* * } && CURRENT_APP=${CURRENT_APP#*{* * } && CURRENT_APP=${CURRENT_APP%%/*} && CURRENT_APP=${CURRENT_APP%\\}*}"
#: Parse current application for an Android 11 device
CMD_PARSE_CURRENT_APP11 = "CURRENT_APP=${CURRENT_APP%%/*} && CURRENT_APP=${CURRENT_APP##* }"
#: Assign focused application identifier to ``CURRENT_APP`` variable
CMD_DEFINE_CURRENT_APP_VARIABLE = (
"CURRENT_APP=$(dumpsys window windows | grep -E 'mCurrentFocus|mFocusedApp') && " + CMD_PARSE_CURRENT_APP
)
#: Assign focused application identifier to ``CURRENT_APP`` variable for an Android 11 device
CMD_DEFINE_CURRENT_APP_VARIABLE11 = (
"CURRENT_APP=$(dumpsys window windows | grep 'Window #1') && " + CMD_PARSE_CURRENT_APP11
)
#: Output identifier for current/focused application
CMD_CURRENT_APP = CMD_DEFINE_CURRENT_APP_VARIABLE + " && echo $CURRENT_APP"
#: Output identifier for current/focused application for an Android 11 device
CMD_CURRENT_APP11 = CMD_DEFINE_CURRENT_APP_VARIABLE11 + " && echo $CURRENT_APP"
#: Assign focused application identifier to ``CURRENT_APP`` variable (for a Google TV device)
CMD_DEFINE_CURRENT_APP_VARIABLE_GOOGLE_TV = (
"CURRENT_APP=$(dumpsys activity a . | grep mResumedActivity) && " + CMD_PARSE_CURRENT_APP
)
#: Output identifier for current/focused application (for a Google TV device)
CMD_CURRENT_APP_GOOGLE_TV = CMD_DEFINE_CURRENT_APP_VARIABLE_GOOGLE_TV + " && echo $CURRENT_APP"
#: Get the HDMI input
CMD_HDMI_INPUT = (
"dumpsys activity starter | grep -E -o '(ExternalTv|HDMI)InputService/HW[0-9]' -m 1 | grep -o 'HW[0-9]'"
)
#: Get the HDMI input for an Android 11 device
CMD_HDMI_INPUT11 = (
"(HDMI=$(dumpsys tv_input | grep 'ResourceClientProfile {.*}' | grep -o -E '(hdmi_port=[0-9]|TV)') && { echo ${HDMI/hdmi_port=/HW} | cut -d' ' -f1 ; }) || "
+ CMD_HDMI_INPUT
)
#: Launch an app if it is not already the current app (assumes the variable ``CURRENT_APP`` has already been set)
CMD_LAUNCH_APP_CONDITION = (
"if [ $CURRENT_APP != '{0}' ]; then monkey -p {0} -c " + INTENT_LAUNCH + " --pct-syskeys 0 1; fi"
)
#: Launch an app if it is not already the current app (assumes the variable ``CURRENT_APP`` has already been set) on a Fire TV
CMD_LAUNCH_APP_CONDITION_FIRETV = (
"if [ $CURRENT_APP != '{0}' ]; then monkey -p {0} -c " + INTENT_LAUNCH_FIRETV + " --pct-syskeys 0 1; fi"
)
#: Launch an app if it is not already the current app
CMD_LAUNCH_APP = (
CMD_DEFINE_CURRENT_APP_VARIABLE.replace("{", "{{").replace("}", "}}") + " && " + CMD_LAUNCH_APP_CONDITION
)
#: Launch an app if it is not already the current app on an Android 11 device
CMD_LAUNCH_APP11 = (
CMD_DEFINE_CURRENT_APP_VARIABLE11.replace("{", "{{").replace("}", "}}") + " && " + CMD_LAUNCH_APP_CONDITION
)
#: Launch an app on a Fire TV device
CMD_LAUNCH_APP_FIRETV = (
CMD_DEFINE_CURRENT_APP_VARIABLE.replace("{", "{{").replace("}", "}}") + " && " + CMD_LAUNCH_APP_CONDITION_FIRETV
)
#: Launch an app on a Google TV device
CMD_LAUNCH_APP_GOOGLE_TV = (
CMD_DEFINE_CURRENT_APP_VARIABLE_GOOGLE_TV.replace("{", "{{").replace("}", "}}") + " && " + CMD_LAUNCH_APP_CONDITION
)
#: Get the state from ``dumpsys media_session``; this assumes that the variable ``CURRENT_APP`` has been defined
CMD_MEDIA_SESSION_STATE = "dumpsys media_session | grep -A 100 'Sessions Stack' | grep -A 100 $CURRENT_APP | grep -m 1 'state=PlaybackState {'"
#: Determine the current app and get the state from ``dumpsys media_session``
CMD_CURRENT_APP_MEDIA_SESSION_STATE = CMD_CURRENT_APP + " && " + CMD_MEDIA_SESSION_STATE
#: Determine the current app and get the state from ``dumpsys media_session`` for an Android 11 device
CMD_CURRENT_APP_MEDIA_SESSION_STATE11 = CMD_CURRENT_APP11 + " && " + CMD_MEDIA_SESSION_STATE
#: Determine the current app and get the state from ``dumpsys media_session`` for a Google TV device
CMD_CURRENT_APP_MEDIA_SESSION_STATE_GOOGLE_TV = CMD_CURRENT_APP_GOOGLE_TV + " && " + CMD_MEDIA_SESSION_STATE
#: Get the running apps for an Android TV device
CMD_RUNNING_APPS_ANDROIDTV = "ps -A | grep u0_a"
#: Get the running apps for a Fire TV device
CMD_RUNNING_APPS_FIRETV = "ps | grep u0_a"
#: Get installed apps
CMD_INSTALLED_APPS = "pm list packages"
#: Determine if the device is on
CMD_SCREEN_ON = (
"(dumpsys power | grep 'Display Power' | grep -q 'state=ON' || dumpsys power | grep -q 'mScreenOn=true')"
)
#: Get the "STREAM_MUSIC" block from ``dumpsys audio``
CMD_STREAM_MUSIC = r"dumpsys audio | grep '\- STREAM_MUSIC:' -A 11"
#: Turn off an Android TV device (note: `KEY_POWER = 26` is defined below)
CMD_TURN_OFF_ANDROIDTV = CMD_SCREEN_ON + " && input keyevent 26"
#: Turn off a Fire TV device (note: `KEY_SLEEP = 223` is defined below)
CMD_TURN_OFF_FIRETV = CMD_SCREEN_ON + " && input keyevent 223"
#: Turn on an Android TV device (note: `KEY_POWER = 26` is defined below)
CMD_TURN_ON_ANDROIDTV = CMD_SCREEN_ON + " || input keyevent 26"
#: Turn on a Fire TV device (note: `KEY_POWER = 26` and `KEY_HOME = 3` are defined below)
CMD_TURN_ON_FIRETV = CMD_SCREEN_ON + " || (input keyevent 26 && input keyevent 3)"
#: Get the wake lock size
CMD_WAKE_LOCK_SIZE = "dumpsys power | grep Locks | grep 'size='"
#: Determine if the device is on, the screen is on, and get the wake lock size
CMD_SCREEN_ON_AWAKE_WAKE_LOCK_SIZE = (
CMD_SCREEN_ON + CMD_SUCCESS1_FAILURE0 + " && " + CMD_AWAKE + CMD_SUCCESS1_FAILURE0 + " && " + CMD_WAKE_LOCK_SIZE
)
# `getprop` commands
CMD_MANUFACTURER = "getprop ro.product.manufacturer"
CMD_MODEL = "getprop ro.product.model"
CMD_SERIALNO = "getprop ro.serialno"
CMD_VERSION = "getprop ro.build.version.release"
# Commands for getting the MAC address
CMD_MAC_WLAN0 = "ip addr show wlan0 | grep -m 1 ether"
CMD_MAC_ETH0 = "ip addr show eth0 | grep -m 1 ether"
#: The command used for getting the device properties
CMD_DEVICE_PROPERTIES = CMD_MANUFACTURER + " && " + CMD_MODEL + " && " + CMD_SERIALNO + " && " + CMD_VERSION
# ADB key event codes
# https://developer.android.com/reference/android/view/KeyEvent
KEY_BACK = 4
KEY_BLUE = 186
KEY_CENTER = 23
KEY_COMPONENT1 = 249
KEY_COMPONENT2 = 250
KEY_COMPOSITE1 = 247
KEY_COMPOSITE2 = 248
KEY_DOWN = 20
KEY_END = 123
KEY_ENTER = 66
KEY_ESCAPE = 111
KEY_FAST_FORWARD = 90
KEY_GREEN = 184
KEY_HDMI1 = 243
KEY_HDMI2 = 244
KEY_HDMI3 = 245
KEY_HDMI4 = 246
KEY_HOME = 3
KEY_INPUT = 178
KEY_LEFT = 21
KEY_MENU = 82
KEY_MOVE_HOME = 122
KEY_MUTE = 164
KEY_NEXT = 87
KEY_PAIRING = 225
KEY_PAUSE = 127
KEY_PLAY = 126
KEY_PLAY_PAUSE = 85
KEY_POWER = 26
KEY_PREVIOUS = 88
KEY_RED = 183
KEY_RESUME = 224
KEY_REWIND = 89
KEY_RIGHT = 22
KEY_SAT = 237
KEY_SEARCH = 84
KEY_SETTINGS = 176
KEY_SLEEP = 223
KEY_SPACE = 62
KEY_STOP = 86
KEY_SUSPEND = 276
KEY_SYSDOWN = 281
KEY_SYSLEFT = 282
KEY_SYSRIGHT = 283
KEY_SYSUP = 280
KEY_TEXT = 233
KEY_TOP = 122
KEY_UP = 19
KEY_VGA = 251
KEY_VOLUME_DOWN = 25
KEY_VOLUME_UP = 24
KEY_WAKEUP = 224
KEY_YELLOW = 185
# Alphanumeric key event codes
KEY_0 = 7
KEY_1 = 8
KEY_2 = 9
KEY_3 = 10
KEY_4 = 11
KEY_5 = 12
KEY_6 = 13
KEY_7 = 14
KEY_8 = 15
KEY_9 = 16
KEY_A = 29
KEY_B = 30
KEY_C = 31
KEY_D = 32
KEY_E = 33
KEY_F = 34
KEY_G = 35
KEY_H = 36
KEY_I = 37
KEY_J = 38
KEY_K = 39
KEY_L = 40
KEY_M = 41
KEY_N = 42
KEY_O = 43
KEY_P = 44
KEY_Q = 45
KEY_R = 46
KEY_S = 47
KEY_T = 48
KEY_U = 49
KEY_V = 50
KEY_W = 51
KEY_X = 52
KEY_Y = 53
KEY_Z = 54
# Android TV keys
KEYS = {
"BACK": KEY_BACK,
"BLUE": KEY_BLUE,
"CENTER": KEY_CENTER,
"COMPONENT1": KEY_COMPONENT1,
"COMPONENT2": KEY_COMPONENT2,
"COMPOSITE1": KEY_COMPOSITE1,
"COMPOSITE2": KEY_COMPOSITE2,
"DOWN": KEY_DOWN,
"END": KEY_END,
"ENTER": KEY_ENTER,
"ESCAPE": KEY_ESCAPE,
"FAST_FORWARD": KEY_FAST_FORWARD,
"GREEN": KEY_GREEN,
"HDMI1": KEY_HDMI1,
"HDMI2": KEY_HDMI2,
"HDMI3": KEY_HDMI3,
"HDMI4": KEY_HDMI4,
"HOME": KEY_HOME,
"INPUT": KEY_INPUT,
"LEFT": KEY_LEFT,
"MENU": KEY_MENU,
"MOVE_HOME": KEY_MOVE_HOME,
"MUTE": KEY_MUTE,
"PAIRING": KEY_PAIRING,
"POWER": KEY_POWER,
"RED": KEY_RED,
"RESUME": KEY_RESUME,
"REWIND": KEY_REWIND,
"RIGHT": KEY_RIGHT,
"SAT": KEY_SAT,
"SEARCH": KEY_SEARCH,
"SETTINGS": KEY_SETTINGS,
"SLEEP": KEY_SLEEP,
"SUSPEND": KEY_SUSPEND,
"SYSDOWN": KEY_SYSDOWN,
"SYSLEFT": KEY_SYSLEFT,
"SYSRIGHT": KEY_SYSRIGHT,
"SYSUP": KEY_SYSUP,
"TEXT": KEY_TEXT,
"TOP": KEY_TOP,
"UP": KEY_UP,
"VGA": KEY_VGA,
"VOLUME_DOWN": KEY_VOLUME_DOWN,
"VOLUME_UP": KEY_VOLUME_UP,
"WAKEUP": KEY_WAKEUP,
"YELLOW": KEY_YELLOW,
}
# Android TV / Fire TV states
STATE_ON = "on"
STATE_IDLE = "idle"
STATE_OFF = "off"
STATE_PLAYING = "playing"
STATE_PAUSED = "paused"
STATE_STANDBY = "standby"
STATE_STOPPED = "stopped"
STATE_UNKNOWN = "unknown"
#: States that are valid (used by :func:`~androidtv.basetv.state_detection_rules_validator`)
# NOTE(review): STATE_ON, STATE_STOPPED and STATE_UNKNOWN are absent from this
# tuple -- confirm that custom state-detection rules are not allowed to map to them.
VALID_STATES = (STATE_IDLE, STATE_OFF, STATE_PLAYING, STATE_PAUSED, STATE_STANDBY)
#: Properties that can be used to determine the current state (used by :func:`~androidtv.basetv.state_detection_rules_validator`)
VALID_STATE_PROPERTIES = ("audio_state", "media_session_state")
#: Properties that can be checked for custom state detection (used by :func:`~androidtv.basetv.state_detection_rules_validator`)
VALID_PROPERTIES = VALID_STATE_PROPERTIES + ("wake_lock_size",)
#: The required type for each entry in :py:const:`VALID_PROPERTIES` (used by :func:`~androidtv.basetv.state_detection_rules_validator`)
VALID_PROPERTIES_TYPES = {"audio_state": str, "media_session_state": int, "wake_lock_size": int}
# https://developer.android.com/reference/android/media/session/PlaybackState.html
#: States for the :attr:`~androidtv.basetv.basetv.BaseTV.media_session_state` property
# Key 0 (PlaybackState.STATE_NONE per the Android docs above) maps to None,
# i.e. "no media session state available".
MEDIA_SESSION_STATES = {0: None, 1: STATE_STOPPED, 2: STATE_PAUSED, 3: STATE_PLAYING}
# Apps
APP_AE_TV = "com.aetn.aetv.watch"
APP_AMAZON_PRIME_VIDEO = "com.amazon.avod.thirdpartyclient"
APP_AMAZON_VIDEO = "com.amazon.avod"
APP_APPLE_TV_PLUS = "com.apple.atve.android.appletv"
APP_APPLE_TV_PLUS_FIRETV = "com.apple.atve.amazon.appletv"
APP_APPLE_TV_PLUS_SONY = "com.apple.atve.sony.appletv"
APP_ATV_LAUNCHER = "com.google.android.tvlauncher"
APP_BELL_FIBE = "com.quickplay.android.bellmediaplayer"
APP_CBC_GEM = "ca.cbc.android.cbctv"
APP_COMEDY_CENTRAL = "com.vmn.android.comedycentral"
APP_CRAVE = "ca.bellmedia.cravetv"
APP_DAILYMOTION = "com.dailymotion.dailymotion"
APP_DEEZER = "deezer.android.tv"
APP_DISNEY_PLUS = "com.disney.disneyplus"
APP_DISNEY_PLUS_HOTSTAR = "in.startv.hotstar"
APP_DS_PHOTO = "com.synology.dsphoto"
APP_DS_VIDEO = "com.synology.dsvideo"
APP_ES_FILE_EXPLORER = "com.estrongs.android.pop"
APP_FACEBOOK = "com.facebook.katana"
APP_FAWESOME = "com.future.moviesByFawesomeAndroidTV"
APP_FIREFOX = "org.mozilla.tv.firefox"
APP_FIRETV_PACKAGE_LAUNCHER = "com.amazon.tv.launcher"
APP_FIRETV_PACKAGE_SETTINGS = "com.amazon.tv.settings"
APP_FIRETV_STORE = "com.amazon.venezia"
APP_FOOD_NETWORK_GO = "tv.accedo.foodnetwork"
APP_FRANCE_TV = "fr.francetv.pluzz"
APP_GLOBAL_TV = "com.shawmedia.smglobal"
APP_GOOGLE_CAST = "com.google.android.apps.mediashell"
APP_GOOGLE_TV_LAUNCHER = "com.google.android.apps.tv.launcherx"
APP_HAYSTACK_NEWS = "com.haystack.android"
APP_HBO_GO = "eu.hbogo.androidtv.production"
APP_HBO_GO_2 = "com.HBO"
APP_HOICHOI = "com.viewlift.hoichoi"
APP_HULU = "com.hulu.plus"
APP_HUNGAMA_PLAY = "com.hungama.movies.tv"
APP_IMDB_TV = "com.amazon.imdb.tv.android.app"
APP_IPTV = "ru.iptvremote.android.iptv"
APP_IPTV_SMARTERS_PRO = "com.nst.iptvsmarterstvbox"
APP_JELLYFIN_TV = "org.jellyfin.androidtv"
APP_JIO_CINEMA = "com.jio.media.stb.ondemand"
APP_KODI = "org.xbmc.kodi"
APP_LIVE_CHANNELS = "com.google.android.tv"
APP_MIJN_RADIO = "org.samsonsen.nederlandse.radio.holland.nl"
APP_MOLOTOV = "tv.molotov.app"
APP_MRMC = "tv.mrmc.mrmc"
APP_MRMC_LITE = "tv.mrmc.mrmc.lite"
APP_MX_PLAYER = "com.mxtech.videoplayer.ad"
APP_NETFLIX = "com.netflix.ninja"
APP_NLZIET = "nl.nlziet"
APP_NOS = "nl.nos.app"
APP_NPO = "nl.uitzendinggemist"
APP_OCS = "com.orange.ocsgo"
APP_PLAY_GAMES = "com.google.android.play.games"
APP_PLAY_MUSIC = "com.google.android.music"
APP_PLAY_STORE = "com.android.vending"
APP_PLAY_VIDEOS = "com.google.android.videos"
APP_PLEX = "com.plexapp.android"
APP_PRIME_VIDEO = "com.amazon.amazonvideo.livingroom"
APP_PRIME_VIDEO_FIRETV = "com.amazon.firebat"
APP_SETTINGS = "com.android.tv.settings"
APP_SMART_YOUTUBE_TV = "com.liskovsoft.videomanager"
APP_SONY_ACTION_MENU = "com.sony.dtv.scrums.action"
APP_SONY_ALBUM = "com.sony.dtv.osat.album"
APP_SONY_BRAVIA_SYNC_MENU = "com.sony.dtv.braviasyncmenu"
APP_SONY_BRAVIA_TUTORIALS = "com.sony.dtv.bravialifehack"
APP_SONY_DISCOVER = "com.sony.dtv.discovery"
APP_SONY_HELP = "com.sony.dtv.smarthelp"
APP_SONY_INTERNET_BROWSER = "com.vewd.core.integration.dia"
APP_SONY_LIV = "com.sonyliv"
APP_SONY_MUSIC = "com.sony.dtv.osat.music"
APP_SONY_SCREEN_MIRRORING = "com.sony.dtv.networkapp.wifidirect"
APP_SONY_SELECT = "com.sony.dtv.sonyselect"
APP_SONY_TIMERS = "com.sony.dtv.timers"
APP_SONY_TV = "com.sony.dtv.tvx"
APP_SONY_VIDEO = "com.sony.dtv.osat.video"
APP_SPORT1 = "de.sport1.firetv.video"
APP_SPOTIFY = "com.spotify.tv.android"
APP_STEAM_LINK = "com.valvesoftware.steamlink"
APP_SYFY = "com.amazon.webapps.nbc.syfy"
APP_T2 = "tv.perception.clients.tv.android"
APP_TED = "com.ted.android.tv"
APP_TUNEIN = "tunein.player"
APP_TVHEADEND = "de.cyberdream.dreamepg.tvh.tv.player"
APP_TWITCH = "tv.twitch.android.app"
APP_TWITCH_FIRETV = "tv.twitch.android.viewer"
APP_VEVO = "com.vevo.tv"
APP_VH1 = "com.mtvn.vh1android"
APP_VIMEO = "com.vimeo.android.videoapp"
APP_VLC = "org.videolan.vlc"
APP_VOYO = "com.phonegap.voyo"
APP_VRV = "com.ellation.vrv"
APP_WAIPU_TV = "de.exaring.waipu.firetv.live"
APP_WATCH_TNT = "com.turner.tnt.android.networkapp"
APP_YOUTUBE = "com.google.android.youtube.tv"
APP_YOUTUBE_FIRETV = "com.amazon.firetv.youtube"
APP_YOUTUBE_KIDS = "com.google.android.youtube.tvkids"
APP_YOUTUBE_KIDS_FIRETV = "com.amazon.firetv.youtube.kids"
APP_YOUTUBE_MUSIC = "com.google.android.youtube.tvmusic"
APP_YOUTUBE_TV = "com.google.android.youtube.tvunplugged"
APP_ZEE5 = "com.graymatrix.did"
APP_ZIGGO_GO_TV = "com.ziggo.tv"
APPS = {
APP_AE_TV: "A&E",
APP_AMAZON_PRIME_VIDEO: "Amazon Prime Video",
APP_AMAZON_VIDEO: "Amazon Video",
APP_APPLE_TV_PLUS: "Apple TV+",
APP_APPLE_TV_PLUS_FIRETV: "Apple TV+ (Fire TV)",
APP_APPLE_TV_PLUS_SONY: "Apple TV+ (Sony)",
APP_ATV_LAUNCHER: "Android TV Launcher",
APP_BELL_FIBE: "Bell Fibe",
APP_CBC_GEM: "CBC Gem",
APP_COMEDY_CENTRAL: "Comedy Central",
APP_CRAVE: "Crave",
APP_DAILYMOTION: "Dailymotion",
APP_DEEZER: "Deezer",
APP_DISNEY_PLUS: "Disney+",
APP_DISNEY_PLUS_HOTSTAR: "Disney+ Hotstar",
APP_DS_PHOTO: "DS photo",
APP_DS_VIDEO: "DS video",
APP_ES_FILE_EXPLORER: "ES File Explorer",
APP_FACEBOOK: "Facebook Watch",
APP_FAWESOME: "Fawsome",
APP_FIREFOX: "Firefox",
APP_FIRETV_STORE: "FireTV Store",
APP_FOOD_NETWORK_GO: "Food Network GO",
APP_FRANCE_TV: "France TV",
APP_GLOBAL_TV: "Global TV",
APP_GOOGLE_CAST: "Google Cast",
APP_GOOGLE_TV_LAUNCHER: "Google TV Launcher",
APP_HAYSTACK_NEWS: "Haystack News",
APP_HBO_GO: "HBO GO",
APP_HBO_GO_2: "HBO GO (2)",
APP_HOICHOI: "Hoichoi",
APP_HULU: "Hulu",
APP_HUNGAMA_PLAY: "Hungama Play",
APP_IMDB_TV: "IMDb TV",
APP_IPTV: "IPTV",
APP_IPTV_SMARTERS_PRO: "IPTV Smarters Pro",
APP_JELLYFIN_TV: "Jellyfin",
APP_JIO_CINEMA: "Jio Cinema",
APP_KODI: "Kodi",
APP_LIVE_CHANNELS: "Live Channels",
APP_MIJN_RADIO: "Mijn Radio",
APP_MOLOTOV: "Molotov",
APP_MRMC: "MrMC",
APP_MRMC_LITE: "MrMC Lite",
APP_MX_PLAYER: "MX Player",
APP_NETFLIX: "Netflix",
APP_NLZIET: "NLZIET",
APP_NOS: "NOS",
APP_NPO: "NPO",
APP_OCS: "OCS",
APP_PLAY_GAMES: "Play Games",
APP_PLAY_MUSIC: "Play Music",
APP_PLAY_STORE: "Play Store",
APP_PLAY_VIDEOS: "Play Movies & TV",
APP_PLEX: "Plex",
APP_PRIME_VIDEO: "Prime Video",
APP_PRIME_VIDEO_FIRETV: "Prime Video (FireTV)",
APP_SETTINGS: "Settings",
APP_SMART_YOUTUBE_TV: "Smart YouTube TV",
APP_SONY_ACTION_MENU: "Action Menu",
APP_SONY_ALBUM: "Album",
APP_SONY_BRAVIA_SYNC_MENU: "Sync Menu",
APP_SONY_BRAVIA_TUTORIALS: "BRAVIA Tutorials",
APP_SONY_DISCOVER: "Discover",
APP_SONY_HELP: "Help",
APP_SONY_INTERNET_BROWSER: "Internet Browser",
APP_SONY_LIV: "SonyLIV",
APP_SONY_MUSIC: "Music",
APP_SONY_SCREEN_MIRRORING: "Screen mirroring",
APP_SONY_SELECT: "Sony Select",
APP_SONY_TIMERS: "Timers",
APP_SONY_TV: "TV",
APP_SONY_VIDEO: "Video",
APP_SPORT1: "Sport 1",
APP_SPOTIFY: "Spotify",
APP_STEAM_LINK: "Steam Link",
APP_SYFY: "Syfy",
APP_T2: "T-2 TV",
APP_TED: "TED",
APP_TUNEIN: "TuneIn Radio",
APP_TVHEADEND: "DreamPlayer TVHeadend",
APP_TWITCH: "Twitch",
APP_TWITCH_FIRETV: "Twitch (FireTV)",
APP_VEVO: "Vevo",
APP_VH1: "VH1",
APP_VIMEO: "Vimeo",
APP_VLC: "VLC",
APP_VOYO: "VOYO",
APP_VRV: "VRV",
APP_WAIPU_TV: "Waipu TV",
APP_WATCH_TNT: "Watch TNT",
APP_YOUTUBE: "YouTube",
APP_YOUTUBE_FIRETV: "YouTube (FireTV)",
APP_YOUTUBE_KIDS: "YouTube Kids",
APP_YOUTUBE_KIDS_FIRETV: "YouTube Kids (FireTV)",
APP_YOUTUBE_MUSIC: "YouTube Music",
APP_YOUTUBE_TV: "YouTube TV",
APP_ZEE5: "ZEE5",
APP_ZIGGO_GO_TV: "Ziggo GO TV",
}
# Regular expressions
# Extracts the numeric playback state from the output of CMD_MEDIA_SESSION_STATE
# (the "state=PlaybackState {..." line of `dumpsys media_session`).
REGEX_MEDIA_SESSION_STATE = re.compile(r"state=(?P<state>[0-9]+)", re.MULTILINE)
# Extracts the wake lock count from the output of CMD_WAKE_LOCK_SIZE
# (the "Locks ... size=N" line of `dumpsys power`).
REGEX_WAKE_LOCK_SIZE = re.compile(r"size=(?P<size>[0-9]+)")
# Regular expression patterns
DEVICE_REGEX_PATTERN = r"Devices: (.*?)\W"
# Matches the MAC address in the output of CMD_MAC_WLAN0 / CMD_MAC_ETH0.
MAC_REGEX_PATTERN = "ether (.*?) brd"
# Matches the maximum volume line in the STREAM_MUSIC block (see CMD_STREAM_MUSIC).
MAX_VOLUME_REGEX_PATTERN = r"Max: (\d{1,})"
# Matches the muted flag in the STREAM_MUSIC block.
MUTED_REGEX_PATTERN = r"Muted: (.*?)\W"
# Delimits the STREAM_MUSIC section of `dumpsys audio` output.
STREAM_MUSIC_REGEX_PATTERN = "STREAM_MUSIC(.*?)- STREAM"
# Matches a volume value of the form "): <n>" within the STREAM_MUSIC block.
VOLUME_REGEX_PATTERN = r"\): (\d{1,})"
#: Default authentication timeout (in s) for :meth:`adb_shell.handle.tcp_handle.TcpHandle.connect` and :meth:`adb_shell.handle.tcp_handle_async.TcpHandleAsync.connect`
DEFAULT_AUTH_TIMEOUT_S = 10.0
#: Default transport timeout (in s) for :meth:`adb_shell.handle.tcp_handle.TcpHandle.connect` and :meth:`adb_shell.handle.tcp_handle_async.TcpHandleAsync.connect`
DEFAULT_TRANSPORT_TIMEOUT_S = 1.0
#: Default timeout (in s) for :class:`adb_shell.handle.tcp_handle.TcpHandle` and :class:`adb_shell.handle.tcp_handle_async.TcpHandleAsync`
DEFAULT_ADB_TIMEOUT_S = 9.0
#: Default timeout for acquiring the lock that protects ADB commands
DEFAULT_LOCK_TIMEOUT_S = 3.0
| 34.338684 | 179 | 0.72865 |
import re
import sys
if sys.version_info[0] == 3 and sys.version_info[1] >= 5:
from enum import IntEnum, unique
else:
IntEnum = object
def unique(cls):
"""A class decorator that does nothing."""
return cls
@unique
class DeviceEnum(IntEnum):
BASETV = 0
ANDROIDTV = 1
FIRETV = 2
INTENT_LAUNCH = "android.intent.category.LEANBACK_LAUNCHER"
INTENT_LAUNCH_FIRETV = "android.intent.category.LAUNCHER"
INTENT_HOME = "android.intent.category.HOME"
CUSTOM_AUDIO_STATE = "audio_state"
CUSTOM_CURRENT_APP = "current_app"
CUSTOM_CURRENT_APP_MEDIA_SESSION_STATE = "current_app_media_session_state"
CUSTOM_HDMI_INPUT = "hdmi_input"
CUSTOM_LAUNCH_APP = "launch_app"
CUSTOM_RUNNING_APPS = "running_apps"
CUSTOM_TURN_OFF = "turn_off"
CUSTOM_TURN_ON = "turn_on"
CUSTOMIZABLE_COMMANDS = {
CUSTOM_AUDIO_STATE,
CUSTOM_CURRENT_APP,
CUSTOM_CURRENT_APP_MEDIA_SESSION_STATE,
CUSTOM_HDMI_INPUT,
CUSTOM_LAUNCH_APP,
CUSTOM_RUNNING_APPS,
CUSTOM_TURN_OFF,
CUSTOM_TURN_ON,
}
HA_CUSTOMIZABLE_COMMANDS = (
CUSTOM_AUDIO_STATE,
CUSTOM_CURRENT_APP_MEDIA_SESSION_STATE,
CUSTOM_HDMI_INPUT,
CUSTOM_LAUNCH_APP,
CUSTOM_RUNNING_APPS,
CUSTOM_TURN_OFF,
CUSTOM_TURN_ON,
)
CMD_SUCCESS1 = r" && echo -e '1\c'"
CMD_SUCCESS1_FAILURE0 = r" && echo -e '1\c' || echo -e '0\c'"
CMD_AUDIO_STATE = r"dumpsys audio | grep paused | grep -qv 'Buffer Queue' && echo -e '1\c' || (dumpsys audio | grep started | grep -qv 'Buffer Queue' && echo '2\c' || echo '0\c')"
CMD_AUDIO_STATE11 = (
"CURRENT_AUDIO_STATE=$(dumpsys audio | sed -r -n '/[0-9]{2}-[0-9]{2}.*player piid:.*state:.*$/h; ${x;p;}') && "
+ r"echo $CURRENT_AUDIO_STATE | grep -q paused && echo -e '1\c' || { echo $CURRENT_AUDIO_STATE | grep -q started && echo '2\c' || echo '0\c' ; }"
)
CMD_AWAKE = "dumpsys power | grep mWakefulness | grep -q Awake"
CMD_PARSE_CURRENT_APP = "CURRENT_APP=${CURRENT_APP#*ActivityRecord{* * } && CURRENT_APP=${CURRENT_APP#*{* * } && CURRENT_APP=${CURRENT_APP%%/*} && CURRENT_APP=${CURRENT_APP%\\}*}"
CMD_PARSE_CURRENT_APP11 = "CURRENT_APP=${CURRENT_APP%%/*} && CURRENT_APP=${CURRENT_APP##* }"
CMD_DEFINE_CURRENT_APP_VARIABLE = (
"CURRENT_APP=$(dumpsys window windows | grep -E 'mCurrentFocus|mFocusedApp') && " + CMD_PARSE_CURRENT_APP
)
CMD_DEFINE_CURRENT_APP_VARIABLE11 = (
"CURRENT_APP=$(dumpsys window windows | grep 'Window #1') && " + CMD_PARSE_CURRENT_APP11
)
CMD_CURRENT_APP = CMD_DEFINE_CURRENT_APP_VARIABLE + " && echo $CURRENT_APP"
CMD_CURRENT_APP11 = CMD_DEFINE_CURRENT_APP_VARIABLE11 + " && echo $CURRENT_APP"
CMD_DEFINE_CURRENT_APP_VARIABLE_GOOGLE_TV = (
"CURRENT_APP=$(dumpsys activity a . | grep mResumedActivity) && " + CMD_PARSE_CURRENT_APP
)
CMD_CURRENT_APP_GOOGLE_TV = CMD_DEFINE_CURRENT_APP_VARIABLE_GOOGLE_TV + " && echo $CURRENT_APP"
CMD_HDMI_INPUT = (
"dumpsys activity starter | grep -E -o '(ExternalTv|HDMI)InputService/HW[0-9]' -m 1 | grep -o 'HW[0-9]'"
)
CMD_HDMI_INPUT11 = (
"(HDMI=$(dumpsys tv_input | grep 'ResourceClientProfile {.*}' | grep -o -E '(hdmi_port=[0-9]|TV)') && { echo ${HDMI/hdmi_port=/HW} | cut -d' ' -f1 ; }) || "
+ CMD_HDMI_INPUT
)
CMD_LAUNCH_APP_CONDITION = (
"if [ $CURRENT_APP != '{0}' ]; then monkey -p {0} -c " + INTENT_LAUNCH + " --pct-syskeys 0 1; fi"
)
CMD_LAUNCH_APP_CONDITION_FIRETV = (
"if [ $CURRENT_APP != '{0}' ]; then monkey -p {0} -c " + INTENT_LAUNCH_FIRETV + " --pct-syskeys 0 1; fi"
)
CMD_LAUNCH_APP = (
CMD_DEFINE_CURRENT_APP_VARIABLE.replace("{", "{{").replace("}", "}}") + " && " + CMD_LAUNCH_APP_CONDITION
)
CMD_LAUNCH_APP11 = (
CMD_DEFINE_CURRENT_APP_VARIABLE11.replace("{", "{{").replace("}", "}}") + " && " + CMD_LAUNCH_APP_CONDITION
)
CMD_LAUNCH_APP_FIRETV = (
CMD_DEFINE_CURRENT_APP_VARIABLE.replace("{", "{{").replace("}", "}}") + " && " + CMD_LAUNCH_APP_CONDITION_FIRETV
)
CMD_LAUNCH_APP_GOOGLE_TV = (
CMD_DEFINE_CURRENT_APP_VARIABLE_GOOGLE_TV.replace("{", "{{").replace("}", "}}") + " && " + CMD_LAUNCH_APP_CONDITION
)
CMD_MEDIA_SESSION_STATE = "dumpsys media_session | grep -A 100 'Sessions Stack' | grep -A 100 $CURRENT_APP | grep -m 1 'state=PlaybackState {'"
CMD_CURRENT_APP_MEDIA_SESSION_STATE = CMD_CURRENT_APP + " && " + CMD_MEDIA_SESSION_STATE
CMD_CURRENT_APP_MEDIA_SESSION_STATE11 = CMD_CURRENT_APP11 + " && " + CMD_MEDIA_SESSION_STATE
CMD_CURRENT_APP_MEDIA_SESSION_STATE_GOOGLE_TV = CMD_CURRENT_APP_GOOGLE_TV + " && " + CMD_MEDIA_SESSION_STATE
CMD_RUNNING_APPS_ANDROIDTV = "ps -A | grep u0_a"
CMD_RUNNING_APPS_FIRETV = "ps | grep u0_a"
CMD_INSTALLED_APPS = "pm list packages"
CMD_SCREEN_ON = (
"(dumpsys power | grep 'Display Power' | grep -q 'state=ON' || dumpsys power | grep -q 'mScreenOn=true')"
)
CMD_STREAM_MUSIC = r"dumpsys audio | grep '\- STREAM_MUSIC:' -A 11"
CMD_TURN_OFF_ANDROIDTV = CMD_SCREEN_ON + " && input keyevent 26"
CMD_TURN_OFF_FIRETV = CMD_SCREEN_ON + " && input keyevent 223"
CMD_TURN_ON_ANDROIDTV = CMD_SCREEN_ON + " || input keyevent 26"
CMD_TURN_ON_FIRETV = CMD_SCREEN_ON + " || (input keyevent 26 && input keyevent 3)"
CMD_WAKE_LOCK_SIZE = "dumpsys power | grep Locks | grep 'size='"
CMD_SCREEN_ON_AWAKE_WAKE_LOCK_SIZE = (
CMD_SCREEN_ON + CMD_SUCCESS1_FAILURE0 + " && " + CMD_AWAKE + CMD_SUCCESS1_FAILURE0 + " && " + CMD_WAKE_LOCK_SIZE
)
CMD_MANUFACTURER = "getprop ro.product.manufacturer"
CMD_MODEL = "getprop ro.product.model"
CMD_SERIALNO = "getprop ro.serialno"
CMD_VERSION = "getprop ro.build.version.release"
CMD_MAC_WLAN0 = "ip addr show wlan0 | grep -m 1 ether"
CMD_MAC_ETH0 = "ip addr show eth0 | grep -m 1 ether"
CMD_DEVICE_PROPERTIES = CMD_MANUFACTURER + " && " + CMD_MODEL + " && " + CMD_SERIALNO + " && " + CMD_VERSION
KEY_BACK = 4
KEY_BLUE = 186
KEY_CENTER = 23
KEY_COMPONENT1 = 249
KEY_COMPONENT2 = 250
KEY_COMPOSITE1 = 247
KEY_COMPOSITE2 = 248
KEY_DOWN = 20
KEY_END = 123
KEY_ENTER = 66
KEY_ESCAPE = 111
KEY_FAST_FORWARD = 90
KEY_GREEN = 184
KEY_HDMI1 = 243
KEY_HDMI2 = 244
KEY_HDMI3 = 245
KEY_HDMI4 = 246
KEY_HOME = 3
KEY_INPUT = 178
KEY_LEFT = 21
KEY_MENU = 82
KEY_MOVE_HOME = 122
KEY_MUTE = 164
KEY_NEXT = 87
KEY_PAIRING = 225
KEY_PAUSE = 127
KEY_PLAY = 126
KEY_PLAY_PAUSE = 85
KEY_POWER = 26
KEY_PREVIOUS = 88
KEY_RED = 183
KEY_RESUME = 224
KEY_REWIND = 89
KEY_RIGHT = 22
KEY_SAT = 237
KEY_SEARCH = 84
KEY_SETTINGS = 176
KEY_SLEEP = 223
KEY_SPACE = 62
KEY_STOP = 86
KEY_SUSPEND = 276
KEY_SYSDOWN = 281
KEY_SYSLEFT = 282
KEY_SYSRIGHT = 283
KEY_SYSUP = 280
KEY_TEXT = 233
KEY_TOP = 122
KEY_UP = 19
KEY_VGA = 251
KEY_VOLUME_DOWN = 25
KEY_VOLUME_UP = 24
KEY_WAKEUP = 224
KEY_YELLOW = 185
KEY_0 = 7
KEY_1 = 8
KEY_2 = 9
KEY_3 = 10
KEY_4 = 11
KEY_5 = 12
KEY_6 = 13
KEY_7 = 14
KEY_8 = 15
KEY_9 = 16
KEY_A = 29
KEY_B = 30
KEY_C = 31
KEY_D = 32
KEY_E = 33
KEY_F = 34
KEY_G = 35
KEY_H = 36
KEY_I = 37
KEY_J = 38
KEY_K = 39
KEY_L = 40
KEY_M = 41
KEY_N = 42
KEY_O = 43
KEY_P = 44
KEY_Q = 45
KEY_R = 46
KEY_S = 47
KEY_T = 48
KEY_U = 49
KEY_V = 50
KEY_W = 51
KEY_X = 52
KEY_Y = 53
KEY_Z = 54
KEYS = {
"BACK": KEY_BACK,
"BLUE": KEY_BLUE,
"CENTER": KEY_CENTER,
"COMPONENT1": KEY_COMPONENT1,
"COMPONENT2": KEY_COMPONENT2,
"COMPOSITE1": KEY_COMPOSITE1,
"COMPOSITE2": KEY_COMPOSITE2,
"DOWN": KEY_DOWN,
"END": KEY_END,
"ENTER": KEY_ENTER,
"ESCAPE": KEY_ESCAPE,
"FAST_FORWARD": KEY_FAST_FORWARD,
"GREEN": KEY_GREEN,
"HDMI1": KEY_HDMI1,
"HDMI2": KEY_HDMI2,
"HDMI3": KEY_HDMI3,
"HDMI4": KEY_HDMI4,
"HOME": KEY_HOME,
"INPUT": KEY_INPUT,
"LEFT": KEY_LEFT,
"MENU": KEY_MENU,
"MOVE_HOME": KEY_MOVE_HOME,
"MUTE": KEY_MUTE,
"PAIRING": KEY_PAIRING,
"POWER": KEY_POWER,
"RED": KEY_RED,
"RESUME": KEY_RESUME,
"REWIND": KEY_REWIND,
"RIGHT": KEY_RIGHT,
"SAT": KEY_SAT,
"SEARCH": KEY_SEARCH,
"SETTINGS": KEY_SETTINGS,
"SLEEP": KEY_SLEEP,
"SUSPEND": KEY_SUSPEND,
"SYSDOWN": KEY_SYSDOWN,
"SYSLEFT": KEY_SYSLEFT,
"SYSRIGHT": KEY_SYSRIGHT,
"SYSUP": KEY_SYSUP,
"TEXT": KEY_TEXT,
"TOP": KEY_TOP,
"UP": KEY_UP,
"VGA": KEY_VGA,
"VOLUME_DOWN": KEY_VOLUME_DOWN,
"VOLUME_UP": KEY_VOLUME_UP,
"WAKEUP": KEY_WAKEUP,
"YELLOW": KEY_YELLOW,
}
STATE_ON = "on"
STATE_IDLE = "idle"
STATE_OFF = "off"
STATE_PLAYING = "playing"
STATE_PAUSED = "paused"
STATE_STANDBY = "standby"
STATE_STOPPED = "stopped"
STATE_UNKNOWN = "unknown"
VALID_STATES = (STATE_IDLE, STATE_OFF, STATE_PLAYING, STATE_PAUSED, STATE_STANDBY)
VALID_STATE_PROPERTIES = ("audio_state", "media_session_state")
VALID_PROPERTIES = VALID_STATE_PROPERTIES + ("wake_lock_size",)
VALID_PROPERTIES_TYPES = {"audio_state": str, "media_session_state": int, "wake_lock_size": int}
MEDIA_SESSION_STATES = {0: None, 1: STATE_STOPPED, 2: STATE_PAUSED, 3: STATE_PLAYING}
APP_AE_TV = "com.aetn.aetv.watch"
APP_AMAZON_PRIME_VIDEO = "com.amazon.avod.thirdpartyclient"
APP_AMAZON_VIDEO = "com.amazon.avod"
APP_APPLE_TV_PLUS = "com.apple.atve.android.appletv"
APP_APPLE_TV_PLUS_FIRETV = "com.apple.atve.amazon.appletv"
APP_APPLE_TV_PLUS_SONY = "com.apple.atve.sony.appletv"
APP_ATV_LAUNCHER = "com.google.android.tvlauncher"
APP_BELL_FIBE = "com.quickplay.android.bellmediaplayer"
APP_CBC_GEM = "ca.cbc.android.cbctv"
APP_COMEDY_CENTRAL = "com.vmn.android.comedycentral"
APP_CRAVE = "ca.bellmedia.cravetv"
APP_DAILYMOTION = "com.dailymotion.dailymotion"
APP_DEEZER = "deezer.android.tv"
APP_DISNEY_PLUS = "com.disney.disneyplus"
APP_DISNEY_PLUS_HOTSTAR = "in.startv.hotstar"
APP_DS_PHOTO = "com.synology.dsphoto"
APP_DS_VIDEO = "com.synology.dsvideo"
APP_ES_FILE_EXPLORER = "com.estrongs.android.pop"
APP_FACEBOOK = "com.facebook.katana"
APP_FAWESOME = "com.future.moviesByFawesomeAndroidTV"
APP_FIREFOX = "org.mozilla.tv.firefox"
APP_FIRETV_PACKAGE_LAUNCHER = "com.amazon.tv.launcher"
APP_FIRETV_PACKAGE_SETTINGS = "com.amazon.tv.settings"
APP_FIRETV_STORE = "com.amazon.venezia"
APP_FOOD_NETWORK_GO = "tv.accedo.foodnetwork"
APP_FRANCE_TV = "fr.francetv.pluzz"
APP_GLOBAL_TV = "com.shawmedia.smglobal"
APP_GOOGLE_CAST = "com.google.android.apps.mediashell"
APP_GOOGLE_TV_LAUNCHER = "com.google.android.apps.tv.launcherx"
APP_HAYSTACK_NEWS = "com.haystack.android"
APP_HBO_GO = "eu.hbogo.androidtv.production"
APP_HBO_GO_2 = "com.HBO"
APP_HOICHOI = "com.viewlift.hoichoi"
APP_HULU = "com.hulu.plus"
APP_HUNGAMA_PLAY = "com.hungama.movies.tv"
APP_IMDB_TV = "com.amazon.imdb.tv.android.app"
APP_IPTV = "ru.iptvremote.android.iptv"
APP_IPTV_SMARTERS_PRO = "com.nst.iptvsmarterstvbox"
APP_JELLYFIN_TV = "org.jellyfin.androidtv"
APP_JIO_CINEMA = "com.jio.media.stb.ondemand"
APP_KODI = "org.xbmc.kodi"
APP_LIVE_CHANNELS = "com.google.android.tv"
APP_MIJN_RADIO = "org.samsonsen.nederlandse.radio.holland.nl"
APP_MOLOTOV = "tv.molotov.app"
APP_MRMC = "tv.mrmc.mrmc"
APP_MRMC_LITE = "tv.mrmc.mrmc.lite"
APP_MX_PLAYER = "com.mxtech.videoplayer.ad"
APP_NETFLIX = "com.netflix.ninja"
APP_NLZIET = "nl.nlziet"
APP_NOS = "nl.nos.app"
APP_NPO = "nl.uitzendinggemist"
APP_OCS = "com.orange.ocsgo"
APP_PLAY_GAMES = "com.google.android.play.games"
APP_PLAY_MUSIC = "com.google.android.music"
APP_PLAY_STORE = "com.android.vending"
APP_PLAY_VIDEOS = "com.google.android.videos"
APP_PLEX = "com.plexapp.android"
APP_PRIME_VIDEO = "com.amazon.amazonvideo.livingroom"
APP_PRIME_VIDEO_FIRETV = "com.amazon.firebat"
APP_SETTINGS = "com.android.tv.settings"
APP_SMART_YOUTUBE_TV = "com.liskovsoft.videomanager"
APP_SONY_ACTION_MENU = "com.sony.dtv.scrums.action"
APP_SONY_ALBUM = "com.sony.dtv.osat.album"
APP_SONY_BRAVIA_SYNC_MENU = "com.sony.dtv.braviasyncmenu"
APP_SONY_BRAVIA_TUTORIALS = "com.sony.dtv.bravialifehack"
APP_SONY_DISCOVER = "com.sony.dtv.discovery"
APP_SONY_HELP = "com.sony.dtv.smarthelp"
APP_SONY_INTERNET_BROWSER = "com.vewd.core.integration.dia"
APP_SONY_LIV = "com.sonyliv"
APP_SONY_MUSIC = "com.sony.dtv.osat.music"
APP_SONY_SCREEN_MIRRORING = "com.sony.dtv.networkapp.wifidirect"
APP_SONY_SELECT = "com.sony.dtv.sonyselect"
APP_SONY_TIMERS = "com.sony.dtv.timers"
APP_SONY_TV = "com.sony.dtv.tvx"
APP_SONY_VIDEO = "com.sony.dtv.osat.video"
APP_SPORT1 = "de.sport1.firetv.video"
APP_SPOTIFY = "com.spotify.tv.android"
APP_STEAM_LINK = "com.valvesoftware.steamlink"
APP_SYFY = "com.amazon.webapps.nbc.syfy"
APP_T2 = "tv.perception.clients.tv.android"
APP_TED = "com.ted.android.tv"
APP_TUNEIN = "tunein.player"
APP_TVHEADEND = "de.cyberdream.dreamepg.tvh.tv.player"
APP_TWITCH = "tv.twitch.android.app"
APP_TWITCH_FIRETV = "tv.twitch.android.viewer"
APP_VEVO = "com.vevo.tv"
APP_VH1 = "com.mtvn.vh1android"
APP_VIMEO = "com.vimeo.android.videoapp"
APP_VLC = "org.videolan.vlc"
APP_VOYO = "com.phonegap.voyo"
APP_VRV = "com.ellation.vrv"
APP_WAIPU_TV = "de.exaring.waipu.firetv.live"
APP_WATCH_TNT = "com.turner.tnt.android.networkapp"
APP_YOUTUBE = "com.google.android.youtube.tv"
APP_YOUTUBE_FIRETV = "com.amazon.firetv.youtube"
APP_YOUTUBE_KIDS = "com.google.android.youtube.tvkids"
APP_YOUTUBE_KIDS_FIRETV = "com.amazon.firetv.youtube.kids"
APP_YOUTUBE_MUSIC = "com.google.android.youtube.tvmusic"
APP_YOUTUBE_TV = "com.google.android.youtube.tvunplugged"
APP_ZEE5 = "com.graymatrix.did"
APP_ZIGGO_GO_TV = "com.ziggo.tv"
APPS = {
APP_AE_TV: "A&E",
APP_AMAZON_PRIME_VIDEO: "Amazon Prime Video",
APP_AMAZON_VIDEO: "Amazon Video",
APP_APPLE_TV_PLUS: "Apple TV+",
APP_APPLE_TV_PLUS_FIRETV: "Apple TV+ (Fire TV)",
APP_APPLE_TV_PLUS_SONY: "Apple TV+ (Sony)",
APP_ATV_LAUNCHER: "Android TV Launcher",
APP_BELL_FIBE: "Bell Fibe",
APP_CBC_GEM: "CBC Gem",
APP_COMEDY_CENTRAL: "Comedy Central",
APP_CRAVE: "Crave",
APP_DAILYMOTION: "Dailymotion",
APP_DEEZER: "Deezer",
APP_DISNEY_PLUS: "Disney+",
APP_DISNEY_PLUS_HOTSTAR: "Disney+ Hotstar",
APP_DS_PHOTO: "DS photo",
APP_DS_VIDEO: "DS video",
APP_ES_FILE_EXPLORER: "ES File Explorer",
APP_FACEBOOK: "Facebook Watch",
APP_FAWESOME: "Fawsome",
APP_FIREFOX: "Firefox",
APP_FIRETV_STORE: "FireTV Store",
APP_FOOD_NETWORK_GO: "Food Network GO",
APP_FRANCE_TV: "France TV",
APP_GLOBAL_TV: "Global TV",
APP_GOOGLE_CAST: "Google Cast",
APP_GOOGLE_TV_LAUNCHER: "Google TV Launcher",
APP_HAYSTACK_NEWS: "Haystack News",
APP_HBO_GO: "HBO GO",
APP_HBO_GO_2: "HBO GO (2)",
APP_HOICHOI: "Hoichoi",
APP_HULU: "Hulu",
APP_HUNGAMA_PLAY: "Hungama Play",
APP_IMDB_TV: "IMDb TV",
APP_IPTV: "IPTV",
APP_IPTV_SMARTERS_PRO: "IPTV Smarters Pro",
APP_JELLYFIN_TV: "Jellyfin",
APP_JIO_CINEMA: "Jio Cinema",
APP_KODI: "Kodi",
APP_LIVE_CHANNELS: "Live Channels",
APP_MIJN_RADIO: "Mijn Radio",
APP_MOLOTOV: "Molotov",
APP_MRMC: "MrMC",
APP_MRMC_LITE: "MrMC Lite",
APP_MX_PLAYER: "MX Player",
APP_NETFLIX: "Netflix",
APP_NLZIET: "NLZIET",
APP_NOS: "NOS",
APP_NPO: "NPO",
APP_OCS: "OCS",
APP_PLAY_GAMES: "Play Games",
APP_PLAY_MUSIC: "Play Music",
APP_PLAY_STORE: "Play Store",
APP_PLAY_VIDEOS: "Play Movies & TV",
APP_PLEX: "Plex",
APP_PRIME_VIDEO: "Prime Video",
APP_PRIME_VIDEO_FIRETV: "Prime Video (FireTV)",
APP_SETTINGS: "Settings",
APP_SMART_YOUTUBE_TV: "Smart YouTube TV",
APP_SONY_ACTION_MENU: "Action Menu",
APP_SONY_ALBUM: "Album",
APP_SONY_BRAVIA_SYNC_MENU: "Sync Menu",
APP_SONY_BRAVIA_TUTORIALS: "BRAVIA Tutorials",
APP_SONY_DISCOVER: "Discover",
APP_SONY_HELP: "Help",
APP_SONY_INTERNET_BROWSER: "Internet Browser",
APP_SONY_LIV: "SonyLIV",
APP_SONY_MUSIC: "Music",
APP_SONY_SCREEN_MIRRORING: "Screen mirroring",
APP_SONY_SELECT: "Sony Select",
APP_SONY_TIMERS: "Timers",
APP_SONY_TV: "TV",
APP_SONY_VIDEO: "Video",
APP_SPORT1: "Sport 1",
APP_SPOTIFY: "Spotify",
APP_STEAM_LINK: "Steam Link",
APP_SYFY: "Syfy",
APP_T2: "T-2 TV",
APP_TED: "TED",
APP_TUNEIN: "TuneIn Radio",
APP_TVHEADEND: "DreamPlayer TVHeadend",
APP_TWITCH: "Twitch",
APP_TWITCH_FIRETV: "Twitch (FireTV)",
APP_VEVO: "Vevo",
APP_VH1: "VH1",
APP_VIMEO: "Vimeo",
APP_VLC: "VLC",
APP_VOYO: "VOYO",
APP_VRV: "VRV",
APP_WAIPU_TV: "Waipu TV",
APP_WATCH_TNT: "Watch TNT",
APP_YOUTUBE: "YouTube",
APP_YOUTUBE_FIRETV: "YouTube (FireTV)",
APP_YOUTUBE_KIDS: "YouTube Kids",
APP_YOUTUBE_KIDS_FIRETV: "YouTube Kids (FireTV)",
APP_YOUTUBE_MUSIC: "YouTube Music",
APP_YOUTUBE_TV: "YouTube TV",
APP_ZEE5: "ZEE5",
APP_ZIGGO_GO_TV: "Ziggo GO TV",
}
REGEX_MEDIA_SESSION_STATE = re.compile(r"state=(?P<state>[0-9]+)", re.MULTILINE)
REGEX_WAKE_LOCK_SIZE = re.compile(r"size=(?P<size>[0-9]+)")
DEVICE_REGEX_PATTERN = r"Devices: (.*?)\W"
MAC_REGEX_PATTERN = "ether (.*?) brd"
MAX_VOLUME_REGEX_PATTERN = r"Max: (\d{1,})"
MUTED_REGEX_PATTERN = r"Muted: (.*?)\W"
STREAM_MUSIC_REGEX_PATTERN = "STREAM_MUSIC(.*?)- STREAM"
VOLUME_REGEX_PATTERN = r"\): (\d{1,})"
DEFAULT_AUTH_TIMEOUT_S = 10.0
DEFAULT_TRANSPORT_TIMEOUT_S = 1.0
DEFAULT_ADB_TIMEOUT_S = 9.0
DEFAULT_LOCK_TIMEOUT_S = 3.0
| true | true |
f71bfc3874575acdd3b3cfdfd9209ae815ab7e10 | 7,497 | py | Python | tf/experiment6.py | wichtounet/frameworks | e0cac9d4ffbbf0b1e9d2491eb70bf2c6154f313b | [
"MIT"
] | 20 | 2016-12-01T17:39:04.000Z | 2019-08-25T12:50:05.000Z | tf/experiment6.py | wichtounet/frameworks | e0cac9d4ffbbf0b1e9d2491eb70bf2c6154f313b | [
"MIT"
] | 4 | 2017-10-08T13:56:21.000Z | 2019-01-21T12:49:09.000Z | tf/experiment6.py | wichtounet/frameworks | e0cac9d4ffbbf0b1e9d2491eb70bf2c6154f313b | [
"MIT"
] | 7 | 2018-05-09T01:29:20.000Z | 2019-02-09T10:49:39.000Z | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import keras
from keras.preprocessing.image import ImageDataGenerator, array_to_img, img_to_array, load_img
import argparse
import gzip
import os
import sys
import time
import os
import math
import numpy
from PIL import Image
import numpy
from six.moves import urllib
from six.moves import xrange # pylint: disable=redefined-builtin
import tensorflow as tf
batch_size = 128  # number of images processed per training step
batches = 10009  # nominal number of evaluation batches -- TODO confirm it matches the data set on disk
num_epochs = 5  # full passes over the training data
num_classes = 1000  # ImageNet-1k label count
FLAGS = None  # placeholder for parsed command-line flags (never assigned in this script)
from urllib.request import urlretrieve
from os.path import isfile, isdir
import tarfile
import pickle
def data_type():
    """Return the TensorFlow dtype used for all model variables and placeholders."""
    dtype = tf.float32
    return dtype
def get_batch():
    """Load and return the next mini-batch of training data.

    Reads up to ``batch_size`` images starting at the global ``current_index``,
    scales pixel values to [0, 1], and one-hot encodes the labels.  Images that
    cannot be loaded are skipped with a message instead of aborting the run.

    Returns:
        tuple: ``(B, L)`` where ``B`` has shape ``(batch_size, 256, 256, 3)``
        and ``L`` is the one-hot label array of shape ``(batch_size, num_classes)``.
        If the end of the data set is reached early (because of skipped images),
        the trailing entries remain zero.
    """
    index = 0
    global current_index
    global training_images
    global training_labels

    B = numpy.zeros(shape=(batch_size, 256, 256, 3))
    L = numpy.zeros(shape=(batch_size))

    # Bound the loop by the data set size so that skipped (unreadable) images
    # near the end cannot push current_index out of range (IndexError before).
    while index < batch_size and current_index < len(training_images):
        try:
            img = load_img(training_images[current_index])
            B[index] = img_to_array(img)
            B[index] /= 255

            L[index] = training_labels[current_index]
            index = index + 1
            current_index = current_index + 1
        except Exception:
            # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
            # still propagate; corrupt image files are skipped.
            print("Ignore image {}".format(training_images[current_index]))
            current_index = current_index + 1

    return B, keras.utils.to_categorical(L, num_classes)
def main(_):
    """Train a small CNN on resized ImageNet data and report test accuracy.

    Walks the training directory to build (image path, label) lists, shuffles
    them, builds the TensorFlow graph (5 conv/pool stages + 2 fully connected
    layers), runs ``num_epochs`` of momentum-SGD training, then evaluates on
    one pass over the same data.

    Args:
        _: unused positional argument supplied by ``tf.app.run``.
    """
    global current_index
    global training_images
    global training_labels

    # One label per class sub-directory of the training root.
    label_counter = 0
    training_images = []
    training_labels = []
    for subdir, dirs, files in os.walk('/data/datasets/imagenet_resized/train/'):
        for folder in dirs:
            for folder_subdir, folder_dirs, folder_files in os.walk(os.path.join(subdir, folder)):
                for file in folder_files:
                    training_images.append(os.path.join(folder_subdir, file))
                    training_labels.append(label_counter)
            label_counter = label_counter + 1

    # Largest multiple of batch_size that fits in the data set.
    nice_n = math.floor(len(training_images) / batch_size) * batch_size

    print(nice_n)
    print(len(training_images))
    print(len(training_labels))

    # Shuffle images and labels with the same permutation so pairs stay aligned.
    import random
    perm = list(range(len(training_images)))
    random.shuffle(perm)
    training_images = [training_images[index] for index in perm]
    training_labels = [training_labels[index] for index in perm]

    print("Data is ready...")

    train_data_node = tf.placeholder(data_type(), shape=(batch_size, 256, 256, 3))
    train_labels_node = tf.placeholder(tf.int64, shape=(batch_size, 1000))

    # Convolutional weights
    conv1_weights = tf.Variable(tf.truncated_normal([3, 3, 3, 16], stddev=0.1, dtype=data_type()))
    conv1_biases = tf.Variable(tf.zeros([16], dtype=data_type()))
    conv2_weights = tf.Variable(tf.truncated_normal([3, 3, 16, 16], stddev=0.1, dtype=data_type()))
    conv2_biases = tf.Variable(tf.zeros([16], dtype=data_type()))
    conv3_weights = tf.Variable(tf.truncated_normal([3, 3, 16, 32], stddev=0.1, dtype=data_type()))
    conv3_biases = tf.Variable(tf.zeros([32], dtype=data_type()))
    conv4_weights = tf.Variable(tf.truncated_normal([3, 3, 32, 32], stddev=0.1, dtype=data_type()))
    conv4_biases = tf.Variable(tf.zeros([32], dtype=data_type()))
    conv5_weights = tf.Variable(tf.truncated_normal([3, 3, 32, 32], stddev=0.1, dtype=data_type()))
    conv5_biases = tf.Variable(tf.zeros([32], dtype=data_type()))

    # Fully connected weights
    fc1_weights = tf.Variable(tf.truncated_normal([2048, 2048], stddev=0.1, dtype=data_type()))
    fc1_biases = tf.Variable(tf.constant(0.1, shape=[2048], dtype=data_type()))
    fc2_weights = tf.Variable(tf.truncated_normal([2048, 1000], stddev=0.1, dtype=data_type()))
    fc2_biases = tf.Variable(tf.constant(0.1, shape=[1000], dtype=data_type()))

    def model(data):
        """Forward pass: 5x (conv 3x3 -> ReLU -> 2x2 max-pool), then 2 FC layers."""
        # Conv 1
        conv = tf.nn.conv2d(data, conv1_weights, strides=[1, 1, 1, 1], padding='SAME')
        relu = tf.nn.relu(tf.nn.bias_add(conv, conv1_biases))
        pool = tf.nn.max_pool(relu, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding='SAME')

        # Conv 2
        conv = tf.nn.conv2d(pool, conv2_weights, strides=[1, 1, 1, 1], padding='SAME')
        relu = tf.nn.relu(tf.nn.bias_add(conv, conv2_biases))
        pool = tf.nn.max_pool(relu, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding='SAME')

        # Conv 3
        conv = tf.nn.conv2d(pool, conv3_weights, strides=[1, 1, 1, 1], padding='SAME')
        relu = tf.nn.relu(tf.nn.bias_add(conv, conv3_biases))
        pool = tf.nn.max_pool(relu, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding='SAME')

        # Conv 4
        conv = tf.nn.conv2d(pool, conv4_weights, strides=[1, 1, 1, 1], padding='SAME')
        relu = tf.nn.relu(tf.nn.bias_add(conv, conv4_biases))
        pool = tf.nn.max_pool(relu, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding='SAME')

        # Conv 5
        conv = tf.nn.conv2d(pool, conv5_weights, strides=[1, 1, 1, 1], padding='SAME')
        relu = tf.nn.relu(tf.nn.bias_add(conv, conv5_biases))
        pool = tf.nn.max_pool(relu, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding='SAME')

        # Fully Connected (256/2^5 = 8, so the pool output flattens to 8*8*32 = 2048)
        reshape = tf.reshape(pool, [batch_size, 2048])
        hidden = tf.nn.relu(tf.matmul(reshape, fc1_weights) + fc1_biases)
        return tf.matmul(hidden, fc2_weights) + fc2_biases

    # Training computation: logits + cross-entropy loss.
    logits = model(train_data_node)
    loss = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits = logits, labels = train_labels_node))

    # Use simple momentum for the optimization.
    optimizer = tf.train.MomentumOptimizer(learning_rate=0.01, momentum=0.9).minimize(loss)

    acc_pred = tf.equal(tf.argmax(logits,1), tf.argmax(train_labels_node,1))
    accuracy = tf.reduce_mean(tf.cast(acc_pred, tf.float32))

    # Create a local session to run the training.
    with tf.Session() as sess:
        # Run all the initializers to prepare the trainable parameters.
        tf.global_variables_initializer().run(session = sess)
        print('Initialized!')

        for epoch in range(0, num_epochs):
            current_index = 0

            while current_index + batch_size < len(training_images):
                start_time = time.time()

                b, l = get_batch()
                feed_dict = {train_data_node: b, train_labels_node: l}

                # Run the optimizer to update weights.
                _, batch_loss, batch_accuracy = sess.run([optimizer, loss, accuracy], feed_dict=feed_dict)

                end_time = time.time()

                print('batch {}/{} loss: {} accuracy: {} duration: {}ms'.format(int(current_index / batch_size), int(nice_n / batch_size), batch_loss, batch_accuracy, 1000 * (end_time - start_time)), flush = True)

            print('epoch {}/{}'.format(epoch, num_epochs))

        # Finally print the result!
        current_index = 0
        acc = 0.0
        test_batches = 0

        while current_index + batch_size < len(training_images):
            b, l = get_batch()
            feed_dict = {train_data_node: b, train_labels_node: l}

            [batch_accuracy] = sess.run([accuracy], feed_dict=feed_dict)

            print('Test batch accuracy:', batch_accuracy, flush = True)

            acc += batch_accuracy
            test_batches += 1

        # Average over the batches actually evaluated (previously this divided
        # by the hard-coded global `batches`, which need not match the data on
        # disk) and scale the 0-1 fraction to a percentage for display.
        if test_batches > 0:
            acc /= test_batches
        print('Test accuracy: %.1f%%' % (100 * acc))
tf.app.run(main=main, argv=[sys.argv[0]])
| 35.870813 | 213 | 0.652528 | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import keras
from keras.preprocessing.image import ImageDataGenerator, array_to_img, img_to_array, load_img
import argparse
import gzip
import os
import sys
import time
import os
import math
import numpy
from PIL import Image
import numpy
from six.moves import urllib
from six.moves import xrange
import tensorflow as tf
batch_size = 128
batches = 10009
num_epochs = 5
num_classes = 1000
FLAGS = None
from urllib.request import urlretrieve
from os.path import isfile, isdir
import tarfile
import pickle
def data_type():
return tf.float32
def get_batch():
index = 0
global current_index
global training_images
global training_labels
B = numpy.zeros(shape=(batch_size, 256, 256, 3))
L = numpy.zeros(shape=(batch_size))
while index < batch_size:
try:
img = load_img(training_images[current_index])
B[index] = img_to_array(img)
B[index] /= 255
L[index] = training_labels[current_index]
index = index + 1
current_index = current_index + 1
except:
print("Ignore image {}".format(training_images[current_index]))
current_index = current_index + 1
return B, keras.utils.to_categorical(L, num_classes)
def main(_):
global current_index
global training_images
global training_labels
label_counter = 0
training_images = []
training_labels = []
for subdir, dirs, files in os.walk('/data/datasets/imagenet_resized/train/'):
for folder in dirs:
for folder_subdir, folder_dirs, folder_files in os.walk(os.path.join(subdir, folder)):
for file in folder_files:
training_images.append(os.path.join(folder_subdir, file))
training_labels.append(label_counter)
label_counter = label_counter + 1
nice_n = math.floor(len(training_images) / batch_size) * batch_size
print(nice_n)
print(len(training_images))
print(len(training_labels))
import random
perm = list(range(len(training_images)))
random.shuffle(perm)
training_images = [training_images[index] for index in perm]
training_labels = [training_labels[index] for index in perm]
print("Data is ready...")
train_data_node = tf.placeholder(data_type(), shape=(batch_size, 256, 256, 3))
train_labels_node = tf.placeholder(tf.int64, shape=(batch_size,1000))
conv1_weights = tf.Variable(tf.truncated_normal([3, 3, 3, 16], stddev=0.1, dtype=data_type()))
conv1_biases = tf.Variable(tf.zeros([16], dtype=data_type()))
conv2_weights = tf.Variable(tf.truncated_normal([3, 3, 16, 16], stddev=0.1, dtype=data_type()))
conv2_biases = tf.Variable(tf.zeros([16], dtype=data_type()))
conv3_weights = tf.Variable(tf.truncated_normal([3, 3, 16, 32], stddev=0.1, dtype=data_type()))
conv3_biases = tf.Variable(tf.zeros([32], dtype=data_type()))
conv4_weights = tf.Variable(tf.truncated_normal([3, 3, 32, 32], stddev=0.1, dtype=data_type()))
conv4_biases = tf.Variable(tf.zeros([32], dtype=data_type()))
conv5_weights = tf.Variable(tf.truncated_normal([3, 3, 32, 32], stddev=0.1, dtype=data_type()))
conv5_biases = tf.Variable(tf.zeros([32], dtype=data_type()))
fc1_weights = tf.Variable(tf.truncated_normal([2048, 2048], stddev=0.1, dtype=data_type()))
fc1_biases = tf.Variable(tf.constant(0.1, shape=[2048], dtype=data_type()))
fc2_weights = tf.Variable(tf.truncated_normal([2048, 1000], stddev=0.1, dtype=data_type()))
fc2_biases = tf.Variable(tf.constant(0.1, shape=[1000], dtype=data_type()))
def model(data):
conv = tf.nn.conv2d(data, conv1_weights, strides=[1, 1, 1, 1], padding='SAME')
relu = tf.nn.relu(tf.nn.bias_add(conv, conv1_biases))
pool = tf.nn.max_pool(relu, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding='SAME')
conv = tf.nn.conv2d(pool, conv2_weights, strides=[1, 1, 1, 1], padding='SAME')
relu = tf.nn.relu(tf.nn.bias_add(conv, conv2_biases))
pool = tf.nn.max_pool(relu, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding='SAME')
conv = tf.nn.conv2d(pool, conv3_weights, strides=[1, 1, 1, 1], padding='SAME')
relu = tf.nn.relu(tf.nn.bias_add(conv, conv3_biases))
pool = tf.nn.max_pool(relu, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding='SAME')
conv = tf.nn.conv2d(pool, conv4_weights, strides=[1, 1, 1, 1], padding='SAME')
relu = tf.nn.relu(tf.nn.bias_add(conv, conv4_biases))
pool = tf.nn.max_pool(relu, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding='SAME')
conv = tf.nn.conv2d(pool, conv5_weights, strides=[1, 1, 1, 1], padding='SAME')
relu = tf.nn.relu(tf.nn.bias_add(conv, conv5_biases))
pool = tf.nn.max_pool(relu, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding='SAME')
reshape = tf.reshape(pool, [batch_size, 2048])
hidden = tf.nn.relu(tf.matmul(reshape, fc1_weights) + fc1_biases)
return tf.matmul(hidden, fc2_weights) + fc2_biases
logits = model(train_data_node)
loss = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits = logits, labels = train_labels_node))
optimizer = tf.train.MomentumOptimizer(learning_rate=0.01, momentum=0.9).minimize(loss)
acc_pred = tf.equal(tf.argmax(logits,1), tf.argmax(train_labels_node,1))
accuracy = tf.reduce_mean(tf.cast(acc_pred, tf.float32))
with tf.Session() as sess:
tf.global_variables_initializer().run(session = sess)
print('Initialized!')
for epoch in range(0, num_epochs):
current_index = 0
while current_index + batch_size < len(training_images):
start_time = time.time()
b, l = get_batch()
feed_dict = {train_data_node: b, train_labels_node: l}
_, batch_loss, batch_accuracy = sess.run([optimizer, loss, accuracy], feed_dict=feed_dict)
end_time = time.time()
print('batch {}/{} loss: {} accuracy: {} duration: {}ms'.format(int(current_index / batch_size), int(nice_n / batch_size), batch_loss, batch_accuracy, 1000 * (end_time - start_time)), flush = True)
print('epoch {}/{}'.format(epoch, num_epochs))
current_index = 0
acc = 0.0
while current_index + batch_size < len(training_images):
b, l = get_batch()
feed_dict = {train_data_node: b, train_labels_node: l}
[batch_accuracy] = sess.run([accuracy], feed_dict=feed_dict)
print('Test batch accuracy:', batch_accuracy, flush = True)
acc += batch_accuracy
acc /= batches
print('Test accuracy: %.1f%%' % acc)
tf.app.run(main=main, argv=[sys.argv[0]])
| true | true |
f71bfd19f426d8f03872939dfd8bb1e2d6d2dcdf | 332 | py | Python | week1/the_real_deal/is_prime.py | sevgo/Programming101 | ac25c4d9695563b449a629c60ec77a739c9f5be3 | [
"BSD-3-Clause"
] | null | null | null | week1/the_real_deal/is_prime.py | sevgo/Programming101 | ac25c4d9695563b449a629c60ec77a739c9f5be3 | [
"BSD-3-Clause"
] | 1 | 2021-09-16T05:44:31.000Z | 2021-09-16T05:44:31.000Z | week1/the_real_deal/is_prime.py | sevgo/Programming101 | ac25c4d9695563b449a629c60ec77a739c9f5be3 | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env python3
from sum_of_divisors import sum_of_divisors
def is_prime(n):
# If N is prime it could only be devided
# to 1 and N, so sum of divisors has to be
# equal to N + 1
return n + 1 == sum_of_divisors(n)
if __name__ == "__main__":
number = int(input("Number: "))
print (is_prime(number))
| 22.133333 | 46 | 0.653614 |
from sum_of_divisors import sum_of_divisors
def is_prime(n):
return n + 1 == sum_of_divisors(n)
if __name__ == "__main__":
number = int(input("Number: "))
print (is_prime(number))
| true | true |
f71bfd37000184d8a9e59d68561043e6514fa6d5 | 4,211 | py | Python | src/data_imputation_paper/imputation/dl.py | se-jaeger/data-imputation-paper | 498d2d871302d917f58ecf6a9576e3a3451c5faa | [
"Apache-2.0"
] | 2 | 2022-01-18T09:59:01.000Z | 2022-02-02T10:01:45.000Z | src/data_imputation_paper/imputation/dl.py | se-jaeger/data-imputation-paper | 498d2d871302d917f58ecf6a9576e3a3451c5faa | [
"Apache-2.0"
] | null | null | null | src/data_imputation_paper/imputation/dl.py | se-jaeger/data-imputation-paper | 498d2d871302d917f58ecf6a9576e3a3451c5faa | [
"Apache-2.0"
] | null | null | null | import logging
from typing import Dict, List, Optional, Tuple
import pandas as pd
from autokeras import StructuredDataClassifier, StructuredDataRegressor
from tensorflow.keras import Model
from ._base import BaseImputer
logger = logging.getLogger()
class AutoKerasImputer(BaseImputer):
    """Deep-learning based imputer backed by AutoKeras architecture search.

    One AutoKeras model is searched and trained per target column: a
    ``StructuredDataRegressor`` for numerical columns and a
    ``StructuredDataClassifier`` for categorical ones. ``fit`` trains each
    model on the rows where its column is present; ``transform`` predicts
    the missing entries. Trained models live in ``self._predictors``.
    """

    def __init__(
        self,
        max_trials: Optional[int] = 10,
        tuner: Optional[str] = None,
        validation_split: Optional[float] = 0.2,
        epochs: Optional[int] = 10,
        seed: Optional[int] = None
    ):
        """
        Deep learning based imputation method. It uses AutoKeras to find good architecture/hyperparameters.

        Args:
            max_trials (Optional[int], optional): maximum number of trials for model selection. Defaults to 10.
            tuner (Optional[str], optional): AutoKeras hyperparameter tuning strategy. Defaults to None.
            validation_split (Optional[float], optional): validation split for AutoKeras fit. Defaults to 0.2.
            epochs (Optional[int], optional): number of epochs for AutoKeras fit. Defaults to 10.
            seed (Optional[int], optional): Seed to make behavior deterministic. Defaults to None.
        """

        super().__init__(
            seed=seed
        )

        self.max_trials = max_trials
        self.epochs = epochs
        # NOTE(review): validation_split is stored but not forwarded to the
        # AutoKeras fit() call below — confirm whether that is intended.
        self.validation_split = validation_split
        self.tuner = tuner

        # one trained AutoKeras model per imputed column
        self._predictors: Dict[str, Model] = {}

    def get_best_hyperparameters(self):
        """Return the best hyperparameter values AutoKeras found, per fitted column."""

        super().get_best_hyperparameters()

        return {
            column: self._predictors[column].tuner.get_best_hyperparameters()[0].values
            for column in self._predictors.keys()
        }

    def fit(self, data: pd.DataFrame, target_columns: List[str]) -> BaseImputer:
        """Search and train one AutoKeras model per target column.

        Each model is trained only on the rows where its target column is not
        missing, using all other known columns as features.

        Args:
            data: training data frame (not mutated; a copy is taken).
            target_columns: columns whose missing values should be imputed.

        Returns:
            self, to allow chaining.

        Raises:
            ValueError: if a target column is neither numerical nor categorical.
        """

        super().fit(data=data, target_columns=target_columns)

        # cast categorical columns to strings fixes problems where categories are integer values and treated as regression task
        data = self._categorical_columns_to_string(data.copy())  # We don't want to change the input dataframe -> copy it

        for target_column in self._target_columns:
            missing_mask = data[target_column].isna()
            feature_cols = [c for c in self._categorical_columns + self._numerical_columns if c != target_column]

            # regression for numerical targets, classification for categorical ones
            if target_column in self._numerical_columns:
                StructuredDataModelSearch = StructuredDataRegressor
            elif target_column in self._categorical_columns:
                StructuredDataModelSearch = StructuredDataClassifier
            else:
                # Previously this fell through and surfaced as a confusing
                # UnboundLocalError; fail early with a clear message instead.
                raise ValueError(f"column '{target_column}' is neither numerical nor categorical")

            self._predictors[target_column] = StructuredDataModelSearch(
                column_names=feature_cols,
                overwrite=True,
                max_trials=self.max_trials,
                tuner=self.tuner,
                directory="../models"
            )

            self._predictors[target_column].fit(
                x=data.loc[~missing_mask, feature_cols],
                y=data.loc[~missing_mask, target_column],
                epochs=self.epochs
            )

        self._fitted = True

        return self

    def transform(self, data: pd.DataFrame) -> Tuple[pd.DataFrame, pd.DataFrame]:
        """Impute missing values in the fitted target columns.

        Args:
            data: data frame to impute (not mutated; a copy is taken).

        Returns:
            Tuple of (imputed data frame, boolean mask of the cells that were
            originally missing in the target columns).
        """

        super().transform(data=data)

        imputed_mask = data[self._target_columns].isna()

        # save the original dtypes because ..
        dtypes = data.dtypes

        # ... dtypes of data need to be same as for fitting
        data = self._categorical_columns_to_string(data.copy())  # We don't want to change the input dataframe -> copy it

        for target_column in self._target_columns:
            feature_cols = [c for c in self._categorical_columns + self._numerical_columns if c != target_column]

            missing_mask = data[target_column].isna()
            amount_missing_in_columns = missing_mask.sum()

            if amount_missing_in_columns > 0:
                # predict() returns a 2-D array; take its single output column
                data.loc[missing_mask, target_column] = self._predictors[target_column].predict(data.loc[missing_mask, feature_cols])[:, 0]
                logger.debug(f'Imputed {amount_missing_in_columns} values in column {target_column}')

        self._restore_dtype(data, dtypes)

        return data, imputed_mask
| 37.265487 | 139 | 0.654001 | import logging
from typing import Dict, List, Optional, Tuple
import pandas as pd
from autokeras import StructuredDataClassifier, StructuredDataRegressor
from tensorflow.keras import Model
from ._base import BaseImputer
logger = logging.getLogger()
class AutoKerasImputer(BaseImputer):
    """Deep-learning based imputer: one AutoKeras model is searched and
    trained per target column (regressor for numerical columns, classifier
    for categorical ones); trained models are kept in ``self._predictors``."""
    def __init__(
        self,
        max_trials: Optional[int] = 10,
        tuner: Optional[str] = None,
        validation_split: Optional[float] = 0.2,
        epochs: Optional[int] = 10,
        seed: Optional[int] = None
    ):
        """
        Args:
            max_trials: maximum number of AutoKeras model-search trials.
            tuner: AutoKeras hyperparameter tuning strategy.
            validation_split: stored on the instance; not forwarded to the
                AutoKeras ``fit`` call in this class — TODO confirm intended.
            epochs: number of epochs for AutoKeras fit.
            seed: seed to make behavior deterministic.
        """
        super().__init__(
            seed=seed
        )
        self.max_trials = max_trials
        self.epochs = epochs
        self.validation_split = validation_split
        self.tuner = tuner
        # one trained AutoKeras model per imputed column
        self._predictors: Dict[str, Model] = {}
    def get_best_hyperparameters(self):
        """Return the best hyperparameter values AutoKeras found, per fitted column."""
        super().get_best_hyperparameters()
        return {
            column: self._predictors[column].tuner.get_best_hyperparameters()[0].values
            for column in self._predictors.keys()
        }
    def fit(self, data: pd.DataFrame, target_columns: List[str]) -> BaseImputer:
        """Search and train one model per target column, using only the rows
        where that column is present and all other known columns as features.
        Returns self for chaining."""
        super().fit(data=data, target_columns=target_columns)
        # copy so the caller's frame is not mutated by the categorical→string cast
        data = self._categorical_columns_to_string(data.copy())
        for target_column in self._target_columns:
            missing_mask = data[target_column].isna()
            feature_cols = [c for c in self._categorical_columns + self._numerical_columns if c != target_column]
            # regression for numerical targets, classification for categorical ones
            if target_column in self._numerical_columns:
                StructuredDataModelSearch = StructuredDataRegressor
            elif target_column in self._categorical_columns:
                StructuredDataModelSearch = StructuredDataClassifier
            # NOTE(review): a target column in neither list leaves
            # StructuredDataModelSearch unbound → UnboundLocalError below.
            self._predictors[target_column] = StructuredDataModelSearch(
                column_names=feature_cols,
                overwrite=True,
                max_trials=self.max_trials,
                tuner=self.tuner,
                directory="../models"
            )
            self._predictors[target_column].fit(
                x=data.loc[~missing_mask, feature_cols],
                y=data.loc[~missing_mask, target_column],
                epochs=self.epochs
            )
        self._fitted = True
        return self
    def transform(self, data: pd.DataFrame) -> Tuple[pd.DataFrame, pd.DataFrame]:
        """Impute missing values in the fitted target columns; returns the
        imputed frame and the boolean mask of originally-missing cells."""
        super().transform(data=data)
        imputed_mask = data[self._target_columns].isna()
        # save the original dtypes because ..
        dtypes = data.dtypes
        # ... dtypes of data need to be same as for fitting
        data = self._categorical_columns_to_string(data.copy())  # We don't want to change the input dataframe -> copy it
        for target_column in self._target_columns:
            feature_cols = [c for c in self._categorical_columns + self._numerical_columns if c != target_column]
            missing_mask = data[target_column].isna()
            amount_missing_in_columns = missing_mask.sum()
            if amount_missing_in_columns > 0:
                # predict() returns a 2-D array; take its single output column
                data.loc[missing_mask, target_column] = self._predictors[target_column].predict(data.loc[missing_mask, feature_cols])[:, 0]
                logger.debug(f'Imputed {amount_missing_in_columns} values in column {target_column}')
        self._restore_dtype(data, dtypes)
        return data, imputed_mask
| true | true |
f71bfd5b0f2615891de5ac70368d3a37c96767b7 | 17,386 | py | Python | HER_mod/rl_modules/get_path_costs.py | schrammlb2/policy-guided-sst | 8dce6619b9c771c39915c60fe9c54270ea1e621e | [
"Apache-2.0"
] | null | null | null | HER_mod/rl_modules/get_path_costs.py | schrammlb2/policy-guided-sst | 8dce6619b9c771c39915c60fe9c54270ea1e621e | [
"Apache-2.0"
] | null | null | null | HER_mod/rl_modules/get_path_costs.py | schrammlb2/policy-guided-sst | 8dce6619b9c771c39915c60fe9c54270ea1e621e | [
"Apache-2.0"
] | null | null | null | import numpy as np
import matplotlib.pyplot as plt
import pickle
import os
from scipy import stats
from HER_mod.rl_modules.tsp import generate_path
from HER_mod.rl_modules.hyperparams import NUM_GOALS, NUM_AGENTS
gd_step_list = [0,2,5, 10, 20, 40]
# NUM_AGENTS = 3
N=200
def get_path_costs(train_pos_agent, train_vel_agent, perm_search=True):
    """Benchmark a velocity-target agent against a position-only agent.

    For each of ``NUM_AGENTS`` freshly trained agent pairs, ``N`` random
    multi-goal paths are generated and timed. The position agent is run once
    per path (its timing is replicated across all entries of ``gd_step_list``
    for alignment), while the velocity agent is timed once per gradient-step
    count in ``gd_step_list``. Raw timings are pickled and a plot of the mean
    relative improvement with a 95% t-interval is written to ``results/``.

    Args:
        train_pos_agent: zero-argument factory returning a trained position-only agent.
        train_vel_agent: zero-argument factory returning a trained velocity-target agent.
        perm_search: forwarded to ``find_shortest_path``; enables permutation
            search over goal orderings.
    """
    num_agents = NUM_AGENTS
    num_goals = NUM_GOALS
    n = N

    pos_time_list = []
    vel_time_list = []
    for _ in range(num_agents):
        pos_agent = train_pos_agent()
        vel_agent = train_vel_agent()

        pos_agent_time_list = []
        vel_agent_time_list = []
        for i in range(n):
            # Sample a start position followed by the intermediate goals.
            goals = generate_path(num_goals + 1)
            pos = goals[0]
            goals = goals[1:-1]

            min_time, min_trajectory, min_path, min_vel_path, successful, pass_vals = pos_agent.find_shortest_path(pos, goals, gd_steps=0, perm_search=perm_search)
            # The position agent ignores gd_steps, so its single timing is
            # replicated to match the shape of the velocity agent's sweep.
            pos_test_time_list = [len(min_trajectory)] * len(gd_step_list)
            pos_agent_time_list.append(pos_test_time_list)

            vel_test_time_list = []
            for gd_steps in gd_step_list:
                min_time, min_trajectory, min_path, min_vel_path, successful, pass_vals = vel_agent.find_shortest_path(pos, goals, gd_steps=gd_steps, perm_search=perm_search)
                vel_test_time_list.append(len(min_trajectory))
            vel_agent_time_list.append(vel_test_time_list)

        pos_time_list.append(pos_agent_time_list)
        vel_time_list.append(vel_agent_time_list)

    vel_time_list = np.array(vel_time_list).squeeze()
    pos_time_list = np.array(pos_time_list).squeeze()
    relative_time_change = (vel_time_list - pos_time_list) / pos_time_list
    relative_time_change = np.mean(relative_time_change, axis=1)

    # Saving is best-effort; plotting below proceeds even if it fails.
    # (The previous code dropped into pdb here, which hangs unattended runs.)
    try:
        pickle.dump(vel_time_list, open("velocity_target.pkl", 'wb'))
        pickle.dump(pos_time_list, open("no_velocity_target.pkl", 'wb'))
        pickle.dump(relative_time_change, open("relative_time_change.pkl", 'wb'))
    except Exception as exc:
        print("pickle failure:", exc)

    # Mean relative improvement with a 95% t-interval across agents.
    mean = relative_time_change.mean(axis=0)
    t_score = stats.t.ppf(.975, num_agents)
    ci = t_score * relative_time_change.std(axis=0) / (num_agents**.5)
    steps = np.array(gd_step_list)
    plt.plot(steps, mean)
    plt.fill_between(steps, mean + ci, mean - ci, alpha=.4)
    plt.xlabel("Gradient steps")
    plt.ylabel("Relative Improvement vs standard HER")
    plt.title("Relative Improvement")
    plt.savefig(os.path.join('results', "Relative Improvement" + '.png'))
    plt.close()
# def method_comparison(train_pos_agent, train_vel_agent):
# # method_list = ['random search', "gradient descent", "gradient descent (40 steps)", "random", "0 velocity target"]
# method_list = ['random search', "gradient descent", "random", "0 velocity target"]
# method_runtime_dict = {'greedy': []}
# for method in method_list:
# method_runtime_dict[method] = []
# num_agents = NUM_AGENTS
# num_goals=NUM_GOALS
# n=N
# pos_time_list = []
# vel_time_list = []
# for _ in range(num_agents):
# pos_agent = train_pos_agent()
# vel_agent = train_vel_agent()
# for method in method_runtime_dict.keys():
# method_runtime_dict[method].append([])
# for i in range(n):
# # goals = [np.random.rand(2)*2-1 for i in range(num_goals)]
# # pos = np.random.rand(2)*2-1
# goals = generate_path(num_goals + 1)
# pos = goals[0]
# goals = goals[1:-1]
# # pos_agent_time_list = []
# min_time, min_trajectory, min_path, min_vel_path, successful, pass_vals = pos_agent.select_path(pos, goals, method="0 velocity target")
# # pos_test_time_list = [len(min_trajectory)]*len(gd_step_list)
# method_runtime_dict['greedy'][-1].append(len(min_trajectory))
# # vel_test_time_list = []
# for method in method_list:
# min_time, min_trajectory, min_path, min_vel_path, successful, pass_vals = vel_agent.select_path(pos, goals, method=method)
# method_runtime_dict[method][-1].append(len(min_trajectory))
# # vel_agent_time_list.append(vel_test_time_list)
# greedy = method_runtime_dict['greedy']
# method_runtime_dict = {method: np.array(method_runtime_dict[method]) for method in method_runtime_dict.keys()}
# performance_dict = {method: (method_runtime_dict[method].mean(), 2*(method_runtime_dict[method].mean(axis=-1)).std()/(num_agents**.5)) for method in method_runtime_dict.keys()}
# relative_time_dict = {method: (method_runtime_dict[method] - greedy)/greedy for method in method_list}
# improvement_dict = {method: (relative_time_dict[method].mean(), 2*(relative_time_dict[method].mean(axis=-1)).std()/(num_agents**.5)) for method in method_list}
# performance_list = [performance_dict[m][0] for m in method_runtime_dict.keys()]
# performance_ci_list = [performance_dict[m][1] for m in method_runtime_dict.keys()]
# relative_time_list = [improvement_dict[m][0] for m in method_list]
# relative_time_ci_list = [improvement_dict[m][1] for m in method_list]
# plt.xticks(range(len(method_runtime_dict.keys())), list(method_runtime_dict.keys()))
# plt.xlabel("Method")
# plt.ylabel('Time to complete')
# plt.title('Comparison of velocity target-setting methods')
# plt.bar(range(len(performance_list)), performance_list, yerr=performance_ci_list)
# plt.savefig(os.path.join('results', "Method comparison -- Performance" + '.png'))
# plt.close()
# plt.xticks(range(len(method_list)), method_list)
# plt.xlabel("Method")
# plt.ylabel('Cost reduction over greedy baseline')
# plt.title('Comparison of velocity target-setting methods')
# plt.bar(range(len(relative_time_list)), relative_time_list, yerr=relative_time_ci_list)
# plt.savefig(os.path.join('results', "Method comparison -- Relative Improvement" + '.png'))
# plt.close()
def method_comparison(train_pos_agent, train_vel_agent):
    """Compare velocity-target-setting methods against a greedy baseline.

    Trains ``NUM_AGENTS`` agent pairs and, for ``N`` random multi-goal paths
    each, times every method in ``method_list`` on the velocity agent plus a
    greedy "0 velocity target" run on the position agent. Failed episodes are
    recorded as the sentinel string "NULL" and excluded from all averages.
    Saves three bar charts (success rate, mean completion time, relative cost
    reduction vs greedy) to ``results/``.

    Args:
        train_pos_agent: zero-argument factory returning a trained position-only agent.
        train_vel_agent: zero-argument factory returning a trained velocity-target agent.
    """
    method_list = ['random search', "gradient descent", "gradient descent (40 steps)", "random", "0 velocity target"]
    # per-method: one list per agent, one entry (time or "NULL") per episode
    method_runtime_dict = {'greedy': []}
    for method in method_list:
        method_runtime_dict[method] = []

    num_agents = NUM_AGENTS
    num_goals = NUM_GOALS
    n = N

    failed_counter_dict = {'greedy': 0}
    for method in method_list:
        failed_counter_dict[method] = 0

    for _ in range(num_agents):
        pos_agent = train_pos_agent()
        vel_agent = train_vel_agent()
        for method in method_runtime_dict.keys():
            method_runtime_dict[method].append([])
        for i in range(n):
            # Sample a start position followed by the intermediate goals.
            goals = generate_path(num_goals + 1)
            pos = goals[0]
            goals = goals[1:-1]

            # Greedy baseline: position agent with zero-velocity targets.
            min_time, min_trajectory, min_path, min_vel_path, successful, pass_vals = pos_agent.select_path(pos, goals, method="0 velocity target")
            if successful:
                method_runtime_dict['greedy'][-1].append(len(min_trajectory))
            else:
                method_runtime_dict['greedy'][-1].append("NULL")
                failed_counter_dict['greedy'] += 1

            for method in method_list:
                min_time, min_trajectory, min_path, min_vel_path, successful, pass_vals = vel_agent.select_path(pos, goals, method=method)
                if successful:
                    method_runtime_dict[method][-1].append(len(min_trajectory))
                else:
                    method_runtime_dict[method][-1].append("NULL")
                    failed_counter_dict[method] += 1

    success_rates = {method: 1-failed_counter_dict[method]/(num_agents*n) for method in failed_counter_dict.keys()}

    greedy = method_runtime_dict['greedy']
    agent_performance_dict = {}
    mean_performance_dict = {}
    ci_performance_dict = {}
    mean_improvement_dict = {}
    ci_improvement_dict = {}
    t_score = stats.t.ppf(.975, num_agents)
    for method in method_runtime_dict.keys():
        # Per-agent mean completion time over the successful episodes only.
        agent_performance_dict[method] = [[time for time in agent_list if time != "NULL"] for agent_list in method_runtime_dict[method]]
        agent_performance_dict[method] = [sum(agent_list)/len(agent_list) for agent_list in agent_performance_dict[method]]
        mean = sum(agent_performance_dict[method])/len(agent_performance_dict[method])
        mean_performance_dict[method] = mean
        ci_performance_dict[method] = t_score*sum([(v-mean)**2 for v in agent_performance_dict[method]])**.5/len(agent_performance_dict[method])

        # Per-agent relative cost change vs greedy, over episodes where
        # both this method and the greedy baseline succeeded.
        improvement_list = []
        mean_list = []
        for agent_ind in range(num_agents):
            agent_list = method_runtime_dict[method][agent_ind]
            greedy_list = greedy[agent_ind]
            improvement_list.append([(agent_list[i] - greedy_list[i])/greedy_list[i] for i in range(n) if (agent_list[i] != "NULL" and greedy_list[i]!= "NULL")])
            mean_list.append(sum(improvement_list[agent_ind])/len(improvement_list[agent_ind]))
        mean = sum(mean_list)/len(mean_list)
        mean_improvement_dict[method] = mean
        ci_improvement_dict[method] = t_score*sum([(v-mean)**2 for v in mean_list])**.5/len(mean_list)

    performance_list = [mean_performance_dict[m] for m in method_runtime_dict.keys()]
    performance_ci_list = [ci_performance_dict[m] for m in method_runtime_dict.keys()]
    relative_time_list = [mean_improvement_dict[m] for m in method_list]
    relative_time_ci_list = [ci_improvement_dict[m] for m in method_list]
    sr_list = [success_rates[m] for m in method_runtime_dict.keys()]

    # Success-rate chart (includes the greedy baseline).
    plt.xticks(range(len(method_runtime_dict.keys())), list(method_runtime_dict.keys()))
    plt.xlabel("Method")
    plt.ylabel('Success rate')
    plt.title('Comparison of velocity target-setting methods')
    plt.bar(range(len(sr_list)), sr_list)
    plt.savefig(os.path.join('results', "Method comparison -- Success Rate" + '.png'))
    plt.close()

    # Absolute performance chart (includes the greedy baseline).
    plt.xticks(range(len(method_runtime_dict.keys())), list(method_runtime_dict.keys()))
    plt.xlabel("Method")
    plt.ylabel('Time to complete')
    plt.title('Comparison of velocity target-setting methods')
    plt.bar(range(len(performance_list)), performance_list, yerr=performance_ci_list)
    plt.savefig(os.path.join('results', "Method comparison -- Performance" + '.png'))
    plt.close()

    # Relative-improvement chart (methods only; greedy is the reference).
    plt.xticks(range(len(method_list)), method_list)
    plt.xlabel("Method")
    plt.ylabel('Cost reduction over greedy baseline')
    plt.title('Comparison of velocity target-setting methods')
    plt.bar(range(len(relative_time_list)), relative_time_list, yerr=relative_time_ci_list)
    plt.savefig(os.path.join('results', "Method comparison -- Relative Improvement" + '.png'))
    plt.close()
def get_random_search_costs(train_vel_agent, perm_search=True):
    """Compare gradient-descent velocity-target refinement against random search.

    Trains ``NUM_AGENTS`` velocity agents; for ``N`` random multi-goal paths
    each, times the planner once with gradient descent and once with random
    search at every step count in ``gd_step_list``. Saves two plots to
    ``results/``: the mean relative time change of GD vs random search, and
    the two methods' mean completion times with t-based confidence bands.

    Args:
        train_vel_agent: zero-argument factory returning a trained velocity-target agent.
        perm_search: forwarded to ``find_shortest_path``; enables permutation
            search over goal orderings.
    """
    num_agents = NUM_AGENTS
    num_goals = NUM_GOALS
    n = N

    rand_time_list = []
    gd_time_list = []
    for _ in range(num_agents):
        vel_agent = train_vel_agent()

        rand_search_time_list = []
        gd_search_time_list = []
        for i in range(n):
            # Sample a start position followed by the intermediate goals.
            goals = generate_path(num_goals + 1)
            pos = goals[0]
            goals = goals[1:-1]

            rand_test_time_list = []
            gd_test_time_list = []
            for gd_steps in gd_step_list:
                min_time, min_trajectory, min_path, min_vel_path, successful, pass_vals = vel_agent.find_shortest_path(pos, goals, gd_steps=gd_steps, random_start=False, perm_search=perm_search)
                print("GD: " + str(min_time))
                gd_test_time_list.append(len(min_trajectory))

                min_time, min_trajectory, min_path, min_vel_path, successful, pass_vals = vel_agent.find_shortest_path(pos, goals, gd_steps=gd_steps, random_search=True, perm_search=perm_search)
                print("random_search: " + str(min_time))
                rand_test_time_list.append(len(min_trajectory))

            rand_search_time_list.append(rand_test_time_list)
            gd_search_time_list.append(gd_test_time_list)

        rand_time_list.append(rand_search_time_list)
        gd_time_list.append(gd_search_time_list)

    rand_time_list = np.array(rand_time_list).squeeze()
    gd_time_list = np.array(gd_time_list).squeeze()

    relative_time_change = (gd_time_list - rand_time_list) / rand_time_list
    relative_time_change = np.mean(relative_time_change, axis=1)

    mean = relative_time_change.mean(axis=0)
    # NOTE(review): this interval uses a fixed 2*sigma while the plot below
    # uses stats.t.ppf(.975, num_agents) — confirm which is intended.
    ci = 2 * relative_time_change.std(axis=0) / (num_agents**.5)
    steps = np.array(gd_step_list)
    plt.plot(steps, mean)
    plt.fill_between(steps, mean + ci, mean - ci, alpha=.4)
    plt.xlabel("Gradient steps")
    plt.ylabel("Relative Improvement vs random search")
    plt.title("Relative Improvement vs random search")
    plt.savefig(os.path.join('results', "Improvement vs random search" + '.png'))
    plt.close()

    # Absolute completion times of the two methods with t-based bands.
    t_score = stats.t.ppf(.975, num_agents)
    rands = rand_time_list.mean(axis=1)
    rand_mean = rands.mean(axis=0)
    rand_ci = t_score * rands.std(axis=0) / (num_agents**.5)

    gds = gd_time_list.mean(axis=1)
    gd_mean = gds.mean(axis=0)
    gd_ci = t_score * gds.std(axis=0) / (num_agents**.5)

    plt.plot(steps, rand_mean, color='red', label='Random Search')
    plt.fill_between(steps, rand_mean + rand_ci, rand_mean - rand_ci, alpha=.4, color='red')
    plt.plot(steps, gd_mean, color='blue', label='Gradient Descent')
    plt.fill_between(steps, gd_mean + gd_ci, gd_mean - gd_ci, alpha=.4, color='blue')
    plt.legend()
    plt.xlabel("Gradient steps")
    plt.ylabel("Relative Improvement vs random search")
    plt.title("Relative Improvement vs random search")
    plt.savefig(os.path.join('results', "Gradient Descent vs random search" + '.png'))
    plt.close()
| 44.352041 | 195 | 0.673588 | import numpy as np
import matplotlib.pyplot as plt
import pickle
import os
from scipy import stats
from HER_mod.rl_modules.tsp import generate_path
from HER_mod.rl_modules.hyperparams import NUM_GOALS, NUM_AGENTS
gd_step_list = [0,2,5, 10, 20, 40]
N=200
def get_path_costs(train_pos_agent, train_vel_agent, perm_search=True):
    """Benchmark a velocity-target agent against a position-only agent.

    Trains ``NUM_AGENTS`` agent pairs, times ``N`` random multi-goal paths
    for each (the position agent once per path, the velocity agent once per
    step count in ``gd_step_list``), pickles the raw timing arrays, and saves
    a plot of the mean relative improvement with a 95% t-interval to
    ``results/``.
    """
    pos_run_time_list = []  # NOTE(review): never used below
    vel_run_time_list = []  # NOTE(review): never used below
    num_agents = NUM_AGENTS
    num_goals=NUM_GOALS
    n=N
    pos_time_list = []
    vel_time_list = []
    for _ in range(num_agents):
        pos_agent = train_pos_agent()
        vel_agent = train_vel_agent()
        pos_agent_time_list = []
        vel_agent_time_list = []
        for i in range(n):
            # Sample a start position followed by the intermediate goals.
            goals = generate_path(num_goals + 1)
            pos = goals[0]
            goals = goals[1:-1]
            min_time, min_trajectory, min_path, min_vel_path, successful, pass_vals = pos_agent.find_shortest_path(pos, goals, gd_steps=0, perm_search=perm_search)
            # Position agent ignores gd_steps; replicate its timing so the
            # array lines up with the velocity agent's gd_step_list sweep.
            pos_test_time_list = [len(min_trajectory)]*len(gd_step_list)
            pos_agent_time_list.append(pos_test_time_list)
            vel_test_time_list = []
            for gd_steps in gd_step_list:
                min_time, min_trajectory, min_path, min_vel_path, successful, pass_vals = vel_agent.find_shortest_path(pos, goals, gd_steps=gd_steps, perm_search=perm_search)
                vel_test_time_list.append(len(min_trajectory))
            vel_agent_time_list.append(vel_test_time_list)
        pos_time_list.append(pos_agent_time_list)
        vel_time_list.append(vel_agent_time_list)
    vel_time_list = np.array(vel_time_list).squeeze()
    pos_time_list = np.array(pos_time_list).squeeze()
    relative_time_change = (vel_time_list-pos_time_list)/pos_time_list
    relative_time_change = np.mean(relative_time_change, axis=1)
    try:
        pickle.dump(vel_time_list, open("velocity_target.pkl", 'wb'))
        pickle.dump(pos_time_list, open("no_velocity_target.pkl", 'wb'))
        pickle.dump(relative_time_change, open("relative_time_change.pkl", 'wb'))
    except:
        # NOTE(review): bare except drops into the debugger below; this
        # hangs unattended runs — consider logging and continuing instead.
        print("pickle failure")
        import pdb
        pdb.set_trace()
    # Mean relative improvement with a 95% t-interval across agents.
    mean = relative_time_change.mean(axis=0)
    t_score = stats.t.ppf(.975, num_agents)
    ci = t_score*relative_time_change.std(axis=0)/(num_agents**.5)
    steps = np.array(gd_step_list)
    plt.plot(steps, mean)
    plt.fill_between(steps, mean+ci, mean-ci, alpha=.4)
    plt.xlabel("Gradient steps")
    plt.ylabel("Relative Improvement vs standard HER")
    plt.title("Relative Improvement")
    plt.savefig(os.path.join('results', "Relative Improvement" + '.png'))
    plt.close()
[]
num_agents = NUM_AGENTS
num_goals=NUM_GOALS
n=N
pos_time_list = []
vel_time_list = []
failed_counter_dict = {'greedy': 0}
for method in method_list:
failed_counter_dict[method] = 0
for _ in range(num_agents):
pos_agent = train_pos_agent()
vel_agent = train_vel_agent()
for method in method_runtime_dict.keys():
method_runtime_dict[method].append([])
for i in range(n):
goals = generate_path(num_goals + 1)
pos = goals[0]
goals = goals[1:-1]
min_time, min_trajectory, min_path, min_vel_path, successful, pass_vals = pos_agent.select_path(pos, goals, method="0 velocity target")
if successful:
method_runtime_dict['greedy'][-1].append(len(min_trajectory))
else:
method_runtime_dict['greedy'][-1].append("NULL")
failed_counter_dict['greedy'] += 1
for method in method_list:
min_time, min_trajectory, min_path, min_vel_path, successful, pass_vals = vel_agent.select_path(pos, goals, method=method)
if successful:
method_runtime_dict[method][-1].append(len(min_trajectory))
else:
method_runtime_dict[method][-1].append("NULL")
failed_counter_dict[method] += 1
success_rates = {method: 1-failed_counter_dict[method]/(num_agents*n) for method in failed_counter_dict.keys()}
greedy = method_runtime_dict['greedy']
agent_performance_dict = {}
mean_performance_dict = {}
ci_performance_dict = {}
improvement_dict = {}
mean_improvement_dict = {}
ci_improvement_dict = {}
t_score = stats.t.ppf(.975, num_agents)
for method in method_runtime_dict.keys():
agent_performance_dict[method] = [[time for time in agent_list if time != "NULL"] for agent_list in method_runtime_dict[method]]
agent_performance_dict[method] = [sum(agent_list)/len(agent_list) for agent_list in agent_performance_dict[method]]
mean = sum(agent_performance_dict[method])/len(agent_performance_dict[method])
mean_performance_dict[method] = mean
ci_performance_dict[method] = t_score*sum([(v-mean)**2 for v in agent_performance_dict[method]])**.5/len(agent_performance_dict[method])
improvement_list = []
mean_list = []
for agent_ind in range(num_agents):
agent_list = method_runtime_dict[method][agent_ind]
greedy_list = greedy[agent_ind]
improvement_list.append([(agent_list[i] - greedy_list[i])/greedy_list[i] for i in range(n) if (agent_list[i] != "NULL" and greedy_list[i]!= "NULL")])
mean_list.append(sum(improvement_list[agent_ind])/len(improvement_list[agent_ind]))
mean = sum(mean_list)/len(mean_list)
mean_improvement_dict[method] = mean
ci_improvement_dict[method] = t_score*sum([(v-mean)**2 for v in mean_list])**.5/len(mean_list)
performance_list = [mean_performance_dict[m] for m in method_runtime_dict.keys()]
performance_ci_list = [ci_performance_dict[m] for m in method_runtime_dict.keys()]
relative_time_list = [mean_improvement_dict[m] for m in method_list]
relative_time_ci_list = [ci_improvement_dict[m] for m in method_list]
sr_list = [success_rates[m] for m in method_runtime_dict.keys()]
plt.xticks(range(len(method_runtime_dict.keys())), list(method_runtime_dict.keys()))
plt.xlabel("Method")
plt.ylabel('Success rate')
plt.title('Comparison of velocity target-setting methods')
plt.bar(range(len(sr_list)), sr_list)
plt.savefig(os.path.join('results', "Method comparison -- Success Rate" + '.png'))
plt.close()
plt.xticks(range(len(method_runtime_dict.keys())), list(method_runtime_dict.keys()))
plt.xlabel("Method")
plt.ylabel('Time to complete')
plt.title('Comparison of velocity target-setting methods')
plt.bar(range(len(performance_list)), performance_list, yerr=performance_ci_list)
plt.savefig(os.path.join('results', "Method comparison -- Performance" + '.png'))
plt.close()
plt.xticks(range(len(method_list)), method_list)
plt.xlabel("Method")
plt.ylabel('Cost reduction over greedy baseline')
plt.title('Comparison of velocity target-setting methods')
plt.bar(range(len(relative_time_list)), relative_time_list, yerr=relative_time_ci_list)
plt.savefig(os.path.join('results', "Method comparison -- Relative Improvement" + '.png'))
plt.close()
def get_random_search_costs(train_vel_agent, perm_search=True):
    """Compare gradient-descent velocity-target refinement to random search.

    Trains ``NUM_AGENTS`` velocity agents; for ``N`` random multi-goal paths
    each, times planning with gradient descent and with random search at every
    step count in ``gd_step_list``, then saves two comparison plots to
    ``results/``.
    """
    pos_run_time_list = []  # NOTE(review): never used below
    vel_run_time_list = []  # NOTE(review): never used below
    num_agents = NUM_AGENTS
    num_goals=NUM_GOALS
    n=N
    rand_time_list = []
    gd_time_list = []
    for _ in range(num_agents):
        vel_agent = train_vel_agent()
        rand_search_time_list = []
        gd_search_time_list = []
        for i in range(n):
            # Sample a start position followed by the intermediate goals.
            goals = generate_path(num_goals + 1)
            pos = goals[0]
            goals = goals[1:-1]
            rand_test_time_list = []
            gd_test_time_list = []
            for gd_steps in gd_step_list:
                # Same planner queried twice: gradient descent vs random search.
                min_time, min_trajectory, min_path, min_vel_path, successful, pass_vals = vel_agent.find_shortest_path(pos, goals, gd_steps=gd_steps, random_start=False, perm_search=perm_search)
                print("GD: " + str(min_time))
                gd_test_time_list.append(len(min_trajectory))
                min_time, min_trajectory, min_path, min_vel_path, successful, pass_vals = vel_agent.find_shortest_path(pos, goals, gd_steps=gd_steps, random_search=True, perm_search=perm_search)
                print("random_search: " + str(min_time))
                rand_test_time_list.append(len(min_trajectory))
            rand_search_time_list.append(rand_test_time_list)
            gd_search_time_list.append(gd_test_time_list)
        rand_time_list.append(rand_search_time_list)
        gd_time_list.append(gd_search_time_list)
    rand_time_list = np.array(rand_time_list).squeeze()
    gd_time_list = np.array(gd_time_list).squeeze()
    relative_time_change = (gd_time_list-rand_time_list)/rand_time_list
    relative_time_change = np.mean(relative_time_change, axis=1)
    mean = relative_time_change.mean(axis=0)
    # NOTE(review): fixed 2*sigma here vs stats.t.ppf(.975, ...) below —
    # confirm which interval is intended.
    ci = 2*relative_time_change.std(axis=0)/(num_agents**.5)
    steps = np.array(gd_step_list)
    plt.plot(steps, mean)
    plt.fill_between(steps, mean+ci, mean-ci, alpha=.4)
    plt.xlabel("Gradient steps")
    plt.ylabel("Relative Improvement vs random search")
    plt.title("Relative Improvement vs random search")
    plt.savefig(os.path.join('results', "Improvement vs random search" + '.png'))
    plt.close()
    # Absolute completion times of both methods with t-based bands.
    t_score = stats.t.ppf(.975, num_agents)
    rands = rand_time_list.mean(axis=1)
    rand_mean = rands.mean(axis=0)
    rand_ci = t_score*rands.std(axis=0)/(num_agents**.5)
    gds = gd_time_list.mean(axis=1)
    gd_mean = gds.mean(axis=0)
    gd_ci = t_score*gds.std(axis=0)/(num_agents**.5)
    plt.plot(steps, rand_mean, color='red', label='Random Search')
    plt.fill_between(steps, rand_mean+rand_ci, rand_mean-rand_ci, alpha=.4, color='red')
    plt.plot(steps, gd_mean, color='blue', label='Gradient Descent')
    plt.fill_between(steps, gd_mean+gd_ci, gd_mean-gd_ci, alpha=.4, color='blue')
    plt.legend()
    plt.xlabel("Gradient steps")
    plt.ylabel("Relative Improvement vs random search")
    plt.title("Relative Improvement vs random search")
    plt.savefig(os.path.join('results', "Gradient Descent vs random search" + '.png'))
    plt.close()
| true | true |
f71bfd9867454fe2f4dc8062137be298a7bcf44a | 40,230 | py | Python | python/ccxt/async_support/coinbasepro.py | dgdiginex/ccxt | cccd590576cbf48d26cf9e3f65cc54fdd466a139 | [
"MIT"
] | 1 | 2021-02-08T21:56:13.000Z | 2021-02-08T21:56:13.000Z | python/ccxt/async_support/coinbasepro.py | yucelalbar/ccxt | 672510401fba809172fac8272e1af463c778358a | [
"MIT"
] | null | null | null | python/ccxt/async_support/coinbasepro.py | yucelalbar/ccxt | 672510401fba809172fac8272e1af463c778358a | [
"MIT"
] | 2 | 2020-10-13T03:24:08.000Z | 2020-10-15T06:25:07.000Z | # -*- coding: utf-8 -*-
# PLEASE DO NOT EDIT THIS FILE, IT IS GENERATED AND WILL BE OVERWRITTEN:
# https://github.com/ccxt/ccxt/blob/master/CONTRIBUTING.md#how-to-contribute-code
from ccxt.async_support.base.exchange import Exchange
# -----------------------------------------------------------------------------
try:
basestring # Python 3
except NameError:
basestring = str # Python 2
import hashlib
from ccxt.base.errors import ExchangeError
from ccxt.base.errors import AuthenticationError
from ccxt.base.errors import PermissionDenied
from ccxt.base.errors import ArgumentsRequired
from ccxt.base.errors import InsufficientFunds
from ccxt.base.errors import InvalidAddress
from ccxt.base.errors import InvalidOrder
from ccxt.base.errors import OrderNotFound
from ccxt.base.errors import NotSupported
from ccxt.base.errors import RateLimitExceeded
from ccxt.base.errors import OnMaintenance
from ccxt.base.decimal_to_precision import TICK_SIZE
class coinbasepro(Exchange):
    """ccxt exchange adapter for Coinbase Pro (formerly GDAX).

    Generated code: describes the REST API surface, fee schedule, and error
    mapping, and implements the unified ccxt methods on top of it.
    """

    def describe(self):
        """Return the static exchange description merged over the base class.

        Covers capability flags, timeframe→granularity mapping (seconds),
        endpoint URLs, the public/private REST routes, fee schedule, and the
        exchange-message → ccxt-exception mapping used by handle_errors().
        """
        return self.deep_extend(super(coinbasepro, self).describe(), {
            'id': 'coinbasepro',
            'name': 'Coinbase Pro',
            'countries': ['US'],
            'rateLimit': 1000,
            'userAgent': self.userAgents['chrome'],
            'pro': True,
            'has': {
                'cancelAllOrders': True,
                'cancelOrder': True,
                'CORS': True,
                'createDepositAddress': True,
                'createOrder': True,
                'deposit': True,
                'fetchAccounts': True,
                'fetchBalance': True,
                'fetchCurrencies': True,
                'fetchClosedOrders': True,
                'fetchDepositAddress': True,
                'fetchMarkets': True,
                'fetchMyTrades': True,
                'fetchOHLCV': True,
                'fetchOpenOrders': True,
                'fetchOrder': True,
                'fetchOrderBook': True,
                'fetchOrders': True,
                'fetchOrderTrades': True,
                'fetchTime': True,
                'fetchTicker': True,
                'fetchTrades': True,
                'fetchTransactions': True,
                'withdraw': True,
            },
            # timeframe label -> candle granularity in seconds(API expects seconds)
            'timeframes': {
                '1m': 60,
                '5m': 300,
                '15m': 900,
                '1h': 3600,
                '6h': 21600,
                '1d': 86400,
            },
            'urls': {
                'test': {
                    'public': 'https://api-public.sandbox.pro.coinbase.com',
                    'private': 'https://api-public.sandbox.pro.coinbase.com',
                },
                'logo': 'https://user-images.githubusercontent.com/1294454/41764625-63b7ffde-760a-11e8-996d-a6328fa9347a.jpg',
                'api': {
                    'public': 'https://api.pro.coinbase.com',
                    'private': 'https://api.pro.coinbase.com',
                },
                'www': 'https://pro.coinbase.com/',
                'doc': 'https://docs.pro.coinbase.com',
                'fees': [
                    'https://docs.pro.coinbase.com/#fees',
                    'https://support.pro.coinbase.com/customer/en/portal/articles/2945310-fees',
                ],
            },
            # Coinbase Pro authenticates with key + secret + passphrase
            'requiredCredentials': {
                'apiKey': True,
                'secret': True,
                'password': True,
            },
            'api': {
                'public': {
                    'get': [
                        'currencies',
                        'products',
                        'products/{id}',
                        'products/{id}/book',
                        'products/{id}/candles',
                        'products/{id}/stats',
                        'products/{id}/ticker',
                        'products/{id}/trades',
                        'time',
                    ],
                },
                'private': {
                    'get': [
                        'accounts',
                        'accounts/{id}',
                        'accounts/{id}/holds',
                        'accounts/{id}/ledger',
                        'accounts/{id}/transfers',
                        'coinbase-accounts',
                        'coinbase-accounts/{id}/addresses',
                        'fills',
                        'funding',
                        'fees',
                        'margin/profile_information',
                        'margin/buying_power',
                        'margin/withdrawal_power',
                        'margin/withdrawal_power_all',
                        'margin/exit_plan',
                        'margin/liquidation_history',
                        'margin/position_refresh_amounts',
                        'margin/status',
                        'oracle',
                        'orders',
                        'orders/{id}',
                        'orders/client:{client_oid}',
                        'otc/orders',
                        'payment-methods',
                        'position',
                        'profiles',
                        'profiles/{id}',
                        'reports/{report_id}',
                        'transfers',
                        'transfers/{transfer_id}',
                        'users/self/trailing-volume',
                        'users/self/exchange-limits',
                        'withdrawals/fee-estimate',
                    ],
                    'post': [
                        'conversions',
                        'deposits/coinbase-account',
                        'deposits/payment-method',
                        'coinbase-accounts/{id}/addresses',
                        'funding/repay',
                        'orders',
                        'position/close',
                        'profiles/margin-transfer',
                        'profiles/transfer',
                        'reports',
                        'withdrawals/coinbase',
                        'withdrawals/coinbase-account',
                        'withdrawals/crypto',
                        'withdrawals/payment-method',
                    ],
                    'delete': [
                        'orders',
                        'orders/client:{client_oid}',
                        'orders/{id}',
                    ],
                },
            },
            'precisionMode': TICK_SIZE,
            'fees': {
                'trading': {
                    'tierBased': True,  # complicated tier system per coin
                    'percentage': True,
                    'maker': 0.5 / 100,  # highest fee of all tiers
                    'taker': 0.5 / 100,  # highest fee of all tiers
                },
                'funding': {
                    'tierBased': False,
                    'percentage': False,
                    'withdraw': {
                        'BCH': 0,
                        'BTC': 0,
                        'LTC': 0,
                        'ETH': 0,
                        'EUR': 0.15,
                        'USD': 25,
                    },
                    'deposit': {
                        'BCH': 0,
                        'BTC': 0,
                        'LTC': 0,
                        'ETH': 0,
                        'EUR': 0.15,
                        'USD': 10,
                    },
                },
            },
            # error-message -> exception mapping consumed by handle_errors()
            'exceptions': {
                'exact': {
                    'Insufficient funds': InsufficientFunds,
                    'NotFound': OrderNotFound,
                    'Invalid API Key': AuthenticationError,
                    'invalid signature': AuthenticationError,
                    'Invalid Passphrase': AuthenticationError,
                    'Invalid order id': InvalidOrder,
                    'Private rate limit exceeded': RateLimitExceeded,
                    'Trading pair not available': PermissionDenied,
                    'Product not found': InvalidOrder,
                },
                'broad': {
                    'Order already done': OrderNotFound,
                    'order not found': OrderNotFound,
                    'price too small': InvalidOrder,
                    'price too precise': InvalidOrder,
                    'under maintenance': OnMaintenance,
                    'size is too small': InvalidOrder,
                    'Cancel only mode': OnMaintenance,  # https://github.com/ccxt/ccxt/issues/7690
                },
            },
        })
    async def fetch_currencies(self, params={}):
        """Fetch all currencies supported by the exchange.

        Returns a dict keyed by unified currency code, each value a unified
        currency structure(id, precision, limits, active flag, raw info).
        """
        response = await self.publicGetCurrencies(params)
        #
        #     [
        #         {
        #             id: 'XTZ',
        #             name: 'Tezos',
        #             min_size: '0.000001',
        #             status: 'online',
        #             message: '',
        #             max_precision: '0.000001',
        #             convertible_to: [],
        #             details: {
        #                 type: 'crypto',
        #                 symbol: 'Τ',
        #                 network_confirmations: 60,
        #                 sort_order: 53,
        #                 crypto_address_link: 'https://tzstats.com/{{address}}',
        #                 crypto_transaction_link: 'https://tzstats.com/{{txId}}',
        #                 push_payment_methods: ['crypto'],
        #                 group_types: [],
        #                 display_name: '',
        #                 processing_time_seconds: 0,
        #                 min_withdrawal_amount: 1
        #             }
        #         }
        #     ]
        #
        result = {}
        for i in range(0, len(response)):
            currency = response[i]
            id = self.safe_string(currency, 'id')
            name = self.safe_string(currency, 'name')
            code = self.safe_currency_code(id)
            details = self.safe_value(currency, 'details', {})
            precision = self.safe_float(currency, 'max_precision')
            status = self.safe_string(currency, 'status')
            # anything other than 'online' is treated as inactive
            active = (status == 'online')
            result[code] = {
                'id': id,
                'code': code,
                'info': currency,
                'type': self.safe_string(details, 'type'),
                'name': name,
                'active': active,
                'fee': None,
                'precision': precision,
                'limits': {
                    'amount': {
                        'min': self.safe_float(details, 'min_size'),
                        'max': None,
                    },
                    'price': {
                        'min': None,
                        'max': None,
                    },
                    'cost': {
                        'min': None,
                        'max': None,
                    },
                    'withdraw': {
                        'min': self.safe_float(details, 'min_withdrawal_amount'),
                        'max': None,
                    },
                },
            }
        return result
    async def fetch_markets(self, params={}):
        """Fetch all trading pairs(products) and map them to unified markets.

        Precision comes from the exchange increments(TICK_SIZE mode), limits
        from the min/max size and funds fields; the per-market maker/taker
        rates are copied in from self.fees['trading'] via self.extend().
        """
        response = await self.publicGetProducts(params)
        #
        #     [
        #         {
        #             "id":"ZEC-BTC",
        #             "base_currency":"ZEC",
        #             "quote_currency":"BTC",
        #             "base_min_size":"0.01000000",
        #             "base_max_size":"1500.00000000",
        #             "quote_increment":"0.00000100",
        #             "base_increment":"0.00010000",
        #             "display_name":"ZEC/BTC",
        #             "min_market_funds":"0.001",
        #             "max_market_funds":"30",
        #             "margin_enabled":false,
        #             "post_only":false,
        #             "limit_only":false,
        #             "cancel_only":false,
        #             "trading_disabled":false,
        #             "status":"online",
        #             "status_message":""
        #         }
        #     ]
        #
        result = []
        for i in range(0, len(response)):
            market = response[i]
            id = self.safe_string(market, 'id')
            baseId = self.safe_string(market, 'base_currency')
            quoteId = self.safe_string(market, 'quote_currency')
            base = self.safe_currency_code(baseId)
            quote = self.safe_currency_code(quoteId)
            symbol = base + '/' + quote
            priceLimits = {
                'min': self.safe_float(market, 'quote_increment'),
                'max': None,
            }
            # tick sizes, not digit counts(see 'precisionMode': TICK_SIZE)
            precision = {
                'amount': self.safe_float(market, 'base_increment'),
                'price': self.safe_float(market, 'quote_increment'),
            }
            status = self.safe_string(market, 'status')
            active = (status == 'online')
            result.append(self.extend(self.fees['trading'], {
                'id': id,
                'symbol': symbol,
                'baseId': baseId,
                'quoteId': quoteId,
                'base': base,
                'quote': quote,
                'precision': precision,
                'limits': {
                    'amount': {
                        'min': self.safe_float(market, 'base_min_size'),
                        'max': self.safe_float(market, 'base_max_size'),
                    },
                    'price': priceLimits,
                    'cost': {
                        'min': self.safe_float(market, 'min_market_funds'),
                        'max': self.safe_float(market, 'max_market_funds'),
                    },
                },
                'active': active,
                'info': market,
            }))
        return result
async def fetch_accounts(self, params={}):
response = await self.privateGetAccounts(params)
#
# [
# {
# id: '4aac9c60-cbda-4396-9da4-4aa71e95fba0',
# currency: 'BTC',
# balance: '0.0000000000000000',
# available: '0',
# hold: '0.0000000000000000',
# profile_id: 'b709263e-f42a-4c7d-949a-a95c83d065da'
# },
# {
# id: 'f75fa69a-1ad1-4a80-bd61-ee7faa6135a3',
# currency: 'USDC',
# balance: '0.0000000000000000',
# available: '0',
# hold: '0.0000000000000000',
# profile_id: 'b709263e-f42a-4c7d-949a-a95c83d065da'
# },
# ]
#
result = []
for i in range(0, len(response)):
account = response[i]
accountId = self.safe_string(account, 'id')
currencyId = self.safe_string(account, 'currency')
code = self.safe_currency_code(currencyId)
result.append({
'id': accountId,
'type': None,
'currency': code,
'info': account,
})
return result
async def fetch_balance(self, params={}):
await self.load_markets()
response = await self.privateGetAccounts(params)
result = {'info': response}
for i in range(0, len(response)):
balance = response[i]
currencyId = self.safe_string(balance, 'currency')
code = self.safe_currency_code(currencyId)
account = {
'free': self.safe_float(balance, 'available'),
'used': self.safe_float(balance, 'hold'),
'total': self.safe_float(balance, 'balance'),
}
result[code] = account
return self.parse_balance(result)
    async def fetch_order_book(self, symbol, limit=None, params={}):
        """Fetch the level-2 order book for a symbol.

        The `limit` argument is accepted for interface compatibility but not
        forwarded; override the aggregation depth via params['level'].
        The response sequence number is exposed as orderbook['nonce'].
        """
        await self.load_markets()
        # level 1 - only the best bid and ask
        # level 2 - top 50 bids and asks(aggregated)
        # level 3 - full order book(non aggregated)
        request = {
            'id': self.market_id(symbol),
            'level': 2,  # 1 best bidask, 2 aggregated, 3 full
        }
        response = await self.publicGetProductsIdBook(self.extend(request, params))
        #
        #     {
        #         "sequence":1924393896,
        #         "bids":[
        #             ["0.01825","24.34811287",2],
        #             ["0.01824","72.5463",3],
        #             ["0.01823","424.54298049",6],
        #         ],
        #         "asks":[
        #             ["0.01826","171.10414904",4],
        #             ["0.01827","22.60427028",1],
        #             ["0.01828","397.46018784",7],
        #         ]
        #     }
        #
        orderbook = self.parse_order_book(response)
        orderbook['nonce'] = self.safe_integer(response, 'sequence')
        return orderbook
    def parse_ticker(self, ticker, market=None):
        """Parse either a /ticker or a /stats payload into a unified ticker.

        The two endpoints carry disjoint fields(see examples below), so
        most unified fields are None depending on which payload arrived.
        """
        #
        # publicGetProductsIdTicker
        #
        #     {
        #         "trade_id":843439,
        #         "price":"0.997999",
        #         "size":"80.29769",
        #         "time":"2020-01-28T02:13:33.012523Z",
        #         "bid":"0.997094",
        #         "ask":"0.998",
        #         "volume":"1903188.03750000"
        #     }
        #
        # publicGetProductsIdStats
        #
        #     {
        #         "open": "34.19000000",
        #         "high": "95.70000000",
        #         "low": "7.06000000",
        #         "volume": "2.41000000"
        #     }
        #
        timestamp = self.parse8601(self.safe_value(ticker, 'time'))
        bid = self.safe_float(ticker, 'bid')
        ask = self.safe_float(ticker, 'ask')
        last = self.safe_float(ticker, 'price')
        symbol = None if (market is None) else market['symbol']
        return {
            'symbol': symbol,
            'timestamp': timestamp,
            'datetime': self.iso8601(timestamp),
            'high': self.safe_float(ticker, 'high'),
            'low': self.safe_float(ticker, 'low'),
            'bid': bid,
            'bidVolume': None,
            'ask': ask,
            'askVolume': None,
            'vwap': None,
            'open': self.safe_float(ticker, 'open'),
            'close': last,
            'last': last,
            'previousClose': None,
            'change': None,
            'percentage': None,
            'average': None,
            'baseVolume': self.safe_float(ticker, 'volume'),
            'quoteVolume': None,
            'info': ticker,
        }
    async def fetch_ticker(self, symbol, params={}):
        """Fetch a ticker for one symbol.

        The endpoint is selectable via self.options['fetchTickerMethod']:
        'publicGetProductsIdTicker'(default, last trade + best bid/ask) or
        'publicGetProductsIdStats'(24h open/high/low/volume).
        """
        await self.load_markets()
        market = self.market(symbol)
        request = {
            'id': market['id'],
        }
        # publicGetProductsIdTicker or publicGetProductsIdStats
        method = self.safe_string(self.options, 'fetchTickerMethod', 'publicGetProductsIdTicker')
        response = await getattr(self, method)(self.extend(request, params))
        #
        # publicGetProductsIdTicker
        #
        #     {
        #         "trade_id":843439,
        #         "price":"0.997999",
        #         "size":"80.29769",
        #         "time":"2020-01-28T02:13:33.012523Z",
        #         "bid":"0.997094",
        #         "ask":"0.998",
        #         "volume":"1903188.03750000"
        #     }
        #
        # publicGetProductsIdStats
        #
        #     {
        #         "open": "34.19000000",
        #         "high": "95.70000000",
        #         "low": "7.06000000",
        #         "volume": "2.41000000"
        #     }
        #
        return self.parse_ticker(response, market)
def parse_trade(self, trade, market=None):
#
# {
# type: 'match',
# trade_id: 82047307,
# maker_order_id: '0f358725-2134-435e-be11-753912a326e0',
# taker_order_id: '252b7002-87a3-425c-ac73-f5b9e23f3caf',
# side: 'sell',
# size: '0.00513192',
# price: '9314.78',
# product_id: 'BTC-USD',
# sequence: 12038915443,
# time: '2020-01-31T20:03:41.158814Z'
# }
#
timestamp = self.parse8601(self.safe_string_2(trade, 'time', 'created_at'))
marketId = self.safe_string(trade, 'product_id')
symbol = self.safe_symbol(marketId, market, '-')
feeRate = None
feeCurrency = None
takerOrMaker = None
if market is not None:
feeCurrency = market['quote']
if 'liquidity' in trade:
takerOrMaker = 'taker' if (trade['liquidity'] == 'T') else 'maker'
feeRate = market[takerOrMaker]
feeCost = self.safe_float_2(trade, 'fill_fees', 'fee')
fee = {
'cost': feeCost,
'currency': feeCurrency,
'rate': feeRate,
}
type = None
id = self.safe_string(trade, 'trade_id')
side = 'sell' if (trade['side'] == 'buy') else 'buy'
orderId = self.safe_string(trade, 'order_id')
# Coinbase Pro returns inverted side to fetchMyTrades vs fetchTrades
if orderId is not None:
side = 'buy' if (trade['side'] == 'buy') else 'sell'
price = self.safe_float(trade, 'price')
amount = self.safe_float(trade, 'size')
return {
'id': id,
'order': orderId,
'info': trade,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'symbol': symbol,
'type': type,
'takerOrMaker': takerOrMaker,
'side': side,
'price': price,
'amount': amount,
'fee': fee,
'cost': price * amount,
}
async def fetch_my_trades(self, symbol=None, since=None, limit=None, params={}):
# as of 2018-08-23
if symbol is None:
raise ArgumentsRequired(self.id + ' fetchMyTrades requires a symbol argument')
await self.load_markets()
market = self.market(symbol)
request = {
'product_id': market['id'],
}
if limit is not None:
request['limit'] = limit
response = await self.privateGetFills(self.extend(request, params))
return self.parse_trades(response, market, since, limit)
async def fetch_trades(self, symbol, since=None, limit=None, params={}):
await self.load_markets()
market = self.market(symbol)
request = {
'id': market['id'], # fixes issue #2
}
response = await self.publicGetProductsIdTrades(self.extend(request, params))
return self.parse_trades(response, market, since, limit)
    def parse_ohlcv(self, ohlcv, market=None):
        """Reorder one raw candle into the unified OHLCV layout.

        Per the Coinbase Pro candles docs the exchange returns
        [time, low, high, open, close, volume], hence the 3/2/1 index
        shuffle below to produce [ts, open, high, low, close, volume].
        """
        #
        #     [
        #         1591514160,
        #         0.02507,
        #         0.02507,
        #         0.02507,
        #         0.02507,
        #         0.02816506
        #     ]
        #
        return [
            self.safe_timestamp(ohlcv, 0),
            self.safe_float(ohlcv, 3),
            self.safe_float(ohlcv, 2),
            self.safe_float(ohlcv, 1),
            self.safe_float(ohlcv, 4),
            self.safe_float(ohlcv, 5),
        ]
    async def fetch_ohlcv(self, symbol, timeframe='1m', since=None, limit=None, params={}):
        """Fetch historic candles for a symbol.

        When `since` is given, an explicit end bound is derived from `limit`
        (capped at the API maximum of 300 candles per request).
        """
        await self.load_markets()
        market = self.market(symbol)
        # granularity in seconds, looked up from the timeframes map
        granularity = self.timeframes[timeframe]
        request = {
            'id': market['id'],
            'granularity': granularity,
        }
        if since is not None:
            request['start'] = self.iso8601(since)
            if limit is None:
                # https://docs.pro.coinbase.com/#get-historic-rates
                limit = 300  # max = 300
            request['end'] = self.iso8601(self.sum((limit - 1) * granularity * 1000, since))
        response = await self.publicGetProductsIdCandles(self.extend(request, params))
        #
        #     [
        #         [1591514160,0.02507,0.02507,0.02507,0.02507,0.02816506],
        #         [1591514100,0.02507,0.02507,0.02507,0.02507,1.63830323],
        #         [1591514040,0.02505,0.02507,0.02505,0.02507,0.19918178]
        #     ]
        #
        return self.parse_ohlcvs(response, market, timeframe, since, limit)
async def fetch_time(self, params={}):
response = await self.publicGetTime(params)
#
# {
# "iso":"2020-05-12T08:00:51.504Z",
# "epoch":1589270451.504
# }
#
return self.safe_timestamp(response, 'epoch')
def parse_order_status(self, status):
statuses = {
'pending': 'open',
'active': 'open',
'open': 'open',
'done': 'closed',
'canceled': 'canceled',
'canceling': 'open',
}
return self.safe_string(statuses, status, status)
    def parse_order(self, order, market=None):
        """Parse a raw order payload into the unified order structure.

        `amount` falls back to `filled_size` for market orders that report no
        size; `remaining` is derived as amount - filled when both are known.
        """
        timestamp = self.parse8601(self.safe_string(order, 'created_at'))
        marketId = self.safe_string(order, 'product_id')
        market = self.safe_market(marketId, market, '-')
        status = self.parse_order_status(self.safe_string(order, 'status'))
        price = self.safe_float(order, 'price')
        filled = self.safe_float(order, 'filled_size')
        amount = self.safe_float(order, 'size', filled)
        remaining = None
        if amount is not None:
            if filled is not None:
                remaining = amount - filled
        cost = self.safe_float(order, 'executed_value')
        feeCost = self.safe_float(order, 'fill_fees')
        fee = None
        if feeCost is not None:
            feeCurrencyCode = None
            if market is not None:
                # fees are charged in the quote currency
                feeCurrencyCode = market['quote']
            fee = {
                'cost': feeCost,
                'currency': feeCurrencyCode,
                'rate': None,
            }
        id = self.safe_string(order, 'id')
        type = self.safe_string(order, 'type')
        side = self.safe_string(order, 'side')
        return {
            'id': id,
            'clientOrderId': None,
            'info': order,
            'timestamp': timestamp,
            'datetime': self.iso8601(timestamp),
            'lastTradeTimestamp': None,
            'status': status,
            'symbol': market['symbol'],
            'type': type,
            'side': side,
            'price': price,
            'cost': cost,
            'amount': amount,
            'filled': filled,
            'remaining': remaining,
            'fee': fee,
            'average': None,
            'trades': None,
        }
async def fetch_order(self, id, symbol=None, params={}):
await self.load_markets()
request = {
'id': id,
}
response = await self.privateGetOrdersId(self.extend(request, params))
return self.parse_order(response)
async def fetch_order_trades(self, id, symbol=None, since=None, limit=None, params={}):
await self.load_markets()
market = None
if symbol is not None:
market = self.market(symbol)
request = {
'order_id': id,
}
response = await self.privateGetFills(self.extend(request, params))
return self.parse_trades(response, market, since, limit)
async def fetch_orders(self, symbol=None, since=None, limit=None, params={}):
await self.load_markets()
request = {
'status': 'all',
}
market = None
if symbol is not None:
market = self.market(symbol)
request['product_id'] = market['id']
response = await self.privateGetOrders(self.extend(request, params))
return self.parse_orders(response, market, since, limit)
async def fetch_open_orders(self, symbol=None, since=None, limit=None, params={}):
await self.load_markets()
request = {}
market = None
if symbol is not None:
market = self.market(symbol)
request['product_id'] = market['id']
response = await self.privateGetOrders(self.extend(request, params))
return self.parse_orders(response, market, since, limit)
async def fetch_closed_orders(self, symbol=None, since=None, limit=None, params={}):
await self.load_markets()
request = {
'status': 'done',
}
market = None
if symbol is not None:
market = self.market(symbol)
request['product_id'] = market['id']
response = await self.privateGetOrders(self.extend(request, params))
return self.parse_orders(response, market, since, limit)
    async def create_order(self, symbol, type, side, amount, price=None, params={}):
        """Place an order.

        `price` is only sent for limit orders; market orders ignore it.
        Amount and price are rounded to the market's precision first.
        """
        await self.load_markets()
        # oid = str(self.nonce())
        request = {
            'product_id': self.market_id(symbol),
            'side': side,
            'size': self.amount_to_precision(symbol, amount),
            'type': type,
        }
        if type == 'limit':
            request['price'] = self.price_to_precision(symbol, price)
        response = await self.privatePostOrders(self.extend(request, params))
        return self.parse_order(response)
async def cancel_order(self, id, symbol=None, params={}):
await self.load_markets()
return await self.privateDeleteOrdersId({'id': id})
async def cancel_all_orders(self, symbol=None, params={}):
return await self.privateDeleteOrders(params)
def calculate_fee(self, symbol, type, side, amount, price, takerOrMaker='taker', params={}):
market = self.markets[symbol]
rate = market[takerOrMaker]
cost = amount * price
currency = market['quote']
return {
'type': takerOrMaker,
'currency': currency,
'rate': rate,
'cost': float(self.currency_to_precision(currency, rate * cost)),
}
async def fetch_payment_methods(self, params={}):
return await self.privateGetPaymentMethods(params)
    async def deposit(self, code, amount, address, tag=None, params={}) if False else None  # noqa
    async def withdraw(self, code, amount, address, tag=None, params={}):
        """Request a withdrawal.

        Routes to one of three endpoints based on extra params:
        `payment_method_id` -> bank/payment method, `coinbase_account_id` ->
        linked Coinbase account, otherwise an on-chain crypto withdrawal to
        `address`.
        """
        self.check_address(address)
        await self.load_markets()
        currency = self.currency(code)
        request = {
            'currency': currency['id'],
            'amount': amount,
        }
        method = 'privatePostWithdrawals'
        if 'payment_method_id' in params:
            method += 'PaymentMethod'
        elif 'coinbase_account_id' in params:
            method += 'CoinbaseAccount'
        else:
            method += 'Crypto'
            request['crypto_address'] = address
        response = await getattr(self, method)(self.extend(request, params))
        if not response:
            raise ExchangeError(self.id + ' withdraw() error: ' + self.json(response))
        return {
            'info': response,
            'id': response['id'],
        }
    async def fetch_transactions(self, code=None, since=None, limit=None, params={}):
        """Fetch deposit/withdrawal transfers for one account.

        The account is selected either by params['id'] or resolved from the
        currency `code` via the cached account list; raw entries carry no
        currency field, so the requested code is stamped onto each one before
        parsing.
        """
        await self.load_markets()
        await self.load_accounts()
        currency = None
        id = self.safe_string(params, 'id')  # account id
        if id is None:
            if code is None:
                raise ArgumentsRequired(self.id + ' fetchTransactions() requires a currency code argument if no account id specified in params')
            currency = self.currency(code)
            accountsByCurrencyCode = self.index_by(self.accounts, 'currency')
            account = self.safe_value(accountsByCurrencyCode, code)
            if account is None:
                raise ExchangeError(self.id + ' fetchTransactions() could not find account id for ' + code)
            id = account['id']
        request = {
            'id': id,
        }
        if limit is not None:
            request['limit'] = limit
        response = await self.privateGetAccountsIdTransfers(self.extend(request, params))
        for i in range(0, len(response)):
            # transfers do not include a currency field, inject the requested code
            response[i]['currency'] = code
        return self.parse_transactions(response, currency, since, limit)
def parse_transaction_status(self, transaction):
canceled = self.safe_value(transaction, 'canceled_at')
if canceled:
return 'canceled'
processed = self.safe_value(transaction, 'processed_at')
completed = self.safe_value(transaction, 'completed_at')
if completed:
return 'ok'
elif processed and not completed:
return 'failed'
else:
return 'pending'
    def parse_transaction(self, transaction, currency=None):
        """Parse a raw transfer into the unified transaction structure.

        The on-chain txid/address/tag live under the nested 'details' object;
        the exchange's 'withdraw' type is renamed to the unified 'withdrawal'.
        """
        details = self.safe_value(transaction, 'details', {})
        id = self.safe_string(transaction, 'id')
        txid = self.safe_string(details, 'crypto_transaction_hash')
        timestamp = self.parse8601(self.safe_string(transaction, 'created_at'))
        updated = self.parse8601(self.safe_string(transaction, 'processed_at'))
        currencyId = self.safe_string(transaction, 'currency')
        code = self.safe_currency_code(currencyId, currency)
        fee = None
        status = self.parse_transaction_status(transaction)
        amount = self.safe_float(transaction, 'amount')
        type = self.safe_string(transaction, 'type')
        address = self.safe_string(details, 'crypto_address')
        tag = self.safe_string(details, 'destination_tag')
        # a top-level crypto_address, when present, overrides the nested one
        address = self.safe_string(transaction, 'crypto_address', address)
        if type == 'withdraw':
            type = 'withdrawal'
            address = self.safe_string(details, 'sent_to_address', address)
        return {
            'info': transaction,
            'id': id,
            'txid': txid,
            'timestamp': timestamp,
            'datetime': self.iso8601(timestamp),
            'address': address,
            'tag': tag,
            'type': type,
            'amount': amount,
            'currency': code,
            'status': status,
            'updated': updated,
            'fee': fee,
        }
    def sign(self, path, api='public', method='GET', params={}, headers=None, body=None):
        """Build the URL, body, and auth headers for a request.

        Private calls sign "nonce + METHOD + path + body" with
        HMAC-SHA256(base64-decoded secret), base64-encoded, and send the
        key/signature/timestamp/passphrase in CB-ACCESS-* headers.
        The statement order here mirrors the signing contract — do not reorder.
        """
        request = '/' + self.implode_params(path, params)
        query = self.omit(params, self.extract_params(path))
        if method == 'GET':
            if query:
                request += '?' + self.urlencode(query)
        url = self.urls['api'][api] + request
        if api == 'private':
            self.check_required_credentials()
            nonce = str(self.nonce())
            payload = ''
            if method != 'GET':
                if query:
                    body = self.json(query)
                    payload = body
            # the signed message is nonce + method + path(+query) + body
            what = nonce + method + request + payload
            secret = self.base64_to_binary(self.secret)
            signature = self.hmac(self.encode(what), secret, hashlib.sha256, 'base64')
            headers = {
                'CB-ACCESS-KEY': self.apiKey,
                'CB-ACCESS-SIGN': signature,
                'CB-ACCESS-TIMESTAMP': nonce,
                'CB-ACCESS-PASSPHRASE': self.password,
                'Content-Type': 'application/json',
            }
        return {'url': url, 'method': method, 'body': body, 'headers': headers}
    async def fetch_deposit_address(self, code, params={}):
        """Fetch an existing deposit address for a currency.

        Coinbase account ids are fetched once and cached in self.options;
        the currency is resolved against that cache to locate the account.
        """
        await self.load_markets()
        currency = self.currency(code)
        accounts = self.safe_value(self.options, 'coinbaseAccounts')
        if accounts is None:
            accounts = await self.privateGetCoinbaseAccounts()
            self.options['coinbaseAccounts'] = accounts  # cache it
            self.options['coinbaseAccountsByCurrencyId'] = self.index_by(accounts, 'currency')
        currencyId = currency['id']
        account = self.safe_value(self.options['coinbaseAccountsByCurrencyId'], currencyId)
        if account is None:
            # eslint-disable-next-line quotes
            raise InvalidAddress(self.id + " fetchDepositAddress() could not find currency code " + code + " with id = " + currencyId + " in self.options['coinbaseAccountsByCurrencyId']")
        request = {
            'id': account['id'],
        }
        response = await self.privateGetCoinbaseAccountsIdAddresses(self.extend(request, params))
        address = self.safe_string(response, 'address')
        tag = self.safe_string(response, 'destination_tag')
        return {
            'currency': code,
            'address': self.check_address(address),
            'tag': tag,
            'info': response,
        }
    async def create_deposit_address(self, code, params={}):
        """Create a fresh deposit address for a currency.

        Same account-resolution/caching flow as fetch_deposit_address(), but
        POSTs to generate a new address instead of reading an existing one.
        """
        await self.load_markets()
        currency = self.currency(code)
        accounts = self.safe_value(self.options, 'coinbaseAccounts')
        if accounts is None:
            accounts = await self.privateGetCoinbaseAccounts()
            self.options['coinbaseAccounts'] = accounts  # cache it
            self.options['coinbaseAccountsByCurrencyId'] = self.index_by(accounts, 'currency')
        currencyId = currency['id']
        account = self.safe_value(self.options['coinbaseAccountsByCurrencyId'], currencyId)
        if account is None:
            # eslint-disable-next-line quotes
            raise InvalidAddress(self.id + " fetchDepositAddress() could not find currency code " + code + " with id = " + currencyId + " in self.options['coinbaseAccountsByCurrencyId']")
        request = {
            'id': account['id'],
        }
        response = await self.privatePostCoinbaseAccountsIdAddresses(self.extend(request, params))
        address = self.safe_string(response, 'address')
        tag = self.safe_string(response, 'destination_tag')
        return {
            'currency': code,
            'address': self.check_address(address),
            'tag': tag,
            'info': response,
        }
    def handle_errors(self, code, reason, url, method, headers, body, response, requestHeaders, requestBody):
        """Translate HTTP 400/404 error bodies into typed ccxt exceptions.

        JSON bodies are matched against the 'exact' and 'broad' tables from
        describe(); anything unmatched becomes a generic ExchangeError.
        Other status codes fall through to the base class handling.
        """
        if (code == 400) or (code == 404):
            if body[0] == '{':
                message = self.safe_string(response, 'message')
                feedback = self.id + ' ' + message
                self.throw_exactly_matched_exception(self.exceptions['exact'], message, feedback)
                self.throw_broadly_matched_exception(self.exceptions['broad'], message, feedback)
                raise ExchangeError(feedback)  # unknown message
            raise ExchangeError(self.id + ' ' + body)
async def request(self, path, api='public', method='GET', params={}, headers=None, body=None):
response = await self.fetch2(path, api, method, params, headers, body)
if not isinstance(response, basestring):
if 'message' in response:
raise ExchangeError(self.id + ' ' + self.json(response))
return response
| 39.402547 | 187 | 0.492667 |
rt.base.exchange import Exchange
try:
basestring
except NameError:
basestring = str
import hashlib
from ccxt.base.errors import ExchangeError
from ccxt.base.errors import AuthenticationError
from ccxt.base.errors import PermissionDenied
from ccxt.base.errors import ArgumentsRequired
from ccxt.base.errors import InsufficientFunds
from ccxt.base.errors import InvalidAddress
from ccxt.base.errors import InvalidOrder
from ccxt.base.errors import OrderNotFound
from ccxt.base.errors import NotSupported
from ccxt.base.errors import RateLimitExceeded
from ccxt.base.errors import OnMaintenance
from ccxt.base.decimal_to_precision import TICK_SIZE
class coinbasepro(Exchange):
def describe(self):
return self.deep_extend(super(coinbasepro, self).describe(), {
'id': 'coinbasepro',
'name': 'Coinbase Pro',
'countries': ['US'],
'rateLimit': 1000,
'userAgent': self.userAgents['chrome'],
'pro': True,
'has': {
'cancelAllOrders': True,
'cancelOrder': True,
'CORS': True,
'createDepositAddress': True,
'createOrder': True,
'deposit': True,
'fetchAccounts': True,
'fetchBalance': True,
'fetchCurrencies': True,
'fetchClosedOrders': True,
'fetchDepositAddress': True,
'fetchMarkets': True,
'fetchMyTrades': True,
'fetchOHLCV': True,
'fetchOpenOrders': True,
'fetchOrder': True,
'fetchOrderBook': True,
'fetchOrders': True,
'fetchOrderTrades': True,
'fetchTime': True,
'fetchTicker': True,
'fetchTrades': True,
'fetchTransactions': True,
'withdraw': True,
},
'timeframes': {
'1m': 60,
'5m': 300,
'15m': 900,
'1h': 3600,
'6h': 21600,
'1d': 86400,
},
'urls': {
'test': {
'public': 'https://api-public.sandbox.pro.coinbase.com',
'private': 'https://api-public.sandbox.pro.coinbase.com',
},
'logo': 'https://user-images.githubusercontent.com/1294454/41764625-63b7ffde-760a-11e8-996d-a6328fa9347a.jpg',
'api': {
'public': 'https://api.pro.coinbase.com',
'private': 'https://api.pro.coinbase.com',
},
'www': 'https://pro.coinbase.com/',
'doc': 'https://docs.pro.coinbase.com',
'fees': [
'https://docs.pro.coinbase.com/#fees',
'https://support.pro.coinbase.com/customer/en/portal/articles/2945310-fees',
],
},
'requiredCredentials': {
'apiKey': True,
'secret': True,
'password': True,
},
'api': {
'public': {
'get': [
'currencies',
'products',
'products/{id}',
'products/{id}/book',
'products/{id}/candles',
'products/{id}/stats',
'products/{id}/ticker',
'products/{id}/trades',
'time',
],
},
'private': {
'get': [
'accounts',
'accounts/{id}',
'accounts/{id}/holds',
'accounts/{id}/ledger',
'accounts/{id}/transfers',
'coinbase-accounts',
'coinbase-accounts/{id}/addresses',
'fills',
'funding',
'fees',
'margin/profile_information',
'margin/buying_power',
'margin/withdrawal_power',
'margin/withdrawal_power_all',
'margin/exit_plan',
'margin/liquidation_history',
'margin/position_refresh_amounts',
'margin/status',
'oracle',
'orders',
'orders/{id}',
'orders/client:{client_oid}',
'otc/orders',
'payment-methods',
'position',
'profiles',
'profiles/{id}',
'reports/{report_id}',
'transfers',
'transfers/{transfer_id}',
'users/self/trailing-volume',
'users/self/exchange-limits',
'withdrawals/fee-estimate',
],
'post': [
'conversions',
'deposits/coinbase-account',
'deposits/payment-method',
'coinbase-accounts/{id}/addresses',
'funding/repay',
'orders',
'position/close',
'profiles/margin-transfer',
'profiles/transfer',
'reports',
'withdrawals/coinbase',
'withdrawals/coinbase-account',
'withdrawals/crypto',
'withdrawals/payment-method',
],
'delete': [
'orders',
'orders/client:{client_oid}',
'orders/{id}',
],
},
},
'precisionMode': TICK_SIZE,
'fees': {
'trading': {
'tierBased': True,
'percentage': True,
'maker': 0.5 / 100,
'taker': 0.5 / 100,
},
'funding': {
'tierBased': False,
'percentage': False,
'withdraw': {
'BCH': 0,
'BTC': 0,
'LTC': 0,
'ETH': 0,
'EUR': 0.15,
'USD': 25,
},
'deposit': {
'BCH': 0,
'BTC': 0,
'LTC': 0,
'ETH': 0,
'EUR': 0.15,
'USD': 10,
},
},
},
'exceptions': {
'exact': {
'Insufficient funds': InsufficientFunds,
'NotFound': OrderNotFound,
'Invalid API Key': AuthenticationError,
'invalid signature': AuthenticationError,
'Invalid Passphrase': AuthenticationError,
'Invalid order id': InvalidOrder,
'Private rate limit exceeded': RateLimitExceeded,
'Trading pair not available': PermissionDenied,
'Product not found': InvalidOrder,
},
'broad': {
'Order already done': OrderNotFound,
'order not found': OrderNotFound,
'price too small': InvalidOrder,
'price too precise': InvalidOrder,
'under maintenance': OnMaintenance,
'size is too small': InvalidOrder,
'Cancel only mode': OnMaintenance,
},
},
})
async def fetch_currencies(self, params={}):
response = await self.publicGetCurrencies(params)
result = {}
for i in range(0, len(response)):
currency = response[i]
id = self.safe_string(currency, 'id')
name = self.safe_string(currency, 'name')
code = self.safe_currency_code(id)
details = self.safe_value(currency, 'details', {})
precision = self.safe_float(currency, 'max_precision')
status = self.safe_string(currency, 'status')
active = (status == 'online')
result[code] = {
'id': id,
'code': code,
'info': currency,
'type': self.safe_string(details, 'type'),
'name': name,
'active': active,
'fee': None,
'precision': precision,
'limits': {
'amount': {
'min': self.safe_float(details, 'min_size'),
'max': None,
},
'price': {
'min': None,
'max': None,
},
'cost': {
'min': None,
'max': None,
},
'withdraw': {
'min': self.safe_float(details, 'min_withdrawal_amount'),
'max': None,
},
},
}
return result
async def fetch_markets(self, params={}):
    """Fetch all trading pairs and map them into unified market structures.

    Calls GET /products; the shared trading-fee schema from
    self.fees['trading'] is merged into every market entry.
    """
    response = await self.publicGetProducts(params)
    result = []
    for i in range(0, len(response)):
        market = response[i]
        id = self.safe_string(market, 'id')
        baseId = self.safe_string(market, 'base_currency')
        quoteId = self.safe_string(market, 'quote_currency')
        base = self.safe_currency_code(baseId)
        quote = self.safe_currency_code(quoteId)
        symbol = base + '/' + quote
        priceLimits = {
            'min': self.safe_float(market, 'quote_increment'),
            'max': None,
        }
        precision = {
            'amount': self.safe_float(market, 'base_increment'),
            'price': self.safe_float(market, 'quote_increment'),
        }
        status = self.safe_string(market, 'status')
        active = (status == 'online')  # any other status is treated as inactive
        # merge the shared fee schema with the per-market fields
        result.append(self.extend(self.fees['trading'], {
            'id': id,
            'symbol': symbol,
            'baseId': baseId,
            'quoteId': quoteId,
            'base': base,
            'quote': quote,
            'precision': precision,
            'limits': {
                'amount': {
                    'min': self.safe_float(market, 'base_min_size'),
                    'max': self.safe_float(market, 'base_max_size'),
                },
                'price': priceLimits,
                'cost': {
                    'min': self.safe_float(market, 'min_market_funds'),
                    'max': self.safe_float(market, 'max_market_funds'),
                },
            },
            'active': active,
            'info': market,  # raw exchange payload
        }))
    return result
async def fetch_accounts(self, params={}):
    """Fetch the exchange accounts and return them as unified account structures."""
    response = await self.privateGetAccounts(params)
    result = []
    for account in response:
        currencyId = self.safe_string(account, 'currency')
        result.append({
            'id': self.safe_string(account, 'id'),
            'type': None,
            'currency': self.safe_currency_code(currencyId),
            'info': account,
        })
    return result
async def fetch_balance(self, params={}):
    """Query all account balances and return the unified balance structure."""
    await self.load_markets()
    response = await self.privateGetAccounts(params)
    result = {'info': response}
    for entry in response:
        code = self.safe_currency_code(self.safe_string(entry, 'currency'))
        result[code] = {
            'free': self.safe_float(entry, 'available'),
            'used': self.safe_float(entry, 'hold'),
            'total': self.safe_float(entry, 'balance'),
        }
    return self.parse_balance(result)
async def fetch_order_book(self, symbol, limit=None, params={}):
    """Fetch the level-2 aggregated order book for *symbol*; the exchange
    'sequence' number is attached as the book's nonce."""
    await self.load_markets()
    request = {'id': self.market_id(symbol), 'level': 2}
    response = await self.publicGetProductsIdBook(self.extend(request, params))
    book = self.parse_order_book(response)
    book['nonce'] = self.safe_integer(response, 'sequence')
    return book
def parse_ticker(self, ticker, market=None):
    """Convert a raw ticker payload into the unified ccxt ticker structure.

    :param dict ticker: raw ticker from the exchange
    :param dict market: unified market, used to resolve the symbol when given
    """
    timestamp = self.parse8601(self.safe_value(ticker, 'time'))
    bid = self.safe_float(ticker, 'bid')
    ask = self.safe_float(ticker, 'ask')
    last = self.safe_float(ticker, 'price')
    symbol = None if (market is None) else market['symbol']
    return {
        'symbol': symbol,
        'timestamp': timestamp,
        'datetime': self.iso8601(timestamp),
        'high': self.safe_float(ticker, 'high'),
        'low': self.safe_float(ticker, 'low'),
        'bid': bid,
        'bidVolume': None,
        'ask': ask,
        'askVolume': None,
        'vwap': None,
        'open': self.safe_float(ticker, 'open'),
        'close': last,  # the last trade price doubles as the close
        'last': last,
        'previousClose': None,
        'change': None,
        'percentage': None,
        'average': None,
        'baseVolume': self.safe_float(ticker, 'volume'),
        'quoteVolume': None,
        'info': ticker,  # raw payload for the caller
    }
async def fetch_ticker(self, symbol, params={}):
    """Fetch a price ticker for *symbol*.

    The endpoint is selectable via self.options['fetchTickerMethod'].
    """
    await self.load_markets()
    market = self.market(symbol)
    method = self.safe_string(self.options, 'fetchTickerMethod', 'publicGetProductsIdTicker')
    response = await getattr(self, method)(self.extend({'id': market['id']}, params))
    return self.parse_ticker(response, market)
def parse_trade(self, trade, market=None):
    """Convert a raw trade / fill payload into the unified ccxt trade structure.

    Handles both public trades and private fills, which use slightly
    different field names ('time' vs 'created_at', 'fill_fees' vs 'fee').
    """
    timestamp = self.parse8601(self.safe_string_2(trade, 'time', 'created_at'))
    marketId = self.safe_string(trade, 'product_id')
    symbol = self.safe_symbol(marketId, market, '-')
    feeRate = None
    feeCurrency = None
    takerOrMaker = None
    if market is not None:
        feeCurrency = market['quote']  # fees are expressed in the quote currency
        if 'liquidity' in trade:
            takerOrMaker = 'taker' if (trade['liquidity'] == 'T') else 'maker'
            feeRate = market[takerOrMaker]
    feeCost = self.safe_float_2(trade, 'fill_fees', 'fee')
    fee = {
        'cost': feeCost,
        'currency': feeCurrency,
        'rate': feeRate,
    }
    type = None
    id = self.safe_string(trade, 'trade_id')
    # public trades: the reported side is inverted here (presumably the feed
    # reports the maker's perspective - matches upstream ccxt behavior);
    # private fills (identified by an order_id below) keep their own side
    side = 'sell' if (trade['side'] == 'buy') else 'buy'
    orderId = self.safe_string(trade, 'order_id')
    if orderId is not None:
        side = 'buy' if (trade['side'] == 'buy') else 'sell'
    price = self.safe_float(trade, 'price')
    amount = self.safe_float(trade, 'size')
    return {
        'id': id,
        'order': orderId,
        'info': trade,
        'timestamp': timestamp,
        'datetime': self.iso8601(timestamp),
        'symbol': symbol,
        'type': type,
        'takerOrMaker': takerOrMaker,
        'side': side,
        'price': price,
        'amount': amount,
        'fee': fee,
        'cost': price * amount,
    }
async def fetch_my_trades(self, symbol=None, since=None, limit=None, params={}):
    """Fetch the account's own fills; the endpoint requires a product, so
    *symbol* is mandatory."""
    if symbol is None:
        raise ArgumentsRequired(self.id + ' fetchMyTrades requires a symbol argument')
    await self.load_markets()
    market = self.market(symbol)
    request = {'product_id': market['id']}
    if limit is not None:
        request['limit'] = limit
    fills = await self.privateGetFills(self.extend(request, params))
    return self.parse_trades(fills, market, since, limit)
async def fetch_trades(self, symbol, since=None, limit=None, params={}):
    """Fetch recent public trades for *symbol*."""
    await self.load_markets()
    market = self.market(symbol)
    raw_trades = await self.publicGetProductsIdTrades(self.extend({'id': market['id']}, params))
    return self.parse_trades(raw_trades, market, since, limit)
def parse_ohlcv(self, ohlcv, market=None):
    """Reorder one raw candle into the unified [timestamp, open, high, low,
    close, volume] list; the 3/2/1 index shuffle implies the raw order is
    [time, low, high, open, close, volume]."""
    return [
        self.safe_timestamp(ohlcv, 0),
        self.safe_float(ohlcv, 3),  # open
        self.safe_float(ohlcv, 2),  # high
        self.safe_float(ohlcv, 1),  # low
        self.safe_float(ohlcv, 4),  # close
        self.safe_float(ohlcv, 5),  # volume
    ]
async def fetch_ohlcv(self, symbol, timeframe='1m', since=None, limit=None, params={}):
    """Fetch OHLCV candles for *symbol*.

    :param str timeframe: key into self.timeframes, mapped to a granularity in seconds
    :param int since: earliest candle timestamp, in milliseconds
    :param int limit: maximum number of candles; used to compute the request window
    :param dict params: extra parameters merged into the request
    """
    await self.load_markets()
    market = self.market(symbol)
    granularity = self.timeframes[timeframe]
    request = {
        'id': market['id'],
        'granularity': granularity,
    }
    if since is not None:
        request['start'] = self.iso8601(since)
        if limit is None:
            # bug fix: the default was assigned to a misspelled local ('mit'),
            # leaving limit as None and crashing the arithmetic below
            limit = 300
        # window end = start + (limit - 1) candles, granularity is in seconds
        request['end'] = self.iso8601(self.sum((limit - 1) * granularity * 1000, since))
    response = await self.publicGetProductsIdCandles(self.extend(request, params))
    return self.parse_ohlcvs(response, market, timeframe, since, limit)
async def fetch_time(self, params={}):
    """Return the current exchange server time, in milliseconds."""
    server_time = await self.publicGetTime(params)
    return self.safe_timestamp(server_time, 'epoch')
def parse_order_status(self, status):
    """Map an exchange order status onto the unified ccxt vocabulary;
    unknown statuses pass through unchanged."""
    return self.safe_string({
        'pending': 'open',
        'active': 'open',
        'open': 'open',
        'done': 'closed',
        'canceled': 'canceled',
        'canceling': 'open',
    }, status, status)
def parse_order(self, order, market=None):
    """Convert a raw order payload into the unified ccxt order structure."""
    timestamp = self.parse8601(self.safe_string(order, 'created_at'))
    marketId = self.safe_string(order, 'product_id')
    market = self.safe_market(marketId, market, '-')
    status = self.parse_order_status(self.safe_string(order, 'status'))
    price = self.safe_float(order, 'price')
    filled = self.safe_float(order, 'filled_size')
    amount = self.safe_float(order, 'size', filled)  # falls back to filled when 'size' is absent
    remaining = None
    if amount is not None:
        if filled is not None:
            remaining = amount - filled
    cost = self.safe_float(order, 'executed_value')
    feeCost = self.safe_float(order, 'fill_fees')
    fee = None
    if feeCost is not None:
        feeCurrencyCode = None
        if market is not None:
            feeCurrencyCode = market['quote']  # fees are expressed in the quote currency
        fee = {
            'cost': feeCost,
            'currency': feeCurrencyCode,
            'rate': None,
        }
    id = self.safe_string(order, 'id')
    type = self.safe_string(order, 'type')
    side = self.safe_string(order, 'side')
    return {
        'id': id,
        'clientOrderId': None,
        'info': order,  # raw payload for the caller
        'timestamp': timestamp,
        'datetime': self.iso8601(timestamp),
        'lastTradeTimestamp': None,
        'status': status,
        'symbol': market['symbol'],
        'type': type,
        'side': side,
        'price': price,
        'cost': cost,
        'amount': amount,
        'filled': filled,
        'remaining': remaining,
        'fee': fee,
        'average': None,
        'trades': None,
    }
async def fetch_order(self, id, symbol=None, params={}):
    """Fetch a single order by its exchange id."""
    await self.load_markets()
    raw_order = await self.privateGetOrdersId(self.extend({'id': id}, params))
    return self.parse_order(raw_order)
async def fetch_order_trades(self, id, symbol=None, since=None, limit=None, params={}):
    """Fetch the fills belonging to a single order."""
    await self.load_markets()
    market = self.market(symbol) if symbol is not None else None
    fills = await self.privateGetFills(self.extend({'order_id': id}, params))
    return self.parse_trades(fills, market, since, limit)
async def fetch_orders(self, symbol=None, since=None, limit=None, params={}):
    """Fetch orders in every status, optionally restricted to one symbol."""
    await self.load_markets()
    market = self.market(symbol) if symbol is not None else None
    request = {'status': 'all'}
    if market is not None:
        request['product_id'] = market['id']
    response = await self.privateGetOrders(self.extend(request, params))
    return self.parse_orders(response, market, since, limit)
async def fetch_open_orders(self, symbol=None, since=None, limit=None, params={}):
    """Fetch currently open orders (the endpoint's default status filter)."""
    await self.load_markets()
    market = self.market(symbol) if symbol is not None else None
    request = {}
    if market is not None:
        request['product_id'] = market['id']
    response = await self.privateGetOrders(self.extend(request, params))
    return self.parse_orders(response, market, since, limit)
async def fetch_closed_orders(self, symbol=None, since=None, limit=None, params={}):
    """Fetch completed ('done') orders, optionally restricted to one symbol."""
    await self.load_markets()
    market = self.market(symbol) if symbol is not None else None
    request = {'status': 'done'}
    if market is not None:
        request['product_id'] = market['id']
    response = await self.privateGetOrders(self.extend(request, params))
    return self.parse_orders(response, market, since, limit)
async def create_order(self, symbol, type, side, amount, price=None, params={}):
    """Place an order.

    :param str type: 'limit' or 'market'; *price* is only sent with limit orders
    :param str side: 'buy' or 'sell'
    :param float amount: order size, rounded to the market's precision
    """
    await self.load_markets()
    request = {
        'product_id': self.market_id(symbol),
        'side': side,
        'size': self.amount_to_precision(symbol, amount),
        'type': type,
    }
    if type == 'limit':
        request['price'] = self.price_to_precision(symbol, price)
    response = await self.privatePostOrders(self.extend(request, params))
    return self.parse_order(response)
async def cancel_order(self, id, symbol=None, params={}):
    """Cancel an open order by its exchange id.

    Bug fix: *params* was previously accepted but silently discarded; it is
    now merged into the request, consistent with the other private calls.
    """
    await self.load_markets()
    return await self.privateDeleteOrdersId(self.extend({'id': id}, params))
async def cancel_all_orders(self, symbol=None, params={}):
    """Cancel all open orders.

    NOTE(review): *symbol* is accepted for interface compatibility but is not
    forwarded to the endpoint - confirm whether a product filter is wanted.
    """
    return await self.privateDeleteOrders(params)
def calculate_fee(self, symbol, type, side, amount, price, takerOrMaker='taker', params={}):
    """Compute the trading fee for a hypothetical order, in the quote currency."""
    market = self.markets[symbol]
    quote = market['quote']
    fee_rate = market[takerOrMaker]
    notional = amount * price
    return {
        'type': takerOrMaker,
        'currency': quote,
        'rate': fee_rate,
        'cost': float(self.currency_to_precision(quote, fee_rate * notional)),
    }
async def fetch_payment_methods(self, params={}):
    """Return the raw list of payment methods linked to the account."""
    return await self.privateGetPaymentMethods(params)
async def deposit(self, code, amount, address, params={}):
    """Create a deposit from a linked payment method or Coinbase account.

    One of 'payment_method_id' or 'coinbase_account_id' must be present in
    *params* to select the funding source; *address* is not used here.
    """
    await self.load_markets()
    currency = self.currency(code)
    request = {
        'currency': currency['id'],
        'amount': amount,
    }
    # dispatch to the endpoint matching the funding source in params
    method = 'privatePostDeposits'
    if 'payment_method_id' in params:
        method += 'PaymentMethod'
    elif 'coinbase_account_id' in params:
        method += 'CoinbaseAccount'
    else:
        raise NotSupported(self.id + ' deposit() requires one of `coinbase_account_id` or `payment_method_id` extra params')
    response = await getattr(self, method)(self.extend(request, params))
    if not response:
        raise ExchangeError(self.id + ' deposit() error: ' + self.json(response))
    return {
        'info': response,
        'id': response['id'],
    }
async def withdraw(self, code, amount, address, tag=None, params={}):
    """Withdraw funds to a crypto address, a payment method or a Coinbase account.

    The destination is chosen from *params* ('payment_method_id' or
    'coinbase_account_id'); otherwise a plain crypto withdrawal to *address*.
    """
    self.check_address(address)
    await self.load_markets()
    currency = self.currency(code)
    request = {
        'currency': currency['id'],
        'amount': amount,
    }
    # dispatch to the endpoint matching the destination in params
    method = 'privatePostWithdrawals'
    if 'payment_method_id' in params:
        method += 'PaymentMethod'
    elif 'coinbase_account_id' in params:
        method += 'CoinbaseAccount'
    else:
        method += 'Crypto'
        request['crypto_address'] = address
    response = await getattr(self, method)(self.extend(request, params))
    if not response:
        raise ExchangeError(self.id + ' withdraw() error: ' + self.json(response))
    return {
        'info': response,
        'id': response['id'],
    }
async def fetch_transactions(self, code=None, since=None, limit=None, params={}):
    """Fetch deposits and withdrawals for one account.

    The account is selected either by an explicit 'id' in *params* or by
    resolving *code* against the cached accounts list.
    """
    await self.load_markets()
    await self.load_accounts()
    currency = None
    id = self.safe_string(params, 'id')
    if id is None:
        if code is None:
            raise ArgumentsRequired(self.id + ' fetchTransactions() requires a currency code argument if no account id specified in params')
        currency = self.currency(code)
        accountsByCurrencyCode = self.index_by(self.accounts, 'currency')
        account = self.safe_value(accountsByCurrencyCode, code)
        if account is None:
            raise ExchangeError(self.id + ' fetchTransactions() could not find account id for ' + code)
        id = account['id']
    request = {
        'id': id,
    }
    if limit is not None:
        request['limit'] = limit
    response = await self.privateGetAccountsIdTransfers(self.extend(request, params))
    for i in range(0, len(response)):
        response[i]['currency'] = code  # the endpoint omits the currency; inject it
    return self.parse_transactions(response, currency, since, limit)
def parse_transaction_status(self, transaction):
    """Derive a unified transaction status from the transfer timestamp fields."""
    if self.safe_value(transaction, 'canceled_at'):
        return 'canceled'
    if self.safe_value(transaction, 'completed_at'):
        return 'ok'
    if self.safe_value(transaction, 'processed_at'):
        # processed but never completed means the transfer failed
        return 'failed'
    return 'pending'
def parse_transaction(self, transaction, currency=None):
    """Convert a raw accounts/{id}/transfers entry into a unified transaction."""
    details = self.safe_value(transaction, 'details', {})
    id = self.safe_string(transaction, 'id')
    txid = self.safe_string(details, 'crypto_transaction_hash')
    timestamp = self.parse8601(self.safe_string(transaction, 'created_at'))
    updated = self.parse8601(self.safe_string(transaction, 'processed_at'))
    currencyId = self.safe_string(transaction, 'currency')
    code = self.safe_currency_code(currencyId, currency)
    fee = None
    status = self.parse_transaction_status(transaction)
    amount = self.safe_float(transaction, 'amount')
    type = self.safe_string(transaction, 'type')
    address = self.safe_string(details, 'crypto_address')
    tag = self.safe_string(details, 'destination_tag')
    # a top-level crypto_address, when present, overrides the details field
    address = self.safe_string(transaction, 'crypto_address', address)
    if type == 'withdraw':
        type = 'withdrawal'  # normalize to the unified type name
        address = self.safe_string(details, 'sent_to_address', address)
    return {
        'info': transaction,  # raw payload for the caller
        'id': id,
        'txid': txid,
        'timestamp': timestamp,
        'datetime': self.iso8601(timestamp),
        'address': address,
        'tag': tag,
        'type': type,
        'amount': amount,
        'currency': code,
        'status': status,
        'updated': updated,
        'fee': fee,
    }
def sign(self, path, api='public', method='GET', params={}, headers=None, body=None):
    """Build the URL, headers and body for a request.

    Private calls are signed with HMAC-SHA256 over nonce + method + path +
    body, using the base64-decoded API secret, and sent via the
    CB-ACCESS-* header scheme.
    """
    request = '/' + self.implode_params(path, params)
    # params consumed by the path template must not be sent again
    query = self.omit(params, self.extract_params(path))
    if method == 'GET':
        if query:
            request += '?' + self.urlencode(query)
    url = self.urls['api'][api] + request
    if api == 'private':
        self.check_required_credentials()
        nonce = str(self.nonce())
        payload = ''
        if method != 'GET':
            if query:
                body = self.json(query)
                payload = body  # the JSON body is part of the signed message
        what = nonce + method + request + payload
        secret = self.base64_to_binary(self.secret)
        signature = self.hmac(self.encode(what), secret, hashlib.sha256, 'base64')
        headers = {
            'CB-ACCESS-KEY': self.apiKey,
            'CB-ACCESS-SIGN': signature,
            'CB-ACCESS-TIMESTAMP': nonce,
            'CB-ACCESS-PASSPHRASE': self.password,
            'Content-Type': 'application/json',
        }
    return {'url': url, 'method': method, 'body': body, 'headers': headers}
async def fetch_deposit_address(self, code, params={}):
    """Look up the deposit address for *code* via the linked Coinbase account.

    The Coinbase accounts list is fetched once and cached under
    self.options['coinbaseAccounts'] / ['coinbaseAccountsByCurrencyId'].
    """
    await self.load_markets()
    currency = self.currency(code)
    accounts = self.safe_value(self.options, 'coinbaseAccounts')
    if accounts is None:
        accounts = await self.privateGetCoinbaseAccounts()
        self.options['coinbaseAccounts'] = accounts  # cache for later calls
        self.options['coinbaseAccountsByCurrencyId'] = self.index_by(accounts, 'currency')
    currencyId = currency['id']
    account = self.safe_value(self.options['coinbaseAccountsByCurrencyId'], currencyId)
    if account is None:
        raise InvalidAddress(self.id + " fetchDepositAddress() could not find currency code " + code + " with id = " + currencyId + " in self.options['coinbaseAccountsByCurrencyId']")
    request = {
        'id': account['id'],
    }
    response = await self.privateGetCoinbaseAccountsIdAddresses(self.extend(request, params))
    address = self.safe_string(response, 'address')
    tag = self.safe_string(response, 'destination_tag')
    return {
        'currency': code,
        'address': self.check_address(address),
        'tag': tag,
        'info': response,
    }
async def create_deposit_address(self, code, params={}):
    """Create a new deposit address for *code* on the linked Coinbase account.

    The Coinbase accounts list is fetched once and cached under
    self.options['coinbaseAccounts'] / ['coinbaseAccountsByCurrencyId'].
    """
    await self.load_markets()
    currency = self.currency(code)
    accounts = self.safe_value(self.options, 'coinbaseAccounts')
    if accounts is None:
        accounts = await self.privateGetCoinbaseAccounts()
        self.options['coinbaseAccounts'] = accounts  # cache for later calls
        self.options['coinbaseAccountsByCurrencyId'] = self.index_by(accounts, 'currency')
    currencyId = currency['id']
    account = self.safe_value(self.options['coinbaseAccountsByCurrencyId'], currencyId)
    if account is None:
        # bug fix: the message previously named fetchDepositAddress() (copy-paste)
        raise InvalidAddress(self.id + " createDepositAddress() could not find currency code " + code + " with id = " + currencyId + " in self.options['coinbaseAccountsByCurrencyId']")
    request = {
        'id': account['id'],
    }
    response = await self.privatePostCoinbaseAccountsIdAddresses(self.extend(request, params))
    address = self.safe_string(response, 'address')
    tag = self.safe_string(response, 'destination_tag')
    return {
        'currency': code,
        'address': self.check_address(address),
        'tag': tag,
        'info': response,
    }
def handle_errors(self, code, reason, url, method, headers, body, response, requestHeaders, requestBody):
    """Raise a mapped ccxt exception for 400/404 HTTP responses.

    JSON bodies are matched against self.exceptions['exact'] and ['broad'];
    anything unmatched (or non-JSON) becomes a generic ExchangeError.
    """
    if (code == 400) or (code == 404):
        if body[0] == '{':  # cheap JSON sniff before trusting the parsed response
            message = self.safe_string(response, 'message')
            feedback = self.id + ' ' + message
            self.throw_exactly_matched_exception(self.exceptions['exact'], message, feedback)
            self.throw_broadly_matched_exception(self.exceptions['broad'], message, feedback)
            raise ExchangeError(feedback)
        raise ExchangeError(self.id + ' ' + body)
async def request(self, path, api='public', method='GET', params={}, headers=None, body=None):
    """Perform the HTTP request and surface error payloads that slipped past
    handle_errors (a dict response containing a 'message' key).

    NOTE(review): `basestring` is a Python 2 name; presumably supplied by a
    compatibility shim imported elsewhere in this module - confirm.
    """
    response = await self.fetch2(path, api, method, params, headers, body)
    if not isinstance(response, basestring):
        if 'message' in response:
            raise ExchangeError(self.id + ' ' + self.json(response))
    return response
| true | true |
f71bff11a394180fe27dbab6c598aaf8b04768c6 | 10,059 | py | Python | mlflow/spacy.py | Roffild/mlflow | 8351d82e6ad4103bc58159175b29b406abb1e052 | [
"Apache-2.0"
] | null | null | null | mlflow/spacy.py | Roffild/mlflow | 8351d82e6ad4103bc58159175b29b406abb1e052 | [
"Apache-2.0"
] | null | null | null | mlflow/spacy.py | Roffild/mlflow | 8351d82e6ad4103bc58159175b29b406abb1e052 | [
"Apache-2.0"
] | null | null | null | """
The ``mlflow.spacy`` module provides an API for logging and loading spaCy models.
This module exports spacy models with the following flavors:
spaCy (native) format
This is the main flavor that can be loaded back into spaCy.
:py:mod:`mlflow.pyfunc`
Produced for use by generic pyfunc-based deployment tools and batch inference.
"""
from __future__ import absolute_import
import logging
import os
import pandas as pd
import yaml
import mlflow
from mlflow import pyfunc
from mlflow.exceptions import MlflowException
from mlflow.models import Model, ModelSignature
from mlflow.models.utils import ModelInputExample
from mlflow.tracking.artifact_utils import _download_artifact_from_uri
from mlflow.utils.environment import _mlflow_conda_env
from mlflow.utils.model_utils import _get_flavor_configuration
FLAVOR_NAME = "spacy"
_logger = logging.getLogger(__name__)
def get_default_conda_env():
    """
    :return: The default Conda environment for MLflow Models produced by calls to
             :func:`save_model()` and :func:`log_model()`.
    """
    import spacy
    pip_deps = ["spacy=={}".format(spacy.__version__)]
    return _mlflow_conda_env(
        additional_conda_deps=None,
        additional_pip_deps=pip_deps,
        additional_conda_channels=None,
    )
def save_model(spacy_model, path, conda_env=None, mlflow_model=None):
    """
    Save a spaCy model to a path on the local file system.
    :param spacy_model: spaCy model to be saved.
    :param path: Local path where the model is to be saved.
    :param conda_env: Either a dictionary representation of a Conda environment or the path to a
                      Conda environment yaml file. If provided, this describes the environment
                      this model should be run in. At minimum, it should specify the dependencies
                      contained in :func:`get_default_conda_env()`. If ``None``, the default
                      :func:`get_default_conda_env()` environment is added to the model.
    :param mlflow_model: :py:mod:`mlflow.models.Model` this flavor is being added to.
                         If ``None`` (the default), a fresh ``Model`` is created.
    """
    import spacy
    if mlflow_model is None:
        # bug fix: the previous `mlflow_model=Model()` default created a single
        # shared Model instance at import time, so flavors accumulated across calls
        mlflow_model = Model()
    path = os.path.abspath(path)
    if os.path.exists(path):
        raise MlflowException("Unable to save MLflow model to {path} - path '{path}' "
                              "already exists".format(path=path))
    model_data_subpath = "model.spacy"
    model_data_path = os.path.join(path, model_data_subpath)
    os.makedirs(model_data_path)
    # Save spacy-model
    spacy_model.to_disk(path=model_data_path)
    conda_env_subpath = "conda.yaml"
    if conda_env is None:
        conda_env = get_default_conda_env()
    elif not isinstance(conda_env, dict):
        with open(conda_env, "r") as f:
            conda_env = yaml.safe_load(f)
    with open(os.path.join(path, conda_env_subpath), "w") as f:
        yaml.safe_dump(conda_env, stream=f, default_flow_style=False)
    # Save the pyfunc flavor if at least one text categorizer in spaCy pipeline
    if any([isinstance(pipe_component[1], spacy.pipeline.TextCategorizer)
            for pipe_component in spacy_model.pipeline]):
        pyfunc.add_to_model(mlflow_model, loader_module="mlflow.spacy",
                            data=model_data_subpath, env=conda_env_subpath)
    else:
        _logger.warning(
            "Generating only the spacy flavor for the provided spacy model. This means the model "
            "can be loaded back via `mlflow.spacy.load_model`, but cannot be loaded back using "
            "pyfunc APIs like `mlflow.pyfunc.load_model` or via the `mlflow models` CLI commands. "
            "MLflow will only generate the pyfunc flavor for spacy models containing a pipeline "
            "component that is an instance of spacy.pipeline.TextCategorizer.")
    mlflow_model.add_flavor(FLAVOR_NAME, spacy_version=spacy.__version__, data=model_data_subpath)
    mlflow_model.save(os.path.join(path, "MLmodel"))
def log_model(spacy_model, artifact_path, conda_env=None, registered_model_name=None,
              signature: ModelSignature = None, input_example: ModelInputExample = None, **kwargs):
    """
    Log a spaCy model as an MLflow artifact for the current run.
    :param spacy_model: spaCy model to be saved.
    :param artifact_path: Run-relative artifact path.
    :param conda_env: Either a dictionary representation of a Conda environment or the path to a
                      Conda environment yaml file describing the environment this model should be
                      run in; at minimum it should specify the dependencies contained in
                      :func:`get_default_conda_env()`. If ``None``, the default
                      :func:`get_default_conda_env()` environment is added to the model.
    :param registered_model_name: (Experimental) If given, create a model version under
                                  ``registered_model_name``, also creating a registered model if
                                  one with the given name does not exist.
    :param signature: (Experimental) :py:class:`ModelSignature <mlflow.models.ModelSignature>`
                      describing model input and output
                      :py:class:`Schema <mlflow.types.Schema>`; it can be
                      :py:func:`inferred <mlflow.models.infer_signature>` from a dataset with
                      valid model input and the corresponding model output.
    :param input_example: (Experimental) One or several instances of valid model input; converted
                          to a Pandas DataFrame and serialized to json using the Pandas
                          split-oriented format. Bytes are base64-encoded.
    :param kwargs: kwargs to pass to ``spacy.save_model`` method.
    """
    Model.log(artifact_path=artifact_path, flavor=mlflow.spacy,
              registered_model_name=registered_model_name,
              spacy_model=spacy_model, conda_env=conda_env,
              signature=signature, input_example=input_example, **kwargs)
def _load_model(path):
    """Load a serialized spaCy model from a local directory."""
    import spacy
    return spacy.load(os.path.abspath(path))
class _SpacyModelWrapper:
    """Pyfunc-compatible wrapper that applies a spaCy text categorizer to a
    one-column pandas DataFrame of texts."""

    def __init__(self, spacy_model):
        self.spacy_model = spacy_model

    def predict(self, dataframe):
        """
        Only works for predicting using text categorizer.
        Not suitable for other pipeline components (e.g: parser)
        :param dataframe: pandas dataframe containing texts to be categorized
                          expected shape is (n_rows,1 column)
        :return: dataframe with predictions
        """
        if len(dataframe.columns) != 1:
            raise MlflowException('Shape of input dataframe must be (n_rows, 1column)')
        # bug fix: DataFrame.ix was removed in pandas 1.0; use positional iloc
        return pd.DataFrame({
            'predictions': dataframe.iloc[:, 0].apply(lambda text: self.spacy_model(text).cats)
        })
def _load_pyfunc(path):
    """
    Load PyFunc implementation. Called by ``pyfunc.load_pyfunc``.
    :param path: Local filesystem path to the MLflow Model with the ``spacy`` flavor.
    """
    spacy_model = _load_model(path)
    return _SpacyModelWrapper(spacy_model)
def load_model(model_uri):
    """
    Load a spaCy model from a local file or a run.
    :param model_uri: The location, in URI format, of the MLflow model. For example:
                      - ``/Users/me/path/to/local/model``
                      - ``relative/path/to/local/model``
                      - ``s3://my_bucket/path/to/model``
                      - ``runs:/<mlflow_run_id>/run-relative/path/to/model``
                      - ``models:/<model_name>/<model_version>``
                      - ``models:/<model_name>/<stage>``
                      For more information about supported URI schemes, see
                      `Referencing Artifacts <https://www.mlflow.org/docs/latest/concepts.html#
                      artifact-locations>`_.
    :return: A spaCy loaded model
    """
    local_model_path = _download_artifact_from_uri(artifact_uri=model_uri)
    flavor_conf = _get_flavor_configuration(model_path=local_model_path, flavor_name=FLAVOR_NAME)
    # Flavor configurations for models saved in MLflow version <= 0.8.0 may not contain a
    # `data` key; in this case, we assume the model artifact path to be `model.spacy`
    spacy_model_file_path = os.path.join(local_model_path, flavor_conf.get("data", "model.spacy"))
    return _load_model(path=spacy_model_file_path)
| 43.357759 | 100 | 0.617457 |
from __future__ import absolute_import
import logging
import os
import pandas as pd
import yaml
import mlflow
from mlflow import pyfunc
from mlflow.exceptions import MlflowException
from mlflow.models import Model, ModelSignature
from mlflow.models.utils import ModelInputExample
from mlflow.tracking.artifact_utils import _download_artifact_from_uri
from mlflow.utils.environment import _mlflow_conda_env
from mlflow.utils.model_utils import _get_flavor_configuration
FLAVOR_NAME = "spacy"
_logger = logging.getLogger(__name__)
def get_default_conda_env():
    """Return the default Conda environment for models saved by this flavor,
    pinning the installed spaCy version as a pip dependency."""
    import spacy
    return _mlflow_conda_env(
        additional_conda_deps=None,
        additional_pip_deps=[
            "spacy=={}".format(spacy.__version__),
        ],
        additional_conda_channels=None)
def save_model(spacy_model, path, conda_env=None, mlflow_model=Model()):
    """Save a spaCy model to *path* on the local file system.

    NOTE(review): ``mlflow_model=Model()`` is a mutable default argument - the
    same Model instance is shared across calls; confirm callers always pass one.
    """
    import spacy
    path = os.path.abspath(path)
    if os.path.exists(path):
        raise MlflowException("Unable to save MLflow model to {path} - path '{path}' "
                              "already exists".format(path=path))
    model_data_subpath = "model.spacy"
    model_data_path = os.path.join(path, model_data_subpath)
    os.makedirs(model_data_path)
    # serialize the spaCy model itself
    spacy_model.to_disk(path=model_data_path)
    conda_env_subpath = "conda.yaml"
    if conda_env is None:
        conda_env = get_default_conda_env()
    elif not isinstance(conda_env, dict):
        with open(conda_env, "r") as f:
            conda_env = yaml.safe_load(f)
    with open(os.path.join(path, conda_env_subpath), "w") as f:
        yaml.safe_dump(conda_env, stream=f, default_flow_style=False)
    # the pyfunc flavor is only added when the pipeline contains a TextCategorizer
    if any([isinstance(pipe_component[1], spacy.pipeline.TextCategorizer)
            for pipe_component in spacy_model.pipeline]):
        pyfunc.add_to_model(mlflow_model, loader_module="mlflow.spacy",
                            data=model_data_subpath, env=conda_env_subpath)
    else:
        _logger.warning(
            "Generating only the spacy flavor for the provided spacy model. This means the model "
            "can be loaded back via `mlflow.spacy.load_model`, but cannot be loaded back using "
            "pyfunc APIs like `mlflow.pyfunc.load_model` or via the `mlflow models` CLI commands. "
            "MLflow will only generate the pyfunc flavor for spacy models containing a pipeline "
            "component that is an instance of spacy.pipeline.TextCategorizer.")
    mlflow_model.add_flavor(FLAVOR_NAME, spacy_version=spacy.__version__, data=model_data_subpath)
    mlflow_model.save(os.path.join(path, "MLmodel"))
def log_model(spacy_model, artifact_path, conda_env=None, registered_model_name=None,
              signature: ModelSignature = None, input_example: ModelInputExample = None, **kwargs):
    """Log a spaCy model as an MLflow artifact for the current run by delegating
    to :py:meth:`mlflow.models.Model.log` with this module as the flavor."""
    Model.log(artifact_path=artifact_path, flavor=mlflow.spacy,
              registered_model_name=registered_model_name,
              spacy_model=spacy_model, conda_env=conda_env,
              signature=signature, input_example=input_example, **kwargs)
def _load_model(path):
    """Load a serialized spaCy model from a local directory."""
    import spacy
    path = os.path.abspath(path)
    return spacy.load(path)
class _SpacyModelWrapper:
    """Pyfunc-compatible wrapper that applies a spaCy text categorizer to a
    one-column pandas DataFrame of texts."""

    def __init__(self, spacy_model):
        self.spacy_model = spacy_model

    def predict(self, dataframe):
        """Return a DataFrame with a single 'predictions' column of `.cats` dicts."""
        if len(dataframe.columns) != 1:
            raise MlflowException('Shape of input dataframe must be (n_rows, 1column)')
        # bug fix: DataFrame.ix was removed in pandas 1.0; use positional iloc
        return pd.DataFrame({
            'predictions': dataframe.iloc[:, 0].apply(lambda text: self.spacy_model(text).cats)
        })
def _load_pyfunc(path):
    """Load the pyfunc flavor; called by ``mlflow.pyfunc.load_pyfunc``."""
    return _SpacyModelWrapper(_load_model(path))
def load_model(model_uri):
    """Load a spaCy model from a local path or a run / model-registry URI."""
    local_model_path = _download_artifact_from_uri(artifact_uri=model_uri)
    flavor_conf = _get_flavor_configuration(model_path=local_model_path, flavor_name=FLAVOR_NAME)
    # models saved with MLflow <= 0.8.0 may lack the 'data' key; fall back to 'model.spacy'
    spacy_model_file_path = os.path.join(local_model_path, flavor_conf.get("data", "model.spacy"))
    return _load_model(path=spacy_model_file_path)
| true | true |
f71bff714d7bf1d24454e59616e23e72f9782452 | 629 | py | Python | December Month Challenge/2LinkedListRandomNodeReservoirSampling.py | adesh-gadge/LeetCodePractice | 4b142c102e64ec93465af7f4193762e8fd2866ec | [
"MIT"
] | null | null | null | December Month Challenge/2LinkedListRandomNodeReservoirSampling.py | adesh-gadge/LeetCodePractice | 4b142c102e64ec93465af7f4193762e8fd2866ec | [
"MIT"
] | null | null | null | December Month Challenge/2LinkedListRandomNodeReservoirSampling.py | adesh-gadge/LeetCodePractice | 4b142c102e64ec93465af7f4193762e8fd2866ec | [
"MIT"
] | null | null | null | import random
class Solution:
def __init__(self, head: ListNode):
"""
@param head The linked list's head.
Note that the head is guaranteed to be not null, so it contains at least one node.
"""
self.head = head
def getRandom(self) -> int:
"""
Returns a random node's value.
"""
scope = 1
chosen_value = 0
curr = self.head
while curr:
if random.random() < 1/scope:
chosen_value = curr.val
curr = curr.next
scope +=1
return chosen_value
| 23.296296 | 90 | 0.497615 | import random
class Solution:
    # Uniformly random node value from a singly linked list via reservoir
    # sampling (reservoir size 1): O(1) extra space, single pass per query.
    def __init__(self, head: ListNode):
        # the head is guaranteed non-null per the problem statement
        self.head = head
    def getRandom(self) -> int:
        # node i (1-based) replaces the pick with probability 1/i,
        # giving every node an equal 1/n chance overall
        scope = 1
        chosen_value = 0
        curr = self.head
        while curr:
            if random.random() < 1/scope:
                chosen_value = curr.val
            curr = curr.next
            scope +=1
        return chosen_value
| true | true |
f71bff92a942ff8d052d7208bd78c572f2d01c55 | 132 | py | Python | artistapp/artist/admin.py | fallprojects/ArtistApp | 5564a1f7f4fc95261beb462abfa4ca53f3e5c17f | [
"MIT"
] | null | null | null | artistapp/artist/admin.py | fallprojects/ArtistApp | 5564a1f7f4fc95261beb462abfa4ca53f3e5c17f | [
"MIT"
] | null | null | null | artistapp/artist/admin.py | fallprojects/ArtistApp | 5564a1f7f4fc95261beb462abfa4ca53f3e5c17f | [
"MIT"
] | null | null | null | from django.contrib import admin
from .models import *
# Register your models here.
admin.site.register([Content,Profile,Comment])
| 22 | 46 | 0.787879 | from django.contrib import admin
from .models import *
admin.site.register([Content,Profile,Comment])
| true | true |
f71c00b21518c6757cb661f74b98f03394d9f8a6 | 749 | py | Python | parqueo/urls.py | gersonjuarez/Laboratorio | 3ed5dc57136ec593e3edb8ef8ca3b29abeb7dabc | [
"bzip2-1.0.6"
] | null | null | null | parqueo/urls.py | gersonjuarez/Laboratorio | 3ed5dc57136ec593e3edb8ef8ca3b29abeb7dabc | [
"bzip2-1.0.6"
] | null | null | null | parqueo/urls.py | gersonjuarez/Laboratorio | 3ed5dc57136ec593e3edb8ef8ca3b29abeb7dabc | [
"bzip2-1.0.6"
] | null | null | null | """parqueo URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path
urlpatterns = [
path('admin/', admin.site.urls),
]
| 34.045455 | 77 | 0.708945 | from django.contrib import admin
from django.urls import path
urlpatterns = [
path('admin/', admin.site.urls),
]
| true | true |
f71c0110baa8d07d6722fcdc95decbbb7f63ec63 | 2,576 | py | Python | arviz/plots/backends/matplotlib/distplot.py | StanczakDominik/arviz | ec33b4cc7d4a6d5ba95a87a43ef226a49c2cb287 | [
"Apache-2.0"
] | null | null | null | arviz/plots/backends/matplotlib/distplot.py | StanczakDominik/arviz | ec33b4cc7d4a6d5ba95a87a43ef226a49c2cb287 | [
"Apache-2.0"
] | null | null | null | arviz/plots/backends/matplotlib/distplot.py | StanczakDominik/arviz | ec33b4cc7d4a6d5ba95a87a43ef226a49c2cb287 | [
"Apache-2.0"
] | null | null | null | """Matplotlib distplot."""
import warnings
import matplotlib.pyplot as plt
import numpy as np
from . import backend_show
from ...kdeplot import plot_kde
from ...plot_utils import matplotlib_kwarg_dealiaser
from ....numeric_utils import get_bins
def plot_dist(
    values,
    values2,
    color,
    kind,
    cumulative,
    label,
    rotated,
    rug,
    bw,
    quantiles,
    contour,
    fill_last,
    textsize,
    plot_kwargs,
    fill_kwargs,
    rug_kwargs,
    contour_kwargs,
    contourf_kwargs,
    pcolormesh_kwargs,
    hist_kwargs,
    ax,
    backend_kwargs,
    show,
):
    """Matplotlib distplot.

    Draws either a histogram (``kind="hist"``) or a KDE plot
    (``kind="kde"``) of ``values`` on ``ax`` and returns the axes.
    ``backend_kwargs`` has no effect in this backend; passing a value only
    emits a warning.
    """
    if backend_kwargs is not None:
        # Fixed message: it previously read "has not effect" and the two
        # string literals were concatenated without any separator.
        warnings.warn(
            (
                "Argument backend_kwargs has no effect in matplotlib.plot_dist. "
                "Supplied value won't be used"
            )
        )
        backend_kwargs = None
    if ax is None:
        ax = plt.gca()
    if kind == "hist":
        ax = _histplot_mpl_op(
            values=values, values2=values2, rotated=rotated, ax=ax, hist_kwargs=hist_kwargs
        )
    elif kind == "kde":
        plot_kwargs = matplotlib_kwarg_dealiaser(plot_kwargs, "plot")
        plot_kwargs.setdefault("color", color)
        legend = label is not None  # only show a legend when a label exists
        ax = plot_kde(
            values,
            values2,
            cumulative=cumulative,
            rug=rug,
            label=label,
            bw=bw,
            quantiles=quantiles,
            rotated=rotated,
            contour=contour,
            legend=legend,
            fill_last=fill_last,
            textsize=textsize,
            plot_kwargs=plot_kwargs,
            fill_kwargs=fill_kwargs,
            rug_kwargs=rug_kwargs,
            contour_kwargs=contour_kwargs,
            contourf_kwargs=contourf_kwargs,
            pcolormesh_kwargs=pcolormesh_kwargs,
            ax=ax,
            backend="matplotlib",
            backend_kwargs=backend_kwargs,
            show=show,
        )
    if backend_show(show):
        plt.show()
    return ax
def _histplot_mpl_op(values, values2, rotated, ax, hist_kwargs):
    """Add a histogram for the data to the axes.

    :param values: data to histogram; flattened before plotting.
    :param values2: second variable -- not supported; raises NotImplementedError.
    :param rotated: when True the histogram is horizontal, so bin-edge
        ticks go on the y axis instead of the x axis.
    :param ax: matplotlib axes to draw on.
    :param hist_kwargs: keyword arguments forwarded to ``Axes.hist``; an
        optional "bins" entry overrides the automatically computed bins.
    :returns: the axes the histogram was drawn on.
    """
    if values2 is not None:
        raise NotImplementedError("Insert hexbin plot here")
    # Work on a copy so the caller's dict is not mutated, and tolerate a
    # missing "bins" key instead of raising KeyError.
    hist_kwargs = dict(hist_kwargs)
    bins = hist_kwargs.pop("bins", None)
    if bins is None:
        bins = get_bins(values)
    ax.hist(np.asarray(values).flatten(), bins=bins, **hist_kwargs)
    # Tick every bin edge except the last on the value axis.
    if rotated:
        ax.set_yticks(bins[:-1])
    else:
        ax.set_xticks(bins[:-1])
    if hist_kwargs.get("label") is not None:
        ax.legend()
    return ax
| 24.301887 | 91 | 0.588898 | import warnings
import matplotlib.pyplot as plt
import numpy as np
from . import backend_show
from ...kdeplot import plot_kde
from ...plot_utils import matplotlib_kwarg_dealiaser
from ....numeric_utils import get_bins
def plot_dist(
values,
values2,
color,
kind,
cumulative,
label,
rotated,
rug,
bw,
quantiles,
contour,
fill_last,
textsize,
plot_kwargs,
fill_kwargs,
rug_kwargs,
contour_kwargs,
contourf_kwargs,
pcolormesh_kwargs,
hist_kwargs,
ax,
backend_kwargs,
show,
):
if backend_kwargs is not None:
warnings.warn(
(
"Argument backend_kwargs has not effect in matplotlib.plot_dist"
"Supplied value won't be used"
)
)
backend_kwargs = None
if ax is None:
ax = plt.gca()
if kind == "hist":
ax = _histplot_mpl_op(
values=values, values2=values2, rotated=rotated, ax=ax, hist_kwargs=hist_kwargs
)
elif kind == "kde":
plot_kwargs = matplotlib_kwarg_dealiaser(plot_kwargs, "plot")
plot_kwargs.setdefault("color", color)
legend = label is not None
ax = plot_kde(
values,
values2,
cumulative=cumulative,
rug=rug,
label=label,
bw=bw,
quantiles=quantiles,
rotated=rotated,
contour=contour,
legend=legend,
fill_last=fill_last,
textsize=textsize,
plot_kwargs=plot_kwargs,
fill_kwargs=fill_kwargs,
rug_kwargs=rug_kwargs,
contour_kwargs=contour_kwargs,
contourf_kwargs=contourf_kwargs,
pcolormesh_kwargs=pcolormesh_kwargs,
ax=ax,
backend="matplotlib",
backend_kwargs=backend_kwargs,
show=show,
)
if backend_show(show):
plt.show()
return ax
def _histplot_mpl_op(values, values2, rotated, ax, hist_kwargs):
    """Add a histogram of ``values`` to ``ax`` and return the axes.

    NOTE(review): pops "bins" out of ``hist_kwargs`` (mutates the caller's
    dict) and assumes the key is present -- raises KeyError otherwise.
    """
    if values2 is not None:
        # 2-D histograms are not supported by this backend.
        raise NotImplementedError("Insert hexbin plot here")
    bins = hist_kwargs.pop("bins")
    if bins is None:
        # No explicit bins supplied: derive them from the data.
        bins = get_bins(values)
    ax.hist(np.asarray(values).flatten(), bins=bins, **hist_kwargs)
    # Tick every bin edge except the last on the value axis.
    if rotated:
        ax.set_yticks(bins[:-1])
    else:
        ax.set_xticks(bins[:-1])
    if hist_kwargs.get("label") is not None:
        ax.legend()
    return ax
| true | true |
f71c01c109a11f63936b4d9b8326f9b1b1a2d93f | 452 | py | Python | cookies/ex2/main.py | acandreani/ads_web_exercicios | a97ee7ebd0dba9e308b8e2d2318e577903f83f72 | [
"MIT"
] | 1 | 2019-03-13T14:33:28.000Z | 2019-03-13T14:33:28.000Z | cookies/ex2/main.py | acandreani/ads_web_exercicios | a97ee7ebd0dba9e308b8e2d2318e577903f83f72 | [
"MIT"
] | 1 | 2021-06-23T20:56:49.000Z | 2021-06-23T20:56:49.000Z | cookies/ex2/main.py | acandreani/ads_web_exercicios | a97ee7ebd0dba9e308b8e2d2318e577903f83f72 | [
"MIT"
] | 1 | 2019-04-24T13:10:58.000Z | 2019-04-24T13:10:58.000Z | from flask import Flask, render_template, request, session,redirect, url_for, escape, request, make_response
app = Flask(__name__)
# configure a chave secreta
app.secret_key = "segredo"
@app.route('/')
def index():
    """Serve the landing page at the site root."""
    return render_template("index.html")
# use cookies.get(key) instead of cookies[key] to not get a
# KeyError if the cookie is missing.
if __name__== "__main__":
app.run(host="0.0.0.0",debug= True)
| 22.6 | 110 | 0.679204 | from flask import Flask, render_template, request, session,redirect, url_for, escape, request, make_response
app = Flask(__name__)
app.secret_key = "segredo"
@app.route('/')
def index():
return render_template("index.html")
if __name__== "__main__":
app.run(host="0.0.0.0",debug= True)
| true | true |
f71c0215c62089a43b38c7a560870eff98f3266c | 540 | py | Python | Exercicios-Python/exercicios-curso-em-video/d004.py | PedroGoes16/Estudos | 142a697a1d375590bb76847a74ed2b8f9fa44a9d | [
"MIT"
] | null | null | null | Exercicios-Python/exercicios-curso-em-video/d004.py | PedroGoes16/Estudos | 142a697a1d375590bb76847a74ed2b8f9fa44a9d | [
"MIT"
] | null | null | null | Exercicios-Python/exercicios-curso-em-video/d004.py | PedroGoes16/Estudos | 142a697a1d375590bb76847a74ed2b8f9fa44a9d | [
"MIT"
] | null | null | null | n = input('Digite algo: ')
print('É composto por número e letras? ',n.isalnum())
print('É composto somente por letras maiúsculas? ',n.isupper())
print('É composto somente por letras? ',n.isalpha())
print('É composto somente por números? ',n.isnumeric())
print('É um número decimal? ',n.isdecimal())
print('É composto somente por dígitos? ',n.isdigit())
print('É composto somente por letras minúsculas? ',n.islower())
print('É imprimível? ',n.isprintable())
print('É somente um espaço vazio? ',n.isspace())
print('É um título? ',n.istitle()) | 49.090909 | 63 | 0.709259 | n = input('Digite algo: ')
print('É composto por número e letras? ',n.isalnum())
print('É composto somente por letras maiúsculas? ',n.isupper())
print('É composto somente por letras? ',n.isalpha())
print('É composto somente por números? ',n.isnumeric())
print('É um número decimal? ',n.isdecimal())
print('É composto somente por dígitos? ',n.isdigit())
print('É composto somente por letras minúsculas? ',n.islower())
print('É imprimível? ',n.isprintable())
print('É somente um espaço vazio? ',n.isspace())
print('É um título? ',n.istitle()) | true | true |
f71c027cc59d99ded5c1d7d4bb2ac9fc391c9882 | 9,319 | py | Python | squares.py | IAmUnStTV/Tetris-Python | 4a676b6f72ceabce796592611f2541665e4010be | [
"Apache-2.0"
] | 16 | 2019-03-20T12:33:53.000Z | 2021-10-16T12:13:39.000Z | squares.py | IAmUnStTV/Tetris-Python | 4a676b6f72ceabce796592611f2541665e4010be | [
"Apache-2.0"
] | 2 | 2019-04-01T14:07:07.000Z | 2021-07-15T14:08:58.000Z | squares.py | IAmUnStTV/Tetris-Python | 4a676b6f72ceabce796592611f2541665e4010be | [
"Apache-2.0"
] | 12 | 2019-03-30T11:32:30.000Z | 2021-09-15T02:49:00.000Z | from random import randrange
from pygame import Rect, draw
from clock import Clock
class Squares:
"""method for malipulating squares in the game"""
def __init__(self, st, status, screen):
self.st = st
self.status = status
self.screen = screen
self.empty_line = ['none' for i in range(st.square_num_x)]
self.squares = [self.empty_line.copy() for i in range(st.square_num_y)]
self.new_sq(self)
self.clock = Clock(st)
# draw all squares
def draw_squares(self):
self.screen.fill(self.st.space_color)
self.draw_tip(self)
self.draw_exist_sq(self)
self.draw_curr_sq(self)
# update squares' information
def update(self):
updated = False # for update screen
# vertical drop, straight drop
if self.status.straight_drop and self.clock.is_time_to_straight_drop():
updated = True
self.drop_straight(self)
self.clock.update_straight_drop()
# vertical drop, force drop
elif self.clock.is_time_to_drop():
updated = True
self.drop(self)
self.clock.update_drop()
# vertical drop, quick drop
elif self.status.down and self.clock.is_time_to_quick_drop():
updated = True
self.drop(self)
self.clock.update_quick_drop()
# rotation
if self.status.rotate and self.clock.is_time_to_rotate():
updated = True
self.rotate(self)
self.clock.update_rotate()
# horizontal move
if self.status.right:
updated = True
if self.clock.is_time_to_move() or self.clock.is_time_to_quick_right():
self.right(self)
self.clock.update_move()
if self.status.left:
updated = True
if self.clock.is_time_to_move() or self.clock.is_time_to_quick_left():
self.left(self)
self.clock.update_move()
# crash detection
if self.should_stop(self):
updated = True
self.stop(self)
return updated
# renew current square
@staticmethod
def new_sq(self):
self.curr_sq = self.st.new.copy()
shape = self.get_shape(self)
self.origin_shape = shape['pos']
self.curr_shape = shape['pos']
self.curr_color = shape['color']
self.rotate_limit = shape['rotate']
self.rotate_curr = 1
# if new squares are crashed, game over.
if not self.valid(self, self.curr_sq, self.curr_shape):
self.status.game_status = self.status.GAMEOVER
# return a random shape dictionary
@staticmethod
def get_shape(self):
shape_index = randrange(0, self.st.shape_num)
return self.st.shapes[shape_index].copy()
@staticmethod
def drop_straight(self):
while not self.should_stop(self):
self.curr_sq[0] += 1
@staticmethod
def drop(self):
new_sq = self.curr_sq.copy()
new_sq[0] += 1
if self.valid(self, new_sq, self.curr_shape):
self.curr_sq = new_sq
@staticmethod
def rotate(self):
new_shape = self.get_rotated_shape(self)
# regular check
if self.valid(self, self.curr_sq, new_shape):
self.curr_shape = new_shape
# move horizontally if not valid
else:
tolerance = 2
for i in range(tolerance):
# left
new_sq_left = self.curr_sq.copy()
new_sq_left[1] -= 1
if self.valid(self, new_sq_left, new_shape):
self.curr_sq = new_sq_left
self.curr_shape = new_shape
return
# right
new_sq_right = self.curr_sq.copy()
new_sq_right[1] += 1
if self.valid(self, new_sq_right, new_shape):
self.curr_sq = new_sq_right
self.curr_shape = new_shape
return
@staticmethod
def get_rotated_shape(self):
# rotation limit must not exceed, if exceed, reset it
if self.rotate_curr >= self.rotate_limit:
self.rotate_curr = 1
new_shape = self.origin_shape
else:
self.rotate_curr += 1
new_shape = []
for sq in self.curr_shape:
new_shape.append([sq[1], -sq[0]])
return new_shape
@staticmethod
def right(self):
new_sq = self.curr_sq.copy()
new_sq[1] += 1
if self.valid(self, new_sq, self.curr_shape):
self.curr_sq = new_sq
@staticmethod
def left(self):
new_sq = self.curr_sq.copy()
new_sq[1] -= 1
if self.valid(self, new_sq, self.curr_shape):
self.curr_sq = new_sq
@staticmethod
def stop(self):
# wait for a moment before stop, give player time to adjust
if not self.clock.is_time_to_stop():
self.clock.update_should_stop(True)
return
else:
self.clock.update_should_stop(None)
self.clock.update_stop()
# copy squares to map
for sq in self.curr_shape:
x = sq[1] + self.curr_sq[1]
y = sq[0] + self.curr_sq[0]
if y >= 0:
self.squares[y][x] = self.curr_color
x = self.curr_sq[1]
y = self.curr_sq[0]
if y >= 0:
self.squares[y][x] = self.curr_color
full_lines = self.clean_full_lines(self)
self.status.score += full_lines # add score
self.new_sq(self)
# delete full lines and insert empty lines at the front
@staticmethod
def clean_full_lines(self):
full_lines = 0
for index, line in enumerate(self.squares):
if line.count('none') == 0:
full_lines += 1
self.st.time_drop *= self.st.time_drop_adjust # adjust time
self.squares.pop(index)
self.squares.insert(0, self.empty_line.copy())
return full_lines
# validate current squares of shapes relative to center with with one drop vertically
@staticmethod
def should_stop(self):
# check shape squares
for sq in self.curr_shape:
x = sq[1] + self.curr_sq[1]
y = sq[0] + self.curr_sq[0] + 1
if y - 1 >= 0 and not self.valid_sq(self, [y, x]):
return True
# check center square
x = self.curr_sq[1]
y = self.curr_sq[0] + 1
return not (self.valid_sq(self, [y, x]))
# validate the given center square and shape squires relative to center square
@staticmethod
def valid(self, square, shape):
# check shape squares
for sq in shape:
x = sq[1] + square[1]
y = sq[0] + square[0]
if y >= 0 and not (self.valid_sq(self, [y, x])):
return False
# check center square
return self.valid_sq(self, square)
@staticmethod
def valid_sq(self, sq):
# check border
if sq[0] >= self.st.square_num_y or \
sq[1] >= self.st.square_num_x or \
sq[1] < 0:
return False
# check crash
return self.squares[sq[0]][sq[1]] == 'none'
@staticmethod
def draw_exist_sq(self):
for y, row in enumerate(self.squares):
for x, square in enumerate(row):
color = self.st.colors[self.squares[y][x]]
self.draw_square(self, y, x, color)
@staticmethod
def draw_tip(self):
# find the lowrest position
curr_sq = self.curr_sq.copy()
while not self.should_stop(self):
self.curr_sq[0] += 1
curr_sq, self.curr_sq = self.curr_sq, curr_sq
# draw their tips
color = self.st.colors['tip']
self.draw_square(self, curr_sq[0], curr_sq[1], color, True)
self.draw_square(self, curr_sq[0], curr_sq[1], self.st.colors['none'])
for y, x in self.curr_shape:
curr_y, curr_x = curr_sq[0], curr_sq[1]
self.draw_square(self, y + curr_y, x + curr_x, color, True)
self.draw_square(self, y + curr_y, x + curr_x, self.st.colors['none'])
@staticmethod
def draw_curr_sq(self):
# draw center
color = self.st.colors[self.curr_color]
self.draw_square(self, self.curr_sq[0], self.curr_sq[1], color)
# draw shapes
curr_y, curr_x = self.curr_sq[0], self.curr_sq[1]
for y, x in self.curr_shape:
self.draw_square(self, y + curr_y, x + curr_x, color)
# draw one single square with given information
@staticmethod
def draw_square(self, y, x, color, border=False):
x_pos = x * (self.st.square_space + self.st.square_length)
y_pos = y * (self.st.square_space + self.st.square_length)
length = self.st.square_length
# adding borders borders
if border:
y_pos -= self.st.square_space
x_pos -= self.st.square_space
length += 2 * self.st.square_space
rect = Rect(x_pos + self.st.square_space, y_pos + self.st.square_space, length, length)
draw.rect(self.screen, color, rect) | 35.166038 | 95 | 0.570877 | from random import randrange
from pygame import Rect, draw
from clock import Clock
class Squares:
def __init__(self, st, status, screen):
self.st = st
self.status = status
self.screen = screen
self.empty_line = ['none' for i in range(st.square_num_x)]
self.squares = [self.empty_line.copy() for i in range(st.square_num_y)]
self.new_sq(self)
self.clock = Clock(st)
def draw_squares(self):
self.screen.fill(self.st.space_color)
self.draw_tip(self)
self.draw_exist_sq(self)
self.draw_curr_sq(self)
def update(self):
updated = False # for update screen
# vertical drop, straight drop
if self.status.straight_drop and self.clock.is_time_to_straight_drop():
updated = True
self.drop_straight(self)
self.clock.update_straight_drop()
# vertical drop, force drop
elif self.clock.is_time_to_drop():
updated = True
self.drop(self)
self.clock.update_drop()
# vertical drop, quick drop
elif self.status.down and self.clock.is_time_to_quick_drop():
updated = True
self.drop(self)
self.clock.update_quick_drop()
# rotation
if self.status.rotate and self.clock.is_time_to_rotate():
updated = True
self.rotate(self)
self.clock.update_rotate()
# horizontal move
if self.status.right:
updated = True
if self.clock.is_time_to_move() or self.clock.is_time_to_quick_right():
self.right(self)
self.clock.update_move()
if self.status.left:
updated = True
if self.clock.is_time_to_move() or self.clock.is_time_to_quick_left():
self.left(self)
self.clock.update_move()
# crash detection
if self.should_stop(self):
updated = True
self.stop(self)
return updated
# renew current square
@staticmethod
def new_sq(self):
self.curr_sq = self.st.new.copy()
shape = self.get_shape(self)
self.origin_shape = shape['pos']
self.curr_shape = shape['pos']
self.curr_color = shape['color']
self.rotate_limit = shape['rotate']
self.rotate_curr = 1
# if new squares are crashed, game over.
if not self.valid(self, self.curr_sq, self.curr_shape):
self.status.game_status = self.status.GAMEOVER
# return a random shape dictionary
@staticmethod
def get_shape(self):
shape_index = randrange(0, self.st.shape_num)
return self.st.shapes[shape_index].copy()
@staticmethod
def drop_straight(self):
while not self.should_stop(self):
self.curr_sq[0] += 1
@staticmethod
def drop(self):
new_sq = self.curr_sq.copy()
new_sq[0] += 1
if self.valid(self, new_sq, self.curr_shape):
self.curr_sq = new_sq
@staticmethod
def rotate(self):
new_shape = self.get_rotated_shape(self)
# regular check
if self.valid(self, self.curr_sq, new_shape):
self.curr_shape = new_shape
# move horizontally if not valid
else:
tolerance = 2
for i in range(tolerance):
# left
new_sq_left = self.curr_sq.copy()
new_sq_left[1] -= 1
if self.valid(self, new_sq_left, new_shape):
self.curr_sq = new_sq_left
self.curr_shape = new_shape
return
# right
new_sq_right = self.curr_sq.copy()
new_sq_right[1] += 1
if self.valid(self, new_sq_right, new_shape):
self.curr_sq = new_sq_right
self.curr_shape = new_shape
return
@staticmethod
def get_rotated_shape(self):
# rotation limit must not exceed, if exceed, reset it
if self.rotate_curr >= self.rotate_limit:
self.rotate_curr = 1
new_shape = self.origin_shape
else:
self.rotate_curr += 1
new_shape = []
for sq in self.curr_shape:
new_shape.append([sq[1], -sq[0]])
return new_shape
@staticmethod
def right(self):
new_sq = self.curr_sq.copy()
new_sq[1] += 1
if self.valid(self, new_sq, self.curr_shape):
self.curr_sq = new_sq
@staticmethod
def left(self):
new_sq = self.curr_sq.copy()
new_sq[1] -= 1
if self.valid(self, new_sq, self.curr_shape):
self.curr_sq = new_sq
@staticmethod
def stop(self):
# wait for a moment before stop, give player time to adjust
if not self.clock.is_time_to_stop():
self.clock.update_should_stop(True)
return
else:
self.clock.update_should_stop(None)
self.clock.update_stop()
# copy squares to map
for sq in self.curr_shape:
x = sq[1] + self.curr_sq[1]
y = sq[0] + self.curr_sq[0]
if y >= 0:
self.squares[y][x] = self.curr_color
x = self.curr_sq[1]
y = self.curr_sq[0]
if y >= 0:
self.squares[y][x] = self.curr_color
full_lines = self.clean_full_lines(self)
self.status.score += full_lines # add score
self.new_sq(self)
# delete full lines and insert empty lines at the front
@staticmethod
def clean_full_lines(self):
full_lines = 0
for index, line in enumerate(self.squares):
if line.count('none') == 0:
full_lines += 1
self.st.time_drop *= self.st.time_drop_adjust # adjust time
self.squares.pop(index)
self.squares.insert(0, self.empty_line.copy())
return full_lines
# validate current squares of shapes relative to center with with one drop vertically
@staticmethod
def should_stop(self):
# check shape squares
for sq in self.curr_shape:
x = sq[1] + self.curr_sq[1]
y = sq[0] + self.curr_sq[0] + 1
if y - 1 >= 0 and not self.valid_sq(self, [y, x]):
return True
# check center square
x = self.curr_sq[1]
y = self.curr_sq[0] + 1
return not (self.valid_sq(self, [y, x]))
# validate the given center square and shape squires relative to center square
@staticmethod
def valid(self, square, shape):
# check shape squares
for sq in shape:
x = sq[1] + square[1]
y = sq[0] + square[0]
if y >= 0 and not (self.valid_sq(self, [y, x])):
return False
# check center square
return self.valid_sq(self, square)
@staticmethod
def valid_sq(self, sq):
# check border
if sq[0] >= self.st.square_num_y or \
sq[1] >= self.st.square_num_x or \
sq[1] < 0:
return False
# check crash
return self.squares[sq[0]][sq[1]] == 'none'
@staticmethod
def draw_exist_sq(self):
for y, row in enumerate(self.squares):
for x, square in enumerate(row):
color = self.st.colors[self.squares[y][x]]
self.draw_square(self, y, x, color)
@staticmethod
def draw_tip(self):
# find the lowrest position
curr_sq = self.curr_sq.copy()
while not self.should_stop(self):
self.curr_sq[0] += 1
curr_sq, self.curr_sq = self.curr_sq, curr_sq
# draw their tips
color = self.st.colors['tip']
self.draw_square(self, curr_sq[0], curr_sq[1], color, True)
self.draw_square(self, curr_sq[0], curr_sq[1], self.st.colors['none'])
for y, x in self.curr_shape:
curr_y, curr_x = curr_sq[0], curr_sq[1]
self.draw_square(self, y + curr_y, x + curr_x, color, True)
self.draw_square(self, y + curr_y, x + curr_x, self.st.colors['none'])
@staticmethod
def draw_curr_sq(self):
# draw center
color = self.st.colors[self.curr_color]
self.draw_square(self, self.curr_sq[0], self.curr_sq[1], color)
# draw shapes
curr_y, curr_x = self.curr_sq[0], self.curr_sq[1]
for y, x in self.curr_shape:
self.draw_square(self, y + curr_y, x + curr_x, color)
# draw one single square with given information
@staticmethod
def draw_square(self, y, x, color, border=False):
x_pos = x * (self.st.square_space + self.st.square_length)
y_pos = y * (self.st.square_space + self.st.square_length)
length = self.st.square_length
# adding borders borders
if border:
y_pos -= self.st.square_space
x_pos -= self.st.square_space
length += 2 * self.st.square_space
rect = Rect(x_pos + self.st.square_space, y_pos + self.st.square_space, length, length)
draw.rect(self.screen, color, rect) | true | true |
f71c030abc2d29b0f256a337f0e78e71b90e4000 | 3,839 | py | Python | spirit/topic/views.py | BinaryTree0/fer3 | 85c3bbf2f328e69ad4d7c01b6e2c8d4ef1d9e0a3 | [
"MIT"
] | null | null | null | spirit/topic/views.py | BinaryTree0/fer3 | 85c3bbf2f328e69ad4d7c01b6e2c8d4ef1d9e0a3 | [
"MIT"
] | 5 | 2021-06-08T21:03:58.000Z | 2022-03-12T00:18:43.000Z | spirit/topic/views.py | BinaryTree0/fer3 | 85c3bbf2f328e69ad4d7c01b6e2c8d4ef1d9e0a3 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
from django.contrib.auth.decorators import login_required
from django.shortcuts import render, redirect, get_object_or_404
from django.http import HttpResponsePermanentRedirect
from djconfig import config
from ..core.utils.views import is_post, post_data
from ..core.utils.paginator import paginate, yt_paginate
from ..core.utils.ratelimit.decorators import ratelimit
from ..category.models import Category
from ..comment.models import MOVED
from ..comment.forms import CommentForm
from ..comment.utils import comment_posted
from ..comment.models import Comment
from .models import Topic
from .forms import TopicForm
from . import utils
@login_required
@ratelimit(rate='1/10s')
def publish(request, category_id=None):
    """Create a new topic together with its first comment.

    GET renders the empty topic/comment forms; POST validates both forms
    and, unless the rate limit was hit, saves the topic, saves the comment,
    fires the comment-posted notifications, and redirects to the new topic.

    :param category_id: optional category to pre-select; raises 404 when it
        does not reference a visible category.
    """
    if category_id:
        # Validate the category up front so an invalid id 404s immediately.
        get_object_or_404(
            Category.objects.visible(),
            pk=category_id)
    user = request.user
    form = TopicForm(
        user=user,
        data=post_data(request),
        initial={'category': category_id})
    cform = CommentForm(
        user=user,
        data=post_data(request))
    if (is_post(request) and
            all([form.is_valid(), cform.is_valid()]) and
            not request.is_limited()):
        # Duplicate-submission guard: an unchanged post hash means this
        # exact topic was just posted, so redirect without saving again.
        if not user.st.update_post_hash(form.get_topic_hash()):
            return redirect(
                request.POST.get('next', None) or
                form.get_category().get_absolute_url())
        # wrap in transaction.atomic?
        topic = form.save()
        cform.topic = topic
        comment = cform.save()
        comment_posted(comment=comment, mentions=cform.mentions)
        return redirect(topic.get_absolute_url())
    return render(
        request=request,
        template_name='spirit/topic/publish.html',
        context={'form': form, 'cform': cform})
@login_required
def update(request, pk):
    """Edit an existing topic the current user may update.

    When the edit moves the topic to another category, a MOVED moderation
    comment is recorded in the topic.
    """
    topic = Topic.objects.for_update_or_404(pk, request.user)
    category_id = topic.category_id  # remember the category before the edit
    form = TopicForm(
        user=request.user,
        data=post_data(request),
        instance=topic)
    if is_post(request) and form.is_valid():
        topic = form.save()
        # Record a moderation entry when the topic changed category.
        if topic.category_id != category_id:
            Comment.create_moderation_action(
                user=request.user, topic=topic, action=MOVED)
        return redirect(request.POST.get('next', topic.get_absolute_url()))
    return render(
        request=request,
        template_name='spirit/topic/update.html',
        context={'form': form})
def detail(request, pk, slug):
    """Show a topic and one page of its comments.

    Permanently redirects to the canonical URL when the slug does not
    match, and records the view for read tracking.
    """
    topic = Topic.objects.get_public_or_404(pk, request.user)
    if topic.slug != slug:
        # Canonicalize the URL (301) when an outdated slug was used.
        return HttpResponsePermanentRedirect(topic.get_absolute_url())
    utils.topic_viewed(request=request, topic=topic)
    comments = (
        Comment.objects
        .for_topic(topic=topic)
        .with_likes(user=request.user)
        .with_polls(user=request.user)
        .order_by('date'))
    comments = paginate(
        comments,
        per_page=config.comments_per_page,
        page_number=request.GET.get('page', 1))
    return render(
        request=request,
        template_name='spirit/topic/detail.html',
        context={
            'topic': topic,
            'comments': comments})
def index_active(request):
    """Render the "active topics" index.

    Lists visible top-level categories plus globally visible topics,
    pinned first, then ordered by most recent activity, paginated.
    """
    visible_categories = Category.objects.visible().parents()
    active_topics = (
        Topic.objects.visible()
        .global_()
        .with_bookmarks(user=request.user)
        .order_by('-is_globally_pinned', '-last_active')
        .select_related('category'))
    page_of_topics = yt_paginate(
        active_topics,
        per_page=config.topics_per_page,
        page_number=request.GET.get('page', 1))
    context = {'categories': visible_categories, 'topics': page_of_topics}
    return render(
        request=request,
        template_name='spirit/topic/active.html',
        context=context)
| 29.75969 | 75 | 0.64496 |
from django.contrib.auth.decorators import login_required
from django.shortcuts import render, redirect, get_object_or_404
from django.http import HttpResponsePermanentRedirect
from djconfig import config
from ..core.utils.views import is_post, post_data
from ..core.utils.paginator import paginate, yt_paginate
from ..core.utils.ratelimit.decorators import ratelimit
from ..category.models import Category
from ..comment.models import MOVED
from ..comment.forms import CommentForm
from ..comment.utils import comment_posted
from ..comment.models import Comment
from .models import Topic
from .forms import TopicForm
from . import utils
@login_required
@ratelimit(rate='1/10s')
def publish(request, category_id=None):
if category_id:
get_object_or_404(
Category.objects.visible(),
pk=category_id)
user = request.user
form = TopicForm(
user=user,
data=post_data(request),
initial={'category': category_id})
cform = CommentForm(
user=user,
data=post_data(request))
if (is_post(request) and
all([form.is_valid(), cform.is_valid()]) and
not request.is_limited()):
if not user.st.update_post_hash(form.get_topic_hash()):
return redirect(
request.POST.get('next', None) or
form.get_category().get_absolute_url())
topic = form.save()
cform.topic = topic
comment = cform.save()
comment_posted(comment=comment, mentions=cform.mentions)
return redirect(topic.get_absolute_url())
return render(
request=request,
template_name='spirit/topic/publish.html',
context={'form': form, 'cform': cform})
@login_required
def update(request, pk):
topic = Topic.objects.for_update_or_404(pk, request.user)
category_id = topic.category_id
form = TopicForm(
user=request.user,
data=post_data(request),
instance=topic)
if is_post(request) and form.is_valid():
topic = form.save()
if topic.category_id != category_id:
Comment.create_moderation_action(
user=request.user, topic=topic, action=MOVED)
return redirect(request.POST.get('next', topic.get_absolute_url()))
return render(
request=request,
template_name='spirit/topic/update.html',
context={'form': form})
def detail(request, pk, slug):
topic = Topic.objects.get_public_or_404(pk, request.user)
if topic.slug != slug:
return HttpResponsePermanentRedirect(topic.get_absolute_url())
utils.topic_viewed(request=request, topic=topic)
comments = (
Comment.objects
.for_topic(topic=topic)
.with_likes(user=request.user)
.with_polls(user=request.user)
.order_by('date'))
comments = paginate(
comments,
per_page=config.comments_per_page,
page_number=request.GET.get('page', 1))
return render(
request=request,
template_name='spirit/topic/detail.html',
context={
'topic': topic,
'comments': comments})
def index_active(request):
categories = (
Category.objects
.visible()
.parents())
topics = (
Topic.objects
.visible()
.global_()
.with_bookmarks(user=request.user)
.order_by('-is_globally_pinned', '-last_active')
.select_related('category'))
topics = yt_paginate(
topics,
per_page=config.topics_per_page,
page_number=request.GET.get('page', 1))
return render(
request=request,
template_name='spirit/topic/active.html',
context={
'categories': categories,
'topics': topics})
| true | true |
f71c03b58cbe884de8ef7d23450e58f05962f59e | 1,242 | py | Python | src/alocacao/camada_servicos/unit_of_work.py | ralphribeiro/APWP-T2 | 1ed5552a32ae9320eadbbd0489c2082a6f8750a8 | [
"MIT"
] | null | null | null | src/alocacao/camada_servicos/unit_of_work.py | ralphribeiro/APWP-T2 | 1ed5552a32ae9320eadbbd0489c2082a6f8750a8 | [
"MIT"
] | null | null | null | src/alocacao/camada_servicos/unit_of_work.py | ralphribeiro/APWP-T2 | 1ed5552a32ae9320eadbbd0489c2082a6f8750a8 | [
"MIT"
] | null | null | null | from __future__ import annotations
import abc
from alocacao.adapters import repository
from alocacao.config import DEFAULT_SESSION_FACTORY
class AbstractUOW(abc.ABC):
    """Abstract unit of work: a transactional boundary around the produtos repository.

    Concrete subclasses supply ``_commit`` and ``rollback``; the base class
    provides the context-manager protocol (roll back on exit unless
    ``commit()`` was called) and event draining for seen aggregates.
    """

    produtos: repository.AbstractRepository  # set by concrete subclasses

    def __enter__(self) -> AbstractUOW:
        """Enter the transactional scope and return the unit of work itself."""
        return self

    def __exit__(self, *args):
        """Always roll back on exit; committed work is unaffected."""
        self.rollback()

    def commit(self):
        """Persist pending changes via the subclass hook."""
        self._commit()

    def collect_new_messages(self):
        """Drain and yield, in order, the pending events of every seen product."""
        for tracked in self.produtos.seen:
            pending = tracked.eventos
            while pending:
                yield pending.pop(0)

    @abc.abstractmethod
    def _commit(self):
        """Subclass hook that actually commits."""

    @abc.abstractmethod
    def rollback(self):
        """Discard pending changes."""
class SQLAlchemyUOW(AbstractUOW):
    """Unit of work backed by a SQLAlchemy session.

    Entering the context opens a fresh session and wraps the repository in
    a tracking decorator so seen aggregates can later emit their events.
    """
    def __init__(self, session_factory=DEFAULT_SESSION_FACTORY):
        self.session_factory = session_factory
    def __enter__(self):
        # One new session per unit of work; closed in __exit__.
        self.session = self.session_factory()
        self.produtos = repository.TrackingRepository(
            repository.SQLAlchemyRepository(self.session)
        )
        return super().__enter__()
    def __exit__(self, *args):
        # Base class rolls back anything not explicitly committed.
        super().__exit__(*args)
        self.session.close()
    def _commit(self):
        self.session.commit()
    def rollback(self):
        self.session.rollback()
| 23 | 64 | 0.661031 | from __future__ import annotations
import abc
from alocacao.adapters import repository
from alocacao.config import DEFAULT_SESSION_FACTORY
class AbstractUOW(abc.ABC):
produtos: repository.AbstractRepository
def __enter__(self) -> AbstractUOW:
return self
def __exit__(self, *args):
self.rollback()
def commit(self):
self._commit()
def collect_new_messages(self):
for produto in self.produtos.seen:
while produto.eventos:
yield produto.eventos.pop(0)
@abc.abstractmethod
def _commit(self):
pass
@abc.abstractmethod
def rollback(self):
pass
class SQLAlchemyUOW(AbstractUOW):
def __init__(self, session_factory=DEFAULT_SESSION_FACTORY):
self.session_factory = session_factory
def __enter__(self):
self.session = self.session_factory()
self.produtos = repository.TrackingRepository(
repository.SQLAlchemyRepository(self.session)
)
return super().__enter__()
def __exit__(self, *args):
super().__exit__(*args)
self.session.close()
def _commit(self):
self.session.commit()
def rollback(self):
self.session.rollback()
| true | true |
f71c040eaeb86dff7c316dbfe8fc57cc5b43e211 | 255 | py | Python | class9/e4/mytest/whatever.py | ktbyers/pynet_wantonik | 601bce26142b6741202c2bdafb9e0d0cec1b3c78 | [
"Apache-2.0"
] | 2 | 2017-05-11T12:05:15.000Z | 2021-07-15T18:13:19.000Z | class9/e4/mytest/whatever.py | ktbyers/pynet_wantonik | 601bce26142b6741202c2bdafb9e0d0cec1b3c78 | [
"Apache-2.0"
] | null | null | null | class9/e4/mytest/whatever.py | ktbyers/pynet_wantonik | 601bce26142b6741202c2bdafb9e0d0cec1b3c78 | [
"Apache-2.0"
] | 1 | 2017-05-11T12:05:18.000Z | 2017-05-11T12:05:18.000Z | #!/usr/bin/env python
'''Excercise_4 - class9 - Reusable Code'''
def func3():
    """Print a simple demo statement (exercise 4, class 9)."""
    # print() with a single argument behaves identically on Python 2 and 3;
    # the original statement-form print was Python-2-only syntax and is a
    # SyntaxError under Python 3.
    print('Excercise_4 from class9 - whatever.py')


if __name__ == "__main__":
    print('This is main program from whatever.py file.')
| 25.5 | 55 | 0.678431 |
'''Excercise_4 - class9 - Reusable Code'''
def func3():
'''func3 to print simple statement'''
print 'Excercise_4 from class9 - whatever.py'
if __name__ == "__main__":
print 'This is main program from whatever.py file.'
| false | true |
f71c045146eab77490951be322d1c2d7585e636d | 1,143 | py | Python | ISMLnextGen/ipTest.py | Ravenclaw-OIer/ISML_auto_voter | 9c53bd87530697d374163f571186542c3fc9734b | [
"MIT"
] | 128 | 2020-11-16T09:28:17.000Z | 2022-03-14T10:38:52.000Z | ISMLnextGen/ipTest.py | Ravenclaw-OIer/ISML_auto_voter | 9c53bd87530697d374163f571186542c3fc9734b | [
"MIT"
] | 7 | 2020-11-27T14:45:19.000Z | 2022-02-15T09:47:12.000Z | ISMLnextGen/ipTest.py | Ravenclaw-OIer/ISML_auto_voter | 9c53bd87530697d374163f571186542c3fc9734b | [
"MIT"
] | 11 | 2020-12-11T12:24:38.000Z | 2022-02-20T12:42:31.000Z | #coding:utf-8
#访问这个服务器会获得一些'ip:端口'字符串。仅用于测试
import tornado.ioloop
import tornado.web
class MainHandler(tornado.web.RequestHandler):
    """Serves one fixed HTML page containing dummy 'ip:port' lines for tests."""

    @tornado.gen.coroutine
    def get(self, *args, **kwargs):
        # The response body is a constant HTML document; the <br>-separated
        # "192.168.1.1:NNNNN" entries are the "ip:port" strings a test
        # client is expected to scrape.
        self.write('''
<!DOCTYPE html><html>
<head><meta charset="utf-8" />
<title>html<br>标签换行符详细介绍</title></head>
<body bgcolor="burlywood">
<p>我是一个段落。
<br>我是一个段落。<br/>
我是一个段落。</p>
<p>
<br>192.168.1.1:99999\n<br/>
<br>192.168.1.1:91241\n<br/>
<br>192.168.1.1:91343\n<br/>
<br>192.168.1.1:94223\n<br/>
<br>192.168.1.1:97546\n<br/>
<br>192.168.1.1:92342\n<br/>
</p>
</body></html>
''')
# Single route: the root path is served by MainHandler.
app=tornado.web.Application([
    (r'/',MainHandler),
    ])
if __name__ == '__main__':
    # (Translated) "Visiting this server on :55556 returns some 'ip:port'
    # strings; for testing only."
    print('访问这个服务器:55556会获得一些“ip:端口”字符串。仅用于测试')
    app.listen(55556)
    tornado.ioloop.IOLoop.instance().start()
| 30.891892 | 59 | 0.451444 |
import tornado.ioloop
import tornado.web
class MainHandler(tornado.web.RequestHandler):
@tornado.gen.coroutine
def get(self, *args, **kwargs):
self.write('''
<!DOCTYPE html><html>
<head><meta charset="utf-8" />
<title>html<br>标签换行符详细介绍</title></head>
<body bgcolor="burlywood">
<p>我是一个段落。
<br>我是一个段落。<br/>
我是一个段落。</p>
<p>
<br>192.168.1.1:99999\n<br/>
<br>192.168.1.1:91241\n<br/>
<br>192.168.1.1:91343\n<br/>
<br>192.168.1.1:94223\n<br/>
<br>192.168.1.1:97546\n<br/>
<br>192.168.1.1:92342\n<br/>
</p>
</body></html>
''')
app=tornado.web.Application([
(r'/',MainHandler),
])
if __name__ == '__main__':
print('访问这个服务器:55556会获得一些“ip:端口”字符串。仅用于测试')
app.listen(55556)
tornado.ioloop.IOLoop.instance().start()
| true | true |
f71c04a43a1f6647a62748e5a3330dd9e4062b48 | 3,194 | py | Python | tweetconsumer.py | jvprosser/tweet2avro2kafka2kudu | 27eacea8a5361c94ae179d28bf455883c3803ecb | [
"Apache-2.0"
] | null | null | null | tweetconsumer.py | jvprosser/tweet2avro2kafka2kudu | 27eacea8a5361c94ae179d28bf455883c3803ecb | [
"Apache-2.0"
] | null | null | null | tweetconsumer.py | jvprosser/tweet2avro2kafka2kudu | 27eacea8a5361c94ae179d28bf455883c3803ecb | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
import threading, logging, time
import io
import avro.schema
import avro.io
import pprint
import kudu
from kudu.client import Partitioning
from kafka import KafkaConsumer
pp = pprint.PrettyPrinter(indent=4,width=80)
# Avro schema (parsed once in Consumer.run) describing one status-tweet
# record; field names match the Kudu table columns written below.
twitter_schema='''
{"namespace": "example.avro", "type": "record",
"name": "StatusTweet",
"fields": [
{"name": "tweet_id" , "type": "long"},
{"name": "followers_count", "type": "int"},
{"name": "statuses_count" , "type": "int"},
{"name": "id_str" , "type": "string"},
{"name": "friends_count" , "type": "int"},
{"name": "text" , "type": "string"},
{"name": "tweet_ts" , "type": "long"},
{"name": "screen_name" , "type": "string"}
]
}
'''
class Consumer(threading.Thread):
    """Daemon thread: consumes Avro-encoded tweets from Kafka and inserts
    each record into a Kudu table, flushing after every insert."""

    daemon = True

    def __init__(self, name, partition_list):
        threading.Thread.__init__(self)
        self.name = name
        # NOTE(review): stored but never used by run(); kept for callers.
        self.partitions = partition_list
        self.client = kudu.connect(host='ip-172-31-6-171', port=7051)
        # Open a table
        self.table = self.client.table('impala::DEFAULT.STATUS_TWEETS')
        # Create a new session so that we can apply write operations
        self.session = self.client.new_session()

    def run(self):
        consumer = KafkaConsumer(bootstrap_servers='ip-172-31-10-235:9092',
                                 auto_offset_reset='earliest', enable_auto_commit=True)
        consumer.subscribe(['twitterstream'])
        print("in run")  # parenthesized so the module also runs on Python 3
        # Parse the schema and build the reader ONCE: the original re-created
        # both on every message, which is pure per-message overhead.
        schema = avro.schema.parse(twitter_schema)
        reader = avro.io.DatumReader(schema)
        for message in consumer:
            bytes_reader = io.BytesIO(message.value)
            decoder = avro.io.BinaryDecoder(bytes_reader)
            data = reader.read(decoder)
            print("%s:%d:%d: key=%s %s" % (message.topic, message.partition,
                                           message.offset, message.key,
                                           data['text'][1:77]))
            op = self.table.new_insert({
                'id_str': data['id_str'],
                'tweet_ts': data['tweet_ts'],
                'tweet_id': data['tweet_id'],
                'followers_count': data['followers_count'],
                'statuses_count': data['statuses_count'],
                'friends_count': data['friends_count'],
                'text': data['text'],
                'screen_name': data['screen_name']})
            self.session.apply(op)
            # Flush write operations; if failures occur, print them.
            try:
                self.session.flush()
            except kudu.KuduBadStatus:
                print(self.session.get_pending_errors())
def main():
    """Start one consumer thread and keep the process alive briefly."""
    # NOTE(review): (3) is just the int 3, not a tuple — if a tuple of
    # partitions was intended it should be (3,); confirm before changing
    # (self.partitions is currently unused by run()).
    threads = [
        Consumer("three",(3))
    ]
    for t in threads:
        t.start()
    # Consumer threads are daemons, so the process exits when this sleep ends.
    time.sleep(100)
if __name__ == "__main__":
    # %(msecs)s is appended after the dot of %(asctime)s to fake sub-second
    # timestamps in the log prefix.
    logging.basicConfig(
        format='%(asctime)s.%(msecs)s:%(name)s:%(thread)d:%(levelname)s:%(process)d:%(message)s',
        level=logging.INFO
        )
    main()
| 31.623762 | 183 | 0.530056 |
import threading, logging, time
import io
import avro.schema
import avro.io
import pprint
import kudu
from kudu.client import Partitioning
from kafka import KafkaConsumer
pp = pprint.PrettyPrinter(indent=4,width=80)
twitter_schema='''
{"namespace": "example.avro", "type": "record",
"name": "StatusTweet",
"fields": [
{"name": "tweet_id" , "type": "long"},
{"name": "followers_count", "type": "int"},
{"name": "statuses_count" , "type": "int"},
{"name": "id_str" , "type": "string"},
{"name": "friends_count" , "type": "int"},
{"name": "text" , "type": "string"},
{"name": "tweet_ts" , "type": "long"},
{"name": "screen_name" , "type": "string"}
]
}
'''
class Consumer(threading.Thread):
daemon = True
def __init__(self, name, partition_list ):
threading.Thread.__init__(self)
self.name = name
self.partitions = partition_list
self.client = kudu.connect(host='ip-172-31-6-171', port=7051)
self.table = self.client.table('impala::DEFAULT.STATUS_TWEETS')
self.session = self.client.new_session()
def run(self):
consumer = KafkaConsumer(bootstrap_servers='ip-172-31-10-235:9092',
auto_offset_reset='earliest', enable_auto_commit=True)
consumer.subscribe(['twitterstream'])
print "in run"
for message in consumer:
schema = avro.schema.parse(twitter_schema)
bytes_reader = io.BytesIO(message.value)
decoder = avro.io.BinaryDecoder(bytes_reader)
reader = avro.io.DatumReader(schema)
data = reader.read(decoder)
print ("%s:%d:%d: key=%s %s" % (message.topic, message.partition,message.offset, message.key, data['text'][1:77] ))
op = self.table.new_insert({
'id_str' : data['id_str'] ,
'tweet_ts' : data['tweet_ts'] ,
'tweet_id' : data['tweet_id'] ,
'followers_count' : data['followers_count'] ,
'statuses_count' : data['statuses_count'] ,
'friends_count' : data['friends_count'] ,
'text' : data['text'] ,
'screen_name' : data['screen_name'] })
self.session.apply(op)
try:
self.session.flush()
except kudu.KuduBadStatus as e:
print(self.session.get_pending_errors())
def main():
threads = [
Consumer("three",(3))
]
for t in threads:
t.start()
time.sleep(100)
if __name__ == "__main__":
logging.basicConfig(
format='%(asctime)s.%(msecs)s:%(name)s:%(thread)d:%(levelname)s:%(process)d:%(message)s',
level=logging.INFO
)
main()
| false | true |
f71c05e483d33dc4be29042dc38a37ccadce4386 | 6,756 | py | Python | Sensors/softskin.py | zhaocy14/SmartWalker | b025a7b4a2b305838a22fe4e6116ddb951c4d7bf | [
"MIT"
] | 2 | 2021-11-13T14:16:06.000Z | 2022-01-12T06:07:32.000Z | Sensors/softskin.py | zhaocy14/SmartWalker | b025a7b4a2b305838a22fe4e6116ddb951c4d7bf | [
"MIT"
] | null | null | null | Sensors/softskin.py | zhaocy14/SmartWalker | b025a7b4a2b305838a22fe4e6116ddb951c4d7bf | [
"MIT"
] | 3 | 2021-08-30T04:40:39.000Z | 2022-01-09T11:34:04.000Z | import serial
import serial.tools.list_ports
import numpy as np
import math
import threading
import re
import os
import sys
import time
import matplotlib.pyplot as plt
pwd = os.path.abspath(os.path.abspath(__file__))
father_path = os.path.abspath(os.path.dirname(pwd) + os.path.sep + "..")
sys.path.append(father_path)
data_path = os.path.abspath(
os.path.dirname(os.path.abspath(__file__)) + os.path.sep + ".." +
os.path.sep + "data")
def print_serial(port):
    """Dump a human-readable summary of one serial port to stdout.

    Manufacturer/product/interface are optional on ListPortInfo and are
    printed only when present; a trailing blank line separates entries.
    """
    print("---------------[ %s ]---------------" % port.name)
    print("Path: %s" % port.device)
    print("Descript: %s" % port.description)
    print("HWID: %s" % port.hwid)
    optional_fields = (
        ("Manufacture: %s", port.manufacturer),
        ("Product: %s", port.product),
        ("Interface: %s", port.interface),
    )
    for template, value in optional_fields:
        if value is not None:
            print(template % value)
    print()
def detect_serials(location="1-1.1:1.0", vid=0x10c4, pid=0xea60):
    """Scan serial ports, print each one, and return the device path of the
    first port whose USB location contains *location*.

    Returns None (after printing a notice) when no port matches.
    vid/pid are accepted for interface compatibility but not used here.
    """
    for port in serial.tools.list_ports.comports():
        print_serial(port)
        if location in port.location:
            return port.device
    print("Cannot find the target device: %s" % location)
    return None
class SoftSkin(object):
    """32-channel pressure-sensing "soft skin" read from a serial MCU.

    Frames arrive as '|'-separated integer strings; a start-up baseline is
    subtracted from every frame, and unlock() watches a sliding window of
    recent frames for a moderate pressure change.
    """

    def __init__(self, is_STM32: bool = True):
        # NOTE(review): is_STM32 is accepted but never used in this constructor.
        port_name = detect_serials("1-1.3:1.0")  # Arduino Mega 2560 ttyACM0
        baud_rate = 115200
        print(port_name, baud_rate)
        self.serial = serial.Serial(port_name, baud_rate, timeout=None)
        self.pwd = os.path.abspath(os.path.abspath(__file__))
        # NOTE(review): uses the module-level `pwd`, not self.pwd — confirm intent.
        self.father_path = os.path.abspath(os.path.dirname(pwd) + os.path.sep + "..")
        # NOTE(review): the same port is opened a second time here, leaking
        # (or failing to reopen) the first handle — one of the two
        # serial.Serial calls should likely be removed.
        self.serial = serial.Serial(port_name, baud_rate, timeout=None)
        self.raw_data = []  # latest parsed frame (port_num ints)
        self.base_data = []  # per-port baseline captured at start-up
        self.temp_data = []  # latest baseline-subtracted frame
        self.port_num = 32
        self.average_length = 10
        self.average_buffer = np.zeros((self.average_length, self.port_num))
        # detect abnormal signal: thresholds for the unlock() change test
        self.max_pressure = 0
        self.safe_change_rate = 10
        self.emergency_change_rate = 50
        self.detect_length = 10
        self.detect_buffer = np.zeros((self.detect_length, self.port_num))
        self.skin_unlock_event = threading.Event()
        self.skin_unlock_event.clear()
        self.build_base_line_data()
        pass

    def read_data(self, is_shown=1):
        """Read one frame from the serial port into self.raw_data.

        A frame is an 'SS'-prefixed, '|'-separated line; it is kept only when
        it contains exactly port_num values.  Errors are printed, not raised.
        """
        try:
            one_line_data = self.serial.readline().decode("utf-8")
            # print(one_line_data)
            one_line_data = one_line_data.strip('SS')
            one_line_data = one_line_data.strip('\n')
            one_line_data = one_line_data.strip('\r')
            one_line_data = one_line_data.split('|')
            # print(one_line_data)
            if is_shown == 1:
                print(one_line_data)
            if len(one_line_data) == self.port_num:
                # float() first tolerates values like "12.0"; int() truncates.
                one_line_data = list(map(float, one_line_data))
                one_line_data = list(map(int, one_line_data))
                self.raw_data = one_line_data
                # print(self.raw_data, type(self.raw_data), type(self.raw_data[0]))
        except BaseException as be:
            print("Data Error:", be)

    def build_base_line_data(self, initial_size=10):
        """Average *initial_size* frames into a per-port baseline (self.base_data).

        Original note, translated: "expired, no use — build a set of baseline
        values, detect outliers, take the average; not in use because the
        original signals are stable enough."  NOTE(review): __init__ still
        calls this method despite that note.
        """
        base_list = []
        for i in range(initial_size):
            self.read_data(0)
            if len(self.raw_data) == self.port_num:
                temp_raw_data = self.raw_data
                base_list += temp_raw_data
        mean_base_list = np.array(base_list).reshape([-1, self.port_num])
        # Column-wise mean computed via a ones-vector dot product.
        add_col = np.ones(mean_base_list.shape[0]).reshape([1, -1])
        mean_base_list = add_col.dot(mean_base_list) / mean_base_list.shape[0]
        self.base_data = mean_base_list.tolist()[0]
        # Truncate and subtract 1 — presumably a small margin under the mean
        # so resting readings stay non-negative; confirm against the sensor.
        self.base_data = list(map(lambda x: int(x) - 1, self.base_data))
        print("base line data: ", self.base_data)
        pass

    def read_and_record(self, record=False, show=False, plot=False, plot_num=30):
        """Loop forever reading frames; optionally log to file and/or live-plot.

        Per frame this updates self.temp_data, self.max_pressure and the
        sliding self.detect_buffer consumed by unlock().
        """
        file_path = data_path + os.path.sep + "Softskin.txt"
        plot_array = np.zeros((plot_num, self.port_num))
        if record:
            file = open(file_path, 'w')
        while True:
            try:
                # self.serial.flushInput()
                self.read_data(0)
                if len(self.raw_data) == len(self.base_data):
                    temp_data = np.array(self.raw_data) - np.array(self.base_data)
                    if show:
                        print(temp_data)
                        print(self.max_pressure)
                    if record:
                        # One line per frame: [timestamp, ch0, ch1, ...].
                        time_index = time.time()
                        write_data = temp_data.tolist()
                        write_data.insert(0, time_index)
                        file.write(str(write_data) + '\n')
                        file.flush()
                    self.temp_data = temp_data
                    self.max_pressure = self.temp_data.max()
                    # Shift the detection window left and append the new frame.
                    self.detect_buffer[0:-1, :] = self.detect_buffer[1:self.detect_length, :]
                    self.detect_buffer[-1, :] = np.array(self.temp_data)
                    if plot:
                        # plt.ion()
                        plot_array[0:plot_num - 1, :] = plot_array[1:plot_num, :]
                        plot_array[plot_num - 1, :] = np.array(temp_data)
                        plt.clf()
                        plt.xlabel('Time')
                        plt.ylabel('pressure')
                        plt.ylim((-10, 270))
                        plt.plot(range(0, plot_num), plot_array)
                        # plt.ioff()
                        # plt.show()
                        # plt.draw()
                        plt.pause(0.0000000001)
            except BaseException as be:
                print("Data Error:", be)

    def update_from_STM32(self, STM32_data: np.ndarray):
        """Inject a frame produced elsewhere (STM32 path) instead of serial."""
        try:
            self.raw_data = STM32_data
        except:
            pass

    def unlock(self):
        """Block until a moderate pressure change is seen, then return.

        A max per-port change between safe_change_rate and
        emergency_change_rate across the detection window counts as a
        deliberate press ("unlock"); polls every 0.1 s.
        """
        while True:
            change_rate = self.detect_buffer[-1, :] - self.detect_buffer[0, :]
            change_rate = change_rate.max()
            if self.safe_change_rate <= change_rate < self.emergency_change_rate:
                print("unlock!")
                break
            time.sleep(0.1)
if __name__ == '__main__':
    skin = SoftSkin()
    # skin.build_base_line_data()
    # Reader loop runs on a worker thread; the main thread blocks in unlock().
    thread_reading = threading.Thread(target=skin.read_and_record, args=())
    time.sleep(1)
    thread_reading.start()
    skin.unlock()
| 36.518919 | 93 | 0.558467 | import serial
import serial.tools.list_ports
import numpy as np
import math
import threading
import re
import os
import sys
import time
import matplotlib.pyplot as plt
pwd = os.path.abspath(os.path.abspath(__file__))
father_path = os.path.abspath(os.path.dirname(pwd) + os.path.sep + "..")
sys.path.append(father_path)
data_path = os.path.abspath(
os.path.dirname(os.path.abspath(__file__)) + os.path.sep + ".." +
os.path.sep + "data")
def print_serial(port):
print("---------------[ %s ]---------------" % port.name)
print("Path: %s" % port.device)
print("Descript: %s" % port.description)
print("HWID: %s" % port.hwid)
if not None == port.manufacturer:
print("Manufacture: %s" % port.manufacturer)
if not None == port.product:
print("Product: %s" % port.product)
if not None == port.interface:
print("Interface: %s" % port.interface)
print()
def detect_serials(location="1-1.1:1.0", vid=0x10c4, pid=0xea60):
ports = serial.tools.list_ports.comports()
for port in ports:
print_serial(port)
if port.location.__contains__(location):
port_path = port.device
return port_path
else:
print("Cannot find the target device: %s" % location)
return None
class SoftSkin(object):
def __init__(self, is_STM32: bool = True):
port_name = detect_serials("1-1.3:1.0")
baud_rate = 115200
print(port_name, baud_rate)
self.serial = serial.Serial(port_name, baud_rate, timeout=None)
self.pwd = os.path.abspath(os.path.abspath(__file__))
self.father_path = os.path.abspath(os.path.dirname(pwd) + os.path.sep + "..")
self.serial = serial.Serial(port_name, baud_rate, timeout=None)
self.raw_data = []
self.base_data = []
self.temp_data = []
self.port_num = 32
self.average_length = 10
self.average_buffer = np.zeros((self.average_length, self.port_num))
self.max_pressure = 0
self.safe_change_rate = 10
self.emergency_change_rate = 50
self.detect_length = 10
self.detect_buffer = np.zeros((self.detect_length, self.port_num))
self.skin_unlock_event = threading.Event()
self.skin_unlock_event.clear()
self.build_base_line_data()
pass
def read_data(self, is_shown=1):
try:
one_line_data = self.serial.readline().decode("utf-8")
one_line_data = one_line_data.strip('SS')
one_line_data = one_line_data.strip('\n')
one_line_data = one_line_data.strip('\r')
one_line_data = one_line_data.split('|')
if is_shown == 1:
print(one_line_data)
if len(one_line_data) == self.port_num:
one_line_data = list(map(float, one_line_data))
one_line_data = list(map(int, one_line_data))
self.raw_data = one_line_data
except BaseException as be:
print("Data Error:", be)
def build_base_line_data(self, initial_size=10):
base_list = []
for i in range(initial_size):
self.read_data(0)
if len(self.raw_data) == self.port_num:
temp_raw_data = self.raw_data
base_list += temp_raw_data
mean_base_list = np.array(base_list).reshape([-1, self.port_num])
add_col = np.ones(mean_base_list.shape[0]).reshape([1, -1])
mean_base_list = add_col.dot(mean_base_list) / mean_base_list.shape[0]
self.base_data = mean_base_list.tolist()[0]
self.base_data = list(map(lambda x: int(x) - 1, self.base_data))
print("base line data: ", self.base_data)
pass
def read_and_record(self, record=False, show=False, plot=False, plot_num=30):
file_path = data_path + os.path.sep + "Softskin.txt"
plot_array = np.zeros((plot_num, self.port_num))
if record:
file = open(file_path, 'w')
while True:
try:
self.read_data(0)
if len(self.raw_data) == len(self.base_data):
temp_data = np.array(self.raw_data) - np.array(self.base_data)
if show:
print(temp_data)
print(self.max_pressure)
if record:
time_index = time.time()
write_data = temp_data.tolist()
write_data.insert(0, time_index)
file.write(str(write_data) + '\n')
file.flush()
self.temp_data = temp_data
self.max_pressure = self.temp_data.max()
self.detect_buffer[0:-1, :] = self.detect_buffer[1:self.detect_length, :]
self.detect_buffer[-1, :] = np.array(self.temp_data)
if plot:
plot_array[0:plot_num - 1, :] = plot_array[1:plot_num, :]
plot_array[plot_num - 1, :] = np.array(temp_data)
plt.clf()
plt.xlabel('Time')
plt.ylabel('pressure')
plt.ylim((-10, 270))
plt.plot(range(0, plot_num), plot_array)
plt.pause(0.0000000001)
except BaseException as be:
print("Data Error:", be)
def update_from_STM32(self, STM32_data: np.ndarray):
try:
self.raw_data = STM32_data
except:
pass
def unlock(self):
while True:
change_rate = self.detect_buffer[-1, :] - self.detect_buffer[0, :]
change_rate = change_rate.max()
if self.safe_change_rate <= change_rate < self.emergency_change_rate:
print("unlock!")
break
time.sleep(0.1)
if __name__ == '__main__':
skin = SoftSkin()
thread_reading = threading.Thread(target=skin.read_and_record, args=())
time.sleep(1)
thread_reading.start()
skin.unlock()
| true | true |
f71c0620c464a57b3ebe69ac3d9aba39ae33da92 | 1,518 | py | Python | olass/models/base_query.py | ufbmi/olass-client | 2fc949d4d59959e7e3ba5ec737b20d8db856b54b | [
"MIT"
] | null | null | null | olass/models/base_query.py | ufbmi/olass-client | 2fc949d4d59959e7e3ba5ec737b20d8db856b54b | [
"MIT"
] | 1 | 2016-08-12T20:52:09.000Z | 2016-08-12T20:59:35.000Z | olass/models/base_query.py | ufbmi/olass-client | 2fc949d4d59959e7e3ba5ec737b20d8db856b54b | [
"MIT"
] | 1 | 2016-06-28T16:43:09.000Z | 2016-06-28T16:43:09.000Z | """
Goal: Provides paginate() function
"""
from sqlalchemy import orm
from olass.models.pagination import Pagination
class BaseQuery(orm.Query):
    """
    Query subclass adding Flask-SQLAlchemy-style pagination.
    @see: flask-sqlalchemy/flask_sqlalchemy/__init__.py
    """
    def paginate(self, page=None, per_page=None, error_out=True):
        """Returns ``per_page`` items from page ``page``.

        Unlike Flask-SQLAlchemy (which aborts with HTTP 404), this version
        raises a plain ``Exception`` when ``error_out`` is true and either
        ``page`` is less than 1 or no items exist on a page other than the
        first.  Passing ``error_out=False`` disables both checks.

        If ``page`` or ``per_page`` are ``None``, they default to 1 and 20
        respectively.

        Returns a :class:`Pagination` object.
        """
        if page is None:
            page = 1
        if per_page is None:
            per_page = 20
        if error_out and page < 1:
            raise Exception("Pagination error: page < 1")
        items = self.limit(per_page).offset(
            (page - 1) * per_page).all()
        if not items and page != 1 and error_out:
            raise Exception("Pagination error: no items and page != 1")
        # No need to count if we're on the first page and there are fewer
        # items than we expected.
        if page == 1 and len(items) < per_page:
            total = len(items)
        else:
            total = self.order_by(None).count()
        return Pagination(self, page, per_page, total, items)
| 30.36 | 79 | 0.597497 | from sqlalchemy import orm
from olass.models.pagination import Pagination
class BaseQuery(orm.Query):
def paginate(self, page=None, per_page=None, error_out=True):
if page is None:
page = 1
if per_page is None:
per_page = 20
if error_out and page < 1:
raise Exception("Pagination error: page < 1")
items = self.limit(per_page).offset(
(page - 1) * per_page).all()
if not items and page != 1 and error_out:
raise Exception("Pagination error: no items and page != 1")
# items than we expected.
if page == 1 and len(items) < per_page:
total = len(items)
else:
total = self.order_by(None).count()
return Pagination(self, page, per_page, total, items)
| true | true |
f71c06594a2607c810e62772c689e1182da44569 | 473 | py | Python | app/tasks/tasks.py | guomaoqiu/Celery_Flask_Demo | 7051361ca34c7e0d62678d8bd0e3c89403195d0f | [
"MIT"
] | 4 | 2018-04-10T10:06:03.000Z | 2021-02-06T04:48:50.000Z | app/tasks/tasks.py | guomaoqiu/Celery_Flask_Demo | 7051361ca34c7e0d62678d8bd0e3c89403195d0f | [
"MIT"
] | null | null | null | app/tasks/tasks.py | guomaoqiu/Celery_Flask_Demo | 7051361ca34c7e0d62678d8bd0e3c89403195d0f | [
"MIT"
] | 2 | 2018-06-05T08:34:19.000Z | 2018-08-13T10:41:29.000Z | #!/usr/bin/python
# -*- coding: utf-8 -*-
from app import celery
from celery.utils.log import get_task_logger
import time
logger = get_task_logger(__name__)
# Timestamp string captured ONCE at import time — it does not refresh
# between task runs.  (Original comment: "定时导入" / "scheduled import".)
current_time = str(time.strftime('%Y-%m-%d %H:%M:%S'))
@celery.task(name="task1")
def task1():
    """Periodic task (scheduled every 5 s): prints a timestamp and logs success."""
    # print() form keeps Python 2/3 compatibility (the original statement
    # form was py2-only), and the timestamp is now taken at call time — the
    # module-level current_time is frozen at import and never changes.
    print(u"定时任务task1:每5秒执行一次" + str(time.strftime('%Y-%m-%d %H:%M:%S')))
    # Log success ("导入成功" = "import succeeded").
    logger.info(u"导入成功")
@celery.task(name="task2")
def task2():
    """Periodic task (scheduled every 10 s): prints a timestamp and logs."""
    # Same fixes as task1: py2/py3-compatible print() and a call-time
    # timestamp instead of the import-time constant.
    print(u"定时任务task2:每10秒执行一次" + str(time.strftime('%Y-%m-%d %H:%M:%S')))
    # Log completion ("echo成功" = "echo succeeded").
    logger.info(u"echo成功")
| 18.92 | 54 | 0.674419 |
from app import celery
from celery.utils.log import get_task_logger
import time
logger = get_task_logger(__name__)
current_time = str(time.strftime('%Y-%m-%d %H:%M:%S'))
@celery.task(name="task1")
def task1():
print u"定时任务task1:每5秒执行一次" + current_time
logger.info(u"导入成功")
@celery.task(name="task2")
def task2():
print u"定时任务task2:每10秒执行一次" + current_time
logger.info(u"echo成功")
| false | true |
f71c06f54e6701786b0f29d7880b93fa9b637655 | 649 | py | Python | gui.py | eduardokimmel/ofx_to_xlsx | f36dc430ca2424055feba8f04c7f48cd4741d82c | [
"MIT"
] | null | null | null | gui.py | eduardokimmel/ofx_to_xlsx | f36dc430ca2424055feba8f04c7f48cd4741d82c | [
"MIT"
] | null | null | null | gui.py | eduardokimmel/ofx_to_xlsx | f36dc430ca2424055feba8f04c7f48cd4741d82c | [
"MIT"
] | null | null | null | from tkinter.filedialog import askopenfilename
from tkinter import *
import cli
import gettext
# Build the single-window GUI: pick an OFX file, convert it via cli.run().
window = Tk()
window.title("ofx_to_xlsx")
def close_window():
    # Tear down the Tk main window, which also ends mainloop().
    window.destroy()
def callback():
    # Ask the user for an OFX file and hand it to the CLI converter.
    ofx = askopenfilename()
    cli.run(ofx)
# i18n: install a default _() builtin, then rebind _ to the local
# 'gui_i18n' catalog (falls back to the untranslated strings).
gettext.install('ofx_to_xlsx')
t = gettext.translation('gui_i18n', 'locale', fallback=True)
_ = t.gettext
frame = Frame(window)
frame.pack()
w1 = Label (frame,text = _("Select a OFX file to convert it to Excel"))
w1.pack()
arq = Button (frame, text = _("Select File"), command = callback)
arq.pack()
sair = Button (frame, text = _("Quit"), command = close_window)
sair.pack()
window.mainloop()
exit()
| 19.088235 | 71 | 0.702619 | from tkinter.filedialog import askopenfilename
from tkinter import *
import cli
import gettext
window = Tk()
window.title("ofx_to_xlsx")
def close_window():
window.destroy()
def callback():
ofx = askopenfilename()
cli.run(ofx)
gettext.install('ofx_to_xlsx')
t = gettext.translation('gui_i18n', 'locale', fallback=True)
_ = t.gettext
frame = Frame(window)
frame.pack()
w1 = Label (frame,text = _("Select a OFX file to convert it to Excel"))
w1.pack()
arq = Button (frame, text = _("Select File"), command = callback)
arq.pack()
sair = Button (frame, text = _("Quit"), command = close_window)
sair.pack()
window.mainloop()
exit()
| true | true |
f71c071affad74a0e7aea1a05a898c897c918ab8 | 691 | py | Python | emtract/model_inference.py | dvamossy/EmTract | 68a00e3d63fbc2c401b0d2b297bf96ffb75940e8 | [
"MIT"
] | 16 | 2021-12-02T18:59:56.000Z | 2022-03-31T11:42:12.000Z | emtract/model_inference.py | dvamossy/EmTract | 68a00e3d63fbc2c401b0d2b297bf96ffb75940e8 | [
"MIT"
] | null | null | null | emtract/model_inference.py | dvamossy/EmTract | 68a00e3d63fbc2c401b0d2b297bf96ffb75940e8 | [
"MIT"
] | 1 | 2021-12-09T06:05:22.000Z | 2021-12-09T06:05:22.000Z | import os
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
from emtract.model import Model, ModelType
import pandas as pd
class ModelInference:
    """Thin wrapper routing text through the emotion model for one platform."""

    MODEL_BASE_PATH = 'build/models/'
    DATA_BASE_PATH = './emtract/data/'

    def __init__(self, model_type):
        # 'twitter' selects the Twitter model; anything else falls back to
        # the StockTwits model.
        selected = ModelType.TWITTER if model_type == 'twitter' else ModelType.STOCK_TWITS
        self.model = Model(selected)

    def inference(self, text):
        """Predict emotions for a single text string."""
        return self.model.predict([text])

    def file_inference(self, file_name, output):
        """Predict emotions for each row of a headerless one-column CSV and
        write the predictions to *output* as CSV."""
        frame = pd.read_csv(file_name, header=None)
        texts = frame.iloc[:, 0].values
        self.model.predict(texts).to_csv(output, index=False)
| 27.64 | 62 | 0.662808 | import os
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
from emtract.model import Model, ModelType
import pandas as pd
class ModelInference:
MODEL_BASE_PATH = 'build/models/'
DATA_BASE_PATH = './emtract/data/'
def __init__(self, model_type):
if model_type == 'twitter':
self.model = Model(ModelType.TWITTER)
else:
self.model = Model(ModelType.STOCK_TWITS)
def inference(self, text):
return self.model.predict([text])
def file_inference(self, file_name, output):
df = pd.read_csv(file_name, header=None)
predictions = self.model.predict(df.iloc[:, 0].values)
predictions.to_csv(output, index=False)
| true | true |
f71c07a140ce757dc105e33553450a64b401d600 | 147 | py | Python | students/k33422/Alexandrin_Anton/Lr2/homework/main/admin.py | aytakr/ITMO_ICT_WebDevelopment_2021-2022 | 57c0eef5e1f413c7f031ee001d59e5122f990f26 | [
"MIT"
] | 7 | 2021-09-02T08:20:58.000Z | 2022-01-12T11:48:07.000Z | students/k33422/Alexandrin_Anton/Lr2/homework/main/admin.py | aytakr/ITMO_ICT_WebDevelopment_2021-2022 | 57c0eef5e1f413c7f031ee001d59e5122f990f26 | [
"MIT"
] | 76 | 2021-09-17T23:01:50.000Z | 2022-03-18T16:42:03.000Z | students/k33422/Alexandrin_Anton/Lr2/homework/main/admin.py | aytakr/ITMO_ICT_WebDevelopment_2021-2022 | 57c0eef5e1f413c7f031ee001d59e5122f990f26 | [
"MIT"
] | 60 | 2021-09-04T16:47:39.000Z | 2022-03-21T04:41:27.000Z | from django.contrib import admin
from .models import *
# Expose the homework models in the Django admin with default ModelAdmin options.
admin.site.register(Student)
admin.site.register(Homework)
admin.site.register(Assignment)
| 21 | 32 | 0.816327 | from django.contrib import admin
from .models import *
admin.site.register(Student)
admin.site.register(Homework)
admin.site.register(Assignment)
| true | true |
f71c0829da8f53bf5fb8a4964502e266c6e624a6 | 1,918 | py | Python | tests/test_rabbitmq_consumer_command.py | LaEmma/sparrow_cloud | fb9f76ea70b3ba5782c33f3b3379e2ffe4bab08c | [
"MIT"
] | null | null | null | tests/test_rabbitmq_consumer_command.py | LaEmma/sparrow_cloud | fb9f76ea70b3ba5782c33f3b3379e2ffe4bab08c | [
"MIT"
] | null | null | null | tests/test_rabbitmq_consumer_command.py | LaEmma/sparrow_cloud | fb9f76ea70b3ba5782c33f3b3379e2ffe4bab08c | [
"MIT"
] | null | null | null | import os
import unittest
from django.conf.urls import url
from django.http import HttpResponse
def task(*args, **kwargs):
    """Stand-in consumer target: prints a ten-asterisk marker when invoked."""
    marker = '*' * 10
    print(marker)
def detail(request, question_id):
    # Minimal Django view used as the target of the test URL patterns below.
    return HttpResponse("You're looking at question %s." % question_id)
# Throwaway URLconf; this module is installed as ROOT_URLCONF by
# RestClientTestCase.setup_settings().
urlpatterns = [
    url(r'^/ssss/xxx/$', detail),
    url(r'^/ssuuu/xxddx/$', detail),
]
class RestClientTestCase(unittest.TestCase):
    """Environment/settings scaffolding for the rabbitmq_consumer command."""

    def setUp(self):
        # Point the sparrow broker/backend at local test addresses and select
        # the mock Django settings module before each test.
        os.environ["SPARROW_BROKER_HOST"] = "127.0.0.1:8001"
        os.environ["SPARROW_BACKEND_HOST"] = "127.0.0.1:8002"
        os.environ["DJANGO_SETTINGS_MODULE"] = "tests.mock_settings"
    # Disabled end-to-end test kept for reference: mocks the consumer and
    # runs the management command against QUEUE_CONF.
    # @mock.patch('rabbitmq_consumer.RabbitMQConsumer.target_func_map', return_value='')
    # @mock.patch('rabbitmq_consumer.RabbitMQConsumer.consume', return_value='接收任务成功')
    # def test_consumer_command(self, mock_target_func_map, mock_consume):
    #     from django.conf import settings
    #     self.setup_settings(settings)
    #     django.setup()
    #     out = StringIO()
    #     call_command('rabbitmq_consumer', '--queue', 'QUEUE_CONF', stdout=out)
    #     self.assertEqual(out.read(), '')
    def setup_settings(self, settings):
        """Populate the minimal settings the consumer command expects."""
        settings.XX = "1"
        settings.SECRET_KEY = "ss"
        settings.SPARROW_RABBITMQ_CONSUMER_CONF = {
            "MESSAGE_BROKER_CONF": {
                "USER_NAME": "test_name",
                "PASSWORD": "test_password",
                "VIRTUAL_HOST": "test_virtual",
                "BROKER_SERVICE_CONF": "sparrow-test:8001",
            },
            "MESSAGE_BACKEND_CONF": {
                "BACKEND_SERVICE_CONF": "sparrow-test:8001",
                "API_PATH": "/api/sparrow_test/task/test_update/"
            }
        }
        # Maps incoming message keys to their handler targets.
        settings.QUEUE_CONF={
            "QUEUE": "TEST_QUEUE",
            "TARGET_FUNC_MAP": {
                "ORDER_PAY_SUC_ONLINE": "./task",
            }
        }
        settings.ROOT_URLCONF = __name__
| 31.442623 | 88 | 0.606361 | import os
import unittest
from django.conf.urls import url
from django.http import HttpResponse
def task(*args, **kwargs):
print('*'*10)
def detail(request, question_id):
return HttpResponse("You're looking at question %s." % question_id)
urlpatterns = [
url(r'^/ssss/xxx/$', detail),
url(r'^/ssuuu/xxddx/$', detail),
]
class RestClientTestCase(unittest.TestCase):
def setUp(self):
os.environ["SPARROW_BROKER_HOST"] = "127.0.0.1:8001"
os.environ["SPARROW_BACKEND_HOST"] = "127.0.0.1:8002"
os.environ["DJANGO_SETTINGS_MODULE"] = "tests.mock_settings"
# @mock.patch('rabbitmq_consumer.RabbitMQConsumer.target_func_map', return_value='')
# @mock.patch('rabbitmq_consumer.RabbitMQConsumer.consume', return_value='接收任务成功')
# def test_consumer_command(self, mock_target_func_map, mock_consume):
# from django.conf import settings
# self.setup_settings(settings)
# django.setup()
# out = StringIO()
# call_command('rabbitmq_consumer', '--queue', 'QUEUE_CONF', stdout=out)
# self.assertEqual(out.read(), '')
def setup_settings(self, settings):
settings.XX = "1"
settings.SECRET_KEY = "ss"
settings.SPARROW_RABBITMQ_CONSUMER_CONF = {
"MESSAGE_BROKER_CONF": {
"USER_NAME": "test_name",
"PASSWORD": "test_password",
"VIRTUAL_HOST": "test_virtual",
"BROKER_SERVICE_CONF": "sparrow-test:8001",
},
"MESSAGE_BACKEND_CONF": {
"BACKEND_SERVICE_CONF": "sparrow-test:8001",
"API_PATH": "/api/sparrow_test/task/test_update/"
}
}
settings.QUEUE_CONF={
"QUEUE": "TEST_QUEUE",
"TARGET_FUNC_MAP": {
"ORDER_PAY_SUC_ONLINE": "./task",
}
}
settings.ROOT_URLCONF = __name__
| true | true |
f71c08e2095d6c92591d4e24b87dfa59366adf76 | 1,108 | py | Python | manage.py | cjmabry/PoliChart | 787d987669de4891b1b1ac5f8ebc0ecd38ac2785 | [
"BSD-3-Clause"
] | null | null | null | manage.py | cjmabry/PoliChart | 787d987669de4891b1b1ac5f8ebc0ecd38ac2785 | [
"BSD-3-Clause"
] | null | null | null | manage.py | cjmabry/PoliChart | 787d987669de4891b1b1ac5f8ebc0ecd38ac2785 | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
import os
from flask.ext.script import Manager
from polichart import create_app, polling
from polichart.extensions import db
from polichart.utils import MALE
from flask import url_for
app = create_app()
manager = Manager(app)
@app.context_processor
def override_url_for():
    # Template context: shadow Flask's url_for with the cache-busting version.
    return dict(url_for=dated_url_for)
def dated_url_for(endpoint, **values):
    """Build a URL like Flask's ``url_for`` with a cache-busting query arg.

    For the ``static`` endpoint, a ``q`` parameter carrying the file's
    last-modification time is appended, so browsers refetch assets that
    change between deploys. Falls back to a plain URL when the file cannot
    be stat'ed (previously this raised OSError and produced a 500).
    """
    if endpoint == 'static':
        filename = values.get('filename', None)
        if filename:
            file_path = os.path.join(app.root_path,
                                     endpoint, filename)
            try:
                # mtime changes whenever the asset is rebuilt, invalidating caches.
                values['q'] = int(os.stat(file_path).st_mtime)
            except OSError:
                # Missing/unreadable file: emit the URL without a version tag.
                pass
    return url_for(endpoint, **values)
@manager.command
def run():
    """Run in local machine."""
    # Starts Flask's built-in development server (not for production use).
    app.run()
@manager.command
def initdb():
    """Init/reset database."""
    # Destructive: drops every table before recreating the schema.
    db.drop_all()
    db.create_all()
    db.session.commit()
    # Seed the fresh database with initial polling data.
    polling.populate_db()
manager.add_option('-c', '--config',
dest="config",
required=False,
help="config file")
if __name__ == "__main__":
manager.run()
| 21.307692 | 58 | 0.617329 |
import os
from flask.ext.script import Manager
from polichart import create_app, polling
from polichart.extensions import db
from polichart.utils import MALE
from flask import url_for
app = create_app()
manager = Manager(app)
@app.context_processor
def override_url_for():
return dict(url_for=dated_url_for)
def dated_url_for(endpoint, **values):
if endpoint == 'static':
filename = values.get('filename', None)
if filename:
file_path = os.path.join(app.root_path,
endpoint, filename)
values['q'] = int(os.stat(file_path).st_mtime)
return url_for(endpoint, **values)
@manager.command
def run():
app.run()
@manager.command
def initdb():
db.drop_all()
db.create_all()
db.session.commit()
polling.populate_db()
manager.add_option('-c', '--config',
dest="config",
required=False,
help="config file")
if __name__ == "__main__":
manager.run()
| true | true |
f71c093c62e3d49d16d3c9cbf3b0a2a8b7fd68d6 | 2,128 | py | Python | integrationtest/vm/installation/upgrade/test_zs_upgd_1.3_latest_on_cos7.py | bgerxx/woodpecker | fdc51245945cc9be4d1f028988079213eb99b2ad | [
"Apache-2.0"
] | null | null | null | integrationtest/vm/installation/upgrade/test_zs_upgd_1.3_latest_on_cos7.py | bgerxx/woodpecker | fdc51245945cc9be4d1f028988079213eb99b2ad | [
"Apache-2.0"
] | null | null | null | integrationtest/vm/installation/upgrade/test_zs_upgd_1.3_latest_on_cos7.py | bgerxx/woodpecker | fdc51245945cc9be4d1f028988079213eb99b2ad | [
"Apache-2.0"
] | null | null | null | '''
@author: MengLai
'''
import os
import tempfile
import uuid
import time
import zstackwoodpecker.test_util as test_util
import zstackwoodpecker.test_lib as test_lib
import zstackwoodpecker.test_state as test_state
import zstacklib.utils.ssh as ssh
import zstackwoodpecker.operations.scenario_operations as scen_ops
test_stub = test_lib.lib_get_test_stub()
test_obj_dict = test_state.TestStateDict()
tmp_file = '/tmp/%s' % uuid.uuid1().get_hex()
vm_inv = None
def test():
    """Install ZStack 1.3 in a scenario VM, then upgrade it to the latest build."""
    global vm_inv
    test_util.test_dsc('Create test vm to test zstack upgrade by -u.')
    # Environment-supplied artifacts: base image, upgrade ISO, target
    # version string and installer path, VM name, and helper script.
    image_name = os.environ.get('imageName_i_c7_z_1.3')
    iso_path = os.environ.get('iso_path')
    zstack_latest_version = os.environ.get('zstackLatestVersion')
    zstack_latest_path = os.environ.get('zstackLatestInstaller')
    vm_name = os.environ.get('vmName')
    upgrade_script_path = os.environ.get('upgradeScript')

    vm_inv = test_stub.create_vm_scenario(image_name, vm_name)
    vm_ip = vm_inv.vmNics[0].ip
    # Wait for SSH (port 22) before configuring the VM.
    test_lib.lib_wait_target_up(vm_ip, 22)

    test_stub.make_ssh_no_password(vm_ip, tmp_file)

    test_util.test_logger('Update MN IP')
    test_stub.update_mn_ip(vm_ip, vm_ip, tmp_file)
    # 1.3-era installs need a RabbitMQ reset before the management node starts.
    test_stub.reset_rabbitmq_for_13(vm_ip, tmp_file)
    test_stub.start_mn(vm_ip, tmp_file)
    test_stub.check_installation(vm_ip, tmp_file)

    # Upgrade to the latest build and verify the reported version.
    test_util.test_logger('Upgrade zstack to latest')
    test_stub.update_iso(vm_ip, tmp_file, iso_path, upgrade_script_path)
    test_stub.upgrade_zstack(vm_ip, zstack_latest_path, tmp_file)
    test_stub.check_zstack_version(vm_ip, tmp_file, zstack_latest_version)
    test_stub.start_mn(vm_ip, tmp_file)
    test_stub.check_installation(vm_ip, tmp_file)

    # Cleanup on success: temp SSH key file and the scenario VM.
    os.system('rm -f %s' % tmp_file)
    test_stub.destroy_vm_scenario(vm_inv.uuid)
    test_util.test_pass('ZStack upgrade Test Success')
# Called only if an exception happens in test(): best-effort teardown.
def error_cleanup():
    global vm_inv
    # Remove the temp SSH key file; '-f' makes this a no-op if already gone.
    os.system('rm -f %s' % tmp_file)
    if vm_inv:
        test_stub.destroy_vm_scenario(vm_inv.uuid)
    test_lib.lib_error_cleanup(test_obj_dict)
| 33.777778 | 75 | 0.740132 | import os
import tempfile
import uuid
import time
import zstackwoodpecker.test_util as test_util
import zstackwoodpecker.test_lib as test_lib
import zstackwoodpecker.test_state as test_state
import zstacklib.utils.ssh as ssh
import zstackwoodpecker.operations.scenario_operations as scen_ops
test_stub = test_lib.lib_get_test_stub()
test_obj_dict = test_state.TestStateDict()
tmp_file = '/tmp/%s' % uuid.uuid1().get_hex()
vm_inv = None
def test():
global vm_inv
test_util.test_dsc('Create test vm to test zstack upgrade by -u.')
image_name = os.environ.get('imageName_i_c7_z_1.3')
iso_path = os.environ.get('iso_path')
zstack_latest_version = os.environ.get('zstackLatestVersion')
zstack_latest_path = os.environ.get('zstackLatestInstaller')
vm_name = os.environ.get('vmName')
upgrade_script_path = os.environ.get('upgradeScript')
vm_inv = test_stub.create_vm_scenario(image_name, vm_name)
vm_ip = vm_inv.vmNics[0].ip
test_lib.lib_wait_target_up(vm_ip, 22)
test_stub.make_ssh_no_password(vm_ip, tmp_file)
test_util.test_logger('Update MN IP')
test_stub.update_mn_ip(vm_ip, vm_ip, tmp_file)
test_stub.reset_rabbitmq_for_13(vm_ip, tmp_file)
test_stub.start_mn(vm_ip, tmp_file)
test_stub.check_installation(vm_ip, tmp_file)
test_util.test_logger('Upgrade zstack to latest')
test_stub.update_iso(vm_ip, tmp_file, iso_path, upgrade_script_path)
test_stub.upgrade_zstack(vm_ip, zstack_latest_path, tmp_file)
test_stub.check_zstack_version(vm_ip, tmp_file, zstack_latest_version)
test_stub.start_mn(vm_ip, tmp_file)
test_stub.check_installation(vm_ip, tmp_file)
os.system('rm -f %s' % tmp_file)
test_stub.destroy_vm_scenario(vm_inv.uuid)
test_util.test_pass('ZStack upgrade Test Success')
def error_cleanup():
global vm_inv
os.system('rm -f %s' % tmp_file)
if vm_inv:
test_stub.destroy_vm_scenario(vm_inv.uuid)
test_lib.lib_error_cleanup(test_obj_dict)
| true | true |
f71c0a5782bf191b5540c2eb75fdf8f2a7d65eb4 | 4,189 | py | Python | tools/ISM_ticket_creation/ISM_ticket_creation.py | thahasinab/risksense_tools | 55399f21c680735e3c557484ec8788a33c1525e7 | [
"Apache-2.0"
] | null | null | null | tools/ISM_ticket_creation/ISM_ticket_creation.py | thahasinab/risksense_tools | 55399f21c680735e3c557484ec8788a33c1525e7 | [
"Apache-2.0"
] | null | null | null | tools/ISM_ticket_creation/ISM_ticket_creation.py | thahasinab/risksense_tools | 55399f21c680735e3c557484ec8788a33c1525e7 | [
"Apache-2.0"
] | null | null | null | import requests
import json
import time
import os
import toml
import zipfile
import csv
import pandas as pd
from datetime import date
from datetime import datetime
import logging
def incident_create(ism_url,ism_key,ism_attachment_url,final_directory,tag_name_list,flag_AH,assignee,assignee_desc,profile_link):
    """Create an Ivanti ISM incident from exported RiskSense CSVs and attach them.

    Reads the Assets/Findings/Ticket_Findings CSVs for *tag_name_list* from
    *final_directory*, aggregates plugin/VRR/solution details into the
    incident subject/symptom, POSTs the incident to *ism_url*, then uploads
    the asset and findings CSVs to *ism_attachment_url* as attachments.

    :param flag_AH: "A" for application exports, "H" for host exports
        (the two exports use different column/field names).
    :return: the created incident number.
    :raises Exception: re-raises any failure from the incident-creation request
        (previously a request failure led to an UnboundLocalError on `response`).
    """
    Patches_list = []
    Solution_list = []
    VRR_group_list = []
    VRR_list = []
    Plugin_id_list = []
    Asset_info_list = []
    Scanner_name_list = []
    Scanner_title_list = []
    df_App = pd.read_csv(final_directory+"/Assets_"+str(tag_name_list)+".csv", low_memory=False)
    # NOTE(review): df_Host is loaded but never referenced below; kept so a
    # missing Findings CSV still fails loudly, as before.
    df_Host = pd.read_csv(final_directory+"/Findings_"+str(tag_name_list)+".csv", low_memory=False)
    df_multiple = pd.read_csv(final_directory+"/Ticket_Findings_"+str(tag_name_list)+".csv", low_memory=False)

    for j in range(len(df_multiple.axes[0])):
        # NOTE(review): this sleep only throttles list building, not any API
        # call — kept for behavioral parity, but likely removable.
        time.sleep(1)
        Scanner_name_list.append(df_multiple.iloc[j]['Scanner Name'])
        # Application and host exports use different column/asset field names.
        if flag_AH == "A":
            Plugin_id_list.append(df_multiple.iloc[j]["Scanner Plugin"])
            Asset_info_list.append(df_App.iloc[0]["Address"])
        elif flag_AH == "H":
            Plugin_id_list.append(df_multiple.iloc[j]["Scanner Plugin ID"])
            Asset_info_list.append(df_App.iloc[0]["IP Address"])
        Scanner_title_list.append(df_multiple.iloc[j]["Vulnerability"])
        VRR_list.append(df_multiple.iloc[j]["Vulnerability Risk Rating"])
        VRR_group_list.append(df_multiple.iloc[j]["VRR Group"])
        Solution_list.append(df_multiple.iloc[j]['Possible Solution'])
        Patches_list.append(df_multiple.iloc[j]['Possible Patches'])

    payload = json.dumps({"Category": "Account Lockout","Impact": "Medium","Priority": "3","ProfileLink": profile_link,"Service": "Email Service","Source": "Phone","Status": "Active","Subject": "Scanner Name : " + ' , '.join(map(str, Scanner_name_list)) + "|" + " Scanner Plugin ID : " + ' , '.join(map(str, Plugin_id_list)) + "|" + " Scanner Title : " + ' , '.join(map(str, Scanner_title_list)) ,"Symptom": 'Plugin information : \n----------------------------\nPlugin ID : ' + ' , '.join(map(str, Plugin_id_list)) + "\n\nVRR : " + ' , '.join(map(str, VRR_list)) + "|" + ' , '.join(map(str, VRR_group_list)) + "\n\n----------------------------------------------------------------------------------------------------\nAsset Information : \n----------------------------\n" + "Hostname : " + ' , '.join(map(str, Asset_info_list)) + "\n\nSolution : \n\n*) " + '\n*) '.join(map(str, Solution_list)) + "\n\nPatches : \n\n*) " + '\n*) '.join(map(str, Patches_list)),"Urgency": "Medium","Owner": assignee,"OwnerTeam": "Service Desk"})
    headers = {
        'Authorization': ism_key,
        'Content-Type': 'application/json',
        'Cookie': 'SID='
    }
    try:
        response = requests.request("POST", ism_url, headers=headers, data=payload)
    except Exception as e:
        # Fix: the original referenced `response` here before assignment
        # (NameError) and then fell through to response.json(); instead
        # log the failure and propagate it.
        print(e)
        logging.error("Incident creation request failed: %s", e)
        raise

    Rec_id_json = response.json()
    Rec_id = Rec_id_json["RecId"]
    Incident_num = Rec_id_json["IncidentNumber"]

    ####### Attachment #######
    attach_payload = {"ObjectID": Rec_id, "ObjectType": "incident#"}
    attach_headers = {
        'Authorization': ism_key,
        'Cookie': 'SID='
    }
    # Context managers close the handles the original version leaked.
    with open(final_directory+"/Assets_"+str(tag_name_list)+".csv", 'rb') as assets_fh, \
         open(final_directory+"/Ticket_Findings_"+str(tag_name_list)+".csv", 'rb') as findings_fh:
        files = [('file', assets_fh), ('file', findings_fh)]
        response = requests.request("POST", ism_attachment_url, headers=attach_headers, data=attach_payload, files=files)

    if response.status_code == 200:
        print("Incident is created and attachment is included...")
        logging.info("Incident is created and attachment is included...\n")
    else:
        print("There is a problem in attaching the files to the ticket")
        logging.error("There is a problem in attaching the files to the ticket\n")
    # Both branches reported the assignee description; do it once.
    print(assignee_desc)
    logging.info(assignee_desc)
    return Incident_num
| 46.544444 | 1,031 | 0.627596 | import requests
import json
import time
import os
import toml
import zipfile
import csv
import pandas as pd
from datetime import date
from datetime import datetime
import logging
def incident_create(ism_url,ism_key,ism_attachment_url,final_directory,tag_name_list,flag_AH,assignee,assignee_desc,profile_link):
Patches_list=[]
Solution_list=[]
VRR_group_list=[]
VRR_list=[]
Plugin_id_list=[]
Asset_info_list=[]
Scanner_name_list=[]
Scanner_title_list=[]
df_App = pd.read_csv(final_directory+"/Assets_"+str(tag_name_list)+".csv", low_memory=False)
df_Host = pd.read_csv(final_directory+"/Findings_"+str(tag_name_list)+".csv", low_memory=False)
df_multiple = pd.read_csv(final_directory+"/Ticket_Findings_"+str(tag_name_list)+".csv", low_memory=False)
for j in range(len(df_multiple.axes[0])):
time.sleep(1)
flag = False
create = False
Scanner_name_list.append(df_multiple.iloc[j]['Scanner Name'])
if(flag_AH == "A"):
Plugin_id_list.append(df_multiple.iloc[j]["Scanner Plugin"])
Asset_info_list.append(df_App.iloc[0]["Address"])
elif(flag_AH == "H"):
Plugin_id_list.append(df_multiple.iloc[j]["Scanner Plugin ID"])
Asset_info_list.append(df_App.iloc[0]["IP Address"])
Scanner_title_list.append(df_multiple.iloc[j]["Vulnerability"])
VRR_list.append(df_multiple.iloc[j]["Vulnerability Risk Rating"])
VRR_group_list.append(df_multiple.iloc[j]["VRR Group"])
Solution_list.append(df_multiple.iloc[j]['Possible Solution'])
Patches_list.append(df_multiple.iloc[j]['Possible Patches'])
payload = json.dumps({"Category": "Account Lockout","Impact": "Medium","Priority": "3","ProfileLink": profile_link,"Service": "Email Service","Source": "Phone","Status": "Active","Subject": "Scanner Name : " + ' , '.join(map(str, Scanner_name_list)) + "|" + " Scanner Plugin ID : " + ' , '.join(map(str, Plugin_id_list)) + "|" + " Scanner Title : " + ' , '.join(map(str, Scanner_title_list)) ,"Symptom": 'Plugin information : \n----------------------------\nPlugin ID : ' + ' , '.join(map(str, Plugin_id_list)) + "\n\nVRR : " + ' , '.join(map(str, VRR_list)) + "|" + ' , '.join(map(str, VRR_group_list)) + "\n\n----------------------------------------------------------------------------------------------------\nAsset Information : \n----------------------------\n" + "Hostname : " + ' , '.join(map(str, Asset_info_list)) + "\n\nSolution : \n\n*) " + '\n*) '.join(map(str, Solution_list)) + "\n\nPatches : \n\n*) " + '\n*) '.join(map(str, Patches_list)),"Urgency": "Medium","Owner": assignee,"OwnerTeam": "Service Desk"})
headers = {
'Authorization': ism_key,
'Content-Type': 'application/json',
'Cookie': 'SID='
}
try:
response = requests.request("POST", ism_url, headers=headers, data=payload)
except Exception as e:
print(e,response.text)
logging.error(e,response.text)
Rec_id_json = response.json()
Rec_id = Rec_id_json["RecId"]
Incident_num = Rec_id_json["IncidentNumber"]
sv",'rb') )]
payload={"ObjectID":Rec_id,"ObjectType":"incident#"}
headers = {
'Authorization': ism_key,
'Cookie': 'SID='
}
response = requests.request("POST", ism_attachment_url, headers=headers, data=payload,files=files)
if(response.status_code == 200):
print("Incident is created and attachment is included...")
logging.info("Incident is created and attachment is included...\n")
print(assignee_desc)
logging.info(assignee_desc)
else:
print("There is a problem in attaching the files to the ticket")
logging.error("There is a problem in attaching the files to the ticket\n")
print(assignee_desc)
logging.info(assignee_desc)
return Incident_num
| true | true |
f71c0b020116ae97bd345db15ac8c2ee8e6c6d43 | 28,999 | py | Python | venv/Lib/site-packages/pip/_internal/cli/cmdoptions.py | EkremBayar/bayar | aad1a32044da671d0b4f11908416044753360b39 | [
"MIT"
] | 38,667 | 2015-01-01T00:15:34.000Z | 2022-03-31T22:57:03.000Z | venv/Lib/site-packages/pip/_internal/cli/cmdoptions.py | EkremBayar/bayar | aad1a32044da671d0b4f11908416044753360b39 | [
"MIT"
] | 1,192 | 2015-01-03T07:59:34.000Z | 2022-03-31T13:22:26.000Z | venv/Lib/site-packages/pip/_internal/cli/cmdoptions.py | EkremBayar/bayar | aad1a32044da671d0b4f11908416044753360b39 | [
"MIT"
] | 11,269 | 2015-01-01T08:41:17.000Z | 2022-03-31T16:12:52.000Z | """
shared options and groups
The principle here is to define options once, but *not* instantiate them
globally. One reason being that options with action='append' can carry state
between parses. pip parses general options twice internally, and shouldn't
pass on state. To be consistent, all options will follow this design.
"""
# The following comment should be removed at some point in the future.
# mypy: strict-optional=False
import os
import textwrap
import warnings
from functools import partial
from optparse import SUPPRESS_HELP, Option, OptionGroup, OptionParser, Values
from textwrap import dedent
from typing import Any, Callable, Dict, Optional, Tuple
from pip._vendor.packaging.utils import canonicalize_name
from pip._internal.cli.parser import ConfigOptionParser
from pip._internal.cli.progress_bars import BAR_TYPES
from pip._internal.exceptions import CommandError
from pip._internal.locations import USER_CACHE_DIR, get_src_prefix
from pip._internal.models.format_control import FormatControl
from pip._internal.models.index import PyPI
from pip._internal.models.target_python import TargetPython
from pip._internal.utils.hashes import STRONG_HASHES
from pip._internal.utils.misc import strtobool
def raise_option_error(parser, option, msg):
    # type: (OptionParser, Option, str) -> None
    """
    Raise an option parsing error using parser.error().

    Args:
        parser: an OptionParser instance.
        option: an Option instance.
        msg: the error text.
    """
    # Collapse internal whitespace, then re-wrap to terminal-friendly lines.
    normalized = " ".join(f"{option} error: {msg}".split())
    parser.error(textwrap.fill(normalized))
def make_option_group(group, parser):
    # type: (Dict[str, Any], ConfigOptionParser) -> OptionGroup
    """
    Return an OptionGroup object

    group  -- assumed to be dict with 'name' and 'options' keys
    parser -- an optparse Parser
    """
    option_group = OptionGroup(parser, group["name"])
    # Each entry is a zero-argument factory producing a fresh Option.
    for make_opt in group["options"]:
        option_group.add_option(make_opt())
    return option_group
def check_install_build_global(options, check_options=None):
    # type: (Values, Optional[Values]) -> None
    """Disable wheels if per-setup.py call options are set.

    :param options: The OptionParser options to update.
    :param check_options: The options to check, if not supplied defaults to
        options.
    """
    if check_options is None:
        check_options = options

    # Any truthy per-setup.py option forces building from source.
    names = ["build_options", "global_options", "install_options"]
    if any(getattr(check_options, name, None) for name in names):
        options.format_control.disallow_binaries()
        warnings.warn(
            "Disabling all use of wheels due to the use of --build-option "
            "/ --global-option / --install-option.",
            stacklevel=2,
        )
def check_dist_restriction(options, check_target=False):
    # type: (Values, bool) -> None
    """Function for determining if custom platform options are allowed.

    :param options: The OptionParser options.
    :param check_target: Whether or not to check if --target is being used.
    """
    restrictions_in_use = any(
        (
            options.python_version,
            options.platforms,
            options.abis,
            options.implementation,
        )
    )

    binary_only = FormatControl(set(), {":all:"})
    allows_sdist_deps = (
        options.format_control != binary_only
        and not options.ignore_dependencies
    )

    # Installations or downloads using dist restrictions must not combine
    # source distributions and dist-specific wheels, as they are not
    # guaranteed to be locally compatible.
    if restrictions_in_use and allows_sdist_deps:
        raise CommandError(
            "When restricting platform and interpreter constraints using "
            "--python-version, --platform, --abi, or --implementation, "
            "either --no-deps must be set, or --only-binary=:all: must be "
            "set and --no-binary must not be set (or must be set to "
            ":none:)."
        )

    if check_target and restrictions_in_use and not options.target_dir:
        raise CommandError(
            "Can not use any platform or abi specific options unless "
            "installing via '--target'"
        )
def _path_option_check(option, opt, value):
# type: (Option, str, str) -> str
return os.path.expanduser(value)
def _package_name_option_check(option, opt, value):
    # type: (Option, str, str) -> str
    # Normalize the project name to PEP 503 canonical form
    # (lowercased, runs of -/_/. collapsed to a single '-').
    return canonicalize_name(value)
class PipOption(Option):
    """Option subclass adding pip-specific value types.

    ``path`` expands ``~`` via ``_path_option_check``; ``package_name``
    canonicalizes project names via ``_package_name_option_check``.
    """

    TYPES = Option.TYPES + ("path", "package_name")
    TYPE_CHECKER = Option.TYPE_CHECKER.copy()
    TYPE_CHECKER["package_name"] = _package_name_option_check
    TYPE_CHECKER["path"] = _path_option_check
###########
# options #
###########
help_ = partial(
Option,
"-h",
"--help",
dest="help",
action="help",
help="Show help.",
) # type: Callable[..., Option]
isolated_mode = partial(
Option,
"--isolated",
dest="isolated_mode",
action="store_true",
default=False,
help=(
"Run pip in an isolated mode, ignoring environment variables and user "
"configuration."
),
) # type: Callable[..., Option]
require_virtualenv = partial(
Option,
# Run only if inside a virtualenv, bail if not.
"--require-virtualenv",
"--require-venv",
dest="require_venv",
action="store_true",
default=False,
help=SUPPRESS_HELP,
) # type: Callable[..., Option]
verbose = partial(
Option,
"-v",
"--verbose",
dest="verbose",
action="count",
default=0,
help="Give more output. Option is additive, and can be used up to 3 times.",
) # type: Callable[..., Option]
no_color = partial(
Option,
"--no-color",
dest="no_color",
action="store_true",
default=False,
help="Suppress colored output.",
) # type: Callable[..., Option]
version = partial(
Option,
"-V",
"--version",
dest="version",
action="store_true",
help="Show version and exit.",
) # type: Callable[..., Option]
quiet = partial(
Option,
"-q",
"--quiet",
dest="quiet",
action="count",
default=0,
help=(
"Give less output. Option is additive, and can be used up to 3"
" times (corresponding to WARNING, ERROR, and CRITICAL logging"
" levels)."
),
) # type: Callable[..., Option]
progress_bar = partial(
Option,
"--progress-bar",
dest="progress_bar",
type="choice",
choices=list(BAR_TYPES.keys()),
default="on",
help=(
"Specify type of progress to be displayed ["
+ "|".join(BAR_TYPES.keys())
+ "] (default: %default)"
),
) # type: Callable[..., Option]
log = partial(
PipOption,
"--log",
"--log-file",
"--local-log",
dest="log",
metavar="path",
type="path",
help="Path to a verbose appending log.",
) # type: Callable[..., Option]
no_input = partial(
Option,
# Don't ask for input
"--no-input",
dest="no_input",
action="store_true",
default=False,
help="Disable prompting for input.",
) # type: Callable[..., Option]
proxy = partial(
Option,
"--proxy",
dest="proxy",
type="str",
default="",
help="Specify a proxy in the form [user:passwd@]proxy.server:port.",
) # type: Callable[..., Option]
retries = partial(
Option,
"--retries",
dest="retries",
type="int",
default=5,
help="Maximum number of retries each connection should attempt "
"(default %default times).",
) # type: Callable[..., Option]
timeout = partial(
Option,
"--timeout",
"--default-timeout",
metavar="sec",
dest="timeout",
type="float",
default=15,
help="Set the socket timeout (default %default seconds).",
) # type: Callable[..., Option]
def exists_action():
    # type: () -> Option
    """Build the repeatable ``--exists-action`` option (what to do when a
    target path already exists)."""
    return Option(
        "--exists-action",
        dest="exists_action",
        metavar="action",
        action="append",
        default=[],
        type="choice",
        choices=["s", "i", "w", "b", "a"],
        help="Default action when a path already exists: "
        "(s)witch, (i)gnore, (w)ipe, (b)ackup, (a)bort.",
    )
cert = partial(
PipOption,
"--cert",
dest="cert",
type="path",
metavar="path",
help=(
"Path to PEM-encoded CA certificate bundle. "
"If provided, overrides the default. "
"See 'SSL Certificate Verification' in pip documentation "
"for more information."
),
) # type: Callable[..., Option]
client_cert = partial(
PipOption,
"--client-cert",
dest="client_cert",
type="path",
default=None,
metavar="path",
help="Path to SSL client certificate, a single file containing the "
"private key and the certificate in PEM format.",
) # type: Callable[..., Option]
index_url = partial(
Option,
"-i",
"--index-url",
"--pypi-url",
dest="index_url",
metavar="URL",
default=PyPI.simple_url,
help="Base URL of the Python Package Index (default %default). "
"This should point to a repository compliant with PEP 503 "
"(the simple repository API) or a local directory laid out "
"in the same format.",
) # type: Callable[..., Option]
def extra_index_url():
    # type: () -> Option
    """Build the repeatable ``--extra-index-url`` option."""
    opt = Option(
        "--extra-index-url",
        dest="extra_index_urls",
        metavar="URL",
        action="append",
        default=[],
        help="Extra URLs of package indexes to use in addition to "
        "--index-url. Should follow the same rules as "
        "--index-url.",
    )
    return opt
no_index = partial(
Option,
"--no-index",
dest="no_index",
action="store_true",
default=False,
help="Ignore package index (only looking at --find-links URLs instead).",
) # type: Callable[..., Option]
def find_links():
    # type: () -> Option
    """Build the repeatable ``-f/--find-links`` option."""
    opt = Option(
        "-f",
        "--find-links",
        dest="find_links",
        metavar="url",
        action="append",
        default=[],
        help="If a URL or path to an html file, then parse for links to "
        "archives such as sdist (.tar.gz) or wheel (.whl) files. "
        "If a local path or file:// URL that's a directory, "
        "then look for archives in the directory listing. "
        "Links to VCS project URLs are not supported.",
    )
    return opt
def trusted_host():
    # type: () -> Option
    """Build the repeatable ``--trusted-host`` option."""
    opt = Option(
        "--trusted-host",
        dest="trusted_hosts",
        metavar="HOSTNAME",
        action="append",
        default=[],
        help="Mark this host or host:port pair as trusted, even though it "
        "does not have valid or any HTTPS.",
    )
    return opt
def constraints():
    # type: () -> Option
    """Build the repeatable ``-c/--constraint`` option."""
    opt = Option(
        "-c",
        "--constraint",
        dest="constraints",
        metavar="file",
        action="append",
        default=[],
        help="Constrain versions using the given constraints file. "
        "This option can be used multiple times.",
    )
    return opt
def requirements():
    # type: () -> Option
    """Build the repeatable ``-r/--requirement`` option."""
    opt = Option(
        "-r",
        "--requirement",
        dest="requirements",
        metavar="file",
        action="append",
        default=[],
        help="Install from the given requirements file. "
        "This option can be used multiple times.",
    )
    return opt
def editable():
    # type: () -> Option
    """Build the repeatable ``-e/--editable`` option."""
    opt = Option(
        "-e",
        "--editable",
        dest="editables",
        metavar="path/url",
        action="append",
        default=[],
        help="Install a project in editable mode (i.e. setuptools "
        '"develop mode") from a local project path or a VCS url.',
    )
    return opt
def _handle_src(option, opt_str, value, parser):
# type: (Option, str, str, OptionParser) -> None
value = os.path.abspath(value)
setattr(parser.values, option.dest, value)
src = partial(
PipOption,
"--src",
"--source",
"--source-dir",
"--source-directory",
dest="src_dir",
type="path",
metavar="dir",
default=get_src_prefix(),
action="callback",
callback=_handle_src,
help="Directory to check out editable projects into. "
'The default in a virtualenv is "<venv path>/src". '
'The default for global installs is "<current dir>/src".',
) # type: Callable[..., Option]
def _get_format_control(values, option):
    # type: (Values, Option) -> Any
    """Get a format_control object."""
    # --no-binary and --only-binary share dest="format_control", so both
    # callbacks read the same object from the parsed values.
    return getattr(values, option.dest)
def _handle_no_binary(option, opt_str, value, parser):
    # type: (Option, str, str, OptionParser) -> None
    # Merge `value` into the no_binary set while removing it from
    # only_binary, keeping the two sets mutually exclusive.
    existing = _get_format_control(parser.values, option)
    FormatControl.handle_mutual_excludes(
        value,
        existing.no_binary,
        existing.only_binary,
    )
def _handle_only_binary(option, opt_str, value, parser):
    # type: (Option, str, str, OptionParser) -> None
    # Mirror image of _handle_no_binary: merge `value` into only_binary
    # while removing it from no_binary.
    existing = _get_format_control(parser.values, option)
    FormatControl.handle_mutual_excludes(
        value,
        existing.only_binary,
        existing.no_binary,
    )
def no_binary():
    # type: () -> Option
    # Each call builds its own mutable FormatControl default, so parsers
    # created from this factory do not share accumulated state.
    format_control = FormatControl(set(), set())
    return Option(
        "--no-binary",
        dest="format_control",
        action="callback",
        callback=_handle_no_binary,
        type="str",
        default=format_control,
        help="Do not use binary packages. Can be supplied multiple times, and "
        'each time adds to the existing value. Accepts either ":all:" to '
        'disable all binary packages, ":none:" to empty the set (notice '
        "the colons), or one or more package names with commas between "
        "them (no colons). Note that some packages are tricky to compile "
        "and may fail to install when this option is used on them.",
    )
def only_binary():
    # type: () -> Option
    # Each call builds its own mutable FormatControl default, so parsers
    # created from this factory do not share accumulated state.
    format_control = FormatControl(set(), set())
    return Option(
        "--only-binary",
        dest="format_control",
        action="callback",
        callback=_handle_only_binary,
        type="str",
        default=format_control,
        help="Do not use source packages. Can be supplied multiple times, and "
        'each time adds to the existing value. Accepts either ":all:" to '
        'disable all source packages, ":none:" to empty the set, or one '
        "or more package names with commas between them. Packages "
        "without binary distributions will fail to install when this "
        "option is used on them.",
    )
platforms = partial(
Option,
"--platform",
dest="platforms",
metavar="platform",
action="append",
default=None,
help=(
"Only use wheels compatible with <platform>. Defaults to the "
"platform of the running system. Use this option multiple times to "
"specify multiple platforms supported by the target interpreter."
),
) # type: Callable[..., Option]
# This was made a separate function for unit-testing purposes.
def _convert_python_version(value):
# type: (str) -> Tuple[Tuple[int, ...], Optional[str]]
"""
Convert a version string like "3", "37", or "3.7.3" into a tuple of ints.
:return: A 2-tuple (version_info, error_msg), where `error_msg` is
non-None if and only if there was a parsing error.
"""
if not value:
# The empty string is the same as not providing a value.
return (None, None)
parts = value.split(".")
if len(parts) > 3:
return ((), "at most three version parts are allowed")
if len(parts) == 1:
# Then we are in the case of "3" or "37".
value = parts[0]
if len(value) > 1:
parts = [value[0], value[1:]]
try:
version_info = tuple(int(part) for part in parts)
except ValueError:
return ((), "each version part must be an integer")
return (version_info, None)
def _handle_python_version(option, opt_str, value, parser):
    # type: (Option, str, str, OptionParser) -> None
    """
    Handle a provided --python-version value.
    """
    # Delegate parsing; a non-None error_msg means the value was malformed
    # and we abort via parser.error() (which exits).
    version_info, error_msg = _convert_python_version(value)
    if error_msg is not None:
        msg = "invalid --python-version value: {!r}: {}".format(
            value,
            error_msg,
        )
        raise_option_error(parser, option=option, msg=msg)

    parser.values.python_version = version_info
python_version = partial(
Option,
"--python-version",
dest="python_version",
metavar="python_version",
action="callback",
callback=_handle_python_version,
type="str",
default=None,
help=dedent(
"""\
The Python interpreter version to use for wheel and "Requires-Python"
compatibility checks. Defaults to a version derived from the running
interpreter. The version can be specified using up to three dot-separated
integers (e.g. "3" for 3.0.0, "3.7" for 3.7.0, or "3.7.3"). A major-minor
version can also be given as a string without dots (e.g. "37" for 3.7.0).
"""
),
) # type: Callable[..., Option]
implementation = partial(
Option,
"--implementation",
dest="implementation",
metavar="implementation",
default=None,
help=(
"Only use wheels compatible with Python "
"implementation <implementation>, e.g. 'pp', 'jy', 'cp', "
" or 'ip'. If not specified, then the current "
"interpreter implementation is used. Use 'py' to force "
"implementation-agnostic wheels."
),
) # type: Callable[..., Option]
abis = partial(
Option,
"--abi",
dest="abis",
metavar="abi",
action="append",
default=None,
help=(
"Only use wheels compatible with Python abi <abi>, e.g. 'pypy_41'. "
"If not specified, then the current interpreter abi tag is used. "
"Use this option multiple times to specify multiple abis supported "
"by the target interpreter. Generally you will need to specify "
"--implementation, --platform, and --python-version when using this "
"option."
),
) # type: Callable[..., Option]
def add_target_python_options(cmd_opts):
    # type: (OptionGroup) -> None
    """Register the dist-restriction options (--platform, --python-version,
    --implementation, --abi) on the given option group."""
    cmd_opts.add_option(platforms())
    cmd_opts.add_option(python_version())
    cmd_opts.add_option(implementation())
    cmd_opts.add_option(abis())
def make_target_python(options):
    # type: (Values) -> TargetPython
    """Build a TargetPython from the parsed dist-restriction options."""
    target_python = TargetPython(
        platforms=options.platforms,
        py_version_info=options.python_version,
        abis=options.abis,
        implementation=options.implementation,
    )

    return target_python
def prefer_binary():
    # type: () -> Option
    """Build the ``--prefer-binary`` flag."""
    opt = Option(
        "--prefer-binary",
        dest="prefer_binary",
        action="store_true",
        default=False,
        help="Prefer older binary packages over newer source packages.",
    )
    return opt
cache_dir = partial(
PipOption,
"--cache-dir",
dest="cache_dir",
default=USER_CACHE_DIR,
metavar="dir",
type="path",
help="Store the cache data in <dir>.",
) # type: Callable[..., Option]
def _handle_no_cache_dir(option, opt, value, parser):
    # type: (Option, str, str, OptionParser) -> None
    """
    Process a value provided for the --no-cache-dir option.

    This is an optparse.Option callback for the --no-cache-dir option.
    """
    # The value argument will be None if --no-cache-dir is passed via the
    # command-line, since the option doesn't accept arguments. However,
    # the value can be non-None if the option is triggered e.g. by an
    # environment variable, like PIP_NO_CACHE_DIR=true.
    if value is not None:
        # Then parse the string value to get argument error-checking.
        try:
            strtobool(value)
        except ValueError as exc:
            raise_option_error(parser, option=option, msg=str(exc))
    # Originally, setting PIP_NO_CACHE_DIR to a value that strtobool()
    # converted to 0 (like "false" or "no") caused cache_dir to be disabled
    # rather than enabled (logic would say the latter). Thus, we disable
    # the cache directory not just on values that parse to True, but (for
    # backwards compatibility reasons) also on values that parse to False.
    # In other words, always set it to False if the option is provided in
    # some (valid) form.
    parser.values.cache_dir = False
no_cache = partial(
Option,
"--no-cache-dir",
dest="cache_dir",
action="callback",
callback=_handle_no_cache_dir,
help="Disable the cache.",
) # type: Callable[..., Option]
no_deps = partial(
Option,
"--no-deps",
"--no-dependencies",
dest="ignore_dependencies",
action="store_true",
default=False,
help="Don't install package dependencies.",
) # type: Callable[..., Option]
build_dir = partial(
PipOption,
"-b",
"--build",
"--build-dir",
"--build-directory",
dest="build_dir",
type="path",
metavar="dir",
help=SUPPRESS_HELP,
) # type: Callable[..., Option]
ignore_requires_python = partial(
Option,
"--ignore-requires-python",
dest="ignore_requires_python",
action="store_true",
help="Ignore the Requires-Python information.",
) # type: Callable[..., Option]
no_build_isolation = partial(
Option,
"--no-build-isolation",
dest="build_isolation",
action="store_false",
default=True,
help="Disable isolation when building a modern source distribution. "
"Build dependencies specified by PEP 518 must be already installed "
"if this option is used.",
) # type: Callable[..., Option]
def _handle_no_use_pep517(option, opt, value, parser):
    # type: (Option, str, str, OptionParser) -> None
    """
    Process a value provided for the --no-use-pep517 option.

    This is an optparse.Option callback for the no_use_pep517 option.
    """
    # Since --no-use-pep517 doesn't accept arguments, the value argument
    # will be None if --no-use-pep517 is passed via the command-line.
    # However, the value can be non-None if the option is triggered e.g.
    # by an environment variable, for example "PIP_NO_USE_PEP517=true".
    if value is not None:
        # A value here means the flag was misused (it takes no argument);
        # point the user at PIP_USE_PEP517 / "use-pep517" instead.
        msg = """A value was passed for --no-use-pep517,
        probably using either the PIP_NO_USE_PEP517 environment variable
        or the "no-use-pep517" config file option. Use an appropriate value
        of the PIP_USE_PEP517 environment variable or the "use-pep517"
        config file option instead.
        """
        raise_option_error(parser, option=option, msg=msg)
    # Otherwise, --no-use-pep517 was passed via the command-line.
    parser.values.use_pep517 = False
use_pep517 = partial(
Option,
"--use-pep517",
dest="use_pep517",
action="store_true",
default=None,
help="Use PEP 517 for building source distributions "
"(use --no-use-pep517 to force legacy behaviour).",
) # type: Any
no_use_pep517 = partial(
Option,
"--no-use-pep517",
dest="use_pep517",
action="callback",
callback=_handle_no_use_pep517,
default=None,
help=SUPPRESS_HELP,
) # type: Any
install_options = partial(
Option,
"--install-option",
dest="install_options",
action="append",
metavar="options",
help="Extra arguments to be supplied to the setup.py install "
'command (use like --install-option="--install-scripts=/usr/local/'
'bin"). Use multiple --install-option options to pass multiple '
"options to setup.py install. If you are using an option with a "
"directory path, be sure to use absolute path.",
) # type: Callable[..., Option]
build_options = partial(
Option,
"--build-option",
dest="build_options",
metavar="options",
action="append",
help="Extra arguments to be supplied to 'setup.py bdist_wheel'.",
) # type: Callable[..., Option]
global_options = partial(
Option,
"--global-option",
dest="global_options",
action="append",
metavar="options",
help="Extra global options to be supplied to the setup.py "
"call before the install or bdist_wheel command.",
) # type: Callable[..., Option]
no_clean = partial(
Option,
"--no-clean",
action="store_true",
default=False,
help="Don't clean up build directories.",
) # type: Callable[..., Option]
pre = partial(
Option,
"--pre",
action="store_true",
default=False,
help="Include pre-release and development versions. By default, "
"pip only finds stable versions.",
) # type: Callable[..., Option]
disable_pip_version_check = partial(
Option,
"--disable-pip-version-check",
dest="disable_pip_version_check",
action="store_true",
default=False,
help="Don't periodically check PyPI to determine whether a new version "
"of pip is available for download. Implied with --no-index.",
) # type: Callable[..., Option]
def _handle_merge_hash(option, opt_str, value, parser):
    # type: (Option, str, str, OptionParser) -> None
    """Given a value spelled "algo:digest", append the digest to a list
    pointed to in a dict by the algo name."""
    # parser.values.hashes starts out falsy; create the dict lazily.
    if not parser.values.hashes:
        parser.values.hashes = {}
    try:
        # Split only on the first ":" so digests are taken verbatim.
        algo, digest = value.split(":", 1)
    except ValueError:
        parser.error(
            "Arguments to {} must be a hash name " # noqa
            "followed by a value, like --hash=sha256:"
            "abcde...".format(opt_str)
        )
    # Only algorithms considered strong (per STRONG_HASHES) are accepted.
    if algo not in STRONG_HASHES:
        parser.error(
            "Allowed hash algorithms for {} are {}.".format( # noqa
                opt_str, ", ".join(STRONG_HASHES)
            )
        )
    parser.values.hashes.setdefault(algo, []).append(digest)
hash = partial(
Option,
"--hash",
# Hash values eventually end up in InstallRequirement.hashes due to
# __dict__ copying in process_line().
dest="hashes",
action="callback",
callback=_handle_merge_hash,
type="string",
help="Verify that the package's archive matches this "
"hash before installing. Example: --hash=sha256:abcdef...",
) # type: Callable[..., Option]
require_hashes = partial(
Option,
"--require-hashes",
dest="require_hashes",
action="store_true",
default=False,
help="Require a hash to check each requirement against, for "
"repeatable installs. This option is implied when any package in a "
"requirements file has a --hash option.",
) # type: Callable[..., Option]
list_path = partial(
PipOption,
"--path",
dest="path",
type="path",
action="append",
help="Restrict to the specified installation path for listing "
"packages (can be used multiple times).",
) # type: Callable[..., Option]
def check_list_path_option(options):
    # type: (Values) -> None
    """Reject --path when combined with --user or --local; they are
    mutually exclusive ways of scoping the package listing."""
    if not options.path:
        return
    if options.user or options.local:
        raise CommandError("Cannot combine '--path' with '--user' or '--local'")
list_exclude = partial(
PipOption,
"--exclude",
dest="excludes",
action="append",
metavar="package",
type="package_name",
help="Exclude specified package from the output",
) # type: Callable[..., Option]
no_python_version_warning = partial(
Option,
"--no-python-version-warning",
dest="no_python_version_warning",
action="store_true",
default=False,
help="Silence deprecation warnings for upcoming unsupported Pythons.",
) # type: Callable[..., Option]
use_new_feature = partial(
Option,
"--use-feature",
dest="features_enabled",
metavar="feature",
action="append",
default=[],
choices=["2020-resolver", "fast-deps", "in-tree-build"],
help="Enable new functionality, that may be backward incompatible.",
) # type: Callable[..., Option]
use_deprecated_feature = partial(
Option,
"--use-deprecated",
dest="deprecated_features_enabled",
metavar="feature",
action="append",
default=[],
choices=["legacy-resolver"],
help=("Enable deprecated functionality, that will be removed in the future."),
) # type: Callable[..., Option]
##########
# groups #
##########
# Option groups shared across pip commands. Each entry maps a display
# name to a list of option *factories*; make_option_group() calls each
# factory to build a fresh Option when the group is attached to a parser.
general_group = {
    "name": "General Options",
    "options": [
        help_,
        isolated_mode,
        require_virtualenv,
        verbose,
        version,
        quiet,
        log,
        no_input,
        proxy,
        retries,
        timeout,
        exists_action,
        trusted_host,
        cert,
        client_cert,
        cache_dir,
        no_cache,
        disable_pip_version_check,
        no_color,
        no_python_version_warning,
        use_new_feature,
        use_deprecated_feature,
    ],
} # type: Dict[str, Any]
# Options controlling where packages are discovered (index/find-links).
index_group = {
    "name": "Package Index Options",
    "options": [
        index_url,
        extra_index_url,
        no_index,
        find_links,
    ],
} # type: Dict[str, Any]
| 28.291707 | 82 | 0.626746 |
import os
import textwrap
import warnings
from functools import partial
from optparse import SUPPRESS_HELP, Option, OptionGroup, OptionParser, Values
from textwrap import dedent
from typing import Any, Callable, Dict, Optional, Tuple
from pip._vendor.packaging.utils import canonicalize_name
from pip._internal.cli.parser import ConfigOptionParser
from pip._internal.cli.progress_bars import BAR_TYPES
from pip._internal.exceptions import CommandError
from pip._internal.locations import USER_CACHE_DIR, get_src_prefix
from pip._internal.models.format_control import FormatControl
from pip._internal.models.index import PyPI
from pip._internal.models.target_python import TargetPython
from pip._internal.utils.hashes import STRONG_HASHES
from pip._internal.utils.misc import strtobool
def raise_option_error(parser, option, msg):
    # type: (OptionParser, Option, str) -> None
    """Abort option parsing with a formatted error message.

    Collapses runs of whitespace, wraps the text, and delegates to
    parser.error(), which prints the message and exits.
    """
    text = " ".join(f"{option} error: {msg}".split())
    parser.error(textwrap.fill(text))
def make_option_group(group, parser):
    # type: (Dict[str, Any], ConfigOptionParser) -> OptionGroup
    """Create an OptionGroup titled group["name"], populated by invoking
    every option factory listed under group["options"]."""
    grp = OptionGroup(parser, group["name"])
    for factory in group["options"]:
        grp.add_option(factory())
    return grp
def check_install_build_global(options, check_options=None):
    # type: (Values, Optional[Values]) -> None
    """Disable wheels when per-setup.py options are in use.

    If any of build_options / global_options / install_options is set on
    check_options (defaulting to options), binaries are disallowed via
    options.format_control and a warning is emitted, because those flags
    only apply to source builds.
    """
    if check_options is None:
        check_options = options

    option_names = ("build_options", "global_options", "install_options")
    if any(getattr(check_options, name, None) for name in option_names):
        control = options.format_control
        control.disallow_binaries()
        warnings.warn(
            "Disabling all use of wheels due to the use of --build-option "
            "/ --global-option / --install-option.",
            stacklevel=2,
        )
def check_dist_restriction(options, check_target=False):
    # type: (Values, bool) -> None
    """Validate that platform/interpreter restriction flags are usable.

    Raises CommandError when --python-version/--platform/--abi/
    --implementation are combined with settings that would still allow
    sdist dependencies, or (when check_target) are used without --target.
    """
    restrictions_used = any(
        (
            options.python_version,
            options.platforms,
            options.abis,
            options.implementation,
        )
    )

    binary_only = FormatControl(set(), {":all:"})
    # Sdists may be pulled in unless binaries-only is forced or deps are off.
    sdist_deps_possible = (
        options.format_control != binary_only and not options.ignore_dependencies
    )
    if restrictions_used and sdist_deps_possible:
        raise CommandError(
            "When restricting platform and interpreter constraints using "
            "--python-version, --platform, --abi, or --implementation, "
            "either --no-deps must be set, or --only-binary=:all: must be "
            "set and --no-binary must not be set (or must be set to "
            ":none:)."
        )

    if check_target and restrictions_used and not options.target_dir:
        raise CommandError(
            "Can not use any platform or abi specific options unless "
            "installing via '--target'"
        )
def _path_option_check(option, opt, value):
    # type: (Option, str, str) -> str
    """optparse type checker for "path": expand a leading ~ to the home dir."""
    return os.path.expanduser(value)
def _package_name_option_check(option, opt, value):
    # type: (Option, str, str) -> str
    """optparse type checker for "package_name": normalize per PEP 503."""
    return canonicalize_name(value)
class PipOption(Option):
    """optparse Option subclass adding pip-specific value types.

    "path" expands a leading ~; "package_name" canonicalizes the name
    (PEP 503 style) before it is stored.
    """
    TYPES = Option.TYPES + ("path", "package_name")
    TYPE_CHECKER = Option.TYPE_CHECKER.copy()
    TYPE_CHECKER["package_name"] = _package_name_option_check
    TYPE_CHECKER["path"] = _path_option_check
p.",
)
isolated_mode = partial(
Option,
"--isolated",
dest="isolated_mode",
action="store_true",
default=False,
help=(
"Run pip in an isolated mode, ignoring environment variables and user "
"configuration."
),
)
require_virtualenv = partial(
Option,
"--require-virtualenv",
"--require-venv",
dest="require_venv",
action="store_true",
default=False,
help=SUPPRESS_HELP,
)
verbose = partial(
Option,
"-v",
"--verbose",
dest="verbose",
action="count",
default=0,
help="Give more output. Option is additive, and can be used up to 3 times.",
)
no_color = partial(
Option,
"--no-color",
dest="no_color",
action="store_true",
default=False,
help="Suppress colored output.",
)
version = partial(
Option,
"-V",
"--version",
dest="version",
action="store_true",
help="Show version and exit.",
)
quiet = partial(
Option,
"-q",
"--quiet",
dest="quiet",
action="count",
default=0,
help=(
"Give less output. Option is additive, and can be used up to 3"
" times (corresponding to WARNING, ERROR, and CRITICAL logging"
" levels)."
),
)
progress_bar = partial(
Option,
"--progress-bar",
dest="progress_bar",
type="choice",
choices=list(BAR_TYPES.keys()),
default="on",
help=(
"Specify type of progress to be displayed ["
+ "|".join(BAR_TYPES.keys())
+ "] (default: %default)"
),
)
log = partial(
PipOption,
"--log",
"--log-file",
"--local-log",
dest="log",
metavar="path",
type="path",
help="Path to a verbose appending log.",
)
no_input = partial(
Option,
"--no-input",
dest="no_input",
action="store_true",
default=False,
help="Disable prompting for input.",
) # type: Callable[..., Option]
proxy = partial(
Option,
"--proxy",
dest="proxy",
type="str",
default="",
help="Specify a proxy in the form [user:passwd@]proxy.server:port.",
) # type: Callable[..., Option]
retries = partial(
Option,
"--retries",
dest="retries",
type="int",
default=5,
help="Maximum number of retries each connection should attempt "
"(default %default times).",
) # type: Callable[..., Option]
timeout = partial(
Option,
"--timeout",
"--default-timeout",
metavar="sec",
dest="timeout",
type="float",
default=15,
help="Set the socket timeout (default %default seconds).",
) # type: Callable[..., Option]
def exists_action():
    # type: () -> Option
    """Option factory for --exists-action: what to do when a checkout or
    install path already exists (switch/ignore/wipe/backup/abort)."""
    return Option(
        "--exists-action",
        dest="exists_action",
        metavar="action",
        type="choice",
        choices=["s", "i", "w", "b", "a"],
        action="append",
        default=[],
        help="Default action when a path already exists: "
        "(s)witch, (i)gnore, (w)ipe, (b)ackup, (a)bort.",
    )
cert = partial(
PipOption,
"--cert",
dest="cert",
type="path",
metavar="path",
help=(
"Path to PEM-encoded CA certificate bundle. "
"If provided, overrides the default. "
"See 'SSL Certificate Verification' in pip documentation "
"for more information."
),
) # type: Callable[..., Option]
client_cert = partial(
PipOption,
"--client-cert",
dest="client_cert",
type="path",
default=None,
metavar="path",
help="Path to SSL client certificate, a single file containing the "
"private key and the certificate in PEM format.",
) # type: Callable[..., Option]
index_url = partial(
Option,
"-i",
"--index-url",
"--pypi-url",
dest="index_url",
metavar="URL",
default=PyPI.simple_url,
help="Base URL of the Python Package Index (default %default). "
"This should point to a repository compliant with PEP 503 "
"(the simple repository API) or a local directory laid out "
"in the same format.",
) # type: Callable[..., Option]
def extra_index_url():
    # type: () -> Option
    """Option factory for --extra-index-url (repeatable)."""
    help_text = (
        "Extra URLs of package indexes to use in addition to "
        "--index-url. Should follow the same rules as "
        "--index-url."
    )
    return Option(
        "--extra-index-url",
        dest="extra_index_urls",
        metavar="URL",
        action="append",
        default=[],
        help=help_text,
    )
no_index = partial(
Option,
"--no-index",
dest="no_index",
action="store_true",
default=False,
help="Ignore package index (only looking at --find-links URLs instead).",
) # type: Callable[..., Option]
def find_links():
    # type: () -> Option
    """Option factory for -f/--find-links (repeatable location of archives)."""
    help_text = (
        "If a URL or path to an html file, then parse for links to "
        "archives such as sdist (.tar.gz) or wheel (.whl) files. "
        "If a local path or file:// URL that's a directory, "
        "then look for archives in the directory listing. "
        "Links to VCS project URLs are not supported."
    )
    return Option(
        "-f",
        "--find-links",
        dest="find_links",
        metavar="url",
        action="append",
        default=[],
        help=help_text,
    )
def trusted_host():
    # type: () -> Option
    """Option factory for --trusted-host (repeatable host[:port] allowlist)."""
    help_text = (
        "Mark this host or host:port pair as trusted, even though it "
        "does not have valid or any HTTPS."
    )
    return Option(
        "--trusted-host",
        dest="trusted_hosts",
        metavar="HOSTNAME",
        action="append",
        default=[],
        help=help_text,
    )
def constraints():
    # type: () -> Option
    """Option factory for -c/--constraint (repeatable constraints file)."""
    help_text = (
        "Constrain versions using the given constraints file. "
        "This option can be used multiple times."
    )
    return Option(
        "-c",
        "--constraint",
        dest="constraints",
        metavar="file",
        action="append",
        default=[],
        help=help_text,
    )
def requirements():
    # type: () -> Option
    """Option factory for -r/--requirement (repeatable requirements file)."""
    help_text = (
        "Install from the given requirements file. "
        "This option can be used multiple times."
    )
    return Option(
        "-r",
        "--requirement",
        dest="requirements",
        metavar="file",
        action="append",
        default=[],
        help=help_text,
    )
def editable():
    # type: () -> Option
    """Option factory for -e/--editable (repeatable editable install)."""
    help_text = (
        "Install a project in editable mode (i.e. setuptools "
        '"develop mode") from a local project path or a VCS url.'
    )
    return Option(
        "-e",
        "--editable",
        dest="editables",
        metavar="path/url",
        action="append",
        default=[],
        help=help_text,
    )
def _handle_src(option, opt_str, value, parser):
    # type: (Option, str, str, OptionParser) -> None
    """optparse callback for --src: store the absolute form of the path."""
    setattr(parser.values, option.dest, os.path.abspath(value))
src = partial(
PipOption,
"--src",
"--source",
"--source-dir",
"--source-directory",
dest="src_dir",
type="path",
metavar="dir",
default=get_src_prefix(),
action="callback",
callback=_handle_src,
help="Directory to check out editable projects into. "
'The default in a virtualenv is "<venv path>/src". '
'The default for global installs is "<current dir>/src".',
)
def _get_format_control(values, option):
    # type: (Values, Option) -> Any
    """Return the FormatControl object stored on values under option.dest."""
    return getattr(values, option.dest)
def _handle_no_binary(option, opt_str, value, parser):
    """optparse callback for --no-binary: merge the value into the shared
    FormatControl, removing conflicting entries from the only-binary set."""
    existing = _get_format_control(parser.values, option)
    FormatControl.handle_mutual_excludes(
        value,
        existing.no_binary,
        existing.only_binary,
    )
def _handle_only_binary(option, opt_str, value, parser):
    """optparse callback for --only-binary: merge the value into the shared
    FormatControl, removing conflicting entries from the no-binary set."""
    existing = _get_format_control(parser.values, option)
    FormatControl.handle_mutual_excludes(
        value,
        existing.only_binary,
        existing.no_binary,
    )
def no_binary():
    """Option factory for --no-binary (repeatable; merged via callback)."""
    # Each call gets a fresh FormatControl as the default so parsers do
    # not share mutable state.
    format_control = FormatControl(set(), set())
    return Option(
        "--no-binary",
        dest="format_control",
        action="callback",
        callback=_handle_no_binary,
        type="str",
        default=format_control,
        help="Do not use binary packages. Can be supplied multiple times, and "
        'each time adds to the existing value. Accepts either ":all:" to '
        'disable all binary packages, ":none:" to empty the set (notice '
        "the colons), or one or more package names with commas between "
        "them (no colons). Note that some packages are tricky to compile "
        "and may fail to install when this option is used on them.",
    )
def only_binary():
    """Option factory for --only-binary (repeatable; merged via callback)."""
    # Fresh FormatControl default, for the same reason as no_binary().
    format_control = FormatControl(set(), set())
    return Option(
        "--only-binary",
        dest="format_control",
        action="callback",
        callback=_handle_only_binary,
        type="str",
        default=format_control,
        help="Do not use source packages. Can be supplied multiple times, and "
        'each time adds to the existing value. Accepts either ":all:" to '
        'disable all source packages, ":none:" to empty the set, or one '
        "or more package names with commas between them. Packages "
        "without binary distributions will fail to install when this "
        "option is used on them.",
    )
platforms = partial(
Option,
"--platform",
dest="platforms",
metavar="platform",
action="append",
default=None,
help=(
"Only use wheels compatible with <platform>. Defaults to the "
"platform of the running system. Use this option multiple times to "
"specify multiple platforms supported by the target interpreter."
),
)
def _convert_python_version(value):
    # type: (str) -> Tuple[Tuple[int, ...], Optional[str]]
    """Parse a --python-version string into (version_info, error_msg).

    An empty value yields (None, None), meaning "use the running
    interpreter's version". A dotless major-minor form such as "37" is
    treated as "3.7". On error, version_info is () and error_msg
    describes the problem.
    """
    if not value:
        return (None, None)

    parts = value.split(".")
    if len(parts) > 3:
        return ((), "at most three version parts are allowed")

    if len(parts) == 1:
        # Accept e.g. "37" as shorthand for "3.7".
        raw = parts[0]
        if len(raw) > 1:
            parts = [raw[0], raw[1:]]

    try:
        version_info = tuple(int(part) for part in parts)
    except ValueError:
        return ((), "each version part must be an integer")

    return (version_info, None)
def _handle_python_version(option, opt_str, value, parser):
    # type: (Option, str, str, OptionParser) -> None
    """optparse callback for --python-version: parse, validate, and store."""
    version_info, error_msg = _convert_python_version(value)
    if error_msg is not None:
        raise_option_error(
            parser,
            option=option,
            msg="invalid --python-version value: {!r}: {}".format(value, error_msg),
        )
    parser.values.python_version = version_info
python_version = partial(
Option,
"--python-version",
dest="python_version",
metavar="python_version",
action="callback",
callback=_handle_python_version,
type="str",
default=None,
help=dedent(
"""\
The Python interpreter version to use for wheel and "Requires-Python"
compatibility checks. Defaults to a version derived from the running
interpreter. The version can be specified using up to three dot-separated
integers (e.g. "3" for 3.0.0, "3.7" for 3.7.0, or "3.7.3"). A major-minor
version can also be given as a string without dots (e.g. "37" for 3.7.0).
"""
),
)
implementation = partial(
Option,
"--implementation",
dest="implementation",
metavar="implementation",
default=None,
help=(
"Only use wheels compatible with Python "
"implementation <implementation>, e.g. 'pp', 'jy', 'cp', "
" or 'ip'. If not specified, then the current "
"interpreter implementation is used. Use 'py' to force "
"implementation-agnostic wheels."
),
)
abis = partial(
Option,
"--abi",
dest="abis",
metavar="abi",
action="append",
default=None,
help=(
"Only use wheels compatible with Python abi <abi>, e.g. 'pypy_41'. "
"If not specified, then the current interpreter abi tag is used. "
"Use this option multiple times to specify multiple abis supported "
"by the target interpreter. Generally you will need to specify "
"--implementation, --platform, and --python-version when using this "
"option."
),
)
def add_target_python_options(cmd_opts):
    """Register the target-interpreter options (--platform, --python-version,
    --implementation, --abi) on the given option group."""
    cmd_opts.add_option(platforms())
    cmd_opts.add_option(python_version())
    cmd_opts.add_option(implementation())
    cmd_opts.add_option(abis())
def make_target_python(options):
    """Build a TargetPython from the parsed target-interpreter options."""
    target_python = TargetPython(
        platforms=options.platforms,
        py_version_info=options.python_version,
        abis=options.abis,
        implementation=options.implementation,
    )
    return target_python
def prefer_binary():
    """Option factory for --prefer-binary (favor wheels over newer sdists)."""
    return Option(
        "--prefer-binary",
        dest="prefer_binary",
        action="store_true",
        default=False,
        help="Prefer older binary packages over newer source packages.",
    )
cache_dir = partial(
PipOption,
"--cache-dir",
dest="cache_dir",
default=USER_CACHE_DIR,
metavar="dir",
type="path",
help="Store the cache data in <dir>.",
)
def _handle_no_cache_dir(option, opt, value, parser):
    """optparse callback for --no-cache-dir: always disable the cache."""
    # value is None when the flag comes from the command line; however,
    # the value can be non-None if the option is triggered e.g. by an
    # environment variable, like PIP_NO_CACHE_DIR=true.
    if value is not None:
        # Then parse the string value to get argument error-checking.
        try:
            strtobool(value)
        except ValueError as exc:
            raise_option_error(parser, option=option, msg=str(exc))
    # Originally, setting PIP_NO_CACHE_DIR to a value that strtobool()
    # converted to 0 (like "false" or "no") caused cache_dir to be disabled
    # rather than enabled (logic would say the latter). Thus, we disable
    # the cache directory not just on values that parse to True, but (for
    # backwards compatibility reasons) also on values that parse to False.
    # In other words, always set it to False if the option is provided in
    # some (valid) form.
    parser.values.cache_dir = False
no_cache = partial(
Option,
"--no-cache-dir",
dest="cache_dir",
action="callback",
callback=_handle_no_cache_dir,
help="Disable the cache.",
) # type: Callable[..., Option]
no_deps = partial(
Option,
"--no-deps",
"--no-dependencies",
dest="ignore_dependencies",
action="store_true",
default=False,
help="Don't install package dependencies.",
)
build_dir = partial(
PipOption,
"-b",
"--build",
"--build-dir",
"--build-directory",
dest="build_dir",
type="path",
metavar="dir",
help=SUPPRESS_HELP,
)
ignore_requires_python = partial(
Option,
"--ignore-requires-python",
dest="ignore_requires_python",
action="store_true",
help="Ignore the Requires-Python information.",
)
no_build_isolation = partial(
Option,
"--no-build-isolation",
dest="build_isolation",
action="store_false",
default=True,
help="Disable isolation when building a modern source distribution. "
"Build dependencies specified by PEP 518 must be already installed "
"if this option is used.",
)
def _handle_no_use_pep517(option, opt, value, parser):
    """optparse callback for --no-use-pep517: force legacy builds."""
    # The value argument is None when the flag is given on the command
    # line (the option takes no argument). However, the value can be
    # non-None if the option is triggered e.g.
    # by an environment variable, for example "PIP_NO_USE_PEP517=true".
    if value is not None:
        msg = """A value was passed for --no-use-pep517,
        probably using either the PIP_NO_USE_PEP517 environment variable
        or the "no-use-pep517" config file option. Use an appropriate value
        of the PIP_USE_PEP517 environment variable or the "use-pep517"
        config file option instead.
        """
        raise_option_error(parser, option=option, msg=msg)
    # Otherwise, --no-use-pep517 was passed via the command-line.
    parser.values.use_pep517 = False
use_pep517 = partial(
Option,
"--use-pep517",
dest="use_pep517",
action="store_true",
default=None,
help="Use PEP 517 for building source distributions "
"(use --no-use-pep517 to force legacy behaviour).",
) # type: Any
no_use_pep517 = partial(
Option,
"--no-use-pep517",
dest="use_pep517",
action="callback",
callback=_handle_no_use_pep517,
default=None,
help=SUPPRESS_HELP,
) # type: Any
install_options = partial(
Option,
"--install-option",
dest="install_options",
action="append",
metavar="options",
help="Extra arguments to be supplied to the setup.py install "
'command (use like --install-option="--install-scripts=/usr/local/'
'bin"). Use multiple --install-option options to pass multiple '
"options to setup.py install. If you are using an option with a "
"directory path, be sure to use absolute path.",
) # type: Callable[..., Option]
build_options = partial(
Option,
"--build-option",
dest="build_options",
metavar="options",
action="append",
help="Extra arguments to be supplied to 'setup.py bdist_wheel'.",
) # type: Callable[..., Option]
global_options = partial(
Option,
"--global-option",
dest="global_options",
action="append",
metavar="options",
help="Extra global options to be supplied to the setup.py "
"call before the install or bdist_wheel command.",
) # type: Callable[..., Option]
no_clean = partial(
Option,
"--no-clean",
action="store_true",
default=False,
help="Don't clean up build directories.",
)
pre = partial(
Option,
"--pre",
action="store_true",
default=False,
help="Include pre-release and development versions. By default, "
"pip only finds stable versions.",
)
disable_pip_version_check = partial(
Option,
"--disable-pip-version-check",
dest="disable_pip_version_check",
action="store_true",
default=False,
help="Don't periodically check PyPI to determine whether a new version "
"of pip is available for download. Implied with --no-index.",
) # type: Callable[..., Option]
def _handle_merge_hash(option, opt_str, value, parser):
    # type: (Option, str, str, OptionParser) -> None
    """optparse callback for --hash: given "algo:digest", append the digest
    to the list stored under the algo name in parser.values.hashes."""
    if not parser.values.hashes:
        parser.values.hashes = {}
    try:
        # Split only on the first ":" so digests are taken verbatim.
        algo, digest = value.split(":", 1)
    except ValueError:
        parser.error(
            "Arguments to {} must be a hash name " # noqa
            "followed by a value, like --hash=sha256:"
            "abcde...".format(opt_str)
        )
    # Only algorithms considered strong (per STRONG_HASHES) are accepted.
    if algo not in STRONG_HASHES:
        parser.error(
            "Allowed hash algorithms for {} are {}.".format( # noqa
                opt_str, ", ".join(STRONG_HASHES)
            )
        )
    parser.values.hashes.setdefault(algo, []).append(digest)
hash = partial(
Option,
"--hash",
# Hash values eventually end up in InstallRequirement.hashes due to
# __dict__ copying in process_line().
dest="hashes",
action="callback",
callback=_handle_merge_hash,
type="string",
help="Verify that the package's archive matches this "
"hash before installing. Example: --hash=sha256:abcdef...",
)
require_hashes = partial(
Option,
"--require-hashes",
dest="require_hashes",
action="store_true",
default=False,
help="Require a hash to check each requirement against, for "
"repeatable installs. This option is implied when any package in a "
"requirements file has a --hash option.",
)
list_path = partial(
PipOption,
"--path",
dest="path",
type="path",
action="append",
help="Restrict to the specified installation path for listing "
"packages (can be used multiple times).",
)
def check_list_path_option(options):
    """Reject --path combined with --user or --local (mutually exclusive)."""
    if options.path and (options.user or options.local):
        raise CommandError("Cannot combine '--path' with '--user' or '--local'")
list_exclude = partial(
PipOption,
"--exclude",
dest="excludes",
action="append",
metavar="package",
type="package_name",
help="Exclude specified package from the output",
)
no_python_version_warning = partial(
Option,
"--no-python-version-warning",
dest="no_python_version_warning",
action="store_true",
default=False,
help="Silence deprecation warnings for upcoming unsupported Pythons.",
)
use_new_feature = partial(
Option,
"--use-feature",
dest="features_enabled",
metavar="feature",
action="append",
default=[],
choices=["2020-resolver", "fast-deps", "in-tree-build"],
help="Enable new functionality, that may be backward incompatible.",
)
use_deprecated_feature = partial(
Option,
"--use-deprecated",
dest="deprecated_features_enabled",
metavar="feature",
action="append",
default=[],
choices=["legacy-resolver"],
help=("Enable deprecated functionality, that will be removed in the future."),
)
isolated_mode,
require_virtualenv,
verbose,
version,
quiet,
log,
no_input,
proxy,
retries,
timeout,
exists_action,
trusted_host,
cert,
client_cert,
cache_dir,
no_cache,
disable_pip_version_check,
no_color,
no_python_version_warning,
use_new_feature,
use_deprecated_feature,
],
}
# Options controlling where packages are discovered (index/find-links),
# shared across pip commands via make_option_group().
index_group = {
    "name": "Package Index Options",
    "options": [
        index_url,
        extra_index_url,
        no_index,
        find_links,
    ],
}
| true | true |
f71c0e613c8658e9c5d1b5f8b68e473cd366b6c5 | 8,773 | py | Python | src/sst/elements/memHierarchy/tests/testsuite_default_memHierarchy_sdl.py | sudhanshu2/sst-elements | d658e5e4b26e5725488f9e93528506ddb22072ee | [
"BSD-3-Clause"
] | null | null | null | src/sst/elements/memHierarchy/tests/testsuite_default_memHierarchy_sdl.py | sudhanshu2/sst-elements | d658e5e4b26e5725488f9e93528506ddb22072ee | [
"BSD-3-Clause"
] | null | null | null | src/sst/elements/memHierarchy/tests/testsuite_default_memHierarchy_sdl.py | sudhanshu2/sst-elements | d658e5e4b26e5725488f9e93528506ddb22072ee | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
from sst_unittest import *
from sst_unittest_support import *
import os.path
################################################################################
# Code to support a single instance module initialize, must be called setUp method
# Module-level one-time-init guard shared by all tests in this file.
module_init = 0
module_sema = threading.Semaphore()

def initializeTestModule_SingleInstance(class_inst):
    """Run one-time, thread-safe initialization for this test module.

    Guarded by module_sema so that concurrent test threads execute the
    init code at most once; later calls are no-ops.

    :param class_inst: the test-case instance triggering the init (unused
        by the default placeholder body).
    """
    global module_init
    global module_sema

    module_sema.acquire()
    try:
        if module_init != 1:
            try:
                # Put your single instance Init Code Here
                pass
            except Exception:
                # Init is best-effort; a failure must not abort the suite.
                pass
            module_init = 1
    finally:
        # Always release, even if init raises, so later tests in this
        # module cannot deadlock on the semaphore.
        module_sema.release()
################################################################################
################################################################################
################################################################################
class testcase_memHierarchy_sdl(SSTTestCase):
    def initializeClass(self, testName):
        """Framework hook run before testing starts (once per test)."""
        # NOTE(review): super(type(self), self) recurses infinitely if this
        # class is ever subclassed; presumably the repo-wide testsuite
        # convention — confirm before changing.
        super(type(self), self).initializeClass(testName)
        # Put test based setup code here. it is called before testing starts
        # NOTE: This method is called once for every test
    def setUp(self):
        """Per-test setup: run base-class setUp, then one-time module init."""
        super(type(self), self).setUp()
        initializeTestModule_SingleInstance(self)
        # Put test based setup code here. it is called once before every test
    def tearDown(self):
        """Per-test teardown: local cleanup first, then base-class tearDown."""
        # Put test based teardown code here. it is called once after every test
        super(type(self), self).tearDown()
#####
    def test_memHierarchy_sdl_1(self):
        # sdl-1 Simple CPU + 1 level cache + Memory
        self.memHierarchy_Template("sdl-1")
    def test_memHierarchy_sdl_2(self):
        # sdl-2 Simple CPU + 1 level cache + DRAMSim Memory
        self.memHierarchy_Template("sdl-2")
    def test_memHierarchy_sdl_3(self):
        # sdl-3 Simple CPU + 1 level cache + DRAMSim Memory (alternate block size)
        self.memHierarchy_Template("sdl-3")
    def test_memHierarchy_sdl2_1(self):
        # sdl2-1 Simple CPU + 2 levels cache + Memory
        self.memHierarchy_Template("sdl2-1")
    def test_memHierarchy_sdl3_1(self):
        # sdl3-1 2 Simple CPUs + 2 levels cache + Memory
        self.memHierarchy_Template("sdl3-1")
    def test_memHierarchy_sdl3_2(self):
        # sdl3-2 2 Simple CPUs + 2 levels cache + DRAMSim Memory
        self.memHierarchy_Template("sdl3-2")
    def test_memHierarchy_sdl3_3(self):
        # sdl3-3 variant of the sdl3 topology (see sdl3-3.py for details)
        self.memHierarchy_Template("sdl3-3")
    def test_memHierarchy_sdl4_1(self):
        # sdl4-1 variant of the sdl4 topology (see sdl4-1.py for details)
        self.memHierarchy_Template("sdl4-1")
    @skip_on_sstsimulator_conf_empty_str("DRAMSIM", "LIBDIR", "DRAMSIM is not included as part of this build")
    def test_memHierarchy_sdl4_2_dramsim(self):
        # sdl4-2 with DRAMSim backend; stderr output is expected, so ignored
        self.memHierarchy_Template("sdl4-2", ignore_err_file=True)
    @skip_on_sstsimulator_conf_empty_str("RAMULATOR", "LIBDIR", "RAMULATOR is not included as part of this build")
    def test_memHierarchy_sdl4_2_ramulator(self):
        # sdl4-2 with the Ramulator memory backend
        self.memHierarchy_Template("sdl4-2-ramulator")
    @skip_on_sstsimulator_conf_empty_str("DRAMSIM", "LIBDIR", "DRAMSIM is not included as part of this build")
    def test_memHierarchy_sdl5_1_dramsim(self):
        # sdl5-1 with DRAMSim backend; stderr output is expected, so ignored
        self.memHierarchy_Template("sdl5-1", ignore_err_file=True)
    @skip_on_sstsimulator_conf_empty_str("RAMULATOR", "LIBDIR", "RAMULATOR is not included as part of this build")
    def test_memHierarchy_sdl5_1_ramulator(self):
        # Multi-rank/multi-thread runs use a separate reference ("_MC") config
        if testing_check_get_num_ranks() > 1 or testing_check_get_num_threads() > 1:
            self.memHierarchy_Template("sdl5-1-ramulator_MC")
        else:
            self.memHierarchy_Template("sdl5-1-ramulator")
    def test_memHierarchy_sdl8_1(self):
        # sdl8-1 topology (see sdl8-1.py for details)
        self.memHierarchy_Template("sdl8-1")
    def test_memHierarchy_sdl8_3(self):
        # sdl8-3 topology (see sdl8-3.py for details)
        self.memHierarchy_Template("sdl8-3")
    def test_memHierarchy_sdl8_4(self):
        # sdl8-4 topology (see sdl8-4.py for details)
        self.memHierarchy_Template("sdl8-4")
    def test_memHierarchy_sdl9_1(self):
        # sdl9-1 topology (see sdl9-1.py for details)
        self.memHierarchy_Template("sdl9-1")
    def test_memHierarchy_sdl9_2(self):
        # sdl9-2 topology (see sdl9-2.py for details)
        self.memHierarchy_Template("sdl9-2")
#####
def memHierarchy_Template(self, testcase, ignore_err_file=False):
# Get the path to the test files
test_path = self.get_testsuite_dir()
outdir = self.get_test_output_run_dir()
tmpdir = self.get_test_output_tmp_dir()
# Some tweeking of file names are due to inconsistencys with testcase name
testcasename_sdl = testcase.replace("_MC", "")
testcasename_out = testcase.replace("-", "_")
# Set the various file paths
testDataFileName=("test_memHierarchy_{0}".format(testcasename_out))
sdlfile = "{0}/{1}.py".format(test_path, testcasename_sdl)
reffile = "{0}/refFiles/{1}.out".format(test_path, testDataFileName)
outfile = "{0}/{1}.out".format(outdir, testDataFileName)
errfile = "{0}/{1}.err".format(outdir, testDataFileName)
mpioutfiles = "{0}/{1}.testfile".format(outdir, testDataFileName)
log_debug("testcase = {0}".format(testcase))
log_debug("sdl file = {0}".format(sdlfile))
log_debug("ref file = {0}".format(reffile))
# Run SST in the tests directory
self.run_sst(sdlfile, outfile, errfile, set_cwd=test_path, mpi_out_files=mpioutfiles)
# Lines to ignore
# These are generated by DRAMSim
ignore_lines = ["===== MemorySystem"]
ignore_lines.append("TOTAL_STORAGE : 2048MB | 1 Ranks | 16 Devices per rank")
ignore_lines.append("== Loading")
ignore_lines.append("DRAMSim2 Clock Frequency =1Hz, CPU Clock Frequency=1Hz")
ignore_lines.append("WARNING: UNKNOWN KEY 'DEBUG_TRANS_FLOW' IN INI FILE")
# This is generated by SST when the number of ranks/threads > # of components
ignore_lines.append("WARNING: No components are assigned to")
#These are warnings/info generated by SST/memH in debug mode
ignore_lines.append("Notice: memory controller's region is larger than the backend's mem_size")
ignore_lines.append("Region: start=")
# This may be present if ranks < 2
ignore_lines.append("not aligned to the request size")
# Statistics that count occupancy on each cycle sometimes diff in parallel execution
# due to the synchronization interval sometimes allowing the clock to run ahead a cycle or so
tol_stats = { "outstanding_requests" : [0, 0, 20, 0, 0], # Only diffs in number of cycles
"total_cycles" : [20, 'X', 20, 20, 20], # This stat is set once at the end of sim. May vary in all fields
"MSHR_occupancy" : [0, 0, 20, 0, 0] } # Only diffs in number of cycles
filesAreTheSame, statDiffs, othDiffs = testing_stat_output_diff(outfile, reffile, ignore_lines, tol_stats, True)
# Perform the tests
if ignore_err_file is False:
if os_test_file(errfile, "-s"):
log_testing_note("memHierarchy SDL test {0} has a Non-Empty Error File {1}".format(testDataFileName, errfile))
if filesAreTheSame:
log_debug(" -- Output file {0} passed check against the Reference File {1}".format(outfile, reffile))
else:
diffdata = self._prettyPrintDiffs(statDiffs, othDiffs)
log_failure(diffdata)
self.assertTrue(filesAreTheSame, "Output file {0} does not pass check against the Reference File {1} ".format(outfile, reffile))
###
# Remove lines containing any string found in 'remove_strs' from in_file
# If out_file != None, output is out_file
# Otherwise, in_file is overwritten
def _remove_lines_cleanup_file(self, remove_strs, in_file, out_file = None, append = False):
with open(in_file, 'r') as fp:
lines = fp.readlines()
if out_file == None:
out_file = in_file
if append == True:
mode = 'a'
else:
mode = 'w'
with open(out_file, mode) as fp:
if not append:
fp.truncate(0)
for line in lines:
skip = False
for search in remove_strs:
if search in line:
skip = True
continue
if not skip:
fp.write(line)
def _prettyPrintDiffs(self, stat_diff, oth_diff):
out = ""
if len(stat_diff) != 0:
out = "Statistic diffs:\n"
for x in stat_diff:
out += (x[0] + " " + ",".join(str(y) for y in x[1:]) + "\n")
if len(oth_diff) != 0:
out += "Non-statistic diffs:\n"
for x in oth_diff:
out += x[0] + " " + x[1] + "\n"
return out
| 40.428571 | 140 | 0.619514 |
from sst_unittest import *
from sst_unittest_support import *
import os.path
| true | true |
f71c0f38bd33c235f501fe3157c2816cd7eb598d | 2,430 | py | Python | data/p4VQE/R4/benchmark/startQiskit_noisy62.py | UCLA-SEAL/QDiff | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | [
"BSD-3-Clause"
] | null | null | null | data/p4VQE/R4/benchmark/startQiskit_noisy62.py | UCLA-SEAL/QDiff | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | [
"BSD-3-Clause"
] | null | null | null | data/p4VQE/R4/benchmark/startQiskit_noisy62.py | UCLA-SEAL/QDiff | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | [
"BSD-3-Clause"
] | null | null | null | # qubit number=3
# total number=10
import numpy as np
from qiskit import QuantumCircuit, execute, Aer, QuantumRegister, ClassicalRegister, transpile, BasicAer, IBMQ
import networkx as nx
from qiskit.visualization import plot_histogram
from typing import *
from pprint import pprint
from math import log2
from collections import Counter
from qiskit.test.mock import FakeVigo, FakeYorktown
kernel = 'circuit/bernstein'
def make_circuit(n:int) -> QuantumCircuit:
    """Build the generated n-qubit demo circuit.

    Relies on the module-level globals E (weighted edge list), V (node
    array), gamma and beta, which the __main__ block defines before
    calling this function. The gate sequence is fixed for 4 qubits.
    """
    qreg = QuantumRegister(n, "qc")
    circuit = QuantumCircuit(qreg)
    # Uniform superposition on qubits 0..3, then a Y gate on the last one.
    for q in range(4):
        circuit.h(qreg[q])
    circuit.y(qreg[3])
    # Per-edge layer: controlled-phase between the endpoints (1-based
    # indices in E, hence the -1) plus single-qubit phases.
    for edge in E:
        src = edge[0]
        dst = edge[1]
        circuit.cp(-2 * gamma, qreg[src - 1], qreg[dst - 1])
        circuit.p(gamma, src)
        circuit.p(gamma, dst)
    # Mixing rotation on every qubit.
    circuit.rx(2 * beta, range(len(V)))
    # Trailing gates kept from the generated source: the two swaps cancel
    # each other, as do the two Y gates.
    circuit.swap(qreg[1], qreg[0])
    circuit.swap(qreg[1], qreg[0])
    circuit.y(qreg[3])
    circuit.y(qreg[3])
    return circuit
if __name__ == '__main__':
    # Build a 4-node weighted graph (nodes V, weighted edges E).
    n = 4
    V = np.arange(0, n, 1)
    E = [(0, 1, 1.0), (0, 2, 1.0), (1, 2, 1.0), (3, 2, 1.0), (3, 1, 1.0)]
    G = nx.Graph()
    G.add_nodes_from(V)
    G.add_weighted_edges_from(E)
    # Grid search over (gamma, beta) in [0, pi) for the maximum of F1.
    # NOTE(review): F1 looks like an analytic p=1 QAOA objective — confirm
    # against the source this script was generated from.
    step_size = 0.1
    a_gamma = np.arange(0, np.pi, step_size)
    a_beta = np.arange(0, np.pi, step_size)
    a_gamma, a_beta = np.meshgrid(a_gamma, a_beta)
    F1 = 3 - (np.sin(2 * a_beta) ** 2 * np.sin(2 * a_gamma) ** 2 - 0.5 * np.sin(4 * a_beta) * np.sin(4 * a_gamma)) * (
        1 + np.cos(4 * a_gamma) ** 2)
    # Take the first grid point achieving the maximum of F1.
    result = np.where(F1 == np.amax(F1))
    a = list(zip(result[0], result[1]))[0]
    gamma = a[0] * step_size
    beta = a[1] * step_size
    prog = make_circuit(4)
    sample_shot =5600
    writefile = open("../data/startQiskit_noisy62.csv", "w")
    # prog.draw('mpl', filename=(kernel + '.png'))
    # Transpile for the noisy FakeYorktown backend, measure all qubits,
    # and write the resulting counts plus circuit info to the CSV file.
    backend = FakeYorktown()
    circuit1 = transpile(prog, FakeYorktown())
    circuit1.measure_all()
    prog = circuit1
    info = execute(prog,backend=backend, shots=sample_shot).result().get_counts()
    print(info, file=writefile)
    print("results end", file=writefile)
    print(circuit1.depth(), file=writefile)
    print(circuit1, file=writefile)
    writefile.close()
import numpy as np
from qiskit import QuantumCircuit, execute, Aer, QuantumRegister, ClassicalRegister, transpile, BasicAer, IBMQ
import networkx as nx
from qiskit.visualization import plot_histogram
from typing import *
from pprint import pprint
from math import log2
from collections import Counter
from qiskit.test.mock import FakeVigo, FakeYorktown
kernel = 'circuit/bernstein'
def make_circuit(n:int) -> QuantumCircuit:
input_qubit = QuantumRegister(n,"qc")
prog = QuantumCircuit(input_qubit)
prog.h(input_qubit[0])
prog.h(input_qubit[1])
prog.h(input_qubit[2])
prog.h(input_qubit[3])
prog.y(input_qubit[3])
for edge in E:
k = edge[0]
l = edge[1]
prog.cp(-2 * gamma, input_qubit[k-1], input_qubit[l-1])
prog.p(gamma, k)
prog.p(gamma, l)
prog.rx(2 * beta, range(len(V)))
prog.swap(input_qubit[1],input_qubit[0])
prog.swap(input_qubit[1],input_qubit[0])
prog.y(input_qubit[3])
prog.y(input_qubit[3])
return prog
if __name__ == '__main__':
n = 4
V = np.arange(0, n, 1)
E = [(0, 1, 1.0), (0, 2, 1.0), (1, 2, 1.0), (3, 2, 1.0), (3, 1, 1.0)]
G = nx.Graph()
G.add_nodes_from(V)
G.add_weighted_edges_from(E)
step_size = 0.1
a_gamma = np.arange(0, np.pi, step_size)
a_beta = np.arange(0, np.pi, step_size)
a_gamma, a_beta = np.meshgrid(a_gamma, a_beta)
F1 = 3 - (np.sin(2 * a_beta) ** 2 * np.sin(2 * a_gamma) ** 2 - 0.5 * np.sin(4 * a_beta) * np.sin(4 * a_gamma)) * (
1 + np.cos(4 * a_gamma) ** 2)
result = np.where(F1 == np.amax(F1))
a = list(zip(result[0], result[1]))[0]
gamma = a[0] * step_size
beta = a[1] * step_size
prog = make_circuit(4)
sample_shot =5600
writefile = open("../data/startQiskit_noisy62.csv", "w")
backend = FakeYorktown()
circuit1 = transpile(prog, FakeYorktown())
circuit1.measure_all()
prog = circuit1
info = execute(prog,backend=backend, shots=sample_shot).result().get_counts()
print(info, file=writefile)
print("results end", file=writefile)
print(circuit1.depth(), file=writefile)
print(circuit1, file=writefile)
writefile.close()
| true | true |
f71c0f815fea461fc753f56d6e32829a423105fd | 431 | py | Python | project/tasks/sample_tasks.py | idjemaoune/django-celery | 411e854fc63a4d42be2a6e8861c4dc6b89969161 | [
"MIT"
] | null | null | null | project/tasks/sample_tasks.py | idjemaoune/django-celery | 411e854fc63a4d42be2a6e8861c4dc6b89969161 | [
"MIT"
] | null | null | null | project/tasks/sample_tasks.py | idjemaoune/django-celery | 411e854fc63a4d42be2a6e8861c4dc6b89969161 | [
"MIT"
] | null | null | null | # project/tasks/sample_tasks.py
import time
from celery import shared_task
@shared_task
def send_email(email_id, message):
    """Celery task: simulate sending `message` to `email_id` (fixed 10 s delay)."""
    time.sleep(10)
    print(f"Email is sent to {email_id}. Message sent was - {message}")
@shared_task
def get_micro_app_status(app):
    """Celery task: print that micro-app `app` is up (French log message)."""
    print(f"La micro app {app}. est UP")
@shared_task
def create_task(task_type):
    """Celery task: sleep int(task_type) * 10 seconds, log, and return True.

    Raises ValueError if `task_type` is not parseable as an int.
    """
    time.sleep(int(task_type) * 10)
    print("je suis execueter")
    return True
import time
from celery import shared_task
@shared_task
def send_email(email_id, message):
time.sleep(10)
print(f"Email is sent to {email_id}. Message sent was - {message}")
@shared_task
def get_micro_app_status(app):
print(f"La micro app {app}. est UP")
@shared_task
def create_task(task_type):
time.sleep(int(task_type) * 10)
print("je suis execueter")
return True
| true | true |
f71c0fb67deffb5ad5e92f615b5c852fabdd95ff | 9,938 | py | Python | docs/_downloads/d923ca53b1bfbeb3c222ae46d65d485e/transfer_learning_tutorial.py | pleiades-s/PyTorch-tutorials-kr | 3d749ea2fe67363b5d46340b742308b744fa0419 | [
"BSD-3-Clause"
] | 2 | 2021-01-18T04:59:05.000Z | 2021-03-20T00:56:24.000Z | docs/_downloads/d923ca53b1bfbeb3c222ae46d65d485e/transfer_learning_tutorial.py | pleiades-s/PyTorch-tutorials-kr | 3d749ea2fe67363b5d46340b742308b744fa0419 | [
"BSD-3-Clause"
] | null | null | null | docs/_downloads/d923ca53b1bfbeb3c222ae46d65d485e/transfer_learning_tutorial.py | pleiades-s/PyTorch-tutorials-kr | 3d749ea2fe67363b5d46340b742308b744fa0419 | [
"BSD-3-Clause"
] | 1 | 2022-02-27T10:47:39.000Z | 2022-02-27T10:47:39.000Z | # -*- coding: utf-8 -*-
"""
컴퓨터 비전(Vision)을 위한 전이학습(Transfer Learning)
=======================================================
**Author**: `Sasank Chilamkurthy <https://chsasank.github.io>`_
**번역**: `박정환 <http://github.com/9bow>`_
이 튜토리얼에서는 전이학습(Transfer Learning)을 이용하여 이미지 분류를 위한
합성곱 신경망을 어떻게 학습시키는지 배워보겠습니다. 전이학습에 대해서는
`CS231n 노트 <http://cs231n.github.io/transfer-learning/>`__ 에서 더 많은 내용을
읽어보실 수 있습니다.
위 노트를 인용해보면,
실제로 충분한 크기의 데이터셋을 갖추기는 상대적으로 드물기 때문에,
(무작위 초기화를 통해) 맨 처음부터 합성곱 신경망(Convolutional
Network) 전체를 학습하는 사람은 매우 적습니다. 대신, 매우 큰 데이터셋(예.
100가지 분류에 대해 120만개의 이미지가 포함된 ImageNet)에서 합성곱
신경망(ConvNet)을 미리 학습한 후, 이 합성곱 신경망을 관심있는 작업
을 위한 초기 설정 또는 고정된 특징 추출기(fixed feature extractor)로 사용합니다.
이러한 전이학습 시나리오의 주요한 2가지는 다음과 같습니다:
- **합성곱 신경망의 미세조정(finetuning)**: 무작위 초기화 대신, 신경망을
ImageNet 1000 데이터셋 등으로 미리 학습한 신경망으로 초기화합니다. 학습의 나머지
과정들은 평상시와 같습니다.
- **고정된 특징 추출기로써의 합성곱 신경망**: 여기서는 마지막에 완전히 연결
된 계층을 제외한 모든 신경망의 가중치를 고정합니다. 이 마지막의 완전히 연결된
계층은 새로운 무작위의 가중치를 갖는 계층으로 대체되어 이 계층만 학습합니다.
"""
# License: BSD
# Author: Sasank Chilamkurthy
from __future__ import print_function, division
import torch
import torch.nn as nn
import torch.optim as optim
from torch.optim import lr_scheduler
import numpy as np
import torchvision
from torchvision import datasets, models, transforms
import matplotlib.pyplot as plt
import time
import os
import copy
plt.ion() # 대화형 모드
######################################################################
# 데이터 불러오기
# ---------------
#
# 데이터를 불러오기 위해 torchvision과 torch.utils.data 패키지를 사용하겠습니다.
#
# 여기서 풀고자 하는 문제는 **개미** 와 **벌** 을 분류하는 모델을 학습하는 것입니다.
# 개미와 벌 각각의 학습용 이미지는 대략 120장 정도 있고, 75개의 검증용 이미지가
# 있습니다. 일반적으로 맨 처음부터 학습을 한다면 이는 일반화하기에는 아주 작은
# 데이터셋입니다. 하지만 우리는 전이학습을 할 것이므로, 일반화를 제법 잘 할 수 있을
# 것입니다.
#
# 이 데이터셋은 ImageNet의 아주 작은 일부입니다.
#
# .. Note ::
# 데이터를 `여기 <https://download.pytorch.org/tutorial/hymenoptera_data.zip>`_
# 에서 다운로드 받아 현재 디렉토리에 압축을 푸십시오.
# 학습을 위해 데이터 증가(augmentation) 및 일반화(normalization)
# 검증을 위한 일반화
# Data augmentation + normalization for training; only resize/crop +
# normalization for validation. The mean/std values are the standard
# ImageNet channel statistics expected by the pretrained ResNet.
data_transforms = {
    'train': transforms.Compose([
        transforms.RandomResizedCrop(224),
        transforms.RandomHorizontalFlip(),
        transforms.ToTensor(),
        transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])
    ]),
    'val': transforms.Compose([
        transforms.Resize(256),
        transforms.CenterCrop(224),
        transforms.ToTensor(),
        transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])
    ]),
}
# Expects data/hymenoptera_data/{train,val}/<class_name>/* on disk.
data_dir = 'data/hymenoptera_data'
image_datasets = {x: datasets.ImageFolder(os.path.join(data_dir, x),
                                          data_transforms[x])
                  for x in ['train', 'val']}
dataloaders = {x: torch.utils.data.DataLoader(image_datasets[x], batch_size=4,
                                             shuffle=True, num_workers=4)
              for x in ['train', 'val']}
dataset_sizes = {x: len(image_datasets[x]) for x in ['train', 'val']}
class_names = image_datasets['train'].classes
# Use the first CUDA device when available, otherwise fall back to CPU.
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
######################################################################
# 일부 이미지 시각화하기
# ^^^^^^^^^^^^^^^^^^^^^^^^^
# 데이터 증가를 이해하기 위해 일부 학습용 이미지를 시각화해보겠습니다.
def imshow(inp, title=None):
    """Display a (C, H, W) image tensor, undoing the ImageNet normalization."""
    inp = inp.numpy().transpose((1, 2, 0))
    mean = np.array([0.485, 0.456, 0.406])
    std = np.array([0.229, 0.224, 0.225])
    inp = std * inp + mean
    inp = np.clip(inp, 0, 1)
    plt.imshow(inp)
    if title is not None:
        plt.title(title)
    plt.pause(0.001)  # pause briefly so the plot window gets updated
# Get one batch of training data.
inputs, classes = next(iter(dataloaders['train']))
# Arrange the batch into a single grid image and show it with labels.
out = torchvision.utils.make_grid(inputs)
imshow(out, title=[class_names[x] for x in classes])
######################################################################
# 모델 학습하기
# --------------
#
# 이제 모델을 학습하기 위한 일반 함수를 작성해보겠습니다. 여기서는 다음 내용들을
# 설명합니다:
#
# - 학습율(learning rate) 관리(scheduling)
# - 최적의 모델 구하기
#
# 아래에서 ``scheduler`` 매개변수는 ``torch.optim.lr_scheduler`` 의 LR 스케쥴러
# 객체(Object)입니다.
def train_model(model, criterion, optimizer, scheduler, num_epochs=25):
    """Generic training loop with LR scheduling and best-model checkpointing.

    `scheduler` is a torch.optim.lr_scheduler object stepped once per
    epoch after the training phase. Relies on the module-level globals
    `dataloaders`, `dataset_sizes` and `device` defined above. Returns
    the model loaded with the weights that achieved the best validation
    accuracy.
    """
    since = time.time()
    best_model_wts = copy.deepcopy(model.state_dict())
    best_acc = 0.0
    for epoch in range(num_epochs):
        print('Epoch {}/{}'.format(epoch, num_epochs - 1))
        print('-' * 10)
        # Each epoch has a training phase and a validation phase.
        for phase in ['train', 'val']:
            if phase == 'train':
                model.train()  # set model to training mode
            else:
                model.eval()   # set model to evaluation mode
            running_loss = 0.0
            running_corrects = 0
            # Iterate over the data.
            for inputs, labels in dataloaders[phase]:
                inputs = inputs.to(device)
                labels = labels.to(device)
                # Zero the parameter gradients.
                optimizer.zero_grad()
                # Forward pass; track gradient history only while training.
                with torch.set_grad_enabled(phase == 'train'):
                    outputs = model(inputs)
                    _, preds = torch.max(outputs, 1)
                    loss = criterion(outputs, labels)
                    # Backward pass + optimizer step only in the training phase.
                    if phase == 'train':
                        loss.backward()
                        optimizer.step()
                # Statistics (loss is averaged per batch, so re-weight by batch size).
                running_loss += loss.item() * inputs.size(0)
                running_corrects += torch.sum(preds == labels.data)
            if phase == 'train':
                scheduler.step()
            epoch_loss = running_loss / dataset_sizes[phase]
            epoch_acc = running_corrects.double() / dataset_sizes[phase]
            print('{} Loss: {:.4f} Acc: {:.4f}'.format(
                phase, epoch_loss, epoch_acc))
            # Deep-copy the weights if this is the best validation accuracy so far.
            if phase == 'val' and epoch_acc > best_acc:
                best_acc = epoch_acc
                best_model_wts = copy.deepcopy(model.state_dict())
        print()
    time_elapsed = time.time() - since
    print('Training complete in {:.0f}m {:.0f}s'.format(
        time_elapsed // 60, time_elapsed % 60))
    print('Best val Acc: {:4f}'.format(best_acc))
    # Load the best weights before returning.
    model.load_state_dict(best_model_wts)
    return model
######################################################################
# 모델 예측값 시각화하기
# ^^^^^^^^^^^^^^^^^^^^^^^
#
# 일부 이미지에 대한 예측값을 보여주는 일반화된 함수입니다.
#
def visualize_model(model, num_images=6):
    """Plot model predictions for up to `num_images` validation images.

    Restores the model's original train/eval mode before returning.
    Uses the module-level `dataloaders`, `device` and `class_names`.
    """
    was_training = model.training
    model.eval()
    images_so_far = 0
    fig = plt.figure()
    with torch.no_grad():
        for i, (inputs, labels) in enumerate(dataloaders['val']):
            inputs = inputs.to(device)
            labels = labels.to(device)
            outputs = model(inputs)
            _, preds = torch.max(outputs, 1)
            for j in range(inputs.size()[0]):
                images_so_far += 1
                ax = plt.subplot(num_images//2, 2, images_so_far)
                ax.axis('off')
                ax.set_title('predicted: {}'.format(class_names[preds[j]]))
                imshow(inputs.cpu().data[j])
                # Stop early once enough images have been plotted.
                if images_so_far == num_images:
                    model.train(mode=was_training)
                    return
        model.train(mode=was_training)
######################################################################
# 합성곱 신경망 미세조정(finetuning)
# ----------------------------------
#
# 미리 학습한 모델을 불러온 후 마지막의 완전히 연결된 계층을 초기화합니다.
#
model_ft = models.resnet18(pretrained=True)
num_ftrs = model_ft.fc.in_features
# The output size is set to 2 here (ants vs. bees).
# This could be generalized to nn.Linear(num_ftrs, len(class_names)).
model_ft.fc = nn.Linear(num_ftrs, 2)
model_ft = model_ft.to(device)
criterion = nn.CrossEntropyLoss()
# All parameters are being optimized (full finetuning).
optimizer_ft = optim.SGD(model_ft.parameters(), lr=0.001, momentum=0.9)
# Decay the learning rate by a factor of 0.1 every 7 epochs.
exp_lr_scheduler = lr_scheduler.StepLR(optimizer_ft, step_size=7, gamma=0.1)
######################################################################
# Train and evaluate
# ^^^^^^^^^^^^^^^^^^
#
# Takes around 15-25 minutes on CPU, under a minute on GPU.
#
model_ft = train_model(model_ft, criterion, optimizer_ft, exp_lr_scheduler,
                       num_epochs=25)
######################################################################
#
visualize_model(model_ft)
######################################################################
# 고정된 특징 추출기로써의 합성곱 신경망
# ---------------------------------------
#
# 이제, 마지막 계층을 제외한 신경망의 모든 부분을 고정해야 합니다.
# ``requires_grad == False`` 로 설정하여 매개변수를 고정하여 ``backward()`` 중에
# 경사도가 계산되지 않도록 해야합니다.
#
# 이에 대한 문서는
# `여기 <http://pytorch.org/docs/notes/autograd.html#excluding-subgraphs-from-backward>`__
# 에서 확인할 수 있습니다.
#
model_conv = torchvision.models.resnet18(pretrained=True)
# Freeze the whole backbone so no gradients flow through it.
for param in model_conv.parameters():
    param.requires_grad = False
# Parameters of newly constructed modules have requires_grad=True by default.
num_ftrs = model_conv.fc.in_features
model_conv.fc = nn.Linear(num_ftrs, 2)
model_conv = model_conv.to(device)
criterion = nn.CrossEntropyLoss()
# Unlike before, only the parameters of the final layer are optimized.
optimizer_conv = optim.SGD(model_conv.fc.parameters(), lr=0.001, momentum=0.9)
# Decay the learning rate by a factor of 0.1 every 7 epochs.
exp_lr_scheduler = lr_scheduler.StepLR(optimizer_conv, step_size=7, gamma=0.1)
######################################################################
# Train and evaluate
# ^^^^^^^^^^^^^^^^^
#
# On CPU this takes roughly half the time of the previous run, since
# gradients are not computed for most of the network. The forward pass
# still has to be computed, however.
#
model_conv = train_model(model_conv, criterion, optimizer_conv,
                         exp_lr_scheduler, num_epochs=25)
######################################################################
#
visualize_model(model_conv)
plt.ioff()
plt.show()
######################################################################
# 더 배워볼 내용
# -----------------
#
# 전이학습의 응용 사례(application)들을 더 알아보려면,
# :doc:`/intermediate/quantized_transfer_learning_tutorial` 을 참조해보세요.
#
#
| 29.229412 | 88 | 0.553934 |
from __future__ import print_function, division
import torch
import torch.nn as nn
import torch.optim as optim
from torch.optim import lr_scheduler
import numpy as np
import torchvision
from torchvision import datasets, models, transforms
import matplotlib.pyplot as plt
import time
import os
import copy
plt.ion()
| true | true |
f71c100fdfb127051c66f6dd3ec6cfe317c4ad61 | 3,783 | py | Python | my3b1b/old/MicoaelPrimo.py | Micoael/3b1b-styled-video-code | 036b339573e48f807e215bc7c7be9c6fe32b601d | [
"Apache-2.0"
] | 7 | 2020-03-02T23:56:39.000Z | 2020-06-08T15:05:46.000Z | my3b1b/old/MicoaelPrimo.py | Micoael/3b1b-styled-video-code | 036b339573e48f807e215bc7c7be9c6fe32b601d | [
"Apache-2.0"
] | null | null | null | my3b1b/old/MicoaelPrimo.py | Micoael/3b1b-styled-video-code | 036b339573e48f807e215bc7c7be9c6fe32b601d | [
"Apache-2.0"
] | null | null | null | from manimlib.imports import *
class StartingScene(Scene):
    """Animated geometry demo: rectangles and triangles built on the sides
    of a base triangle ABC, with the three yellow triangles shown to have
    equal areas S1 = S2 = S3."""
    def construct(_):
        # NOTE(review): the conventional name for this parameter is `self`;
        # `_` is used as the scene instance throughout this method.
        # Title card.
        e = Text("Manim homework by mp",font="Consolas",color=BLUE)
        _.play(Write(e),run_time=3)
        _.wait()
        _.play(Uncreate(e))
        # Base triangle vertices A, B, C and the helper points D..I
        # obtained by shifting copies of A/B/C outward.
        A = Dot().move_to(np.array([0-2,0,0]))
        B = Dot().move_to(np.array([9/10-2,12/10,0]))
        C = Dot().move_to(np.array([5/2-2,0,0]))
        D = B.copy().shift(9/10*UP+6/5*LEFT)
        E = A.copy().shift(9/10*UP+6/5*LEFT)
        F = B.copy().shift(8/5*UP+6/5*RIGHT)
        G = C.copy().shift(8/5*UP+6/5*RIGHT)
        H = A.copy().shift(5/2*DOWN)
        I = C.copy().shift(5/2*DOWN)
        # Vertex labels (built but never added to the scene below).
        lab = VGroup()
        labtxt = [TextMobject("A").next_to(A).scale(0.5),
                  TextMobject("B").next_to(B).scale(0.5),
                  TextMobject("C").next_to(C).scale(0.5),
                  TextMobject("D").next_to(D).scale(0.5),
                  TextMobject("E").next_to(E).scale(0.5),
                  TextMobject("F").next_to(F).scale(0.5),
                  TextMobject("G").next_to(G).scale(0.5),
                  TextMobject("H").next_to(H).scale(0.5),
                  TextMobject("I").next_to(I).scale(0.5),
                  ]
        for i in range(len(labtxt)):
            lab.add(labtxt[i])
        # The orange base triangle, three green rectangles on its sides,
        # and the three yellow corner triangles whose areas are compared.
        original_trangle = Polygon(A.get_center(),B.get_center(),C.get_center(),color=ORANGE,fill_color = ORANGE,fill_opacity=0.5)
        rect1 = Polygon(A.get_center(),B.get_center(),D.get_center(),E.get_center(),color=GREEN,fill_color = GREEN,fill_opacity=0.5)
        rect2 = Polygon(B.get_center(),F.get_center(),G.get_center(),C.get_center(),color=GREEN,fill_color = GREEN,fill_opacity=0.5)
        rect3 = Polygon(A.get_center(),C.get_center(),I.get_center(),H.get_center(),color=GREEN,fill_color = GREEN,fill_opacity=0.5)
        tran1 = Polygon(D.get_center(),F.get_center(),B.get_center(),color=YELLOW,fill_color = YELLOW,fill_opacity=0.5)
        tran2 = Polygon(E.get_center(),A.get_center(),H.get_center(),color=YELLOW,fill_color = YELLOW,fill_opacity=0.5)
        tran3 = Polygon(C.get_center(),G.get_center(),I.get_center(),color=YELLOW,fill_color = YELLOW,fill_opacity=0.5)
        # Updaters that keep each area label centered on its triangle.
        def getc1(obj):
            obj.move_to(tran1.get_center())
        def getc2(obj):
            obj.move_to(tran2.get_center())
        def getc3(obj):
            obj.move_to(tran3.get_center())
        S1 = TexMobject("S1").add_updater(getc1)
        S2 = TexMobject("S2").add_updater(getc2)
        S3 = TexMobject("S3").add_updater(getc3)
        trans = VGroup(tran1,tran2,tran3,S1,S2,S3)  # NOTE: unused below
        # _.add(A,B,C,D,E,F,G,H,I,lab,original_trangle,rect1,rect2,rect3,tran1,tran2,tran3,S1,S2,S3)
        _.play(ShowCreation(original_trangle))
        _.wait()
        _.play(ShowCreation(rect1),ShowCreation(rect2),ShowCreation(rect3))
        _.wait()
        _.play(ShowCreation(tran1),ShowCreation(tran2),ShowCreation(tran3)
        ,Write(S1),Write(S2),Write(S3)  ,)
        _.wait()
        _.play(FadeOut(rect1),FadeOut(rect2),FadeOut(rect3))
        _.wait()
        # Rotate each corner triangle 90 degrees about a base vertex, then
        # morph them onto the base triangle to show equal area.
        _.play(Rotate(tran1,PI/2,about_point = B.get_center()),
            Rotate(tran2,PI/2,about_point = A.get_center()),
            Rotate(tran3,PI/2,about_point = C.get_center())  )
        _.play(Transform(tran1,original_trangle))
        _.play(Transform(tran2,original_trangle))
        _.play(Transform(tran3,original_trangle))
        # Freeze the labels in place, then line them up as S1 = S2 = S3.
        S1.clear_updaters()
        S2.clear_updaters()
        S3.clear_updaters()
        _.play(S1.shift,2*UP+1.5*LEFT)
        _.play(S2.shift,2*UP)
        _.play(S3.shift,2*UP+1.5*RIGHT)
        eq = TextMobject("=").next_to(S1)
        eq2 = TextMobject("=").next_to(S2)
        _.play(Write(eq),Write(eq2))
| 49.12987 | 133 | 0.576791 | from manimlib.imports import *
class StartingScene(Scene):
def construct(_):
e = Text("Manim homework by mp",font="Consolas",color=BLUE)
_.play(Write(e),run_time=3)
_.wait()
_.play(Uncreate(e))
A = Dot().move_to(np.array([0-2,0,0]))
B = Dot().move_to(np.array([9/10-2,12/10,0]))
C = Dot().move_to(np.array([5/2-2,0,0]))
D = B.copy().shift(9/10*UP+6/5*LEFT)
E = A.copy().shift(9/10*UP+6/5*LEFT)
F = B.copy().shift(8/5*UP+6/5*RIGHT)
G = C.copy().shift(8/5*UP+6/5*RIGHT)
H = A.copy().shift(5/2*DOWN)
I = C.copy().shift(5/2*DOWN)
lab = VGroup()
labtxt = [TextMobject("A").next_to(A).scale(0.5),
TextMobject("B").next_to(B).scale(0.5),
TextMobject("C").next_to(C).scale(0.5),
TextMobject("D").next_to(D).scale(0.5),
TextMobject("E").next_to(E).scale(0.5),
TextMobject("F").next_to(F).scale(0.5),
TextMobject("G").next_to(G).scale(0.5),
TextMobject("H").next_to(H).scale(0.5),
TextMobject("I").next_to(I).scale(0.5),
]
for i in range(len(labtxt)):
lab.add(labtxt[i])
original_trangle = Polygon(A.get_center(),B.get_center(),C.get_center(),color=ORANGE,fill_color = ORANGE,fill_opacity=0.5)
rect1 = Polygon(A.get_center(),B.get_center(),D.get_center(),E.get_center(),color=GREEN,fill_color = GREEN,fill_opacity=0.5)
rect2 = Polygon(B.get_center(),F.get_center(),G.get_center(),C.get_center(),color=GREEN,fill_color = GREEN,fill_opacity=0.5)
rect3 = Polygon(A.get_center(),C.get_center(),I.get_center(),H.get_center(),color=GREEN,fill_color = GREEN,fill_opacity=0.5)
tran1 = Polygon(D.get_center(),F.get_center(),B.get_center(),color=YELLOW,fill_color = YELLOW,fill_opacity=0.5)
tran2 = Polygon(E.get_center(),A.get_center(),H.get_center(),color=YELLOW,fill_color = YELLOW,fill_opacity=0.5)
tran3 = Polygon(C.get_center(),G.get_center(),I.get_center(),color=YELLOW,fill_color = YELLOW,fill_opacity=0.5)
def getc1(obj):
obj.move_to(tran1.get_center())
def getc2(obj):
obj.move_to(tran2.get_center())
def getc3(obj):
obj.move_to(tran3.get_center())
S1 = TexMobject("S1").add_updater(getc1)
S2 = TexMobject("S2").add_updater(getc2)
S3 = TexMobject("S3").add_updater(getc3)
trans = VGroup(tran1,tran2,tran3,S1,S2,S3)
_.play(ShowCreation(original_trangle))
_.wait()
_.play(ShowCreation(rect1),ShowCreation(rect2),ShowCreation(rect3))
_.wait()
_.play(ShowCreation(tran1),ShowCreation(tran2),ShowCreation(tran3)
,Write(S1),Write(S2),Write(S3) ,)
_.wait()
_.play(FadeOut(rect1),FadeOut(rect2),FadeOut(rect3))
_.wait()
_.play(Rotate(tran1,PI/2,about_point = B.get_center()),
Rotate(tran2,PI/2,about_point = A.get_center()),
Rotate(tran3,PI/2,about_point = C.get_center()) )
_.play(Transform(tran1,original_trangle))
_.play(Transform(tran2,original_trangle))
_.play(Transform(tran3,original_trangle))
S1.clear_updaters()
S2.clear_updaters()
S3.clear_updaters()
_.play(S1.shift,2*UP+1.5*LEFT)
_.play(S2.shift,2*UP)
_.play(S3.shift,2*UP+1.5*RIGHT)
eq = TextMobject("=").next_to(S1)
eq2 = TextMobject("=").next_to(S2)
_.play(Write(eq),Write(eq2))
| true | true |
f71c111b67dac5359468b1d2de3970e43bfa4ea3 | 5,551 | py | Python | leetcode_python/Array/longest-arithmetic-subsequence.py | yennanliu/Python_basics | 6a597442d39468295946cefbfb11d08f61424dc3 | [
"Unlicense"
] | null | null | null | leetcode_python/Array/longest-arithmetic-subsequence.py | yennanliu/Python_basics | 6a597442d39468295946cefbfb11d08f61424dc3 | [
"Unlicense"
] | null | null | null | leetcode_python/Array/longest-arithmetic-subsequence.py | yennanliu/Python_basics | 6a597442d39468295946cefbfb11d08f61424dc3 | [
"Unlicense"
] | null | null | null | """
1027. Longest Arithmetic Subsequence
Medium
Given an array nums of integers, return the length of the longest arithmetic subsequence in nums.
Recall that a subsequence of an array nums is a list nums[i1], nums[i2], ..., nums[ik] with 0 <= i1 < i2 < ... < ik <= nums.length - 1, and that a sequence seq is arithmetic if seq[i+1] - seq[i] are all the same value (for 0 <= i < seq.length - 1).
Example 1:
Input: nums = [3,6,9,12]
Output: 4
Explanation:
The whole array is an arithmetic sequence with steps of length = 3.
Example 2:
Input: nums = [9,4,7,2,10]
Output: 3
Explanation:
The longest arithmetic subsequence is [4,7,10].
Example 3:
Input: nums = [20,1,15,3,10,5,8]
Output: 4
Explanation:
The longest arithmetic subsequence is [20,15,10,5].
Constraints:
2 <= nums.length <= 1000
0 <= nums[i] <= 500
"""
# V0
# IDEA : DP
class Solution:
    def longestArithSeqLength(self, A):
        """Return the length of the longest arithmetic subsequence of A.

        Dynamic programming over (index, difference) pairs:
        best[(j, d)] is the length of the longest arithmetic subsequence
        ending at index j whose consecutive elements differ by d.
        O(n^2) time and space.
        """
        best = {}
        n = len(A)
        for i in range(n):
            for j in range(i + 1, n):
                d = A[j] - A[i]
                # Extend a run with difference d ending at i, or start a
                # fresh pair of length 2.
                best[(j, d)] = best.get((i, d), 1) + 1
        return max(best.values())
# V0'
# IDEA : HASH TABLE
# https://leetcode.com/problems/longest-arithmetic-subsequence/discuss/274657/Short-Python-solution
class Solution:
    def longestArithSeqLength(self, A):
        """Return the length of the longest arithmetic subsequence of A.

        aux[a][d] holds the longest arithmetic subsequence (difference
        d != 0) whose last element has value a; cnt counts occurrences of
        each value, covering constant (d == 0) subsequences. O(n^2) time.
        """
        aux, cnt, prefix = {a : {} for a in A}, {}, set()
        for a in A:
            cnt[a] = cnt[a] + 1 if a in cnt else 1
            for b in prefix:
                if a != b:
                    aux[a][a - b] = 1 + aux[b][a - b] if a - b in aux[b] else 2
            prefix.add(a)
        max_const = max(cnt.values())
        # BUG FIX: when every value in A is identical, all aux entries are
        # empty and max() over an empty generator raised ValueError.
        # default=2 preserves the original result for every other input.
        max_aux = max((max(d.values()) for a, d in aux.items() if d), default=2)
        return max(max_const, max_aux, 2)
# V1
# https://www.796t.com/article.php?id=154559
# http://www.noteanddata.com/leetcode-1027-Longest-Arithmetic-Sequence-Google-Interview-Problem-java-solution-note.html
# https://blog.csdn.net/w5688414/article/details/109696664
# V1
# IDEA : HASH
# https://leetcode.com/problems/longest-arithmetic-subsequence/discuss/274657/Short-Python-solution
class Solution:
    def longestArithSeqLength(self, A):
        """Return the length of the longest arithmetic subsequence of A.

        aux[a][d] holds the longest arithmetic subsequence (difference
        d != 0) whose last element has value a; cnt counts occurrences of
        each value, covering constant (d == 0) subsequences. O(n^2) time.
        """
        aux, cnt, prefix = {a : {} for a in A}, {}, set()
        for a in A:
            cnt[a] = cnt[a] + 1 if a in cnt else 1
            for b in prefix:
                if a != b:
                    aux[a][a - b] = 1 + aux[b][a - b] if a - b in aux[b] else 2
            prefix.add(a)
        max_const = max(cnt.values())
        # BUG FIX: when every value in A is identical, all aux entries are
        # empty and max() over an empty generator raised ValueError.
        # default=2 preserves the original result for every other input.
        max_aux = max((max(d.values()) for a, d in aux.items() if d), default=2)
        return max(max_const, max_aux, 2)
# V1'
# https://leetcode.com/problems/longest-arithmetic-subsequence/discuss/275395/python-O(n**2)-solution
class Solution:
    def longestArithSeqLength(self, A):
        """Return the length of the longest arithmetic subsequence of A.

        Constant sequences (all equal values) are handled by a frequency
        count; non-constant ones by dynamic programming keyed on the last
        two VALUES of the subsequence. O(n^2) time.
        """
        # Constant sequences like '0000': best is the most frequent value.
        freq = collections.Counter(A)
        best = max(2, max(freq.values()))
        # seen[(a1, a2)]: length of the longest run whose last two values
        # are a1, a2 (a1 != a2). The predecessor value is a0 = 2*a1 - a2.
        seen = {}
        for i in range(len(A)):
            for j in range(i):
                a1, a2 = A[j], A[i]
                a0 = 2 * a1 - a2
                if a0 == a1:
                    continue
                if (a0, a1) in seen:
                    seen[(a1, a2)] = seen[(a0, a1)] + 1
                    best = max(seen[(a1, a2)], best)
                else:
                    seen[(a1, a2)] = 2
        return best
# V1''
# IDEA : HASH SET
# https://leetcode.com/problems/longest-arithmetic-subsequence/discuss/274625/simple-hash-Set-Python
class Solution(object):
    def longestArithSeqLength(self, A):
        """Return the length of the longest arithmetic subsequence of A.

        For every difference d:
          length[(d, i)] -- longest subsequence with difference d ending
                            at index i;
          members[d]     -- set of indices ending some run with difference d;
          best[d]        -- longest length seen so far for d.
        O(n^2) time.
        """
        if len(A) <= 2:
            return len(A)
        length = {}
        members = {}
        best = {}
        # Seed with the pair formed by the first two elements.
        first = A[1] - A[0]
        length[(first, 1)] = 2
        members[first] = {1}
        best[first] = 2
        res = 2
        for i in range(2, len(A)):
            for j in range(i):
                d = A[i] - A[j]
                if d not in members:
                    # First time this difference appears.
                    length[(d, i)] = 2
                    members[d] = {i}
                    best[d] = 2
                elif j in members[d]:
                    # Extend the run with difference d that ends at j.
                    length[(d, i)] = length[(d, j)] + 1
                    members[d].add(i)
                    best[d] = max(best[d], length[(d, i)])
                    res = max(best[d], res)
                else:
                    # Known difference, but j doesn't end a run: new pair.
                    length[(d, i)] = 2
                    members[d].add(i)
        return res
# V1'''
# IDEA : DP
# https://leetcode.com/problems/longest-arithmetic-subsequence/discuss/274611/JavaC%2B%2BPython-DP
class Solution:
    def longestArithSeqLength(self, A):
        """Return the length of the longest arithmetic subsequence of A.

        run[(j, step)] is the longest arithmetic run ending at index j
        with common difference `step`. O(n^2) time and space.
        """
        run = {}
        for i, a in enumerate(A):
            for j in range(i + 1, len(A)):
                step = A[j] - a
                run[(j, step)] = run.get((i, step), 1) + 1
        return max(run.values())
# V1''''
# IDEA : DP
# https://leetcode.com/problems/longest-arithmetic-subsequence/discuss/514742/Python-DP
class Solution:
    def longestArithSeqLength(self, A):
        """Return the length of the longest arithmetic subsequence of A.

        table[(step, right)] is the longest arithmetic run with common
        difference `step` whose last element sits at index `right`.
        O(n^2) time and space.
        """
        table = {}
        for right in range(1, len(A)):
            for left in range(right):
                step = A[right] - A[left]
                # Either extend the best run with this step ending at
                # `left`, or start a fresh pair of length 2.
                table[(step, right)] = table.get((step, left), 1) + 1
        return max(table.values())
# V2 | 31.361582 | 248 | 0.503513 |
class Solution:
def longestArithSeqLength(self, A):
dp = {}
for i in range(len(A)):
for j in range(i + 1, len(A)):
dp[j, A[j] - A[i]] = dp.get((i, A[j] - A[i]), 1) + 1
return max(dp.values())
# IDEA : HASH TABLE
# https://leetcode.com/problems/longest-arithmetic-subsequence/discuss/274657/Short-Python-solution
class Solution:
def longestArithSeqLength(self, A):
aux, cnt, prefix = {a : {} for a in A}, {}, set()
for a in A:
cnt[a] = cnt[a] + 1 if a in cnt else 1
for b in prefix:
if a != b:
aux[a][a - b] = 1 + aux[b][a - b] if a - b in aux[b] else 2
prefix.add(a)
max_const = max(cnt.values())
max_aux = max(max(d.values()) for a, d in aux.items() if d)
return max(max_const, max_aux, 2)
# V1
# https://www.796t.com/article.php?id=154559
# http://www.noteanddata.com/leetcode-1027-Longest-Arithmetic-Sequence-Google-Interview-Problem-java-solution-note.html
# https://blog.csdn.net/w5688414/article/details/109696664
# V1
# IDEA : HASH
# https://leetcode.com/problems/longest-arithmetic-subsequence/discuss/274657/Short-Python-solution
class Solution:
def longestArithSeqLength(self, A):
aux, cnt, prefix = {a : {} for a in A}, {}, set()
for a in A:
cnt[a] = cnt[a] + 1 if a in cnt else 1
for b in prefix:
if a != b:
aux[a][a - b] = 1 + aux[b][a - b] if a - b in aux[b] else 2
prefix.add(a)
max_const = max(cnt.values())
max_aux = max(max(d.values()) for a, d in aux.items() if d)
return max(max_const, max_aux, 2)
# V1'
class Solution:
def longestArithSeqLength(self, A):
ct = collections.Counter(A)
ans = max(2, max(ct[i] for i in ct))
ansdic = {}
for i in range(len(A)):
for j in range(i):
a0, a1, a2 = A[j]*2-A[i], A[j], A[i]
if a0 == a1:continue
if (a0, a1) in ansdic:
ansdic[a1, a2] = ansdic[a0, a1] + 1
ans = max(ansdic[a1, a2], ans)
else:
ansdic[a1, a2] = 2
return ans
class Solution(object):
def longestArithSeqLength(self, A):
res = 2
if len(A) <= 2:
return len(A)
cnt = {}
node = {}
mx = {}
curr = A[1] - A[0]
cnt[(curr,1)] = 2
node[curr] = set()
node[curr].add(1)
mx[curr] = 2
res = 2
for i in range(2,len(A)):
for j in range(i):
dis = A[i] - A[j]
if dis in node:
if j in node[dis]:
cnt[(dis,i)] = cnt[(dis,j)] + 1
node[dis].add(i)
mx[dis] = max(mx[dis], cnt[(dis,i)])
res = max(mx[dis],res)
else:
cnt[(dis,i)] = 2
node[dis].add(i)
else:
cnt[(dis,i)] = 2
node[dis] = set()
node[dis].add(i)
mx[dis] = 2
return res
# IDEA : DP
# https://leetcode.com/problems/longest-arithmetic-subsequence/discuss/274611/JavaC%2B%2BPython-DP
class Solution:
def longestArithSeqLength(self, A):
dp = {}
for i in range(len(A)):
for j in range(i + 1, len(A)):
dp[j, A[j] - A[i]] = dp.get((i, A[j] - A[i]), 1) + 1
return max(dp.values())
# V1''''
# IDEA : DP
# https://leetcode.com/problems/longest-arithmetic-subsequence/discuss/514742/Python-DP
class Solution:
def longestArithSeqLength(self, A):
DP = {}
A_len = len(A)
for right in range(1, A_len):
for left in range(right):
diff = A[right] - A[left]
#if (diff, left) in DP:
# DP[(diff, right)] = DP[(diff, left)] + 1
#else:
# DP[(diff, right)] = 2
DP[(diff, right)] = DP.get((diff,left), 1) + 1
return max(DP.values())
# V2 | true | true |
f71c1149bd0ccd4c108d6852f0e7e33eb102e6e2 | 1,820 | py | Python | tests/util/test_i18n.py | zsluedem/MonkTrader | 760942a59919b34c876467bc0eb4afb30689cbc1 | [
"MIT"
] | 2 | 2018-11-17T06:39:36.000Z | 2019-01-18T13:14:15.000Z | tests/util/test_i18n.py | zsluedem/MonkTrader | 760942a59919b34c876467bc0eb4afb30689cbc1 | [
"MIT"
] | 37 | 2018-11-04T15:05:04.000Z | 2019-03-09T09:26:30.000Z | tests/util/test_i18n.py | zsluedem/MonkTrader | 760942a59919b34c876467bc0eb4afb30689cbc1 | [
"MIT"
] | null | null | null | #
# MIT License
#
# Copyright (c) 2018 WillQ
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
import os
from unittest.mock import MagicMock, patch
from monkq.utils.i18n import LazyTranslation
def test_lazytranslation_not_setting() -> None:
with patch("monkq.utils.i18n.gettext", MagicMock()) as mockg:
mockg.find.return_value = None
trans = LazyTranslation()
trans.setup("CN")
trans.gettext("hello")
mockg.NullTranslations().gettext.assert_called()
def test_lazytranslation() -> None:
with patch("monkq.utils.i18n.gettext", MagicMock()) as mockg:
mockg.find.return_value = os.path.abspath(__file__)
trans = LazyTranslation()
trans.setup("CN")
trans.gettext("hello")
mockg.GNUTranslations().gettext.assert_called()
| 37.142857 | 79 | 0.737363 |
import os
from unittest.mock import MagicMock, patch
from monkq.utils.i18n import LazyTranslation
def test_lazytranslation_not_setting() -> None:
with patch("monkq.utils.i18n.gettext", MagicMock()) as mockg:
mockg.find.return_value = None
trans = LazyTranslation()
trans.setup("CN")
trans.gettext("hello")
mockg.NullTranslations().gettext.assert_called()
def test_lazytranslation() -> None:
with patch("monkq.utils.i18n.gettext", MagicMock()) as mockg:
mockg.find.return_value = os.path.abspath(__file__)
trans = LazyTranslation()
trans.setup("CN")
trans.gettext("hello")
mockg.GNUTranslations().gettext.assert_called()
| true | true |
f71c138dfb3853f24c17b2b530c0d786d88a9cf0 | 5,959 | py | Python | modelEpochs.py | JDMusc/Online-Bullying-Image-Classifcation | 9196c60c554cf160d68cb9e9c41fda124abebf63 | [
"MIT"
] | null | null | null | modelEpochs.py | JDMusc/Online-Bullying-Image-Classifcation | 9196c60c554cf160d68cb9e9c41fda124abebf63 | [
"MIT"
] | null | null | null | modelEpochs.py | JDMusc/Online-Bullying-Image-Classifcation | 9196c60c554cf160d68cb9e9c41fda124abebf63 | [
"MIT"
] | null | null | null | import copy
import numpy as np
from numpy import log10
import os
from toolz import pipe as p
from tensorboardX import SummaryWriter
import torch
import torch.nn as nn
import numpy as np
import preprocessing as pp
def findParam(model, name_filter):
if callable(name_filter):
fn = name_filter
else:
name_filter = [name_filter] if type(name_filter) is str else name_filter
fn = lambda param_name: all(
component in param_name for component in name_filter)
return [(pn, pv) for (pn, pv) in model.named_parameters() if fn(pn)]
def setParameterRequiresGrad(model, requires_grad = False, params = None):
params = model.parameters() if params is None else params
for param in params:
param.requires_grad = requires_grad
def runEpochs(
model, criterion,
dataloaders, dataset_sizes, device,
log_params_verbose, num_epochs,
optimizer, scheduler,
writer):
best_model_wts = copy.deepcopy(model.state_dict())
best_acc = 0.0
prev_model_wts = best_model_wts
for epoch in range(num_epochs):
epoch_acc, model_wts = _run_epoch(
model,
criterion, dataloaders, dataset_sizes, device,
epoch, log_params_verbose, num_epochs,
optimizer, scheduler, writer)
_log_coef_diffs(writer, epoch, prev_model_wts, model_wts)
prev_model_wts = model_wts
if epoch_acc > best_acc:
best_acc = epoch_acc
best_model_wts = model_wts
# load best model weights
model.load_state_dict(best_model_wts)
return (model, best_acc)
def viewParamsToBeUpdated(model):
return [n for (n,p) in model.named_parameters() if p.requires_grad == True]
def add_graph_model(writer, model, dataloaders, device):
inputs, classes = p(dataloaders['train'], iter, next)
inputs = inputs.to(device)
classes = classes.to(device)
writer.add_graph(model, inputs)
def _run_epoch(model,
criterion, dataloaders, dataset_sizes, device,
epoch, log_params_verbose, num_epochs,
optimizer, scheduler, writer):
print('Epoch {}/{}'.format(epoch, num_epochs - 1))
print('-' * 10)
n_samples = {'train': 0, 'val': 0}
# Each epoch has a training and validation phase
for phase in ['train', 'val']:
is_train = phase == 'train'
if is_train:
scheduler.step()
model.train()
else:
model.eval()
running_loss = 0.0
running_corrects = 0
for inputs, labels in dataloaders[phase]:
n_samples[phase] = n_samples[phase] + len(labels)
inputs = inputs.to(device)
labels = labels.to(device)
preds, loss = _take_step(
model, criterion, optimizer, inputs, labels, is_train)
# statistics
running_loss += loss.item() * inputs.size(0)
running_corrects += torch.sum(preds == labels.data)
epoch_loss = running_loss / dataset_sizes[phase]
epoch_acc = running_corrects.double() / dataset_sizes[phase]
_log_epoch_phase_stats(writer, epoch, phase, epoch_loss, epoch_acc)
if log_params_verbose:
_log_model_params_verbose(writer, model, epoch, phase)
# deep copy the model
model_wts = copy.deepcopy(model.state_dict())
_log_lr(writer, epoch, scheduler)
print('# training samples')
print(n_samples['train'])
print('# val samples')
print(n_samples['val'])
return epoch_acc, model_wts
def _take_step(model, criterion, optimizer, inputs, labels, is_train):
# zero the parameter gradients
optimizer.zero_grad()
# forward
# track history if only in train
with torch.set_grad_enabled(is_train):
outputs = model(inputs)
_, preds = torch.max(outputs, 1)
loss = criterion(outputs, labels)
# backward + optimize only if in training phase
if is_train:
loss.backward()
optimizer.step()
return preds, loss
def _add_scope(scope, k):
return scope + '/' + k
def _add_scope_gen(scope):
return lambda k: _add_scope(scope, k)
def _log_model_params_verbose(writer, model, run_num, scope, use_hist = False):
def write(tag, param):
fn = writer.add_histogram if use_hist else writer.add_scalar
param = param if use_hist else param.abs().mean()
return fn(tag, param, run_num)
with torch.no_grad():
for (name, param) in model.named_parameters():
p(name,
_add_scope_gen(scope),
lambda tag: write(tag, param)
)
def _log_lr(writer, epoch, scheduler):
lr = p(scheduler.get_lr(), np.array)[0]
p('lr',
_add_scope_gen('lr'),
lambda _: writer.add_scalar(_, lr, epoch)
)
p('log10_lr',
_add_scope_gen('lr'),
lambda _: writer.add_scalar(_, log10(lr), epoch)
)
def _log_epoch_phase_stats(writer, epoch, scope, epoch_loss, epoch_acc):
log_measure = lambda k, v: p(k,
_add_scope_gen(scope),
lambda _ : writer.add_scalar(_, v, epoch)
)
log_measure('loss', epoch_loss)
log_measure('accuracy', epoch_acc)
print('{} Loss: {:.4f} Acc: {:.4f}'.format(
scope, epoch_loss, epoch_acc))
def _log_coef_diffs(writer, epoch, prev_model_state, curr_model_state):
def write(name, curr):
diff = curr - prev_model_state[name]
p(name,
_add_scope_gen('params'),
lambda _: writer.add_scalar(
_ + '.diff', diff.abs().mean(), epoch)
)
with torch.no_grad():
for name in curr_model_state:
if ('weight' in name or 'bias' in name):
write(name, curr_model_state[name])
| 27.587963 | 80 | 0.614365 | import copy
import numpy as np
from numpy import log10
import os
from toolz import pipe as p
from tensorboardX import SummaryWriter
import torch
import torch.nn as nn
import numpy as np
import preprocessing as pp
def findParam(model, name_filter):
if callable(name_filter):
fn = name_filter
else:
name_filter = [name_filter] if type(name_filter) is str else name_filter
fn = lambda param_name: all(
component in param_name for component in name_filter)
return [(pn, pv) for (pn, pv) in model.named_parameters() if fn(pn)]
def setParameterRequiresGrad(model, requires_grad = False, params = None):
params = model.parameters() if params is None else params
for param in params:
param.requires_grad = requires_grad
def runEpochs(
model, criterion,
dataloaders, dataset_sizes, device,
log_params_verbose, num_epochs,
optimizer, scheduler,
writer):
best_model_wts = copy.deepcopy(model.state_dict())
best_acc = 0.0
prev_model_wts = best_model_wts
for epoch in range(num_epochs):
epoch_acc, model_wts = _run_epoch(
model,
criterion, dataloaders, dataset_sizes, device,
epoch, log_params_verbose, num_epochs,
optimizer, scheduler, writer)
_log_coef_diffs(writer, epoch, prev_model_wts, model_wts)
prev_model_wts = model_wts
if epoch_acc > best_acc:
best_acc = epoch_acc
best_model_wts = model_wts
model.load_state_dict(best_model_wts)
return (model, best_acc)
def viewParamsToBeUpdated(model):
return [n for (n,p) in model.named_parameters() if p.requires_grad == True]
def add_graph_model(writer, model, dataloaders, device):
inputs, classes = p(dataloaders['train'], iter, next)
inputs = inputs.to(device)
classes = classes.to(device)
writer.add_graph(model, inputs)
def _run_epoch(model,
criterion, dataloaders, dataset_sizes, device,
epoch, log_params_verbose, num_epochs,
optimizer, scheduler, writer):
print('Epoch {}/{}'.format(epoch, num_epochs - 1))
print('-' * 10)
n_samples = {'train': 0, 'val': 0}
for phase in ['train', 'val']:
is_train = phase == 'train'
if is_train:
scheduler.step()
model.train()
else:
model.eval()
running_loss = 0.0
running_corrects = 0
for inputs, labels in dataloaders[phase]:
n_samples[phase] = n_samples[phase] + len(labels)
inputs = inputs.to(device)
labels = labels.to(device)
preds, loss = _take_step(
model, criterion, optimizer, inputs, labels, is_train)
running_loss += loss.item() * inputs.size(0)
running_corrects += torch.sum(preds == labels.data)
epoch_loss = running_loss / dataset_sizes[phase]
epoch_acc = running_corrects.double() / dataset_sizes[phase]
_log_epoch_phase_stats(writer, epoch, phase, epoch_loss, epoch_acc)
if log_params_verbose:
_log_model_params_verbose(writer, model, epoch, phase)
model_wts = copy.deepcopy(model.state_dict())
_log_lr(writer, epoch, scheduler)
print('# training samples')
print(n_samples['train'])
print('# val samples')
print(n_samples['val'])
return epoch_acc, model_wts
def _take_step(model, criterion, optimizer, inputs, labels, is_train):
optimizer.zero_grad()
with torch.set_grad_enabled(is_train):
outputs = model(inputs)
_, preds = torch.max(outputs, 1)
loss = criterion(outputs, labels)
if is_train:
loss.backward()
optimizer.step()
return preds, loss
def _add_scope(scope, k):
return scope + '/' + k
def _add_scope_gen(scope):
return lambda k: _add_scope(scope, k)
def _log_model_params_verbose(writer, model, run_num, scope, use_hist = False):
def write(tag, param):
fn = writer.add_histogram if use_hist else writer.add_scalar
param = param if use_hist else param.abs().mean()
return fn(tag, param, run_num)
with torch.no_grad():
for (name, param) in model.named_parameters():
p(name,
_add_scope_gen(scope),
lambda tag: write(tag, param)
)
def _log_lr(writer, epoch, scheduler):
lr = p(scheduler.get_lr(), np.array)[0]
p('lr',
_add_scope_gen('lr'),
lambda _: writer.add_scalar(_, lr, epoch)
)
p('log10_lr',
_add_scope_gen('lr'),
lambda _: writer.add_scalar(_, log10(lr), epoch)
)
def _log_epoch_phase_stats(writer, epoch, scope, epoch_loss, epoch_acc):
log_measure = lambda k, v: p(k,
_add_scope_gen(scope),
lambda _ : writer.add_scalar(_, v, epoch)
)
log_measure('loss', epoch_loss)
log_measure('accuracy', epoch_acc)
print('{} Loss: {:.4f} Acc: {:.4f}'.format(
scope, epoch_loss, epoch_acc))
def _log_coef_diffs(writer, epoch, prev_model_state, curr_model_state):
def write(name, curr):
diff = curr - prev_model_state[name]
p(name,
_add_scope_gen('params'),
lambda _: writer.add_scalar(
_ + '.diff', diff.abs().mean(), epoch)
)
with torch.no_grad():
for name in curr_model_state:
if ('weight' in name or 'bias' in name):
write(name, curr_model_state[name])
| true | true |
f71c143502daacb5d0cac62a6b711503065c58e7 | 505 | py | Python | functions/l3Consume.py | yuklia/serverless-lambda-chaining | dd24129933489c2f1a522b37d8f4c3e16eb47285 | [
"MIT"
] | null | null | null | functions/l3Consume.py | yuklia/serverless-lambda-chaining | dd24129933489c2f1a522b37d8f4c3e16eb47285 | [
"MIT"
] | null | null | null | functions/l3Consume.py | yuklia/serverless-lambda-chaining | dd24129933489c2f1a522b37d8f4c3e16eb47285 | [
"MIT"
] | null | null | null | import json
def handler(event, context):
message_from_publisher = json.loads(event['Records'][0]['Sns']['Message'])
my_param = message_from_publisher['myParamFromConsumerPublisher']
print("👷 Received paramater from ConsumerPublisher: '{0}'".format(my_param))
body = {
"message": "Go Serverless v1.0! Your function executed successfully!",
"input": event
}
response = {
"statusCode": 200,
"body": json.dumps(body)
}
return response
| 24.047619 | 80 | 0.637624 | import json
def handler(event, context):
message_from_publisher = json.loads(event['Records'][0]['Sns']['Message'])
my_param = message_from_publisher['myParamFromConsumerPublisher']
print("👷 Received paramater from ConsumerPublisher: '{0}'".format(my_param))
body = {
"message": "Go Serverless v1.0! Your function executed successfully!",
"input": event
}
response = {
"statusCode": 200,
"body": json.dumps(body)
}
return response
| true | true |
f71c145bb6baf46c8e813d23d04f55fadd9b6a4e | 112 | py | Python | test/nohtml.py | eaybek/nohtml | 9df8fc032891591516d8a719ebc15440d8cc7a0c | [
"MIT"
] | null | null | null | test/nohtml.py | eaybek/nohtml | 9df8fc032891591516d8a719ebc15440d8cc7a0c | [
"MIT"
] | null | null | null | test/nohtml.py | eaybek/nohtml | 9df8fc032891591516d8a719ebc15440d8cc7a0c | [
"MIT"
] | null | null | null | import unittest
class NohtmlTest(unittest.TestCase):
pass
if __name__ == "__main__":
unittest.main()
| 12.444444 | 36 | 0.705357 | import unittest
class NohtmlTest(unittest.TestCase):
pass
if __name__ == "__main__":
unittest.main()
| true | true |
f71c154fa91e7b9687d0c58c927b63ccf6253ccb | 3,682 | py | Python | ReUseModel/TestFAEModel.py | Eggiverse/FAE | 1b953ba6dfcced83e5929eeaa8f525ec4acde5ed | [
"MIT"
] | null | null | null | ReUseModel/TestFAEModel.py | Eggiverse/FAE | 1b953ba6dfcced83e5929eeaa8f525ec4acde5ed | [
"MIT"
] | null | null | null | ReUseModel/TestFAEModel.py | Eggiverse/FAE | 1b953ba6dfcced83e5929eeaa8f525ec4acde5ed | [
"MIT"
] | null | null | null |
import os
import csv
import numpy as np
from FAE.FeatureAnalysis.Normalizer import Normalizer
from FAE.DataContainer.DataContainer import DataContainer
from FAE.FeatureAnalysis.Classifier import Classifier
from FAE.Func.Metric import EstimateMetirc
from FAE.FeatureAnalysis.FeatureSelector import FeatureSelector
from FAE.FeatureAnalysis.CrossValidation import CrossValidation
def LoadTrainInfo(model_folder):
train_info = {}
##Load normalizaiton
normalizer = Normalizer()
normalization_path = ''
for sub_file in os.listdir(model_folder):
if sub_file.rfind('_normalization_training.csv') != -1:
normalization_path = os.path.join(model_folder, sub_file)
if not os.path.exists(normalization_path):
print('Check the normalization name : zero_center_normalization')
else:
normalizer.Load(normalization_path)
train_info['normalizer'] = normalizer
## Load selected features
selected_feature_path = os.path.join(model_folder, 'feature_select_info.csv')
selected_feature_list = []
with open(selected_feature_path, 'r', newline='') as f:
f_reader = csv.reader(f)
for index in f_reader:
if index[0] == 'selected_feature':
selected_feature_list = index[1:]
if selected_feature_list == []:
print('No selected features')
train_info['selected_features'] = selected_feature_list
## Load FAE model
classifier = Classifier()
classifier.Load(model_folder)
train_info['classifier'] = classifier
return train_info
def TestNewData(NewDataCsv, model_folder, result_save_path):
'''
:param NewDataCsv: New radiomics feature matrix csv file path
:param model_folder:The trained model path
:return:classification result
'''
train_info = LoadTrainInfo(model_folder)
new_data_container = DataContainer()
#Normlization
new_data_container.Load(NewDataCsv)
feature_selector = FeatureSelector()
feature_selector.SelectFeatureByName(new_data_container, train_info['selected_features'], is_replace=True)
new_data_container = train_info['normalizer'].Transform(new_data_container)
# data_frame = new_data_container.GetFrame()
# data_frame = data_frame[train_info['selected_features']]
# new_data_container.SetFrame(data_frame)
# new_data_container.UpdateDataByFrame()
##Model
train_info['classifier'].SetDataContainer(new_data_container)
model = train_info['classifier'].GetModel()
predict = model.predict_proba(new_data_container.GetArray())[:, 1]
label = new_data_container.GetLabel()
case_name = new_data_container.GetCaseName()
np.save(os.path.join(result_save_path, 'test_predict.npy'), predict)
np.save(os.path.join(result_save_path, 'test_label.npy'), label)
test_result_info = [['CaseName', 'Pred', 'Label']]
for index in range(len(label)):
test_result_info.append([case_name[index], predict[index], label[index]])
with open(os.path.join(result_save_path, 'test_info.csv'), 'w', newline='') as csvfile:
writer = csv.writer(csvfile)
writer.writerows(test_result_info)
metric = EstimateMetirc(predict, label)
info = {}
info.update(metric)
cv = CrossValidation()
cv.SaveResult(info, result_save_path)
# print(metric)
return metric
if __name__ == '__main__':
TestNewData(r'D:\hospital\Huangli\smote\test_numeric_feature.csv',
r'D:\hospital\Huangli\smote\process-result\Norm0Center_PCC_ANOVA_5_LR',
r'D:\MyScript\demo') | 32.875 | 111 | 0.69962 |
import os
import csv
import numpy as np
from FAE.FeatureAnalysis.Normalizer import Normalizer
from FAE.DataContainer.DataContainer import DataContainer
from FAE.FeatureAnalysis.Classifier import Classifier
from FAE.Func.Metric import EstimateMetirc
from FAE.FeatureAnalysis.FeatureSelector import FeatureSelector
from FAE.FeatureAnalysis.CrossValidation import CrossValidation
def LoadTrainInfo(model_folder):
train_info = {}
Normalizer()
normalization_path = ''
for sub_file in os.listdir(model_folder):
if sub_file.rfind('_normalization_training.csv') != -1:
normalization_path = os.path.join(model_folder, sub_file)
if not os.path.exists(normalization_path):
print('Check the normalization name : zero_center_normalization')
else:
normalizer.Load(normalization_path)
train_info['normalizer'] = normalizer
ath = os.path.join(model_folder, 'feature_select_info.csv')
selected_feature_list = []
with open(selected_feature_path, 'r', newline='') as f:
f_reader = csv.reader(f)
for index in f_reader:
if index[0] == 'selected_feature':
selected_feature_list = index[1:]
if selected_feature_list == []:
print('No selected features')
train_info['selected_features'] = selected_feature_list
= Classifier()
classifier.Load(model_folder)
train_info['classifier'] = classifier
return train_info
def TestNewData(NewDataCsv, model_folder, result_save_path):
train_info = LoadTrainInfo(model_folder)
new_data_container = DataContainer()
new_data_container.Load(NewDataCsv)
feature_selector = FeatureSelector()
feature_selector.SelectFeatureByName(new_data_container, train_info['selected_features'], is_replace=True)
new_data_container = train_info['normalizer'].Transform(new_data_container)
ain_info['classifier'].SetDataContainer(new_data_container)
model = train_info['classifier'].GetModel()
predict = model.predict_proba(new_data_container.GetArray())[:, 1]
label = new_data_container.GetLabel()
case_name = new_data_container.GetCaseName()
np.save(os.path.join(result_save_path, 'test_predict.npy'), predict)
np.save(os.path.join(result_save_path, 'test_label.npy'), label)
test_result_info = [['CaseName', 'Pred', 'Label']]
for index in range(len(label)):
test_result_info.append([case_name[index], predict[index], label[index]])
with open(os.path.join(result_save_path, 'test_info.csv'), 'w', newline='') as csvfile:
writer = csv.writer(csvfile)
writer.writerows(test_result_info)
metric = EstimateMetirc(predict, label)
info = {}
info.update(metric)
cv = CrossValidation()
cv.SaveResult(info, result_save_path)
return metric
if __name__ == '__main__':
TestNewData(r'D:\hospital\Huangli\smote\test_numeric_feature.csv',
r'D:\hospital\Huangli\smote\process-result\Norm0Center_PCC_ANOVA_5_LR',
r'D:\MyScript\demo') | true | true |
f71c159e39dcb104058740ebee5fb752312a3553 | 160 | py | Python | cranes/__init__.py | annehulsey/high-resolution_post-earthquake_recovery_simulation_of_safety_cordons | 8b8bedceee0343d22143f48992136fc2fc34e191 | [
"MIT"
] | null | null | null | cranes/__init__.py | annehulsey/high-resolution_post-earthquake_recovery_simulation_of_safety_cordons | 8b8bedceee0343d22143f48992136fc2fc34e191 | [
"MIT"
] | null | null | null | cranes/__init__.py | annehulsey/high-resolution_post-earthquake_recovery_simulation_of_safety_cordons | 8b8bedceee0343d22143f48992136fc2fc34e191 | [
"MIT"
] | null | null | null | from .base import *
from .mapping import *
from .community_damage_sampling import *
from .downtime_logistics import *
from .analysis_and_visualization import *
| 26.666667 | 41 | 0.8125 | from .base import *
from .mapping import *
from .community_damage_sampling import *
from .downtime_logistics import *
from .analysis_and_visualization import *
| true | true |
f71c1615d02213bb8e244cd957fa6b17a89b9787 | 2,153 | py | Python | plugins/zscaler/icon_zscaler/actions/lookup_url/schema.py | lukaszlaszuk/insightconnect-plugins | 8c6ce323bfbb12c55f8b5a9c08975d25eb9f8892 | [
"MIT"
] | 46 | 2019-06-05T20:47:58.000Z | 2022-03-29T10:18:01.000Z | plugins/zscaler/icon_zscaler/actions/lookup_url/schema.py | lukaszlaszuk/insightconnect-plugins | 8c6ce323bfbb12c55f8b5a9c08975d25eb9f8892 | [
"MIT"
] | 386 | 2019-06-07T20:20:39.000Z | 2022-03-30T17:35:01.000Z | plugins/zscaler/icon_zscaler/actions/lookup_url/schema.py | lukaszlaszuk/insightconnect-plugins | 8c6ce323bfbb12c55f8b5a9c08975d25eb9f8892 | [
"MIT"
] | 43 | 2019-07-09T14:13:58.000Z | 2022-03-28T12:04:46.000Z | # GENERATED BY KOMAND SDK - DO NOT EDIT
import insightconnect_plugin_runtime
import json
class Component:
DESCRIPTION = "Look up the categorization of a given set of URLs"
class Input:
URLS = "urls"
class Output:
URL_CATEGORIZATION = "url_categorization"
class LookupUrlInput(insightconnect_plugin_runtime.Input):
schema = json.loads("""
{
"type": "object",
"title": "Variables",
"properties": {
"urls": {
"type": "array",
"title": "URLs",
"description": "The given set of URLs or domains to be looked up",
"items": {
"type": "string"
},
"order": 1
}
},
"required": [
"urls"
]
}
""")
def __init__(self):
super(self.__class__, self).__init__(self.schema)
class LookupUrlOutput(insightconnect_plugin_runtime.Output):
schema = json.loads("""
{
"type": "object",
"title": "Variables",
"properties": {
"url_categorization": {
"type": "array",
"title": "URL Categorization",
"description": "Information about given URLs",
"items": {
"$ref": "#/definitions/url_categorization"
},
"order": 1
}
},
"required": [
"url_categorization"
],
"definitions": {
"url_categorization": {
"type": "object",
"title": "url_categorization",
"properties": {
"url": {
"type": "string",
"title": "URL",
"description": "Checked URL",
"order": 1
},
"urlClassifications": {
"type": "array",
"title": "URL Classifications",
"description": "URL classifications",
"items": {
"type": "string"
},
"order": 2
},
"urlClassificationsWithSecurityAlert": {
"type": "array",
"title": "URL classifications with security alert",
"description": "URL classifications with security alert",
"items": {
"type": "string"
},
"order": 3
}
}
}
}
}
""")
def __init__(self):
super(self.__class__, self).__init__(self.schema)
| 21.53 | 72 | 0.533674 |
import insightconnect_plugin_runtime
import json
class Component:
DESCRIPTION = "Look up the categorization of a given set of URLs"
class Input:
URLS = "urls"
class Output:
URL_CATEGORIZATION = "url_categorization"
class LookupUrlInput(insightconnect_plugin_runtime.Input):
schema = json.loads("""
{
"type": "object",
"title": "Variables",
"properties": {
"urls": {
"type": "array",
"title": "URLs",
"description": "The given set of URLs or domains to be looked up",
"items": {
"type": "string"
},
"order": 1
}
},
"required": [
"urls"
]
}
""")
def __init__(self):
super(self.__class__, self).__init__(self.schema)
class LookupUrlOutput(insightconnect_plugin_runtime.Output):
schema = json.loads("""
{
"type": "object",
"title": "Variables",
"properties": {
"url_categorization": {
"type": "array",
"title": "URL Categorization",
"description": "Information about given URLs",
"items": {
"$ref": "#/definitions/url_categorization"
},
"order": 1
}
},
"required": [
"url_categorization"
],
"definitions": {
"url_categorization": {
"type": "object",
"title": "url_categorization",
"properties": {
"url": {
"type": "string",
"title": "URL",
"description": "Checked URL",
"order": 1
},
"urlClassifications": {
"type": "array",
"title": "URL Classifications",
"description": "URL classifications",
"items": {
"type": "string"
},
"order": 2
},
"urlClassificationsWithSecurityAlert": {
"type": "array",
"title": "URL classifications with security alert",
"description": "URL classifications with security alert",
"items": {
"type": "string"
},
"order": 3
}
}
}
}
}
""")
def __init__(self):
super(self.__class__, self).__init__(self.schema)
| true | true |
f71c1852ffabfd6520c29aa0e5896783707f3f65 | 9,759 | py | Python | pyhtmlcv.py | lietu/pyhtmlcv | 68e75d5b761f4cbf4315d9c573fdc39872abbc20 | [
"BSD-3-Clause"
] | 3 | 2017-02-15T14:02:57.000Z | 2019-04-30T23:33:55.000Z | pyhtmlcv.py | lietu/pyhtmlcv | 68e75d5b761f4cbf4315d9c573fdc39872abbc20 | [
"BSD-3-Clause"
] | null | null | null | pyhtmlcv.py | lietu/pyhtmlcv | 68e75d5b761f4cbf4315d9c573fdc39872abbc20 | [
"BSD-3-Clause"
] | 1 | 2017-06-01T15:58:09.000Z | 2017-06-01T15:58:09.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
PyHtmlCv is a tool that can be used to generate HTML CVs from a
simple JSON configuration.
:copyright: (c) 2012-2019 Janne Enberg
:license: BSD
"""
from argparse import ArgumentParser, ArgumentTypeError
import codecs
from datetime import datetime
from jinja2 import Environment, FileSystemLoader
import json
try:
from pathlib import Path
except ImportError:
from pathlib2 import Path
import re
import sass
import six
import shutil
import sys
from time import sleep
TEMPLATE_PATH = Path("templates")
def str2bool(value):
    """
    Convert CLI args to boolean

    :param str value:
    :return bool:
    """
    normalized = value.lower()
    if normalized in {"yes", "true", "t", "y", "1"}:
        return True
    if normalized in {"no", "false", "f", "n", "0"}:
        return False
    # Anything else is not a recognised boolean spelling
    raise ArgumentTypeError("Boolean value expected.")
def get_last_change(path):
    """
    Figure out when the given path has last changed, recursively if a directory
    """
    # Start with the path's own modification time, then fold in every
    # descendant's when the path is a directory.
    timestamps = [path.stat().st_mtime]
    if path.is_dir():
        timestamps.extend(
            entry.stat().st_mtime for entry in path.glob("**/*")
        )
    return max(timestamps)
def run(options):
    """
    Generate the CV page from the source + template

    :param options: Parsed CLI arguments; must provide the ``source``
        (JSON config path), ``target`` (output directory) and
        ``template`` (template name) attributes
    :raises ValueError: If the configuration file is not valid JSON
    :raises IOError: If the configuration file cannot be read
    """
    try:
        with codecs.open(options.source, encoding="utf-8") as f:
            config = json.load(f)
    # Print a human-friendly hint before re-raising so the traceback
    # still shows the original cause.
    except ValueError:
        print("Error parsing config {}.".format(options.source))
        print("")
        raise
    except IOError:
        print("Configuration file not found: {}".format(options.source))
        print("")
        raise

    validate_config(config)
    process_config(config)
    generate_cv(options.target, options.template, config)
def generate_cv(destination, template, config):
    """
    Render the CV from the named template and write it to ``destination``.

    The template directory is copied wholesale into ``destination`` (which
    is deleted first if it already exists), Sass/SCSS sources inside the
    copy are compiled to CSS and removed, and the rendered ``index.html``
    is written on top of the copied tree.

    :param destination: Output directory path; removed and recreated
    :param template: Name of a subdirectory under ``TEMPLATE_PATH``
    :param config: CV configuration dict; must contain at least the
        ``name``, ``contact`` and ``sections`` keys
    """
    # Get the Jinja2 template for the page
    template_path = str(TEMPLATE_PATH / template)
    env = Environment(loader=FileSystemLoader(template_path))
    # NOTE: rebinds the ``template`` parameter to the loaded Template object
    template = env.get_template("index.html")
    # Generate a few variables for the template
    now = datetime.now()
    current_time = now.strftime("%Y-%m-%d %H:%M:%S %z")
    year = now.strftime("%Y")
    # Navigation entries derived from the config sections (see the
    # generate_navigation helper elsewhere in this module)
    navigation = generate_navigation(config)
    # Render the template into HTML
    html = template.render(
        name=config["name"],
        contact=config["contact"],
        sections=config["sections"],
        navigation=navigation,
        now=current_time,
        year=year,
    )
    # Make sure that the destination path is deleted first, since
    # copytree requires the destination to not exist
    dst_path = Path(destination)
    if dst_path.exists():
        shutil.rmtree(destination)
    shutil.copytree(template_path, destination)
    # Compile Sass/SCSS found in the copied tree; every .scss file is
    # remembered for deletion, but only non-partials (names without a
    # leading underscore, per Sass convention) are compiled directly
    scss_files = []
    for entry in dst_path.glob("**/*.scss"):
        scss_files.append(entry)
        entry_name = entry.name
        if entry_name.endswith(".scss") and not entry_name.startswith("_"):
            entry_str = str(entry)
            compiled = sass.compile(filename=entry_str)
            # Replace the ".scss" suffix (5 chars) with ".css"
            entry_css = Path(entry_str[:-5] + ".css")
            with entry_css.open("w", encoding="utf-8") as f:
                f.write(compiled)
            print("Compiled {} to {}".format(entry, entry_css))
    # Delete the source .scss files now that compilation is done
    for entry in scss_files:
        entry.unlink()
    # And any left over empty directories; reversed() makes children
    # come before their parents so rmdir works bottom-up
    for entry in reversed(list(dst_path.rglob("*"))):
        if entry.exists() and entry.is_dir():
            empty = True
            for _ in entry.iterdir():
                empty = False
                break
            if empty:
                entry.rmdir()
    # Write the result HTML
    full_path = Path(destination) / "index.html"
    with full_path.open("w", encoding="utf-8") as f:
        f.write(html)
    print("Generated CV HTML to {}".format(full_path))
def validate_config(config):
    """
    Validate the raw CV configuration, printing every problem found.

    Checks that ``name``, ``contact`` and ``sections`` exist and that each
    non-string section has a title and at least one of ``fields``,
    ``large`` or ``largeList``, with each field being a two-item list.
    Exits the process (status 1) if any check failed; returns None when
    the config is acceptable.
    """
    # Collect all problems before bailing out so the user sees every error
    # in one run rather than one at a time.
    error = False
    if "name" not in config:
        print('Missing name definition, e.g. { "name": "Janne Enberg", ' "... }")
        error = True
    if "contact" not in config:
        print(
            "Missing contact definition, e.g. { ..., "
            '"contact": "+1 (2) 345 678 | contact@example.com", ... }'
        )
        error = True
    if "sections" not in config:
        print("Missing sections definition, e.g. { ..., " '"sections": [ ... ] }')
        error = True
    else:
        for section in config["sections"]:
            # String sections need no other validation
            # (they become headings or page breaks in process_config).
            if isinstance(section, six.string_types):
                continue
            if "title" not in section:
                print(
                    "Missing title from section definition, , "
                    'e.g. { ..., "sections": [ {"title": "Section '
                    'title", ...} ] }'
                )
                print("Found: {}".format(section))
                error = True
            if (
                "fields" not in section
                and "large" not in section
                and "largeList" not in section
            ):
                print(
                    "No fields, largeList or large definition for "
                    "section, , "
                    'e.g. { ..., "sections": [ {..., '
                    '"large": "Yadi yadi yada", ...} ] }'
                )
                error = True
            if "fields" in section:
                for field in section["fields"]:
                    # Each field must be a [label, value] pair.
                    if not isinstance(field, list) or len(field) != 2:
                        print(
                            "Invalid field definition, "
                            "it should have two items, e.g. { ..., "
                            '"sections": [ {..., "fields": [ ["Label",'
                            ' "Value"], ... }, ... ] }'
                        )
                        error = True
    if error:
        print("")
        print("Please fix errors in configuration file.")
        sys.exit(1)
def process_config(config):
    """
    Process the configuration from the readable format to a more useful format

    Mutates *config* in place: string sections become heading/page-break
    dicts, field pairs become label/value dicts, ``largeList`` entries are
    rendered to an HTML list in ``large``, and a main heading built from
    ``config["mainHeading"]`` is prepended to the section list.

    NOTE(review): ``mainHeading`` is not checked by validate_config(), so a
    config without it raises a bare KeyError here — confirm that is intended.
    """
    # Process sections
    for index, section in enumerate(config["sections"]):
        # String sections will be converted to type = heading
        # ("-" is the special page-break marker).
        if isinstance(section, six.string_types):
            if section == "-":
                config["sections"][index] = {"type": "page-break"}
            else:
                config["sections"][index] = {"type": "heading", "title": section}
            continue
        # The rest are just normal sections
        section["type"] = "normal"
        # Convert ["Label", "Value"] to {"label": "Label",
        # "value": "Value"}
        if "fields" in section:
            fields = []
            for fieldColumns in section["fields"]:
                fields.append({"label": fieldColumns[0], "value": fieldColumns[1]})
            section["fields"] = fields
        # Convert arrays in "largeList" field to <ul> -lists in "large"
        if "largeList" in section:
            section["large"] = (
                "<ul><li>" + "</li><li>".join(section["largeList"]) + "</li></ul>"
            )
            del section["largeList"]
    heading = config["mainHeading"]
    main_heading = {"type": "heading", "title": heading}
    config["sections"] = [main_heading] + config["sections"]
def generate_navigation(config):
    """
    Build the navigation structure: a dict mapping each heading title to
    its list of sections, plus a "headings" key listing heading titles in
    order.  Also assigns a unique HTML id to every non-page-break section.

    Assumes process_config() already ran, so the first section is a heading.
    """
    counter = 1
    nav = {"headings": []}
    for section in config["sections"]:
        # Page breaks don't need navigation
        if section["type"] == "page-break":
            continue
        title = section["title"]
        section["id"] = make_id(title, counter)
        if section["type"] == "heading":
            nav[title] = [section]
            nav["headings"].append(title)
            heading = title
        else:
            nav[heading].append(section)
        counter += 1
    return nav
def make_id(text, index):
    """
    Turn *text* into a valid HTML element id.

    Characters outside [0-9a-zA-Z-_.:] are replaced with "-"; ids that
    would start with a digit are prefixed with "id-" and suffixed with
    *index* to keep them unique.
    """
    safe = re.sub(r"[^0-9a-zA-Z\-_.:]", "-", text)
    # HTML ids must not begin with a digit; after the substitution above
    # only ASCII remains, so str.isdigit matches exactly [0-9] here.
    if safe[:1].isdigit():
        safe = "id-{}-{}".format(safe, index)
    return safe
def main():
    """
    CLI entry point: parse arguments and either generate the CV once or,
    with --watch, regenerate whenever the source JSON or the template
    directory changes.
    """
    ap = ArgumentParser()
    ap.add_argument("--source", default="cv.json", type=str, help="CV JSON source")
    ap.add_argument(
        "--target", type=str, help="Target directory, defaults to generated/<source>/"
    )
    ap.add_argument(
        "--template",
        type=str,
        default="default",
        help="One of the subfolders of templates/",
    )
    ap.add_argument(
        "--watch",
        type=str2bool,
        nargs="?",
        const=True,
        default=False,
        help="Keep watching for changes",
    )
    options = ap.parse_args()
    # Derive a default output directory from the source file name.
    if not options.target:
        options.target = str(Path("generated") / options.source)
    if options.watch:
        print("Press CTRL+C to stop monitoring for changes")
        # Poll mtimes; 0 guarantees the first loop iteration rebuilds.
        last_change = 0
        source_path = Path(options.source)
        template_path = TEMPLATE_PATH / options.template
        while True:
            changes = False
            source_change = get_last_change(source_path)
            if source_change > last_change:
                changes = True
            template_change = get_last_change(template_path)
            if template_change > last_change:
                changes = True
            if changes:
                last_change = max(template_change, source_change)
                try:
                    # Keep watching even if a build fails (e.g. a save
                    # mid-edit produced invalid JSON).
                    run(options)
                except Exception as e:
                    print(e)
                except SystemExit:
                    # validate_config() exits on bad config; swallow it so
                    # the watch loop survives.
                    pass
            sleep(0.25)
    else:
        run(options)
if __name__ == "__main__":
main()
| 28.043103 | 86 | 0.544216 |
from argparse import ArgumentParser, ArgumentTypeError
import codecs
from datetime import datetime
from jinja2 import Environment, FileSystemLoader
import json
try:
from pathlib import Path
except ImportError:
from pathlib2 import Path
import re
import sass
import six
import shutil
import sys
from time import sleep
TEMPLATE_PATH = Path("templates")
def str2bool(value):
if value.lower() in ("yes", "true", "t", "y", "1"):
return True
elif value.lower() in ("no", "false", "f", "n", "0"):
return False
else:
raise ArgumentTypeError("Boolean value expected.")
def get_last_change(path):
last_changed = path.stat().st_mtime
if path.is_dir():
for entry in path.glob("**/*"):
entry_changed = entry.stat().st_mtime
if entry_changed > last_changed:
last_changed = entry_changed
return last_changed
def run(options):
try:
with codecs.open(options.source, encoding="utf-8") as f:
config = json.load(f)
except ValueError as e:
print("Error parsing config {}.".format(options.source))
print("")
raise
except IOError as e:
print("Configuration file not found: {}".format(options.source))
print("")
raise
validate_config(config)
process_config(config)
generate_cv(options.target, options.template, config)
def generate_cv(destination, template, config):
template_path = str(TEMPLATE_PATH / template)
env = Environment(loader=FileSystemLoader(template_path))
template = env.get_template("index.html")
now = datetime.now()
current_time = now.strftime("%Y-%m-%d %H:%M:%S %z")
year = now.strftime("%Y")
navigation = generate_navigation(config)
html = template.render(
name=config["name"],
contact=config["contact"],
sections=config["sections"],
navigation=navigation,
now=current_time,
year=year,
)
dst_path = Path(destination)
if dst_path.exists():
shutil.rmtree(destination)
shutil.copytree(template_path, destination)
scss_files = []
for entry in dst_path.glob("**/*.scss"):
scss_files.append(entry)
entry_name = entry.name
if entry_name.endswith(".scss") and not entry_name.startswith("_"):
entry_str = str(entry)
compiled = sass.compile(filename=entry_str)
entry_css = Path(entry_str[:-5] + ".css")
with entry_css.open("w", encoding="utf-8") as f:
f.write(compiled)
print("Compiled {} to {}".format(entry, entry_css))
for entry in scss_files:
entry.unlink()
for entry in reversed(list(dst_path.rglob("*"))):
if entry.exists() and entry.is_dir():
empty = True
for _ in entry.iterdir():
empty = False
break
if empty:
entry.rmdir()
full_path = Path(destination) / "index.html"
with full_path.open("w", encoding="utf-8") as f:
f.write(html)
print("Generated CV HTML to {}".format(full_path))
def validate_config(config):
error = False
if "name" not in config:
print('Missing name definition, e.g. { "name": "Janne Enberg", ' "... }")
error = True
if "contact" not in config:
print(
"Missing contact definition, e.g. { ..., "
'"contact": "+1 (2) 345 678 | contact@example.com", ... }'
)
error = True
if "sections" not in config:
print("Missing sections definition, e.g. { ..., " '"sections": [ ... ] }')
error = True
else:
for section in config["sections"]:
if isinstance(section, six.string_types):
continue
if "title" not in section:
print(
"Missing title from section definition, , "
'e.g. { ..., "sections": [ {"title": "Section '
'title", ...} ] }'
)
print("Found: {}".format(section))
error = True
if (
"fields" not in section
and "large" not in section
and "largeList" not in section
):
print(
"No fields, largeList or large definition for "
"section, , "
'e.g. { ..., "sections": [ {..., '
'"large": "Yadi yadi yada", ...} ] }'
)
error = True
if "fields" in section:
for field in section["fields"]:
if not isinstance(field, list) or len(field) != 2:
print(
"Invalid field definition, "
"it should have two items, e.g. { ..., "
'"sections": [ {..., "fields": [ ["Label",'
' "Value"], ... }, ... ] }'
)
error = True
if error:
print("")
print("Please fix errors in configuration file.")
sys.exit(1)
def process_config(config):
for index, section in enumerate(config["sections"]):
if isinstance(section, six.string_types):
if section == "-":
config["sections"][index] = {"type": "page-break"}
else:
config["sections"][index] = {"type": "heading", "title": section}
continue
section["type"] = "normal"
if "fields" in section:
fields = []
for fieldColumns in section["fields"]:
fields.append({"label": fieldColumns[0], "value": fieldColumns[1]})
section["fields"] = fields
if "largeList" in section:
section["large"] = (
"<ul><li>" + "</li><li>".join(section["largeList"]) + "</li></ul>"
)
del section["largeList"]
heading = config["mainHeading"]
main_heading = {"type": "heading", "title": heading}
config["sections"] = [main_heading] + config["sections"]
def generate_navigation(config):
i = 1
nav = {"headings": []}
for _, section in enumerate(config["sections"]):
if section["type"] == "page-break":
continue
name = section["title"]
section["id"] = make_id(name, i)
if section["type"] == "heading":
nav[name] = [section]
nav["headings"].append(name)
heading = name
else:
nav[heading].append(section)
i += 1
return nav
def make_id(text, index):
# Replace characters not valid in IDs
text = re.sub(r"[^0-9a-zA-Z\-_.:]", "-", text)
# Text must not begin with a number
if re.match(r"^[0-9]", text):
text = "id-{}-{}".format(text, index)
return text
def main():
ap = ArgumentParser()
ap.add_argument("--source", default="cv.json", type=str, help="CV JSON source")
ap.add_argument(
"--target", type=str, help="Target directory, defaults to generated/<source>/"
)
ap.add_argument(
"--template",
type=str,
default="default",
help="One of the subfolders of templates/",
)
ap.add_argument(
"--watch",
type=str2bool,
nargs="?",
const=True,
default=False,
help="Keep watching for changes",
)
options = ap.parse_args()
if not options.target:
options.target = str(Path("generated") / options.source)
if options.watch:
print("Press CTRL+C to stop monitoring for changes")
last_change = 0
source_path = Path(options.source)
template_path = TEMPLATE_PATH / options.template
while True:
changes = False
source_change = get_last_change(source_path)
if source_change > last_change:
changes = True
template_change = get_last_change(template_path)
if template_change > last_change:
changes = True
if changes:
last_change = max(template_change, source_change)
try:
run(options)
except Exception as e:
print(e)
except SystemExit:
pass
sleep(0.25)
else:
run(options)
if __name__ == "__main__":
main()
| true | true |
f71c190a14841ef42f527cfad6bd4742ebd5bc55 | 929 | py | Python | nlpaug/util/audio/loader.py | lucidworks/nlpaug | 8e47fa39200db17f4dc1d61567af1419bc389071 | [
"MIT"
] | 1 | 2021-06-09T20:07:30.000Z | 2021-06-09T20:07:30.000Z | nlpaug/util/audio/loader.py | lucidworks/nlpaug | 8e47fa39200db17f4dc1d61567af1419bc389071 | [
"MIT"
] | null | null | null | nlpaug/util/audio/loader.py | lucidworks/nlpaug | 8e47fa39200db17f4dc1d61567af1419bc389071 | [
"MIT"
] | null | null | null | try:
import librosa
except ImportError:
# No installation required if not using this function
pass
class AudioLoader:
    """Static helpers for loading audio data and spectrograms via librosa."""

    @staticmethod
    def _require_librosa():
        """Import and return librosa, raising a helpful error if missing.

        Factored out so both loaders share one import guard instead of
        duplicating the try/except block.
        """
        try:
            import librosa
        except ModuleNotFoundError:
            raise ModuleNotFoundError(
                "Missed librosa library. Install import librosa by `pip install librosa`"
            )
        return librosa

    @staticmethod
    def load_audio(file_path):
        """Return librosa's (samples, sample_rate) tuple for *file_path*."""
        librosa = AudioLoader._require_librosa()
        return librosa.load(file_path)

    @staticmethod
    def load_mel_spectrogram(file_path, n_mels=128, fmax=8000):
        """Return a mel spectrogram for *file_path*.

        :param n_mels: number of mel bands
        :param fmax: highest frequency (Hz) used when building the mel basis
        """
        librosa = AudioLoader._require_librosa()
        audio, sampling_rate = AudioLoader.load_audio(file_path)
        return librosa.feature.melspectrogram(
            y=audio, sr=sampling_rate, n_mels=n_mels, fmax=fmax
        )
| 28.151515 | 89 | 0.634015 | try:
import librosa
except ImportError:
pass
class AudioLoader:
@staticmethod
def load_audio(file_path):
try:
import librosa
except ModuleNotFoundError:
raise ModuleNotFoundError(
"Missed librosa library. Install import librosa by `pip install librosa`"
)
return librosa.load(file_path)
@staticmethod
def load_mel_spectrogram(file_path, n_mels=128, fmax=8000):
try:
import librosa
except ModuleNotFoundError:
raise ModuleNotFoundError(
"Missed librosa library. Install import librosa by `pip install librosa`"
)
audio, sampling_rate = AudioLoader.load_audio(file_path)
return librosa.feature.melspectrogram(
y=audio, sr=sampling_rate, n_mels=n_mels, fmax=fmax
)
| true | true |
f71c191c5eb6a1641c149fff6ae72d57a8d19cda | 4,286 | py | Python | dataset/DeepFakes/faceswap-master/lib/training_data.py | MrThiago/FaceForensics | 1806e70d0dd2294a12a8afd1c3f59d6ecac639bf | [
"MIT"
] | 1,930 | 2018-04-20T14:52:01.000Z | 2022-03-30T13:53:31.000Z | dataset/DeepFakes/faceswap-master/lib/training_data.py | chrisgorgo/FaceForensics | a815daa9ebb7c12240a4b7162c431af0e1b959fa | [
"MIT"
] | 68 | 2019-02-14T09:09:02.000Z | 2022-03-23T08:55:23.000Z | dataset/DeepFakes/faceswap-master/lib/training_data.py | chrisgorgo/FaceForensics | a815daa9ebb7c12240a4b7162c431af0e1b959fa | [
"MIT"
] | 499 | 2018-04-20T11:27:11.000Z | 2022-03-29T16:29:50.000Z | import cv2
import numpy
from random import shuffle
from .utils import BackgroundGenerator
from .umeyama import umeyama
class TrainingDataGenerator():
    """Yields endless minibatches of (warped, target) face-image pairs.

    Each source image is randomly rotated/zoomed/shifted/flipped, then
    locally warped; the warped crop is the network input and the affine
    target crop is the training target.
    """

    def __init__(self, random_transform_args, coverage, scale=5, zoom=1): #TODO thos default should stay in the warp function
        # random_transform_args: kwargs dict for random_transform()
        # coverage: side length (px) of the central region sampled from the
        #           256x256 aligned face
        # scale: std-dev of the random jitter added to the warp grid
        # zoom: output-size multiplier (64*zoom square crops)
        self.random_transform_args = random_transform_args
        self.coverage = coverage
        self.scale = scale
        self.zoom = zoom

    def minibatchAB(self, images, batchsize):
        """Wrap minibatch() in a background thread and re-yield its output."""
        batch = BackgroundGenerator(self.minibatch(images, batchsize), 1)
        for ep1, warped_img, target_img in batch.iterator():
            yield ep1, warped_img, target_img

    # A generator function that yields epoch, batchsize of warped_img and batchsize of target_img
    def minibatch(self, data, batchsize):
        length = len(data)
        assert length >= batchsize, "Number of images is lower than batch-size (Note that too few images may lead to bad training). # images: {}, batch-size: {}".format(length, batchsize)
        epoch = i = 0
        shuffle(data)
        while True:
            size = batchsize
            # Reshuffle and start a new epoch when the remaining images
            # cannot fill a whole batch.
            if i+size > length:
                shuffle(data)
                i = 0
                epoch+=1
            # rtn has shape (batch, 2, H, W, C): axis 1 holds warped/target.
            rtn = numpy.float32([self.read_image(img) for img in data[i:i+size]])
            i+=size
            yield epoch, rtn[:,0,:,:,:], rtn[:,1,:,:,:]

    def color_adjust(self, img):
        # Map uint8 pixel values into [0, 1] floats.
        return img / 255.0

    def read_image(self, fn):
        """Load one image file and return its (warped, target) pair."""
        try:
            # cv2.imread returns None for unreadable paths; the division in
            # color_adjust then raises TypeError, which we re-wrap below.
            image = self.color_adjust(cv2.imread(fn))
        except TypeError:
            raise Exception("Error while reading image", fn)
        image = cv2.resize(image, (256,256))
        image = self.random_transform( image, **self.random_transform_args )
        warped_img, target_img = self.random_warp( image, self.coverage, self.scale, self.zoom )
        return warped_img, target_img

    def random_transform(self, image, rotation_range, zoom_range, shift_range, random_flip):
        """Apply a random rotation/zoom/shift and maybe a horizontal flip."""
        h, w = image.shape[0:2]
        rotation = numpy.random.uniform(-rotation_range, rotation_range)
        scale = numpy.random.uniform(1 - zoom_range, 1 + zoom_range)
        # Shift is expressed as a fraction of the image size.
        tx = numpy.random.uniform(-shift_range, shift_range) * w
        ty = numpy.random.uniform(-shift_range, shift_range) * h
        mat = cv2.getRotationMatrix2D((w // 2, h // 2), rotation, scale)
        mat[:, 2] += (tx, ty)
        result = cv2.warpAffine(
            image, mat, (w, h), borderMode=cv2.BORDER_REPLICATE)
        # random_flip is the probability of mirroring horizontally.
        if numpy.random.random() < random_flip:
            result = result[:, ::-1]
        return result

    # get pair of random warped images from aligned face image
    def random_warp(self, image, coverage, scale = 5, zoom = 1):
        """Return (warped, target) crops from a 256x256 aligned face.

        A 5x5 control grid over the central *coverage* region is jittered by
        Gaussian noise (std *scale*); remapping through it gives the warped
        image, while umeyama fits the best affine transform to produce the
        undistorted target.  Both outputs are (64*zoom) square.
        """
        assert image.shape == (256, 256, 3)
        range_ = numpy.linspace(128 - coverage//2, 128 + coverage//2, 5)
        mapx = numpy.broadcast_to(range_, (5, 5))
        mapy = mapx.T
        mapx = mapx + numpy.random.normal(size=(5,5), scale=scale)
        mapy = mapy + numpy.random.normal(size=(5,5), scale=scale)
        # Upsample the control grid, then trim the border cells to avoid
        # edge artifacts from the interpolation.
        interp_mapx = cv2.resize(mapx, (80*zoom,80*zoom))[8*zoom:72*zoom,8*zoom:72*zoom].astype('float32')
        interp_mapy = cv2.resize(mapy, (80*zoom,80*zoom))[8*zoom:72*zoom,8*zoom:72*zoom].astype('float32')
        warped_image = cv2.remap(image, interp_mapx, interp_mapy, cv2.INTER_LINEAR)
        src_points = numpy.stack([mapx.ravel(), mapy.ravel() ], axis=-1)
        dst_points = numpy.mgrid[0:65*zoom:16*zoom,0:65*zoom:16*zoom].T.reshape(-1,2)
        # Least-squares similarity transform from jittered grid to the
        # regular output grid.
        mat = umeyama(src_points, dst_points, True)[0:2]
        target_image = cv2.warpAffine(image, mat, (64*zoom,64*zoom))
        return warped_image, target_image
def stack_images(images):
    """
    Tile an n-dimensional batch of images into one large image.

    Interleaves the grid axes with the per-image spatial axes (keeping the
    channel axis last) and collapses each interleaved pair, e.g. an array
    of shape (rows, cols, h, w, c) becomes (rows*h, cols*w, c).
    """
    def split_axes(ndim):
        # Pair up grid and spatial axes; the last axis (channels) stays put.
        last = ndim - 1
        if ndim % 2 == 0:
            rows = list(range(1, last, 2))
            cols = list(range(0, last, 2))
        else:
            rows = list(range(0, last, 2))
            cols = list(range(1, last, 2))
        return rows, cols, [last]

    shape = numpy.array(images.shape)
    axis_groups = split_axes(len(shape))
    flat_shape = [numpy.prod(shape[group]) for group in axis_groups]
    order = numpy.concatenate(axis_groups)
    return numpy.transpose(images, axes=order).reshape(flat_shape)
| 40.819048 | 187 | 0.617592 | import cv2
import numpy
from random import shuffle
from .utils import BackgroundGenerator
from .umeyama import umeyama
class TrainingDataGenerator():
def __init__(self, random_transform_args, coverage, scale=5, zoom=1):
self.random_transform_args = random_transform_args
self.coverage = coverage
self.scale = scale
self.zoom = zoom
def minibatchAB(self, images, batchsize):
batch = BackgroundGenerator(self.minibatch(images, batchsize), 1)
for ep1, warped_img, target_img in batch.iterator():
yield ep1, warped_img, target_img
def minibatch(self, data, batchsize):
length = len(data)
assert length >= batchsize, "Number of images is lower than batch-size (Note that too few images may lead to bad training). # images: {}, batch-size: {}".format(length, batchsize)
epoch = i = 0
shuffle(data)
while True:
size = batchsize
if i+size > length:
shuffle(data)
i = 0
epoch+=1
rtn = numpy.float32([self.read_image(img) for img in data[i:i+size]])
i+=size
yield epoch, rtn[:,0,:,:,:], rtn[:,1,:,:,:]
def color_adjust(self, img):
return img / 255.0
def read_image(self, fn):
try:
image = self.color_adjust(cv2.imread(fn))
except TypeError:
raise Exception("Error while reading image", fn)
image = cv2.resize(image, (256,256))
image = self.random_transform( image, **self.random_transform_args )
warped_img, target_img = self.random_warp( image, self.coverage, self.scale, self.zoom )
return warped_img, target_img
def random_transform(self, image, rotation_range, zoom_range, shift_range, random_flip):
h, w = image.shape[0:2]
rotation = numpy.random.uniform(-rotation_range, rotation_range)
scale = numpy.random.uniform(1 - zoom_range, 1 + zoom_range)
tx = numpy.random.uniform(-shift_range, shift_range) * w
ty = numpy.random.uniform(-shift_range, shift_range) * h
mat = cv2.getRotationMatrix2D((w // 2, h // 2), rotation, scale)
mat[:, 2] += (tx, ty)
result = cv2.warpAffine(
image, mat, (w, h), borderMode=cv2.BORDER_REPLICATE)
if numpy.random.random() < random_flip:
result = result[:, ::-1]
return result
def random_warp(self, image, coverage, scale = 5, zoom = 1):
assert image.shape == (256, 256, 3)
range_ = numpy.linspace(128 - coverage//2, 128 + coverage//2, 5)
mapx = numpy.broadcast_to(range_, (5, 5))
mapy = mapx.T
mapx = mapx + numpy.random.normal(size=(5,5), scale=scale)
mapy = mapy + numpy.random.normal(size=(5,5), scale=scale)
interp_mapx = cv2.resize(mapx, (80*zoom,80*zoom))[8*zoom:72*zoom,8*zoom:72*zoom].astype('float32')
interp_mapy = cv2.resize(mapy, (80*zoom,80*zoom))[8*zoom:72*zoom,8*zoom:72*zoom].astype('float32')
warped_image = cv2.remap(image, interp_mapx, interp_mapy, cv2.INTER_LINEAR)
src_points = numpy.stack([mapx.ravel(), mapy.ravel() ], axis=-1)
dst_points = numpy.mgrid[0:65*zoom:16*zoom,0:65*zoom:16*zoom].T.reshape(-1,2)
mat = umeyama(src_points, dst_points, True)[0:2]
target_image = cv2.warpAffine(image, mat, (64*zoom,64*zoom))
return warped_image, target_image
def stack_images(images):
def get_transpose_axes(n):
if n % 2 == 0:
y_axes = list(range(1, n - 1, 2))
x_axes = list(range(0, n - 1, 2))
else:
y_axes = list(range(0, n - 1, 2))
x_axes = list(range(1, n - 1, 2))
return y_axes, x_axes, [n - 1]
images_shape = numpy.array(images.shape)
new_axes = get_transpose_axes(len(images_shape))
new_shape = [numpy.prod(images_shape[x]) for x in new_axes]
return numpy.transpose(
images,
axes=numpy.concatenate(new_axes)
).reshape(new_shape)
| true | true |
f71c1b0d0a31e515a655f2a67f62120cc4232d70 | 1,106 | py | Python | setup.py | albertosottile/SmartGadget-gatt | b5b4002f3635afcb97de5106676cc7142b1e9ca5 | [
"MIT"
] | 2 | 2021-06-14T18:08:16.000Z | 2021-08-29T06:48:10.000Z | setup.py | albertosottile/SmartGadget-gatt | b5b4002f3635afcb97de5106676cc7142b1e9ca5 | [
"MIT"
] | null | null | null | setup.py | albertosottile/SmartGadget-gatt | b5b4002f3635afcb97de5106676cc7142b1e9ca5 | [
"MIT"
] | 1 | 2021-08-29T06:48:11.000Z | 2021-08-29T06:48:11.000Z | #!/usr/bin/env python3
import setuptools
def read(fname):
    """Return the full text of *fname*, decoded as UTF-8.

    The explicit encoding keeps the build reproducible regardless of the
    machine's locale (setup.py previously relied on the platform default).
    """
    with open(fname, 'r', encoding='utf-8') as f:
        return f.read()
# Package metadata for the smartgadget distribution.  The runtime
# dependency list is read from requirements.txt so it lives in one place.
setuptools.setup(
    name="smartgadget",
    version="0.1",
    author="Alberto Sottile",
    author_email="alby128@gmail.com",
    description=' '.join([
        'Interact with a Sensirion SHT31 Smart Gadget',
        'Development Kit using the Bluetooth GATT SDK for Python'
    ]),
    url="https://github.com/albertosottile/SmartGadget-gatt",
    packages=['smartgadget'],
    # One requirement per line in requirements.txt.
    install_requires=read('requirements.txt').splitlines(),
    python_requires=">=3.5",
    include_package_data=True,
    classifiers=[
        "Development Status :: 4 - Beta",
        "Intended Audience :: Education",
        "Intended Audience :: Manufacturing",
        "Intended Audience :: Science/Research",
        "License :: OSI Approved :: MIT License",
        "Operating System :: POSIX :: Linux",
        "Programming Language :: Python :: 3.5",
        "Programming Language :: Python :: 3.6",
        "Programming Language :: Python :: 3.7",
        "Topic :: Scientific/Engineering"
    ],
)
| 30.722222 | 65 | 0.621157 |
import setuptools
def read(fname):
with open(fname, 'r') as f:
return f.read()
setuptools.setup(
name="smartgadget",
version="0.1",
author="Alberto Sottile",
author_email="alby128@gmail.com",
description=' '.join([
'Interact with a Sensirion SHT31 Smart Gadget',
'Development Kit using the Bluetooth GATT SDK for Python'
]),
url="https://github.com/albertosottile/SmartGadget-gatt",
packages=['smartgadget'],
install_requires=read('requirements.txt').splitlines(),
python_requires=">=3.5",
include_package_data=True,
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Education",
"Intended Audience :: Manufacturing",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Topic :: Scientific/Engineering"
],
)
| true | true |
f71c1b1f9c2288ed03520cd964f1b666fcfab9a4 | 1,302 | py | Python | CV_A4_/A4_compute_descriptors.py | pseudowasabi/computer-vision-exercises | 34b7c8402c32dbb00e484f90780ebb6546a3f8dc | [
"MIT"
] | null | null | null | CV_A4_/A4_compute_descriptors.py | pseudowasabi/computer-vision-exercises | 34b7c8402c32dbb00e484f90780ebb6546a3f8dc | [
"MIT"
] | null | null | null | CV_A4_/A4_compute_descriptors.py | pseudowasabi/computer-vision-exercises | 34b7c8402c32dbb00e484f90780ebb6546a3f8dc | [
"MIT"
] | null | null | null | '''
Computer vision assignment 4 by Yoseob Kim
A4_compute_descriptors.py
Compute similarity-reflected image descriptors with L1, L2 norm distances by using SIFT descriptors.
* Status: (working on it)
* GitHub Link: https://github.com/pseudowasabi/computer-vision-exercises/tree/master/CV_A4_
'''
import cv2
import numpy as np
import math
import time
import operator
import random
# Load the first benchmark image in grayscale (currently unused below;
# presumably kept for later descriptor-matching steps — TODO confirm).
img = cv2.imread('ukbench00000.jpg', cv2.IMREAD_GRAYSCALE)
'''
my_min = np.inf
my_max = 0'''
# Scan the 1000 precomputed SIFT descriptor files (sift100000..sift100999)
# and reshape each raw byte blob into (N, 128) descriptor rows.
for i in range(1000):
    # Zero-pad the index to three digits to build the file suffix.
    offset = '00' if i < 10 else '0' if i < 100 else ''
    offset += str(i)
    #print(offset)
    f = open('./sift/sift100'+offset, 'rb')
    # reference - https://numpy.org/doc/stable/reference/generated/numpy.frombuffer.html
    # Each descriptor is 128 unsigned bytes, stored back to back.
    sift_des = np.frombuffer(f.read(), dtype=np.uint8)
    #print(sift_des.shape)
    #print(sift_des)
    '''
    if sift_des.shape[0] % 128 != 0:
        print('divide error')
    '''
    # One row per keypoint: (num_descriptors, 128).
    sift_des_reshaped = np.reshape(sift_des, (sift_des.shape[0] // 128, 128))
    #print(sift_des_reshaped.shape)
    '''
    if sift_des_reshaped.shape[0] < my_min:
        my_min = sift_des_reshaped.shape[0]
    if sift_des_reshaped.shape[0] > my_max:
        my_max = sift_des_reshaped.shape[0]'''
    f.close()
#print(my_min, my_max)
# N size
# min = 73, max = 2388
| 22.842105 | 100 | 0.669739 |
import cv2
import numpy as np
import math
import time
import operator
import random
img = cv2.imread('ukbench00000.jpg', cv2.IMREAD_GRAYSCALE)
for i in range(1000):
offset = '00' if i < 10 else '0' if i < 100 else ''
offset += str(i)
f = open('./sift/sift100'+offset, 'rb')
sift_des = np.frombuffer(f.read(), dtype=np.uint8)
sift_des_reshaped = np.reshape(sift_des, (sift_des.shape[0] // 128, 128))
f.close()
| true | true |
f71c1c5ae6f65d71e66399bf4776b2b7437ab5a8 | 713 | py | Python | audiophiler/util.py | Mstrodl/audiophiler | 2e3a8299b58ab92a851cae726cc9184a2dad05f8 | [
"MIT"
] | 5 | 2017-05-08T05:07:22.000Z | 2021-06-01T18:48:30.000Z | audiophiler/util.py | Mstrodl/audiophiler | 2e3a8299b58ab92a851cae726cc9184a2dad05f8 | [
"MIT"
] | 34 | 2017-09-11T19:18:40.000Z | 2021-08-28T21:38:15.000Z | audiophiler/util.py | Mstrodl/audiophiler | 2e3a8299b58ab92a851cae726cc9184a2dad05f8 | [
"MIT"
] | 20 | 2017-09-09T22:02:11.000Z | 2021-08-28T17:45:59.000Z | # File: util.py
# Audiophiler utility functions
# Credit to Liam Middlebrook and Ram Zallan
# https://github.com/liam-middlebrook/gallery
from functools import wraps
from flask import session
from audiophiler.models import Tour
def audiophiler_auth(func):
    """
    Decorator that injects an ``auth_dict`` keyword argument built from the
    OIDC userinfo stored in the Flask session (``sub`` -> uuid,
    ``preferred_username`` -> uid).
    """
    @wraps(func)
    def wrapped_function(*args, **kwargs):
        userinfo = session["userinfo"]
        kwargs["auth_dict"] = {
            "uuid": str(userinfo.get("sub", "")),
            "uid": str(userinfo.get("preferred_username", "")),
        }
        return func(*args, **kwargs)
    return wrapped_function
def get_tour_lock_status():
    # The Tour table is used as a single-row settings store: the first
    # record's tour_lock flag is the global lock state.
    # NOTE(review): raises AttributeError if the table is empty — confirm a
    # row is always seeded.
    lock = Tour.query.first()
    return lock.tour_lock
| 27.423077 | 68 | 0.652174 |
from functools import wraps
from flask import session
from audiophiler.models import Tour
def audiophiler_auth(func):
@wraps(func)
def wrapped_function(*args, **kwargs):
uuid = str(session["userinfo"].get("sub", ""))
uid = str(session["userinfo"].get("preferred_username", ""))
auth_dict = {
"uuid": uuid,
"uid": uid
}
kwargs["auth_dict"] = auth_dict
return func(*args, **kwargs)
return wrapped_function
def get_tour_lock_status():
lock = Tour.query.first()
return lock.tour_lock
| true | true |
f71c1c8decca6e84cea60dec4d962fc15cc6ae66 | 4,251 | py | Python | qutip/hardware_info.py | kiuthed/qutip | b6fb8e5bbd9ffeae117b54e56313e8617038deab | [
"BSD-3-Clause"
] | null | null | null | qutip/hardware_info.py | kiuthed/qutip | b6fb8e5bbd9ffeae117b54e56313e8617038deab | [
"BSD-3-Clause"
] | null | null | null | qutip/hardware_info.py | kiuthed/qutip | b6fb8e5bbd9ffeae117b54e56313e8617038deab | [
"BSD-3-Clause"
] | null | null | null | # This file is part of QuTiP: Quantum Toolbox in Python.
#
# Copyright (c) 2011 and later, Paul D. Nation and Robert J. Johansson.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of the QuTiP: Quantum Toolbox in Python nor the names
# of its contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
# PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
###############################################################################
__all__ = ['hardware_info']
import os
import sys
def _mac_hardware_info():
    """Collect CPU and memory info on macOS via ``sysctl hw``.

    Returns a dict with 'cpus', 'cpu_freq' (GHz), 'memsize' (MB) and 'os'.
    """
    info = dict()
    results = dict()
    for l in [l.split(':') for l in os.popen('sysctl hw').readlines()[1:20]]:
        # Normalise the key, e.g. 'hw.physicalcpu' -> 'physicalcpu'.
        key = l[0].strip(' "').replace(' ', '_').lower()
        # BUG FIX: str.strip('hw.') removes the *characters* h/w/. from both
        # ends (mangling keys such as 'hw.watchpoint' -> 'atchpoint');
        # remove the literal 'hw.' prefix instead.
        if key.startswith('hw.'):
            key = key[len('hw.'):]
        info[key] = l[1].strip('.\n ')
    results.update({'cpus': int(info['physicalcpu'])})
    results.update({'cpu_freq': int(info['cpufrequency']) / (1000. ** 3)})
    results.update({'memsize': int(info['memsize']) / (1024 ** 2)})
    # add OS information
    results.update({'os': 'Mac OSX'})
    return results
def _linux_hardware_info():
    """Collect CPU and memory info on Linux via ``lscpu`` and /proc.

    Returns a dict with 'cpus', 'cpu_freq' (GHz), 'memsize' (MB) and 'os'.
    """
    results = {}
    # get cpu number
    cpu_info = dict()
    for l in [l.split(':') for l in os.popen('lscpu').readlines()]:
        cpu_info[l[0]] = l[1].strip('.\n ').strip('kB')
    sockets = int(cpu_info['Socket(s)'])
    cores_per_socket = int(cpu_info['Core(s) per socket'])
    results.update({'cpus': sockets * cores_per_socket})
    # get cpu frequency directly (bypasses freq scaling)
    try:
        file = "/sys/devices/system/cpu/cpu0/cpufreq/cpuinfo_max_freq"
        # Use a context manager so the handle is closed (it leaked before).
        with open(file) as fh:
            cpu_freq = fh.readlines()[0]
        cpu_freq = float(cpu_freq.strip('\n'))
        results.update({'cpu_freq': cpu_freq / (1000. ** 2)})
    except Exception:
        # Narrowed from a bare `except:` (which also swallowed
        # KeyboardInterrupt); keep the lscpu-reported frequency fallback.
        cpu_freq = float(cpu_info['CPU MHz']) / 1000.
        results.update({'cpu_freq': cpu_freq})
    # get total amount of memory
    mem_info = dict()
    with open("/proc/meminfo") as fh:
        for l in [l.split(':') for l in fh.readlines()]:
            mem_info[l[0]] = l[1].strip('.\n ').strip('kB')
    results.update({'memsize': int(mem_info['MemTotal']) / 1024})
    # add OS information
    results.update({'os': 'Linux'})
    return results
def _win_hardware_info():
return {'os': 'Windows'}
def hardware_info():
    """
    Returns basic hardware information about the computer.

    Gives actual number of CPU's in the machine, even when hyperthreading is
    turned on.

    Returns
    -------
    info : dict
        Dictionary containing cpu and memory information. Empty when the
        platform is unsupported or probing fails.
    """
    try:
        if sys.platform == 'darwin':
            out = _mac_hardware_info()
        elif sys.platform == 'win32':
            out = _win_hardware_info()
        elif sys.platform in ['linux', 'linux2']:
            out = _linux_hardware_info()
        else:
            out = {}
    except Exception:
        # Narrowed from a bare `except:` (which also trapped SystemExit and
        # KeyboardInterrupt); probing is best-effort, so fail to an empty dict.
        return {}
    else:
        return out
if __name__ == '__main__':
    # Running this module directly prints the detected hardware summary.
    print(hardware_info())
| 36.646552 | 79 | 0.638908 | true | true | |
f71c1cb01d78b077e343818c4290f9abafc4925e | 1,900 | py | Python | cron_descriptor/GetText.py | nathmo/cron-descriptor | 2475065be9e203ed5cea49ec6ca365384f433cb6 | [
"MIT"
] | 3 | 2018-12-11T18:51:36.000Z | 2019-10-16T19:10:19.000Z | cron_descriptor/GetText.py | nathmo/cron-descriptor | 2475065be9e203ed5cea49ec6ca365384f433cb6 | [
"MIT"
] | 2 | 2019-04-14T04:14:31.000Z | 2019-10-15T03:23:54.000Z | cron_descriptor/cron_descriptor/GetText.py | michaelblyons/SublimeSyntax-Crontab | 54f1fa7ff0c9d18aea3790555dba6e533ce3749b | [
"MIT"
] | 2 | 2019-04-11T06:13:54.000Z | 2019-10-04T02:49:58.000Z | # The MIT License (MIT)
#
# Copyright (c) 2016 Adam Schubert
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import gettext
import os
import logging

logger = logging.getLogger(__name__)


class GetText(object):
    """
    Handles language translations and Initializes global _() function
    """

    def __init__(self, locale_code):
        """
        Initialize GetText

        :param locale_code: selected locale (``locale/<locale_code>.mo`` is
            looked up next to this file; a NullTranslations fallback is used
            when the catalog is missing)
        """
        try:
            filename = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                                    'locale', '{}.mo'.format(locale_code))
            # Close the .mo file deterministically instead of leaking the
            # handle until garbage collection.
            with open(filename, "rb") as fp:
                trans = gettext.GNUTranslations(fp)
            logger.debug('{} Loaded'.format(filename))
        except IOError:
            logger.debug('Failed to find locale {}'.format(locale_code))
            trans = gettext.NullTranslations()
        # Installs _() into builtins for the whole process.
        trans.install()
| 36.538462 | 80 | 0.7 |
import gettext
import os
import logging
logger = logging.getLogger(__name__)
class GetText(object):
    """Load a gettext catalog for a locale and install ``_()`` globally."""

    def __init__(self, locale_code):
        """Look up ``locale/<locale_code>.mo`` next to this file; fall back to
        an identity (NullTranslations) translator when the catalog is missing.

        :param locale_code: selected locale
        """
        try:
            filename = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                                    'locale', '{}.mo'.format(locale_code))
            # NOTE(review): the file handle is never closed explicitly — it is
            # only released when the object is garbage-collected.
            trans = gettext.GNUTranslations(open(filename, "rb"))
            logger.debug('{} Loaded'.format(filename))
        except IOError:
            logger.debug('Failed to find locale {}'.format(locale_code))
            trans = gettext.NullTranslations()
        # Installs _() into builtins for the whole process.
        trans.install()
| true | true |
f71c1cd7ddd442ab1b3e8652cdd667ec554c98f4 | 23,533 | py | Python | adnmtf/nmtf_core.py | Advestis/adnmtf | 7b36da64669894506071a75d8bd341edb0e75b9f | [
"MIT"
] | null | null | null | adnmtf/nmtf_core.py | Advestis/adnmtf | 7b36da64669894506071a75d8bd341edb0e75b9f | [
"MIT"
] | null | null | null | adnmtf/nmtf_core.py | Advestis/adnmtf | 7b36da64669894506071a75d8bd341edb0e75b9f | [
"MIT"
] | null | null | null | """Non-negative matrix and tensor factorization core functions
"""
# Author: Paul Fogel
# License: MIT
# Jan 4, '20
from typing import Tuple
import numpy as np
from .nmtf_utils import EPSILON, sparse_opt
import logging
logger = logging.getLogger(__name__)
# TODO (pcotte): typing
# TODO (pcotte): docstrings (with parameters and returns)
def ntf_stack(m, mmis, n_blocks):
    """Unfold tensor M for future use with NMF.

    Parameters
    ----------
    m: (n x p) matrix made of n_blocks horizontally-concatenated blocks.
    mmis: Missing-value mask (0 = missing cell, 1 = real cell); may be empty.
    n_blocks: Number of blocks in m (p must be divisible by n_blocks).

    Returns
    -------
    mstacked: (n*p/n_blocks x n_blocks) matrix, one column per block, each
        block's columns stacked vertically.
    mmis_stacked: The mask stacked the same way, or an empty array when no
        mask was supplied.
    """
    n, p = m.shape
    # `np.int` was removed in NumPy 1.24; the builtin `int` is the
    # documented replacement (same C-long semantics as before).
    mmis = mmis.astype(int)
    n_mmis = mmis.shape[0]
    n_blocks = int(n_blocks)
    mstacked = np.zeros((int(n * p / n_blocks), n_blocks))
    if n_mmis > 0:
        mmis_stacked = np.zeros((int(n * p / n_blocks), n_blocks))
    else:
        mmis_stacked = np.array([])
    for i_block in range(0, n_blocks):
        for j in range(0, int(p / n_blocks)):
            i1 = j * n
            i2 = i1 + n
            mstacked[i1:i2, i_block] = m[:, int(i_block * p / n_blocks + j)]
            if n_mmis > 0:
                mmis_stacked[i1:i2, i_block] = mmis[:, int(i_block * p / n_blocks + j)]
    return mstacked, mmis_stacked
def ntf_solve(
    m,
    mmis,
    mt0,
    mw0,
    mb0,
    nc,
    tolerance,
    log_iter,
    status0,
    max_iterations,
    nmf_fix_user_lhe,
    nmf_fix_user_rhe,
    nmf_fix_user_bhe,
    nmf_sparse_level,
    ntf_unimodal,
    ntf_smooth,
    ntf_left_components,
    ntf_right_components,
    ntf_block_components,
    n_blocks,
    nmf_priors,
    my_status_box,
):
    """Thin front-end to ntf_solve_simple.

    When a non-empty prior matrix is supplied, its column count overrides the
    requested rank ``nc`` and the priors are binarized in place (every
    positive entry becomes 1) before being forwarded.
    """
    if len(nmf_priors) > 0:
        # len(nmf_priors) == nmf_priors.shape[0], so a non-empty prior matrix
        # both dictates the rank and gets binarized.
        nc = nmf_priors.shape[1]
        nmf_priors[nmf_priors > 0] = 1
    return ntf_solve_simple(
        m=m,
        mmis=mmis,
        mt0=mt0,
        mw0=mw0,
        mb0=mb0,
        nc=nc,
        tolerance=tolerance,
        log_iter=log_iter,
        status0=status0,
        max_iterations=max_iterations,
        nmf_fix_user_lhe=nmf_fix_user_lhe,
        nmf_fix_user_rhe=nmf_fix_user_rhe,
        nmf_fix_user_bhe=nmf_fix_user_bhe,
        nmf_sparse_level=nmf_sparse_level,
        ntf_unimodal=ntf_unimodal,
        ntf_smooth=ntf_smooth,
        ntf_left_components=ntf_left_components,
        ntf_right_components=ntf_right_components,
        ntf_block_components=ntf_block_components,
        n_blocks=n_blocks,
        nmf_priors=nmf_priors,
        my_status_box=my_status_box,
    )
def ntf_solve_simple(
    m,
    mmis,
    mt0,
    mw0,
    mb0,
    nc,
    tolerance,
    log_iter,
    status0,
    max_iterations,
    nmf_fix_user_lhe,
    nmf_fix_user_rhe,
    nmf_fix_user_bhe,
    nmf_sparse_level,
    ntf_unimodal,
    ntf_smooth,
    ntf_left_components,
    ntf_right_components,
    ntf_block_components,
    n_blocks,
    nmf_priors,
    my_status_box,
) -> Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray, float, int]:
    """
    Estimate NTF matrices (HALS)

    Parameters
    ----------
    m: Input matrix
    mmis: Define missing values (0 = missing cell, 1 = real cell)
    mt0: Initial left hand matrix
    mw0: Initial right hand matrix
    mb0: Initial block hand matrix
    nc: NTF rank
    tolerance: Convergence threshold
    log_iter: Log results through iterations
    status0: Initial displayed status to be updated during iterations
    max_iterations: Max iterations
    nmf_fix_user_lhe: = 1 => fixed left hand matrix columns
    nmf_fix_user_rhe: = 1 => fixed right hand matrix columns
    nmf_fix_user_bhe: = 1 => fixed block hand matrix columns
    nmf_sparse_level: sparsity level (as defined by Hoyer); +/- = make RHE/LHe sparse
    ntf_unimodal: Apply Unimodal constraint on factoring vectors
    ntf_smooth: Apply Smooth constraint on factoring vectors
    ntf_left_components: Apply Unimodal/Smooth constraint on left hand matrix
    ntf_right_components: Apply Unimodal/Smooth constraint on right hand matrix
    ntf_block_components: Apply Unimodal/Smooth constraint on block hand matrix
    n_blocks: Number of NTF blocks
    nmf_priors: Elements in mw that should be updated (others remain 0)
    my_status_box

    Returns
    -------
    Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray, float, int]\n
    * mt: Left hand matrix\n
    * mw: Right hand matrix\n
    * mb: Block hand matrix\n
    * diff: objective cost\n
    * cancel_pressed\n

    Reference
    ---------
    a. Cichocki, P.H.a.N. Anh-Huym, Fast local algorithms for large scale nonnegative matrix and tensor factorizations,
    IEICE Trans. Fundam. Electron. Commun. Comput. Sci. 92 (3) (2009) 708–721.
    """
    cancel_pressed = 0
    n, p0 = m.shape
    n_mmis = mmis.shape[0]
    nc = int(nc)
    n_blocks = int(n_blocks)
    # p columns per block; p0 must be divisible by n_blocks.
    p = int(p0 / n_blocks)
    nxp = int(n * p)
    nxp0 = int(n * p0)
    # Work on copies so the caller's initial estimates are not mutated.
    mt = np.copy(mt0)
    mw = np.copy(mw0)
    mb = np.copy(mb0)
    # step_iter = math.ceil(MaxIterations/10)
    step_iter = 1
    pbar_step = 100 * step_iter / max_iterations
    # First column index of each block inside the unfolded matrix.
    id_blockp = np.arange(0, (n_blocks - 1) * p + 1, p)
    a = np.zeros(n)
    b = np.zeros(p)
    c = np.zeros(n_blocks)
    alpha = np.zeros(nc)
    # Compute Residual tensor
    mfit = np.zeros((n, p0))
    for k in range(0, nc):
        if n_blocks > 1:
            for i_block in range(0, n_blocks):
                mfit[:, id_blockp[i_block]: id_blockp[i_block] + p] += (
                    mb[i_block, k] * np.reshape(mt[:, k], (n, 1)) @ np.reshape(mw[:, k], (1, p))
                )
        else:
            mfit[:, id_blockp[0]: id_blockp[0] + p] += np.reshape(mt[:, k], (n, 1)) @ np.reshape(mw[:, k], (1, p))
    denomt = np.zeros(n)
    denomw = np.zeros(p)
    denom_block = np.zeros((n_blocks, nc))
    mt2 = np.zeros(n)
    mw2 = np.zeros(p)
    mt_mw = np.zeros(nxp)
    denom_cutoff = 0.1
    if n_mmis > 0:
        mres = (m - mfit) * mmis
    else:
        mres = m - mfit
    my_status_box.init_bar()
    # Loop
    cont = 1
    i_iter = 0
    diff0 = 1.0e99
    mpart = np.zeros((n, p0))
    if abs(nmf_sparse_level) < 1:
        alpha[0] = nmf_sparse_level * 0.8
    else:
        alpha[0] = nmf_sparse_level
    percent_zeros = 0
    iter_sparse = 0
    # Main HALS loop: update each component in turn, then test convergence on
    # the relative improvement of the mean squared residual.
    # NOTE(review): if max_iterations == 0 the loop never runs and `diff` is
    # undefined at the final return — confirm callers always pass >= 1.
    while (cont > 0) & (i_iter < max_iterations):
        for k in range(0, nc):
            (
                n_blocks,
                mpart,
                id_blockp,
                p,
                mb,
                k,
                mt,
                n,
                mw,
                n_mmis,
                mmis,
                mres,
                nmf_fix_user_lhe,
                denomt,
                mw2,
                denom_cutoff,
                alpha,
                ntf_unimodal,
                ntf_left_components,
                ntf_smooth,
                a,
                nmf_fix_user_rhe,
                denomw,
                mt2,
                ntf_right_components,
                b,
                nmf_fix_user_bhe,
                mt_mw,
                nxp,
                denom_block,
                ntf_block_components,
                c,
                mfit,
                nmf_priors,
            ) = ntf_update(
                n_blocks=n_blocks,
                mpart=mpart,
                id_blockp=id_blockp,
                p=p,
                mb=mb,
                k=k,
                mt=mt,
                n=n,
                mw=mw,
                n_mmis=n_mmis,
                mmis=mmis,
                mres=mres,
                nmf_fix_user_lhe=nmf_fix_user_lhe,
                denomt=denomt,
                mw2=mw2,
                denom_cutoff=denom_cutoff,
                alpha=alpha,
                ntf_unimodal=ntf_unimodal,
                ntf_left_components=ntf_left_components,
                ntf_smooth=ntf_smooth,
                a=a,
                nmf_fix_user_rhe=nmf_fix_user_rhe,
                denomw=denomw,
                mt2=mt2,
                ntf_right_components=ntf_right_components,
                b=b,
                nmf_fix_user_bhe=nmf_fix_user_bhe,
                mt_mw=mt_mw,
                nxp=nxp,
                denom_block=denom_block,
                ntf_block_components=ntf_block_components,
                c=c,
                mfit=mfit,
                nmf_priors=nmf_priors,
            )
        if i_iter % step_iter == 0:
            # Check convergence
            diff = np.linalg.norm(mres) ** 2 / nxp0
            if (diff0 - diff) / diff0 < tolerance:
                cont = 0
            else:
                if diff > diff0:
                    my_status_box.my_print(f"{status0} Iter: {i_iter} MSR does not improve")
                diff0 = diff
            Status = f"{status0} Iteration: {i_iter}"
            if nmf_sparse_level != 0:
                Status = f"{Status} ; Achieved sparsity: {round(percent_zeros, 2)}; alpha: {round(alpha[0], 2)}"
                if log_iter == 1:
                    my_status_box.my_print(Status)
            my_status_box.update_status(status=Status)
            my_status_box.update_bar(step=pbar_step)
            if my_status_box.cancel_pressed:
                cancel_pressed = 1
                return np.array([]), mt, mw, mb, mres, cancel_pressed
            if log_iter == 1:
                my_status_box.my_print(status0 + " Iter: " + str(i_iter) + " MSR: " + str(diff))
        i_iter += 1
        # Adaptive sparsity: once the inner iteration has converged, grow the
        # sparsity parameter alpha stepwise until the requested fraction of
        # zeros is reached, then restart the main loop.
        if cont == 0 or i_iter == max_iterations or (cont == 0 and abs(nmf_sparse_level) == 1):
            if 0 < nmf_sparse_level < 1:
                sparse_test = np.zeros((nc, 1))
                percent_zeros0 = percent_zeros
                for k in range(0, nc):
                    sparse_test[k] = np.where(mw[:, k] == 0)[0].size
                percent_zeros = np.mean(sparse_test) / p
                if percent_zeros < percent_zeros0:
                    iter_sparse += 1
                else:
                    iter_sparse = 0
                if (percent_zeros < 0.99 * nmf_sparse_level) & (iter_sparse < 50):
                    alpha[0] *= min(1.05 * nmf_sparse_level / percent_zeros, 1.1)
                    if alpha[0] < 1:
                        i_iter = 0
                        cont = 1
            elif 0 > nmf_sparse_level > -1:
                sparse_test = np.zeros((nc, 1))
                percent_zeros0 = percent_zeros
                for k in range(0, nc):
                    sparse_test[k] = np.where(mt[:, k] == 0)[0].size
                percent_zeros = np.mean(sparse_test) / n
                if percent_zeros < percent_zeros0:
                    iter_sparse += 1
                else:
                    iter_sparse = 0
                if (percent_zeros < 0.99 * abs(nmf_sparse_level)) & (iter_sparse < 50):
                    alpha[0] *= min(1.05 * abs(nmf_sparse_level) / percent_zeros, 1.1)
                    if abs(alpha[0]) < 1:
                        i_iter = 0
                        cont = 1
            elif abs(alpha[0]) == 1:
                if alpha[0] == -1:
                    for k in range(0, nc):
                        if np.max(mt[:, k]) > 0:
                            hhi = int(
                                np.round(
                                    (np.linalg.norm(mt[:, k], ord=1) / (np.linalg.norm(mt[:, k], ord=2) + EPSILON))
                                    ** 2,
                                    decimals=0,
                                )
                            )
                            alpha[k] = -1 - (n - hhi) / (n - 1)
                        else:
                            alpha[k] = 0
                else:
                    for k in range(0, nc):
                        if np.max(mw[:, k]) > 0:
                            hhi = int(
                                np.round(
                                    (np.linalg.norm(mw[:, k], ord=1) / (np.linalg.norm(mw[:, k], ord=2) + EPSILON))
                                    ** 2,
                                    decimals=0,
                                )
                            )
                            alpha[k] = 1 + (p - hhi) / (p - 1)
                        else:
                            alpha[k] = 0
                if alpha[0] <= -1:
                    alpha_real = -(alpha + 1)
                    # noinspection PyTypeChecker
                    alpha_min = min(alpha_real)
                    for k in range(0, nc):
                        # noinspection PyUnresolvedReferences
                        alpha[k] = min(alpha_real[k], 2 * alpha_min)
                        alpha[k] = -alpha[k] - 1
                else:
                    alpha_real = alpha - 1
                    alpha_min = min(alpha_real)
                    for k in range(0, nc):
                        alpha[k] = min(alpha_real[k], 2 * alpha_min)
                        alpha[k] = alpha[k] + 1
                i_iter = 0
                cont = 1
                diff0 = 1.0e99
    for k in range(0, nc):
        hhi = np.round((np.linalg.norm(mt[:, k], ord=1) / np.linalg.norm(mt[:, k], ord=2)) ** 2, decimals=0)
        logger.info(f"component: {k}, left hhi: {hhi}")
        hhi = np.round((np.linalg.norm(mw[:, k], ord=1) / np.linalg.norm(mw[:, k], ord=2)) ** 2, decimals=0)
        logger.info(f"component: {k} right hhi: {hhi}")
    if (n_mmis > 0) & (nmf_fix_user_bhe == 0):
        mb *= denom_block
    # TODO (pcotte): mt and mw can be not yet referenced: fix that
    return np.array([]), mt, mw, mb, diff, cancel_pressed
def ntf_update(
    n_blocks,
    mpart,
    id_blockp,
    p,
    mb,
    k,
    mt,
    n,
    mw,
    n_mmis,
    mmis,
    mres,
    nmf_fix_user_lhe,
    denomt,
    mw2,
    denom_cutoff,
    alpha,
    ntf_unimodal,
    ntf_left_components,
    ntf_smooth,
    a,
    nmf_fix_user_rhe,
    denomw,
    mt2,
    ntf_right_components,
    b,
    nmf_fix_user_bhe,
    mt_mw,
    nxp,
    denom_block,
    ntf_block_components,
    c,
    mfit,
    nmf_priors,
):
    """Core updating code called by NTFSolve_simple & NTF Solve_conv

    Performs one HALS update of component ``k``: left vector mt[:, k],
    right vector mw[:, k] and block vector mb[:, k] are refreshed in turn
    (unless fixed by the caller), then the residual tensor is recomputed.
    Many arrays are updated in place and returned to the caller.

    Input:
        All variables in the calling function used in the function
    Output:
        Same as Input
    """
    if len(nmf_priors) > 0:
        n_nmf_priors, nc = nmf_priors.shape
    else:
        n_nmf_priors = 0
    # Compute kth-part
    if n_blocks > 1:
        for i_block in range(0, n_blocks):
            mpart[:, id_blockp[i_block]: id_blockp[i_block] + p] = (
                mb[i_block, k] * np.reshape(mt[:, k], (n, 1)) @ np.reshape(mw[:, k], (1, p))
            )
    else:
        mpart[:, id_blockp[0]: id_blockp[0] + p] = np.reshape(mt[:, k], (n, 1)) @ np.reshape(mw[:, k], (1, p))
    if n_mmis > 0:
        mpart *= mmis
    mpart += mres
    # Decide which of the three factors gets normalized after its update.
    if nmf_fix_user_bhe > 0:
        norm_bhe = True
        if nmf_fix_user_rhe == 0:
            norm_lhe = True
            norm_rhe = False
        else:
            norm_lhe = False
            norm_rhe = True
    else:
        norm_bhe = False
        norm_lhe = True
        norm_rhe = True
    if (nmf_fix_user_lhe > 0) & norm_lhe:
        norm = np.linalg.norm(mt[:, k])
        if norm > 0:
            mt[:, k] /= norm
    if (nmf_fix_user_rhe > 0) & norm_rhe:
        norm = np.linalg.norm(mw[:, k])
        if norm > 0:
            mw[:, k] /= norm
    if (nmf_fix_user_bhe > 0) & norm_bhe & (n_blocks > 1):
        norm = np.linalg.norm(mb[:, k])
        if norm > 0:
            mb[:, k] /= norm
    if nmf_fix_user_lhe == 0:
        # Update Mt
        mt[:, k] = 0
        if n_blocks > 1:
            for i_block in range(0, n_blocks):
                mt[:, k] += mb[i_block, k] * mpart[:, id_blockp[i_block]: id_blockp[i_block] + p] @ mw[:, k]
        else:
            mt[:, k] += mpart[:, id_blockp[0]: id_blockp[0] + p] @ mw[:, k]
        if n_mmis > 0:
            denomt[:] = 0
            mw2[:] = mw[:, k] ** 2
            if n_blocks > 1:
                for i_block in range(0, n_blocks):
                    # Broadcast missing cells into Mw to calculate Mw.T * Mw
                    denomt += mb[i_block, k] ** 2 * mmis[:, id_blockp[i_block]: id_blockp[i_block] + p] @ mw2
            else:
                denomt += mmis[:, id_blockp[0]: id_blockp[0] + p] @ mw2
            # Floor the denominator to avoid dividing by near-zero entries.
            denomt /= np.max(denomt)
            denomt[denomt < denom_cutoff] = denom_cutoff
            mt[:, k] /= denomt
        mt[mt[:, k] < 0, k] = 0
        # Negative alpha values request sparsity on the LEFT factor.
        if alpha[0] < 0:
            if alpha[0] <= -1:
                if (alpha[0] == -1) & (np.max(mt[:, k]) > 0):
                    t_threshold = mt[:, k]
                    hhi = int(
                        np.round(
                            (np.linalg.norm(t_threshold, ord=1) / (np.linalg.norm(t_threshold, ord=2) + EPSILON)) ** 2,
                            decimals=0,
                        )
                    )
                    t_rank = np.argsort(t_threshold)
                    t_threshold[t_rank[0: n - hhi]] = 0
                else:
                    mt[:, k] = sparse_opt(mt[:, k], -alpha[k] - 1, False)
            else:
                mt[:, k] = sparse_opt(mt[:, k], -alpha[0], False)
        if (ntf_unimodal > 0) & (ntf_left_components > 0):
            # Enforce unimodal distribution
            tmax = np.argmax(mt[:, k])
            for i in range(tmax + 1, n):
                mt[i, k] = min(mt[i - 1, k], mt[i, k])
            for i in range(tmax - 1, -1, -1):
                mt[i, k] = min(mt[i + 1, k], mt[i, k])
        if (ntf_smooth > 0) & (ntf_left_components > 0):
            # Smooth distribution
            a[0] = 0.75 * mt[0, k] + 0.25 * mt[1, k]
            a[n - 1] = 0.25 * mt[n - 2, k] + 0.75 * mt[n - 1, k]
            for i in range(1, n - 1):
                a[i] = 0.25 * mt[i - 1, k] + 0.5 * mt[i, k] + 0.25 * mt[i + 1, k]
            mt[:, k] = a
        if norm_lhe:
            norm = np.linalg.norm(mt[:, k])
            if norm > 0:
                mt[:, k] /= norm
    if nmf_fix_user_rhe == 0:
        # Update Mw
        mw[:, k] = 0
        if n_blocks > 1:
            for i_block in range(0, n_blocks):
                mw[:, k] += mpart[:, id_blockp[i_block]: id_blockp[i_block] + p].T @ mt[:, k] * mb[i_block, k]
        else:
            mw[:, k] += mpart[:, id_blockp[0]: id_blockp[0] + p].T @ mt[:, k]
        if n_mmis > 0:
            denomw[:] = 0
            mt2[:] = mt[:, k] ** 2
            if n_blocks > 1:
                for i_block in range(0, n_blocks):
                    # Broadcast missing cells into Mw to calculate Mt.T * Mt
                    denomw += mb[i_block, k] ** 2 * mmis[:, id_blockp[i_block]: id_blockp[i_block] + p].T @ mt2
            else:
                denomw += mmis[:, id_blockp[0]: id_blockp[0] + p].T @ mt2
            # Floor the denominator to avoid dividing by near-zero entries.
            denomw /= np.max(denomw)
            denomw[denomw < denom_cutoff] = denom_cutoff
            mw[:, k] /= denomw
        mw[mw[:, k] < 0, k] = 0
        # Positive alpha values request sparsity on the RIGHT factor.
        if alpha[0] > 0:
            if alpha[0] >= 1:
                if (alpha[0] == 1) & (np.max(mw[:, k]) > 0):
                    w_threshold = mw[:, k]
                    hhi = int(
                        np.round(
                            (np.linalg.norm(w_threshold, ord=1) / (np.linalg.norm(w_threshold, ord=2) + EPSILON)) ** 2,
                            decimals=0,
                        )
                    )
                    w_rank = np.argsort(w_threshold)
                    w_threshold[w_rank[0: p - hhi]] = 0
                else:
                    mw[:, k] = sparse_opt(mw[:, k], alpha[k] - 1, False)
            else:
                mw[:, k] = sparse_opt(mw[:, k], alpha[0], False)
        if (ntf_unimodal > 0) & (ntf_right_components > 0):
            # Enforce unimodal distribution
            wmax = np.argmax(mw[:, k])
            for j in range(wmax + 1, p):
                mw[j, k] = min(mw[j - 1, k], mw[j, k])
            for j in range(wmax - 1, -1, -1):
                mw[j, k] = min(mw[j + 1, k], mw[j, k])
        if (ntf_smooth > 0) & (ntf_right_components > 0):
            # Smooth distribution
            b[0] = 0.75 * mw[0, k] + 0.25 * mw[1, k]
            b[p - 1] = 0.25 * mw[p - 2, k] + 0.75 * mw[p - 1, k]
            for j in range(1, p - 1):
                b[j] = 0.25 * mw[j - 1, k] + 0.5 * mw[j, k] + 0.25 * mw[j + 1, k]
            mw[:, k] = b
        if n_nmf_priors > 0:
            # Keep only the entries allowed by the (binarized) prior mask.
            mw[:, k] = mw[:, k] * nmf_priors[:, k]
        if norm_rhe:
            norm = np.linalg.norm(mw[:, k])
            if norm > 0:
                mw[:, k] /= norm
    if nmf_fix_user_bhe == 0:
        # Update Mb
        mb[:, k] = 0
        mt_mw[:] = np.reshape((np.reshape(mt[:, k], (n, 1)) @ np.reshape(mw[:, k], (1, p))), nxp)
        for i_block in range(0, n_blocks):
            mb[i_block, k] = np.reshape(mpart[:, id_blockp[i_block]: id_blockp[i_block] + p], nxp).T @ mt_mw
        if n_mmis > 0:
            mt_mw[:] = mt_mw[:] ** 2
            for i_block in range(0, n_blocks):
                # Broadcast missing cells into Mb to calculate Mb.T * Mb
                denom_block[i_block, k] = (
                    np.reshape(mmis[:, id_blockp[i_block]: id_blockp[i_block] + p], (1, nxp)) @ mt_mw
                )
            maxdenom_block = np.max(denom_block[:, k])
            denom_block[denom_block[:, k] < denom_cutoff * maxdenom_block] = denom_cutoff * maxdenom_block
            mb[:, k] /= denom_block[:, k]
        mb[mb[:, k] < 0, k] = 0
        if (ntf_unimodal > 0) & (ntf_block_components > 0):
            # Enforce unimodal distribution
            bmax = np.argmax(mb[:, k])
            for i_block in range(bmax + 1, n_blocks):
                mb[i_block, k] = min(mb[i_block - 1, k], mb[i_block, k])
            for i_block in range(bmax - 1, -1, -1):
                mb[i_block, k] = min(mb[i_block + 1, k], mb[i_block, k])
        if (ntf_smooth > 0) & (ntf_block_components > 0):
            # Smooth distribution
            c[0] = 0.75 * mb[0, k] + 0.25 * mb[1, k]
            c[n_blocks - 1] = 0.25 * mb[n_blocks - 2, k] + 0.75 * mb[n_blocks - 1, k]
            for i_block in range(1, n_blocks - 1):
                c[i_block] = 0.25 * mb[i_block - 1, k] + 0.5 * mb[i_block, k] + 0.25 * mb[i_block + 1, k]
            mb[:, k] = c
        if norm_bhe:
            norm = np.linalg.norm(mb[:, k])
            if norm > 0:
                mb[:, k] /= norm
    # Update residual tensor
    mfit[:, :] = 0
    if n_blocks > 1:
        for i_block in range(0, n_blocks):
            mfit[:, id_blockp[i_block]: id_blockp[i_block] + p] += (
                mb[i_block, k] * np.reshape(mt[:, k], (n, 1)) @ np.reshape(mw[:, k], (1, p))
            )
    else:
        mfit[:, id_blockp[0]: id_blockp[0] + p] += np.reshape(mt[:, k], (n, 1)) @ np.reshape(mw[:, k], (1, p))
    if n_mmis > 0:
        mres[:, :] = (mpart - mfit) * mmis
    else:
        mres[:, :] = mpart - mfit
    return (
        n_blocks,
        mpart,
        id_blockp,
        p,
        mb,
        k,
        mt,
        n,
        mw,
        n_mmis,
        mmis,
        mres,
        nmf_fix_user_lhe,
        denomt,
        mw2,
        denom_cutoff,
        alpha,
        ntf_unimodal,
        ntf_left_components,
        ntf_smooth,
        a,
        nmf_fix_user_rhe,
        denomw,
        mt2,
        ntf_right_components,
        b,
        nmf_fix_user_bhe,
        mt_mw,
        nxp,
        denom_block,
        ntf_block_components,
        c,
        mfit,
        nmf_priors,
    )
| 31.169536 | 119 | 0.470658 |
from typing import Tuple
import numpy as np
from .nmtf_utils import EPSILON, sparse_opt
import logging
logger = logging.getLogger(__name__)
# TODO (pcotte): typing
# TODO (pcotte): docstrings (with parameters and returns)
def ntf_stack(m, mmis, n_blocks):
    """Unfold matrix m (n x p, n_blocks horizontal blocks) into an
    (n*p/n_blocks x n_blocks) matrix with one column per block; the mask
    mmis is stacked the same way (empty result when no mask is given)."""
    n, p = m.shape
    # `np.int` was removed in NumPy 1.24; the builtin `int` is the replacement.
    mmis = mmis.astype(int)
    n_mmis = mmis.shape[0]
    n_blocks = int(n_blocks)
    mstacked = np.zeros((int(n * p / n_blocks), n_blocks))
    if n_mmis > 0:
        mmis_stacked = np.zeros((int(n * p / n_blocks), n_blocks))
    else:
        mmis_stacked = np.array([])
    for i_block in range(0, n_blocks):
        for j in range(0, int(p / n_blocks)):
            i1 = j * n
            i2 = i1 + n
            mstacked[i1:i2, i_block] = m[:, int(i_block * p / n_blocks + j)]
            if n_mmis > 0:
                mmis_stacked[i1:i2, i_block] = mmis[:, int(i_block * p / n_blocks + j)]
    return mstacked, mmis_stacked
def ntf_solve(
m,
mmis,
mt0,
mw0,
mb0,
nc,
tolerance,
log_iter,
status0,
max_iterations,
nmf_fix_user_lhe,
nmf_fix_user_rhe,
nmf_fix_user_bhe,
nmf_sparse_level,
ntf_unimodal,
ntf_smooth,
ntf_left_components,
ntf_right_components,
ntf_block_components,
n_blocks,
nmf_priors,
my_status_box,
):
if len(nmf_priors) > 0:
n_nmf_priors, nc = nmf_priors.shape
else:
n_nmf_priors = 0
if n_nmf_priors > 0:
nmf_priors[nmf_priors > 0] = 1
return ntf_solve_simple(
m=m,
mmis=mmis,
mt0=mt0,
mw0=mw0,
mb0=mb0,
nc=nc,
tolerance=tolerance,
log_iter=log_iter,
status0=status0,
max_iterations=max_iterations,
nmf_fix_user_lhe=nmf_fix_user_lhe,
nmf_fix_user_rhe=nmf_fix_user_rhe,
nmf_fix_user_bhe=nmf_fix_user_bhe,
nmf_sparse_level=nmf_sparse_level,
ntf_unimodal=ntf_unimodal,
ntf_smooth=ntf_smooth,
ntf_left_components=ntf_left_components,
ntf_right_components=ntf_right_components,
ntf_block_components=ntf_block_components,
n_blocks=n_blocks,
nmf_priors=nmf_priors,
my_status_box=my_status_box,
)
def ntf_solve_simple(
m,
mmis,
mt0,
mw0,
mb0,
nc,
tolerance,
log_iter,
status0,
max_iterations,
nmf_fix_user_lhe,
nmf_fix_user_rhe,
nmf_fix_user_bhe,
nmf_sparse_level,
ntf_unimodal,
ntf_smooth,
ntf_left_components,
ntf_right_components,
ntf_block_components,
n_blocks,
nmf_priors,
my_status_box,
) -> Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray, float, int]:
cancel_pressed = 0
n, p0 = m.shape
n_mmis = mmis.shape[0]
nc = int(nc)
n_blocks = int(n_blocks)
p = int(p0 / n_blocks)
nxp = int(n * p)
nxp0 = int(n * p0)
mt = np.copy(mt0)
mw = np.copy(mw0)
mb = np.copy(mb0)
# step_iter = math.ceil(MaxIterations/10)
step_iter = 1
pbar_step = 100 * step_iter / max_iterations
id_blockp = np.arange(0, (n_blocks - 1) * p + 1, p)
a = np.zeros(n)
b = np.zeros(p)
c = np.zeros(n_blocks)
alpha = np.zeros(nc)
# Compute Residual tensor
mfit = np.zeros((n, p0))
for k in range(0, nc):
if n_blocks > 1:
for i_block in range(0, n_blocks):
mfit[:, id_blockp[i_block]: id_blockp[i_block] + p] += (
mb[i_block, k] * np.reshape(mt[:, k], (n, 1)) @ np.reshape(mw[:, k], (1, p))
)
else:
mfit[:, id_blockp[0]: id_blockp[0] + p] += np.reshape(mt[:, k], (n, 1)) @ np.reshape(mw[:, k], (1, p))
denomt = np.zeros(n)
denomw = np.zeros(p)
denom_block = np.zeros((n_blocks, nc))
mt2 = np.zeros(n)
mw2 = np.zeros(p)
mt_mw = np.zeros(nxp)
denom_cutoff = 0.1
if n_mmis > 0:
mres = (m - mfit) * mmis
else:
mres = m - mfit
my_status_box.init_bar()
# Loop
cont = 1
i_iter = 0
diff0 = 1.0e99
mpart = np.zeros((n, p0))
if abs(nmf_sparse_level) < 1:
alpha[0] = nmf_sparse_level * 0.8
else:
alpha[0] = nmf_sparse_level
percent_zeros = 0
iter_sparse = 0
while (cont > 0) & (i_iter < max_iterations):
for k in range(0, nc):
(
n_blocks,
mpart,
id_blockp,
p,
mb,
k,
mt,
n,
mw,
n_mmis,
mmis,
mres,
nmf_fix_user_lhe,
denomt,
mw2,
denom_cutoff,
alpha,
ntf_unimodal,
ntf_left_components,
ntf_smooth,
a,
nmf_fix_user_rhe,
denomw,
mt2,
ntf_right_components,
b,
nmf_fix_user_bhe,
mt_mw,
nxp,
denom_block,
ntf_block_components,
c,
mfit,
nmf_priors,
) = ntf_update(
n_blocks=n_blocks,
mpart=mpart,
id_blockp=id_blockp,
p=p,
mb=mb,
k=k,
mt=mt,
n=n,
mw=mw,
n_mmis=n_mmis,
mmis=mmis,
mres=mres,
nmf_fix_user_lhe=nmf_fix_user_lhe,
denomt=denomt,
mw2=mw2,
denom_cutoff=denom_cutoff,
alpha=alpha,
ntf_unimodal=ntf_unimodal,
ntf_left_components=ntf_left_components,
ntf_smooth=ntf_smooth,
a=a,
nmf_fix_user_rhe=nmf_fix_user_rhe,
denomw=denomw,
mt2=mt2,
ntf_right_components=ntf_right_components,
b=b,
nmf_fix_user_bhe=nmf_fix_user_bhe,
mt_mw=mt_mw,
nxp=nxp,
denom_block=denom_block,
ntf_block_components=ntf_block_components,
c=c,
mfit=mfit,
nmf_priors=nmf_priors,
)
if i_iter % step_iter == 0:
# Check convergence
diff = np.linalg.norm(mres) ** 2 / nxp0
if (diff0 - diff) / diff0 < tolerance:
cont = 0
else:
if diff > diff0:
my_status_box.my_print(f"{status0} Iter: {i_iter} MSR does not improve")
diff0 = diff
Status = f"{status0} Iteration: {i_iter}"
if nmf_sparse_level != 0:
Status = f"{Status} ; Achieved sparsity: {round(percent_zeros, 2)}; alpha: {round(alpha[0], 2)}"
if log_iter == 1:
my_status_box.my_print(Status)
my_status_box.update_status(status=Status)
my_status_box.update_bar(step=pbar_step)
if my_status_box.cancel_pressed:
cancel_pressed = 1
return np.array([]), mt, mw, mb, mres, cancel_pressed
if log_iter == 1:
my_status_box.my_print(status0 + " Iter: " + str(i_iter) + " MSR: " + str(diff))
i_iter += 1
if cont == 0 or i_iter == max_iterations or (cont == 0 and abs(nmf_sparse_level) == 1):
if 0 < nmf_sparse_level < 1:
sparse_test = np.zeros((nc, 1))
percent_zeros0 = percent_zeros
for k in range(0, nc):
sparse_test[k] = np.where(mw[:, k] == 0)[0].size
percent_zeros = np.mean(sparse_test) / p
if percent_zeros < percent_zeros0:
iter_sparse += 1
else:
iter_sparse = 0
if (percent_zeros < 0.99 * nmf_sparse_level) & (iter_sparse < 50):
alpha[0] *= min(1.05 * nmf_sparse_level / percent_zeros, 1.1)
if alpha[0] < 1:
i_iter = 0
cont = 1
elif 0 > nmf_sparse_level > -1:
sparse_test = np.zeros((nc, 1))
percent_zeros0 = percent_zeros
for k in range(0, nc):
sparse_test[k] = np.where(mt[:, k] == 0)[0].size
percent_zeros = np.mean(sparse_test) / n
if percent_zeros < percent_zeros0:
iter_sparse += 1
else:
iter_sparse = 0
if (percent_zeros < 0.99 * abs(nmf_sparse_level)) & (iter_sparse < 50):
alpha[0] *= min(1.05 * abs(nmf_sparse_level) / percent_zeros, 1.1)
if abs(alpha[0]) < 1:
i_iter = 0
cont = 1
elif abs(alpha[0]) == 1:
if alpha[0] == -1:
for k in range(0, nc):
if np.max(mt[:, k]) > 0:
hhi = int(
np.round(
(np.linalg.norm(mt[:, k], ord=1) / (np.linalg.norm(mt[:, k], ord=2) + EPSILON))
** 2,
decimals=0,
)
)
alpha[k] = -1 - (n - hhi) / (n - 1)
else:
alpha[k] = 0
else:
for k in range(0, nc):
if np.max(mw[:, k]) > 0:
hhi = int(
np.round(
(np.linalg.norm(mw[:, k], ord=1) / (np.linalg.norm(mw[:, k], ord=2) + EPSILON))
** 2,
decimals=0,
)
)
alpha[k] = 1 + (p - hhi) / (p - 1)
else:
alpha[k] = 0
if alpha[0] <= -1:
alpha_real = -(alpha + 1)
# noinspection PyTypeChecker
alpha_min = min(alpha_real)
for k in range(0, nc):
# noinspection PyUnresolvedReferences
alpha[k] = min(alpha_real[k], 2 * alpha_min)
alpha[k] = -alpha[k] - 1
else:
alpha_real = alpha - 1
alpha_min = min(alpha_real)
for k in range(0, nc):
alpha[k] = min(alpha_real[k], 2 * alpha_min)
alpha[k] = alpha[k] + 1
i_iter = 0
cont = 1
diff0 = 1.0e99
for k in range(0, nc):
hhi = np.round((np.linalg.norm(mt[:, k], ord=1) / np.linalg.norm(mt[:, k], ord=2)) ** 2, decimals=0)
logger.info(f"component: {k}, left hhi: {hhi}")
hhi = np.round((np.linalg.norm(mw[:, k], ord=1) / np.linalg.norm(mw[:, k], ord=2)) ** 2, decimals=0)
logger.info(f"component: {k} right hhi: {hhi}")
if (n_mmis > 0) & (nmf_fix_user_bhe == 0):
mb *= denom_block
# TODO (pcotte): mt and mw can be not yet referenced: fix that
return np.array([]), mt, mw, mb, diff, cancel_pressed
def ntf_update(
n_blocks,
mpart,
id_blockp,
p,
mb,
k,
mt,
n,
mw,
n_mmis,
mmis,
mres,
nmf_fix_user_lhe,
denomt,
mw2,
denom_cutoff,
alpha,
ntf_unimodal,
ntf_left_components,
ntf_smooth,
a,
nmf_fix_user_rhe,
denomw,
mt2,
ntf_right_components,
b,
nmf_fix_user_bhe,
mt_mw,
nxp,
denom_block,
ntf_block_components,
c,
mfit,
nmf_priors,
):
if len(nmf_priors) > 0:
n_nmf_priors, nc = nmf_priors.shape
else:
n_nmf_priors = 0
# Compute kth-part
if n_blocks > 1:
for i_block in range(0, n_blocks):
mpart[:, id_blockp[i_block]: id_blockp[i_block] + p] = (
mb[i_block, k] * np.reshape(mt[:, k], (n, 1)) @ np.reshape(mw[:, k], (1, p))
)
else:
mpart[:, id_blockp[0]: id_blockp[0] + p] = np.reshape(mt[:, k], (n, 1)) @ np.reshape(mw[:, k], (1, p))
if n_mmis > 0:
mpart *= mmis
mpart += mres
if nmf_fix_user_bhe > 0:
norm_bhe = True
if nmf_fix_user_rhe == 0:
norm_lhe = True
norm_rhe = False
else:
norm_lhe = False
norm_rhe = True
else:
norm_bhe = False
norm_lhe = True
norm_rhe = True
if (nmf_fix_user_lhe > 0) & norm_lhe:
norm = np.linalg.norm(mt[:, k])
if norm > 0:
mt[:, k] /= norm
if (nmf_fix_user_rhe > 0) & norm_rhe:
norm = np.linalg.norm(mw[:, k])
if norm > 0:
mw[:, k] /= norm
if (nmf_fix_user_bhe > 0) & norm_bhe & (n_blocks > 1):
norm = np.linalg.norm(mb[:, k])
if norm > 0:
mb[:, k] /= norm
if nmf_fix_user_lhe == 0:
# Update Mt
mt[:, k] = 0
if n_blocks > 1:
for i_block in range(0, n_blocks):
mt[:, k] += mb[i_block, k] * mpart[:, id_blockp[i_block]: id_blockp[i_block] + p] @ mw[:, k]
else:
mt[:, k] += mpart[:, id_blockp[0]: id_blockp[0] + p] @ mw[:, k]
if n_mmis > 0:
denomt[:] = 0
mw2[:] = mw[:, k] ** 2
if n_blocks > 1:
for i_block in range(0, n_blocks):
# Broadcast missing cells into Mw to calculate Mw.T * Mw
denomt += mb[i_block, k] ** 2 * mmis[:, id_blockp[i_block]: id_blockp[i_block] + p] @ mw2
else:
denomt += mmis[:, id_blockp[0]: id_blockp[0] + p] @ mw2
denomt /= np.max(denomt)
denomt[denomt < denom_cutoff] = denom_cutoff
mt[:, k] /= denomt
mt[mt[:, k] < 0, k] = 0
if alpha[0] < 0:
if alpha[0] <= -1:
if (alpha[0] == -1) & (np.max(mt[:, k]) > 0):
t_threshold = mt[:, k]
hhi = int(
np.round(
(np.linalg.norm(t_threshold, ord=1) / (np.linalg.norm(t_threshold, ord=2) + EPSILON)) ** 2,
decimals=0,
)
)
t_rank = np.argsort(t_threshold)
t_threshold[t_rank[0: n - hhi]] = 0
else:
mt[:, k] = sparse_opt(mt[:, k], -alpha[k] - 1, False)
else:
mt[:, k] = sparse_opt(mt[:, k], -alpha[0], False)
if (ntf_unimodal > 0) & (ntf_left_components > 0):
# Enforce unimodal distribution
tmax = np.argmax(mt[:, k])
for i in range(tmax + 1, n):
mt[i, k] = min(mt[i - 1, k], mt[i, k])
for i in range(tmax - 1, -1, -1):
mt[i, k] = min(mt[i + 1, k], mt[i, k])
if (ntf_smooth > 0) & (ntf_left_components > 0):
# Smooth distribution
a[0] = 0.75 * mt[0, k] + 0.25 * mt[1, k]
a[n - 1] = 0.25 * mt[n - 2, k] + 0.75 * mt[n - 1, k]
for i in range(1, n - 1):
a[i] = 0.25 * mt[i - 1, k] + 0.5 * mt[i, k] + 0.25 * mt[i + 1, k]
mt[:, k] = a
if norm_lhe:
norm = np.linalg.norm(mt[:, k])
if norm > 0:
mt[:, k] /= norm
if nmf_fix_user_rhe == 0:
# Update Mw
mw[:, k] = 0
if n_blocks > 1:
for i_block in range(0, n_blocks):
mw[:, k] += mpart[:, id_blockp[i_block]: id_blockp[i_block] + p].T @ mt[:, k] * mb[i_block, k]
else:
mw[:, k] += mpart[:, id_blockp[0]: id_blockp[0] + p].T @ mt[:, k]
if n_mmis > 0:
denomw[:] = 0
mt2[:] = mt[:, k] ** 2
if n_blocks > 1:
for i_block in range(0, n_blocks):
# Broadcast missing cells into Mw to calculate Mt.T * Mt
denomw += mb[i_block, k] ** 2 * mmis[:, id_blockp[i_block]: id_blockp[i_block] + p].T @ mt2
else:
denomw += mmis[:, id_blockp[0]: id_blockp[0] + p].T @ mt2
denomw /= np.max(denomw)
denomw[denomw < denom_cutoff] = denom_cutoff
mw[:, k] /= denomw
mw[mw[:, k] < 0, k] = 0
if alpha[0] > 0:
if alpha[0] >= 1:
if (alpha[0] == 1) & (np.max(mw[:, k]) > 0):
w_threshold = mw[:, k]
hhi = int(
np.round(
(np.linalg.norm(w_threshold, ord=1) / (np.linalg.norm(w_threshold, ord=2) + EPSILON)) ** 2,
decimals=0,
)
)
w_rank = np.argsort(w_threshold)
w_threshold[w_rank[0: p - hhi]] = 0
else:
mw[:, k] = sparse_opt(mw[:, k], alpha[k] - 1, False)
else:
mw[:, k] = sparse_opt(mw[:, k], alpha[0], False)
if (ntf_unimodal > 0) & (ntf_right_components > 0):
# Enforce unimodal distribution
wmax = np.argmax(mw[:, k])
for j in range(wmax + 1, p):
mw[j, k] = min(mw[j - 1, k], mw[j, k])
for j in range(wmax - 1, -1, -1):
mw[j, k] = min(mw[j + 1, k], mw[j, k])
if (ntf_smooth > 0) & (ntf_right_components > 0):
# Smooth distribution
b[0] = 0.75 * mw[0, k] + 0.25 * mw[1, k]
b[p - 1] = 0.25 * mw[p - 2, k] + 0.75 * mw[p - 1, k]
for j in range(1, p - 1):
b[j] = 0.25 * mw[j - 1, k] + 0.5 * mw[j, k] + 0.25 * mw[j + 1, k]
mw[:, k] = b
if n_nmf_priors > 0:
mw[:, k] = mw[:, k] * nmf_priors[:, k]
if norm_rhe:
norm = np.linalg.norm(mw[:, k])
if norm > 0:
mw[:, k] /= norm
if nmf_fix_user_bhe == 0:
# Update Mb
mb[:, k] = 0
mt_mw[:] = np.reshape((np.reshape(mt[:, k], (n, 1)) @ np.reshape(mw[:, k], (1, p))), nxp)
for i_block in range(0, n_blocks):
mb[i_block, k] = np.reshape(mpart[:, id_blockp[i_block]: id_blockp[i_block] + p], nxp).T @ mt_mw
if n_mmis > 0:
mt_mw[:] = mt_mw[:] ** 2
for i_block in range(0, n_blocks):
# Broadcast missing cells into Mb to calculate Mb.T * Mb
denom_block[i_block, k] = (
np.reshape(mmis[:, id_blockp[i_block]: id_blockp[i_block] + p], (1, nxp)) @ mt_mw
)
maxdenom_block = np.max(denom_block[:, k])
denom_block[denom_block[:, k] < denom_cutoff * maxdenom_block] = denom_cutoff * maxdenom_block
mb[:, k] /= denom_block[:, k]
mb[mb[:, k] < 0, k] = 0
if (ntf_unimodal > 0) & (ntf_block_components > 0):
# Enforce unimodal distribution
bmax = np.argmax(mb[:, k])
for i_block in range(bmax + 1, n_blocks):
mb[i_block, k] = min(mb[i_block - 1, k], mb[i_block, k])
for i_block in range(bmax - 1, -1, -1):
mb[i_block, k] = min(mb[i_block + 1, k], mb[i_block, k])
if (ntf_smooth > 0) & (ntf_block_components > 0):
# Smooth distribution
c[0] = 0.75 * mb[0, k] + 0.25 * mb[1, k]
c[n_blocks - 1] = 0.25 * mb[n_blocks - 2, k] + 0.75 * mb[n_blocks - 1, k]
for i_block in range(1, n_blocks - 1):
c[i_block] = 0.25 * mb[i_block - 1, k] + 0.5 * mb[i_block, k] + 0.25 * mb[i_block + 1, k]
mb[:, k] = c
if norm_bhe:
norm = np.linalg.norm(mb[:, k])
if norm > 0:
mb[:, k] /= norm
# Update residual tensor
mfit[:, :] = 0
if n_blocks > 1:
for i_block in range(0, n_blocks):
mfit[:, id_blockp[i_block]: id_blockp[i_block] + p] += (
mb[i_block, k] * np.reshape(mt[:, k], (n, 1)) @ np.reshape(mw[:, k], (1, p))
)
else:
mfit[:, id_blockp[0]: id_blockp[0] + p] += np.reshape(mt[:, k], (n, 1)) @ np.reshape(mw[:, k], (1, p))
if n_mmis > 0:
mres[:, :] = (mpart - mfit) * mmis
else:
mres[:, :] = mpart - mfit
return (
n_blocks,
mpart,
id_blockp,
p,
mb,
k,
mt,
n,
mw,
n_mmis,
mmis,
mres,
nmf_fix_user_lhe,
denomt,
mw2,
denom_cutoff,
alpha,
ntf_unimodal,
ntf_left_components,
ntf_smooth,
a,
nmf_fix_user_rhe,
denomw,
mt2,
ntf_right_components,
b,
nmf_fix_user_bhe,
mt_mw,
nxp,
denom_block,
ntf_block_components,
c,
mfit,
nmf_priors,
)
| true | true |
f71c1d13e2b4281f5e462dfbd3127b8ee3404a3a | 6,928 | py | Python | actions.py | prateekralhan/Chatbot-Development-using-RASA-Framework-and-Zomato-API---PGD-IIITB-Assignment | 5e33586df2bbe146d950f33e018e9e990031e347 | [
"Apache-2.0"
] | 1 | 2020-05-21T10:20:03.000Z | 2020-05-21T10:20:03.000Z | actions.py | prateekralhan/Chatbot-Development-using-RASA-Framework-and-Zomato-API---PGD-IIITB-Assignment | 5e33586df2bbe146d950f33e018e9e990031e347 | [
"Apache-2.0"
] | null | null | null | actions.py | prateekralhan/Chatbot-Development-using-RASA-Framework-and-Zomato-API---PGD-IIITB-Assignment | 5e33586df2bbe146d950f33e018e9e990031e347 | [
"Apache-2.0"
] | 1 | 2021-12-06T03:10:37.000Z | 2021-12-06T03:10:37.000Z | from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
# importing the necessary libraries
# from rasa_core.actions.action import Action
from rasa_sdk import Action
#from rasa_sdk.forms import ( BooleanFormField, EntityFormField, FormAction, FreeTextFormField )
from rasa_sdk.events import SlotSet
# from rasa_core.events import SlotSet
import zomatopy
import json
# providng the API key
zomato_config={ "user_key":"<Enter your Zomato API key here >"}
result_of_last_query = ""
class ActionSearchRestaurants(Action):
    """Search Zomato for restaurants matching the tracked location/cuisine/budget.

    Utters the top 5 matches and caches the top 10 in the module-level
    ``result_of_last_query`` so that ActionSendEmail can mail them later.
    """

    def name(self):
        # Action name referenced from the Rasa domain/stories.
        return 'action_restaurant'

    def filterRestaurantBasedOnBudget(self, userbudget, allRestaurants):
        """Return a display string of up to 5 restaurants within the user's budget.

        ``userbudget`` may be a tier ("1"/"2"/"3") or a rupee amount; anything
        else (including an unset slot) falls back to the mid range 300-699.
        Side effect: stores the top 10 matches in ``result_of_last_query``.
        """
        rangeMin = 0
        rangeMax = 100000
        # Map the budget slot onto an average-cost-for-two range.
        # Guard against an unset (None) slot before calling isdigit().
        if userbudget is not None and str(userbudget).isdigit():
            price = int(userbudget)
            if price == 1:
                rangeMax = 299
            elif price == 2:
                rangeMin = 300
                rangeMax = 699
            elif price == 3:
                rangeMin = 700
            elif price < 300:
                rangeMax = 299
            elif price < 700 and price >= 300:
                rangeMin = 300
                rangeMax = 699
            else:
                rangeMin = 700
        else:
            # default budget: mid range
            rangeMin = 300
            rangeMax = 699
        index = 0
        count = 0
        response = ""
        global result_of_last_query
        result_of_last_query = ""
        for restaurant in allRestaurants:
            # BUG FIX: the original used "++count", which in Python is a double
            # unary plus (a no-op), so the scanned-restaurant counter stayed 0.
            count += 1
            res = "[" + restaurant['restaurant']['user_rating']['aggregate_rating'] + "/5] " + restaurant['restaurant']['name'] + " in " + restaurant['restaurant']['location']['address']
            avg_c_2 = restaurant['restaurant']['average_cost_for_two']
            if rangeMin <= avg_c_2 <= rangeMax:
                res = restaurant['restaurant']['currency'] + str(restaurant['restaurant']['average_cost_for_two']) + " " + res + "\n"
                if index < 5:
                    response = response + res
                if index < 10:
                    result_of_last_query = result_of_last_query + res
                index = index + 1
        # Shape the final message depending on how many matches were found.
        if index == 0:
            response = "Oops! no restaurant found for this query. " + " search results = " + str(count)
        elif index < 5:
            # Fewer than 5 hits: suggest widening the budget.
            response = response + "\n \nFor more results please search in higher budget range...\n \n"
        elif index < 10:
            result_of_last_query = result_of_last_query + "\n \nFor more results please search in higher budget range...\n \n"
        return response

    def run(self, dispatcher, tracker, domain):
        """Resolve the location, query Zomato for the cuisine, utter filtered results."""
        loc = tracker.get_slot('location')
        cuisine = tracker.get_slot('cuisine')
        budget = tracker.get_slot('budget')
        zomato = zomatopy.initialize_app(zomato_config)
        location_detail = zomato.get_location(loc, 1)
        d1 = json.loads(location_detail)
        lat = d1["location_suggestions"][0]["latitude"]
        lon = d1["location_suggestions"][0]["longitude"]
        # cuisines and their respective Zomato cuisine IDs
        cuisines_dict = {
            'american': 1,
            'mexican': 73,
            'italian': 55,
            'chinese': 25,
            'north indian': 50,
            'south indian': 85
        }
        results = zomato.restaurant_search("", lat, lon, str(cuisines_dict.get(cuisine)), 50)
        d = json.loads(results)
        response = ""
        if d['results_found'] == 0:
            response = "Sorry, we didn't find any results for this query."
        else:
            response = self.filterRestaurantBasedOnBudget(budget, d['restaurants'])
        dispatcher.utter_message(str(response))
        return [SlotSet('location', loc)]
# list of TIER1 and TIER2 cities
# BUG FIX: "Shimla" and "Solapur" were adjacent string literals with no comma
# between them, so Python concatenated them into "ShimlaSolapur" and neither
# city could ever match a user's location.
t1_t2_cities = ["Ahmedabad", "Bangalore", "Chennai", "Delhi", "Hyderabad", "Kolkata", "Mumbai", "Pune",
                "Agra", "Ajmer", "Aligarh", "Amravati", "Amritsar", "Asansol", "Aurangabad",
                "Bareilly", "Belgaum", "Bhavnagar", "Bhiwandi", "Bhopal", "Bhubaneswar",
                "Bikaner", "Bokaro Steel City", "Chandigarh", "Coimbatore", "Cuttack", "Dehradun",
                "Dhanbad", "Bhilai", "Durgapur", "Erode", "Faridabad", "Firozabad", "Ghaziabad",
                "Gorakhpur", "Gulbarga", "Guntur", "Gurgaon", "Guwahati", "Hamirpur",
                "Gwalior", "Hubli-Dharwad", "Indore", "Jabalpur", "Jaipur", "Jalandhar", "Jammu", "Jamnagar", "Jamshedpur", "Jhansi", "Jodhpur",
                "Kannur", "Kanpur", "Kakinada", "Kochi", "Kottayam", "Kolhapur", "Kollam", "Kota", "Kozhikode", "Kurnool", "Lucknow", "Ludhiana",
                "Madurai", "Malappuram", "Mathura", "Goa", "Mangalore", "Meerut",
                "Moradabad", "Mysore", "Nagpur", "Nanded", "Nashik", "Nellore", "Noida", "Patna", "Pondicherry", "Purulia Prayagraj", "Raipur", "Rajkot",
                "Rajahmundry", "Ranchi", "Rourkela", "Salem", "Sangli", "Siliguri", "Shimla",
                "Solapur", "Srinagar", "Surat", "Thiruvananthapuram", "Thrissur", "Tiruchirappalli", "Tiruppur",
                "Ujjain", "Bijapur", "Vadodara", "Varanasi",
                "Vasai-Virar City", "Vijayawada", "Visakhapatnam", "Vellore", "Warangal"]
# Lower-cased copy used for case-insensitive membership tests.
t1_t2_cities_list = [x.lower() for x in t1_t2_cities]
# Check if the location exists. using zomato api.if found then save it, else utter not found.
class ActionValidateLocation(Action):
    """Validate the 'location' slot: accept known tier-1/2 cities, otherwise ask Zomato.

    Sets the 'location_match' slot to "one" when the city is recognised and
    "zero" when the Zomato city lookup fails.
    """

    def name(self):
        # Action name referenced from the Rasa domain/stories.
        return 'action_check_location'

    def run(self, dispatcher, tracker, domain):
        loc = tracker.get_slot('location')
        city = str(loc)
        # Fast path: known tier-1/tier-2 city (case-insensitive).
        if city.lower() in t1_t2_cities_list:
            return [SlotSet('location_match', "one")]
        zomato = zomatopy.initialize_app(zomato_config)
        try:
            # Only the success/failure of the lookup matters; the ID is unused.
            zomato.get_city_ID(city)
            return [SlotSet('location_match', "one")]
        # BUG FIX: a bare "except:" also swallows SystemExit/KeyboardInterrupt;
        # catch Exception so only real lookup failures mean "unknown city".
        except Exception:
            return [SlotSet('location_match', "zero")]
# Send email the list of 10 restaurants
class ActionSendEmail(Action):
    """E-mail the cached top-10 restaurant list to the address in the 'email' slot."""

    def name(self):
        # Action name referenced from the Rasa domain/stories.
        return 'action_send_email'

    def run(self, dispatcher, tracker, domain):
        global result_of_last_query
        email = tracker.get_slot('email')
        # Slack wraps addresses as "<mailto:a@b|a@b>"; keep the part after '|'.
        if len(email.split("|")) == 2:
            email = email.split("|")[1]
        import smtplib
        s = smtplib.SMTP('smtp.gmail.com', 587)
        s.starttls()
        # NOTE(review): credentials are placeholders; supply real ones via configuration.
        s.login("<Enter login E-mail ID here>", "<Enter the password here>")
        SUBJECT = "Foodbot - Be Foodoholic in Nature !!"
        TEXT = "Hi Foodoholic :) !!\nThe details of all the restaurants you inquried \n \n"
        message = 'Subject: {}\n\n{}'.format(SUBJECT, TEXT)
        message = message + result_of_last_query
        try:
            s.sendmail("<Enter login E-mail ID here>", str(email), message)
        # BUG FIX: was a bare "except:" (also swallows SystemExit/KeyboardInterrupt).
        except Exception:
            # On failure, echo the address back so the user can correct it.
            dispatcher.utter_message(email)
        finally:
            # BUG FIX: the original only called quit() on success, leaking the
            # SMTP connection whenever sendmail failed.
            s.quit()
        result_of_last_query = ""
        return [AllSlotsReset()]
from rasa_sdk.events import AllSlotsReset
from rasa_sdk.events import Restarted
class ActionRestarted(Action):
    """Custom action that fully restarts the conversation session."""
    def name(self):
        # Action name referenced from the Rasa domain/stories.
        return 'action_restart'
    def run(self, dispatcher, tracker, domain):
        # Emit the Restarted event so Rasa clears the tracker and starts over.
        return[Restarted()]
class ActionSlotReset(Action):
    """Custom action that clears every slot without restarting the conversation."""
    def name(self):
        # Action name referenced from the Rasa domain/stories.
        return 'action_slot_reset'
    def run(self, dispatcher, tracker, domain):
        # Emit AllSlotsReset so the tracker forgets all collected slot values.
        return[AllSlotsReset()]
| 32.679245 | 177 | 0.705831 | from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from rasa_sdk import Action
from rasa_sdk.events import SlotSet
import zomatopy
import json
zomato_config={ "user_key":"<Enter your Zomato API key here >"}
result_of_last_query = ""
class ActionSearchRestaurants(Action):
    """Search Zomato restaurants for the tracked location/cuisine and filter by budget."""

    def name(self):
        return 'action_restaurant'

    def filterRestaurantBasedOnBudget(self, userbudget, allRestaurants):
        """Return up to 5 budget-matching restaurants; cache top 10 in result_of_last_query."""
        rangeMin = 0
        rangeMax = 100000
        # Guard against an unset (None) slot before calling isdigit().
        if userbudget is not None and str(userbudget).isdigit():
            price = int(userbudget)
            if price == 1:
                rangeMax = 299
            elif price == 2:
                rangeMin = 300
                rangeMax = 699
            elif price == 3:
                rangeMin = 700
            elif price < 300:
                rangeMax = 299
            elif price < 700 and price >= 300:
                rangeMin = 300
                rangeMax = 699
            else:
                rangeMin = 700
        else:
            rangeMin = 300
            rangeMax = 699
        index = 0
        count = 0
        response = ""
        global result_of_last_query
        result_of_last_query = ""
        for restaurant in allRestaurants:
            # BUG FIX: "++count" is a no-op in Python; the counter never advanced.
            count += 1
            res = "[" + restaurant['restaurant']['user_rating']['aggregate_rating'] + "/5] " + restaurant['restaurant']['name'] + " in " + restaurant['restaurant']['location']['address']
            avg_c_2 = restaurant['restaurant']['average_cost_for_two']
            if rangeMin <= avg_c_2 <= rangeMax:
                res = restaurant['restaurant']['currency'] + str(restaurant['restaurant']['average_cost_for_two']) + " " + res + "\n"
                if index < 5:
                    response = response + res
                if index < 10:
                    result_of_last_query = result_of_last_query + res
                index = index + 1
        if index == 0:
            response = "Oops! no restaurant found for this query. " + " search results = " + str(count)
        elif index < 5:
            response = response + "\n \nFor more results please search in higher budget range...\n \n"
        elif index < 10:
            result_of_last_query = result_of_last_query + "\n \nFor more results please search in higher budget range...\n \n"
        return response

    def run(self, dispatcher, tracker, domain):
        loc = tracker.get_slot('location')
        cuisine = tracker.get_slot('cuisine')
        budget = tracker.get_slot('budget')
        zomato = zomatopy.initialize_app(zomato_config)
        location_detail = zomato.get_location(loc, 1)
        d1 = json.loads(location_detail)
        lat = d1["location_suggestions"][0]["latitude"]
        lon = d1["location_suggestions"][0]["longitude"]
        cuisines_dict = {
            'american': 1,
            'mexican': 73,
            'italian': 55,
            'chinese': 25,
            'north indian': 50,
            'south indian': 85
        }
        results = zomato.restaurant_search("", lat, lon, str(cuisines_dict.get(cuisine)), 50)
        d = json.loads(results)
        response = ""
        if d['results_found'] == 0:
            response = "Sorry, we didn't find any results for this query."
        else:
            response = self.filterRestaurantBasedOnBudget(budget, d['restaurants'])
        dispatcher.utter_message(str(response))
        return [SlotSet('location', loc)]
# list of TIER1 and TIER2 cities
# BUG FIX: missing comma between "Shimla" and "Solapur" concatenated them
# into the single literal "ShimlaSolapur", so neither city could ever match.
t1_t2_cities = ["Ahmedabad", "Bangalore", "Chennai", "Delhi", "Hyderabad", "Kolkata", "Mumbai", "Pune",
                "Agra", "Ajmer", "Aligarh", "Amravati", "Amritsar", "Asansol", "Aurangabad",
                "Bareilly", "Belgaum", "Bhavnagar", "Bhiwandi", "Bhopal", "Bhubaneswar",
                "Bikaner", "Bokaro Steel City", "Chandigarh", "Coimbatore", "Cuttack", "Dehradun",
                "Dhanbad", "Bhilai", "Durgapur", "Erode", "Faridabad", "Firozabad", "Ghaziabad",
                "Gorakhpur", "Gulbarga", "Guntur", "Gurgaon", "Guwahati", "Hamirpur",
                "Gwalior", "Hubli-Dharwad", "Indore", "Jabalpur", "Jaipur", "Jalandhar", "Jammu", "Jamnagar", "Jamshedpur", "Jhansi", "Jodhpur",
                "Kannur", "Kanpur", "Kakinada", "Kochi", "Kottayam", "Kolhapur", "Kollam", "Kota", "Kozhikode", "Kurnool", "Lucknow", "Ludhiana",
                "Madurai", "Malappuram", "Mathura", "Goa", "Mangalore", "Meerut",
                "Moradabad", "Mysore", "Nagpur", "Nanded", "Nashik", "Nellore", "Noida", "Patna", "Pondicherry", "Purulia Prayagraj", "Raipur", "Rajkot",
                "Rajahmundry", "Ranchi", "Rourkela", "Salem", "Sangli", "Siliguri", "Shimla",
                "Solapur", "Srinagar", "Surat", "Thiruvananthapuram", "Thrissur", "Tiruchirappalli", "Tiruppur",
                "Ujjain", "Bijapur", "Vadodara", "Varanasi",
                "Vasai-Virar City", "Vijayawada", "Visakhapatnam", "Vellore", "Warangal"]
t1_t2_cities_list = [x.lower() for x in t1_t2_cities]
# Check if the location exists. using zomato api.if found then save it, else utter not found.
class ActionValidateLocation(Action):
    """Validate the 'location' slot against known cities, falling back to Zomato."""

    def name(self):
        return 'action_check_location'

    def run(self, dispatcher, tracker, domain):
        loc = tracker.get_slot('location')
        city = str(loc)
        if city.lower() in t1_t2_cities_list:
            return [SlotSet('location_match', "one")]
        zomato = zomatopy.initialize_app(zomato_config)
        try:
            zomato.get_city_ID(city)
            return [SlotSet('location_match', "one")]
        # BUG FIX: bare "except:" replaced — it also swallowed
        # SystemExit/KeyboardInterrupt.
        except Exception:
            return [SlotSet('location_match', "zero")]
# Send email the list of 10 restaurants
class ActionSendEmail(Action):
    """E-mail the cached restaurant list to the address in the 'email' slot."""

    def name(self):
        return 'action_send_email'

    def run(self, dispatcher, tracker, domain):
        global result_of_last_query
        email = tracker.get_slot('email')
        # for slack handling
        if len(email.split("|")) == 2:
            email = email.split("|")[1]
        import smtplib
        s = smtplib.SMTP('smtp.gmail.com', 587)
        s.starttls()
        s.login("<Enter login E-mail ID here>", "<Enter the password here>")
        SUBJECT = "Foodbot - Be Foodoholic in Nature !!"
        TEXT = "Hi Foodoholic :) !!\nThe details of all the restaurants you inquried \n \n"
        message = 'Subject: {}\n\n{}'.format(SUBJECT, TEXT)
        message = message + result_of_last_query
        try:
            s.sendmail("<Enter login E-mail ID here>", str(email), message)
        # BUG FIX: narrowed the bare "except:"; quit() moved to finally so the
        # SMTP connection is closed even when sendmail fails.
        except Exception:
            dispatcher.utter_message(email)
        finally:
            s.quit()
        result_of_last_query = ""
        return [AllSlotsReset()]
from rasa_sdk.events import AllSlotsReset
from rasa_sdk.events import Restarted
class ActionRestarted(Action):
def name(self):
return 'action_restart'
def run(self, dispatcher, tracker, domain):
return[Restarted()]
class ActionSlotReset(Action):
def name(self):
return 'action_slot_reset'
def run(self, dispatcher, tracker, domain):
return[AllSlotsReset()]
| true | true |
f71c1d88fc7cda064575b3b49230553d31ba1b56 | 9,193 | py | Python | services/ui/paths/label.py | sbworth/getnoc | a9a5647df31822062db3db7afe7ae1c005d166f7 | [
"BSD-3-Clause"
] | 84 | 2017-10-22T11:01:39.000Z | 2022-02-27T03:43:48.000Z | services/ui/paths/label.py | sbworth/getnoc | a9a5647df31822062db3db7afe7ae1c005d166f7 | [
"BSD-3-Clause"
] | 22 | 2017-12-11T07:21:56.000Z | 2021-09-23T02:53:50.000Z | services/ui/paths/label.py | sbworth/getnoc | a9a5647df31822062db3db7afe7ae1c005d166f7 | [
"BSD-3-Clause"
] | 23 | 2017-12-06T06:59:52.000Z | 2022-02-24T00:02:25.000Z | # ----------------------------------------------------------------------
# Label REST API
# ----------------------------------------------------------------------
# Copyright (C) 2007-2021 The NOC Project
# See LICENSE for details
# ----------------------------------------------------------------------
# Third-party modules
from fastapi import APIRouter
# NOC modules
from noc.main.models.label import Label
from ..models.label import DefaultLabelItem, FormLabelItem, LabelLabelItem
from ..utils.ref import get_reference
from ..utils.rest.document import DocumentResourceAPI
from ..utils.rest.op import FilterExact, FuncFilter, FilterBool
router = APIRouter()
class LabelAPI(DocumentResourceAPI[Label]):
    """UI REST resource for Label documents.

    Exposes list/detail endpoints under ``prefix`` and maps Label documents
    onto the three UI item representations: a compact label-picker entry
    (``item_to_label``), the full read item (``item_to_default``) and the
    edit-form item (``item_to_form``).
    """
    prefix = "/api/ui/label"
    model = Label
    # Supported list-query operators: substring/"id" lookups on the name,
    # exact match on "name", and a boolean filter per scope flag.
    # NOTE(review): item_to_default/item_to_form expose "enable_assetpeer",
    # which has no matching FilterBool here — confirm whether that is intended.
    list_ops = [
        FuncFilter("query", function=lambda qs, values: qs.filter(name__icontains=values[0])),
        FuncFilter("id", function=lambda qs, values: qs.filter(name__in=values)),
        FilterExact("name"),
        FilterBool("enable_agent"),
        FilterBool("enable_service"),
        FilterBool("enable_serviceprofile"),
        FilterBool("enable_managedobject"),
        FilterBool("enable_managedobjectprofile"),
        FilterBool("enable_administrativedomain"),
        FilterBool("enable_authprofile"),
        FilterBool("enable_commandsnippet"),
        FilterBool("enable_allocationgroup"),
        FilterBool("enable_networksegment"),
        FilterBool("enable_object"),
        FilterBool("enable_objectmodel"),
        FilterBool("enable_platform"),
        FilterBool("enable_resourcegroup"),
        FilterBool("enable_sensor"),
        FilterBool("enable_sensorprofile"),
        FilterBool("enable_subscriber"),
        FilterBool("enable_subscriberprofile"),
        FilterBool("enable_supplier"),
        FilterBool("enable_supplierprofile"),
        FilterBool("enable_dnszone"),
        FilterBool("enable_dnszonerecord"),
        FilterBool("enable_division"),
        FilterBool("enable_kbentry"),
        FilterBool("enable_ipaddress"),
        FilterBool("enable_addressprofile"),
        FilterBool("enable_ipaddressrange"),
        FilterBool("enable_ipprefix"),
        FilterBool("enable_prefixprofile"),
        FilterBool("enable_vrf"),
        FilterBool("enable_vrfgroup"),
        FilterBool("enable_asn"),
        FilterBool("enable_peer"),
        FilterBool("enable_vc"),
        FilterBool("enable_vlan"),
        FilterBool("enable_vlanprofile"),
        FilterBool("enable_vpn"),
        FilterBool("enable_slaprobe"),
        FilterBool("enable_slaprofile"),
        FilterBool("enable_alarm"),
        FilterBool("enable_vpnprofile"),
    ]
    @classmethod
    def item_to_label(cls, item: Label) -> LabelLabelItem:
        """Render *item* as a compact label-picker entry.

        Splits a scoped name ("scope::value") into scope and value parts and
        formats the stored integer colors as "#RRGGBB" hex strings.
        """
        return LabelLabelItem(
            id=str(item.name),
            label=str(item.name),
            is_protected=item.is_protected,
            scope=item.name.rsplit("::", 1)[0] if item.is_scoped else "",
            value=item.name.split("::")[-1],
            bg_color1=f"#{item.bg_color1:06x}",
            fg_color1=f"#{item.fg_color1:06x}",
            bg_color2=f"#{item.bg_color2:06x}",
            fg_color2=f"#{item.fg_color2:06x}",
        )
    @classmethod
    def item_to_default(cls, item: Label) -> DefaultLabelItem:
        """Render *item* as the full read-only representation.

        Includes identity/color fields, every per-model scope flag (coerced to
        bool), exposure flags, and the remote-system linkage.
        """
        return DefaultLabelItem(
            id=str(item.id),
            name=str(item.name),
            description=item.description,
            bg_color1=item.bg_color1,
            fg_color1=item.fg_color1,
            bg_color2=item.bg_color2,
            fg_color2=item.fg_color2,
            is_protected=item.is_protected,
            is_autogenerated=item.is_autogenerated,
            # Label scope
            enable_agent=bool(item.enable_agent),
            enable_service=bool(item.enable_service),
            enable_serviceprofile=bool(item.enable_serviceprofile),
            enable_managedobject=bool(item.enable_managedobject),
            enable_managedobjectprofile=bool(item.enable_managedobjectprofile),
            enable_administrativedomain=bool(item.enable_administrativedomain),
            enable_authprofile=bool(item.enable_authprofile),
            enable_commandsnippet=bool(item.enable_commandsnippet),
            enable_allocationgroup=bool(item.enable_allocationgroup),
            enable_networksegment=bool(item.enable_networksegment),
            enable_object=bool(item.enable_object),
            enable_objectmodel=bool(item.enable_objectmodel),
            enable_platform=bool(item.enable_platform),
            enable_resourcegroup=bool(item.enable_resourcegroup),
            enable_sensor=bool(item.enable_sensor),
            enable_sensorprofile=bool(item.enable_sensorprofile),
            enable_subscriber=bool(item.enable_subscriber),
            enable_subscriberprofile=bool(item.enable_subscriberprofile),
            enable_supplier=bool(item.enable_supplier),
            enable_supplierprofile=bool(item.enable_supplierprofile),
            enable_dnszone=bool(item.enable_dnszone),
            enable_dnszonerecord=bool(item.enable_dnszonerecord),
            enable_division=bool(item.enable_division),
            enable_kbentry=bool(item.enable_kbentry),
            enable_ipaddress=bool(item.enable_ipaddress),
            enable_addressprofile=bool(item.enable_addressprofile),
            enable_ipaddressrange=bool(item.enable_ipaddressrange),
            enable_ipprefix=bool(item.enable_ipprefix),
            enable_prefixprofile=bool(item.enable_prefixprofile),
            enable_vrf=bool(item.enable_vrf),
            enable_vrfgroup=bool(item.enable_vrfgroup),
            enable_asn=bool(item.enable_asn),
            enable_assetpeer=bool(item.enable_assetpeer),
            enable_peer=bool(item.enable_peer),
            enable_vc=bool(item.enable_vc),
            enable_vlan=bool(item.enable_vlan),
            enable_vlanprofile=bool(item.enable_vlanprofile),
            enable_vpn=bool(item.enable_vpn),
            enable_vpnprofile=bool(item.enable_vpnprofile),
            enable_slaprobe=bool(item.enable_slaprobe),
            enable_slaprofile=bool(item.enable_slaprofile),
            enable_alarm=bool(item.enable_alarm),
            expose_metric=bool(item.expose_metric),
            expose_datastream=bool(item.expose_datastream),
            remote_system=get_reference(item.remote_system),
            remote_id=item.remote_id,
        )
    @classmethod
    def item_to_form(cls, item: Label) -> FormLabelItem:
        """Render *item* as the editable form representation.

        Mirrors item_to_default minus id/autogeneration/remote fields; scope
        flags are passed through without bool() coercion.
        """
        return FormLabelItem(
            name=item.name,
            description=item.description,
            bg_color1=item.bg_color1,
            fg_color1=item.fg_color1,
            bg_color2=item.bg_color2,
            fg_color2=item.fg_color2,
            is_protected=item.is_protected,
            enable_agent=item.enable_agent,
            enable_service=item.enable_service,
            enable_serviceprofile=item.enable_serviceprofile,
            enable_managedobject=item.enable_managedobject,
            enable_managedobjectprofile=item.enable_managedobjectprofile,
            enable_administrativedomain=item.enable_administrativedomain,
            enable_authprofile=item.enable_authprofile,
            enable_commandsnippet=item.enable_commandsnippet,
            enable_allocationgroup=item.enable_allocationgroup,
            enable_networksegment=item.enable_networksegment,
            enable_object=item.enable_object,
            enable_objectmodel=item.enable_objectmodel,
            enable_platform=item.enable_platform,
            enable_resourcegroup=item.enable_resourcegroup,
            enable_sensor=item.enable_sensor,
            enable_sensorprofile=item.enable_sensorprofile,
            enable_subscriber=item.enable_subscriber,
            enable_subscriberprofile=item.enable_subscriberprofile,
            enable_supplier=item.enable_supplier,
            enable_supplierprofile=item.enable_supplierprofile,
            enable_dnszone=item.enable_dnszone,
            enable_dnszonerecord=item.enable_dnszonerecord,
            enable_division=item.enable_division,
            enable_kbentry=item.enable_kbentry,
            enable_ipaddress=item.enable_ipaddress,
            enable_addressprofile=item.enable_addressprofile,
            enable_ipaddressrange=item.enable_ipaddressrange,
            enable_ipprefix=item.enable_ipprefix,
            enable_prefixprofile=item.enable_prefixprofile,
            enable_vrf=item.enable_vrf,
            enable_vrfgroup=item.enable_vrfgroup,
            enable_asn=item.enable_asn,
            enable_assetpeer=item.enable_assetpeer,
            enable_peer=item.enable_peer,
            enable_vc=item.enable_vc,
            enable_vlan=item.enable_vlan,
            enable_vlanprofile=item.enable_vlanprofile,
            enable_vpn=item.enable_vpn,
            enable_vpnprofile=item.enable_vpnprofile,
            enable_slaprobe=item.enable_slaprobe,
            enable_slaprofile=item.enable_slaprofile,
            enable_alarm=item.enable_alarm,
            expose_metric=item.expose_metric,
            expose_datastream=item.expose_datastream,
        )
# Install endpoints
LabelAPI(router)
| 44.843902 | 94 | 0.657892 |
from fastapi import APIRouter
from noc.main.models.label import Label
from ..models.label import DefaultLabelItem, FormLabelItem, LabelLabelItem
from ..utils.ref import get_reference
from ..utils.rest.document import DocumentResourceAPI
from ..utils.rest.op import FilterExact, FuncFilter, FilterBool
router = APIRouter()
class LabelAPI(DocumentResourceAPI[Label]):
prefix = "/api/ui/label"
model = Label
list_ops = [
FuncFilter("query", function=lambda qs, values: qs.filter(name__icontains=values[0])),
FuncFilter("id", function=lambda qs, values: qs.filter(name__in=values)),
FilterExact("name"),
FilterBool("enable_agent"),
FilterBool("enable_service"),
FilterBool("enable_serviceprofile"),
FilterBool("enable_managedobject"),
FilterBool("enable_managedobjectprofile"),
FilterBool("enable_administrativedomain"),
FilterBool("enable_authprofile"),
FilterBool("enable_commandsnippet"),
FilterBool("enable_allocationgroup"),
FilterBool("enable_networksegment"),
FilterBool("enable_object"),
FilterBool("enable_objectmodel"),
FilterBool("enable_platform"),
FilterBool("enable_resourcegroup"),
FilterBool("enable_sensor"),
FilterBool("enable_sensorprofile"),
FilterBool("enable_subscriber"),
FilterBool("enable_subscriberprofile"),
FilterBool("enable_supplier"),
FilterBool("enable_supplierprofile"),
FilterBool("enable_dnszone"),
FilterBool("enable_dnszonerecord"),
FilterBool("enable_division"),
FilterBool("enable_kbentry"),
FilterBool("enable_ipaddress"),
FilterBool("enable_addressprofile"),
FilterBool("enable_ipaddressrange"),
FilterBool("enable_ipprefix"),
FilterBool("enable_prefixprofile"),
FilterBool("enable_vrf"),
FilterBool("enable_vrfgroup"),
FilterBool("enable_asn"),
FilterBool("enable_peer"),
FilterBool("enable_vc"),
FilterBool("enable_vlan"),
FilterBool("enable_vlanprofile"),
FilterBool("enable_vpn"),
FilterBool("enable_slaprobe"),
FilterBool("enable_slaprofile"),
FilterBool("enable_alarm"),
FilterBool("enable_vpnprofile"),
]
@classmethod
def item_to_label(cls, item: Label) -> LabelLabelItem:
return LabelLabelItem(
id=str(item.name),
label=str(item.name),
is_protected=item.is_protected,
scope=item.name.rsplit("::", 1)[0] if item.is_scoped else "",
value=item.name.split("::")[-1],
bg_color1=f"#{item.bg_color1:06x}",
fg_color1=f"#{item.fg_color1:06x}",
bg_color2=f"#{item.bg_color2:06x}",
fg_color2=f"#{item.fg_color2:06x}",
)
@classmethod
def item_to_default(cls, item: Label) -> DefaultLabelItem:
return DefaultLabelItem(
id=str(item.id),
name=str(item.name),
description=item.description,
bg_color1=item.bg_color1,
fg_color1=item.fg_color1,
bg_color2=item.bg_color2,
fg_color2=item.fg_color2,
is_protected=item.is_protected,
is_autogenerated=item.is_autogenerated,
enable_agent=bool(item.enable_agent),
enable_service=bool(item.enable_service),
enable_serviceprofile=bool(item.enable_serviceprofile),
enable_managedobject=bool(item.enable_managedobject),
enable_managedobjectprofile=bool(item.enable_managedobjectprofile),
enable_administrativedomain=bool(item.enable_administrativedomain),
enable_authprofile=bool(item.enable_authprofile),
enable_commandsnippet=bool(item.enable_commandsnippet),
enable_allocationgroup=bool(item.enable_allocationgroup),
enable_networksegment=bool(item.enable_networksegment),
enable_object=bool(item.enable_object),
enable_objectmodel=bool(item.enable_objectmodel),
enable_platform=bool(item.enable_platform),
enable_resourcegroup=bool(item.enable_resourcegroup),
enable_sensor=bool(item.enable_sensor),
enable_sensorprofile=bool(item.enable_sensorprofile),
enable_subscriber=bool(item.enable_subscriber),
enable_subscriberprofile=bool(item.enable_subscriberprofile),
enable_supplier=bool(item.enable_supplier),
enable_supplierprofile=bool(item.enable_supplierprofile),
enable_dnszone=bool(item.enable_dnszone),
enable_dnszonerecord=bool(item.enable_dnszonerecord),
enable_division=bool(item.enable_division),
enable_kbentry=bool(item.enable_kbentry),
enable_ipaddress=bool(item.enable_ipaddress),
enable_addressprofile=bool(item.enable_addressprofile),
enable_ipaddressrange=bool(item.enable_ipaddressrange),
enable_ipprefix=bool(item.enable_ipprefix),
enable_prefixprofile=bool(item.enable_prefixprofile),
enable_vrf=bool(item.enable_vrf),
enable_vrfgroup=bool(item.enable_vrfgroup),
enable_asn=bool(item.enable_asn),
enable_assetpeer=bool(item.enable_assetpeer),
enable_peer=bool(item.enable_peer),
enable_vc=bool(item.enable_vc),
enable_vlan=bool(item.enable_vlan),
enable_vlanprofile=bool(item.enable_vlanprofile),
enable_vpn=bool(item.enable_vpn),
enable_vpnprofile=bool(item.enable_vpnprofile),
enable_slaprobe=bool(item.enable_slaprobe),
enable_slaprofile=bool(item.enable_slaprofile),
enable_alarm=bool(item.enable_alarm),
expose_metric=bool(item.expose_metric),
expose_datastream=bool(item.expose_datastream),
remote_system=get_reference(item.remote_system),
remote_id=item.remote_id,
)
@classmethod
def item_to_form(cls, item: Label) -> FormLabelItem:
return FormLabelItem(
name=item.name,
description=item.description,
bg_color1=item.bg_color1,
fg_color1=item.fg_color1,
bg_color2=item.bg_color2,
fg_color2=item.fg_color2,
is_protected=item.is_protected,
enable_agent=item.enable_agent,
enable_service=item.enable_service,
enable_serviceprofile=item.enable_serviceprofile,
enable_managedobject=item.enable_managedobject,
enable_managedobjectprofile=item.enable_managedobjectprofile,
enable_administrativedomain=item.enable_administrativedomain,
enable_authprofile=item.enable_authprofile,
enable_commandsnippet=item.enable_commandsnippet,
enable_allocationgroup=item.enable_allocationgroup,
enable_networksegment=item.enable_networksegment,
enable_object=item.enable_object,
enable_objectmodel=item.enable_objectmodel,
enable_platform=item.enable_platform,
enable_resourcegroup=item.enable_resourcegroup,
enable_sensor=item.enable_sensor,
enable_sensorprofile=item.enable_sensorprofile,
enable_subscriber=item.enable_subscriber,
enable_subscriberprofile=item.enable_subscriberprofile,
enable_supplier=item.enable_supplier,
enable_supplierprofile=item.enable_supplierprofile,
enable_dnszone=item.enable_dnszone,
enable_dnszonerecord=item.enable_dnszonerecord,
enable_division=item.enable_division,
enable_kbentry=item.enable_kbentry,
enable_ipaddress=item.enable_ipaddress,
enable_addressprofile=item.enable_addressprofile,
enable_ipaddressrange=item.enable_ipaddressrange,
enable_ipprefix=item.enable_ipprefix,
enable_prefixprofile=item.enable_prefixprofile,
enable_vrf=item.enable_vrf,
enable_vrfgroup=item.enable_vrfgroup,
enable_asn=item.enable_asn,
enable_assetpeer=item.enable_assetpeer,
enable_peer=item.enable_peer,
enable_vc=item.enable_vc,
enable_vlan=item.enable_vlan,
enable_vlanprofile=item.enable_vlanprofile,
enable_vpn=item.enable_vpn,
enable_vpnprofile=item.enable_vpnprofile,
enable_slaprobe=item.enable_slaprobe,
enable_slaprofile=item.enable_slaprofile,
enable_alarm=item.enable_alarm,
expose_metric=item.expose_metric,
expose_datastream=item.expose_datastream,
)
LabelAPI(router)
| true | true |
f71c1e7d575a6e860d1fb2e06dd08a3844470ff2 | 51,171 | py | Python | dojo/engagement/views.py | pabloalexsandroalmeida83/django-DefectDojo | 094fbbf0d994c2f9b5c7ff071a28d4d5ed1f3a1c | [
"BSD-3-Clause"
] | null | null | null | dojo/engagement/views.py | pabloalexsandroalmeida83/django-DefectDojo | 094fbbf0d994c2f9b5c7ff071a28d4d5ed1f3a1c | [
"BSD-3-Clause"
] | null | null | null | dojo/engagement/views.py | pabloalexsandroalmeida83/django-DefectDojo | 094fbbf0d994c2f9b5c7ff071a28d4d5ed1f3a1c | [
"BSD-3-Clause"
] | null | null | null | import logging
import csv
import re
from openpyxl import Workbook
from openpyxl.styles import Font
from tempfile import NamedTemporaryFile
from datetime import datetime
import operator
from django.contrib.auth.models import User
from django.conf import settings
from django.contrib import messages
from django.core.exceptions import ValidationError, PermissionDenied
from django.urls import reverse
from django.db.models import Q, Count
from django.http import HttpResponseRedirect, StreamingHttpResponse, HttpResponse, FileResponse, QueryDict
from django.shortcuts import render, get_object_or_404
from django.views.decorators.cache import cache_page
from django.utils import timezone
from time import strftime
from django.contrib.admin.utils import NestedObjects
from django.db import DEFAULT_DB_ALIAS
from dojo.engagement.services import close_engagement, reopen_engagement
from dojo.filters import EngagementFilter, EngagementDirectFilter, EngagementTestFilter
from dojo.forms import CheckForm, \
UploadThreatForm, RiskAcceptanceForm, NoteForm, DoneForm, \
EngForm, TestForm, ReplaceRiskAcceptanceProofForm, AddFindingsRiskAcceptanceForm, DeleteEngagementForm, ImportScanForm, \
CredMappingForm, JIRAEngagementForm, JIRAImportScanForm, TypedNoteForm, JIRAProjectForm, \
EditRiskAcceptanceForm
from dojo.models import Finding, Product, Engagement, Test, \
Check_List, Test_Import, Notes, \
Risk_Acceptance, Development_Environment, Endpoint, \
Cred_Mapping, Dojo_User, System_Settings, Note_Type, Product_API_Scan_Configuration
from dojo.tools.factory import get_scan_types_sorted
from dojo.utils import add_error_message_to_response, add_success_message_to_response, get_page_items, add_breadcrumb, handle_uploaded_threat, \
FileIterWrapper, get_cal_event, Product_Tab, is_scan_file_too_large, \
get_system_setting, redirect_to_return_url_or_else, get_return_url
from dojo.notifications.helper import create_notification
from dojo.finding.views import find_available_notetypes
from functools import reduce
from django.db.models.query import Prefetch, QuerySet
import dojo.jira_link.helper as jira_helper
import dojo.risk_acceptance.helper as ra_helper
from dojo.risk_acceptance.helper import prefetch_for_expiration
from dojo.finding.helper import NOT_ACCEPTED_FINDINGS_QUERY
from django.views.decorators.vary import vary_on_cookie
from dojo.authorization.authorization import user_has_permission_or_403
from dojo.authorization.roles_permissions import Permissions
from dojo.product.queries import get_authorized_products
from dojo.engagement.queries import get_authorized_engagements
from dojo.authorization.authorization_decorators import user_is_authorized
from dojo.importers.importer.importer import DojoDefaultImporter as Importer
import dojo.notifications.helper as notifications_helper
from dojo.endpoint.utils import save_endpoints_to_add
logger = logging.getLogger(__name__)
@cache_page(60 * 5)  # cache for 5 minutes
@vary_on_cookie
def engagement_calendar(request):
    """Render the engagement calendar, optionally filtered by engagement lead.

    The ``lead`` query parameter may repeat; '0' (or its absence) selects all
    authorized engagements, and '-1' additionally selects engagements that
    have no lead assigned.
    """
    # No lead filter requested (or '0' selected): show everything the user may view.
    if 'lead' not in request.GET or '0' in request.GET.getlist('lead'):
        engagements = get_authorized_engagements(Permissions.Engagement_View)
    else:
        filters = []
        leads = request.GET.getlist('lead', '')
        if '-1' in request.GET.getlist('lead'):
            # '-1' stands for "unassigned": match engagements without a lead.
            leads.remove('-1')
            filters.append(Q(lead__isnull=True))
        filters.append(Q(lead__in=leads))
        # OR the unassigned/lead-list conditions together.
        engagements = get_authorized_engagements(Permissions.Engagement_View).filter(reduce(operator.or_, filters))
    # Avoid per-row queries when the template walks lead/product.
    engagements = engagements.select_related('lead')
    engagements = engagements.prefetch_related('product')
    add_breadcrumb(
        title="Engagement Calendar", top_level=True, request=request)
    return render(
        request, 'dojo/calendar.html', {
            'caltype': 'engagements',
            'leads': request.GET.getlist('lead', ''),
            'engagements': engagements,
            'users': Dojo_User.objects.all()
        })
def get_filtered_engagements(request, view):
    """Build an EngagementDirectFilter over the engagements visible for *view*.

    *view* must be 'all' or 'active'; anything else raises ValidationError.
    """
    if view not in ['all', 'active']:
        raise ValidationError(f'View {view} is not allowed')
    qs = get_authorized_engagements(Permissions.Engagement_View).order_by('-target_start')
    if view == 'active':
        qs = qs.filter(active=True)
    qs = qs.select_related('product', 'product__prod_type')
    qs = qs.prefetch_related('lead', 'tags', 'product__tags')
    if System_Settings.objects.get().enable_jira:
        # Pull JIRA configuration along to avoid per-row queries in the template.
        qs = qs.prefetch_related(
            'jira_project__jira_instance',
            'product__jira_project_set__jira_instance'
        )
    return EngagementDirectFilter(request.GET, queryset=qs)
def get_test_counts(engagements):
    """Map engagement id -> number of tests, using one grouped aggregate query.

    Running this as a separate query is much faster than annotating the
    engagements queryset itself.
    """
    counts = {}
    rows = Test.objects.filter(
        engagement__in=engagements
    ).values(
        'engagement'
    ).annotate(
        test_count=Count('engagement')
    )
    for row in rows:
        counts[row['engagement']] = row['test_count']
    return counts
def engagements(request, view):
    """List engagements for *view* ('active' when not supplied, or 'all')."""
    view = view or 'active'
    filtered_engagements = get_filtered_engagements(request, view)
    paged_engagements = get_page_items(request, filtered_engagements.qs, 25)
    # Word lists feed the autocomplete widgets in the filter form.
    product_name_words = sorted(get_authorized_products(Permissions.Product_View).values_list('name', flat=True))
    engagement_name_words = sorted(get_authorized_engagements(Permissions.Engagement_View).values_list('name', flat=True).distinct())
    add_breadcrumb(
        title=f"{view.capitalize()} Engagements",
        top_level=not len(request.GET),
        request=request)
    return render(
        request, 'dojo/engagement.html', {
            'engagements': paged_engagements,
            'engagement_test_counts': get_test_counts(filtered_engagements.qs),
            'filter_form': filtered_engagements.form,
            'product_name_words': product_name_words,
            'engagement_name_words': engagement_name_words,
            'view': view.capitalize(),
        })
def engagements_all(request):
    """Render every product that has at least one engagement, engagements inlined.

    Each product row is prefetched/annotated so the template can show test
    counts, leads, tags and JIRA data without issuing per-row queries.
    """
    products_with_engagements = get_authorized_products(Permissions.Engagement_View)
    products_with_engagements = products_with_engagements.filter(~Q(engagement=None)).distinct()
    # count using prefetch instead of just using 'engagement__set_test_test` to avoid loading all test in memory just to count them
    filter_qs = products_with_engagements.prefetch_related(
        Prefetch('engagement_set', queryset=Engagement.objects.all().annotate(test_count=Count('test__id')))
    )
    filter_qs = filter_qs.prefetch_related(
        'engagement_set__tags',
        'prod_type',
        'engagement_set__lead',
        'tags',
    )
    if System_Settings.objects.get().enable_jira:
        # Only pay the JIRA prefetch cost when the integration is enabled.
        filter_qs = filter_qs.prefetch_related(
            'engagement_set__jira_project__jira_instance',
            'jira_project_set__jira_instance'
        )
    filtered = EngagementFilter(
        request.GET,
        queryset=filter_qs
    )
    prods = get_page_items(request, filtered.qs, 25)
    # Word lists feed the autocomplete widgets of the filter form.
    name_words = products_with_engagements.values_list('name', flat=True)
    eng_words = get_authorized_engagements(Permissions.Engagement_View).values_list('name', flat=True).distinct()
    add_breadcrumb(
        title="All Engagements",
        top_level=not len(request.GET),
        request=request)
    return render(
        request, 'dojo/engagements_all.html', {
            'products': prods,
            'filter_form': filtered.form,
            'name_words': sorted(set(name_words)),
            'eng_words': sorted(set(eng_words)),
        })
@user_is_authorized(Engagement, Permissions.Engagement_Edit, 'eid')
def edit_engagement(request, eid):
    """Edit an engagement (interactive or CI/CD) including its JIRA project/epic.

    GET renders the pre-populated form; POST validates and saves the
    engagement, then processes the optional JIRA project and epic sub-forms
    before redirecting (to add tests or back to the engagement view).
    """
    engagement = Engagement.objects.get(pk=eid)
    is_ci_cd = engagement.engagement_type == "CI/CD"
    jira_project_form = None
    jira_epic_form = None
    jira_project = None
    jira_error = False
    if request.method == 'POST':
        form = EngForm(request.POST, instance=engagement, cicd=is_ci_cd, product=engagement.product, user=request.user)
        jira_project = jira_helper.get_jira_project(engagement, use_inheritance=False)
        if form.is_valid():
            # first save engagement details
            new_status = form.cleaned_data.get('status')
            engagement = form.save(commit=False)
            if (new_status == "Cancelled" or new_status == "Completed"):
                # Closing statuses deactivate the engagement and notify subscribers.
                engagement.active = False
                # NOTE(review): the trailing comma after create_notification(...)
                # turns this statement into a discarded 1-tuple; harmless but
                # presumably unintended.
                create_notification(event='close_engagement',
                                    title='Closure of %s' % engagement.name,
                                    description='The engagement "%s" was closed' % (engagement.name),
                                    engagement=engagement, url=reverse('engagement_all_findings', args=(engagement.id, ))),
            else:
                engagement.active = True
            engagement.save()
            form.save_m2m()
            messages.add_message(
                request,
                messages.SUCCESS,
                'Engagement updated successfully.',
                extra_tags='alert-success')
            # Process JIRA sub-forms; 'error' accumulates any sub-form failure.
            success, jira_project_form = jira_helper.process_jira_project_form(request, instance=jira_project, target='engagement', engagement=engagement, product=engagement.product)
            error = not success
            success, jira_epic_form = jira_helper.process_jira_epic_form(request, engagement=engagement)
            error = error or not success
            if not error:
                if '_Add Tests' in request.POST:
                    return HttpResponseRedirect(
                        reverse('add_tests', args=(engagement.id, )))
                else:
                    return HttpResponseRedirect(
                        reverse('view_engagement', args=(engagement.id, )))
        else:
            logger.debug(form.errors)
    else:
        form = EngForm(initial={'product': engagement.product}, instance=engagement, cicd=is_ci_cd, product=engagement.product, user=request.user)
        jira_epic_form = None
        if get_system_setting('enable_jira'):
            jira_project = jira_helper.get_jira_project(engagement, use_inheritance=False)
            jira_project_form = JIRAProjectForm(instance=jira_project, target='engagement', product=engagement.product)
            logger.debug('showing jira-epic-form')
            jira_epic_form = JIRAEngagementForm(instance=engagement)
    if is_ci_cd:
        title = 'Edit CI/CD Engagement'
    else:
        title = 'Edit Interactive Engagement'
    product_tab = Product_Tab(engagement.product.id, title=title, tab="engagements")
    product_tab.setEngagement(engagement)
    return render(request, 'dojo/new_eng.html', {
        'product_tab': product_tab,
        'title': title,
        'form': form,
        'edit': True,
        'jira_epic_form': jira_epic_form,
        'jira_project_form': jira_project_form,
        'engagement': engagement,
    })
@user_is_authorized(Engagement, Permissions.Engagement_Delete, 'eid')
def delete_engagement(request, eid):
    """Confirm and perform deletion of an engagement.

    GET renders a confirmation page listing the related objects that would be
    removed; POST (with a matching id) deletes the engagement, notifies the
    lead, and redirects to the product's engagement list.
    """
    engagement = get_object_or_404(Engagement, pk=eid)
    product = engagement.product
    form = DeleteEngagementForm(instance=engagement)
    if request.method == 'POST':
        # The posted id must match the URL id as a safeguard against stale forms.
        if 'id' in request.POST and str(engagement.id) == request.POST['id']:
            form = DeleteEngagementForm(request.POST, instance=engagement)
            if form.is_valid():
                product = engagement.product
                engagement.delete()
                messages.add_message(
                    request,
                    messages.SUCCESS,
                    'Engagement and relationships removed.',
                    extra_tags='alert-success')
                create_notification(event='other',
                                    title='Deletion of %s' % engagement.name,
                                    product=product,
                                    description='The engagement "%s" was deleted by %s' % (engagement.name, request.user),
                                    url=request.build_absolute_uri(reverse('view_engagements', args=(product.id, ))),
                                    recipients=[engagement.lead],
                                    icon="exclamation-triangle")
                return HttpResponseRedirect(reverse("view_engagements", args=(product.id, )))
    # Collect the tree of related objects for display on the confirmation page.
    collector = NestedObjects(using=DEFAULT_DB_ALIAS)
    collector.collect([engagement])
    rels = collector.nested()
    product_tab = Product_Tab(product.id, title="Delete Engagement", tab="engagements")
    product_tab.setEngagement(engagement)
    return render(request, 'dojo/delete_engagement.html', {
        'product_tab': product_tab,
        'engagement': engagement,
        'form': form,
        'rels': rels,
    })
@user_is_authorized(Engagement, Permissions.Engagement_View, 'eid')
def view_engagement(request, eid):
    """Engagement detail page: tests, notes, files, risk acceptances, JIRA info.

    GET renders the page; POST adds a note to the engagement (requires the
    Note_Add permission) and re-renders with a fresh note form.
    """
    eng = get_object_or_404(Engagement, id=eid)
    tests = eng.test_set.all().order_by('test_type__name', '-updated')
    default_page_num = 10
    tests_filter = EngagementTestFilter(request.GET, queryset=tests, engagement=eng)
    paged_tests = get_page_items(request, tests_filter.qs, default_page_num)
    # prefetch only after creating the filters to avoid https://code.djangoproject.com/ticket/23771 and https://code.djangoproject.com/ticket/25375
    paged_tests.object_list = prefetch_for_view_tests(paged_tests.object_list)
    prod = eng.product
    risks_accepted = eng.risk_acceptance.all().select_related('owner').annotate(accepted_findings_count=Count('accepted_findings__id'))
    preset_test_type = None
    network = None
    if eng.preset:
        preset_test_type = eng.preset.test_type.all()
        network = eng.preset.network_locations.all()
    system_settings = System_Settings.objects.get()
    jissue = jira_helper.get_jira_issue(eng)
    jira_project = jira_helper.get_jira_project(eng)
    try:
        check = Check_List.objects.get(engagement=eng)
    except Exception:
        # The checklist is optional; treat any lookup failure as "no checklist".
        # (Was a bare `except:`, which also swallowed SystemExit/KeyboardInterrupt.)
        check = None
    notes = eng.notes.all()
    note_type_activation = Note_Type.objects.filter(is_active=True).count()
    if note_type_activation:
        available_note_types = find_available_notetypes(notes)
    form = DoneForm()
    files = eng.files.all()
    if request.method == 'POST':
        user_has_permission_or_403(request.user, eng, Permissions.Note_Add)
        eng.progress = 'check_list'
        eng.save()
        if note_type_activation:
            form = TypedNoteForm(request.POST, available_note_types=available_note_types)
        else:
            form = NoteForm(request.POST)
        if form.is_valid():
            new_note = form.save(commit=False)
            new_note.author = request.user
            new_note.date = timezone.now()
            new_note.save()
            eng.notes.add(new_note)
            # Reset to an empty form after a successful save.
            if note_type_activation:
                form = TypedNoteForm(available_note_types=available_note_types)
            else:
                form = NoteForm()
            messages.add_message(request,
                                 messages.SUCCESS,
                                 'Note added successfully.',
                                 extra_tags='alert-success')
    else:
        if note_type_activation:
            form = TypedNoteForm(available_note_types=available_note_types)
        else:
            form = NoteForm()
    creds = Cred_Mapping.objects.filter(
        product=eng.product).select_related('cred_id').order_by('cred_id')
    cred_eng = Cred_Mapping.objects.filter(
        engagement=eng.id).select_related('cred_id').order_by('cred_id')
    add_breadcrumb(parent=eng, top_level=False, request=request)
    title = ""
    if eng.engagement_type == "CI/CD":
        title = " CI/CD"
    product_tab = Product_Tab(prod.id, title="View" + title + " Engagement", tab="engagements")
    product_tab.setEngagement(eng)
    return render(
        request, 'dojo/view_eng.html', {
            'eng': eng,
            'product_tab': product_tab,
            'system_settings': system_settings,
            'tests': paged_tests,
            'filter': tests_filter,
            'check': check,
            'threat': eng.tmodel_path,
            'form': form,
            'notes': notes,
            'files': files,
            'risks_accepted': risks_accepted,
            'jissue': jissue,
            'jira_project': jira_project,
            'creds': creds,
            'cred_eng': cred_eng,
            'network': network,
            'preset_test_type': preset_test_type
        })
def prefetch_for_view_tests(tests):
    """Attach the related objects and per-test counts the test list template needs.

    Adds select/prefetch of lead, tags, test_type and notes, plus annotated
    finding counts (all / active / active+verified / mitigated / duplicates)
    and the number of reimports. Only applies to a QuerySet; anything else is
    returned unchanged with a warning.
    """
    prefetched = tests
    if isinstance(tests,
                  QuerySet):  # old code can arrive here with prods being a list because the query was already executed
        prefetched = prefetched.select_related('lead')
        prefetched = prefetched.prefetch_related('tags', 'test_type', 'notes')
        prefetched = prefetched.annotate(count_findings_test_all=Count('finding__id', distinct=True))
        prefetched = prefetched.annotate(count_findings_test_active=Count('finding__id', filter=Q(finding__active=True), distinct=True))
        prefetched = prefetched.annotate(count_findings_test_active_verified=Count('finding__id', filter=Q(finding__active=True) & Q(finding__verified=True), distinct=True))
        prefetched = prefetched.annotate(count_findings_test_mitigated=Count('finding__id', filter=Q(finding__is_mitigated=True), distinct=True))
        prefetched = prefetched.annotate(count_findings_test_dups=Count('finding__id', filter=Q(finding__duplicate=True), distinct=True))
        prefetched = prefetched.annotate(total_reimport_count=Count('test_import__id', filter=Q(test_import__type=Test_Import.REIMPORT_TYPE), distinct=True))
    else:
        # Fix: logger.warn is a deprecated alias of logger.warning.
        logger.warning('unable to prefetch because query was already executed')
    return prefetched
@user_is_authorized(Engagement, Permissions.Test_Add, 'eid')
def add_tests(request, eid):
    """Add one or more tests to an engagement.

    POST validates the test form (and optional credential mapping), saves the
    new test, flips the engagement to "In Progress" when needed, and redirects
    according to which submit button was pressed.
    """
    eng = Engagement.objects.get(id=eid)
    cred_form = CredMappingForm()
    cred_form.fields["cred_user"].queryset = Cred_Mapping.objects.filter(
        engagement=eng).order_by('cred_id')
    if request.method == 'POST':
        form = TestForm(request.POST, engagement=eng)
        cred_form = CredMappingForm(request.POST)
        cred_form.fields["cred_user"].queryset = Cred_Mapping.objects.filter(
            engagement=eng).order_by('cred_id')
        if form.is_valid():
            new_test = form.save(commit=False)
            # set default scan_type as it's used in reimport
            new_test.scan_type = new_test.test_type.name
            new_test.engagement = eng
            try:
                new_test.lead = User.objects.get(id=form['lead'].value())
            except Exception:
                # Lead is optional; fall back to "no lead" on any lookup failure
                # (missing user, empty/invalid id). Was a bare `except:`, which
                # also swallowed SystemExit/KeyboardInterrupt.
                new_test.lead = None
            # Set status to in progress if a test is added
            if eng.status != "In Progress" and eng.active is True:
                eng.status = "In Progress"
                eng.save()
            new_test.save()
            # Save the credential to the test
            if cred_form.is_valid():
                if cred_form.cleaned_data['cred_user']:
                    # Select the credential mapping object from the selected list and only allow if the credential is associated with the product
                    cred_user = Cred_Mapping.objects.filter(
                        pk=cred_form.cleaned_data['cred_user'].id,
                        engagement=eid).first()
                    new_f = cred_form.save(commit=False)
                    new_f.test = new_test
                    new_f.cred_id = cred_user.cred_id
                    new_f.save()
            messages.add_message(
                request,
                messages.SUCCESS,
                'Test added successfully.',
                extra_tags='alert-success')
            notifications_helper.notify_test_created(new_test)
            if '_Add Another Test' in request.POST:
                return HttpResponseRedirect(
                    reverse('add_tests', args=(eng.id, )))
            elif '_Add Findings' in request.POST:
                return HttpResponseRedirect(
                    reverse('add_findings', args=(new_test.id, )))
            elif '_Finished' in request.POST:
                return HttpResponseRedirect(
                    reverse('view_engagement', args=(eng.id, )))
    else:
        form = TestForm(engagement=eng)
        form.initial['target_start'] = eng.target_start
        form.initial['target_end'] = eng.target_end
        form.initial['lead'] = request.user
    add_breadcrumb(
        parent=eng, title="Add Tests", top_level=False, request=request)
    product_tab = Product_Tab(eng.product.id, title="Add Tests", tab="engagements")
    product_tab.setEngagement(eng)
    return render(request, 'dojo/add_tests.html', {
        'product_tab': product_tab,
        'form': form,
        'cred_form': cred_form,
        'eid': eid,
        'eng': eng
    })
# Cant use the easy decorator because of the potential for either eid/pid being used
def import_scan_results(request, eid=None, pid=None):
    """Import a scan report into an engagement (eid) or a product (pid).

    When called with only a product id, an ad-hoc engagement is created on
    the fly to hold the imported test. Handles the import form, optional
    credential mapping, and optional JIRA push settings.
    """
    engagement = None
    form = ImportScanForm()
    cred_form = CredMappingForm()
    finding_count = 0
    jform = None
    user = request.user
    if eid:
        engagement = get_object_or_404(Engagement, id=eid)
        engagement_or_product = engagement
        cred_form.fields["cred_user"].queryset = Cred_Mapping.objects.filter(engagement=engagement).order_by('cred_id')
    elif pid:
        product = get_object_or_404(Product, id=pid)
        engagement_or_product = product
    elif not user.is_staff:
        raise PermissionDenied
    user_has_permission_or_403(user, engagement_or_product, Permissions.Import_Scan_Result)
    push_all_jira_issues = jira_helper.is_push_all_issues(engagement_or_product)
    if request.method == "POST":
        form = ImportScanForm(request.POST, request.FILES)
        cred_form = CredMappingForm(request.POST)
        cred_form.fields["cred_user"].queryset = Cred_Mapping.objects.filter(
            engagement=engagement).order_by('cred_id')
        if jira_helper.get_jira_project(engagement_or_product):
            jform = JIRAImportScanForm(request.POST, push_all=push_all_jira_issues, prefix='jiraform')
            logger.debug('jform valid: %s', jform.is_valid())
            logger.debug('jform errors: %s', jform.errors)
        if form.is_valid() and (jform is None or jform.is_valid()):
            scan = request.FILES.get('file', None)
            scan_date = form.cleaned_data['scan_date']
            minimum_severity = form.cleaned_data['minimum_severity']
            active = form.cleaned_data['active']
            verified = form.cleaned_data['verified']
            scan_type = request.POST['scan_type']
            tags = form.cleaned_data['tags']
            version = form.cleaned_data['version']
            branch_tag = form.cleaned_data.get('branch_tag', None)
            build_id = form.cleaned_data.get('build_id', None)
            commit_hash = form.cleaned_data.get('commit_hash', None)
            api_scan_configuration = form.cleaned_data.get('api_scan_configuration', None)
            service = form.cleaned_data.get('service', None)
            close_old_findings = form.cleaned_data.get('close_old_findings', None)
            # Will save in the provided environment or in the `Development` one if absent
            environment_id = request.POST.get('environment', 'Development')
            environment = Development_Environment.objects.get(id=environment_id)
            group_by = form.cleaned_data.get('group_by', None)
            # TODO move to form validation?
            if scan and is_scan_file_too_large(scan):
                messages.add_message(request,
                                     messages.ERROR,
                                     "Report file is too large. Maximum supported size is {} MB".format(settings.SCAN_FILE_MAX_SIZE),
                                     extra_tags='alert-danger')
                # NOTE(review): args=(engagement,) passes the model instance, not
                # its id, into reverse(); confirm this builds the intended URL.
                return HttpResponseRedirect(reverse('import_scan_results', args=(engagement,)))
            # Allows for a test to be imported with an engagement created on the fly
            if engagement is None:
                engagement = Engagement()
                engagement.name = "AdHoc Import - " + strftime("%a, %d %b %Y %X", timezone.now().timetuple())
                engagement.threat_model = False
                engagement.api_test = False
                engagement.pen_test = False
                engagement.check_list = False
                engagement.target_start = timezone.now().date()
                engagement.target_end = timezone.now().date()
                engagement.product = product
                engagement.active = True
                engagement.status = 'In Progress'
                engagement.version = version
                engagement.branch_tag = branch_tag
                engagement.build_id = build_id
                engagement.commit_hash = commit_hash
                engagement.save()
            # can't use helper as when push_all_jira_issues is True, the checkbox gets disabled and is always false
            # push_to_jira = jira_helper.is_push_to_jira(new_finding, jform.cleaned_data.get('push_to_jira'))
            push_to_jira = push_all_jira_issues or (jform and jform.cleaned_data.get('push_to_jira'))
            error = False
            # Save newly added endpoints
            added_endpoints = save_endpoints_to_add(form.endpoints_to_add_list, engagement.product)
            try:
                importer = Importer()
                test, finding_count, closed_finding_count = importer.import_scan(scan, scan_type, engagement, user, environment, active=active, verified=verified, tags=tags,
                                                                                 minimum_severity=minimum_severity, endpoints_to_add=list(form.cleaned_data['endpoints']) + added_endpoints, scan_date=scan_date,
                                                                                 version=version, branch_tag=branch_tag, build_id=build_id, commit_hash=commit_hash, push_to_jira=push_to_jira,
                                                                                 close_old_findings=close_old_findings, group_by=group_by, api_scan_configuration=api_scan_configuration, service=service)
                message = f'{scan_type} processed a total of {finding_count} findings'
                if close_old_findings:
                    message = message + ' and closed %d findings' % (closed_finding_count)
                message = message + "."
                add_success_message_to_response(message)
            except Exception as e:
                logger.exception(e)
                add_error_message_to_response('An exception error occurred during the report import:%s' % str(e))
                error = True
            # Save the credential to the test
            if cred_form.is_valid():
                if cred_form.cleaned_data['cred_user']:
                    # Select the credential mapping object from the selected list and only allow if the credential is associated with the product
                    cred_user = Cred_Mapping.objects.filter(
                        pk=cred_form.cleaned_data['cred_user'].id,
                        engagement=eid).first()
                    new_f = cred_form.save(commit=False)
                    new_f.test = test
                    new_f.cred_id = cred_user.cred_id
                    new_f.save()
            if not error:
                # NOTE(review): when importing at engagement level `pid` is None
                # here — confirm this redirect target resolves as intended.
                return HttpResponseRedirect(
                    reverse('product_open_findings', args=(pid, )))
    # GET (or invalid POST): render the import form.
    prod_id = None
    custom_breadcrumb = None
    title = "Import Scan Results"
    if engagement:
        prod_id = engagement.product.id
        product_tab = Product_Tab(prod_id, title=title, tab="engagements")
        product_tab.setEngagement(engagement)
    else:
        prod_id = pid
        custom_breadcrumb = {"", ""}
        product_tab = Product_Tab(prod_id, title=title, tab="findings")
    if jira_helper.get_jira_project(engagement_or_product):
        jform = JIRAImportScanForm(push_all=push_all_jira_issues, prefix='jiraform')
    form.fields['endpoints'].queryset = Endpoint.objects.filter(product__id=product_tab.product.id)
    form.fields['api_scan_configuration'].queryset = Product_API_Scan_Configuration.objects.filter(product__id=product_tab.product.id)
    return render(request,
                  'dojo/import_scan_results.html',
                  {'form': form,
                   'product_tab': product_tab,
                   'engagement_or_product': engagement_or_product,
                   'custom_breadcrumb': custom_breadcrumb,
                   'title': title,
                   'cred_form': cred_form,
                   'jform': jform,
                   'scan_types': get_scan_types_sorted(),
                   })
@user_is_authorized(Engagement, Permissions.Engagement_Edit, 'eid')
def close_eng(request, eid):
    """Close an engagement, notify subscribers, and return to the product's engagement list."""
    eng = Engagement.objects.get(id=eid)
    close_engagement(eng)
    messages.add_message(
        request,
        messages.SUCCESS,
        'Engagement closed successfully.',
        extra_tags='alert-success')
    # Fix: dropped the stray trailing comma that turned this call into a
    # discarded 1-tuple expression.
    create_notification(event='close_engagement',
                        title='Closure of %s' % eng.name,
                        description='The engagement "%s" was closed' % (eng.name),
                        engagement=eng, url=reverse('engagement_all_findings', args=(eng.id, )))
    return HttpResponseRedirect(reverse("view_engagements", args=(eng.product.id, )))
@user_is_authorized(Engagement, Permissions.Engagement_Edit, 'eid')
def reopen_eng(request, eid):
    """Reopen a closed engagement, notify subscribers, and return to the product's engagement list."""
    eng = Engagement.objects.get(id=eid)
    reopen_engagement(eng)
    messages.add_message(
        request,
        messages.SUCCESS,
        'Engagement reopened successfully.',
        extra_tags='alert-success')
    # Fix: dropped the stray trailing comma that turned this call into a
    # discarded 1-tuple expression.
    create_notification(event='other',
                        title='Reopening of %s' % eng.name,
                        engagement=eng,
                        description='The engagement "%s" was reopened' % (eng.name),
                        url=reverse('view_engagement', args=(eng.id, )))
    return HttpResponseRedirect(reverse("view_engagements", args=(eng.product.id, )))
"""
Greg:
status: in production
method to complete checklists from the engagement view
"""
@user_is_authorized(Engagement, Permissions.Engagement_Edit, 'eid')
def complete_checklist(request, eid):
    """Fill in (or update) the checklist for an engagement.

    GET shows the checklist form pre-populated with the existing checklist,
    if any; POST saves it, reusing the existing Check_List row when present.
    """
    eng = get_object_or_404(Engagement, id=eid)
    try:
        checklist = Check_List.objects.get(engagement=eng)
    except Exception:
        # No checklist yet (or lookup failed): start from a blank form.
        # (Was a bare `except:`, which also swallowed SystemExit/KeyboardInterrupt.)
        checklist = None
    add_breadcrumb(
        parent=eng,
        title="Complete checklist",
        top_level=False,
        request=request)
    if request.method == 'POST':
        tests = Test.objects.filter(engagement=eng)
        findings = Finding.objects.filter(test__in=tests).all()
        form = CheckForm(request.POST, instance=checklist, findings=findings)
        if form.is_valid():
            cl = form.save(commit=False)
            try:
                # Reuse the primary key of an existing checklist so we update
                # in place instead of inserting a duplicate row.
                check_l = Check_List.objects.get(engagement=eng)
                cl.id = check_l.id
                cl.save()
                form.save_m2m()
            except Exception:
                # First checklist for this engagement: create a new row.
                # (Was a bare `except:`.)
                cl.engagement = eng
                cl.save()
                form.save_m2m()
            messages.add_message(
                request,
                messages.SUCCESS,
                'Checklist saved.',
                extra_tags='alert-success')
            return HttpResponseRedirect(
                reverse('view_engagement', args=(eid, )))
    else:
        tests = Test.objects.filter(engagement=eng)
        findings = Finding.objects.filter(test__in=tests).all()
        form = CheckForm(instance=checklist, findings=findings)
    product_tab = Product_Tab(eng.product.id, title="Checklist", tab="engagements")
    product_tab.setEngagement(eng)
    return render(request, 'dojo/checklist.html', {
        'form': form,
        'product_tab': product_tab,
        'eid': eng.id,
        'findings': findings,
    })
@user_is_authorized(Engagement, Permissions.Risk_Acceptance, 'eid')
def add_risk_acceptance(request, eid, fid=None):
    """Create a risk acceptance on an engagement, optionally pre-selecting finding *fid*.

    Requires the product to have full risk acceptance enabled. On POST the
    acceptance is saved, attached to the engagement, and the selected
    findings are marked accepted.
    """
    eng = get_object_or_404(Engagement, id=eid)
    finding = None
    if fid:
        finding = get_object_or_404(Finding, id=fid)
    if not eng.product.enable_full_risk_acceptance:
        raise PermissionDenied()
    if request.method == 'POST':
        form = RiskAcceptanceForm(request.POST, request.FILES)
        if form.is_valid():
            # first capture notes param as it cannot be saved directly as m2m
            notes = None
            if form.cleaned_data['notes']:
                notes = Notes(
                    entry=form.cleaned_data['notes'],
                    author=request.user,
                    date=timezone.now())
                notes.save()
            del form.cleaned_data['notes']
            try:
                # we sometimes see a weird exception here, but are unable to reproduce.
                # we add some logging in case it happens
                risk_acceptance = form.save()
            except Exception as e:
                logger.debug(vars(request.POST))
                logger.error(vars(form))
                logger.exception(e)
                raise
            # attach note to risk acceptance object now in database
            if notes:
                risk_acceptance.notes.add(notes)
            eng.risk_acceptance.add(risk_acceptance)
            findings = form.cleaned_data['accepted_findings']
            risk_acceptance = ra_helper.add_findings_to_risk_acceptance(risk_acceptance, findings)
            messages.add_message(
                request,
                messages.SUCCESS,
                'Risk acceptance saved.',
                extra_tags='alert-success')
            return redirect_to_return_url_or_else(request, reverse('view_engagement', args=(eid, )))
    else:
        risk_acceptance_title_suggestion = 'Accept: %s' % finding
        form = RiskAcceptanceForm(initial={'owner': request.user, 'name': risk_acceptance_title_suggestion})
    # Only non-duplicate, not-yet-accepted findings of this engagement are selectable.
    finding_choices = Finding.objects.filter(duplicate=False, test__engagement=eng).filter(NOT_ACCEPTED_FINDINGS_QUERY).order_by('title')
    form.fields['accepted_findings'].queryset = finding_choices
    if fid:
        form.fields['accepted_findings'].initial = {fid}
    product_tab = Product_Tab(eng.product.id, title="Risk Acceptance", tab="engagements")
    product_tab.setEngagement(eng)
    return render(request, 'dojo/add_risk_acceptance.html', {
        'eng': eng,
        'product_tab': product_tab,
        'form': form
    })
@user_is_authorized(Engagement, Permissions.Engagement_View, 'eid')
def view_risk_acceptance(request, eid, raid):
    """Read-only page for a risk acceptance; thin wrapper over the shared view."""
    return view_edit_risk_acceptance(request, eid, raid, edit_mode=False)
@user_is_authorized(Engagement, Permissions.Risk_Acceptance, 'eid')
def edit_risk_acceptance(request, eid, raid):
    """Editable page for a risk acceptance; thin wrapper over the shared view."""
    return view_edit_risk_acceptance(request, eid, raid, edit_mode=True)
# will only be called by view_risk_acceptance and edit_risk_acceptance
def view_edit_risk_acceptance(request, eid, raid, edit_mode=False):
    """Shared implementation behind the view and edit risk-acceptance pages.

    On POST it dispatches on which submit control was used: decision edit,
    note add/delete, removing a finding, replacing the proof file, or adding
    findings. When *edit_mode* is False only read-only widgets are rendered.
    """
    risk_acceptance = get_object_or_404(Risk_Acceptance, pk=raid)
    eng = get_object_or_404(Engagement, pk=eid)
    if edit_mode and not eng.product.enable_full_risk_acceptance:
        raise PermissionDenied()
    risk_acceptance_form = None
    errors = False
    if request.method == 'POST':
        # deleting before instantiating the form otherwise django messes up and we end up with an empty path value
        if len(request.FILES) > 0:
            logger.debug('new proof uploaded')
            risk_acceptance.path.delete()
        if 'decision' in request.POST:
            old_expiration_date = risk_acceptance.expiration_date
            risk_acceptance_form = EditRiskAcceptanceForm(request.POST, request.FILES, instance=risk_acceptance)
            errors = errors or not risk_acceptance_form.is_valid()
            if not errors:
                logger.debug('path: %s', risk_acceptance_form.cleaned_data['path'])
                risk_acceptance_form.save()
                if risk_acceptance.expiration_date != old_expiration_date:
                    # risk acceptance was changed, check if risk acceptance needs to be reinstated and findings made accepted again
                    ra_helper.reinstate(risk_acceptance, old_expiration_date)
                messages.add_message(
                    request,
                    messages.SUCCESS,
                    'Risk Acceptance saved successfully.',
                    extra_tags='alert-success')
        if 'entry' in request.POST:
            note_form = NoteForm(request.POST)
            errors = errors or not note_form.is_valid()
            if not errors:
                new_note = note_form.save(commit=False)
                new_note.author = request.user
                new_note.date = timezone.now()
                new_note.save()
                risk_acceptance.notes.add(new_note)
                messages.add_message(
                    request,
                    messages.SUCCESS,
                    'Note added successfully.',
                    extra_tags='alert-success')
        if 'delete_note' in request.POST:
            note = get_object_or_404(Notes, pk=request.POST['delete_note_id'])
            # Only the note's author may delete it.
            if note.author.username == request.user.username:
                risk_acceptance.notes.remove(note)
                note.delete()
                messages.add_message(
                    request,
                    messages.SUCCESS,
                    'Note deleted successfully.',
                    extra_tags='alert-success')
            else:
                messages.add_message(
                    request,
                    messages.ERROR,
                    "Since you are not the note's author, it was not deleted.",
                    extra_tags='alert-danger')
        if 'remove_finding' in request.POST:
            finding = get_object_or_404(
                Finding, pk=request.POST['remove_finding_id'])
            ra_helper.remove_finding_from_risk_acceptance(risk_acceptance, finding)
            messages.add_message(
                request,
                messages.SUCCESS,
                'Finding removed successfully from risk acceptance.',
                extra_tags='alert-success')
        if 'replace_file' in request.POST:
            replace_form = ReplaceRiskAcceptanceProofForm(
                request.POST, request.FILES, instance=risk_acceptance)
            errors = errors or not replace_form.is_valid()
            if not errors:
                replace_form.save()
                messages.add_message(
                    request,
                    messages.SUCCESS,
                    'New Proof uploaded successfully.',
                    extra_tags='alert-success')
            else:
                logger.error(replace_form.errors)
        if 'add_findings' in request.POST:
            add_findings_form = AddFindingsRiskAcceptanceForm(
                request.POST, request.FILES, instance=risk_acceptance)
            errors = errors or not add_findings_form.is_valid()
            if not errors:
                findings = add_findings_form.cleaned_data['accepted_findings']
                ra_helper.add_findings_to_risk_acceptance(risk_acceptance, findings)
                messages.add_message(
                    request,
                    messages.SUCCESS,
                    'Finding%s added successfully.' % ('s' if len(findings) > 1
                                                      else ''),
                    extra_tags='alert-success')
        if not errors:
            logger.debug('redirecting to return_url')
            return redirect_to_return_url_or_else(request, reverse("view_risk_acceptance", args=(eid, raid)))
        else:
            logger.error('errors found')
    else:
        if edit_mode:
            risk_acceptance_form = EditRiskAcceptanceForm(instance=risk_acceptance)
    note_form = NoteForm()
    replace_form = ReplaceRiskAcceptanceProofForm(instance=risk_acceptance)
    add_findings_form = AddFindingsRiskAcceptanceForm(instance=risk_acceptance)
    accepted_findings = risk_acceptance.accepted_findings.order_by('numerical_severity')
    fpage = get_page_items(request, accepted_findings, 15)
    unaccepted_findings = Finding.objects.filter(test__in=eng.test_set.all()) \
        .exclude(id__in=accepted_findings).order_by("title")
    add_fpage = get_page_items(request, unaccepted_findings, 10, 'apage')
    # on this page we need to add unaccepted findings as possible findings to add as accepted
    add_findings_form.fields[
        "accepted_findings"].queryset = add_fpage.object_list
    product_tab = Product_Tab(eng.product.id, title="Risk Acceptance", tab="engagements")
    product_tab.setEngagement(eng)
    return render(
        request, 'dojo/view_risk_acceptance.html', {
            'risk_acceptance': risk_acceptance,
            'engagement': eng,
            'product_tab': product_tab,
            'accepted_findings': fpage,
            'notes': risk_acceptance.notes.all(),
            'eng': eng,
            'edit_mode': edit_mode,
            'risk_acceptance_form': risk_acceptance_form,
            'note_form': note_form,
            'replace_form': replace_form,
            'add_findings_form': add_findings_form,
            # 'show_add_findings_form': len(unaccepted_findings),
            'request': request,
            'add_findings': add_fpage,
            'return_url': get_return_url(request),
        })
@user_is_authorized(Engagement, Permissions.Risk_Acceptance, 'eid')
def expire_risk_acceptance(request, eid, raid):
    """Immediately expire a risk acceptance, then bounce back to its detail page."""
    acceptance = get_object_or_404(prefetch_for_expiration(Risk_Acceptance.objects.all()), pk=raid)
    # 404 if the engagement id in the URL does not exist; the object itself is unused.
    get_object_or_404(Engagement, pk=eid)
    ra_helper.expire_now(acceptance)
    return redirect_to_return_url_or_else(request, reverse("view_risk_acceptance", args=(eid, raid)))
@user_is_authorized(Engagement, Permissions.Risk_Acceptance, 'eid')
def reinstate_risk_acceptance(request, eid, raid):
    """Re-activate an expired risk acceptance, then return to its detail page."""
    acceptance = get_object_or_404(prefetch_for_expiration(Risk_Acceptance.objects.all()), pk=raid)
    engagement = get_object_or_404(Engagement, pk=eid)
    # Reinstating is only allowed when the product enables full risk acceptance.
    if not engagement.product.enable_full_risk_acceptance:
        raise PermissionDenied()
    ra_helper.reinstate(acceptance, acceptance.expiration_date)
    return redirect_to_return_url_or_else(request, reverse("view_risk_acceptance", args=(eid, raid)))
@user_is_authorized(Engagement, Permissions.Risk_Acceptance, 'eid')
def delete_risk_acceptance(request, eid, raid):
    """Delete a risk acceptance from its engagement and report success."""
    acceptance = get_object_or_404(Risk_Acceptance, pk=raid)
    engagement = get_object_or_404(Engagement, pk=eid)
    ra_helper.delete(engagement, acceptance)
    messages.add_message(
        request,
        messages.SUCCESS,
        'Risk acceptance deleted successfully.',
        extra_tags='alert-success')
    return HttpResponseRedirect(reverse("view_engagement", args=(engagement.id, )))
@user_is_authorized(Engagement, Permissions.Engagement_View, 'eid')
def download_risk_acceptance(request, eid, raid):
    """Stream the proof file attached to a risk acceptance as a download."""
    import mimetypes
    mimetypes.init()
    acceptance = get_object_or_404(Risk_Acceptance, pk=raid)
    proof_path = settings.MEDIA_ROOT + "/" + acceptance.path.name
    proof_file = open(proof_path, mode='rb')
    response = StreamingHttpResponse(FileIterWrapper(proof_file))
    response['Content-Disposition'] = 'attachment; filename="%s"' % acceptance.filename()
    guessed_type, _encoding = mimetypes.guess_type(acceptance.path.name)
    response['Content-Type'] = guessed_type
    return response
"""
Greg
status: in production
Upload a threat model at the engagement level. Threat models are stored
under media folder
"""
@user_is_authorized(Engagement, Permissions.Engagement_Edit, 'eid')
def upload_threatmodel(request, eid):
    """Upload a threat-model document for an engagement (stored under media)."""
    eng = Engagement.objects.get(id=eid)
    add_breadcrumb(
        parent=eng,
        title="Upload a threat model",
        top_level=False,
        request=request)
    if request.method != 'POST':
        form = UploadThreatForm()
    else:
        form = UploadThreatForm(request.POST, request.FILES)
        if form.is_valid():
            handle_uploaded_threat(request.FILES['file'], eng)
            # Mark the engagement as having a threat model and move it along.
            eng.progress = 'other'
            eng.threat_model = True
            eng.save()
            messages.add_message(
                request,
                messages.SUCCESS,
                'Threat model saved.',
                extra_tags='alert-success')
            return HttpResponseRedirect(
                reverse('view_engagement', args=(eid, )))
    product_tab = Product_Tab(eng.product.id, title="Upload Threat Model", tab="engagements")
    return render(request, 'dojo/up_threat.html', {
        'form': form,
        'product_tab': product_tab,
        'eng': eng,
    })
@user_is_authorized(Engagement, Permissions.Engagement_View, 'eid')
def view_threatmodel(request, eid):
    """Serve the engagement's stored threat-model file back to the browser."""
    engagement = get_object_or_404(Engagement, pk=eid)
    return FileResponse(open(engagement.tmodel_path, 'rb'))
@user_is_authorized(Engagement, Permissions.Engagement_View, 'eid')
def engagement_ics(request, eid):
    """Serve the engagement as a downloadable iCalendar (.ics) event."""
    eng = get_object_or_404(Engagement, id=eid)
    # The event spans the whole target window, midnight to midnight.
    start = datetime.combine(eng.target_start, datetime.min.time())
    end = datetime.combine(eng.target_end, datetime.max.time())
    uid = "dojo_eng_%d_%d" % (eng.id, eng.product.id)
    summary = "Engagement: %s (%s)" % (eng.name, eng.product.name)
    description = (
        "Set aside for engagement %s, on product %s. Additional detail can be found at %s"
        % (eng.name, eng.product.name,
           request.build_absolute_uri(
               (reverse("view_engagement", args=(eng.id, ))))))
    cal = get_cal_event(start, end, summary, description, uid)
    response = HttpResponse(content=cal.serialize())
    response['Content-Type'] = 'text/calendar'
    response['Content-Disposition'] = 'attachment; filename=%s.ics' % eng.name
    return response
def get_list_index(list, index):
    """Return ``list[index]``, or None when the lookup fails.

    The parameter is named ``list`` for backwards compatibility with existing
    callers; it shadows the builtin inside this function only.

    Previously this swallowed *every* exception; now only the failures a
    subscript lookup can legitimately raise are converted to None.
    """
    try:
        return list[index]
    except (IndexError, TypeError, KeyError):
        return None
def get_engagements(request):
    """Resolve an engagement-overview URL (passed in the query string) into the
    matching engagement queryset and its per-engagement test counts.

    Raises ValidationError when the query string is missing or does not point
    at an engagement view.
    """
    url = request.META.get('QUERY_STRING')
    if not url:
        raise ValidationError('Please use the export button when exporting engagements')
    if url.startswith('url='):
        url = url[4:]

    # Raw string so `\?` is a proper regex escape (was an invalid escape
    # sequence silenced with `# noqa W605`).
    path_items = list(filter(None, re.split(r'/|\?', url)))

    if not path_items or path_items[0] != 'engagement':
        raise ValidationError('URL is not an engagement view')

    # URLs look like engagement[/<view>][?<filter-query>]; default view is 'active'.
    view = query = None
    if get_list_index(path_items, 1) in ['active', 'all']:
        view = get_list_index(path_items, 1)
        query = get_list_index(path_items, 2)
    else:
        view = 'active'
        query = get_list_index(path_items, 1)

    # Replay the captured filter query as if it were this request's GET data.
    request.GET = QueryDict(query)
    engagements = get_filtered_engagements(request, view).qs
    test_counts = get_test_counts(engagements)
    return engagements, test_counts
def get_excludes():
    """Engagement attributes that must never appear in CSV/Excel exports."""
    excluded_attributes = [
        'is_ci_cd',
        'jira_issue',
        'jira_project',
        'objects',
        'unaccepted_open_findings',
    ]
    return excluded_attributes
def get_foreign_keys():
    """Engagement attributes that are foreign keys and are exported as str(value)."""
    fk_attributes = [
        'build_server',
        'lead',
        'orchestration_engine',
        'preset',
        'product',
        'report_type',
        'requester',
        'source_code_management_server',
    ]
    return fk_attributes
def csv_export(request):
    """Export the engagements selected by the request's query string as CSV.

    The header row is derived from the first engagement's non-excluded,
    non-callable public attributes (via dir()), plus a trailing 'tests'
    count column; data rows must therefore use the same attribute order.
    """
    engagements, test_counts = get_engagements(request)

    response = HttpResponse(content_type='text/csv')
    response['Content-Disposition'] = 'attachment; filename=engagements.csv'

    writer = csv.writer(response)

    first_row = True
    for engagement in engagements:
        if first_row:
            # Header row built from the first engagement's attribute names.
            fields = []
            for key in dir(engagement):
                if key not in get_excludes() and not callable(getattr(engagement, key)) and not key.startswith('_'):
                    fields.append(key)
            fields.append('tests')
            writer.writerow(fields)
            first_row = False
        if not first_row:
            fields = []
            for key in dir(engagement):
                if key not in get_excludes() and not callable(getattr(engagement, key)) and not key.startswith('_'):
                    value = engagement.__dict__.get(key)
                    # Foreign keys serialize as their str() representation.
                    if key in get_foreign_keys() and getattr(engagement, key):
                        value = str(getattr(engagement, key))
                    if value and isinstance(value, str):
                        # Keep each engagement on a single CSV line.
                        value = value.replace('\n', ' NEWLINE ').replace('\r', '')
                    fields.append(value)
            fields.append(test_counts.get(engagement.id, 0))
            writer.writerow(fields)

    return response
def excel_export(request):
    """Export the engagements selected by the request's query string as an
    .xlsx workbook with one 'Engagements' sheet.

    Mirrors csv_export: columns come from dir() on the first engagement,
    plus a trailing 'tests' count column.
    """
    engagements, test_counts = get_engagements(request)

    workbook = Workbook()
    workbook.iso_dates = True
    worksheet = workbook.active
    worksheet.title = 'Engagements'

    font_bold = Font(bold=True)

    row_num = 1
    for engagement in engagements:
        if row_num == 1:
            # Bold header row from the first engagement's attribute names.
            col_num = 1
            for key in dir(engagement):
                if key not in get_excludes() and not callable(getattr(engagement, key)) and not key.startswith('_'):
                    cell = worksheet.cell(row=row_num, column=col_num, value=key)
                    cell.font = font_bold
                    col_num += 1
            cell = worksheet.cell(row=row_num, column=col_num, value='tests')
            cell.font = font_bold
            row_num = 2
        if row_num > 1:
            col_num = 1
            for key in dir(engagement):
                if key not in get_excludes() and not callable(getattr(engagement, key)) and not key.startswith('_'):
                    value = engagement.__dict__.get(key)
                    # Foreign keys serialize as their str() representation.
                    if key in get_foreign_keys() and getattr(engagement, key):
                        value = str(getattr(engagement, key))
                    if value and isinstance(value, datetime):
                        # openpyxl cannot store timezone-aware datetimes.
                        value = value.replace(tzinfo=None)
                    worksheet.cell(row=row_num, column=col_num, value=value)
                    col_num += 1
            worksheet.cell(row=row_num, column=col_num, value=test_counts.get(engagement.id, 0))
        row_num += 1

    # Save to a temp file; openpyxl needs a real path, then stream the bytes.
    with NamedTemporaryFile() as tmp:
        workbook.save(tmp.name)
        tmp.seek(0)
        stream = tmp.read()

    response = HttpResponse(
        content=stream,
        content_type='application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
    )
    response['Content-Disposition'] = 'attachment; filename=engagements.xlsx'
    return response
| 40.547544 | 182 | 0.643431 | import logging
import csv
import re
from openpyxl import Workbook
from openpyxl.styles import Font
from tempfile import NamedTemporaryFile
from datetime import datetime
import operator
from django.contrib.auth.models import User
from django.conf import settings
from django.contrib import messages
from django.core.exceptions import ValidationError, PermissionDenied
from django.urls import reverse
from django.db.models import Q, Count
from django.http import HttpResponseRedirect, StreamingHttpResponse, HttpResponse, FileResponse, QueryDict
from django.shortcuts import render, get_object_or_404
from django.views.decorators.cache import cache_page
from django.utils import timezone
from time import strftime
from django.contrib.admin.utils import NestedObjects
from django.db import DEFAULT_DB_ALIAS
from dojo.engagement.services import close_engagement, reopen_engagement
from dojo.filters import EngagementFilter, EngagementDirectFilter, EngagementTestFilter
from dojo.forms import CheckForm, \
UploadThreatForm, RiskAcceptanceForm, NoteForm, DoneForm, \
EngForm, TestForm, ReplaceRiskAcceptanceProofForm, AddFindingsRiskAcceptanceForm, DeleteEngagementForm, ImportScanForm, \
CredMappingForm, JIRAEngagementForm, JIRAImportScanForm, TypedNoteForm, JIRAProjectForm, \
EditRiskAcceptanceForm
from dojo.models import Finding, Product, Engagement, Test, \
Check_List, Test_Import, Notes, \
Risk_Acceptance, Development_Environment, Endpoint, \
Cred_Mapping, Dojo_User, System_Settings, Note_Type, Product_API_Scan_Configuration
from dojo.tools.factory import get_scan_types_sorted
from dojo.utils import add_error_message_to_response, add_success_message_to_response, get_page_items, add_breadcrumb, handle_uploaded_threat, \
FileIterWrapper, get_cal_event, Product_Tab, is_scan_file_too_large, \
get_system_setting, redirect_to_return_url_or_else, get_return_url
from dojo.notifications.helper import create_notification
from dojo.finding.views import find_available_notetypes
from functools import reduce
from django.db.models.query import Prefetch, QuerySet
import dojo.jira_link.helper as jira_helper
import dojo.risk_acceptance.helper as ra_helper
from dojo.risk_acceptance.helper import prefetch_for_expiration
from dojo.finding.helper import NOT_ACCEPTED_FINDINGS_QUERY
from django.views.decorators.vary import vary_on_cookie
from dojo.authorization.authorization import user_has_permission_or_403
from dojo.authorization.roles_permissions import Permissions
from dojo.product.queries import get_authorized_products
from dojo.engagement.queries import get_authorized_engagements
from dojo.authorization.authorization_decorators import user_is_authorized
from dojo.importers.importer.importer import DojoDefaultImporter as Importer
import dojo.notifications.helper as notifications_helper
from dojo.endpoint.utils import save_endpoints_to_add
logger = logging.getLogger(__name__)
@cache_page(60 * 5)  # cache the rendered calendar for 5 minutes
@vary_on_cookie
def engagement_calendar(request):
    """Render the engagement calendar, optionally filtered by lead.

    Lead filter semantics: '0' (or no filter) means all engagements the user
    may view; '-1' selects engagements without a lead; otherwise the selected
    lead ids are OR-ed together.
    """
    if 'lead' not in request.GET or '0' in request.GET.getlist('lead'):
        engagements = get_authorized_engagements(Permissions.Engagement_View)
    else:
        filters = []
        leads = request.GET.getlist('lead', '')
        if '-1' in request.GET.getlist('lead'):
            leads.remove('-1')
            filters.append(Q(lead__isnull=True))
        filters.append(Q(lead__in=leads))
        engagements = get_authorized_engagements(Permissions.Engagement_View).filter(reduce(operator.or_, filters))

    engagements = engagements.select_related('lead')
    engagements = engagements.prefetch_related('product')

    add_breadcrumb(
        title="Engagement Calendar", top_level=True, request=request)
    return render(
        request, 'dojo/calendar.html', {
            'caltype': 'engagements',
            'leads': request.GET.getlist('lead', ''),
            'engagements': engagements,
            'users': Dojo_User.objects.all()
        })
def get_filtered_engagements(request, view):
    """Build an EngagementDirectFilter over the engagements the user may view.

    ``view`` must be 'all' or 'active'; 'active' narrows to active engagements.
    """
    if view not in ['all', 'active']:
        raise ValidationError(f'View {view} is not allowed')

    queryset = get_authorized_engagements(Permissions.Engagement_View).order_by('-target_start')
    if view == 'active':
        queryset = queryset.filter(active=True)

    queryset = queryset.select_related('product', 'product__prod_type') \
        .prefetch_related('lead', 'tags', 'product__tags')

    # Only pull JIRA relations when the integration is switched on.
    if System_Settings.objects.get().enable_jira:
        queryset = queryset.prefetch_related(
            'jira_project__jira_instance',
            'product__jira_project_set__jira_instance'
        )

    return EngagementDirectFilter(request.GET, queryset=queryset)
def get_test_counts(engagements):
    """Map engagement id -> number of tests, computed in one grouped query."""
    rows = (
        Test.objects.filter(engagement__in=engagements)
        .values('engagement')
        .annotate(test_count=Count('engagement'))
    )
    return {row['engagement']: row['test_count'] for row in rows}
def engagements(request, view):
    """List engagements for the given view ('active' by default, or 'all'),
    with filtering and 25-per-page pagination."""
    if not view:
        view = 'active'

    filtered_engagements = get_filtered_engagements(request, view)

    engs = get_page_items(request, filtered_engagements.qs, 25)
    # Word lists feed the autocomplete widgets of the filter form.
    product_name_words = sorted(get_authorized_products(Permissions.Product_View).values_list('name', flat=True))
    engagement_name_words = sorted(get_authorized_engagements(Permissions.Engagement_View).values_list('name', flat=True).distinct())

    add_breadcrumb(
        title=f"{view.capitalize()} Engagements",
        top_level=not len(request.GET),
        request=request)

    return render(
        request, 'dojo/engagement.html', {
            'engagements': engs,
            'engagement_test_counts': get_test_counts(filtered_engagements.qs),
            'filter_form': filtered_engagements.form,
            'product_name_words': product_name_words,
            'engagement_name_words': engagement_name_words,
            'view': view.capitalize(),
        })
def engagements_all(request):
    """Show every product that has at least one engagement, with the
    engagements (and their test counts) nested under each product."""
    products_with_engagements = get_authorized_products(Permissions.Engagement_View)
    products_with_engagements = products_with_engagements.filter(~Q(engagement=None)).distinct()

    # Annotate each prefetched engagement with its test count in one pass.
    filter_qs = products_with_engagements.prefetch_related(
        Prefetch('engagement_set', queryset=Engagement.objects.all().annotate(test_count=Count('test__id')))
    )

    filter_qs = filter_qs.prefetch_related(
        'engagement_set__tags',
        'prod_type',
        'engagement_set__lead',
        'tags',
    )
    # Only pull JIRA relations when the integration is switched on.
    if System_Settings.objects.get().enable_jira:
        filter_qs = filter_qs.prefetch_related(
            'engagement_set__jira_project__jira_instance',
            'jira_project_set__jira_instance'
        )

    filtered = EngagementFilter(
        request.GET,
        queryset=filter_qs
    )

    prods = get_page_items(request, filtered.qs, 25)

    # Word lists feed the autocomplete widgets of the filter form.
    name_words = products_with_engagements.values_list('name', flat=True)
    eng_words = get_authorized_engagements(Permissions.Engagement_View).values_list('name', flat=True).distinct()

    add_breadcrumb(
        title="All Engagements",
        top_level=not len(request.GET),
        request=request)

    return render(
        request, 'dojo/engagements_all.html', {
            'products': prods,
            'filter_form': filtered.form,
            'name_words': sorted(set(name_words)),
            'eng_words': sorted(set(eng_words)),
        })
@user_is_authorized(Engagement, Permissions.Engagement_Edit, 'eid')
def edit_engagement(request, eid):
    """Edit an engagement; also processes the related JIRA project/epic forms.

    Setting the status to 'Cancelled' or 'Completed' deactivates the
    engagement and sends a closure notification; any other status keeps
    (or makes) it active.
    """
    engagement = Engagement.objects.get(pk=eid)
    is_ci_cd = engagement.engagement_type == "CI/CD"
    jira_project_form = None
    jira_epic_form = None
    jira_project = None
    # NOTE(review): jira_error is assigned but never read in this view.
    jira_error = False

    if request.method == 'POST':
        form = EngForm(request.POST, instance=engagement, cicd=is_ci_cd, product=engagement.product, user=request.user)
        jira_project = jira_helper.get_jira_project(engagement, use_inheritance=False)

        if form.is_valid():
            # first save engagement details
            new_status = form.cleaned_data.get('status')
            engagement = form.save(commit=False)
            if (new_status == "Cancelled" or new_status == "Completed"):
                engagement.active = False
                # NOTE(review): the trailing comma below makes this statement a
                # discarded 1-tuple — harmless, but the comma should be removed.
                create_notification(event='close_engagement',
                                    title='Closure of %s' % engagement.name,
                                    description='The engagement "%s" was closed' % (engagement.name),
                                    engagement=engagement, url=reverse('engagement_all_findings', args=(engagement.id, ))),
            else:
                engagement.active = True
            engagement.save()
            form.save_m2m()

            messages.add_message(
                request,
                messages.SUCCESS,
                'Engagement updated successfully.',
                extra_tags='alert-success')

            # Process the JIRA forms; success flags are folded into `error`
            # so a JIRA failure re-renders the page instead of redirecting.
            success, jira_project_form = jira_helper.process_jira_project_form(request, instance=jira_project, target='engagement', engagement=engagement, product=engagement.product)
            error = not success

            success, jira_epic_form = jira_helper.process_jira_epic_form(request, engagement=engagement)
            error = error or not success

            if not error:
                if '_Add Tests' in request.POST:
                    return HttpResponseRedirect(
                        reverse('add_tests', args=(engagement.id, )))
                else:
                    return HttpResponseRedirect(
                        reverse('view_engagement', args=(engagement.id, )))
        else:
            logger.debug(form.errors)
    else:
        form = EngForm(initial={'product': engagement.product}, instance=engagement, cicd=is_ci_cd, product=engagement.product, user=request.user)

        jira_epic_form = None
        if get_system_setting('enable_jira'):
            jira_project = jira_helper.get_jira_project(engagement, use_inheritance=False)
            jira_project_form = JIRAProjectForm(instance=jira_project, target='engagement', product=engagement.product)
            logger.debug('showing jira-epic-form')
            jira_epic_form = JIRAEngagementForm(instance=engagement)

    if is_ci_cd:
        title = 'Edit CI/CD Engagement'
    else:
        title = 'Edit Interactive Engagement'

    product_tab = Product_Tab(engagement.product.id, title=title, tab="engagements")
    product_tab.setEngagement(engagement)
    return render(request, 'dojo/new_eng.html', {
        'product_tab': product_tab,
        'title': title,
        'form': form,
        'edit': True,
        'jira_epic_form': jira_epic_form,
        'jira_project_form': jira_project_form,
        'engagement': engagement,
    })
@user_is_authorized(Engagement, Permissions.Engagement_Delete, 'eid')
def delete_engagement(request, eid):
    """Confirm and perform deletion of an engagement and everything under it.

    GET (or an invalid POST) renders a confirmation page listing the related
    objects that would be removed; a valid POST deletes and notifies.
    """
    engagement = get_object_or_404(Engagement, pk=eid)
    product = engagement.product
    form = DeleteEngagementForm(instance=engagement)

    if request.method == 'POST':
        # The hidden 'id' must match the URL's engagement to guard against
        # stale or tampered forms.
        if 'id' in request.POST and str(engagement.id) == request.POST['id']:
            form = DeleteEngagementForm(request.POST, instance=engagement)
            if form.is_valid():
                product = engagement.product
                engagement.delete()
                messages.add_message(
                    request,
                    messages.SUCCESS,
                    'Engagement and relationships removed.',
                    extra_tags='alert-success')
                create_notification(event='other',
                                    title='Deletion of %s' % engagement.name,
                                    product=product,
                                    description='The engagement "%s" was deleted by %s' % (engagement.name, request.user),
                                    url=request.build_absolute_uri(reverse('view_engagements', args=(product.id, ))),
                                    recipients=[engagement.lead],
                                    icon="exclamation-triangle")

                return HttpResponseRedirect(reverse("view_engagements", args=(product.id, )))

    # Collect every related object so the template can show what would be deleted.
    collector = NestedObjects(using=DEFAULT_DB_ALIAS)
    collector.collect([engagement])
    rels = collector.nested()

    product_tab = Product_Tab(product.id, title="Delete Engagement", tab="engagements")
    product_tab.setEngagement(engagement)
    return render(request, 'dojo/delete_engagement.html', {
        'product_tab': product_tab,
        'engagement': engagement,
        'form': form,
        'rels': rels,
    })
@user_is_authorized(Engagement, Permissions.Engagement_View, 'eid')
def view_engagement(request, eid):
    """Engagement detail page: tests, notes, files, risk acceptances, checklist
    and JIRA information.

    A POST adds a note to the engagement (requires Note_Add permission) and
    also advances the engagement's progress to 'check_list'.
    """
    eng = get_object_or_404(Engagement, id=eid)
    tests = eng.test_set.all().order_by('test_type__name', '-updated')
    default_page_num = 10

    tests_filter = EngagementTestFilter(request.GET, queryset=tests, engagement=eng)
    paged_tests = get_page_items(request, tests_filter.qs, default_page_num)
    # prefetch only after creating the filters to avoid https://code.djangoproject.com/ticket/23771 and https://code.djangoproject.com/ticket/25375
    paged_tests.object_list = prefetch_for_view_tests(paged_tests.object_list)

    prod = eng.product
    risks_accepted = eng.risk_acceptance.all().select_related('owner').annotate(accepted_findings_count=Count('accepted_findings__id'))
    preset_test_type = None
    network = None
    if eng.preset:
        preset_test_type = eng.preset.test_type.all()
        network = eng.preset.network_locations.all()
    system_settings = System_Settings.objects.get()

    jissue = jira_helper.get_jira_issue(eng)
    jira_project = jira_helper.get_jira_project(eng)

    try:
        check = Check_List.objects.get(engagement=eng)
    except:
        # No checklist has been filled in for this engagement yet.
        check = None
        pass

    notes = eng.notes.all()
    note_type_activation = Note_Type.objects.filter(is_active=True).count()
    if note_type_activation:
        available_note_types = find_available_notetypes(notes)
    form = DoneForm()
    files = eng.files.all()
    if request.method == 'POST':
        user_has_permission_or_403(request.user, eng, Permissions.Note_Add)
        eng.progress = 'check_list'
        eng.save()

        # Typed notes are used when any Note_Type is active; plain notes otherwise.
        if note_type_activation:
            form = TypedNoteForm(request.POST, available_note_types=available_note_types)
        else:
            form = NoteForm(request.POST)
        if form.is_valid():
            new_note = form.save(commit=False)
            new_note.author = request.user
            new_note.date = timezone.now()
            new_note.save()
            eng.notes.add(new_note)
            # Reset the form so the page renders empty after a successful add.
            if note_type_activation:
                form = TypedNoteForm(available_note_types=available_note_types)
            else:
                form = NoteForm()
            # NOTE(review): url and title below are computed but never used.
            url = request.build_absolute_uri(reverse("view_engagement", args=(eng.id,)))
            title = "Engagement: %s on %s" % (eng.name, eng.product.name)
            messages.add_message(request,
                                 messages.SUCCESS,
                                 'Note added successfully.',
                                 extra_tags='alert-success')
    else:
        if note_type_activation:
            form = TypedNoteForm(available_note_types=available_note_types)
        else:
            form = NoteForm()

    creds = Cred_Mapping.objects.filter(
        product=eng.product).select_related('cred_id').order_by('cred_id')
    cred_eng = Cred_Mapping.objects.filter(
        engagement=eng.id).select_related('cred_id').order_by('cred_id')

    add_breadcrumb(parent=eng, top_level=False, request=request)

    title = ""
    if eng.engagement_type == "CI/CD":
        title = " CI/CD"
    product_tab = Product_Tab(prod.id, title="View" + title + " Engagement", tab="engagements")
    product_tab.setEngagement(eng)
    return render(
        request, 'dojo/view_eng.html', {
            'eng': eng,
            'product_tab': product_tab,
            'system_settings': system_settings,
            'tests': paged_tests,
            'filter': tests_filter,
            'check': check,
            'threat': eng.tmodel_path,
            'form': form,
            'notes': notes,
            'files': files,
            'risks_accepted': risks_accepted,
            'jissue': jissue,
            'jira_project': jira_project,
            'creds': creds,
            'cred_eng': cred_eng,
            'network': network,
            'preset_test_type': preset_test_type
        })
def prefetch_for_view_tests(tests):
    """Attach the relations and per-test finding/import counts shown on the
    engagement view.

    Only applies when ``tests`` is still a QuerySet; lists (already-executed
    queries) are returned unchanged with a warning.
    """
    prefetched = tests
    if isinstance(tests,
                  QuerySet):  # old code can arrive here with prods being a list because the query was already executed
        prefetched = prefetched.select_related('lead')
        prefetched = prefetched.prefetch_related('tags', 'test_type', 'notes')
        prefetched = prefetched.annotate(count_findings_test_all=Count('finding__id', distinct=True))
        prefetched = prefetched.annotate(count_findings_test_active=Count('finding__id', filter=Q(finding__active=True), distinct=True))
        prefetched = prefetched.annotate(count_findings_test_active_verified=Count('finding__id', filter=Q(finding__active=True) & Q(finding__verified=True), distinct=True))
        prefetched = prefetched.annotate(count_findings_test_mitigated=Count('finding__id', filter=Q(finding__is_mitigated=True), distinct=True))
        prefetched = prefetched.annotate(count_findings_test_dups=Count('finding__id', filter=Q(finding__duplicate=True), distinct=True))
        prefetched = prefetched.annotate(total_reimport_count=Count('test_import__id', filter=Q(test_import__type=Test_Import.REIMPORT_TYPE), distinct=True))
    else:
        # logger.warn() is deprecated; logger.warning() is the supported API.
        logger.warning('unable to prefetch because query was already executed')
    return prefetched
@user_is_authorized(Engagement, Permissions.Test_Add, 'eid')
def add_tests(request, eid):
    """Add one or more tests to an engagement, optionally mapping a credential.

    The submit button name ('_Add Another Test' / '_Add Findings' /
    '_Finished') decides where the user is redirected afterwards.
    """
    eng = Engagement.objects.get(id=eid)
    cred_form = CredMappingForm()
    # Only credentials already mapped to this engagement are selectable.
    cred_form.fields["cred_user"].queryset = Cred_Mapping.objects.filter(
        engagement=eng).order_by('cred_id')

    if request.method == 'POST':
        form = TestForm(request.POST, engagement=eng)
        cred_form = CredMappingForm(request.POST)
        cred_form.fields["cred_user"].queryset = Cred_Mapping.objects.filter(
            engagement=eng).order_by('cred_id')
        if form.is_valid():
            new_test = form.save(commit=False)
            # set default scan_type as it's used in reimport
            new_test.scan_type = new_test.test_type.name
            new_test.engagement = eng
            try:
                new_test.lead = User.objects.get(id=form['lead'].value())
            except:
                # No lead selected (or the user no longer exists).
                new_test.lead = None
                pass

            # An engagement that receives tests is considered in progress.
            if eng.status != "In Progress" and eng.active is True:
                eng.status = "In Progress"
                eng.save()

            new_test.save()

            # Map the chosen credential to the new test, re-validating that it
            # really belongs to this engagement.
            if cred_form.is_valid():
                if cred_form.cleaned_data['cred_user']:
                    cred_user = Cred_Mapping.objects.filter(
                        pk=cred_form.cleaned_data['cred_user'].id,
                        engagement=eid).first()

                    new_f = cred_form.save(commit=False)
                    new_f.test = new_test
                    new_f.cred_id = cred_user.cred_id
                    new_f.save()

            messages.add_message(
                request,
                messages.SUCCESS,
                'Test added successfully.',
                extra_tags='alert-success')

            notifications_helper.notify_test_created(new_test)

            if '_Add Another Test' in request.POST:
                return HttpResponseRedirect(
                    reverse('add_tests', args=(eng.id, )))
            elif '_Add Findings' in request.POST:
                return HttpResponseRedirect(
                    reverse('add_findings', args=(new_test.id, )))
            elif '_Finished' in request.POST:
                return HttpResponseRedirect(
                    reverse('view_engagement', args=(eng.id, )))
    else:
        form = TestForm(engagement=eng)
        form.initial['target_start'] = eng.target_start
        form.initial['target_end'] = eng.target_end
        form.initial['lead'] = request.user
    add_breadcrumb(
        parent=eng, title="Add Tests", top_level=False, request=request)
    product_tab = Product_Tab(eng.product.id, title="Add Tests", tab="engagements")
    product_tab.setEngagement(eng)
    return render(request, 'dojo/add_tests.html', {
        'product_tab': product_tab,
        'form': form,
        'cred_form': cred_form,
        'eid': eid,
        'eng': eng
    })
def import_scan_results(request, eid=None, pid=None):
    """Import a scan report into an engagement (``eid``) or, product-level
    (``pid``), into a freshly created ad-hoc engagement.

    Delegates parsing and finding creation to the Importer; also handles the
    optional JIRA push form and credential mapping.
    """
    engagement = None
    form = ImportScanForm()
    cred_form = CredMappingForm()
    finding_count = 0
    jform = None
    user = request.user

    if eid:
        engagement = get_object_or_404(Engagement, id=eid)
        engagement_or_product = engagement
        cred_form.fields["cred_user"].queryset = Cred_Mapping.objects.filter(engagement=engagement).order_by('cred_id')
    elif pid:
        product = get_object_or_404(Product, id=pid)
        engagement_or_product = product
    elif not user.is_staff:
        raise PermissionDenied
    # NOTE(review): when neither eid nor pid is given and the user IS staff,
    # engagement_or_product is never assigned and the next line raises
    # NameError — confirm this code path is unreachable via URLconf.

    user_has_permission_or_403(user, engagement_or_product, Permissions.Import_Scan_Result)

    push_all_jira_issues = jira_helper.is_push_all_issues(engagement_or_product)

    if request.method == "POST":
        form = ImportScanForm(request.POST, request.FILES)
        cred_form = CredMappingForm(request.POST)
        cred_form.fields["cred_user"].queryset = Cred_Mapping.objects.filter(
            engagement=engagement).order_by('cred_id')

        if jira_helper.get_jira_project(engagement_or_product):
            jform = JIRAImportScanForm(request.POST, push_all=push_all_jira_issues, prefix='jiraform')
            logger.debug('jform valid: %s', jform.is_valid())
            logger.debug('jform errors: %s', jform.errors)

        if form.is_valid() and (jform is None or jform.is_valid()):
            scan = request.FILES.get('file', None)
            scan_date = form.cleaned_data['scan_date']
            minimum_severity = form.cleaned_data['minimum_severity']
            active = form.cleaned_data['active']
            verified = form.cleaned_data['verified']
            scan_type = request.POST['scan_type']
            tags = form.cleaned_data['tags']
            version = form.cleaned_data['version']
            branch_tag = form.cleaned_data.get('branch_tag', None)
            build_id = form.cleaned_data.get('build_id', None)
            commit_hash = form.cleaned_data.get('commit_hash', None)
            api_scan_configuration = form.cleaned_data.get('api_scan_configuration', None)
            service = form.cleaned_data.get('service', None)
            close_old_findings = form.cleaned_data.get('close_old_findings', None)
            environment_id = request.POST.get('environment', 'Development')
            environment = Development_Environment.objects.get(id=environment_id)
            group_by = form.cleaned_data.get('group_by', None)

            if scan and is_scan_file_too_large(scan):
                messages.add_message(request,
                                     messages.ERROR,
                                     "Report file is too large. Maximum supported size is {} MB".format(settings.SCAN_FILE_MAX_SIZE),
                                     extra_tags='alert-danger')
                # NOTE(review): this reverse() is passed the engagement object
                # (possibly None on the pid path) instead of its id — verify
                # the resulting URL.
                return HttpResponseRedirect(reverse('import_scan_results', args=(engagement,)))

            if engagement is None:
                # Product-level import: wrap the findings in an ad-hoc engagement.
                engagement = Engagement()
                engagement.name = "AdHoc Import - " + strftime("%a, %d %b %Y %X", timezone.now().timetuple())
                engagement.threat_model = False
                engagement.api_test = False
                engagement.pen_test = False
                engagement.check_list = False
                engagement.target_start = timezone.now().date()
                engagement.target_end = timezone.now().date()
                engagement.product = product
                engagement.active = True
                engagement.status = 'In Progress'
                engagement.version = version
                engagement.branch_tag = branch_tag
                engagement.build_id = build_id
                engagement.commit_hash = commit_hash
                engagement.save()

            # push_to_jira = jira_helper.is_push_to_jira(new_finding, jform.cleaned_data.get('push_to_jira'))
            push_to_jira = push_all_jira_issues or (jform and jform.cleaned_data.get('push_to_jira'))
            error = False

            # Save newly added endpoints
            added_endpoints = save_endpoints_to_add(form.endpoints_to_add_list, engagement.product)

            try:
                importer = Importer()
                test, finding_count, closed_finding_count = importer.import_scan(scan, scan_type, engagement, user, environment, active=active, verified=verified, tags=tags,
                                                                                 minimum_severity=minimum_severity, endpoints_to_add=list(form.cleaned_data['endpoints']) + added_endpoints, scan_date=scan_date,
                                                                                 version=version, branch_tag=branch_tag, build_id=build_id, commit_hash=commit_hash, push_to_jira=push_to_jira,
                                                                                 close_old_findings=close_old_findings, group_by=group_by, api_scan_configuration=api_scan_configuration, service=service)

                message = f'{scan_type} processed a total of {finding_count} findings'

                if close_old_findings:
                    message = message + ' and closed %d findings' % (closed_finding_count)

                message = message + "."

                add_success_message_to_response(message)

            except Exception as e:
                logger.exception(e)
                add_error_message_to_response('An exception error occurred during the report import:%s' % str(e))
                error = True

            # Save the credential to the test
            if cred_form.is_valid():
                if cred_form.cleaned_data['cred_user']:
                    # Select the credential mapping object from the selected list and only allow if the credential is associated with the product
                    cred_user = Cred_Mapping.objects.filter(
                        pk=cred_form.cleaned_data['cred_user'].id,
                        engagement=eid).first()

                    new_f = cred_form.save(commit=False)
                    new_f.test = test
                    new_f.cred_id = cred_user.cred_id
                    new_f.save()

            if not error:
                # NOTE(review): on the engagement path (eid given) pid is None,
                # so this reverse() builds a URL with a None arg — verify.
                return HttpResponseRedirect(
                    reverse('product_open_findings', args=(pid, )))

    prod_id = None
    custom_breadcrumb = None
    title = "Import Scan Results"
    if engagement:
        prod_id = engagement.product.id
        product_tab = Product_Tab(prod_id, title=title, tab="engagements")
        product_tab.setEngagement(engagement)
    else:
        prod_id = pid
        custom_breadcrumb = {"", ""}
        product_tab = Product_Tab(prod_id, title=title, tab="findings")

    if jira_helper.get_jira_project(engagement_or_product):
        jform = JIRAImportScanForm(push_all=push_all_jira_issues, prefix='jiraform')

    form.fields['endpoints'].queryset = Endpoint.objects.filter(product__id=product_tab.product.id)
    form.fields['api_scan_configuration'].queryset = Product_API_Scan_Configuration.objects.filter(product__id=product_tab.product.id)
    return render(request,
                  'dojo/import_scan_results.html',
                  {'form': form,
                   'product_tab': product_tab,
                   'engagement_or_product': engagement_or_product,
                   'custom_breadcrumb': custom_breadcrumb,
                   'title': title,
                   'cred_form': cred_form,
                   'jform': jform,
                   'scan_types': get_scan_types_sorted(),
                   })
@user_is_authorized(Engagement, Permissions.Engagement_Edit, 'eid')
def close_eng(request, eid):
    """Close an engagement, notify subscribers, and return to the product's
    engagement list."""
    eng = Engagement.objects.get(id=eid)
    close_engagement(eng)
    messages.add_message(
        request,
        messages.SUCCESS,
        'Engagement closed successfully.',
        extra_tags='alert-success')
    # Fixed: a stray trailing comma previously wrapped this call's (unused)
    # result in a discarded 1-tuple.
    create_notification(event='close_engagement',
                        title='Closure of %s' % eng.name,
                        description='The engagement "%s" was closed' % (eng.name),
                        engagement=eng, url=reverse('engagement_all_findings', args=(eng.id, )))
    return HttpResponseRedirect(reverse("view_engagements", args=(eng.product.id, )))
@user_is_authorized(Engagement, Permissions.Engagement_Edit, 'eid')
def reopen_eng(request, eid):
    """Reopen a closed engagement, notify subscribers, and return to the
    product's engagement list."""
    eng = Engagement.objects.get(id=eid)
    reopen_engagement(eng)
    messages.add_message(
        request,
        messages.SUCCESS,
        'Engagement reopened successfully.',
        extra_tags='alert-success')
    # Fixed: a stray trailing comma previously wrapped this call's (unused)
    # result in a discarded 1-tuple.
    create_notification(event='other',
                        title='Reopening of %s' % eng.name,
                        engagement=eng,
                        description='The engagement "%s" was reopened' % (eng.name),
                        url=reverse('view_engagement', args=(eng.id, )))
    return HttpResponseRedirect(reverse("view_engagements", args=(eng.product.id, )))
@user_is_authorized(Engagement, Permissions.Engagement_Edit, 'eid')
def complete_checklist(request, eid):
    """Create or update the engagement's checklist from the submitted form."""
    eng = get_object_or_404(Engagement, id=eid)
    try:
        checklist = Check_List.objects.get(engagement=eng)
    except:
        # First visit: no checklist exists for this engagement yet.
        checklist = None
        pass

    add_breadcrumb(
        parent=eng,
        title="Complete checklist",
        top_level=False,
        request=request)
    if request.method == 'POST':
        tests = Test.objects.filter(engagement=eng)
        findings = Finding.objects.filter(test__in=tests).all()
        form = CheckForm(request.POST, instance=checklist, findings=findings)
        if form.is_valid():
            cl = form.save(commit=False)
            try:
                # Update path: reuse the existing checklist's primary key.
                check_l = Check_List.objects.get(engagement=eng)
                cl.id = check_l.id
                cl.save()
                form.save_m2m()
            except:
                # Create path: no checklist existed, attach it to the engagement.
                cl.engagement = eng
                cl.save()
                form.save_m2m()
                pass
            messages.add_message(
                request,
                messages.SUCCESS,
                'Checklist saved.',
                extra_tags='alert-success')
            return HttpResponseRedirect(
                reverse('view_engagement', args=(eid, )))
    else:
        tests = Test.objects.filter(engagement=eng)
        findings = Finding.objects.filter(test__in=tests).all()
        form = CheckForm(instance=checklist, findings=findings)

    product_tab = Product_Tab(eng.product.id, title="Checklist", tab="engagements")
    product_tab.setEngagement(eng)
    return render(request, 'dojo/checklist.html', {
        'form': form,
        'product_tab': product_tab,
        'eid': eng.id,
        'findings': findings,
    })
@user_is_authorized(Engagement, Permissions.Risk_Acceptance, 'eid')
def add_risk_acceptance(request, eid, fid=None):
    """Create a risk acceptance on an engagement, optionally pre-selecting
    finding ``fid``.

    Requires the product to have full risk acceptance enabled.
    """
    eng = get_object_or_404(Engagement, id=eid)
    finding = None
    if fid:
        finding = get_object_or_404(Finding, id=fid)

    if not eng.product.enable_full_risk_acceptance:
        raise PermissionDenied()

    if request.method == 'POST':
        form = RiskAcceptanceForm(request.POST, request.FILES)
        if form.is_valid():
            # first capture notes param as it cannot be saved directly as m2m
            notes = None
            if form.cleaned_data['notes']:
                notes = Notes(
                    entry=form.cleaned_data['notes'],
                    author=request.user,
                    date=timezone.now())
                notes.save()

            del form.cleaned_data['notes']

            try:
                # we sometimes see a weird exception here, but are unable to reproduce.
                # we add some logging in case it happens
                risk_acceptance = form.save()
            except Exception as e:
                logger.debug(vars(request.POST))
                logger.error(vars(form))
                logger.exception(e)
                raise

            # attach note to risk acceptance object now in database
            if notes:
                risk_acceptance.notes.add(notes)

            eng.risk_acceptance.add(risk_acceptance)

            findings = form.cleaned_data['accepted_findings']

            risk_acceptance = ra_helper.add_findings_to_risk_acceptance(risk_acceptance, findings)

            messages.add_message(
                request,
                messages.SUCCESS,
                'Risk acceptance saved.',
                extra_tags='alert-success')

            return redirect_to_return_url_or_else(request, reverse('view_engagement', args=(eid, )))
    else:
        risk_acceptance_title_suggestion = 'Accept: %s' % finding
        form = RiskAcceptanceForm(initial={'owner': request.user, 'name': risk_acceptance_title_suggestion})

    # Only unaccepted, non-duplicate findings of this engagement are selectable.
    finding_choices = Finding.objects.filter(duplicate=False, test__engagement=eng).filter(NOT_ACCEPTED_FINDINGS_QUERY).order_by('title')

    form.fields['accepted_findings'].queryset = finding_choices
    if fid:
        form.fields['accepted_findings'].initial = {fid}
    product_tab = Product_Tab(eng.product.id, title="Risk Acceptance", tab="engagements")
    product_tab.setEngagement(eng)

    return render(request, 'dojo/add_risk_acceptance.html', {
        'eng': eng,
        'product_tab': product_tab,
        'form': form
    })
@user_is_authorized(Engagement, Permissions.Engagement_View, 'eid')
def view_risk_acceptance(request, eid, raid):
    """Read-only display of a risk acceptance; delegates to the shared view."""
    return view_edit_risk_acceptance(
        request, eid=eid, raid=raid, edit_mode=False)
@user_is_authorized(Engagement, Permissions.Risk_Acceptance, 'eid')
def edit_risk_acceptance(request, eid, raid):
    """Editable display of a risk acceptance; delegates to the shared view."""
    return view_edit_risk_acceptance(
        request, eid=eid, raid=raid, edit_mode=True)
# will only be called by view_risk_acceptance and edit_risk_acceptance
def view_edit_risk_acceptance(request, eid, raid, edit_mode=False):
    """Shared view/edit page for one risk acceptance.

    A POST may carry several independent sub-actions, each keyed by a field
    name in request.POST ('decision', 'entry', 'delete_note', 'remove_finding',
    'replace_file', 'add_findings'); more than one can be processed in a single
    request. Any validation error keeps the user on the page.
    """
    risk_acceptance = get_object_or_404(Risk_Acceptance, pk=raid)
    eng = get_object_or_404(Engagement, pk=eid)
    # Editing is only allowed when the product permits full risk acceptance.
    if edit_mode and not eng.product.enable_full_risk_acceptance:
        raise PermissionDenied()
    risk_acceptance_form = None
    errors = False
    if request.method == 'POST':
        # deleting before instantiating the form otherwise django messes up and we end up with an empty path value
        if len(request.FILES) > 0:
            logger.debug('new proof uploaded')
            risk_acceptance.path.delete()
        # Sub-action: the main edit form was submitted.
        if 'decision' in request.POST:
            old_expiration_date = risk_acceptance.expiration_date
            risk_acceptance_form = EditRiskAcceptanceForm(request.POST, request.FILES, instance=risk_acceptance)
            errors = errors or not risk_acceptance_form.is_valid()
            if not errors:
                logger.debug('path: %s', risk_acceptance_form.cleaned_data['path'])
                risk_acceptance_form.save()
                if risk_acceptance.expiration_date != old_expiration_date:
                    # risk acceptance was changed, check if risk acceptance needs to be reinstated and findings made accepted again
                    ra_helper.reinstate(risk_acceptance, old_expiration_date)
                messages.add_message(
                    request,
                    messages.SUCCESS,
                    'Risk Acceptance saved successfully.',
                    extra_tags='alert-success')
        # Sub-action: a new note was entered.
        if 'entry' in request.POST:
            note_form = NoteForm(request.POST)
            errors = errors or not note_form.is_valid()
            if not errors:
                new_note = note_form.save(commit=False)
                new_note.author = request.user
                new_note.date = timezone.now()
                new_note.save()
                risk_acceptance.notes.add(new_note)
                messages.add_message(
                    request,
                    messages.SUCCESS,
                    'Note added successfully.',
                    extra_tags='alert-success')
        # Sub-action: note deletion -- only the note's author may delete it.
        if 'delete_note' in request.POST:
            note = get_object_or_404(Notes, pk=request.POST['delete_note_id'])
            if note.author.username == request.user.username:
                risk_acceptance.notes.remove(note)
                note.delete()
                messages.add_message(
                    request,
                    messages.SUCCESS,
                    'Note deleted successfully.',
                    extra_tags='alert-success')
            else:
                messages.add_message(
                    request,
                    messages.ERROR,
                    "Since you are not the note's author, it was not deleted.",
                    extra_tags='alert-danger')
        # Sub-action: detach a finding from this acceptance.
        if 'remove_finding' in request.POST:
            finding = get_object_or_404(
                Finding, pk=request.POST['remove_finding_id'])
            ra_helper.remove_finding_from_risk_acceptance(risk_acceptance, finding)
            messages.add_message(
                request,
                messages.SUCCESS,
                'Finding removed successfully from risk acceptance.',
                extra_tags='alert-success')
        # Sub-action: replace the uploaded proof document.
        if 'replace_file' in request.POST:
            replace_form = ReplaceRiskAcceptanceProofForm(
                request.POST, request.FILES, instance=risk_acceptance)
            errors = errors or not replace_form.is_valid()
            if not errors:
                replace_form.save()
                messages.add_message(
                    request,
                    messages.SUCCESS,
                    'New Proof uploaded successfully.',
                    extra_tags='alert-success')
            else:
                logger.error(replace_form.errors)
        # Sub-action: accept additional findings.
        if 'add_findings' in request.POST:
            add_findings_form = AddFindingsRiskAcceptanceForm(
                request.POST, request.FILES, instance=risk_acceptance)
            errors = errors or not add_findings_form.is_valid()
            if not errors:
                findings = add_findings_form.cleaned_data['accepted_findings']
                ra_helper.add_findings_to_risk_acceptance(risk_acceptance, findings)
                messages.add_message(
                    request,
                    messages.SUCCESS,
                    'Finding%s added successfully.' % ('s' if len(findings) > 1
                                                      else ''),
                    extra_tags='alert-success')
        # Only redirect on a fully successful POST; otherwise re-render with errors.
        if not errors:
            logger.debug('redirecting to return_url')
            return redirect_to_return_url_or_else(request, reverse("view_risk_acceptance", args=(eid, raid)))
        else:
            logger.error('errors found')
    else:
        if edit_mode:
            risk_acceptance_form = EditRiskAcceptanceForm(instance=risk_acceptance)
    note_form = NoteForm()
    replace_form = ReplaceRiskAcceptanceProofForm(instance=risk_acceptance)
    add_findings_form = AddFindingsRiskAcceptanceForm(instance=risk_acceptance)
    accepted_findings = risk_acceptance.accepted_findings.order_by('numerical_severity')
    fpage = get_page_items(request, accepted_findings, 15)
    # Candidate findings: everything on this engagement not already accepted.
    unaccepted_findings = Finding.objects.filter(test__in=eng.test_set.all()) \
        .exclude(id__in=accepted_findings).order_by("title")
    add_fpage = get_page_items(request, unaccepted_findings, 10, 'apage')
    # Restrict the add-findings choices to the current page of candidates.
    add_findings_form.fields[
        "accepted_findings"].queryset = add_fpage.object_list
    product_tab = Product_Tab(eng.product.id, title="Risk Acceptance", tab="engagements")
    product_tab.setEngagement(eng)
    return render(
        request, 'dojo/view_risk_acceptance.html', {
            'risk_acceptance': risk_acceptance,
            'engagement': eng,
            'product_tab': product_tab,
            'accepted_findings': fpage,
            'notes': risk_acceptance.notes.all(),
            'eng': eng,
            'edit_mode': edit_mode,
            'risk_acceptance_form': risk_acceptance_form,
            'note_form': note_form,
            'replace_form': replace_form,
            'add_findings_form': add_findings_form,
            'request': request,
            'add_findings': add_fpage,
            'return_url': get_return_url(request),
        })
@user_is_authorized(Engagement, Permissions.Risk_Acceptance, 'eid')
def expire_risk_acceptance(request, eid, raid):
    """Force a risk acceptance to expire right now, then return to its detail page."""
    acceptance = get_object_or_404(
        prefetch_for_expiration(Risk_Acceptance.objects.all()), pk=raid)
    # Looked up purely so a bogus engagement id still yields a 404.
    get_object_or_404(Engagement, pk=eid)
    ra_helper.expire_now(acceptance)
    return redirect_to_return_url_or_else(
        request, reverse("view_risk_acceptance", args=(eid, raid)))
@user_is_authorized(Engagement, Permissions.Risk_Acceptance, 'eid')
def reinstate_risk_acceptance(request, eid, raid):
    """Re-activate an expired risk acceptance, keeping its current expiration date."""
    acceptance = get_object_or_404(
        prefetch_for_expiration(Risk_Acceptance.objects.all()), pk=raid)
    engagement = get_object_or_404(Engagement, pk=eid)
    # Reinstating is only allowed when the product permits full risk acceptance.
    if not engagement.product.enable_full_risk_acceptance:
        raise PermissionDenied()
    ra_helper.reinstate(acceptance, acceptance.expiration_date)
    return redirect_to_return_url_or_else(
        request, reverse("view_risk_acceptance", args=(eid, raid)))
@user_is_authorized(Engagement, Permissions.Risk_Acceptance, 'eid')
def delete_risk_acceptance(request, eid, raid):
    """Delete a risk acceptance and return to the engagement page."""
    acceptance = get_object_or_404(Risk_Acceptance, pk=raid)
    engagement = get_object_or_404(Engagement, pk=eid)
    ra_helper.delete(engagement, acceptance)
    messages.add_message(request,
                         messages.SUCCESS,
                         'Risk acceptance deleted successfully.',
                         extra_tags='alert-success')
    return HttpResponseRedirect(reverse("view_engagement", args=(engagement.id, )))
@user_is_authorized(Engagement, Permissions.Engagement_View, 'eid')
def download_risk_acceptance(request, eid, raid):
    """Stream the proof document attached to a risk acceptance as a download.

    The file is served straight from MEDIA_ROOT with the acceptance's own
    filename and a best-effort guessed MIME type.
    """
    import mimetypes
    import os

    mimetypes.init()
    risk_acceptance = get_object_or_404(Risk_Acceptance, pk=raid)

    # os.path.join is safer than string concatenation for building the path.
    file_path = os.path.join(settings.MEDIA_ROOT, risk_acceptance.path.name)
    response = StreamingHttpResponse(
        FileIterWrapper(open(file_path, mode='rb')))
    response['Content-Disposition'] = 'attachment; filename="%s"' \
                                      % risk_acceptance.filename()
    mimetype, encoding = mimetypes.guess_type(risk_acceptance.path.name)
    # guess_type() returns None for unknown extensions; never emit a literal
    # "None" Content-Type header -- fall back to the generic binary type.
    response['Content-Type'] = mimetype or 'application/octet-stream'
    return response
@user_is_authorized(Engagement, Permissions.Engagement_Edit, 'eid')
def upload_threatmodel(request, eid):
    """Upload (or replace) the threat model document for an engagement.

    GET renders the upload form; POST stores the file via
    handle_uploaded_threat(), flags the engagement as having a threat model,
    moves its progress to 'other' and redirects to the engagement view.
    """
    # get_object_or_404 keeps a missing engagement a 404 instead of the
    # unhandled DoesNotExist (HTTP 500) that Engagement.objects.get()
    # produced -- consistent with every other view in this module.
    eng = get_object_or_404(Engagement, id=eid)
    add_breadcrumb(
        parent=eng,
        title="Upload a threat model",
        top_level=False,
        request=request)
    if request.method == 'POST':
        form = UploadThreatForm(request.POST, request.FILES)
        if form.is_valid():
            handle_uploaded_threat(request.FILES['file'], eng)
            eng.progress = 'other'
            eng.threat_model = True
            eng.save()
            messages.add_message(
                request,
                messages.SUCCESS,
                'Threat model saved.',
                extra_tags='alert-success')
            return HttpResponseRedirect(
                reverse('view_engagement', args=(eid, )))
    else:
        form = UploadThreatForm()
    product_tab = Product_Tab(eng.product.id, title="Upload Threat Model", tab="engagements")
    return render(request, 'dojo/up_threat.html', {
        'form': form,
        'product_tab': product_tab,
        'eng': eng,
    })
@user_is_authorized(Engagement, Permissions.Engagement_View, 'eid')
def view_threatmodel(request, eid):
    """Serve the engagement's uploaded threat model file."""
    eng = get_object_or_404(Engagement, pk=eid)
    # Per Django docs, FileResponse takes ownership of the open handle and
    # closes it once the response has been streamed.
    response = FileResponse(open(eng.tmodel_path, 'rb'))
    return response
@user_is_authorized(Engagement, Permissions.Engagement_View, 'eid')
def engagement_ics(request, eid):
    """Export an engagement's target window as a downloadable iCalendar (.ics) event."""
    eng = get_object_or_404(Engagement, id=eid)
    # The event spans the whole target window: midnight on the start date
    # through the last moment of the end date.
    start_date = datetime.combine(eng.target_start, datetime.min.time())
    end_date = datetime.combine(eng.target_end, datetime.max.time())
    # UID derived from engagement and product ids, stable across exports.
    uid = "dojo_eng_%d_%d" % (eng.id, eng.product.id)
    cal = get_cal_event(
        start_date, end_date,
        "Engagement: %s (%s)" % (eng.name, eng.product.name),
        "Set aside for engagement %s, on product %s. Additional detail can be found at %s"
        % (eng.name, eng.product.name,
           request.build_absolute_uri(
               (reverse("view_engagement", args=(eng.id, ))))), uid)
    output = cal.serialize()
    response = HttpResponse(content=output)
    response['Content-Type'] = 'text/calendar'
    response['Content-Disposition'] = 'attachment; filename=%s.ics' % eng.name
    return response
def get_list_index(list, index):
    """Return ``list[index]``, or None when the element cannot be read.

    The first parameter keeps its historical name ``list`` (shadowing the
    builtin) so existing keyword callers stay compatible.
    """
    try:
        return list[index]
    except (IndexError, KeyError, TypeError):
        # Narrowed from a bare `except Exception` to the lookup-related
        # errors only, so genuine bugs are no longer silently swallowed.
        return None
def get_engagements(request):
    """Resolve an engagement-list URL (from the query string) into a queryset.

    Returns a tuple ``(engagements, test_counts)`` where ``test_counts`` maps
    engagement id to its number of tests. Raises ValidationError when the
    query string is missing or does not point at an engagement view.
    """
    url = request.META.get('QUERY_STRING')
    if not url:
        raise ValidationError('Please use the export button when exporting engagements')
    if url.startswith('url='):
        url = url[4:]
    # Raw string: '\?' in a non-raw literal is an invalid escape sequence.
    path_items = list(filter(None, re.split(r'/|\?', url)))
    if not path_items or path_items[0] != 'engagement':
        raise ValidationError('URL is not an engagement view')
    view = query = None
    # URL shapes: engagement/<active|all>?<query> or engagement?<query>
    # (the latter implies the 'active' view).
    if get_list_index(path_items, 1) in ['active', 'all']:
        view = get_list_index(path_items, 1)
        query = get_list_index(path_items, 2)
    else:
        view = 'active'
        query = get_list_index(path_items, 1)
    # Replay the embedded query string through the normal filter machinery.
    request.GET = QueryDict(query)
    engagements = get_filtered_engagements(request, view).qs
    test_counts = get_test_counts(engagements)
    return engagements, test_counts
def get_excludes():
    """Engagement attribute names that must never appear in exports."""
    excluded_attrs = [
        'is_ci_cd',
        'jira_issue',
        'jira_project',
        'objects',
        'unaccepted_open_findings',
    ]
    return excluded_attrs
def get_foreign_keys():
    """Engagement attributes that are foreign keys and are exported as str()."""
    fk_attrs = [
        'build_server',
        'lead',
        'orchestration_engine',
        'preset',
        'product',
        'report_type',
        'requester',
        'source_code_management_server',
    ]
    return fk_attrs
def csv_export(request):
    """Export the engagements selected by the request's query string as CSV.

    Columns are the first engagement's public, non-callable, non-excluded
    attributes plus a trailing 'tests' count column.
    """
    engagements, test_counts = get_engagements(request)
    response = HttpResponse(content_type='text/csv')
    response['Content-Disposition'] = 'attachment; filename=engagements.csv'
    writer = csv.writer(response)
    first_row = True
    for engagement in engagements:
        # On the first engagement emit the header row; first_row then flips to
        # False so the SAME iteration also emits that engagement's data row below.
        if first_row:
            fields = []
            for key in dir(engagement):
                if key not in get_excludes() and not callable(getattr(engagement, key)) and not key.startswith('_'):
                    fields.append(key)
            fields.append('tests')
            writer.writerow(fields)
            first_row = False
        if not first_row:
            fields = []
            for key in dir(engagement):
                if key not in get_excludes() and not callable(getattr(engagement, key)) and not key.startswith('_'):
                    value = engagement.__dict__.get(key)
                    # Foreign keys are exported as their string representation.
                    if key in get_foreign_keys() and getattr(engagement, key):
                        value = str(getattr(engagement, key))
                    # Keep each record on one physical CSV line.
                    if value and isinstance(value, str):
                        value = value.replace('\n', ' NEWLINE ').replace('\r', '')
                    fields.append(value)
            fields.append(test_counts.get(engagement.id, 0))
            writer.writerow(fields)
    return response
def excel_export(request):
    """Export the engagements selected by the request's query string as XLSX.

    Mirrors csv_export(): one bold header row of attribute names plus a
    trailing 'tests' column, then one row per engagement.
    """
    engagements, test_counts = get_engagements(request)
    workbook = Workbook()
    workbook.iso_dates = True
    worksheet = workbook.active
    worksheet.title = 'Engagements'
    font_bold = Font(bold=True)
    row_num = 1
    for engagement in engagements:
        # First engagement also writes the header; row_num then becomes 2 so
        # the same iteration falls through to write its own data row.
        if row_num == 1:
            col_num = 1
            for key in dir(engagement):
                if key not in get_excludes() and not callable(getattr(engagement, key)) and not key.startswith('_'):
                    cell = worksheet.cell(row=row_num, column=col_num, value=key)
                    cell.font = font_bold
                    col_num += 1
            cell = worksheet.cell(row=row_num, column=col_num, value='tests')
            cell.font = font_bold
            row_num = 2
        if row_num > 1:
            col_num = 1
            for key in dir(engagement):
                if key not in get_excludes() and not callable(getattr(engagement, key)) and not key.startswith('_'):
                    value = engagement.__dict__.get(key)
                    # Foreign keys are exported as their string representation.
                    if key in get_foreign_keys() and getattr(engagement, key):
                        value = str(getattr(engagement, key))
                    # openpyxl cannot store timezone-aware datetimes.
                    if value and isinstance(value, datetime):
                        value = value.replace(tzinfo=None)
                    worksheet.cell(row=row_num, column=col_num, value=value)
                    col_num += 1
            worksheet.cell(row=row_num, column=col_num, value=test_counts.get(engagement.id, 0))
            row_num += 1
    # openpyxl writes to a path, so round-trip through a temp file to get bytes.
    with NamedTemporaryFile() as tmp:
        workbook.save(tmp.name)
        tmp.seek(0)
        stream = tmp.read()
    response = HttpResponse(
        content=stream,
        content_type='application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
    )
    response['Content-Disposition'] = 'attachment; filename=engagements.xlsx'
    return response
| true | true |
f71c1eaf10e717ab23c28074e182c01dfdc1b475 | 5,366 | py | Python | distributed/distributed/db.py | Yuanmessi/Bold-Falcon | 00fcaba0b3d9c462b9d20ecb256ff85db5d119e2 | [
"BSD-3-Clause"
] | 71 | 2016-11-13T03:26:45.000Z | 2022-02-22T08:13:04.000Z | distributed/distributed/db.py | Yuanmessi/Bold-Falcon | 00fcaba0b3d9c462b9d20ecb256ff85db5d119e2 | [
"BSD-3-Clause"
] | 3 | 2021-07-01T08:09:05.000Z | 2022-01-28T03:38:36.000Z | distributed/distributed/db.py | Yuanmessi/Bold-Falcon | 00fcaba0b3d9c462b9d20ecb256ff85db5d119e2 | [
"BSD-3-Clause"
] | 36 | 2016-12-13T11:37:56.000Z | 2021-11-11T12:20:10.000Z | # Copyright (C) 2014-2016 Cuckoo Foundation.
# This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org
# See the file 'docs/LICENSE' for copying permission.
import json
from datetime import datetime
from flask.ext.sqlalchemy import SQLAlchemy
from sqlalchemy.inspection import inspect
# Shared Flask-SQLAlchemy handle used by every model below; autoflush keeps
# pending changes visible to queries issued in the same session.
db = SQLAlchemy(session_options=dict(autoflush=True))
# Alembic schema revision this model definition corresponds to
# (mirrored into the AlembicVersion table below).
ALEMBIC_VERSION = "4b86bc0d40aa"
class Serializer(object):
    """Mixin that turns a mapped query-result object into a plain dict."""
    def to_dict(self):
        """Return {attribute name: value} for every mapped attribute."""
        return {
            attr: getattr(self, attr)
            for attr in inspect(self).attrs.keys()
        }
class StringList(db.TypeDecorator):
    """List of comma-separated strings as field."""
    impl = db.Text
    def process_bind_param(self, value, dialect):
        # SQLAlchemy hands None through for NULL column values; joining it
        # would raise TypeError, so preserve NULL as NULL.
        if value is None:
            return None
        return ", ".join(value)
    def process_result_value(self, value, dialect):
        if value is None:
            return None
        return value.split(", ")
class JsonType(db.TypeDecorator):
    """Arbitrary JSON-serializable data stored as a text field."""
    impl = db.Text
    # Python object -> JSON string on the way into the database.
    def process_bind_param(self, value, dialect):
        return json.dumps(value)
    # JSON string -> Python object on the way out.
    def process_result_value(self, value, dialect):
        return json.loads(value)
class Node(db.Model):
    """Cuckoo node database model."""
    # Primary key plus a unique, human-readable node name.
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.Text, nullable=False, unique=True)
    # Address used to reach this node.
    url = db.Column(db.Text, nullable=False)
    mode = db.Column(db.Text, nullable=False)
    # Disabled flag; presumably gates task scheduling -- confirm in scheduler.
    enabled = db.Column(db.Boolean, nullable=False)
    # One-to-many: analysis machines attached to this node.
    machines = db.relationship("Machine", backref="node", lazy="dynamic")
    def __init__(self, name, url, mode, enabled=True):
        self.name = name
        self.url = url
        self.mode = mode
        self.enabled = enabled
class Machine(db.Model):
    """Machine database model related to a Cuckoo node."""
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.Text, nullable=False)
    platform = db.Column(db.Text, nullable=False)
    # Stored comma-separated via the StringList decorator above.
    tags = db.Column(StringList)
    # Owning node (see Node.machines backref).
    node_id = db.Column(db.Integer, db.ForeignKey("node.id"))
    def __init__(self, name, platform, tags):
        self.name = name
        self.platform = platform
        self.tags = tags
class Task(db.Model, Serializer):
    """Analysis task database model.

    Lifecycle (see the `status` column): PENDING -> ASSIGNED -> PROCESSING ->
    FINISHED, or DELETED.
    """
    PENDING = "pending"
    ASSIGNED = "assigned"
    PROCESSING = "processing"
    FINISHED = "finished"
    DELETED = "deleted"
    task_status = db.Enum(PENDING, ASSIGNED, PROCESSING, FINISHED, DELETED,
                          name="task_status_type")
    id = db.Column(db.Integer, primary_key=True)
    # Submission parameters mirroring Cuckoo's task options.
    path = db.Column(db.Text)
    filename = db.Column(db.Text)
    package = db.Column(db.Text)
    timeout = db.Column(db.Integer)
    priority = db.Column(db.Integer)
    options = db.Column(db.Text)
    machine = db.Column(db.Text)
    platform = db.Column(db.Text)
    tags = db.Column(db.Text)
    custom = db.Column(db.Text)
    owner = db.Column(db.Text)
    memory = db.Column(db.Text)
    # NOTE(review): declared Integer although "clock" usually denotes a
    # datetime in Cuckoo -- confirm the intended representation.
    clock = db.Column(db.Integer)
    enforce_timeout = db.Column(db.Text)
    # Cuckoo node and Task ID this has been submitted to.
    node_id = db.Column(db.Integer, db.ForeignKey("node.id"))
    task_id = db.Column(db.Integer)
    status = db.Column(task_status, nullable=False)
    # Timestamps for this task. When it was submitted, when it was delegated
    # to a Cuckoo node, when the analysis started, and when we retrieved
    # the report.
    submitted = db.Column(db.DateTime(timezone=False), default=datetime.now)
    delegated = db.Column(db.DateTime(timezone=False), nullable=True)
    started = db.Column(db.DateTime(timezone=False), nullable=True)
    completed = db.Column(db.DateTime(timezone=False), nullable=True)
    # Composite index for looking tasks up by (node, remote task id).
    __table_args__ = db.Index("ix_node_task", node_id, task_id),
    def __init__(self, path=None, filename=None, package=None, timeout=None,
                 priority=None, options=None, machine=None, platform=None,
                 tags=None, custom=None, owner=None, memory=None, clock=None,
                 enforce_timeout=None, node_id=None, task_id=None,
                 status=PENDING):
        self.path = path
        self.filename = filename
        self.package = package
        self.timeout = timeout
        self.priority = priority
        self.options = options
        self.machine = machine
        self.platform = platform
        self.tags = tags
        self.custom = custom
        self.owner = owner
        self.memory = memory
        self.clock = clock
        self.enforce_timeout = enforce_timeout
        self.node_id = node_id
        self.task_id = task_id
        self.status = status
class NodeStatus(db.Model, Serializer):
    """Node status monitoring database model."""
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.Text, nullable=False)
    # Indexed so status history can be queried by time efficiently.
    timestamp = db.Column(db.DateTime(timezone=False), nullable=False,
                          index=True)
    # Arbitrary status payload, JSON-serialized via JsonType.
    status = db.Column(JsonType, nullable=False)
    def __init__(self, name, timestamp, status):
        self.name = name
        self.timestamp = timestamp
        self.status = status
class AlembicVersion(db.Model):
    """Support model for keeping track of the alembic revision identifier."""
    # Revision this code expects (module-level ALEMBIC_VERSION).
    VERSION = ALEMBIC_VERSION
    version_num = db.Column(db.Text, nullable=False, primary_key=True)
    def __init__(self, version_num):
        self.version_num = version_num
| 34.619355 | 77 | 0.660268 |
import json
from datetime import datetime
from flask.ext.sqlalchemy import SQLAlchemy
from sqlalchemy.inspection import inspect
# Shared Flask-SQLAlchemy handle used by every model below; autoflush keeps
# pending changes visible to queries issued in the same session.
db = SQLAlchemy(session_options=dict(autoflush=True))
# Alembic schema revision this model definition corresponds to.
ALEMBIC_VERSION = "4b86bc0d40aa"
class Serializer(object):
    """Mixin that turns a mapped query-result object into a plain dict."""
    def to_dict(self):
        # Walk the mapped attributes reported by SQLAlchemy's inspect().
        ret = {}
        for key in inspect(self).attrs.keys():
            ret[key] = getattr(self, key)
        return ret
class StringList(db.TypeDecorator):
    """List of strings stored as a single comma-separated text column."""
    impl = db.Text
    def process_bind_param(self, value, dialect):
        # SQLAlchemy hands None through for NULL column values; joining it
        # would raise TypeError, so preserve NULL as NULL.
        if value is None:
            return None
        return ", ".join(value)
    def process_result_value(self, value, dialect):
        if value is None:
            return None
        return value.split(", ")
class JsonType(db.TypeDecorator):
    """Arbitrary JSON-serializable data stored as a text field."""
    impl = db.Text
    # Python object -> JSON string on the way into the database.
    def process_bind_param(self, value, dialect):
        return json.dumps(value)
    # JSON string -> Python object on the way out.
    def process_result_value(self, value, dialect):
        return json.loads(value)
class Node(db.Model):
    """Cuckoo node database model."""
    id = db.Column(db.Integer, primary_key=True)
    # Unique, human-readable node name.
    name = db.Column(db.Text, nullable=False, unique=True)
    # Address used to reach this node.
    url = db.Column(db.Text, nullable=False)
    mode = db.Column(db.Text, nullable=False)
    enabled = db.Column(db.Boolean, nullable=False)
    # One-to-many: analysis machines attached to this node.
    machines = db.relationship("Machine", backref="node", lazy="dynamic")
    def __init__(self, name, url, mode, enabled=True):
        self.name = name
        self.url = url
        self.mode = mode
        self.enabled = enabled
class Machine(db.Model):
    """Machine database model related to a Cuckoo node."""
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.Text, nullable=False)
    platform = db.Column(db.Text, nullable=False)
    # Stored comma-separated via the StringList decorator above.
    tags = db.Column(StringList)
    # Owning node (see Node.machines backref).
    node_id = db.Column(db.Integer, db.ForeignKey("node.id"))
    def __init__(self, name, platform, tags):
        self.name = name
        self.platform = platform
        self.tags = tags
class Task(db.Model, Serializer):
    """Analysis task database model.

    Lifecycle (see the `status` column): PENDING -> ASSIGNED -> PROCESSING ->
    FINISHED, or DELETED.
    """
    PENDING = "pending"
    ASSIGNED = "assigned"
    PROCESSING = "processing"
    FINISHED = "finished"
    DELETED = "deleted"
    task_status = db.Enum(PENDING, ASSIGNED, PROCESSING, FINISHED, DELETED,
                          name="task_status_type")
    id = db.Column(db.Integer, primary_key=True)
    # Submission parameters mirroring Cuckoo's task options.
    path = db.Column(db.Text)
    filename = db.Column(db.Text)
    package = db.Column(db.Text)
    timeout = db.Column(db.Integer)
    priority = db.Column(db.Integer)
    options = db.Column(db.Text)
    machine = db.Column(db.Text)
    platform = db.Column(db.Text)
    tags = db.Column(db.Text)
    custom = db.Column(db.Text)
    owner = db.Column(db.Text)
    memory = db.Column(db.Text)
    clock = db.Column(db.Integer)
    enforce_timeout = db.Column(db.Text)
    # Cuckoo node and remote task id this task has been submitted to.
    node_id = db.Column(db.Integer, db.ForeignKey("node.id"))
    task_id = db.Column(db.Integer)
    status = db.Column(task_status, nullable=False)
    # Timestamps: submission, delegation to a node, analysis start, completion.
    submitted = db.Column(db.DateTime(timezone=False), default=datetime.now)
    delegated = db.Column(db.DateTime(timezone=False), nullable=True)
    started = db.Column(db.DateTime(timezone=False), nullable=True)
    completed = db.Column(db.DateTime(timezone=False), nullable=True)
    # Composite index for looking tasks up by (node, remote task id).
    __table_args__ = db.Index("ix_node_task", node_id, task_id),
    def __init__(self, path=None, filename=None, package=None, timeout=None,
                 priority=None, options=None, machine=None, platform=None,
                 tags=None, custom=None, owner=None, memory=None, clock=None,
                 enforce_timeout=None, node_id=None, task_id=None,
                 status=PENDING):
        self.path = path
        self.filename = filename
        self.package = package
        self.timeout = timeout
        self.priority = priority
        self.options = options
        self.machine = machine
        self.platform = platform
        self.tags = tags
        self.custom = custom
        self.owner = owner
        self.memory = memory
        self.clock = clock
        self.enforce_timeout = enforce_timeout
        self.node_id = node_id
        self.task_id = task_id
        self.status = status
class NodeStatus(db.Model, Serializer):
    """Node status monitoring database model."""
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.Text, nullable=False)
    # Indexed so status history can be queried by time efficiently.
    timestamp = db.Column(db.DateTime(timezone=False), nullable=False,
                          index=True)
    # Arbitrary status payload, JSON-serialized via JsonType.
    status = db.Column(JsonType, nullable=False)
    def __init__(self, name, timestamp, status):
        self.name = name
        self.timestamp = timestamp
        self.status = status
class AlembicVersion(db.Model):
    """Support model tracking the alembic revision identifier of the schema."""
    # Revision this code expects (module-level ALEMBIC_VERSION).
    VERSION = ALEMBIC_VERSION
    version_num = db.Column(db.Text, nullable=False, primary_key=True)
    def __init__(self, version_num):
        self.version_num = version_num
| true | true |
f71c1f3f5d963f1a9f0cad8f49f08a8a8952fb0f | 7,941 | py | Python | CTFd/config.py | mayoneko/CTFd | 825190ba3aef10f9cdc736f4d6f8ea2a5c8590ac | [
"Apache-2.0"
] | 2 | 2019-06-19T07:11:28.000Z | 2019-06-21T05:30:07.000Z | CTFd/config.py | mayoneko/CTFd | 825190ba3aef10f9cdc736f4d6f8ea2a5c8590ac | [
"Apache-2.0"
] | null | null | null | CTFd/config.py | mayoneko/CTFd | 825190ba3aef10f9cdc736f4d6f8ea2a5c8590ac | [
"Apache-2.0"
] | null | null | null | import os
''' GENERATE SECRET KEY '''
if not os.getenv('SECRET_KEY'):
    # Attempt to read the secret from the secret file
    # This will fail if the secret has not been written
    try:
        with open('.ctfd_secret_key', 'rb') as secret:
            key = secret.read()
    except (OSError, IOError):
        key = None
    if not key:
        # No persisted secret yet: generate 64 random bytes for signing.
        key = os.urandom(64)
        # Attempt to write the secret file
        # This will fail if the filesystem is read-only
        try:
            with open('.ctfd_secret_key', 'wb') as secret:
                secret.write(key)
                secret.flush()
        except (OSError, IOError):
            # Read-only filesystem: the key lives only for this process,
            # so sessions will not survive a restart.
            pass
''' SERVER SETTINGS '''
class Config(object):
    """
    CTFd Configuration Object
    """
    '''
    === REQUIRED SETTINGS ===
    SECRET_KEY:
        The secret value used to creation sessions and sign strings. This should be set to a random string. In the
        interest of ease, CTFd will automatically create a secret key file for you. If you wish to add this secret key
        to your instance you should hard code this value to a random static value.
        You can also remove .ctfd_secret_key from the .gitignore file and commit this file into whatever repository
        you are using.
        http://flask.pocoo.org/docs/latest/quickstart/#sessions
    SQLALCHEMY_DATABASE_URI:
        The URI that specifies the username, password, hostname, port, and database of the server
        used to hold the CTFd database.
        e.g. mysql+pymysql://root:<YOUR_PASSWORD_HERE>@localhost/ctfd
    CACHE_TYPE:
        Specifies how CTFd should cache configuration values. If CACHE_TYPE is set to 'redis', CTFd will make use
        of the REDIS_URL specified in environment variables. You can also choose to hardcode the REDIS_URL here.
        It is important that you specify some sort of cache as CTFd uses it to store values received from the database. If
        no cache is specified, CTFd will default to a simple per-worker cache. The simple cache cannot be effectively used
        with multiple workers.
        REDIS_URL is the URL to connect to a Redis server.
        e.g. redis://user:password@localhost:6379
        http://pythonhosted.org/Flask-Caching/#configuring-flask-caching
    '''
    # `key` comes from the module-level secret generation above.
    SECRET_KEY = os.getenv('SECRET_KEY') or key
    DATABASE_URL = os.getenv('DATABASE_URL') or 'sqlite:///{}/ctfd.db'.format(os.path.dirname(os.path.abspath(__file__)))
    REDIS_URL = os.getenv('REDIS_URL')
    SQLALCHEMY_DATABASE_URI = DATABASE_URL
    CACHE_REDIS_URL = REDIS_URL
    if CACHE_REDIS_URL:
        CACHE_TYPE = 'redis'
    else:
        CACHE_TYPE = 'filesystem'
        CACHE_DIR = os.path.join(os.path.dirname(__file__), os.pardir, '.data', 'filesystem_cache')
    '''
    === SECURITY ===
    SESSION_COOKIE_HTTPONLY:
        Controls if cookies should be set with the HttpOnly flag.
    PERMANENT_SESSION_LIFETIME:
        The lifetime of a session. The default is 604800 seconds.
    TRUSTED_PROXIES:
        Defines a set of regular expressions used for finding a user's IP address if the CTFd instance
        is behind a proxy. If you are running a CTF and users are on the same network as you, you may choose to remove
        some proxies from the list.
        CTFd only uses IP addresses for cursory tracking purposes. It is ill-advised to do anything complicated based
        solely on IP addresses unless you know what you are doing.
    '''
    # NOTE: only the truthiness of the raw env string is checked, so setting
    # SESSION_COOKIE_HTTPONLY to ANY non-empty value (even "true") turns the
    # flag OFF. The same applies to the other `not os.getenv(...)` settings.
    SESSION_COOKIE_HTTPONLY = (not os.getenv("SESSION_COOKIE_HTTPONLY"))  # Defaults True
    PERMANENT_SESSION_LIFETIME = int(os.getenv("PERMANENT_SESSION_LIFETIME") or 604800)  # 7 days in seconds
    TRUSTED_PROXIES = [
        r'^127\.0\.0\.1$',
        # Remove the following proxies if you do not trust the local network
        # For example if you are running a CTF on your laptop and the teams are
        # all on the same network
        r'^::1$',
        r'^fc00:',
        r'^10\.',
        r'^172\.(1[6-9]|2[0-9]|3[0-1])\.',
        r'^192\.168\.'
    ]
    '''
    === EMAIL ===
    MAILFROM_ADDR:
        The email address that emails are sent from if not overridden in the configuration panel.
    MAIL_SERVER:
        The mail server that emails are sent from if not overriden in the configuration panel.
    MAIL_PORT:
        The mail port that emails are sent from if not overriden in the configuration panel.
    '''
    MAILFROM_ADDR = os.getenv("MAILFROM_ADDR") or "noreply@ctfd.io"
    MAIL_SERVER = os.getenv("MAIL_SERVER") or None
    MAIL_PORT = os.getenv("MAIL_PORT")
    MAIL_USERNAME = os.getenv("MAIL_USERNAME")
    MAIL_PASSWORD = os.getenv("MAIL_PASSWORD")
    MAIL_TLS = os.getenv("MAIL_TLS") or False
    MAIL_SSL = os.getenv("MAIL_SSL") or False
    MAILGUN_API_KEY = os.getenv("MAILGUN_API_KEY")
    MAILGUN_BASE_URL = os.getenv("MAILGUN_BASE_URL")
    '''
    === LOGS ===
    LOG_FOLDER:
        The location where logs are written. These are the logs for CTFd key submissions, registrations, and logins.
        The default location is the CTFd/logs folder.
    '''
    LOG_FOLDER = os.getenv('LOG_FOLDER') or os.path.join(os.path.dirname(os.path.abspath(__file__)), 'logs')
    '''
    === UPLOADS ===
    UPLOAD_PROVIDER:
        Specifies the service that CTFd should use to store files.
    UPLOAD_FOLDER:
        The location where files are uploaded. The default destination is the CTFd/uploads folder.
    AWS_ACCESS_KEY_ID:
        AWS access token used to authenticate to the S3 bucket.
    AWS_SECRET_ACCESS_KEY:
        AWS secret token used to authenticate to the S3 bucket.
    AWS_S3_BUCKET:
        The unique identifier for your S3 bucket.
    AWS_S3_ENDPOINT_URL:
        A URL pointing to a custom S3 implementation.
    '''
    UPLOAD_PROVIDER = os.getenv('UPLOAD_PROVIDER') or 'filesystem'
    UPLOAD_FOLDER = os.getenv('UPLOAD_FOLDER') or os.path.join(os.path.dirname(os.path.abspath(__file__)), 'uploads')
    # AWS settings are only defined at all when the s3 provider is selected.
    if UPLOAD_PROVIDER == 's3':
        AWS_ACCESS_KEY_ID = os.getenv('AWS_ACCESS_KEY_ID')
        AWS_SECRET_ACCESS_KEY = os.getenv('AWS_SECRET_ACCESS_KEY')
        AWS_S3_BUCKET = os.getenv('AWS_S3_BUCKET')
        AWS_S3_ENDPOINT_URL = os.getenv('AWS_S3_ENDPOINT_URL')
    '''
    === OPTIONAL ===
    REVERSE_PROXY:
        Specifies whether CTFd is behind a reverse proxy or not. Set to True if using a reverse proxy like nginx.
    TEMPLATES_AUTO_RELOAD:
        Specifies whether Flask should check for modifications to templates and reload them automatically.
    SQLALCHEMY_TRACK_MODIFICATIONS:
        Automatically disabled to suppress warnings and save memory. You should only enable this if you need it.
    UPDATE_CHECK:
        Specifies whether or not CTFd will check whether or not there is a new version of CTFd
    APPLICATION_ROOT:
        Specifies what path CTFd is mounted under. It can be used to run CTFd in a subdirectory.
        Example: /ctfd
    '''
    REVERSE_PROXY = os.getenv("REVERSE_PROXY") or False
    TEMPLATES_AUTO_RELOAD = (not os.getenv("TEMPLATES_AUTO_RELOAD"))  # Defaults True
    SQLALCHEMY_TRACK_MODIFICATIONS = (not os.getenv("SQLALCHEMY_TRACK_MODIFICATIONS"))  # Defaults True
    UPDATE_CHECK = (not os.getenv("UPDATE_CHECK"))  # Defaults True
    APPLICATION_ROOT = os.getenv('APPLICATION_ROOT') or '/'
    '''
    === OAUTH ===
    MajorLeagueCyber Integration
    Register an event at https://majorleaguecyber.org/ and use the Client ID and Client Secret here
    '''
    OAUTH_CLIENT_ID = os.getenv("OAUTH_CLIENT_ID")
    OAUTH_CLIENT_SECRET = os.getenv("OAUTH_CLIENT_SECRET")
class TestingConfig(Config):
    """Overrides used by the test suite: fixed key, in-memory DB, simple cache."""
    SECRET_KEY = 'AAAAAAAAAAAAAAAAAAAA'
    PRESERVE_CONTEXT_ON_EXCEPTION = False
    TESTING = True
    DEBUG = True
    SQLALCHEMY_DATABASE_URI = os.getenv('TESTING_DATABASE_URL') or 'sqlite://'
    SERVER_NAME = 'localhost'
    UPDATE_CHECK = False
    REDIS_URL = None
    CACHE_TYPE = 'simple'
    SAFE_MODE = True
| 37.107477 | 122 | 0.679008 | import os
if not os.getenv('SECRET_KEY'):
try:
with open('.ctfd_secret_key', 'rb') as secret:
key = secret.read()
except (OSError, IOError):
key = None
if not key:
key = os.urandom(64)
try:
with open('.ctfd_secret_key', 'wb') as secret:
secret.write(key)
secret.flush()
except (OSError, IOError):
pass
class Config(object):
    """CTFd configuration object; values come from environment variables with fallbacks."""
    # `key` comes from the module-level secret generation above.
    SECRET_KEY = os.getenv('SECRET_KEY') or key
    DATABASE_URL = os.getenv('DATABASE_URL') or 'sqlite:///{}/ctfd.db'.format(os.path.dirname(os.path.abspath(__file__)))
    REDIS_URL = os.getenv('REDIS_URL')
    SQLALCHEMY_DATABASE_URI = DATABASE_URL
    CACHE_REDIS_URL = REDIS_URL
    # Use redis when available, otherwise an on-disk cache next to the app.
    if CACHE_REDIS_URL:
        CACHE_TYPE = 'redis'
    else:
        CACHE_TYPE = 'filesystem'
        CACHE_DIR = os.path.join(os.path.dirname(__file__), os.pardir, '.data', 'filesystem_cache')
    # NOTE: only truthiness of the raw env string is checked, so setting the
    # variable to ANY non-empty value (even "true") turns the flag OFF.
    SESSION_COOKIE_HTTPONLY = (not os.getenv("SESSION_COOKIE_HTTPONLY"))
    # Session lifetime in seconds (604800 = 7 days).
    PERMANENT_SESSION_LIFETIME = int(os.getenv("PERMANENT_SESSION_LIFETIME") or 604800)
    # Regexes matched against proxy addresses when resolving client IPs.
    TRUSTED_PROXIES = [
        r'^127\.0\.0\.1$',
        r'^::1$',
        r'^fc00:',
        r'^10\.',
        r'^172\.(1[6-9]|2[0-9]|3[0-1])\.',
        r'^192\.168\.'
    ]
    # Outgoing mail settings.
    MAILFROM_ADDR = os.getenv("MAILFROM_ADDR") or "noreply@ctfd.io"
    MAIL_SERVER = os.getenv("MAIL_SERVER") or None
    MAIL_PORT = os.getenv("MAIL_PORT")
    MAIL_USERNAME = os.getenv("MAIL_USERNAME")
    MAIL_PASSWORD = os.getenv("MAIL_PASSWORD")
    MAIL_TLS = os.getenv("MAIL_TLS") or False
    MAIL_SSL = os.getenv("MAIL_SSL") or False
    MAILGUN_API_KEY = os.getenv("MAILGUN_API_KEY")
    MAILGUN_BASE_URL = os.getenv("MAILGUN_BASE_URL")
    LOG_FOLDER = os.getenv('LOG_FOLDER') or os.path.join(os.path.dirname(os.path.abspath(__file__)), 'logs')
    # File upload storage; AWS settings only exist for the s3 provider.
    UPLOAD_PROVIDER = os.getenv('UPLOAD_PROVIDER') or 'filesystem'
    UPLOAD_FOLDER = os.getenv('UPLOAD_FOLDER') or os.path.join(os.path.dirname(os.path.abspath(__file__)), 'uploads')
    if UPLOAD_PROVIDER == 's3':
        AWS_ACCESS_KEY_ID = os.getenv('AWS_ACCESS_KEY_ID')
        AWS_SECRET_ACCESS_KEY = os.getenv('AWS_SECRET_ACCESS_KEY')
        AWS_S3_BUCKET = os.getenv('AWS_S3_BUCKET')
        AWS_S3_ENDPOINT_URL = os.getenv('AWS_S3_ENDPOINT_URL')
    REVERSE_PROXY = os.getenv("REVERSE_PROXY") or False
    # Same inverted-truthiness caveat as SESSION_COOKIE_HTTPONLY above.
    TEMPLATES_AUTO_RELOAD = (not os.getenv("TEMPLATES_AUTO_RELOAD"))
    SQLALCHEMY_TRACK_MODIFICATIONS = (not os.getenv("SQLALCHEMY_TRACK_MODIFICATIONS"))
    UPDATE_CHECK = (not os.getenv("UPDATE_CHECK"))
    APPLICATION_ROOT = os.getenv('APPLICATION_ROOT') or '/'
    # MajorLeagueCyber OAuth integration credentials.
    OAUTH_CLIENT_ID = os.getenv("OAUTH_CLIENT_ID")
    OAUTH_CLIENT_SECRET = os.getenv("OAUTH_CLIENT_SECRET")
class TestingConfig(Config):
    """Overrides used by the test suite: fixed key, in-memory DB, simple cache."""
    SECRET_KEY = 'AAAAAAAAAAAAAAAAAAAA'
    PRESERVE_CONTEXT_ON_EXCEPTION = False
    TESTING = True
    DEBUG = True
    SQLALCHEMY_DATABASE_URI = os.getenv('TESTING_DATABASE_URL') or 'sqlite://'
    SERVER_NAME = 'localhost'
    UPDATE_CHECK = False
    REDIS_URL = None
    CACHE_TYPE = 'simple'
    SAFE_MODE = True
| true | true |
f71c1f9b0854ca663169a6e52dc7055a0f7125c4 | 1,718 | py | Python | player_service.py | yetanotherape/poker-player-vostok | c308a1b015900243817dc9be2707c72848d61f25 | [
"MIT"
] | null | null | null | player_service.py | yetanotherape/poker-player-vostok | c308a1b015900243817dc9be2707c72848d61f25 | [
"MIT"
] | null | null | null | player_service.py | yetanotherape/poker-player-vostok | c308a1b015900243817dc9be2707c72848d61f25 | [
"MIT"
] | null | null | null | import time
import cgi
import json
import BaseHTTPServer
import os
from player import Player
HOST_NAME = '0.0.0.0'  # bind on all interfaces
# Listen port: honour the PORT environment variable (e.g. set by the
# hosting platform), falling back to 9000.  ``os.environ.get`` replaces
# the Python-2-only ``dict.has_key`` and the fragile ``and/or`` idiom.
PORT_NUMBER = int(os.environ.get('PORT', 9000))
class PlayerService(BaseHTTPServer.BaseHTTPRequestHandler):
    """HTTP endpoint adapting poker-engine POST callbacks onto ``Player``.

    The engine POSTs form-encoded requests whose ``action`` field selects
    which hook to invoke (``bet_request``, ``showdown`` or ``version``);
    the optional ``game_state`` field carries the table state as JSON.
    Note: this module is Python 2 code (``BaseHTTPServer``, ``getheader``).
    """
    def do_POST(self):
        # Reply 200 unconditionally; the engine only inspects the body.
        self.send_response(200)
        self.send_header("Content-type", "application/json")
        self.end_headers()
        # Parse the request body according to its declared content type.
        ctype, pdict = cgi.parse_header(self.headers.getheader('content-type'))
        if ctype == 'multipart/form-data':
            postvars = cgi.parse_multipart(self.rfile, pdict)
        elif ctype == 'application/x-www-form-urlencoded':
            length = int(self.headers.getheader('content-length'))
            postvars = cgi.parse_qs(self.rfile.read(length), keep_blank_values=1)
        else:
            postvars = {}
        # A request without an 'action' field raises KeyError here -> 500.
        action = postvars['action'][0]
        if 'game_state' in postvars:
            game_state = json.loads(postvars['game_state'][0])
        else:
            game_state = {}
        response = ''
        # Dispatch to the Player implementation; only bet_request and
        # version produce a response body, showdown is fire-and-forget.
        if action == 'bet_request':
            response = Player().bet_request(game_state)
        elif action == 'showdown':
            Player().showdown(game_state)
        elif action == 'version':
            response = Player.VERSION
        self.wfile.write(response)
if __name__ == '__main__':
    # Serve forever on HOST_NAME:PORT_NUMBER until interrupted (Ctrl-C),
    # then close the listening socket cleanly.
    server_class = BaseHTTPServer.HTTPServer
    httpd = server_class((HOST_NAME, PORT_NUMBER), PlayerService)
    print time.asctime(), "Server Starts - %s:%s" % (HOST_NAME, PORT_NUMBER)
    try:
        httpd.serve_forever()
    except KeyboardInterrupt:
        pass  # normal shutdown path
    httpd.server_close()
    print time.asctime(), "Server Stops - %s:%s" % (HOST_NAME, PORT_NUMBER)
| 29.62069 | 81 | 0.632712 | import time
import cgi
import json
import BaseHTTPServer
import os
from player import Player
HOST_NAME = '0.0.0.0'
PORT_NUMBER = os.environ.has_key('PORT') and int(os.environ['PORT']) or 9000
class PlayerService(BaseHTTPServer.BaseHTTPRequestHandler):
def do_POST(self):
self.send_response(200)
self.send_header("Content-type", "application/json")
self.end_headers()
ctype, pdict = cgi.parse_header(self.headers.getheader('content-type'))
if ctype == 'multipart/form-data':
postvars = cgi.parse_multipart(self.rfile, pdict)
elif ctype == 'application/x-www-form-urlencoded':
length = int(self.headers.getheader('content-length'))
postvars = cgi.parse_qs(self.rfile.read(length), keep_blank_values=1)
else:
postvars = {}
action = postvars['action'][0]
if 'game_state' in postvars:
game_state = json.loads(postvars['game_state'][0])
else:
game_state = {}
response = ''
if action == 'bet_request':
response = Player().bet_request(game_state)
elif action == 'showdown':
Player().showdown(game_state)
elif action == 'version':
response = Player.VERSION
self.wfile.write(response)
if __name__ == '__main__':
server_class = BaseHTTPServer.HTTPServer
httpd = server_class((HOST_NAME, PORT_NUMBER), PlayerService)
print time.asctime(), "Server Starts - %s:%s" % (HOST_NAME, PORT_NUMBER)
try:
httpd.serve_forever()
except KeyboardInterrupt:
pass
httpd.server_close()
print time.asctime(), "Server Stops - %s:%s" % (HOST_NAME, PORT_NUMBER)
| false | true |
f71c20b62ca77df8f80354e21bf08002ca39890b | 984 | py | Python | src/jsm/api/mixins/infos.py | charbonnierg/jetstream.py | 4d8dc56fc6953d0a28d207b9b162c6f8d0080d37 | [
"Apache-2.0"
] | 8 | 2021-07-26T10:54:10.000Z | 2021-12-06T08:41:02.000Z | src/jsm/api/mixins/infos.py | charbonnierg/jetstream.py | 4d8dc56fc6953d0a28d207b9b162c6f8d0080d37 | [
"Apache-2.0"
] | 3 | 2021-08-09T10:25:39.000Z | 2021-12-06T08:40:41.000Z | src/jsm/api/mixins/infos.py | charbonnierg/jetstream.py | 4d8dc56fc6953d0a28d207b9b162c6f8d0080d37 | [
"Apache-2.0"
] | 3 | 2021-08-22T01:55:11.000Z | 2021-09-13T13:51:42.000Z | # Copyright 2021 - Guillaume Charbonnier
# Licensed under the Apache License, Version 2.0 (the "License");
# http://www.apache.org/licenses/LICENSE-2.0
from __future__ import annotations
from typing import Optional, Union
from jsm.models.account_info import IoNatsJetstreamApiV1AccountInfoResponse
from jsm.models.errors import IoNatsJetstreamApiV1ErrorResponse
from .request_reply import BaseJetStreamRequestReplyMixin, JetStreamResponse
class AccountInfosMixin(BaseJetStreamRequestReplyMixin):
async def account_info(
self,
timeout: Optional[float] = None,
raise_on_error: Optional[bool] = None,
) -> Union[
IoNatsJetstreamApiV1AccountInfoResponse,
IoNatsJetstreamApiV1ErrorResponse,
]:
return await self._jetstream_request(
"INFO",
None,
JetStreamResponse[IoNatsJetstreamApiV1AccountInfoResponse],
raise_on_error=raise_on_error,
timeout=timeout,
)
| 32.8 | 76 | 0.729675 |
from __future__ import annotations
from typing import Optional, Union
from jsm.models.account_info import IoNatsJetstreamApiV1AccountInfoResponse
from jsm.models.errors import IoNatsJetstreamApiV1ErrorResponse
from .request_reply import BaseJetStreamRequestReplyMixin, JetStreamResponse
class AccountInfosMixin(BaseJetStreamRequestReplyMixin):
async def account_info(
self,
timeout: Optional[float] = None,
raise_on_error: Optional[bool] = None,
) -> Union[
IoNatsJetstreamApiV1AccountInfoResponse,
IoNatsJetstreamApiV1ErrorResponse,
]:
return await self._jetstream_request(
"INFO",
None,
JetStreamResponse[IoNatsJetstreamApiV1AccountInfoResponse],
raise_on_error=raise_on_error,
timeout=timeout,
)
| true | true |
f71c21777d66133ec1da30715a6556ddee5fd447 | 467 | py | Python | integration/keeper_secrets_manager_cli/keeper_secrets_manager_cli/exception.py | Keeper-Security/secrets-manager | 0044dec7f323ae2e531f52ef2435bd7205949fe9 | [
"MIT"
] | 9 | 2022-01-10T18:39:45.000Z | 2022-03-06T03:51:41.000Z | integration/keeper_secrets_manager_cli/keeper_secrets_manager_cli/exception.py | Keeper-Security/secrets-manager | 0044dec7f323ae2e531f52ef2435bd7205949fe9 | [
"MIT"
] | 10 | 2022-01-27T00:51:05.000Z | 2022-03-30T08:42:01.000Z | integration/keeper_secrets_manager_cli/keeper_secrets_manager_cli/exception.py | Keeper-Security/secrets-manager | 0044dec7f323ae2e531f52ef2435bd7205949fe9 | [
"MIT"
] | 6 | 2021-12-17T18:59:26.000Z | 2022-03-28T16:47:28.000Z | import click
from colorama import Fore, Style
class KsmCliException(click.ClickException):
in_a_shell = False
def colorize(self):
if KsmCliException.in_a_shell is False:
return str(self.message)
else:
return Fore.RED + str(self.message) + Style.RESET_ALL
def format_message(self):
return self.colorize()
def __str__(self):
return self.colorize()
class KsmRecordSyntaxException(Exception):
    """Raised when a record's field notation cannot be parsed.

    The original class did not inherit from ``Exception``, which makes it
    unraisable on Python 3; subclassing ``Exception`` is backward
    compatible for existing instantiations.
    """
| 19.458333 | 65 | 0.663812 | import click
from colorama import Fore, Style
class KsmCliException(click.ClickException):
in_a_shell = False
def colorize(self):
if KsmCliException.in_a_shell is False:
return str(self.message)
else:
return Fore.RED + str(self.message) + Style.RESET_ALL
def format_message(self):
return self.colorize()
def __str__(self):
return self.colorize()
class KsmRecordSyntaxException:
pass
| true | true |
f71c235a81782aee7af143ed9a0e3681bf47b496 | 5,934 | py | Python | simple_distillation_mgr/VLE_data_ethanol_water.py | ykholod/kettlebell-simple-distillation-mgr | 172900253be790f9fe7712ba5f20bcbb12a2a635 | [
"MIT"
] | null | null | null | simple_distillation_mgr/VLE_data_ethanol_water.py | ykholod/kettlebell-simple-distillation-mgr | 172900253be790f9fe7712ba5f20bcbb12a2a635 | [
"MIT"
] | null | null | null | simple_distillation_mgr/VLE_data_ethanol_water.py | ykholod/kettlebell-simple-distillation-mgr | 172900253be790f9fe7712ba5f20bcbb12a2a635 | [
"MIT"
] | null | null | null | #!/usr/bin/python
# -*- coding: utf-8 -*-
""" VLE data for ethanol-water mixture, isobaric, 1.01 bar """
__author__ = "Yaroslav Kholod"
__copyright__ = "Copyright 2019, The Kettlebell project"
__credits__ = "Yaroslav Kholod"
__license__ = "MIT"
__version__ = "0.1.0"
__maintainer__ = "Yaroslav Kholod"
__email__ = "pretorian.yaroslav@gmail.com"
__status__ = "Development"
# VLE dew-point curve: ethanol mol fraction (%) -> temperature (deg C),
# isobaric at 1.01 bar.  Temperatures are listed from 100 mol% ethanol
# down to 1 mol%, ten entries per row.
_DEW_POINT_TEMPS_C = (
    78.21, 78.18, 78.16, 78.14, 78.12, 78.23, 78.22, 78.21, 78.21, 78.06,   # 100-91
    78.05, 78.06, 78.07, 78.08, 78.09, 78.11, 78.13, 78.15, 78.17, 78.22,   # 90-81
    78.27, 78.32, 78.44, 78.49, 78.52, 78.60, 78.68, 78.77, 78.87, 78.95,   # 80-71
    79.05, 79.20, 79.37, 79.50, 79.72, 79.92, 80.10, 80.31, 80.52, 80.76,   # 70-61
    81.00, 81.26, 81.53, 81.80, 82.18, 82.55, 82.80, 83.04, 83.44, 83.88,   # 60-51
    84.27, 84.73, 85.02, 85.34, 85.69, 86.08, 86.31, 86.99, 87.30, 87.55,   # 50-41
    87.85, 88.15, 88.50, 88.87, 89.20, 89.70, 90.02, 90.34, 90.67, 91.00,   # 40-31
    91.50, 91.67, 91.84, 92.25, 92.60, 93.01, 93.25, 93.78, 94.01, 94.34,   # 30-21
    94.50, 94.89, 94.99, 95.27, 95.55, 95.82, 96.10, 96.38, 96.66, 97.25,   # 20-11
    97.50, 97.80, 98.00, 98.39, 98.79, 99.00, 99.33, 99.66, 99.80, 100.00,  # 10-1
)
# Mapping: mol% ethanol (100..1) -> dew-point temperature in deg C.
vle_data_dew = dict(zip(range(100, 0, -1), _DEW_POINT_TEMPS_C))
# VLE bubble-point curve: ethanol mol fraction (%) -> temperature (deg C),
# isobaric at 1.01 bar.  Temperatures are listed from 100 mol% ethanol
# down to 1 mol%, ten entries per row.
# NOTE(review): a few entries break monotonicity (25 -> 82.18, 20 -> 83.04,
# 15 -> 84.27, 10 -> 86.30); they are preserved verbatim from the source
# table but look like transcription errors -- verify against the reference.
_BUBBLE_POINT_TEMPS_C = (
    78.21, 78.19, 78.16, 78.14, 78.12, 78.14, 78.16, 78.17, 78.18, 78.19,  # 100-91
    78.20, 78.21, 78.22, 78.23, 78.24, 78.26, 78.28, 78.30, 78.32, 78.32,  # 90-81
    78.37, 78.40, 78.44, 78.47, 78.51, 78.54, 78.58, 78.63, 78.67, 78.72,  # 80-71
    78.76, 78.81, 78.86, 78.92, 78.97, 79.03, 79.09, 79.15, 79.21, 79.27,  # 70-61
    79.34, 79.40, 79.47, 79.54, 79.61, 79.68, 79.76, 79.83, 79.91, 79.99,  # 60-51
    80.07, 80.15, 80.24, 80.32, 80.41, 80.50, 80.59, 80.68, 80.78, 80.88,  # 50-41
    80.98, 81.08, 81.18, 81.29, 81.40, 81.51, 81.62, 81.74, 81.86, 81.99,  # 40-31
    82.12, 82.26, 82.39, 82.54, 82.69, 82.18, 83.01, 83.19, 83.37, 83.56,  # 30-21
    83.04, 83.98, 84.21, 84.46, 84.73, 84.27, 85.34, 85.69, 86.08, 86.51,  # 20-11
    86.30, 87.53, 88.15, 88.87, 89.20, 90.67, 91.50, 93.01, 94.89, 97.25,  # 10-1
)
# Mapping: mol% ethanol (100..1) -> bubble-point temperature in deg C.
vle_data_bubble = dict(zip(range(100, 0, -1), _BUBBLE_POINT_TEMPS_C))
| 26.373333 | 62 | 0.731378 |
__author__ = "Yaroslav Kholod"
__copyright__ = "Copyright 2019, The Kettlebell project"
__credits__ = "Yaroslav Kholod"
__license__ = "MIT"
__version__ = "0.1.0"
__maintainer__ = "Yaroslav Kholod"
__email__ = "pretorian.yaroslav@gmail.com"
__status__ = "Development"
vle_data_dew = {}
0] = 78.21
vle_data_dew[99] = 78.18
vle_data_dew[98] = 78.16
vle_data_dew[97] = 78.14
vle_data_dew[96] = 78.12
vle_data_dew[95] = 78.23
vle_data_dew[94] = 78.22
vle_data_dew[93] = 78.21
vle_data_dew[92] = 78.21
vle_data_dew[91] = 78.06
vle_data_dew[90] = 78.05
vle_data_dew[89] = 78.06
vle_data_dew[88] = 78.07
vle_data_dew[87] = 78.08
vle_data_dew[86] = 78.09
vle_data_dew[85] = 78.11
vle_data_dew[84] = 78.13
vle_data_dew[83] = 78.15
vle_data_dew[82] = 78.17
vle_data_dew[81] = 78.22
vle_data_dew[80] = 78.27
vle_data_dew[79] = 78.32
vle_data_dew[78] = 78.44
vle_data_dew[77] = 78.49
vle_data_dew[76] = 78.52
vle_data_dew[75] = 78.60
vle_data_dew[74] = 78.68
vle_data_dew[73] = 78.77
vle_data_dew[72] = 78.87
vle_data_dew[71] = 78.95
vle_data_dew[70] = 79.05
vle_data_dew[69] = 79.20
vle_data_dew[68] = 79.37
vle_data_dew[67] = 79.50
vle_data_dew[66] = 79.72
vle_data_dew[65] = 79.92
vle_data_dew[64] = 80.10
vle_data_dew[63] = 80.31
vle_data_dew[62] = 80.52
vle_data_dew[61] = 80.76
vle_data_dew[60] = 81.00
vle_data_dew[59] = 81.26
vle_data_dew[58] = 81.53
vle_data_dew[57] = 81.80
vle_data_dew[56] = 82.18
vle_data_dew[55] = 82.55
vle_data_dew[54] = 82.80
vle_data_dew[53] = 83.04
vle_data_dew[52] = 83.44
vle_data_dew[51] = 83.88
vle_data_dew[50] = 84.27
vle_data_dew[49] = 84.73
vle_data_dew[48] = 85.02
vle_data_dew[47] = 85.34
vle_data_dew[46] = 85.69
vle_data_dew[45] = 86.08
vle_data_dew[44] = 86.31
vle_data_dew[43] = 86.99
vle_data_dew[42] = 87.30
vle_data_dew[41] = 87.55
vle_data_dew[40] = 87.85
vle_data_dew[39] = 88.15
vle_data_dew[38] = 88.50
vle_data_dew[37] = 88.87
vle_data_dew[36] = 89.20
vle_data_dew[35] = 89.70
vle_data_dew[34] = 90.02
vle_data_dew[33] = 90.34
vle_data_dew[32] = 90.67
vle_data_dew[31] = 91.00
vle_data_dew[30] = 91.50
vle_data_dew[29] = 91.67
vle_data_dew[28] = 91.84
vle_data_dew[27] = 92.25
vle_data_dew[26] = 92.60
vle_data_dew[25] = 93.01
vle_data_dew[24] = 93.25
vle_data_dew[23] = 93.78
vle_data_dew[22] = 94.01
vle_data_dew[21] = 94.34
vle_data_dew[20] = 94.50
vle_data_dew[19] = 94.89
vle_data_dew[18] = 94.99
vle_data_dew[17] = 95.27
vle_data_dew[16] = 95.55
vle_data_dew[15] = 95.82
vle_data_dew[14] = 96.10
vle_data_dew[13] = 96.38
vle_data_dew[12] = 96.66
vle_data_dew[11] = 97.25
vle_data_dew[10] = 97.50
vle_data_dew[9] = 97.80
vle_data_dew[8] = 98.00
vle_data_dew[7] = 98.39
vle_data_dew[6] = 98.79
vle_data_dew[5] = 99.00
vle_data_dew[4] = 99.33
vle_data_dew[3] = 99.66
vle_data_dew[2] = 99.80
vle_data_dew[1] = 100.00
vle_data_bubble = {}
[100] = 78.21
vle_data_bubble[99] = 78.19
vle_data_bubble[98] = 78.16
vle_data_bubble[97] = 78.14
vle_data_bubble[96] = 78.12
vle_data_bubble[95] = 78.14
vle_data_bubble[94] = 78.16
vle_data_bubble[93] = 78.17
vle_data_bubble[92] = 78.18
vle_data_bubble[91] = 78.19
vle_data_bubble[90] = 78.20
vle_data_bubble[89] = 78.21
vle_data_bubble[88] = 78.22
vle_data_bubble[87] = 78.23
vle_data_bubble[86] = 78.24
vle_data_bubble[85] = 78.26
vle_data_bubble[84] = 78.28
vle_data_bubble[83] = 78.30
vle_data_bubble[82] = 78.32
vle_data_bubble[81] = 78.32
vle_data_bubble[80] = 78.37
vle_data_bubble[79] = 78.40
vle_data_bubble[78] = 78.44
vle_data_bubble[77] = 78.47
vle_data_bubble[76] = 78.51
vle_data_bubble[75] = 78.54
vle_data_bubble[74] = 78.58
vle_data_bubble[73] = 78.63
vle_data_bubble[72] = 78.67
vle_data_bubble[71] = 78.72
vle_data_bubble[70] = 78.76
vle_data_bubble[69] = 78.81
vle_data_bubble[68] = 78.86
vle_data_bubble[67] = 78.92
vle_data_bubble[66] = 78.97
vle_data_bubble[65] = 79.03
vle_data_bubble[64] = 79.09
vle_data_bubble[63] = 79.15
vle_data_bubble[62] = 79.21
vle_data_bubble[61] = 79.27
vle_data_bubble[60] = 79.34
vle_data_bubble[59] = 79.40
vle_data_bubble[58] = 79.47
vle_data_bubble[57] = 79.54
vle_data_bubble[56] = 79.61
vle_data_bubble[55] = 79.68
vle_data_bubble[54] = 79.76
vle_data_bubble[53] = 79.83
vle_data_bubble[52] = 79.91
vle_data_bubble[51] = 79.99
vle_data_bubble[50] = 80.07
vle_data_bubble[49] = 80.15
vle_data_bubble[48] = 80.24
vle_data_bubble[47] = 80.32
vle_data_bubble[46] = 80.41
vle_data_bubble[45] = 80.50
vle_data_bubble[44] = 80.59
vle_data_bubble[43] = 80.68
vle_data_bubble[42] = 80.78
vle_data_bubble[41] = 80.88
vle_data_bubble[40] = 80.98
vle_data_bubble[39] = 81.08
vle_data_bubble[38] = 81.18
vle_data_bubble[37] = 81.29
vle_data_bubble[36] = 81.40
vle_data_bubble[35] = 81.51
vle_data_bubble[34] = 81.62
vle_data_bubble[33] = 81.74
vle_data_bubble[32] = 81.86
vle_data_bubble[31] = 81.99
vle_data_bubble[30] = 82.12
vle_data_bubble[29] = 82.26
vle_data_bubble[28] = 82.39
vle_data_bubble[27] = 82.54
vle_data_bubble[26] = 82.69
vle_data_bubble[25] = 82.18
vle_data_bubble[24] = 83.01
vle_data_bubble[23] = 83.19
vle_data_bubble[22] = 83.37
vle_data_bubble[21] = 83.56
vle_data_bubble[20] = 83.04
vle_data_bubble[19] = 83.98
vle_data_bubble[18] = 84.21
vle_data_bubble[17] = 84.46
vle_data_bubble[16] = 84.73
vle_data_bubble[15] = 84.27
vle_data_bubble[14] = 85.34
vle_data_bubble[13] = 85.69
vle_data_bubble[12] = 86.08
vle_data_bubble[11] = 86.51
vle_data_bubble[10] = 86.30
vle_data_bubble[9] = 87.53
vle_data_bubble[8] = 88.15
vle_data_bubble[7] = 88.87
vle_data_bubble[6] = 89.20
vle_data_bubble[5] = 90.67
vle_data_bubble[4] = 91.50
vle_data_bubble[3] = 93.01
vle_data_bubble[2] = 94.89
vle_data_bubble[1] = 97.25
| true | true |
f71c249886014075a96d3e57f520b3963e70bc00 | 675 | py | Python | pytest/testSigning.py | RomanValov/ArmoryDB | 625eff9712161676ad83deb03616e6edb48283ca | [
"MIT"
] | 505 | 2016-02-04T15:54:46.000Z | 2022-03-27T18:43:01.000Z | pytest/testSigning.py | jimmysong/BitcoinArmory | 1c7190176897a2e0f3e4e198ab2f199059bb2402 | [
"MIT"
] | 528 | 2016-02-06T19:50:12.000Z | 2022-01-15T10:21:16.000Z | pytest/testSigning.py | jimmysong/BitcoinArmory | 1c7190176897a2e0f3e4e198ab2f199059bb2402 | [
"MIT"
] | 208 | 2015-01-02T10:31:40.000Z | 2021-12-14T07:37:36.000Z | import sys
sys.path.append('..')
import unittest
import random
from armoryengine.ALL import *
class SigningTester(unittest.TestCase):
def testLowSig(self):
sbdPrivKey = SecureBinaryData(b'\x01'*32)
pub = CryptoECDSA().ComputePublicKey(sbdPrivKey).toBinStr()
for i in range(100):
msg = "some random msg %s" % random.random()
sbdSig = CryptoECDSA().SignData(SecureBinaryData(msg), sbdPrivKey, False)
binSig = sbdSig.toBinStr()
derSig = createDERSigFromRS(binSig[:32], binSig[32:])
r, s = getRSFromDERSig(derSig)
j = binary_to_int(s, BIGENDIAN)
self.assertTrue( j <= SECP256K1_ORDER / 2)
| 28.125 | 82 | 0.657778 | import sys
sys.path.append('..')
import unittest
import random
from armoryengine.ALL import *
class SigningTester(unittest.TestCase):
def testLowSig(self):
sbdPrivKey = SecureBinaryData(b'\x01'*32)
pub = CryptoECDSA().ComputePublicKey(sbdPrivKey).toBinStr()
for i in range(100):
msg = "some random msg %s" % random.random()
sbdSig = CryptoECDSA().SignData(SecureBinaryData(msg), sbdPrivKey, False)
binSig = sbdSig.toBinStr()
derSig = createDERSigFromRS(binSig[:32], binSig[32:])
r, s = getRSFromDERSig(derSig)
j = binary_to_int(s, BIGENDIAN)
self.assertTrue( j <= SECP256K1_ORDER / 2)
| true | true |
f71c26869793672d4719326ffb01b5d7f0f78eb2 | 12,414 | py | Python | texar/torch/modules/decoders/rnn_decoders_test.py | wwt17/texar-pytorch | 9fb3ae8f7b541da5c808357033a93fba1817bfbd | [
"Apache-2.0"
] | 19 | 2020-07-29T15:25:45.000Z | 2022-01-19T17:49:42.000Z | texar/torch/modules/decoders/rnn_decoders_test.py | wwt17/texar-pytorch | 9fb3ae8f7b541da5c808357033a93fba1817bfbd | [
"Apache-2.0"
] | 3 | 2021-02-16T10:26:23.000Z | 2021-06-08T16:50:40.000Z | texar/torch/modules/decoders/rnn_decoders_test.py | wwt17/texar-pytorch | 9fb3ae8f7b541da5c808357033a93fba1817bfbd | [
"Apache-2.0"
] | 1 | 2019-10-06T07:54:30.000Z | 2019-10-06T07:54:30.000Z | """
Unit tests for RNN decoders.
"""
import unittest
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
from texar.torch.hyperparams import HParams
from texar.torch.modules.decoders.decoder_helpers import get_helper
from texar.torch.modules.decoders.rnn_decoders import (
AttentionRNNDecoder, AttentionRNNDecoderOutput, BasicRNNDecoder,
BasicRNNDecoderOutput)
from texar.torch.modules.embedders.embedders import WordEmbedder
from texar.torch.utils.utils import map_structure
class BasicRNNDecoderTest(unittest.TestCase):
    r"""Tests :class:`~texar.torch.modules.decoders.rnn_decoders.BasicRNNDecoder`.
    """
    def setUp(self):
        # Small vocabulary / batch so each test runs quickly on CPU.
        self._vocab_size = 4
        self._max_time = 8
        self._batch_size = 16
        self._emb_dim = 20
        self._inputs = torch.randint(
            self._vocab_size, size=(self._batch_size, self._max_time))
        embedding = torch.rand(
            self._vocab_size, self._emb_dim, dtype=torch.float)
        self._embedder = WordEmbedder(init_value=embedding)
        self._hparams = HParams(None, BasicRNNDecoder.default_hparams())
    def _test_outputs(self, decoder, outputs, final_state, sequence_lengths,
                      test_mode=False):
        r"""Checks output/state shapes shared by every decoding test.

        In train mode all sequences must span the full ``max_time``; in
        test (inference) mode lengths may vary, so the logits' time
        dimension is the longest decoded sequence instead.
        """
        hidden_size = decoder.hparams.rnn_cell.kwargs.num_units
        self.assertIsInstance(outputs, BasicRNNDecoderOutput)
        max_time = (self._max_time if not test_mode
                    else max(sequence_lengths).item())
        self.assertEqual(
            outputs.logits.shape,
            (self._batch_size, max_time, self._vocab_size))
        if not test_mode:
            np.testing.assert_array_equal(
                sequence_lengths, [max_time] * self._batch_size)
        self.assertEqual(final_state[0].shape, (self._batch_size, hidden_size))
    def test_decode_train(self):
        r"""Tests decoding in training mode.

        Exercises every way of supplying a helper: default, by decoding
        strategy name, implicitly, and via forward arguments.
        """
        decoder = BasicRNNDecoder(
            token_embedder=self._embedder, input_size=self._emb_dim,
            vocab_size=self._vocab_size, hparams=self._hparams)
        sequence_length = torch.tensor([self._max_time] * self._batch_size)
        # Helper by default HParams
        helper_train = decoder.create_helper()
        outputs, final_state, sequence_lengths = decoder(
            helper=helper_train, inputs=self._inputs,
            sequence_length=sequence_length)
        self._test_outputs(decoder, outputs, final_state, sequence_lengths)
        # Helper by decoding strategy
        helper_train = decoder.create_helper(decoding_strategy='train_greedy')
        outputs, final_state, sequence_lengths = decoder(
            helper=helper_train, inputs=self._inputs,
            sequence_length=sequence_length)
        self._test_outputs(decoder, outputs, final_state, sequence_lengths)
        # Implicit helper
        outputs, final_state, sequence_lengths = decoder(
            inputs=self._inputs, sequence_length=sequence_length)
        self._test_outputs(decoder, outputs, final_state, sequence_lengths)
        # Eval helper through forward args (inference-style decoding)
        outputs, final_state, sequence_lengths = decoder(
            embedding=self._embedder,
            start_tokens=torch.tensor([1] * self._batch_size),
            end_token=2, infer_mode=True)
        self._test_outputs(
            decoder, outputs, final_state, sequence_lengths, test_mode=True)
    @staticmethod
    def _assert_tensor_equal(a: torch.Tensor, b: torch.Tensor) -> None:
        r"""Asserts two tensors/arrays are element-wise equal.

        Float inputs are compared with tolerances, integer inputs must
        match exactly.  Raises ``AssertionError`` on mismatch; the
        ``np.testing`` helpers always return ``None``.
        """
        if torch.is_tensor(a):
            a = a.detach().numpy()
        if torch.is_tensor(b):
            b = b.detach().numpy()
        if any(np.issubdtype(array.dtype, np.floating) for array in [a, b]):
            return np.testing.assert_allclose(a, b, rtol=1e-5, atol=1e-8)
        return np.testing.assert_array_equal(a, b)
    def test_decode_train_with_torch(self):
        r"""Compares decoding results with PyTorch built-in decoder.

        Copies the decoder cell's parameters into a ``nn.LSTM`` and
        checks that logits, greedy sample ids, final states and lengths
        all agree.
        """
        decoder = BasicRNNDecoder(
            token_embedder=self._embedder, input_size=self._emb_dim,
            vocab_size=self._vocab_size, hparams=self._hparams)
        input_size = self._emb_dim
        hidden_size = decoder.hparams.rnn_cell.kwargs.num_units
        num_layers = decoder.hparams.rnn_cell.num_layers
        torch_lstm = nn.LSTM(input_size, hidden_size, num_layers,
                             batch_first=True)
        # match parameters: share the decoder cell's weights with nn.LSTM
        for name in ['weight_ih', 'weight_hh', 'bias_ih', 'bias_hh']:
            setattr(torch_lstm, f'{name}_l0',
                    getattr(decoder._cell._cell, name))
        torch_lstm.flatten_parameters()
        output_layer = decoder._output_layer
        input_lengths = torch.tensor([self._max_time] * self._batch_size)
        inputs = torch.randint(
            self._vocab_size, size=(self._batch_size, self._max_time))
        # decoder outputs (teacher-forced, default helper)
        helper_train = decoder.create_helper()
        outputs, final_state, sequence_lengths = decoder(
            inputs=inputs,
            sequence_length=input_lengths,
            helper=helper_train)
        # torch LSTM outputs on the same embedded inputs
        lstm_inputs = F.embedding(inputs, self._embedder.embedding)
        torch_outputs, torch_states = torch_lstm(lstm_inputs)
        torch_outputs = output_layer(torch_outputs)
        torch_sample_id = torch.argmax(torch_outputs, dim=-1)
        self.assertEqual(final_state[0].shape,
                         (self._batch_size, hidden_size))
        self._assert_tensor_equal(outputs.logits, torch_outputs)
        self._assert_tensor_equal(outputs.sample_id, torch_sample_id)
        self._assert_tensor_equal(final_state[0], torch_states[0].squeeze(0))
        self._assert_tensor_equal(final_state[1], torch_states[1].squeeze(0))
        self._assert_tensor_equal(sequence_lengths, input_lengths)
    def test_decode_infer(self):
        r"""Tests decoding in inference mode with several helper types."""
        decoder = BasicRNNDecoder(
            token_embedder=self._embedder, input_size=self._emb_dim,
            vocab_size=self._vocab_size, hparams=self._hparams)
        decoder.eval()
        start_tokens = torch.tensor([self._vocab_size - 2] * self._batch_size)
        helpers = []
        # Strategy-named helpers.
        for strategy in ['infer_greedy', 'infer_sample']:
            helper = decoder.create_helper(
                decoding_strategy=strategy,
                start_tokens=start_tokens,
                end_token=self._vocab_size - 1)
            helpers.append(helper)
        # Helpers constructed directly by class name; extra kwargs
        # (top_k, tau, straight_through) are consumed by whichever
        # helper accepts them.
        for klass in ['TopKSampleEmbeddingHelper', 'SoftmaxEmbeddingHelper',
                      'GumbelSoftmaxEmbeddingHelper']:
            helper = get_helper(
                klass, start_tokens=start_tokens,
                end_token=self._vocab_size - 1,
                top_k=self._vocab_size // 2, tau=2.0,
                straight_through=True)
            helpers.append(helper)
        for helper in helpers:
            max_length = 100
            outputs, final_state, sequence_lengths = decoder(
                helper=helper, max_decoding_length=max_length)
            self.assertLessEqual(max(sequence_lengths), max_length)
            self._test_outputs(decoder, outputs, final_state, sequence_lengths,
                               test_mode=True)
class AttentionRNNDecoderTest(unittest.TestCase):
    r"""Tests :class:`~texar.torch.modules.decoders.rnn_decoders.AttentionRNNDecoder`.
    """
    def setUp(self):
        self._vocab_size = 10
        self._max_time = 16
        self._batch_size = 8
        self._emb_dim = 20
        self._attention_dim = 256
        self._inputs = torch.randint(
            self._vocab_size, size=(self._batch_size, self._max_time))
        embedding = torch.rand(
            self._vocab_size, self._emb_dim, dtype=torch.float)
        self._embedder = WordEmbedder(init_value=embedding)
        # Fake encoder memory with feature size 64 to attend over.
        self._encoder_output = torch.rand(
            self._batch_size, self._max_time, 64)
        # Build one hparams config per cell type, single-layer each,
        # plus one multi-layer LSTM config added below.
        self._test_hparams = {}  # (cell_type, is_multi) -> hparams
        for cell_type in ["RNNCell", "LSTMCell", "GRUCell"]:
            hparams = {
                "rnn_cell": {
                    'type': cell_type,
                    'kwargs': {
                        'num_units': 256,
                    },
                },
                "attention": {
                    "kwargs": {
                        "num_units": self._attention_dim
                    },
                }
            }
            self._test_hparams[(cell_type, False)] = HParams(
                hparams, AttentionRNNDecoder.default_hparams())
        hparams = {
            "rnn_cell": {
                'type': 'LSTMCell',
                'kwargs': {
                    'num_units': 256,
                },
                'num_layers': 3,
            },
            "attention": {
                "kwargs": {
                    "num_units": self._attention_dim
                },
            }
        }
        self._test_hparams[("LSTMCell", True)] = HParams(
            hparams, AttentionRNNDecoder.default_hparams())
    def _test_outputs(self, decoder, outputs, final_state, sequence_lengths,
                      test_mode=False):
        r"""Checks output shapes and the nesting of the final cell state.

        The cell state nesting depends on the configuration: a list per
        layer when multi-layer, an (h, c) tuple for LSTM cells, and a
        bare tensor otherwise; the checks below peel off each level.
        """
        hidden_size = decoder.hparams.rnn_cell.kwargs.num_units
        cell_type = decoder.hparams.rnn_cell.type
        is_multi = decoder.hparams.rnn_cell.num_layers > 1
        self.assertIsInstance(outputs, AttentionRNNDecoderOutput)
        max_time = (self._max_time if not test_mode
                    else max(sequence_lengths).item())
        self.assertEqual(
            outputs.logits.shape,
            (self._batch_size, max_time, self._vocab_size))
        if not test_mode:
            np.testing.assert_array_equal(
                sequence_lengths, [max_time] * self._batch_size)
        # Every leaf tensor in the (possibly nested) cell state must be
        # (batch_size, hidden_size).
        map_structure(
            lambda t: self.assertEqual(
                t.size(), (self._batch_size, hidden_size)),
            final_state.cell_state)
        state = final_state.cell_state
        if is_multi:
            self.assertIsInstance(state, list)
            state = state[0]
        if cell_type == "LSTMCell":
            self.assertIsInstance(state, tuple)
            state = state[0]
        self.assertIsInstance(state, torch.Tensor)
    def test_decode_infer(self):
        r"""Tests decoding in inference mode.

        Runs every (cell type, multi-layer) configuration with random
        per-example memory lengths (at least 1).
        """
        seq_length = np.random.randint(
            self._max_time, size=[self._batch_size]) + 1
        encoder_values_length = torch.tensor(seq_length)
        for (cell_type, is_multi), hparams in self._test_hparams.items():
            decoder = AttentionRNNDecoder(
                encoder_output_size=64,
                token_embedder=self._embedder,
                vocab_size=self._vocab_size,
                input_size=self._emb_dim,
                hparams=hparams)
            decoder.eval()
            helper_infer = decoder.create_helper(
                start_tokens=torch.tensor([1] * self._batch_size), end_token=2)
            outputs, final_state, sequence_lengths = decoder(
                memory=self._encoder_output,
                memory_sequence_length=encoder_values_length,
                helper=helper_infer)
            self._test_outputs(decoder, outputs, final_state, sequence_lengths,
                               test_mode=True)
    def test_decode_train(self):
        r"""Tests decoding in training mode (teacher forcing).
        """
        seq_length = np.random.randint(
            self._max_time, size=[self._batch_size]) + 1
        encoder_values_length = torch.tensor(seq_length)
        for (cell_type, is_multi), hparams in self._test_hparams.items():
            decoder = AttentionRNNDecoder(
                encoder_output_size=64,
                token_embedder=self._embedder,
                vocab_size=self._vocab_size,
                input_size=self._emb_dim,
                hparams=hparams)
            sequence_length = torch.tensor([self._max_time] * self._batch_size)
            helper_train = decoder.create_helper()
            outputs, final_state, sequence_lengths = decoder(
                memory=self._encoder_output,
                memory_sequence_length=encoder_values_length,
                helper=helper_train,
                inputs=self._inputs,
                sequence_length=sequence_length)
            self._test_outputs(decoder, outputs, final_state, sequence_lengths)
if __name__ == "__main__":
    unittest.main()  # allow running this test module directly
| 39.160883 | 86 | 0.617931 |
import unittest
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
from texar.torch.hyperparams import HParams
from texar.torch.modules.decoders.decoder_helpers import get_helper
from texar.torch.modules.decoders.rnn_decoders import (
AttentionRNNDecoder, AttentionRNNDecoderOutput, BasicRNNDecoder,
BasicRNNDecoderOutput)
from texar.torch.modules.embedders.embedders import WordEmbedder
from texar.torch.utils.utils import map_structure
class BasicRNNDecoderTest(unittest.TestCase):
    """Unit tests for ``BasicRNNDecoder``."""
    def setUp(self):
        # Small fixed test dimensions, random token inputs, and a random
        # embedding table shared by all test cases.
        self._vocab_size = 4
        self._max_time = 8
        self._batch_size = 16
        self._emb_dim = 20
        self._inputs = torch.randint(
            self._vocab_size, size=(self._batch_size, self._max_time))
        embedding = torch.rand(
            self._vocab_size, self._emb_dim, dtype=torch.float)
        self._embedder = WordEmbedder(init_value=embedding)
        self._hparams = HParams(None, BasicRNNDecoder.default_hparams())
    def _test_outputs(self, decoder, outputs, final_state, sequence_lengths,
                      test_mode=False):
        """Checks output type, logit shape, lengths, and final-state shape.
        In ``test_mode`` (inference) the decoded length is data-dependent,
        so the longest produced sequence is used for the shape check.
        """
        hidden_size = decoder.hparams.rnn_cell.kwargs.num_units
        self.assertIsInstance(outputs, BasicRNNDecoderOutput)
        max_time = (self._max_time if not test_mode
                    else max(sequence_lengths).item())
        self.assertEqual(
            outputs.logits.shape,
            (self._batch_size, max_time, self._vocab_size))
        if not test_mode:
            np.testing.assert_array_equal(
                sequence_lengths, [max_time] * self._batch_size)
        self.assertEqual(final_state[0].shape, (self._batch_size, hidden_size))
    def test_decode_train(self):
        """Tests decoding in training mode under several calling styles."""
        decoder = BasicRNNDecoder(
            token_embedder=self._embedder, input_size=self._emb_dim,
            vocab_size=self._vocab_size, hparams=self._hparams)
        sequence_length = torch.tensor([self._max_time] * self._batch_size)
        # 1) Default helper created by the decoder.
        helper_train = decoder.create_helper()
        outputs, final_state, sequence_lengths = decoder(
            helper=helper_train, inputs=self._inputs,
            sequence_length=sequence_length)
        self._test_outputs(decoder, outputs, final_state, sequence_lengths)
        # 2) Explicit 'train_greedy' decoding strategy.
        helper_train = decoder.create_helper(decoding_strategy='train_greedy')
        outputs, final_state, sequence_lengths = decoder(
            helper=helper_train, inputs=self._inputs,
            sequence_length=sequence_length)
        self._test_outputs(decoder, outputs, final_state, sequence_lengths)
        # 3) No helper given; the decoder builds its default one.
        outputs, final_state, sequence_lengths = decoder(
            inputs=self._inputs, sequence_length=sequence_length)
        self._test_outputs(decoder, outputs, final_state, sequence_lengths)
        # 4) Inference mode with explicit start/end tokens.
        outputs, final_state, sequence_lengths = decoder(
            embedding=self._embedder,
            start_tokens=torch.tensor([1] * self._batch_size),
            end_token=2, infer_mode=True)
        self._test_outputs(
            decoder, outputs, final_state, sequence_lengths, test_mode=True)
    @staticmethod
    def _assert_tensor_equal(a: torch.Tensor, b: torch.Tensor) -> bool:
        """Asserts two tensors/arrays are equal (approximately for floats)."""
        if torch.is_tensor(a):
            a = a.detach().numpy()
        if torch.is_tensor(b):
            b = b.detach().numpy()
        if any(np.issubdtype(array.dtype, np.floating) for array in [a, b]):
            return np.testing.assert_allclose(a, b, rtol=1e-5, atol=1e-8)
        return np.testing.assert_array_equal(a, b)
    def test_decode_train_with_torch(self):
        """Compares decoder results against an equivalent plain ``nn.LSTM``."""
        decoder = BasicRNNDecoder(
            token_embedder=self._embedder, input_size=self._emb_dim,
            vocab_size=self._vocab_size, hparams=self._hparams)
        input_size = self._emb_dim
        hidden_size = decoder.hparams.rnn_cell.kwargs.num_units
        num_layers = decoder.hparams.rnn_cell.num_layers
        torch_lstm = nn.LSTM(input_size, hidden_size, num_layers,
                             batch_first=True)
        # Share the decoder cell's weights with the plain PyTorch LSTM so
        # both modules compute identical transformations.
        for name in ['weight_ih', 'weight_hh', 'bias_ih', 'bias_hh']:
            setattr(torch_lstm, f'{name}_l0',
                    getattr(decoder._cell._cell, name))
        torch_lstm.flatten_parameters()
        output_layer = decoder._output_layer
        input_lengths = torch.tensor([self._max_time] * self._batch_size)
        inputs = torch.randint(
            self._vocab_size, size=(self._batch_size, self._max_time))
        helper_train = decoder.create_helper()
        outputs, final_state, sequence_lengths = decoder(
            inputs=inputs,
            sequence_length=input_lengths,
            helper=helper_train)
        # Reference computation: embed, run LSTM, project, then argmax.
        lstm_inputs = F.embedding(inputs, self._embedder.embedding)
        torch_outputs, torch_states = torch_lstm(lstm_inputs)
        torch_outputs = output_layer(torch_outputs)
        torch_sample_id = torch.argmax(torch_outputs, dim=-1)
        self.assertEqual(final_state[0].shape,
                         (self._batch_size, hidden_size))
        self._assert_tensor_equal(outputs.logits, torch_outputs)
        self._assert_tensor_equal(outputs.sample_id, torch_sample_id)
        self._assert_tensor_equal(final_state[0], torch_states[0].squeeze(0))
        self._assert_tensor_equal(final_state[1], torch_states[1].squeeze(0))
        self._assert_tensor_equal(sequence_lengths, input_lengths)
    def test_decode_infer(self):
        """Tests decoding in inference mode with a variety of helpers."""
        decoder = BasicRNNDecoder(
            token_embedder=self._embedder, input_size=self._emb_dim,
            vocab_size=self._vocab_size, hparams=self._hparams)
        decoder.eval()
        start_tokens = torch.tensor([self._vocab_size - 2] * self._batch_size)
        helpers = []
        for strategy in ['infer_greedy', 'infer_sample']:
            helper = decoder.create_helper(
                decoding_strategy=strategy,
                start_tokens=start_tokens,
                end_token=self._vocab_size - 1)
            helpers.append(helper)
        for klass in ['TopKSampleEmbeddingHelper', 'SoftmaxEmbeddingHelper',
                      'GumbelSoftmaxEmbeddingHelper']:
            helper = get_helper(
                klass, start_tokens=start_tokens,
                end_token=self._vocab_size - 1,
                top_k=self._vocab_size // 2, tau=2.0,
                straight_through=True)
            helpers.append(helper)
        # Decoded sequences must never exceed the requested maximum length.
        for helper in helpers:
            max_length = 100
            outputs, final_state, sequence_lengths = decoder(
                helper=helper, max_decoding_length=max_length)
            self.assertLessEqual(max(sequence_lengths), max_length)
            self._test_outputs(decoder, outputs, final_state, sequence_lengths,
                               test_mode=True)
class AttentionRNNDecoderTest(unittest.TestCase):
    """Unit tests for ``AttentionRNNDecoder``."""
    def setUp(self):
        # Test dimensions, random token inputs, an embedding table, and a
        # fake encoder output the attention mechanism attends over.
        self._vocab_size = 10
        self._max_time = 16
        self._batch_size = 8
        self._emb_dim = 20
        self._attention_dim = 256
        self._inputs = torch.randint(
            self._vocab_size, size=(self._batch_size, self._max_time))
        embedding = torch.rand(
            self._vocab_size, self._emb_dim, dtype=torch.float)
        self._embedder = WordEmbedder(init_value=embedding)
        self._encoder_output = torch.rand(
            self._batch_size, self._max_time, 64)
        # Build hparams for each single-layer cell type plus one multi-layer
        # LSTM configuration, keyed by (cell_type, is_multi_layer).
        self._test_hparams = {}
        for cell_type in ["RNNCell", "LSTMCell", "GRUCell"]:
            hparams = {
                "rnn_cell": {
                    'type': cell_type,
                    'kwargs': {
                        'num_units': 256,
                    },
                },
                "attention": {
                    "kwargs": {
                        "num_units": self._attention_dim
                    },
                }
            }
            self._test_hparams[(cell_type, False)] = HParams(
                hparams, AttentionRNNDecoder.default_hparams())
        hparams = {
            "rnn_cell": {
                'type': 'LSTMCell',
                'kwargs': {
                    'num_units': 256,
                },
                'num_layers': 3,
            },
            "attention": {
                "kwargs": {
                    "num_units": self._attention_dim
                },
            }
        }
        self._test_hparams[("LSTMCell", True)] = HParams(
            hparams, AttentionRNNDecoder.default_hparams())
    def _test_outputs(self, decoder, outputs, final_state, sequence_lengths,
                      test_mode=False):
        """Checks output type and shapes for any cell configuration."""
        hidden_size = decoder.hparams.rnn_cell.kwargs.num_units
        cell_type = decoder.hparams.rnn_cell.type
        is_multi = decoder.hparams.rnn_cell.num_layers > 1
        self.assertIsInstance(outputs, AttentionRNNDecoderOutput)
        max_time = (self._max_time if not test_mode
                    else max(sequence_lengths).item())
        self.assertEqual(
            outputs.logits.shape,
            (self._batch_size, max_time, self._vocab_size))
        if not test_mode:
            np.testing.assert_array_equal(
                sequence_lengths, [max_time] * self._batch_size)
        # Every tensor in the (possibly nested) cell state has shape
        # (batch_size, hidden_size).
        map_structure(
            lambda t: self.assertEqual(
                t.size(), (self._batch_size, hidden_size)),
            final_state.cell_state)
        # Drill down to a plain tensor: multi-layer states are lists and
        # LSTM states are (h, c) tuples.
        state = final_state.cell_state
        if is_multi:
            self.assertIsInstance(state, list)
            state = state[0]
        if cell_type == "LSTMCell":
            self.assertIsInstance(state, tuple)
            state = state[0]
        self.assertIsInstance(state, torch.Tensor)
    def test_decode_infer(self):
        """Tests inference-mode decoding for every cell configuration."""
        seq_length = np.random.randint(
            self._max_time, size=[self._batch_size]) + 1
        encoder_values_length = torch.tensor(seq_length)
        for (cell_type, is_multi), hparams in self._test_hparams.items():
            decoder = AttentionRNNDecoder(
                encoder_output_size=64,
                token_embedder=self._embedder,
                vocab_size=self._vocab_size,
                input_size=self._emb_dim,
                hparams=hparams)
            decoder.eval()
            helper_infer = decoder.create_helper(
                start_tokens=torch.tensor([1] * self._batch_size), end_token=2)
            outputs, final_state, sequence_lengths = decoder(
                memory=self._encoder_output,
                memory_sequence_length=encoder_values_length,
                helper=helper_infer)
            self._test_outputs(decoder, outputs, final_state, sequence_lengths,
                               test_mode=True)
    def test_decode_train(self):
        """Tests training-mode decoding for every cell configuration."""
        seq_length = np.random.randint(
            self._max_time, size=[self._batch_size]) + 1
        encoder_values_length = torch.tensor(seq_length)
        for (cell_type, is_multi), hparams in self._test_hparams.items():
            decoder = AttentionRNNDecoder(
                encoder_output_size=64,
                token_embedder=self._embedder,
                vocab_size=self._vocab_size,
                input_size=self._emb_dim,
                hparams=hparams)
            sequence_length = torch.tensor([self._max_time] * self._batch_size)
            helper_train = decoder.create_helper()
            outputs, final_state, sequence_lengths = decoder(
                memory=self._encoder_output,
                memory_sequence_length=encoder_values_length,
                helper=helper_train,
                inputs=self._inputs,
                sequence_length=sequence_length)
            self._test_outputs(decoder, outputs, final_state, sequence_lengths)
# Allow running this test module directly.
if __name__ == "__main__":
    unittest.main()
| true | true |
f71c2776956637e7ed8b7da0a4acf3481ed4e4c7 | 3,001 | py | Python | huaweicloud-sdk-vss/huaweicloudsdkvss/v3/model/delete_domains_request.py | huaweicloud/huaweicloud-sdk-python-v3 | 7a6270390fcbf192b3882bf763e7016e6026ef78 | [
"Apache-2.0"
] | 64 | 2020-06-12T07:05:07.000Z | 2022-03-30T03:32:50.000Z | huaweicloud-sdk-vss/huaweicloudsdkvss/v3/model/delete_domains_request.py | huaweicloud/huaweicloud-sdk-python-v3 | 7a6270390fcbf192b3882bf763e7016e6026ef78 | [
"Apache-2.0"
] | 11 | 2020-07-06T07:56:54.000Z | 2022-01-11T11:14:40.000Z | huaweicloud-sdk-vss/huaweicloudsdkvss/v3/model/delete_domains_request.py | huaweicloud/huaweicloud-sdk-python-v3 | 7a6270390fcbf192b3882bf763e7016e6026ef78 | [
"Apache-2.0"
] | 24 | 2020-06-08T11:42:13.000Z | 2022-03-04T06:44:08.000Z | # coding: utf-8
import re
import six
from huaweicloudsdkcore.utils.http_utils import sanitize_for_serialization
class DeleteDomainsRequest:
    """Request model for deleting a domain.
    Attributes:
      openapi_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    sensitive_list = []
    openapi_types = {
        'domain_name': 'str'
    }
    attribute_map = {
        'domain_name': 'domain_name'
    }
    def __init__(self, domain_name=None):
        """DeleteDomainsRequest - a model defined in huaweicloud sdk"""
        self._domain_name = None
        self.discriminator = None
        self.domain_name = domain_name
    @property
    def domain_name(self):
        """Gets the domain_name of this DeleteDomainsRequest.
        Domain name.
        :return: The domain_name of this DeleteDomainsRequest.
        :rtype: str
        """
        return self._domain_name
    @domain_name.setter
    def domain_name(self, domain_name):
        """Sets the domain_name of this DeleteDomainsRequest.
        Domain name.
        :param domain_name: The domain_name of this DeleteDomainsRequest.
        :type: str
        """
        self._domain_name = domain_name
    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}
        for attr, _ in six.iteritems(self.openapi_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                # Mask values of fields marked as sensitive.
                if attr in self.sensitive_list:
                    result[attr] = "****"
                else:
                    result[attr] = value
        return result
    def to_str(self):
        """Returns the string representation of the model"""
        import simplejson as json
        if six.PY2:
            import sys
            reload(sys)
            sys.setdefaultencoding("utf-8")
        return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)
    def __repr__(self):
        """For `print`"""
        return self.to_str()
    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, DeleteDomainsRequest):
            return False
        return self.__dict__ == other.__dict__
    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
| 26.324561 | 79 | 0.553149 |
import re
import six
from huaweicloudsdkcore.utils.http_utils import sanitize_for_serialization
class DeleteDomainsRequest:
    """Request model for deleting a domain.
    Attributes:
      openapi_types (dict): attribute name -> attribute type.
      attribute_map (dict): attribute name -> json key in definition.
    """
    sensitive_list = []
    openapi_types = {
        'domain_name': 'str'
    }
    attribute_map = {
        'domain_name': 'domain_name'
    }
    def __init__(self, domain_name=None):
        """DeleteDomainsRequest - a model defined in huaweicloud sdk"""
        self._domain_name = None
        self.discriminator = None
        self.domain_name = domain_name
    @property
    def domain_name(self):
        """Gets the domain_name of this DeleteDomainsRequest.
        :return: The domain_name of this DeleteDomainsRequest.
        :rtype: str
        """
        return self._domain_name
    @domain_name.setter
    def domain_name(self, domain_name):
        """Sets the domain_name of this DeleteDomainsRequest.
        :param domain_name: The domain_name of this DeleteDomainsRequest.
        :type: str
        """
        self._domain_name = domain_name
    def to_dict(self):
        """Returns the model properties as a dict"""
        serialized = {}
        for name in self.openapi_types:
            value = getattr(self, name)
            if isinstance(value, list):
                serialized[name] = [
                    item.to_dict() if hasattr(item, "to_dict") else item
                    for item in value
                ]
            elif hasattr(value, "to_dict"):
                serialized[name] = value.to_dict()
            elif isinstance(value, dict):
                serialized[name] = {
                    key: (val.to_dict() if hasattr(val, "to_dict") else val)
                    for key, val in value.items()
                }
            else:
                # Sensitive fields are masked in the serialized output.
                serialized[name] = "****" if name in self.sensitive_list else value
        return serialized
    def to_str(self):
        """Returns the string representation of the model"""
        import simplejson as json
        if six.PY2:
            import sys
            reload(sys)
            sys.setdefaultencoding("utf-8")
        return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)
    def __repr__(self):
        """For `print`"""
        return self.to_str()
    def __eq__(self, other):
        """Returns true if both objects are equal"""
        return (isinstance(other, DeleteDomainsRequest)
                and self.__dict__ == other.__dict__)
    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self.__eq__(other)
| true | true |
f71c29769d72428dea8c648a9ffeb523dd34dd62 | 6,207 | py | Python | examples/mountaincar.py | knollsen/agent57 | 7d3d7890f8eb3a1420945e0b7bcb85ad87459167 | [
"MIT"
] | 1 | 2021-02-24T08:22:35.000Z | 2021-02-24T08:22:35.000Z | examples/mountaincar.py | knollsen/agent57 | 7d3d7890f8eb3a1420945e0b7bcb85ad87459167 | [
"MIT"
] | null | null | null | examples/mountaincar.py | knollsen/agent57 | 7d3d7890f8eb3a1420945e0b7bcb85ad87459167 | [
"MIT"
] | null | null | null | import gym
from keras.optimizers import Adam
import traceback
import os
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), '../'))
from agent.agent57 import ActorUser
from agent.policy import EpsilonGreedy, AnnealingEpsilonGreedy
from agent.memory import PERRankBaseMemory, PERProportionalMemory
from agent.model import InputType, LstmType, UvfaType
from agent.model import ValueModel
from agent.common import seed_everything
from agent.callbacks import LoggerType
from agent.main_runner import run_gym_dqn, run_play, run_replay, run_gym_agent57
# Wall-clock training budget in minutes, used by both runners below.
MINUTES_OF_TRAINING = 60
seed_everything(42)
ENV_NAME = "MountainCar-v0"
# Directory where recorded demo episodes are stored.
episode_save_dir = "tmp_{}.".format(ENV_NAME)
def create_parameter(env, nb_steps):
    """Builds the hyper-parameter dict shared by the DQN and Agent57 runs."""
    kwargs = {
        "input_shape": env.observation_space.shape,
        "input_type": InputType.VALUES,
        "input_model": None,
        "nb_actions": env.action_space.n,
        "memory": "PERRankBaseMemory",
        "memory_kwargs": {
            "capacity": 100_000,
            "alpha": 1.0,            # PER priority exponent
            "beta_initial": 0.0,     # initial importance-sampling weight (1.0 is the maximum)
            "beta_steps": nb_steps,  # steps over which the IS weight anneals up
            "enable_is": True,       # whether importance sampling is enabled
        },
        "optimizer_ext": Adam(lr=0.001),
        "optimizer_int": Adam(lr=0.001),
        "optimizer_rnd": Adam(lr=0.005),
        "optimizer_emb": Adam(lr=0.005),
        # NN
        "batch_size": 16,  # training batch size
        "input_sequence": 4,    # number of input frames
        "dense_units_num": 64,  # units in the dense layer
        "enable_dueling_network": True,
        "lstm_type": LstmType.STATELESS,  # which LSTM algorithm to use
        "lstm_units_num": 64,  # number of LSTM units
        "lstmful_input_length": 2,  # input length for the stateful LSTM
        # train
        "memory_warmup_size": 1000,    # initial steps used only to fill memory (no learning)
        "target_model_update_interval": 2000,  # update interval of the target network
        "enable_double_dqn": True,
        "enable_rescaling": False,   # whether to enable value rescaling
        "burnin_length": 0,        # burn-in period
        "reward_multisteps": 3,    # multistep reward
        "demo_memory": "PERProportionalMemory",
        "demo_memory_kwargs": {
            "capacity": 100_000,
            "alpha": 0.8,
        },
        "demo_episode_dir": episode_save_dir,
        "demo_ratio_initial": 1.0,
        "demo_ratio_final": 1.0/512.0,
        "demo_ratio_steps": nb_steps,
        "episode_memory": "PERProportionalMemory",
        "episode_memory_kwargs": {
            "capacity": 1000,
            "alpha": 0.8,
        },
        "episode_ratio": 1.0/32.0,
        # intrinsic_reward
        "policy_num": 8,
        "ucb_epsilon": 0.5,
        "ucb_window_size": 50,
        "gamma0": 0.999,
        "gamma1": 0.99,
        "gamma2": 0.9,
        "enable_intrinsic_actval_model": False,
        "beta_max": 0.3,
        # UVFA inputs for the extrinsic network (all disabled here).
        "uvfa_ext": [
            #UvfaType.ACTION,
            #UvfaType.REWARD_EXT,
            #UvfaType.REWARD_INT,
            #UvfaType.POLICY,
        ],
        # UVFA inputs for the intrinsic network.
        "uvfa_int": [
            UvfaType.ACTION,
            UvfaType.REWARD_EXT,
            UvfaType.REWARD_INT,
            UvfaType.POLICY,
        ],
        # other
        "step_interval": 1,
        "enable_add_episode_end_frame": False,
    }
    return kwargs
#---------------------------------------------------------
def run_dqn(enable_train):
    """Trains (or tests) a single-actor DQN agent on the environment."""
    env = gym.make(ENV_NAME)
    # Print basic environment information.
    print("action_space : " + str(env.action_space))
    print("observation_space : " + str(env.observation_space))
    print("reward_range : " + str(env.reward_range))
    nb_steps = 100_000
    kwargs = create_parameter(env, nb_steps)
    kwargs["action_policy"] = AnnealingEpsilonGreedy(
        initial_epsilon=0.5,  # initial epsilon
        final_epsilon=0.01,  # final epsilon
        exploration_steps=10_000  # steps to anneal from initial to final epsilon
    )
    #kwargs["action_policy"] = EpsilonGreedy(0.1)
    run_gym_dqn(
        enable_train,
        env,
        ENV_NAME,
        kwargs,
        nb_steps=nb_steps,
        nb_time=MINUTES_OF_TRAINING*60,
        logger_type=LoggerType.STEP,
        log_interval=1000,
        test_env=env,
        movie_save=False,
    )
    env.close()
#---------------------------------------------------------
class MyActor(ActorUser):
    """Base actor: fixed CPU placement and an epsilon-greedy policy."""
    @staticmethod
    def allocate(actor_index, actor_num):
        # All actors run on the CPU device.
        return "/device:CPU:0"
    def getPolicy(self, actor_index, actor_num):
        return EpsilonGreedy(0.1)
    def fit(self, index, agent):
        # Each actor creates its own environment instance for training.
        env = gym.make(ENV_NAME)
        agent.fit(env, visualize=False, verbose=0)
        env.close()
class MyActor1(MyActor):
    """Actor variant with a greedier policy (epsilon = 0.01)."""
    def getPolicy(self, actor_index, actor_num):
        return EpsilonGreedy(0.01)
class MyActor2(MyActor):
    """Actor variant with a more exploratory policy (epsilon = 0.1)."""
    def getPolicy(self, actor_index, actor_num):
        return EpsilonGreedy(0.1)
def create_env():
    # Factory passed to the runner so each worker builds its own env.
    return gym.make(ENV_NAME)
def run_agent57(enable_train):
    """Trains (or tests) a multi-actor Agent57 setup on the environment."""
    env = gym.make(ENV_NAME)
    # Print basic environment information.
    print("action_space : " + str(env.action_space))
    print("observation_space : " + str(env.observation_space))
    print("reward_range : " + str(env.reward_range))
    nb_trains = 100_000
    kwargs = create_parameter(env, nb_trains)
    kwargs["actors"] = [MyActor1]
    kwargs["sync_actor_model_interval"] = 50  # interval at which actors sync the model from the learner
    run_gym_agent57(
        enable_train,
        env,
        ENV_NAME,
        kwargs,
        nb_trains=nb_trains,
        nb_time=MINUTES_OF_TRAINING*60,
        logger_type=LoggerType.STEP,
        log_interval=1000,
        test_env=create_env,
        is_load_weights=False,
        movie_save=False,
    )
    env.close()
#----------------------
if __name__ == '__main__':
    # Create and save demo episodes (played manually).
    # NOTE(review): create_parameter() does not define a "processor" key,
    # so this disabled branch would raise KeyError if enabled — verify.
    if False:
        env = gym.make(ENV_NAME)
        kwargs = create_parameter(env, 0)
        run_play(env, episode_save_dir, kwargs["processor"])
    # Replay saved episodes (for verification).
    if False:
        run_replay(episode_save_dir)
    # Single-actor training.
    if False:
        run_dqn(enable_train=True)
        #run_dqn(enable_train=False)  # test only
    # Multi-actor training.
    if True:
        run_agent57(enable_train=True)
        #run_agent57(enable_train=False)  # test only
| 27.343612 | 80 | 0.595779 | import gym
from keras.optimizers import Adam
import traceback
import os
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), '../'))
from agent.agent57 import ActorUser
from agent.policy import EpsilonGreedy, AnnealingEpsilonGreedy
from agent.memory import PERRankBaseMemory, PERProportionalMemory
from agent.model import InputType, LstmType, UvfaType
from agent.model import ValueModel
from agent.common import seed_everything
from agent.callbacks import LoggerType
from agent.main_runner import run_gym_dqn, run_play, run_replay, run_gym_agent57
# Wall-clock training budget in minutes, shared by both runners below.
MINUTES_OF_TRAINING = 60
seed_everything(42)
ENV_NAME = "MountainCar-v0"
# Directory where recorded demo episodes are stored.
episode_save_dir = "tmp_{}.".format(ENV_NAME)
def create_parameter(env, nb_steps):
    """Builds the hyper-parameter dict shared by the DQN and Agent57 runs."""
    kwargs = {
        # Input/output description of the network.
        "input_shape": env.observation_space.shape,
        "input_type": InputType.VALUES,
        "input_model": None,
        "nb_actions": env.action_space.n,
        # Prioritized experience replay settings.
        "memory": "PERRankBaseMemory",
        "memory_kwargs": {
            "capacity": 100_000,
            "alpha": 1.0,
            "beta_initial": 0.0,
            "beta_steps": nb_steps,
            "enable_is": True,
        },
        "optimizer_ext": Adam(lr=0.001),
        "optimizer_int": Adam(lr=0.001),
        "optimizer_rnd": Adam(lr=0.005),
        "optimizer_emb": Adam(lr=0.005),
        # Network architecture.
        "batch_size": 16,
        "input_sequence": 4,
        "dense_units_num": 64,
        "enable_dueling_network": True,
        "lstm_type": LstmType.STATELESS,
        "lstm_units_num": 64,
        "lstmful_input_length": 2,
        # Training schedule.
        "memory_warmup_size": 1000,
        "target_model_update_interval": 2000,
        "enable_double_dqn": True,
        "enable_rescaling": False,
        "burnin_length": 0,
        "reward_multisteps": 3,
        # Demonstration replay memory.
        "demo_memory": "PERProportionalMemory",
        "demo_memory_kwargs": {
            "capacity": 100_000,
            "alpha": 0.8,
        },
        "demo_episode_dir": episode_save_dir,
        "demo_ratio_initial": 1.0,
        "demo_ratio_final": 1.0/512.0,
        "demo_ratio_steps": nb_steps,
        # Episodic replay memory.
        "episode_memory": "PERProportionalMemory",
        "episode_memory_kwargs": {
            "capacity": 1000,
            "alpha": 0.8,
        },
        "episode_ratio": 1.0/32.0,
        # Intrinsic-reward (Agent57) settings.
        "policy_num": 8,
        "ucb_epsilon": 0.5,
        "ucb_window_size": 50,
        "gamma0": 0.999,
        "gamma1": 0.99,
        "gamma2": 0.9,
        "enable_intrinsic_actval_model": False,
        "beta_max": 0.3,
        # UVFA inputs: none for the extrinsic network, all for the intrinsic one.
        "uvfa_ext": [
        ],
        "uvfa_int": [
            UvfaType.ACTION,
            UvfaType.REWARD_EXT,
            UvfaType.REWARD_INT,
            UvfaType.POLICY,
        ],
        # Misc.
        "step_interval": 1,
        "enable_add_episode_end_frame": False,
    }
    return kwargs
def run_dqn(enable_train):
    """Trains (or tests) a single-actor DQN agent on the environment."""
    env = gym.make(ENV_NAME)
    # Print basic environment information.
    print("action_space : " + str(env.action_space))
    print("observation_space : " + str(env.observation_space))
    print("reward_range : " + str(env.reward_range))
    nb_steps = 100_000
    kwargs = create_parameter(env, nb_steps)
    # Epsilon anneals from 0.5 down to 0.01 over the first 10k steps.
    kwargs["action_policy"] = AnnealingEpsilonGreedy(
        initial_epsilon=0.5,
        final_epsilon=0.01,
        exploration_steps=10_000
    )
    run_gym_dqn(
        enable_train,
        env,
        ENV_NAME,
        kwargs,
        nb_steps=nb_steps,
        nb_time=MINUTES_OF_TRAINING*60,
        logger_type=LoggerType.STEP,
        log_interval=1000,
        test_env=env,
        movie_save=False,
    )
    env.close()
class MyActor(ActorUser):
    """Base actor: fixed CPU placement and an epsilon-greedy policy."""
    @staticmethod
    def allocate(actor_index, actor_num):
        # All actors run on the CPU device.
        return "/device:CPU:0"
    def getPolicy(self, actor_index, actor_num):
        return EpsilonGreedy(0.1)
    def fit(self, index, agent):
        # Each actor creates its own environment instance for training.
        env = gym.make(ENV_NAME)
        agent.fit(env, visualize=False, verbose=0)
        env.close()
class MyActor1(MyActor):
    """Actor variant with a greedier policy (epsilon = 0.01)."""
    def getPolicy(self, actor_index, actor_num):
        return EpsilonGreedy(0.01)
class MyActor2(MyActor):
    """Actor variant with a more exploratory policy (epsilon = 0.1)."""
    def getPolicy(self, actor_index, actor_num):
        return EpsilonGreedy(0.1)
def create_env():
    # Factory passed to the runner so each worker builds its own env.
    return gym.make(ENV_NAME)
def run_agent57(enable_train):
    """Trains (or tests) a multi-actor Agent57 setup on the environment."""
    env = gym.make(ENV_NAME)
    # Print basic environment information.
    print("action_space : " + str(env.action_space))
    print("observation_space : " + str(env.observation_space))
    print("reward_range : " + str(env.reward_range))
    nb_trains = 100_000
    kwargs = create_parameter(env, nb_trains)
    kwargs["actors"] = [MyActor1]
    # Interval at which actors sync their model from the learner.
    kwargs["sync_actor_model_interval"] = 50
    run_gym_agent57(
        enable_train,
        env,
        ENV_NAME,
        kwargs,
        nb_trains=nb_trains,
        nb_time=MINUTES_OF_TRAINING*60,
        logger_type=LoggerType.STEP,
        log_interval=1000,
        test_env=create_env,
        is_load_weights=False,
        movie_save=False,
    )
    env.close()
if __name__ == '__main__':
    # Create and save demo episodes (disabled).
    # NOTE(review): create_parameter() does not define a "processor" key,
    # so this branch would raise KeyError if enabled — verify.
    if False:
        env = gym.make(ENV_NAME)
        kwargs = create_parameter(env, 0)
        run_play(env, episode_save_dir, kwargs["processor"])
    # Replay saved episodes (disabled).
    if False:
        run_replay(episode_save_dir)
    # Single-actor training (disabled).
    if False:
        run_dqn(enable_train=True)
    # Multi-actor training (active).
    if True:
        run_agent57(enable_train=True)
| true | true |
f71c2a25b344a3cfa098ae966121ffc6edd9ccd3 | 4,074 | py | Python | train.py | artemkurylev/Context-Aware_Crowd_Counting-pytorch | d68ddd87b99f2afc512357cb8fcb0ca41ea22865 | [
"MIT"
] | 71 | 2019-05-21T02:10:46.000Z | 2022-03-17T05:19:35.000Z | train.py | GuoleiSun/Context-Aware_Crowd_Counting-pytorch | d68ddd87b99f2afc512357cb8fcb0ca41ea22865 | [
"MIT"
] | 28 | 2019-05-14T01:32:21.000Z | 2021-07-06T08:39:10.000Z | train.py | GuoleiSun/Context-Aware_Crowd_Counting-pytorch | d68ddd87b99f2afc512357cb8fcb0ca41ea22865 | [
"MIT"
] | 22 | 2019-05-22T09:38:51.000Z | 2022-03-22T23:31:25.000Z | import numpy as np
import time
import torch
import torch.nn as nn
import os
import visdom
import random
from tqdm import tqdm as tqdm
from cannet import CANNet
from my_dataset import CrowdDataset
if __name__=="__main__":
    # Configuration: paths to the ShanghaiTech Part A images and
    # ground-truth density maps, plus training hyper-parameters.
    train_image_root='./data/Shanghai_part_A/train_data/images'
    train_dmap_root='./data/Shanghai_part_A/train_data/ground_truth'
    test_image_root='./data/Shanghai_part_A/test_data/images'
    test_dmap_root='./data/Shanghai_part_A/test_data/ground_truth'
    gpu_or_cpu='cuda' # use cuda or cpu
    lr = 1e-7
    batch_size = 1
    momentum = 0.95
    epochs = 20000
    seed = time.time()
    vis=visdom.Visdom()
    device=torch.device(gpu_or_cpu)
    torch.cuda.manual_seed(seed)
    model=CANNet().to(device)
    # Sum-of-squared-errors over the density map. 'size_average=False' is
    # deprecated in modern PyTorch; reduction='sum' is the exact equivalent.
    criterion=nn.MSELoss(reduction='sum').to(device)
    optimizer=torch.optim.SGD(model.parameters(),lr,
                              momentum=momentum,
                              weight_decay=0)
    # optimizer=torch.optim.Adam(model.parameters(),lr)
    train_dataset=CrowdDataset(train_image_root,train_dmap_root,gt_downsample=8,phase='train')
    # Use the configured batch_size instead of a hard-coded literal.
    train_loader=torch.utils.data.DataLoader(train_dataset,batch_size=batch_size,shuffle=True)
    test_dataset=CrowdDataset(test_image_root,test_dmap_root,gt_downsample=8,phase='test')
    test_loader=torch.utils.data.DataLoader(test_dataset,batch_size=1,shuffle=False)
    if not os.path.exists('./checkpoints'):
        os.mkdir('./checkpoints')
    # Track the best (lowest) test MAE seen so far and per-epoch curves.
    min_mae=10000
    min_epoch=0
    train_loss_list=[]
    epoch_list=[]
    test_error_list=[]
    for epoch in range(0,epochs):
        # Training phase.
        model.train()
        epoch_loss=0
        for i,(img,gt_dmap) in enumerate(tqdm(train_loader)):
            img=img.to(device)
            gt_dmap=gt_dmap.to(device)
            # Forward pass and density-map regression loss.
            et_dmap=model(img)
            loss=criterion(et_dmap,gt_dmap)
            epoch_loss+=loss.item()
            optimizer.zero_grad()
            loss.backward()
            optimizer.step()
        epoch_list.append(epoch)
        train_loss_list.append(epoch_loss/len(train_loader))
        torch.save(model.state_dict(),'./checkpoints/epoch_'+str(epoch)+".pth")
        # Testing phase: gradients are not needed, so disable autograd to
        # avoid building graphs and wasting GPU memory during evaluation.
        model.eval()
        mae=0
        with torch.no_grad():
            for i,(img,gt_dmap) in enumerate(tqdm(test_loader)):
                img=img.to(device)
                gt_dmap=gt_dmap.to(device)
                et_dmap=model(img)
                # MAE between predicted and ground-truth crowd counts.
                mae+=abs(et_dmap.data.sum()-gt_dmap.data.sum()).item()
                del img,gt_dmap,et_dmap
        if mae/len(test_loader)<min_mae:
            min_mae=mae/len(test_loader)
            min_epoch=epoch
        test_error_list.append(mae/len(test_loader))
        print("epoch:"+str(epoch)+" error:"+str(mae/len(test_loader))+" min_mae:"+str(min_mae)+" min_epoch:"+str(min_epoch))
        # Plot learning curves in visdom.
        vis.line(win=1,X=epoch_list, Y=train_loss_list, opts=dict(title='train_loss'))
        vis.line(win=2,X=epoch_list, Y=test_error_list, opts=dict(title='test_error'))
        # Visualize a random test sample with its GT and estimated maps.
        index=random.randint(0,len(test_loader)-1)
        img,gt_dmap=test_dataset[index]
        vis.image(win=3,img=img,opts=dict(title='img'))
        vis.image(win=4,img=gt_dmap/(gt_dmap.max())*255,opts=dict(title='gt_dmap('+str(gt_dmap.sum())+')'))
        img=img.unsqueeze(0).to(device)
        gt_dmap=gt_dmap.unsqueeze(0)
        with torch.no_grad():
            et_dmap=model(img)
        et_dmap=et_dmap.squeeze(0).detach().cpu().numpy()
        vis.image(win=5,img=et_dmap/(et_dmap.max())*255,opts=dict(title='et_dmap('+str(et_dmap.sum())+')'))
        # 'time' is already imported at module level; no re-import needed.
        print(time.strftime('%Y.%m.%d %H:%M:%S',time.localtime(time.time())))
| 32.854839 | 124 | 0.60972 | import numpy as np
import time
import torch
import torch.nn as nn
import os
import visdom
import random
from tqdm import tqdm as tqdm
from cannet import CANNet
from my_dataset import CrowdDataset
if __name__=="__main__":
    # Paths to the ShanghaiTech Part A images and ground-truth density maps.
    train_image_root='./data/Shanghai_part_A/train_data/images'
    train_dmap_root='./data/Shanghai_part_A/train_data/ground_truth'
    test_image_root='./data/Shanghai_part_A/test_data/images'
    test_dmap_root='./data/Shanghai_part_A/test_data/ground_truth'
    gpu_or_cpu='cuda'
    lr = 1e-7
    batch_size = 1
    momentum = 0.95
    epochs = 20000
    # NOTE(review): steps/scales/workers/print_freq are defined but never
    # used below — confirm whether they were meant to configure anything.
    steps = [-1,1,100,150]
    scales = [1,1,1,1]
    workers = 4
    seed = time.time()
    print_freq = 30
    vis=visdom.Visdom()
    device=torch.device(gpu_or_cpu)
    torch.cuda.manual_seed(seed)
    model=CANNet().to(device)
    # Sum-of-squared-errors over the density map.
    # NOTE(review): size_average=False is deprecated in modern PyTorch;
    # reduction='sum' is the equivalent spelling — consider updating.
    criterion=nn.MSELoss(size_average=False).to(device)
    optimizer=torch.optim.SGD(model.parameters(),lr,
                              momentum=momentum,
                              weight_decay=0)
    train_dataset=CrowdDataset(train_image_root,train_dmap_root,gt_downsample=8,phase='train')
    train_loader=torch.utils.data.DataLoader(train_dataset,batch_size=1,shuffle=True)
    test_dataset=CrowdDataset(test_image_root,test_dmap_root,gt_downsample=8,phase='test')
    test_loader=torch.utils.data.DataLoader(test_dataset,batch_size=1,shuffle=False)
    if not os.path.exists('./checkpoints'):
        os.mkdir('./checkpoints')
    # Track the best (lowest) test MAE seen so far and per-epoch curves.
    min_mae=10000
    min_epoch=0
    train_loss_list=[]
    epoch_list=[]
    test_error_list=[]
    for epoch in range(0,epochs):
        # Training phase.
        model.train()
        epoch_loss=0
        for i,(img,gt_dmap) in enumerate(tqdm(train_loader)):
            img=img.to(device)
            gt_dmap=gt_dmap.to(device)
            # Forward pass and density-map regression loss.
            et_dmap=model(img)
            loss=criterion(et_dmap,gt_dmap)
            epoch_loss+=loss.item()
            optimizer.zero_grad()
            loss.backward()
            optimizer.step()
        epoch_list.append(epoch)
        train_loss_list.append(epoch_loss/len(train_loader))
        torch.save(model.state_dict(),'./checkpoints/epoch_'+str(epoch)+".pth")
        # Evaluation phase: MAE between predicted and ground-truth counts.
        # NOTE(review): no torch.no_grad() here, so autograd graphs are
        # built during evaluation — consider wrapping in no_grad.
        model.eval()
        mae=0
        for i,(img,gt_dmap) in enumerate(tqdm(test_loader)):
            img=img.to(device)
            gt_dmap=gt_dmap.to(device)
            et_dmap=model(img)
            mae+=abs(et_dmap.data.sum()-gt_dmap.data.sum()).item()
            del img,gt_dmap,et_dmap
        if mae/len(test_loader)<min_mae:
            min_mae=mae/len(test_loader)
            min_epoch=epoch
        test_error_list.append(mae/len(test_loader))
        print("epoch:"+str(epoch)+" error:"+str(mae/len(test_loader))+" min_mae:"+str(min_mae)+" min_epoch:"+str(min_epoch))
        # Plot learning curves in visdom.
        vis.line(win=1,X=epoch_list, Y=train_loss_list, opts=dict(title='train_loss'))
        vis.line(win=2,X=epoch_list, Y=test_error_list, opts=dict(title='test_error'))
        # Visualize a random test sample with its GT and estimated maps.
        index=random.randint(0,len(test_loader)-1)
        img,gt_dmap=test_dataset[index]
        vis.image(win=3,img=img,opts=dict(title='img'))
        vis.image(win=4,img=gt_dmap/(gt_dmap.max())*255,opts=dict(title='gt_dmap('+str(gt_dmap.sum())+')'))
        img=img.unsqueeze(0).to(device)
        gt_dmap=gt_dmap.unsqueeze(0)
        et_dmap=model(img)
        et_dmap=et_dmap.squeeze(0).detach().cpu().numpy()
        vis.image(win=5,img=et_dmap/(et_dmap.max())*255,opts=dict(title='et_dmap('+str(et_dmap.sum())+')'))
        # Redundant re-import: 'time' is already imported at module level.
        import time
        print(time.strftime('%Y.%m.%d %H:%M:%S',time.localtime(time.time())))
| true | true |
f71c2aa0bf024d0c142df184c4a0782128c38601 | 579 | py | Python | tests/test_ext_indicator_cycles.py | ryanrussell/pandas-ta | 720bbcf0196d363d621beeced8fba711990d075d | [
"MIT"
] | 2,298 | 2019-02-20T18:38:18.000Z | 2022-03-31T07:45:50.000Z | tests/test_ext_indicator_cycles.py | ryanrussell/pandas-ta | 720bbcf0196d363d621beeced8fba711990d075d | [
"MIT"
] | 451 | 2019-02-26T00:50:02.000Z | 2022-03-31T03:17:39.000Z | tests/test_ext_indicator_cycles.py | ryanrussell/pandas-ta | 720bbcf0196d363d621beeced8fba711990d075d | [
"MIT"
] | 579 | 2019-03-19T01:53:03.000Z | 2022-03-31T11:13:50.000Z | from pandas.core.series import Series
from .config import sample_data
from .context import pandas_ta
from unittest import TestCase
from pandas import DataFrame
class TestCylesExtension(TestCase):
    """Tests cycle indicators exposed through the DataFrame 'ta' extension."""
    @classmethod
    def setUpClass(cls):
        # Shared sample OHLCV data for all tests in this class.
        cls.data = sample_data
    @classmethod
    def tearDownClass(cls):
        del cls.data
    def setUp(self): pass
    def tearDown(self): pass
    def test_ebsw_ext(self):
        """ebsw(append=True) should append an 'EBSW_40_10' column."""
        self.data.ta.ebsw(append=True)
        self.assertIsInstance(self.data, DataFrame)
        self.assertEqual(self.data.columns[-1], "EBSW_40_10")
| 22.269231 | 61 | 0.70639 | from pandas.core.series import Series
from .config import sample_data
from .context import pandas_ta
from unittest import TestCase
from pandas import DataFrame
class TestCylesExtension(TestCase):
    """Tests cycle indicators exposed through the DataFrame 'ta' extension."""
    @classmethod
    def setUpClass(cls):
        # Shared sample OHLCV data for all tests in this class.
        cls.data = sample_data
    @classmethod
    def tearDownClass(cls):
        del cls.data
    def setUp(self): pass
    def tearDown(self): pass
    def test_ebsw_ext(self):
        """ebsw(append=True) should append an 'EBSW_40_10' column."""
        self.data.ta.ebsw(append=True)
        self.assertIsInstance(self.data, DataFrame)
        self.assertEqual(self.data.columns[-1], "EBSW_40_10")
| true | true |
f71c2bcacd1a2766209c84a64e9d8e489810169f | 274 | py | Python | buildscripts/condarecipe.hsa/run_test.py | ehsantn/numba | 4749ef7ccc630b7f649ec972497bc5b7fca79303 | [
"BSD-2-Clause",
"MIT"
] | 4 | 2017-06-30T14:22:30.000Z | 2021-01-11T16:47:23.000Z | buildscripts/condarecipe.hsa/run_test.py | ehsantn/numba | 4749ef7ccc630b7f649ec972497bc5b7fca79303 | [
"BSD-2-Clause",
"MIT"
] | 1 | 2018-04-03T22:37:40.000Z | 2018-04-03T23:53:43.000Z | buildscripts/condarecipe.hsa/run_test.py | ehsantn/numba | 4749ef7ccc630b7f649ec972497bc5b7fca79303 | [
"BSD-2-Clause",
"MIT"
] | null | null | null | import sys
import numba
# Buffered output everywhere; multiprocessing mode is added off Windows.
if sys.platform.startswith('win32'):
    args = ['-b']
else:
    args = ['-m', '-b']
args.append('numba.tests')
# Run the suite; a falsy result means at least one test failed.
if not numba.runtests.main(*args):
    print("Test failed")
    sys.exit(1)
print('numba.__version__: %s' % numba.__version__)
| 18.266667 | 50 | 0.613139 | import sys
import numba
# Build the runtests argument list: buffered output everywhere,
# multiprocessing mode added on non-Windows platforms.
args = []
if sys.platform.startswith('win32'):
    args += ['-b']
else:
    args += ['-m', '-b']
args += ['numba.tests']
# Run the suite; a falsy result means at least one test failed.
if not numba.runtests.main(*args):
    print("Test failed")
    sys.exit(1)
print('numba.__version__: %s' % numba.__version__)
| true | true |
f71c2c1d589bfaed486a8c50f486a53a358a0d09 | 59,359 | py | Python | python/ccxt/async_support/timex.py | sandutsar/ccxt | f27c187fa1626a6c261c6fa5caaae89cb657461d | [
"MIT"
] | null | null | null | python/ccxt/async_support/timex.py | sandutsar/ccxt | f27c187fa1626a6c261c6fa5caaae89cb657461d | [
"MIT"
] | null | null | null | python/ccxt/async_support/timex.py | sandutsar/ccxt | f27c187fa1626a6c261c6fa5caaae89cb657461d | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# PLEASE DO NOT EDIT THIS FILE, IT IS GENERATED AND WILL BE OVERWRITTEN:
# https://github.com/ccxt/ccxt/blob/master/CONTRIBUTING.md#how-to-contribute-code
from ccxt.async_support.base.exchange import Exchange
from ccxt.base.errors import ExchangeError
from ccxt.base.errors import AuthenticationError
from ccxt.base.errors import PermissionDenied
from ccxt.base.errors import BadRequest
from ccxt.base.errors import InsufficientFunds
from ccxt.base.errors import InvalidOrder
from ccxt.base.errors import OrderNotFound
from ccxt.base.errors import NotSupported
from ccxt.base.errors import RateLimitExceeded
from ccxt.base.errors import ExchangeNotAvailable
from ccxt.base.decimal_to_precision import TICK_SIZE
from ccxt.base.precise import Precise
class timex(Exchange):
    def describe(self):
        """
        Return the static exchange metadata: identity, capabilities,
        timeframe codes, endpoint routing tables, error mapping and
        exchange-wide defaults, merged over the base Exchange description.
        """
        return self.deep_extend(super(timex, self).describe(), {
            'id': 'timex',
            'name': 'TimeX',
            'countries': ['AU'],
            'version': 'v1',
            'rateLimit': 1500,
            # unified-method capability flags
            'has': {
                'CORS': None,
                'spot': True,
                'margin': False,
                'swap': False,
                'future': False,
                'option': False,
                'addMargin': False,
                'cancelOrder': True,
                'cancelOrders': True,
                'createOrder': True,
                'createReduceOnlyOrder': False,
                'createStopLimitOrder': False,
                'createStopMarketOrder': False,
                'createStopOrder': False,
                'editOrder': True,
                'fetchBalance': True,
                'fetchBorrowRate': False,
                'fetchBorrowRateHistories': False,
                'fetchBorrowRateHistory': False,
                'fetchBorrowRates': False,
                'fetchBorrowRatesPerSymbol': False,
                'fetchClosedOrders': True,
                'fetchCurrencies': True,
                'fetchFundingHistory': False,
                'fetchFundingRate': False,
                'fetchFundingRateHistory': False,
                'fetchFundingRates': False,
                'fetchIndexOHLCV': False,
                'fetchLeverage': False,
                'fetchLeverageTiers': False,
                'fetchMarkets': True,
                'fetchMarkOHLCV': False,
                'fetchMyTrades': True,
                'fetchOHLCV': True,
                'fetchOpenInterestHistory': False,
                'fetchOpenOrders': True,
                'fetchOrder': True,
                'fetchOrderBook': True,
                'fetchPosition': False,
                'fetchPositions': False,
                'fetchPositionsRisk': False,
                'fetchPremiumIndexOHLCV': False,
                'fetchTicker': True,
                'fetchTickers': True,
                'fetchTrades': True,
                'fetchTradingFee': True,  # maker fee only
                'reduceMargin': False,
                'setLeverage': False,
                'setMarginMode': False,
                'setPositionMode': False,
            },
            # unified timeframe -> exchange period code
            'timeframes': {
                '1m': 'I1',
                '5m': 'I5',
                '15m': 'I15',
                '30m': 'I30',
                '1h': 'H1',
                '2h': 'H2',
                '4h': 'H4',
                '6h': 'H6',
                '12h': 'H12',
                '1d': 'D1',
                '1w': 'W1',
            },
            'urls': {
                'logo': 'https://user-images.githubusercontent.com/1294454/70423869-6839ab00-1a7f-11ea-8f94-13ae72c31115.jpg',
                'api': 'https://plasma-relay-backend.timex.io',
                'www': 'https://timex.io',
                'doc': 'https://docs.timex.io',
                'referral': 'https://timex.io/?refcode=1x27vNkTbP1uwkCck',
            },
            # endpoint routing: each entry generates an implicit API method,
            # e.g. api.trading.get 'balances' -> self.tradingGetBalances
            'api': {
                'custody': {
                    'get': [
                        'credentials',  # Get api key for address
                        'credentials/h/{hash}',  # Get api key by hash
                        'credentials/k/{key}',  # Get api key by key
                        'credentials/me/address',  # Get api key by hash
                        'deposit-addresses',  # Get deposit addresses list
                        'deposit-addresses/h/{hash}',  # Get deposit address by hash
                    ],
                },
                'history': {
                    'get': [
                        'orders',  # Gets historical orders
                        'orders/details',  # Gets order details
                        'orders/export/csv',  # Export orders to csv
                        'trades',  # Gets historical trades
                        'trades/export/csv',  # Export trades to csv
                    ],
                },
                'currencies': {
                    'get': [
                        'a/{address}',  # Gets currency by address
                        'i/{id}',  # Gets currency by id
                        's/{symbol}',  # Gets currency by symbol
                    ],
                    'post': [
                        'perform',  # Creates new currency
                        'prepare',  # Prepare creates new currency
                        'remove/perform',  # Removes currency by symbol
                        's/{symbol}/remove/prepare',  # Prepare remove currency by symbol
                        's/{symbol}/update/perform',  # Prepare update currency by symbol
                        's/{symbol}/update/prepare',  # Prepare update currency by symbol
                    ],
                },
                'markets': {
                    'get': [
                        'i/{id}',  # Gets market by id
                        's/{symbol}',  # Gets market by symbol
                    ],
                    'post': [
                        'perform',  # Creates new market
                        'prepare',  # Prepare creates new market
                        'remove/perform',  # Removes market by symbol
                        's/{symbol}/remove/prepare',  # Prepare remove market by symbol
                        's/{symbol}/update/perform',  # Prepare update market by symbol
                        's/{symbol}/update/prepare',  # Prepare update market by symbol
                    ],
                },
                'public': {
                    'get': [
                        'candles',  # Gets candles
                        'currencies',  # Gets all the currencies
                        'markets',  # Gets all the markets
                        'orderbook',  # Gets orderbook
                        'orderbook/raw',  # Gets raw orderbook
                        'orderbook/v2',  # Gets orderbook v2
                        'tickers',  # Gets all the tickers
                        'trades',  # Gets trades
                    ],
                },
                'statistics': {
                    'get': [
                        'address',  # calculateAddressStatistics
                    ],
                },
                'trading': {
                    'get': [
                        'balances',  # Get trading balances for all(or selected) currencies
                        'fees',  # Get trading fee rates for all(or selected) markets
                        'orders',  # Gets open orders
                    ],
                    'post': [
                        'orders',  # Create new order
                        'orders/json',  # Create orders
                    ],
                    'put': [
                        'orders',  # Cancel or update orders
                        'orders/json',  # Update orders
                    ],
                    'delete': [
                        'orders',  # Delete orders
                        'orders/json',  # Delete orders
                    ],
                },
                'tradingview': {
                    'get': [
                        'config',  # Gets config
                        'history',  # Gets history
                        'symbol_info',  # Gets symbol info
                        'time',  # Gets time
                    ],
                },
            },
            # precision values in the market structure are tick sizes, not decimal digits
            'precisionMode': TICK_SIZE,
            # numeric/string error codes returned by the API mapped onto ccxt exception classes
            'exceptions': {
                'exact': {
                    '0': ExchangeError,
                    '1': NotSupported,
                    '4000': BadRequest,
                    '4001': BadRequest,
                    '4002': InsufficientFunds,
                    '4003': AuthenticationError,
                    '4004': AuthenticationError,
                    '4005': BadRequest,
                    '4006': BadRequest,
                    '4007': BadRequest,
                    '4300': PermissionDenied,
                    '4100': AuthenticationError,
                    '4400': OrderNotFound,
                    '5001': InvalidOrder,
                    '5002': ExchangeError,
                    '400': BadRequest,
                    '401': AuthenticationError,
                    '403': PermissionDenied,
                    '404': OrderNotFound,
                    '429': RateLimitExceeded,
                    '500': ExchangeError,
                    '503': ExchangeNotAvailable,
                },
                'broad': {
                    'Insufficient': InsufficientFunds,
                },
            },
            # exchange-wide defaults, overridable per-call via params
            'options': {
                'expireIn': 31536000,  # 365 × 24 × 60 × 60
                'fetchTickers': {
                    'period': '1d',
                },
                'fetchTrades': {
                    'sort': 'timestamp,asc',
                },
                'fetchMyTrades': {
                    'sort': 'timestamp,asc',
                },
                'fetchOpenOrders': {
                    'sort': 'createdAt,asc',
                },
                'fetchClosedOrders': {
                    'sort': 'createdAt,asc',
                },
                'defaultSort': 'timestamp,asc',
                'defaultSortOrders': 'createdAt,asc',
            },
        })
async def fetch_markets(self, params={}):
"""
retrieves data on all markets for timex
:param dict params: extra parameters specific to the exchange api endpoint
:returns [dict]: an array of objects representing market data
"""
response = await self.publicGetMarkets(params)
#
# [
# {
# "symbol": "ETHBTC",
# "name": "ETH/BTC",
# "baseCurrency": "ETH",
# "baseTokenAddress": "0x45932db54b38af1f5a57136302eeba66a5975c15",
# "quoteCurrency": "BTC",
# "quoteTokenAddress": "0x8370fbc6ddec1e18b4e41e72ed943e238458487c",
# "feeCurrency": "BTC",
# "feeTokenAddress": "0x8370fbc6ddec1e18b4e41e72ed943e238458487c",
# "quantityIncrement": "0.0000001",
# "takerFee": "0.005",
# "makerFee": "0.0025",
# "tickSize": "0.00000001",
# "baseMinSize": "0.0001",
# "quoteMinSize": "0.00001",
# "locked": False
# }
# ]
#
result = []
for i in range(0, len(response)):
result.append(self.parse_market(response[i]))
return result
async def fetch_currencies(self, params={}):
"""
fetches all available currencies on an exchange
:param dict params: extra parameters specific to the timex api endpoint
:returns dict: an associative dictionary of currencies
"""
response = await self.publicGetCurrencies(params)
#
# [
# {
# "symbol": "BTC",
# "name": "Bitcoin",
# "address": "0x8370fbc6ddec1e18b4e41e72ed943e238458487c",
# "icon": "data:image/svg+xml;base64,PHN2ZyB3aWR0aD0iNjAiIGhlaWdodD0iNjAiIHZpZXdCb3g9IjAgMCA2MCA2MCIgZmlsbD0ibm9uZSIgeG1sbnM9Imh0dHA6Ly93d3cudzMub3JnLzIwMDAvc3ZnIj4KPHBhdGggb3BhY2l0eT0iMC41IiBmaWxsLXJ1bGU9ImV2ZW5vZGQiIGNsaXAtcnVsZT0iZXZlbm9kZCIgZD0iTTMwIDUzQzQyLjcwMjUgNTMgNTMgNDIuNzAyNSA1MyAzMEM1MyAxNy4yOTc1IDQyLjcwMjUgNyAzMCA3QzE3LjI5NzUgNyA3IDE3LjI5NzUgNyAzMEM3IDQyLjcwMjUgMTcuMjk3NSA1MyAzMCA1M1pNMzAgNTVDNDMuODA3MSA1NSA1NSA0My44MDcxIDU1IDMwQzU1IDE2LjE5MjkgNDMuODA3MSA1IDMwIDVDMTYuMTkyOSA1IDUgMTYuMTkyOSA1IDMwQzUgNDMuODA3MSAxNi4xOTI5IDU1IDMwIDU1WiIvPgo8cGF0aCBkPSJNNDAuOTQyNSAyNi42NTg1QzQxLjQwMDMgMjMuNjExMyAzOS4wNzA1IDIxLjk3MzIgMzUuODg0OCAyMC44ODA0TDM2LjkxODIgMTYuNzUyNkwzNC4zOTUxIDE2LjEyNjRMMzMuMzg5IDIwLjE0NTVDMzIuNzI1OCAxOS45ODA5IDMyLjA0NDUgMTkuODI1NiAzMS4zNjc1IDE5LjY3MTdMMzIuMzgwOCAxNS42MjYyTDI5Ljg1OTEgMTVMMjguODI1IDE5LjEyNjRDMjguMjc2IDE5LjAwMTkgMjcuNzM3IDE4Ljg3ODggMjcuMjEzOSAxOC43NDkzTDI3LjIxNjggMTguNzM2NEwyMy43MzcyIDE3Ljg3MTJMMjMuMDY2IDIwLjU1NDhDMjMuMDY2IDIwLjU1NDggMjQuOTM4IDIwLjk4MjEgMjQuODk4NSAyMS4wMDg1QzI1LjkyMDQgMjEuMjYyNiAyNi4xMDUgMjEuOTM2IDI2LjA3NDEgMjIuNDY5OUwyNC44OTcgMjcuMTcyNEMyNC45Njc1IDI3LjE5MDMgMjUuMDU4NyAyNy4yMTYgMjUuMTU5MyAyNy4yNTYxQzI1LjA3NTMgMjcuMjM1NCAyNC45ODU0IDI3LjIxMjQgMjQuODkyNyAyNy4xOTAzTDIzLjI0MjggMzMuNzc3OEMyMy4xMTc3IDM0LjA4NjkgMjIuODAwOCAzNC41NTA2IDIyLjA4NjUgMzQuMzc0NkMyMi4xMTE3IDM0LjQxMTEgMjAuMjUyNiAzMy45MTg3IDIwLjI1MjYgMzMuOTE4N0wxOSAzNi43OTQ5TDIyLjI4MzQgMzcuNjFDMjIuODk0MiAzNy43NjI0IDIzLjQ5MjggMzcuOTIyIDI0LjA4MjEgMzguMDcyM0wyMy4wMzggNDIuMjQ3NEwyNS41NTgyIDQyLjg3MzZMMjYuNTkyMyAzOC43NDI5QzI3LjI4MDcgMzguOTI5IDI3Ljk0OSAzOS4xMDA3IDI4LjYwMyAzOS4yNjI0TDI3LjU3MjUgNDMuMzczOEwzMC4wOTU2IDQ0TDMxLjEzOTcgMzkuODMyOEMzNS40NDIyIDQwLjY0MzYgMzguNjc3NCA0MC4zMTY2IDQwLjAzOTIgMzYuNDQxNEM0MS4xMzY1IDMzLjMyMTIgMzkuOTg0NiAzMS41MjEzIDM3LjcyMDkgMzAuMzQ3N0MzOS4zNjk0IDI5Ljk2OTEgNDAuNjExMiAyOC44ODkyIDQwLjk0MjUgMjYuNjU4NVYyNi42NTg1Wk0zNS4xNzc3IDM0LjcwODhDMzQuMzk4IDM3LjgyOSAyOS4xMjI2IDM2LjE0MjIgMjcuNDEyMiAzNS43MTkzTDI4Ljc5NzcgMzAuMTg4MUMzMC41MDgxIDMwLjYxMzIgMzUuOTkyNiAzMS4
0NTQ4IDM1LjE3NzcgMzQuNzA4OFpNMzUuOTU4MSAyNi42MTM0QzM1LjI0NjcgMjkuNDUxNyAzMC44NTU5IDI4LjAwOTcgMjkuNDMxNiAyNy42NTYxTDMwLjY4NzcgMjIuNjM5NUMzMi4xMTIgMjIuOTkzIDM2LjY5OSAyMy42NTI4IDM1Ljk1ODEgMjYuNjEzNFoiLz4KPC9zdmc+Cg==",
# "background": "transparent",
# "fiatSymbol": "BTC",
# "decimals": 8,
# "tradeDecimals": 20,
# "displayDecimals": 4,
# "crypto": True,
# "depositEnabled": True,
# "withdrawalEnabled": True,
# "transferEnabled": True,
# "buyEnabled": False,
# "purchaseEnabled": False,
# "redeemEnabled": False,
# "active": True,
# "withdrawalFee": "50000000000000000",
# "purchaseCommissions": []
# },
# ]
#
result = []
for i in range(0, len(response)):
currency = response[i]
result.append(self.parse_currency(currency))
return self.index_by(result, 'code')
async def fetch_tickers(self, symbols=None, params={}):
"""
fetches price tickers for multiple markets, statistical calculations with the information calculated over the past 24 hours each market
:param [str]|None symbols: unified symbols of the markets to fetch the ticker for, all market tickers are returned if not assigned
:param dict params: extra parameters specific to the timex api endpoint
:returns dict: an array of `ticker structures <https://docs.ccxt.com/en/latest/manual.html#ticker-structure>`
"""
await self.load_markets()
period = self.safe_string(self.options['fetchTickers'], 'period', '1d')
request = {
'period': self.timeframes[period], # I1, I5, I15, I30, H1, H2, H4, H6, H12, D1, W1
}
response = await self.publicGetTickers(self.extend(request, params))
#
# [
# {
# "ask": 0.017,
# "bid": 0.016,
# "high": 0.019,
# "last": 0.017,
# "low": 0.015,
# "market": "TIME/ETH",
# "open": 0.016,
# "period": "H1",
# "timestamp": "2018-12-14T20:50:36.134Z",
# "volume": 4.57,
# "volumeQuote": 0.07312
# }
# ]
#
return self.parse_tickers(response, symbols)
async def fetch_ticker(self, symbol, params={}):
"""
fetches a price ticker, a statistical calculation with the information calculated over the past 24 hours for a specific market
:param str symbol: unified symbol of the market to fetch the ticker for
:param dict params: extra parameters specific to the timex api endpoint
:returns dict: a `ticker structure <https://docs.ccxt.com/en/latest/manual.html#ticker-structure>`
"""
await self.load_markets()
market = self.market(symbol)
period = self.safe_string(self.options['fetchTickers'], 'period', '1d')
request = {
'market': market['id'],
'period': self.timeframes[period], # I1, I5, I15, I30, H1, H2, H4, H6, H12, D1, W1
}
response = await self.publicGetTickers(self.extend(request, params))
#
# [
# {
# "ask": 0.017,
# "bid": 0.016,
# "high": 0.019,
# "last": 0.017,
# "low": 0.015,
# "market": "TIME/ETH",
# "open": 0.016,
# "period": "H1",
# "timestamp": "2018-12-14T20:50:36.134Z",
# "volume": 4.57,
# "volumeQuote": 0.07312
# }
# ]
#
ticker = self.safe_value(response, 0)
return self.parse_ticker(ticker, market)
async def fetch_order_book(self, symbol, limit=None, params={}):
"""
fetches information on open orders with bid(buy) and ask(sell) prices, volumes and other data
:param str symbol: unified symbol of the market to fetch the order book for
:param int|None limit: the maximum amount of order book entries to return
:param dict params: extra parameters specific to the timex api endpoint
:returns dict: A dictionary of `order book structures <https://docs.ccxt.com/en/latest/manual.html#order-book-structure>` indexed by market symbols
"""
await self.load_markets()
market = self.market(symbol)
request = {
'market': market['id'],
}
if limit is not None:
request['limit'] = limit
response = await self.publicGetOrderbookV2(self.extend(request, params))
#
# {
# "timestamp":"2019-12-05T00:21:09.538",
# "bid":[
# {
# "index":"2",
# "price":"0.02024007",
# "baseTokenAmount":"0.0096894",
# "baseTokenCumulativeAmount":"0.0096894",
# "quoteTokenAmount":"0.000196114134258",
# "quoteTokenCumulativeAmount":"0.000196114134258"
# },
# "ask":[
# {
# "index":"-3",
# "price":"0.02024012",
# "baseTokenAmount":"0.005",
# "baseTokenCumulativeAmount":"0.005",
# "quoteTokenAmount":"0.0001012006",
# "quoteTokenCumulativeAmount":"0.0001012006"
# },
# ]
# }
#
timestamp = self.parse8601(self.safe_string(response, 'timestamp'))
return self.parse_order_book(response, symbol, timestamp, 'bid', 'ask', 'price', 'baseTokenAmount')
async def fetch_trades(self, symbol, since=None, limit=None, params={}):
"""
get the list of most recent trades for a particular symbol
:param str symbol: unified symbol of the market to fetch trades for
:param int|None since: timestamp in ms of the earliest trade to fetch
:param int|None limit: the maximum amount of trades to fetch
:param dict params: extra parameters specific to the timex api endpoint
:returns [dict]: a list of `trade structures <https://docs.ccxt.com/en/latest/manual.html?#public-trades>`
"""
await self.load_markets()
market = self.market(symbol)
options = self.safe_value(self.options, 'fetchTrades', {})
defaultSort = self.safe_value(options, 'sort', 'timestamp,asc')
sort = self.safe_string(params, 'sort', defaultSort)
query = self.omit(params, 'sort')
request = {
# 'address': 'string', # trade’s member account(?)
# 'cursor': 1234, # int64(?)
# 'from': self.iso8601(since),
'market': market['id'],
# 'page': 0, # results page you want to retrieve 0 .. N
# 'size': limit, # number of records per page, 100 by default
'sort': sort, # array[string], sorting criteria in the format "property,asc" or "property,desc", default is ascending
# 'till': self.iso8601(self.milliseconds()),
}
if since is not None:
request['from'] = self.iso8601(since)
if limit is not None:
request['size'] = limit # default is 100
response = await self.publicGetTrades(self.extend(request, query))
#
# [
# {
# "id":1,
# "timestamp":"2019-06-25T17:01:50.309",
# "direction":"BUY",
# "price":"0.027",
# "quantity":"0.001"
# }
# ]
#
return self.parse_trades(response, market, since, limit)
    async def fetch_ohlcv(self, symbol, timeframe='1m', since=None, limit=None, params={}):
        """
        fetches historical candlestick data containing the open, high, low, and close price, and the volume of a market
        :param str symbol: unified symbol of the market to fetch OHLCV data for
        :param str timeframe: the length of time each candle represents
        :param int|None since: timestamp in ms of the earliest candle to fetch
        :param int|None limit: the maximum amount of candles to fetch
        :param dict params: extra parameters specific to the timex api endpoint
        :returns [[int]]: A list of candles ordered as timestamp, open, high, low, close, volume
        """
        await self.load_markets()
        market = self.market(symbol)
        request = {
            'market': market['id'],
            'period': self.timeframes[timeframe],
        }
        # if since and limit are not specified
        # the endpoint requires an explicit [from, till] window, so one is
        # derived from since/limit and the candle duration in seconds
        duration = self.parse_timeframe(timeframe)
        if since is not None:
            request['from'] = self.iso8601(since)
            if limit is not None:
                # till = since + (limit + 1) candles worth of milliseconds
                request['till'] = self.iso8601(self.sum(since, self.sum(limit, 1) * duration * 1000))
        elif limit is not None:
            # no start given: anchor the window at "now" and look back `limit` candles
            now = self.milliseconds()
            request['till'] = self.iso8601(now)
            request['from'] = self.iso8601(now - limit * duration * 1000 - 1)
        else:
            # neither given: open-ended window ending now
            request['till'] = self.iso8601(self.milliseconds())
        response = await self.publicGetCandles(self.extend(request, params))
        #
        # [
        #     {
        #         "timestamp":"2019-12-04T23:00:00",
        #         "open":"0.02024009",
        #         "high":"0.02024009",
        #         "low":"0.02024009",
        #         "close":"0.02024009",
        #         "volume":"0.00008096036",
        #         "volumeQuote":"0.004",
        #     },
        # ]
        #
        return self.parse_ohlcvs(response, market, timeframe, since, limit)
def parse_balance(self, response):
result = {
'info': response,
'timestamp': None,
'datetime': None,
}
for i in range(0, len(response)):
balance = response[i]
currencyId = self.safe_string(balance, 'currency')
code = self.safe_currency_code(currencyId)
account = self.account()
account['total'] = self.safe_string(balance, 'totalBalance')
account['used'] = self.safe_string(balance, 'lockedBalance')
result[code] = account
return self.safe_balance(result)
async def fetch_balance(self, params={}):
"""
query for balance and get the amount of funds available for trading or funds locked in orders
:param dict params: extra parameters specific to the timex api endpoint
:returns dict: a `balance structure <https://docs.ccxt.com/en/latest/manual.html?#balance-structure>`
"""
await self.load_markets()
response = await self.tradingGetBalances(params)
#
# [
# {"currency":"BTC","totalBalance":"0","lockedBalance":"0"},
# {"currency":"AUDT","totalBalance":"0","lockedBalance":"0"},
# {"currency":"ETH","totalBalance":"0","lockedBalance":"0"},
# {"currency":"TIME","totalBalance":"0","lockedBalance":"0"},
# {"currency":"USDT","totalBalance":"0","lockedBalance":"0"}
# ]
#
return self.parse_balance(response)
    async def create_order(self, symbol, type, side, amount, price=None, params={}):
        """
        create a trade order
        :param str symbol: unified symbol of the market to create an order in
        :param str type: 'market' or 'limit'
        :param str side: 'buy' or 'sell'
        :param float amount: how much of currency you want to trade in units of base currency
        :param float|None price: the price at which the order is to be fullfilled, in units of the quote currency, ignored in market orders
        :param dict params: extra parameters specific to the timex api endpoint
        :returns dict: an `order structure <https://docs.ccxt.com/en/latest/manual.html#order-structure>`
        """
        await self.load_markets()
        market = self.market(symbol)
        uppercaseSide = side.upper()
        uppercaseType = type.upper()
        # params['postOnly'] maps onto the exchange-specific POST_ONLY order type
        postOnly = self.safe_value(params, 'postOnly', False)
        if postOnly:
            uppercaseType = 'POST_ONLY'
            params = self.omit(params, ['postOnly'])
        request = {
            'symbol': market['id'],
            'quantity': self.amount_to_precision(symbol, amount),
            'side': uppercaseSide,
            'orderTypes': uppercaseType,
            # 'clientOrderId': '123',
            # 'expireIn': 1575523308,  # in seconds
            # 'expireTime': 1575523308,  # unix timestamp
        }
        query = params
        if (uppercaseType == 'LIMIT') or (uppercaseType == 'POST_ONLY'):
            request['price'] = self.price_to_precision(symbol, price)
            # limit orders require an expiration: explicit expireTime wins,
            # then explicit expireIn, then the exchange-wide options['expireIn'] default
            defaultExpireIn = self.safe_integer(self.options, 'expireIn')
            expireTime = self.safe_value(params, 'expireTime')
            expireIn = self.safe_value(params, 'expireIn', defaultExpireIn)
            if expireTime is not None:
                request['expireTime'] = expireTime
            elif expireIn is not None:
                request['expireIn'] = expireIn
            else:
                raise InvalidOrder(self.id + ' createOrder() method requires a expireTime or expireIn param for a ' + type + ' order, you can also set the expireIn exchange-wide option')
            # the expiration keys were consumed above, keep them out of the query string
            query = self.omit(params, ['expireTime', 'expireIn'])
        else:
            # market orders carry a zero price placeholder
            request['price'] = 0
        response = await self.tradingPostOrders(self.extend(request, query))
        #
        #     {
        #         "orders": [
        #             {
        #                 "cancelledQuantity": "0.3",
        #                 "clientOrderId": "my-order-1",
        #                 "createdAt": "1970-01-01T00:00:00",
        #                 "cursorId": 50,
        #                 "expireTime": "1970-01-01T00:00:00",
        #                 "filledQuantity": "0.3",
        #                 "id": "string",
        #                 "price": "0.017",
        #                 "quantity": "0.3",
        #                 "side": "BUY",
        #                 "symbol": "TIMEETH",
        #                 "type": "LIMIT",
        #                 "updatedAt": "1970-01-01T00:00:00"
        #             }
        #         ]
        #     }
        #
        # a single order was submitted, so parse the first element only
        orders = self.safe_value(response, 'orders', [])
        order = self.safe_value(orders, 0, {})
        return self.parse_order(order, market)
async def edit_order(self, id, symbol, type, side, amount=None, price=None, params={}):
await self.load_markets()
market = self.market(symbol)
request = {
'id': id,
}
if amount is not None:
request['quantity'] = self.amount_to_precision(symbol, amount)
if price is not None:
request['price'] = self.price_to_precision(symbol, price)
response = await self.tradingPutOrders(self.extend(request, params))
#
# {
# "changedOrders": [
# {
# "newOrder": {
# "cancelledQuantity": "0.3",
# "clientOrderId": "my-order-1",
# "createdAt": "1970-01-01T00:00:00",
# "cursorId": 50,
# "expireTime": "1970-01-01T00:00:00",
# "filledQuantity": "0.3",
# "id": "string",
# "price": "0.017",
# "quantity": "0.3",
# "side": "BUY",
# "symbol": "TIMEETH",
# "type": "LIMIT",
# "updatedAt": "1970-01-01T00:00:00"
# },
# "oldId": "string",
# },
# ],
# "unchangedOrders": ["string"],
# }
#
if 'unchangedOrders' in response:
orderIds = self.safe_value(response, 'unchangedOrders', [])
orderId = self.safe_string(orderIds, 0)
return {
'id': orderId,
'info': response,
}
orders = self.safe_value(response, 'changedOrders', [])
firstOrder = self.safe_value(orders, 0, {})
order = self.safe_value(firstOrder, 'newOrder', {})
return self.parse_order(order, market)
async def cancel_order(self, id, symbol=None, params={}):
"""
cancels an open order
:param str id: order id
:param str|None symbol: not used by timex cancelOrder()
:param dict params: extra parameters specific to the timex api endpoint
:returns dict: An `order structure <https://docs.ccxt.com/en/latest/manual.html#order-structure>`
"""
await self.load_markets()
return await self.cancel_orders([id], symbol, params)
async def cancel_orders(self, ids, symbol=None, params={}):
"""
cancel multiple orders
:param [str] ids: order ids
:param str|None symbol: unified market symbol, default is None
:param dict params: extra parameters specific to the timex api endpoint
:returns dict: an list of `order structures <https://docs.ccxt.com/en/latest/manual.html#order-structure>`
"""
await self.load_markets()
request = {
'id': ids,
}
response = await self.tradingDeleteOrders(self.extend(request, params))
#
# {
# "changedOrders": [
# {
# "newOrder": {
# "cancelledQuantity": "0.3",
# "clientOrderId": "my-order-1",
# "createdAt": "1970-01-01T00:00:00",
# "cursorId": 50,
# "expireTime": "1970-01-01T00:00:00",
# "filledQuantity": "0.3",
# "id": "string",
# "price": "0.017",
# "quantity": "0.3",
# "side": "BUY",
# "symbol": "TIMEETH",
# "type": "LIMIT",
# "updatedAt": "1970-01-01T00:00:00"
# },
# "oldId": "string",
# },
# ],
# "unchangedOrders": ["string"],
# }
return response
async def fetch_order(self, id, symbol=None, params={}):
"""
fetches information on an order made by the user
:param str|None symbol: not used by timex fetchOrder
:param dict params: extra parameters specific to the timex api endpoint
:returns dict: An `order structure <https://docs.ccxt.com/en/latest/manual.html#order-structure>`
"""
await self.load_markets()
request = {
'orderHash': id,
}
response = await self.historyGetOrdersDetails(request)
#
# {
# "order": {
# "cancelledQuantity": "0.3",
# "clientOrderId": "my-order-1",
# "createdAt": "1970-01-01T00:00:00",
# "cursorId": 50,
# "expireTime": "1970-01-01T00:00:00",
# "filledQuantity": "0.3",
# "id": "string",
# "price": "0.017",
# "quantity": "0.3",
# "side": "BUY",
# "symbol": "TIMEETH",
# "type": "LIMIT",
# "updatedAt": "1970-01-01T00:00:00"
# },
# "trades": [
# {
# "fee": "0.3",
# "id": 100,
# "makerOrTaker": "MAKER",
# "makerOrderId": "string",
# "price": "0.017",
# "quantity": "0.3",
# "side": "BUY",
# "symbol": "TIMEETH",
# "takerOrderId": "string",
# "timestamp": "2019-12-05T07:48:26.310Z"
# }
# ]
# }
#
order = self.safe_value(response, 'order', {})
trades = self.safe_value(response, 'trades', [])
return self.parse_order(self.extend(order, {'trades': trades}))
async def fetch_open_orders(self, symbol=None, since=None, limit=None, params={}):
"""
fetch all unfilled currently open orders
:param str|None symbol: unified market symbol
:param int|None since: the earliest time in ms to fetch open orders for
:param int|None limit: the maximum number of open orders structures to retrieve
:param dict params: extra parameters specific to the timex api endpoint
:returns [dict]: a list of `order structures <https://docs.ccxt.com/en/latest/manual.html#order-structure>`
"""
await self.load_markets()
options = self.safe_value(self.options, 'fetchOpenOrders', {})
defaultSort = self.safe_value(options, 'sort', 'createdAt,asc')
sort = self.safe_string(params, 'sort', defaultSort)
query = self.omit(params, 'sort')
request = {
# 'clientOrderId': '123', # order’s client id list for filter
# page: 0, # results page you want to retrieve(0 .. N)
'sort': sort, # sorting criteria in the format "property,asc" or "property,desc", default order is ascending, multiple sort criteria are supported
}
market = None
if symbol is not None:
market = self.market(symbol)
request['symbol'] = market['id']
if limit is not None:
request['size'] = limit
response = await self.tradingGetOrders(self.extend(request, query))
#
# {
# "orders": [
# {
# "cancelledQuantity": "0.3",
# "clientOrderId": "my-order-1",
# "createdAt": "1970-01-01T00:00:00",
# "cursorId": 50,
# "expireTime": "1970-01-01T00:00:00",
# "filledQuantity": "0.3",
# "id": "string",
# "price": "0.017",
# "quantity": "0.3",
# "side": "BUY",
# "symbol": "TIMEETH",
# "type": "LIMIT",
# "updatedAt": "1970-01-01T00:00:00"
# }
# ]
# }
#
orders = self.safe_value(response, 'orders', [])
return self.parse_orders(orders, market, since, limit)
    async def fetch_closed_orders(self, symbol=None, since=None, limit=None, params={}):
        """
        fetches information on multiple closed orders made by the user
        :param str|None symbol: unified market symbol of the market orders were made in
        :param int|None since: the earliest time in ms to fetch orders for
        :param int|None limit: the maximum number of orde structures to retrieve
        :param dict params: extra parameters specific to the timex api endpoint
        :returns [dict]: a list of [order structures]{@link https://docs.ccxt.com/en/latest/manual.html#order-structure
        """
        await self.load_markets()
        options = self.safe_value(self.options, 'fetchClosedOrders', {})
        defaultSort = self.safe_value(options, 'sort', 'createdAt,asc')
        sort = self.safe_string(params, 'sort', defaultSort)
        query = self.omit(params, 'sort')
        request = {
            # 'clientOrderId': '123',  # order’s client id list for filter
            # page: 0,  # results page you want to retrieve(0 .. N)
            'sort': sort,  # sorting criteria in the format "property,asc" or "property,desc", default order is ascending, multiple sort criteria are supported
            # NOTE(review): side is hard-coded to 'BUY', so SELL orders appear to be
            # excluded from the results - confirm intended behavior against the
            # history/orders endpoint; callers can override via params['side']='SELL'
            'side': 'BUY',  # or 'SELL'
            # 'till': self.iso8601(self.milliseconds()),
        }
        market = None
        if symbol is not None:
            market = self.market(symbol)
            request['symbol'] = market['id']
        if since is not None:
            request['from'] = self.iso8601(since)
        if limit is not None:
            request['size'] = limit
        response = await self.historyGetOrders(self.extend(request, query))
        #
        #     {
        #         "orders": [
        #             {
        #                 "cancelledQuantity": "0.3",
        #                 "clientOrderId": "my-order-1",
        #                 "createdAt": "1970-01-01T00:00:00",
        #                 "cursorId": 50,
        #                 "expireTime": "1970-01-01T00:00:00",
        #                 "filledQuantity": "0.3",
        #                 "id": "string",
        #                 "price": "0.017",
        #                 "quantity": "0.3",
        #                 "side": "BUY",
        #                 "symbol": "TIMEETH",
        #                 "type": "LIMIT",
        #                 "updatedAt": "1970-01-01T00:00:00"
        #             }
        #         ]
        #     }
        #
        orders = self.safe_value(response, 'orders', [])
        return self.parse_orders(orders, market, since, limit)
async def fetch_my_trades(self, symbol=None, since=None, limit=None, params={}):
"""
fetch all trades made by the user
:param str|None symbol: unified market symbol
:param int|None since: the earliest time in ms to fetch trades for
:param int|None limit: the maximum number of trades structures to retrieve
:param dict params: extra parameters specific to the timex api endpoint
:returns [dict]: a list of `trade structures <https://docs.ccxt.com/en/latest/manual.html#trade-structure>`
"""
await self.load_markets()
options = self.safe_value(self.options, 'fetchMyTrades', {})
defaultSort = self.safe_value(options, 'sort', 'timestamp,asc')
sort = self.safe_string(params, 'sort', defaultSort)
query = self.omit(params, 'sort')
request = {
# 'cursorId': 123, # int64(?)
# 'from': self.iso8601(since),
# 'makerOrderId': '1234', # maker order hash
# 'owner': '...', # owner address(?)
# 'page': 0, # results page you want to retrieve(0 .. N)
# 'side': 'BUY', # or 'SELL'
# 'size': limit,
'sort': sort, # sorting criteria in the format "property,asc" or "property,desc", default order is ascending, multiple sort criteria are supported
# 'symbol': market['id'],
# 'takerOrderId': '1234',
# 'till': self.iso8601(self.milliseconds()),
}
market = None
if symbol is not None:
market = self.market(symbol)
request['symbol'] = market['id']
if since is not None:
request['from'] = self.iso8601(since)
if limit is not None:
request['size'] = limit
response = await self.historyGetTrades(self.extend(request, query))
#
# {
# "trades": [
# {
# "fee": "0.3",
# "id": 100,
# "makerOrTaker": "MAKER",
# "makerOrderId": "string",
# "price": "0.017",
# "quantity": "0.3",
# "side": "BUY",
# "symbol": "TIMEETH",
# "takerOrderId": "string",
# "timestamp": "2019-12-08T04:54:11.171Z"
# }
# ]
# }
#
trades = self.safe_value(response, 'trades', [])
return self.parse_trades(trades, market, since, limit)
def parse_trading_fee(self, fee, market=None):
#
# {
# "fee": 0.0075,
# "market": "ETHBTC"
# }
#
marketId = self.safe_string(fee, 'market')
rate = self.safe_number(fee, 'fee')
return {
'info': fee,
'symbol': self.safe_symbol(marketId, market),
'maker': rate,
'taker': rate,
}
async def fetch_trading_fee(self, symbol, params={}):
"""
fetch the trading fees for a market
:param str symbol: unified market symbol
:param dict params: extra parameters specific to the timex api endpoint
:returns dict: a `fee structure <https://docs.ccxt.com/en/latest/manual.html#fee-structure>`
"""
await self.load_markets()
market = self.market(symbol)
request = {
'markets': market['id'],
}
response = await self.tradingGetFees(self.extend(request, params))
#
# [
# {
# "fee": 0.0075,
# "market": "ETHBTC"
# }
# ]
#
result = self.safe_value(response, 0, {})
return self.parse_trading_fee(result, market)
def parse_market(self, market):
#
# {
# "symbol": "ETHBTC",
# "name": "ETH/BTC",
# "baseCurrency": "ETH",
# "baseTokenAddress": "0x45932db54b38af1f5a57136302eeba66a5975c15",
# "quoteCurrency": "BTC",
# "quoteTokenAddress": "0x8370fbc6ddec1e18b4e41e72ed943e238458487c",
# "feeCurrency": "BTC",
# "feeTokenAddress": "0x8370fbc6ddec1e18b4e41e72ed943e238458487c",
# "quantityIncrement": "0.0000001",
# "takerFee": "0.005",
# "makerFee": "0.0025",
# "tickSize": "0.00000001",
# "baseMinSize": "0.0001",
# "quoteMinSize": "0.00001",
# "locked": False
# }
#
locked = self.safe_value(market, 'locked')
id = self.safe_string(market, 'symbol')
baseId = self.safe_string(market, 'baseCurrency')
quoteId = self.safe_string(market, 'quoteCurrency')
base = self.safe_currency_code(baseId)
quote = self.safe_currency_code(quoteId)
amountIncrement = self.safe_string(market, 'quantityIncrement')
minBase = self.safe_string(market, 'baseMinSize')
minAmount = Precise.string_max(amountIncrement, minBase)
priceIncrement = self.safe_string(market, 'tickSize')
minCost = self.safe_string(market, 'quoteMinSize')
return {
'id': id,
'symbol': base + '/' + quote,
'base': base,
'quote': quote,
'settle': None,
'baseId': baseId,
'quoteId': quoteId,
'settleId': None,
'type': 'spot',
'spot': True,
'margin': False,
'swap': False,
'future': False,
'option': False,
'active': not locked,
'contract': False,
'linear': None,
'inverse': None,
'taker': self.safe_number(market, 'takerFee'),
'maker': self.safe_number(market, 'makerFee'),
'contractSize': None,
'expiry': None,
'expiryDatetime': None,
'strike': None,
'optionType': None,
'precision': {
'amount': self.safe_number(market, 'quantityIncrement'),
'price': self.safe_number(market, 'tickSize'),
},
'limits': {
'leverage': {
'min': None,
'max': None,
},
'amount': {
'min': self.parse_number(minAmount),
'max': None,
},
'price': {
'min': self.parse_number(priceIncrement),
'max': None,
},
'cost': {
'min': minCost,
'max': None,
},
},
'info': market,
}
def parse_currency(self, currency):
        """
        Convert a raw exchange currency entry into a unified ccxt currency structure.

        :param dict currency: raw entry from GET /public/currencies, e.g.:

            {
                "symbol": "BTC",
                "name": "Bitcoin",
                "address": "0x8370fbc6ddec1e18b4e41e72ed943e238458487c",
                "decimals": 8,
                "tradeDecimals": 20,
                "crypto": True,
                "depositEnabled": True,
                "withdrawalEnabled": True,
                "transferEnabled": True,
                "active": True,
                "withdrawalFee": "50000000000000000",
                "purchaseCommissions": []
            }

        some entries omit most of the optional fields,
        see https://github.com/ccxt/ccxt/issues/6878

        :returns dict: currency description in the unified ccxt format
        """
        id = self.safe_string(currency, 'symbol')
        code = self.safe_currency_code(id)
        name = self.safe_string(currency, 'name')
        depositEnabled = self.safe_value(currency, 'depositEnabled')
        withdrawEnabled = self.safe_value(currency, 'withdrawalEnabled')
        isActive = self.safe_value(currency, 'active')
        active = depositEnabled and withdrawEnabled and isActive
        # the withdrawal fee is reported as a fixed-point integer string
        # scaled by 10 ** tradeDecimals, e.g. "50000000000000000" with
        # tradeDecimals == 20 means 0.0005
        feeString = self.safe_string(currency, 'withdrawalFee')
        tradeDecimals = self.safe_integer(currency, 'tradeDecimals')
        fee = None
        if (feeString is not None) and (tradeDecimals is not None):
            feeStringLen = len(feeString)
            # position of the implied decimal point inside feeString
            dotIndex = feeStringLen - tradeDecimals
            if dotIndex > 0:
                whole = feeString[0:dotIndex]
                # fixed: the fractional part is everything after the decimal
                # point, feeString[dotIndex:] - the previous feeString[-dotIndex:]
                # duplicated digits whenever dotIndex != tradeDecimals
                fraction = feeString[dotIndex:]
                fee = self.parse_number(whole + '.' + fraction)
            else:
                # the value is smaller than 1: left-pad the digits with zeros
                fraction = '.'
                for i in range(0, -dotIndex):
                    fraction += '0'
                fee = self.parse_number(fraction + feeString)
        return {
            # NOTE(review): 'id' is set to the unified code rather than the
            # raw exchange id - confirm this is intended
            'id': code,
            'code': code,
            'info': currency,
            'type': None,
            'name': name,
            'active': active,
            'deposit': depositEnabled,
            'withdraw': withdrawEnabled,
            'fee': fee,
            'precision': self.parse_number(self.parse_precision(self.safe_string(currency, 'decimals'))),
            'limits': {
                'withdraw': {'min': fee, 'max': None},
                'amount': {'min': None, 'max': None},
            },
        }
def parse_ticker(self, ticker, market=None):
        """
        Convert a raw ticker entry into a unified ccxt ticker structure.

        A raw ticker looks like:

            {
                "ask": 0.017,
                "bid": 0.016,
                "high": 0.019,
                "last": 0.017,
                "low": 0.015,
                "market": "TIME/ETH",
                "open": 0.016,
                "period": "H1",
                "timestamp": "2018-12-14T20:50:36.134Z",
                "volume": 4.57,
                "volumeQuote": 0.07312
            }
        """
        timestamp = self.parse8601(self.safe_string(ticker, 'timestamp'))
        lastPrice = self.safe_string(ticker, 'last')
        # the exchange does not report close separately - reuse the last price
        result = {
            'symbol': self.safe_symbol(self.safe_string(ticker, 'market'), market, '/'),
            'info': ticker,
            'timestamp': timestamp,
            'datetime': self.iso8601(timestamp),
            'high': self.safe_string(ticker, 'high'),
            'low': self.safe_string(ticker, 'low'),
            'bid': self.safe_string(ticker, 'bid'),
            'bidVolume': None,
            'ask': self.safe_string(ticker, 'ask'),
            'askVolume': None,
            'vwap': None,
            'open': self.safe_string(ticker, 'open'),
            'close': lastPrice,
            'last': lastPrice,
            'previousClose': None,
            'change': None,
            'percentage': None,
            'average': None,
            'baseVolume': self.safe_string(ticker, 'volume'),
            'quoteVolume': self.safe_string(ticker, 'volumeQuote'),
        }
        return self.safe_ticker(result, market)
def parse_trade(self, trade, market=None):
        """
        Convert a raw public or private trade into a unified ccxt trade structure.

        Public trades (fetchTrades) look like:

            {
                "id":1,
                "timestamp":"2019-06-25T17:01:50.309",
                "direction":"BUY",
                "price":"0.027",
                "quantity":"0.001"
            }

        Private trades (fetchMyTrades, fetchOrder) look like:

            {
                "id": "7613414",
                "makerOrderId": "0x8420af06...",
                "takerOrderId": "0x1235ef15...",
                "symbol": "LTCUSDT",
                "side": "BUY",
                "quantity": "0.2",
                "fee": "0.22685",
                "feeToken": "USDT",
                "price": "226.85",
                "makerOrTaker": "TAKER",
                "timestamp": "2021-04-09T15:39:45.608"
            }
        """
        timestamp = self.parse8601(self.safe_string(trade, 'timestamp'))
        symbol = self.safe_symbol(self.safe_string(trade, 'symbol'), market)
        priceString = self.safe_string(trade, 'price')
        amountString = self.safe_string(trade, 'quantity')
        takerOrMaker = self.safe_string_lower(trade, 'makerOrTaker')
        # private trades carry the id of the matching order under
        # 'makerOrderId' / 'takerOrderId', selected by makerOrTaker
        orderId = None
        if takerOrMaker is not None:
            orderId = self.safe_string(trade, takerOrMaker + 'OrderId')
        fee = None
        feeCost = self.safe_number(trade, 'fee')
        if feeCost is not None:
            fee = {
                'cost': feeCost,
                'currency': self.safe_currency_code(self.safe_string(trade, 'feeToken')),
            }
        return {
            'info': trade,
            'id': self.safe_string(trade, 'id'),
            'timestamp': timestamp,
            'datetime': self.iso8601(timestamp),
            'symbol': symbol,
            'order': orderId,
            'type': None,
            'side': self.safe_string_lower_2(trade, 'direction', 'side'),
            'price': self.parse_number(priceString),
            'amount': self.parse_number(amountString),
            'cost': self.parse_number(Precise.string_mul(priceString, amountString)),
            'takerOrMaker': takerOrMaker,
            'fee': fee,
        }
def parse_ohlcv(self, ohlcv, market=None):
        """
        Convert a raw candle into the unified
        [timestamp, open, high, low, close, volume] list.

        A raw candle looks like:

            {
                "timestamp":"2019-12-04T23:00:00",
                "open":"0.02024009",
                "high":"0.02024009",
                "low":"0.02024009",
                "close":"0.02024009",
                "volume":"0.00008096036",
                "volumeQuote":"0.004",
            }
        """
        timestamp = self.parse8601(self.safe_string(ohlcv, 'timestamp'))
        ohlcvFields = ['open', 'high', 'low', 'close', 'volume']
        return [timestamp] + [self.safe_number(ohlcv, field) for field in ohlcvFields]
def parse_order(self, order, market=None):
        """
        Convert a raw order into a unified ccxt order structure.

        Used by fetchOrder, createOrder, cancelOrder, cancelOrders,
        fetchOpenOrders and fetchClosedOrders.
        """
        #
        # fetchOrder, createOrder, cancelOrder, cancelOrders, fetchOpenOrders, fetchClosedOrders
        #
        #     {
        #         "cancelledQuantity": "0.3",
        #         "clientOrderId": "my-order-1",
        #         "createdAt": "1970-01-01T00:00:00",
        #         "cursorId": 50,
        #         "expireTime": "1970-01-01T00:00:00",
        #         "filledQuantity": "0.3",
        #         "id": "string",
        #         "price": "0.017",
        #         "quantity": "0.3",
        #         "side": "BUY",
        #         "symbol": "TIMEETH",
        #         "type": "LIMIT",
        #         "updatedAt": "1970-01-01T00:00:00"
        #         "trades": [],  # injected from the outside
        #     }
        #
        id = self.safe_string(order, 'id')
        type = self.safe_string_lower(order, 'type')
        side = self.safe_string_lower(order, 'side')
        marketId = self.safe_string(order, 'symbol')
        symbol = self.safe_symbol(marketId, market)
        timestamp = self.parse8601(self.safe_string(order, 'createdAt'))
        price = self.safe_string(order, 'price')
        amount = self.safe_string(order, 'quantity')
        filled = self.safe_string(order, 'filledQuantity')
        # omit_zero() maps "0" to None, so canceledQuantity is non-None only
        # when some quantity was actually cancelled
        canceledQuantity = self.omit_zero(self.safe_string(order, 'cancelledQuantity'))
        # status derivation: fully filled -> closed, partially/fully
        # cancelled -> canceled, otherwise still open
        status = None
        if Precise.string_equals(filled, amount):
            status = 'closed'
        elif canceledQuantity is not None:
            status = 'canceled'
        else:
            status = 'open'
        # 'trades' is not part of the exchange payload - callers such as
        # fetch_order() inject it before calling parse_order()
        rawTrades = self.safe_value(order, 'trades', [])
        clientOrderId = self.safe_string(order, 'clientOrderId')
        return self.safe_order({
            'info': order,
            'id': id,
            'clientOrderId': clientOrderId,
            'timestamp': timestamp,
            'datetime': self.iso8601(timestamp),
            'lastTradeTimestamp': None,
            'symbol': symbol,
            'type': type,
            'timeInForce': None,
            'postOnly': None,
            'side': side,
            'price': price,
            'stopPrice': None,
            'amount': amount,
            'cost': None,
            'average': None,
            'filled': filled,
            'remaining': None,
            'status': status,
            'fee': None,
            'trades': rawTrades,
        }, market)
def sign(self, path, api='public', method='GET', params={}, headers=None, body=None):
        """
        Build the url, headers and body for an API request.

        Query parameters are appended to the url for every request; non-public
        endpoints additionally get an HTTP Basic 'authorization' header built
        from the api key and secret.
        """
        url = self.urls['api'] + '/' + api + '/' + path
        if params:
            url += '?' + self.urlencode_with_array_repeat(params)
        if api != 'public':
            self.check_required_credentials()
            # HTTP Basic auth: base64('apiKey:secret')
            auth = self.string_to_base64(self.apiKey + ':' + self.secret)
            secret = 'Basic ' + self.decode(auth)
            headers = {'authorization': secret}
        return {'url': url, 'method': method, 'body': body, 'headers': headers}
def handle_errors(self, statusCode, statusText, url, method, responseHeaders, responseBody, response, requestHeaders, requestBody):
        """
        Map error responses (HTTP status >= 400) onto ccxt exception classes.

        Tries the broad message patterns first, then the exact numeric code,
        then the exact message, against self.exceptions; raises a generic
        ExchangeError when nothing matches.
        """
        if response is None:
            return
        if statusCode >= 400:
            #
            # {"error":{"timestamp":"05.12.2019T05:25:43.584+0000","status":"BAD_REQUEST","message":"Insufficient ETH balance. Required: 1, actual: 0.","code":4001}}
            # {"error":{"timestamp":"05.12.2019T04:03:25.419+0000","status":"FORBIDDEN","message":"Access denied","code":4300}}
            #
            feedback = self.id + ' ' + responseBody
            error = self.safe_value(response, 'error')
            if error is None:
                # some errors come back without the 'error' wrapper
                error = response
            code = self.safe_string_2(error, 'code', 'status')
            message = self.safe_string_2(error, 'message', 'debugMessage')
            self.throw_broadly_matched_exception(self.exceptions['broad'], message, feedback)
            self.throw_exactly_matched_exception(self.exceptions['exact'], code, feedback)
            self.throw_exactly_matched_exception(self.exceptions['exact'], message, feedback)
            raise ExchangeError(feedback)
| 43.904586 | 2,235 | 0.497953 |
rt.base.exchange import Exchange
from ccxt.base.errors import ExchangeError
from ccxt.base.errors import AuthenticationError
from ccxt.base.errors import PermissionDenied
from ccxt.base.errors import BadRequest
from ccxt.base.errors import InsufficientFunds
from ccxt.base.errors import InvalidOrder
from ccxt.base.errors import OrderNotFound
from ccxt.base.errors import NotSupported
from ccxt.base.errors import RateLimitExceeded
from ccxt.base.errors import ExchangeNotAvailable
from ccxt.base.decimal_to_precision import TICK_SIZE
from ccxt.base.precise import Precise
class timex(Exchange):
def describe(self):
return self.deep_extend(super(timex, self).describe(), {
'id': 'timex',
'name': 'TimeX',
'countries': ['AU'],
'version': 'v1',
'rateLimit': 1500,
'has': {
'CORS': None,
'spot': True,
'margin': False,
'swap': False,
'future': False,
'option': False,
'addMargin': False,
'cancelOrder': True,
'cancelOrders': True,
'createOrder': True,
'createReduceOnlyOrder': False,
'createStopLimitOrder': False,
'createStopMarketOrder': False,
'createStopOrder': False,
'editOrder': True,
'fetchBalance': True,
'fetchBorrowRate': False,
'fetchBorrowRateHistories': False,
'fetchBorrowRateHistory': False,
'fetchBorrowRates': False,
'fetchBorrowRatesPerSymbol': False,
'fetchClosedOrders': True,
'fetchCurrencies': True,
'fetchFundingHistory': False,
'fetchFundingRate': False,
'fetchFundingRateHistory': False,
'fetchFundingRates': False,
'fetchIndexOHLCV': False,
'fetchLeverage': False,
'fetchLeverageTiers': False,
'fetchMarkets': True,
'fetchMarkOHLCV': False,
'fetchMyTrades': True,
'fetchOHLCV': True,
'fetchOpenInterestHistory': False,
'fetchOpenOrders': True,
'fetchOrder': True,
'fetchOrderBook': True,
'fetchPosition': False,
'fetchPositions': False,
'fetchPositionsRisk': False,
'fetchPremiumIndexOHLCV': False,
'fetchTicker': True,
'fetchTickers': True,
'fetchTrades': True,
'fetchTradingFee': True,
'reduceMargin': False,
'setLeverage': False,
'setMarginMode': False,
'setPositionMode': False,
},
'timeframes': {
'1m': 'I1',
'5m': 'I5',
'15m': 'I15',
'30m': 'I30',
'1h': 'H1',
'2h': 'H2',
'4h': 'H4',
'6h': 'H6',
'12h': 'H12',
'1d': 'D1',
'1w': 'W1',
},
'urls': {
'logo': 'https://user-images.githubusercontent.com/1294454/70423869-6839ab00-1a7f-11ea-8f94-13ae72c31115.jpg',
'api': 'https://plasma-relay-backend.timex.io',
'www': 'https://timex.io',
'doc': 'https://docs.timex.io',
'referral': 'https://timex.io/?refcode=1x27vNkTbP1uwkCck',
},
'api': {
'custody': {
'get': [
'credentials',
'credentials/h/{hash}',
'credentials/k/{key}',
'credentials/me/address',
'deposit-addresses',
'deposit-addresses/h/{hash}',
],
},
'history': {
'get': [
'orders',
'orders/details',
'orders/export/csv',
'trades',
'trades/export/csv',
],
},
'currencies': {
'get': [
'a/{address}',
'i/{id}',
's/{symbol}',
],
'post': [
'perform',
'prepare',
'remove/perform',
's/{symbol}/remove/prepare',
's/{symbol}/update/perform',
's/{symbol}/update/prepare',
],
},
'markets': {
'get': [
'i/{id}',
's/{symbol}',
],
'post': [
'perform',
'prepare',
'remove/perform',
's/{symbol}/remove/prepare',
's/{symbol}/update/perform',
's/{symbol}/update/prepare',
],
},
'public': {
'get': [
'candles',
'currencies',
'markets',
'orderbook',
'orderbook/raw',
'orderbook/v2',
'tickers',
'trades',
],
},
'statistics': {
'get': [
'address',
],
},
'trading': {
'get': [
'balances',
'fees',
'orders',
],
'post': [
'orders',
'orders/json',
],
'put': [
'orders',
'orders/json',
],
'delete': [
'orders',
'orders/json',
],
},
'tradingview': {
'get': [
'config',
'history',
'symbol_info',
'time',
],
},
},
'precisionMode': TICK_SIZE,
'exceptions': {
'exact': {
'0': ExchangeError,
'1': NotSupported,
'4000': BadRequest,
'4001': BadRequest,
'4002': InsufficientFunds,
'4003': AuthenticationError,
'4004': AuthenticationError,
'4005': BadRequest,
'4006': BadRequest,
'4007': BadRequest,
'4300': PermissionDenied,
'4100': AuthenticationError,
'4400': OrderNotFound,
'5001': InvalidOrder,
'5002': ExchangeError,
'400': BadRequest,
'401': AuthenticationError,
'403': PermissionDenied,
'404': OrderNotFound,
'429': RateLimitExceeded,
'500': ExchangeError,
'503': ExchangeNotAvailable,
},
'broad': {
'Insufficient': InsufficientFunds,
},
},
'options': {
'expireIn': 31536000,
'fetchTickers': {
'period': '1d',
},
'fetchTrades': {
'sort': 'timestamp,asc',
},
'fetchMyTrades': {
'sort': 'timestamp,asc',
},
'fetchOpenOrders': {
'sort': 'createdAt,asc',
},
'fetchClosedOrders': {
'sort': 'createdAt,asc',
},
'defaultSort': 'timestamp,asc',
'defaultSortOrders': 'createdAt,asc',
},
})
    async def fetch_markets(self, params={}):
        """Fetch all markets from GET /public/markets and return them as a
        list of unified ccxt market structures."""
        response = await self.publicGetMarkets(params)
        result = []
        for i in range(0, len(response)):
            result.append(self.parse_market(response[i]))
        return result
    async def fetch_currencies(self, params={}):
        """Fetch all currencies from GET /public/currencies and return a
        dict of unified currency structures indexed by currency code."""
        response = await self.publicGetCurrencies(params)
        result = []
        for i in range(0, len(response)):
            currency = response[i]
            result.append(self.parse_currency(currency))
        return self.index_by(result, 'code')
async def fetch_tickers(self, symbols=None, params={}):
await self.load_markets()
period = self.safe_string(self.options['fetchTickers'], 'period', '1d')
request = {
'period': self.timeframes[period],
}
response = await self.publicGetTickers(self.extend(request, params))
return self.parse_tickers(response, symbols)
async def fetch_ticker(self, symbol, params={}):
await self.load_markets()
market = self.market(symbol)
period = self.safe_string(self.options['fetchTickers'], 'period', '1d')
request = {
'market': market['id'],
'period': self.timeframes[period],
}
response = await self.publicGetTickers(self.extend(request, params))
ticker = self.safe_value(response, 0)
return self.parse_ticker(ticker, market)
    async def fetch_order_book(self, symbol, limit=None, params={}):
        """
        Fetch the order book for a market from GET /public/orderbook/v2.

        :param str symbol: unified market symbol
        :param int limit: maximum number of price levels to return(optional)
        :param dict params: extra parameters passed through to the endpoint
        :returns dict: unified order book structure
        """
        await self.load_markets()
        market = self.market(symbol)
        request = {
            'market': market['id'],
        }
        if limit is not None:
            request['limit'] = limit
        response = await self.publicGetOrderbookV2(self.extend(request, params))
        timestamp = self.parse8601(self.safe_string(response, 'timestamp'))
        # the sides come back under 'bid'/'ask' with 'price' and
        # 'baseTokenAmount' fields per level
        return self.parse_order_book(response, symbol, timestamp, 'bid', 'ask', 'price', 'baseTokenAmount')
async def fetch_trades(self, symbol, since=None, limit=None, params={}):
await self.load_markets()
market = self.market(symbol)
options = self.safe_value(self.options, 'fetchTrades', {})
defaultSort = self.safe_value(options, 'sort', 'timestamp,asc')
sort = self.safe_string(params, 'sort', defaultSort)
query = self.omit(params, 'sort')
request = {
'market': market['id'],
ot None:
request['from'] = self.iso8601(since)
if limit is not None:
request['size'] = limit
response = await self.publicGetTrades(self.extend(request, query))
return self.parse_trades(response, market, since, limit)
async def fetch_ohlcv(self, symbol, timeframe='1m', since=None, limit=None, params={}):
await self.load_markets()
market = self.market(symbol)
request = {
'market': market['id'],
'period': self.timeframes[timeframe],
}
duration = self.parse_timeframe(timeframe)
if since is not None:
request['from'] = self.iso8601(since)
if limit is not None:
request['till'] = self.iso8601(self.sum(since, self.sum(limit, 1) * duration * 1000))
elif limit is not None:
now = self.milliseconds()
request['till'] = self.iso8601(now)
request['from'] = self.iso8601(now - limit * duration * 1000 - 1)
else:
request['till'] = self.iso8601(self.milliseconds())
response = await self.publicGetCandles(self.extend(request, params))
return self.parse_ohlcvs(response, market, timeframe, since, limit)
    def parse_balance(self, response):
        """Convert the raw GET /trading/balances response(a list of per-currency
        entries) into a unified ccxt balance structure."""
        result = {
            'info': response,
            'timestamp': None,
            'datetime': None,
        }
        for i in range(0, len(response)):
            balance = response[i]
            currencyId = self.safe_string(balance, 'currency')
            code = self.safe_currency_code(currencyId)
            account = self.account()
            # the exchange reports only total and locked amounts - the free
            # balance is derived inside safe_balance()
            account['total'] = self.safe_string(balance, 'totalBalance')
            account['used'] = self.safe_string(balance, 'lockedBalance')
            result[code] = account
        return self.safe_balance(result)
    async def fetch_balance(self, params={}):
        """Fetch account balances from GET /trading/balances and return a
        unified balance structure."""
        await self.load_markets()
        response = await self.tradingGetBalances(params)
        return self.parse_balance(response)
async def create_order(self, symbol, type, side, amount, price=None, params={}):
await self.load_markets()
market = self.market(symbol)
uppercaseSide = side.upper()
uppercaseType = type.upper()
postOnly = self.safe_value(params, 'postOnly', False)
if postOnly:
uppercaseType = 'POST_ONLY'
params = self.omit(params, ['postOnly'])
request = {
'symbol': market['id'],
'quantity': self.amount_to_precision(symbol, amount),
'side': uppercaseSide,
'orderTypes': uppercaseType,
query = params
if (uppercaseType == 'LIMIT') or (uppercaseType == 'POST_ONLY'):
request['price'] = self.price_to_precision(symbol, price)
defaultExpireIn = self.safe_integer(self.options, 'expireIn')
expireTime = self.safe_value(params, 'expireTime')
expireIn = self.safe_value(params, 'expireIn', defaultExpireIn)
if expireTime is not None:
request['expireTime'] = expireTime
elif expireIn is not None:
request['expireIn'] = expireIn
else:
raise InvalidOrder(self.id + ' createOrder() method requires a expireTime or expireIn param for a ' + type + ' order, you can also set the expireIn exchange-wide option')
query = self.omit(params, ['expireTime', 'expireIn'])
else:
request['price'] = 0
response = await self.tradingPostOrders(self.extend(request, query))
orders = self.safe_value(response, 'orders', [])
order = self.safe_value(orders, 0, {})
return self.parse_order(order, market)
async def edit_order(self, id, symbol, type, side, amount=None, price=None, params={}):
await self.load_markets()
market = self.market(symbol)
request = {
'id': id,
}
if amount is not None:
request['quantity'] = self.amount_to_precision(symbol, amount)
if price is not None:
request['price'] = self.price_to_precision(symbol, price)
response = await self.tradingPutOrders(self.extend(request, params))
if 'unchangedOrders' in response:
orderIds = self.safe_value(response, 'unchangedOrders', [])
orderId = self.safe_string(orderIds, 0)
return {
'id': orderId,
'info': response,
}
orders = self.safe_value(response, 'changedOrders', [])
firstOrder = self.safe_value(orders, 0, {})
order = self.safe_value(firstOrder, 'newOrder', {})
return self.parse_order(order, market)
    async def cancel_order(self, id, symbol=None, params={}):
        """Cancel a single order by id; delegates to cancel_orders()."""
        await self.load_markets()
        return await self.cancel_orders([id], symbol, params)
    async def cancel_orders(self, ids, symbol=None, params={}):
        """
        Cancel multiple orders by id via DELETE /trading/orders.

        :param list ids: exchange order ids to cancel
        :param str symbol: unused by this endpoint, kept for API uniformity
        :returns dict: the raw exchange response
        """
        await self.load_markets()
        request = {
            'id': ids,
        }
        response = await self.tradingDeleteOrders(self.extend(request, params))
        return response
async def fetch_order(self, id, symbol=None, params={}):
await self.load_markets()
request = {
'orderHash': id,
}
response = await self.historyGetOrdersDetails(request)
order = self.safe_value(response, 'order', {})
trades = self.safe_value(response, 'trades', [])
return self.parse_order(self.extend(order, {'trades': trades}))
async def fetch_open_orders(self, symbol=None, since=None, limit=None, params={}):
await self.load_markets()
options = self.safe_value(self.options, 'fetchOpenOrders', {})
defaultSort = self.safe_value(options, 'sort', 'createdAt,asc')
sort = self.safe_string(params, 'sort', defaultSort)
query = self.omit(params, 'sort')
request = {
if symbol is not None:
market = self.market(symbol)
request['symbol'] = market['id']
if limit is not None:
request['size'] = limit
response = await self.tradingGetOrders(self.extend(request, query))
orders = self.safe_value(response, 'orders', [])
return self.parse_orders(orders, market, since, limit)
async def fetch_closed_orders(self, symbol=None, since=None, limit=None, params={}):
await self.load_markets()
options = self.safe_value(self.options, 'fetchClosedOrders', {})
defaultSort = self.safe_value(options, 'sort', 'createdAt,asc')
sort = self.safe_string(params, 'sort', defaultSort)
query = self.omit(params, 'sort')
request = {
}
market = None
if symbol is not None:
market = self.market(symbol)
request['symbol'] = market['id']
if since is not None:
request['from'] = self.iso8601(since)
if limit is not None:
request['size'] = limit
response = await self.historyGetOrders(self.extend(request, query))
orders = self.safe_value(response, 'orders', [])
return self.parse_orders(orders, market, since, limit)
async def fetch_my_trades(self, symbol=None, since=None, limit=None, params={}):
await self.load_markets()
options = self.safe_value(self.options, 'fetchMyTrades', {})
defaultSort = self.safe_value(options, 'sort', 'timestamp,asc')
sort = self.safe_string(params, 'sort', defaultSort)
query = self.omit(params, 'sort')
request = {
}
market = None
if symbol is not None:
market = self.market(symbol)
request['symbol'] = market['id']
if since is not None:
request['from'] = self.iso8601(since)
if limit is not None:
request['size'] = limit
response = await self.historyGetTrades(self.extend(request, query))
trades = self.safe_value(response, 'trades', [])
return self.parse_trades(trades, market, since, limit)
    def parse_trading_fee(self, fee, market=None):
        """Convert a raw fee entry into a unified trading fee structure.
        The exchange reports a single rate, used for both maker and taker."""
        marketId = self.safe_string(fee, 'market')
        rate = self.safe_number(fee, 'fee')
        return {
            'info': fee,
            'symbol': self.safe_symbol(marketId, market),
            'maker': rate,
            'taker': rate,
        }
    async def fetch_trading_fee(self, symbol, params={}):
        """Fetch the trading fee for a single market from GET /trading/fees
        and return it as a unified trading fee structure."""
        await self.load_markets()
        market = self.market(symbol)
        request = {
            'markets': market['id'],
        }
        response = await self.tradingGetFees(self.extend(request, params))
        # the endpoint returns a list; only the requested market is present
        result = self.safe_value(response, 0, {})
        return self.parse_trading_fee(result, market)
def parse_market(self, market):
locked = self.safe_value(market, 'locked')
id = self.safe_string(market, 'symbol')
baseId = self.safe_string(market, 'baseCurrency')
quoteId = self.safe_string(market, 'quoteCurrency')
base = self.safe_currency_code(baseId)
quote = self.safe_currency_code(quoteId)
amountIncrement = self.safe_string(market, 'quantityIncrement')
minBase = self.safe_string(market, 'baseMinSize')
minAmount = Precise.string_max(amountIncrement, minBase)
priceIncrement = self.safe_string(market, 'tickSize')
minCost = self.safe_string(market, 'quoteMinSize')
return {
'id': id,
'symbol': base + '/' + quote,
'base': base,
'quote': quote,
'settle': None,
'baseId': baseId,
'quoteId': quoteId,
'settleId': None,
'type': 'spot',
'spot': True,
'margin': False,
'swap': False,
'future': False,
'option': False,
'active': not locked,
'contract': False,
'linear': None,
'inverse': None,
'taker': self.safe_number(market, 'takerFee'),
'maker': self.safe_number(market, 'makerFee'),
'contractSize': None,
'expiry': None,
'expiryDatetime': None,
'strike': None,
'optionType': None,
'precision': {
'amount': self.safe_number(market, 'quantityIncrement'),
'price': self.safe_number(market, 'tickSize'),
},
'limits': {
'leverage': {
'min': None,
'max': None,
},
'amount': {
'min': self.parse_number(minAmount),
'max': None,
},
'price': {
'min': self.parse_number(priceIncrement),
'max': None,
},
'cost': {
'min': minCost,
'max': None,
},
},
'info': market,
}
def parse_currency(self, currency):
id = self.safe_string(currency, 'symbol')
code = self.safe_currency_code(id)
name = self.safe_string(currency, 'name')
depositEnabled = self.safe_value(currency, 'depositEnabled')
withdrawEnabled = self.safe_value(currency, 'withdrawalEnabled')
isActive = self.safe_value(currency, 'active')
active = depositEnabled and withdrawEnabled and isActive
feeString = self.safe_string(currency, 'withdrawalFee')
tradeDecimals = self.safe_integer(currency, 'tradeDecimals')
fee = None
if (feeString is not None) and (tradeDecimals is not None):
feeStringLen = len(feeString)
dotIndex = feeStringLen - tradeDecimals
if dotIndex > 0:
whole = feeString[0:dotIndex]
fraction = feeString[-dotIndex:]
fee = self.parse_number(whole + '.' + fraction)
else:
fraction = '.'
for i in range(0, -dotIndex):
fraction += '0'
fee = self.parse_number(fraction + feeString)
return {
'id': code,
'code': code,
'info': currency,
'type': None,
'name': name,
'active': active,
'deposit': depositEnabled,
'withdraw': withdrawEnabled,
'fee': fee,
'precision': self.parse_number(self.parse_precision(self.safe_string(currency, 'decimals'))),
'limits': {
'withdraw': {'min': fee, 'max': None},
'amount': {'min': None, 'max': None},
},
}
def parse_ticker(self, ticker, market=None):
marketId = self.safe_string(ticker, 'market')
symbol = self.safe_symbol(marketId, market, '/')
timestamp = self.parse8601(self.safe_string(ticker, 'timestamp'))
last = self.safe_string(ticker, 'last')
open = self.safe_string(ticker, 'open')
return self.safe_ticker({
'symbol': symbol,
'info': ticker,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'high': self.safe_string(ticker, 'high'),
'low': self.safe_string(ticker, 'low'),
'bid': self.safe_string(ticker, 'bid'),
'bidVolume': None,
'ask': self.safe_string(ticker, 'ask'),
'askVolume': None,
'vwap': None,
'open': open,
'close': last,
'last': last,
'previousClose': None,
'change': None,
'percentage': None,
'average': None,
'baseVolume': self.safe_string(ticker, 'volume'),
'quoteVolume': self.safe_string(ticker, 'volumeQuote'),
}, market)
def parse_trade(self, trade, market=None):
marketId = self.safe_string(trade, 'symbol')
symbol = self.safe_symbol(marketId, market)
timestamp = self.parse8601(self.safe_string(trade, 'timestamp'))
priceString = self.safe_string(trade, 'price')
amountString = self.safe_string(trade, 'quantity')
price = self.parse_number(priceString)
amount = self.parse_number(amountString)
cost = self.parse_number(Precise.string_mul(priceString, amountString))
id = self.safe_string(trade, 'id')
side = self.safe_string_lower_2(trade, 'direction', 'side')
takerOrMaker = self.safe_string_lower(trade, 'makerOrTaker')
orderId = None
if takerOrMaker is not None:
orderId = self.safe_string(trade, takerOrMaker + 'OrderId')
fee = None
feeCost = self.safe_number(trade, 'fee')
feeCurrency = self.safe_currency_code(self.safe_string(trade, 'feeToken'))
if feeCost is not None:
fee = {
'cost': feeCost,
'currency': feeCurrency,
}
return {
'info': trade,
'id': id,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'symbol': symbol,
'order': orderId,
'type': None,
'side': side,
'price': price,
'amount': amount,
'cost': cost,
'takerOrMaker': takerOrMaker,
'fee': fee,
}
def parse_ohlcv(self, ohlcv, market=None):
return [
self.parse8601(self.safe_string(ohlcv, 'timestamp')),
self.safe_number(ohlcv, 'open'),
self.safe_number(ohlcv, 'high'),
self.safe_number(ohlcv, 'low'),
self.safe_number(ohlcv, 'close'),
self.safe_number(ohlcv, 'volume'),
]
def parse_order(self, order, market=None):
id = self.safe_string(order, 'id')
type = self.safe_string_lower(order, 'type')
side = self.safe_string_lower(order, 'side')
marketId = self.safe_string(order, 'symbol')
symbol = self.safe_symbol(marketId, market)
timestamp = self.parse8601(self.safe_string(order, 'createdAt'))
price = self.safe_string(order, 'price')
amount = self.safe_string(order, 'quantity')
filled = self.safe_string(order, 'filledQuantity')
canceledQuantity = self.omit_zero(self.safe_string(order, 'cancelledQuantity'))
status = None
if Precise.string_equals(filled, amount):
status = 'closed'
elif canceledQuantity is not None:
status = 'canceled'
else:
status = 'open'
rawTrades = self.safe_value(order, 'trades', [])
clientOrderId = self.safe_string(order, 'clientOrderId')
return self.safe_order({
'info': order,
'id': id,
'clientOrderId': clientOrderId,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'lastTradeTimestamp': None,
'symbol': symbol,
'type': type,
'timeInForce': None,
'postOnly': None,
'side': side,
'price': price,
'stopPrice': None,
'amount': amount,
'cost': None,
'average': None,
'filled': filled,
'remaining': None,
'status': status,
'fee': None,
'trades': rawTrades,
}, market)
def sign(self, path, api='public', method='GET', params={}, headers=None, body=None):
url = self.urls['api'] + '/' + api + '/' + path
if params:
url += '?' + self.urlencode_with_array_repeat(params)
if api != 'public':
self.check_required_credentials()
auth = self.string_to_base64(self.apiKey + ':' + self.secret)
secret = 'Basic ' + self.decode(auth)
headers = {'authorization': secret}
return {'url': url, 'method': method, 'body': body, 'headers': headers}
def handle_errors(self, statusCode, statusText, url, method, responseHeaders, responseBody, response, requestHeaders, requestBody):
if response is None:
return
if statusCode >= 400:
feedback = self.id + ' ' + responseBody
error = self.safe_value(response, 'error')
if error is None:
error = response
code = self.safe_string_2(error, 'code', 'status')
message = self.safe_string_2(error, 'message', 'debugMessage')
self.throw_broadly_matched_exception(self.exceptions['broad'], message, feedback)
self.throw_exactly_matched_exception(self.exceptions['exact'], code, feedback)
self.throw_exactly_matched_exception(self.exceptions['exact'], message, feedback)
raise ExchangeError(feedback)
| true | true |
f71c2d2de4124b6e764541c80fea3098c5dcf2c2 | 231,565 | py | Python | zerver/tests/test_messages.py | myii/zulip | 915d8013271f1823954dd8d4441842842857ab9f | [
"Apache-2.0"
] | null | null | null | zerver/tests/test_messages.py | myii/zulip | 915d8013271f1823954dd8d4441842842857ab9f | [
"Apache-2.0"
] | null | null | null | zerver/tests/test_messages.py | myii/zulip | 915d8013271f1823954dd8d4441842842857ab9f | [
"Apache-2.0"
] | null | null | null | from django.db import IntegrityError
from django.db.models import Q
from django.conf import settings
from django.http import HttpResponse
from django.test import TestCase, override_settings
from django.utils.timezone import now as timezone_now
from zerver.lib import bugdown
from zerver.decorator import JsonableError
from zerver.lib.test_runner import slow
from zerver.lib.addressee import Addressee
from zerver.lib.actions import (
check_message,
check_send_stream_message,
create_mirror_user_if_needed,
do_add_alert_words,
do_change_is_admin,
do_change_stream_invite_only,
do_change_stream_post_policy,
do_claim_attachments,
do_create_user,
do_deactivate_user,
do_send_messages,
do_update_message,
do_set_realm_property,
extract_private_recipients,
extract_stream_indicator,
gather_subscriptions_helper,
get_active_presence_idle_user_ids,
get_client,
get_last_message_id,
get_topic_messages,
get_user_info_for_message_updates,
internal_prep_private_message,
internal_prep_stream_message_by_name,
internal_send_huddle_message,
internal_send_private_message,
internal_send_stream_message,
internal_send_stream_message_by_name,
send_rate_limited_pm_notification_to_bot_owner,
)
from zerver.lib.cache import (
cache_delete,
get_stream_cache_key,
to_dict_cache_key_id,
)
from zerver.lib.create_user import (
create_user_profile,
)
from zerver.lib.message import (
MessageDict,
bulk_access_messages,
get_first_visible_message_id,
get_raw_unread_data,
get_recent_private_conversations,
maybe_update_first_visible_message_id,
messages_for_ids,
render_markdown,
sew_messages_and_reactions,
update_first_visible_message_id,
)
from zerver.lib.test_helpers import (
get_subscription,
get_user_messages,
make_client,
message_stream_count,
most_recent_message,
most_recent_usermessage,
queries_captured,
reset_emails_in_zulip_realm,
)
from zerver.lib.test_classes import (
ZulipTestCase,
)
from zerver.lib.topic import (
LEGACY_PREV_TOPIC,
DB_TOPIC_NAME,
TOPIC_LINKS,
TOPIC_NAME,
)
from zerver.lib.types import DisplayRecipientT, UserDisplayRecipient
from zerver.lib.soft_deactivation import (
add_missing_messages,
do_soft_activate_users,
do_soft_deactivate_users,
reactivate_user_if_soft_deactivated,
)
from zerver.models import (
MAX_MESSAGE_LENGTH, MAX_TOPIC_NAME_LENGTH,
Message, Realm, Recipient, Stream, UserMessage, UserProfile, Attachment,
RealmAuditLog, RealmDomain, get_realm, UserPresence, Subscription,
get_stream, get_system_bot, get_user, Reaction,
flush_per_request_caches, ScheduledMessage, get_huddle_recipient,
bulk_get_huddle_user_ids, get_huddle_user_ids,
get_display_recipient, RealmFilter
)
from zerver.lib.timestamp import convert_to_UTC, datetime_to_timestamp
from zerver.lib.timezone import get_timezone
from zerver.lib.upload import create_attachment
from zerver.lib.url_encoding import near_message_url
from zerver.views.messages import create_mirrored_message_users, InvalidMirrorInput
from analytics.lib.counts import COUNT_STATS
from analytics.models import RealmCount
import datetime
import mock
from operator import itemgetter
import time
import ujson
from typing import Any, Dict, List, Set, Union, Tuple
from collections import namedtuple
class MiscMessageTest(ZulipTestCase):
    def test_get_last_message_id(self) -> None:
        """Verify get_last_message_id() tracks the newest Message row and
        returns the -1 sentinel when no messages exist at all."""
        newest_id = Message.objects.latest('id').id
        self.assertEqual(get_last_message_id(), newest_id)

        # With every message deleted, the sentinel value is returned.
        Message.objects.all().delete()
        self.assertEqual(get_last_message_id(), -1)
class TopicHistoryTest(ZulipTestCase):
    """Tests for the per-stream topic history endpoint
    (/json/users/me/<stream_id>/topics)."""

    def test_topics_history_zephyr_mirror(self) -> None:
        """In Zephyr-mirror realms, topic history is limited to messages
        the user actually received, so a new subscriber sees no topics."""
        user_profile = self.mit_user('sipbtest')
        stream_name = 'new_stream'
        # Send a message to this new stream from another user
        self.subscribe(self.mit_user("starnine"), stream_name)
        stream = get_stream(stream_name, user_profile.realm)
        self.send_stream_message(self.mit_user("starnine"), stream_name,
                                 topic_name="secret topic")
        # Now subscribe this MIT user to the new stream and verify
        # that the new topic is not accessible
        self.login_user(user_profile)
        self.subscribe(user_profile, stream_name)
        endpoint = '/json/users/me/%d/topics' % (stream.id,)
        result = self.client_get(endpoint, dict(), subdomain="zephyr")
        self.assert_json_success(result)
        history = result.json()['topics']
        self.assertEqual(history, [])

    def test_topics_history(self) -> None:
        """Topic history merges topics case-insensitively (reporting the
        most recent spelling) and returns each topic's max message id,
        most recent topic first."""
        # verified: int(UserMessage.flags.read) == 1
        user_profile = self.example_user('iago')
        self.login_user(user_profile)
        stream_name = 'Verona'
        stream = get_stream(stream_name, user_profile.realm)
        recipient = stream.recipient

        def create_test_message(topic: str) -> int:
            # TODO: Clean this up to send messages the normal way.
            hamlet = self.example_user('hamlet')
            message = Message(
                sender=hamlet,
                recipient=recipient,
                content='whatever',
                date_sent=timezone_now(),
                sending_client=get_client('whatever'),
            )
            message.set_topic_name(topic)
            message.save()
            UserMessage.objects.create(
                user_profile=user_profile,
                message=message,
                flags=0,
            )
            return message.id

        # our most recent topics are topic0, topic1, topic2
        # Create old messages with strange spellings.
        create_test_message('topic2')
        create_test_message('toPIc1')
        create_test_message('toPIc0')
        create_test_message('topic2')
        create_test_message('topic2')
        create_test_message('Topic2')
        # Create new messages
        topic2_msg_id = create_test_message('topic2')
        create_test_message('topic1')
        create_test_message('topic1')
        topic1_msg_id = create_test_message('topic1')
        topic0_msg_id = create_test_message('topic0')
        endpoint = '/json/users/me/%d/topics' % (stream.id,)
        result = self.client_get(endpoint, dict())
        self.assert_json_success(result)
        history = result.json()['topics']
        # We only look at the most recent three topics, because
        # the prior fixture data may be unreliable.
        history = history[:3]
        self.assertEqual([topic['name'] for topic in history], [
            'topic0',
            'topic1',
            'topic2',
        ])
        self.assertEqual([topic['max_id'] for topic in history], [
            topic0_msg_id,
            topic1_msg_id,
            topic2_msg_id,
        ])
        # Now try as cordelia, who we imagine as a totally new user in
        # that she doesn't have UserMessage rows. We should see the
        # same results for a public stream.
        self.login('cordelia')
        result = self.client_get(endpoint, dict())
        self.assert_json_success(result)
        history = result.json()['topics']
        # We only look at the most recent three topics, because
        # the prior fixture data may be unreliable.
        history = history[:3]
        self.assertEqual([topic['name'] for topic in history], [
            'topic0',
            'topic1',
            'topic2',
        ])
        # (Redundant with the equality check just above, but harmless.)
        self.assertIn('topic0', [topic['name'] for topic in history])
        self.assertEqual([topic['max_id'] for topic in history], [
            topic0_msg_id,
            topic1_msg_id,
            topic2_msg_id,
        ])
        # Now make stream private, but subscribe cordelia
        do_change_stream_invite_only(stream, True)
        self.subscribe(self.example_user("cordelia"), stream.name)
        result = self.client_get(endpoint, dict())
        self.assert_json_success(result)
        history = result.json()['topics']
        history = history[:3]
        # Cordelia has no UserMessage rows for these topics, and the
        # stream is now private, so they must not appear in her results.
        self.assertNotIn('topic0', [topic['name'] for topic in history])
        self.assertNotIn('topic1', [topic['name'] for topic in history])
        self.assertNotIn('topic2', [topic['name'] for topic in history])

    def test_bad_stream_id(self) -> None:
        """Nonexistent, out-of-realm, and unsubscribed-private stream ids
        all produce the same 'Invalid stream id' error (no info leak)."""
        self.login('iago')
        # non-sensible stream id
        endpoint = '/json/users/me/9999999999/topics'
        result = self.client_get(endpoint, dict())
        self.assert_json_error(result, 'Invalid stream id')
        # out of realm
        bad_stream = self.make_stream(
            'mit_stream',
            realm=get_realm('zephyr')
        )
        endpoint = '/json/users/me/%s/topics' % (bad_stream.id,)
        result = self.client_get(endpoint, dict())
        self.assert_json_error(result, 'Invalid stream id')
        # private stream to which I am not subscribed
        private_stream = self.make_stream(
            'private_stream',
            invite_only=True
        )
        endpoint = '/json/users/me/%s/topics' % (private_stream.id,)
        result = self.client_get(endpoint, dict())
        self.assert_json_error(result, 'Invalid stream id')
class TopicDeleteTest(ZulipTestCase):
    """Tests for the /delete_topic endpoint: admin-only access, and
    deletion limited to the messages the acting admin can access."""

    def test_topic_delete(self) -> None:
        initial_last_msg_id = self.get_last_message().id
        stream_name = 'new_stream'
        topic_name = 'new topic 2'
        # NON-ADMIN USER
        user_profile = self.example_user('hamlet')
        self.subscribe(user_profile, stream_name)
        # Send message
        stream = get_stream(stream_name, user_profile.realm)
        self.send_stream_message(user_profile, stream_name, topic_name=topic_name)
        last_msg_id = self.send_stream_message(user_profile, stream_name, topic_name=topic_name)
        # Deleting the topic as a non-admin must fail and leave messages intact.
        self.login_user(user_profile)
        endpoint = '/json/streams/' + str(stream.id) + '/delete_topic'
        result = self.client_post(endpoint, {
            "topic_name": topic_name
        })
        self.assert_json_error(result, "Must be an organization administrator")
        self.assertEqual(self.get_last_message().id, last_msg_id)
        # Make stream private with limited history
        do_change_stream_invite_only(stream, invite_only=True,
                                     history_public_to_subscribers=False)
        # ADMIN USER subscribed now
        user_profile = self.example_user('iago')
        self.subscribe(user_profile, stream_name)
        self.login_user(user_profile)
        new_last_msg_id = self.send_stream_message(user_profile, stream_name, topic_name=topic_name)
        # Now admin deletes all messages in topic -- which should only
        # delete new_last_msg_id, i.e. the one sent since they joined.
        self.assertEqual(self.get_last_message().id, new_last_msg_id)
        result = self.client_post(endpoint, {
            "topic_name": topic_name
        })
        self.assert_json_success(result)
        self.assertEqual(self.get_last_message().id, last_msg_id)
        # Try to delete all messages in the topic again. There are no messages accessible
        # to the administrator, so this should do nothing.
        result = self.client_post(endpoint, {
            "topic_name": topic_name
        })
        self.assert_json_success(result)
        self.assertEqual(self.get_last_message().id, last_msg_id)
        # Make the stream's history public to subscribers
        do_change_stream_invite_only(stream, invite_only=True,
                                     history_public_to_subscribers=True)
        # Delete the topic should now remove all messages
        result = self.client_post(endpoint, {
            "topic_name": topic_name
        })
        self.assert_json_success(result)
        self.assertEqual(self.get_last_message().id, initial_last_msg_id)
        # Delete again, to test the edge case of deleting an empty topic.
        result = self.client_post(endpoint, {
            "topic_name": topic_name
        })
        self.assert_json_success(result)
        self.assertEqual(self.get_last_message().id, initial_last_msg_id)
class TestCrossRealmPMs(ZulipTestCase):
    """Tests of the rules for private messages across realms: ordinary
    users may only PM within their own realm, while the designated
    CROSS_REALM_BOT_EMAILS bots may be messaged from any realm."""

    def make_realm(self, domain: str) -> Realm:
        # Create a realm whose string_id doubles as its email domain.
        realm = Realm.objects.create(string_id=domain, invite_required=False)
        RealmDomain.objects.create(realm=realm, domain=domain)
        return realm

    def create_user(self, email: str) -> UserProfile:
        # Register a user in the realm matching the email's domain part.
        subdomain = email.split("@")[1]
        self.register(email, 'test', subdomain=subdomain)
        return get_user(email, get_realm(subdomain))

    @slow("Sends a large number of messages")
    @override_settings(CROSS_REALM_BOT_EMAILS=['notification-bot@zulip.com',
                                               'welcome-bot@zulip.com',
                                               'support@3.example.com'])
    def test_realm_scenarios(self) -> None:
        """Exercise the full matrix of allowed/forbidden cross-realm PMs."""
        self.make_realm('1.example.com')
        r2 = self.make_realm('2.example.com')
        self.make_realm('3.example.com')

        def assert_message_received(to_user: UserProfile, from_user: UserProfile) -> None:
            # The most recent message for to_user should come from from_user.
            messages = get_user_messages(to_user)
            self.assertEqual(messages[-1].sender.id, from_user.id)

        def assert_invalid_user() -> Any:
            # Context manager asserting the send is rejected as invalid.
            return self.assertRaisesRegex(
                JsonableError,
                'Invalid user ID ')

        user1_email = 'user1@1.example.com'
        user1a_email = 'user1a@1.example.com'
        user2_email = 'user2@2.example.com'
        user3_email = 'user3@3.example.com'
        notification_bot_email = 'notification-bot@zulip.com'
        support_email = 'support@3.example.com'  # note: not zulip.com

        user1 = self.create_user(user1_email)
        user1a = self.create_user(user1a_email)
        user2 = self.create_user(user2_email)
        user3 = self.create_user(user3_email)
        notification_bot = get_system_bot(notification_bot_email)
        with self.settings(CROSS_REALM_BOT_EMAILS=['notification-bot@zulip.com', 'welcome-bot@zulip.com']):
            # HACK: We should probably be creating this "bot" user another
            # way, but since you can't register a user with a
            # cross-realm email, we need to hide this for now.
            support_bot = self.create_user(support_email)

        # Users can PM themselves
        self.send_personal_message(user1, user1)
        assert_message_received(user1, user1)

        # Users on the same realm can PM each other
        self.send_personal_message(user1, user1a)
        assert_message_received(user1a, user1)

        # Cross-realm bots in the zulip.com realm can PM any realm
        # (They need lower level APIs to do this.)
        internal_send_private_message(
            realm=r2,
            sender=get_system_bot(notification_bot_email),
            recipient_user=get_user(user2_email, r2),
            content='bla',
        )
        assert_message_received(user2, notification_bot)

        # All users can PM cross-realm bots in the zulip.com realm
        self.send_personal_message(user1, notification_bot)
        assert_message_received(notification_bot, user1)

        # Users can PM cross-realm bots on non-zulip realms.
        # (The support bot represents some theoretical bot that we may
        # create in the future that does not have zulip.com as its realm.)
        self.send_personal_message(user1, support_bot)
        assert_message_received(support_bot, user1)

        # Allow sending PMs to two different cross-realm bots simultaneously.
        # (We don't particularly need this feature, but since users can
        # already individually send PMs to cross-realm bots, we shouldn't
        # prevent them from sending multiple bots at once. We may revisit
        # this if it's a nuisance for huddles.)
        self.send_huddle_message(user1, [notification_bot, support_bot])
        assert_message_received(notification_bot, user1)
        assert_message_received(support_bot, user1)

        # Prevent old loophole where I could send PMs to other users as long
        # as I copied a cross-realm bot from the same realm.
        with assert_invalid_user():
            self.send_huddle_message(user1, [user3, support_bot])

        # Users on three different realms can't PM each other,
        # even if one of the users is a cross-realm bot.
        with assert_invalid_user():
            self.send_huddle_message(user1, [user2, notification_bot])

        with assert_invalid_user():
            self.send_huddle_message(notification_bot, [user1, user2])

        # Users on the different realms cannot PM each other
        with assert_invalid_user():
            self.send_personal_message(user1, user2)

        # Users on non-zulip realms can't PM "ordinary" Zulip users
        with assert_invalid_user():
            self.send_personal_message(user1, self.example_user('hamlet'))

        # Users on three different realms cannot PM each other
        with assert_invalid_user():
            self.send_huddle_message(user1, [user2, user3])
class TestAddressee(ZulipTestCase):
    """Tests for the Addressee helper, which resolves message recipients
    from user ids or legacy API payloads."""

    def test_addressee_for_user_ids(self) -> None:
        """Addressee.for_user_ids resolves every requested user id."""
        realm = get_realm('zulip')
        requested_ids = [self.example_user('cordelia').id,
                         self.example_user('hamlet').id,
                         self.example_user('othello').id]
        addressee = Addressee.for_user_ids(user_ids=requested_ids, realm=realm)
        resolved_ids = [profile.id for profile in addressee.user_profiles()]
        self.assertEqual(set(resolved_ids), set(requested_ids))

    def test_addressee_for_user_ids_nonexistent_id(self) -> None:
        """An id with no matching user raises a JsonableError."""
        with self.assertRaisesRegex(JsonableError, 'Invalid user ID '):
            Addressee.for_user_ids(user_ids=[779], realm=get_realm('zulip'))

    def test_addressee_legacy_build_for_user_ids(self) -> None:
        """legacy_build with type 'private' resolves recipient user ids."""
        realm = get_realm('zulip')
        self.login('hamlet')
        target_ids = [self.example_user('cordelia').id,
                      self.example_user('othello').id]
        addressee = Addressee.legacy_build(
            sender=self.example_user('hamlet'), message_type_name='private',
            message_to=target_ids, topic_name='random_topic',
            realm=realm
        )
        resolved_ids = [profile.id for profile in addressee.user_profiles()]
        self.assertEqual(set(resolved_ids), set(target_ids))

    def test_addressee_legacy_build_for_stream_id(self) -> None:
        """legacy_build with type 'stream' resolves the stream id."""
        realm = get_realm('zulip')
        self.login('iago')
        sender = self.example_user('iago')
        self.subscribe(sender, "Denmark")
        denmark = get_stream('Denmark', realm)
        addressee = Addressee.legacy_build(
            sender=sender, message_type_name='stream',
            message_to=[denmark.id], topic_name='random_topic',
            realm=realm
        )
        self.assertEqual(denmark.id, addressee.stream_id())
class InternalPrepTest(ZulipTestCase):
    """Tests for the internal_send_* / internal_prep_* helpers, which
    must log problems and return rather than raise on bad inputs."""

    def test_returns_for_internal_sends(self) -> None:
        # For our internal_send_* functions we return
        # if the prep stages fail. This is mostly defensive
        # code, since we are generally creating the messages
        # ourselves, but we want to make sure that the functions
        # won't actually explode if we give them bad content.
        bad_content = ''
        realm = get_realm('zulip')
        cordelia = self.example_user('cordelia')
        hamlet = self.example_user('hamlet')
        othello = self.example_user('othello')
        stream = get_stream('Verona', realm)

        # Each send variant below should log the validation failure
        # via logging.exception instead of raising.
        with mock.patch('logging.exception') as m:
            internal_send_private_message(
                realm=realm,
                sender=cordelia,
                recipient_user=hamlet,
                content=bad_content,
            )

        arg = m.call_args_list[0][0][0]
        self.assertIn('Message must not be empty', arg)

        with mock.patch('logging.exception') as m:
            internal_send_huddle_message(
                realm=realm,
                sender=cordelia,
                emails=[hamlet.email, othello.email],
                content=bad_content,
            )

        arg = m.call_args_list[0][0][0]
        self.assertIn('Message must not be empty', arg)

        with mock.patch('logging.exception') as m:
            internal_send_stream_message(
                realm=realm,
                sender=cordelia,
                topic='whatever',
                content=bad_content,
                stream=stream
            )

        arg = m.call_args_list[0][0][0]
        self.assertIn('Message must not be empty', arg)

        with mock.patch('logging.exception') as m:
            internal_send_stream_message_by_name(
                realm=realm,
                sender=cordelia,
                stream_name=stream.name,
                topic='whatever',
                content=bad_content
            )

        arg = m.call_args_list[0][0][0]
        self.assertIn('Message must not be empty', arg)

    def test_error_handling(self) -> None:
        """Over-long content is truncated; bad realm arguments raise; and
        cross-realm recipients are logged, not raised."""
        realm = get_realm('zulip')
        sender = self.example_user('cordelia')
        recipient_user = self.example_user('hamlet')
        content = 'x' * 15000

        result = internal_prep_private_message(
            realm=realm,
            sender=sender,
            recipient_user=recipient_user,
            content=content)
        message = result['message']
        self.assertIn('message was too long', message.content)

        with self.assertRaises(RuntimeError):
            internal_prep_private_message(
                realm=None,  # should cause error
                sender=sender,
                recipient_user=recipient_user,
                content=content)

        # Simulate sending a message to somebody not in the
        # realm of the sender.
        recipient_user = self.mit_user('starnine')
        with mock.patch('logging.exception') as logging_mock:
            result = internal_prep_private_message(
                realm=realm,
                sender=sender,
                recipient_user=recipient_user,
                content=content)

        arg = logging_mock.call_args_list[0][0][0]
        prefix = "Error queueing internal message by cordelia@zulip.com: You can't send private messages outside of your organization."
        self.assertTrue(arg.startswith(prefix))

    def test_ensure_stream_gets_called(self) -> None:
        """Prepping a stream message by name auto-creates the stream."""
        realm = get_realm('zulip')
        sender = self.example_user('cordelia')
        stream_name = 'test_stream'
        topic = 'whatever'
        content = 'hello'

        internal_prep_stream_message_by_name(
            realm=realm,
            sender=sender,
            stream_name=stream_name,
            topic=topic,
            content=content)

        # This would throw an error if the stream
        # wasn't automatically created.
        Stream.objects.get(name=stream_name, realm_id=realm.id)
class ExtractTest(TestCase):
    """Unit tests for the extract_* request-payload parsing helpers."""

    def test_extract_stream_indicator(self) -> None:
        """extract_stream_indicator accepts plain names, JSON strings,
        numeric ids, and single-element JSON arrays."""
        valid_cases = [
            ('development', "development"),
            ('commas,are,fine', "commas,are,fine"),
            ('"Who hasn\'t done this?"', "Who hasn't done this?"),
            ("999", 999),
            # For legacy reasons it's plausible that users will
            # put a single stream into an array and then encode it
            # as JSON. We can probably eliminate this support
            # by mid 2020 at the latest.
            ('["social"]', 'social'),
            ("[123]", 123),
        ]
        for raw, expected in valid_cases:
            self.assertEqual(extract_stream_indicator(raw), expected)

        error_cases = [
            ('{}', 'Invalid data type for stream'),
            ('[{}]', 'Invalid data type for stream'),
            ('[1,2,"general"]', 'Expected exactly one stream'),
        ]
        for raw, error in error_cases:
            with self.assertRaisesRegex(JsonableError, error):
                extract_stream_indicator(raw)

    def test_extract_private_recipients_emails(self) -> None:
        """extract_private_recipients handles the various email-list
        encodings, stripping whitespace and dropping empties."""
        # JSON list with duplicates, empties, and trailing whitespace.
        payload = ujson.dumps([' alice@zulip.com ', ' bob@zulip.com ', ' ', 'bob@zulip.com'])
        # sorted() normalizes ordering for the comparison below.
        self.assertEqual(sorted(extract_private_recipients(payload)),
                         ['alice@zulip.com', 'bob@zulip.com'])

        # Simple string with one name.
        self.assertEqual(extract_private_recipients('alice@zulip.com '),
                         ['alice@zulip.com'])

        # JSON-encoded string.
        self.assertEqual(extract_private_recipients('"alice@zulip.com"'),
                         ['alice@zulip.com'])

        # Bare comma-delimited string.
        self.assertEqual(sorted(extract_private_recipients('bob@zulip.com, alice@zulip.com')),
                         ['alice@zulip.com', 'bob@zulip.com'])

        # JSON-encoded, comma-delimited string.
        self.assertEqual(sorted(extract_private_recipients('"bob@zulip.com,alice@zulip.com"')),
                         ['alice@zulip.com', 'bob@zulip.com'])

        # Invalid data types are rejected.
        for bad_payload in (ujson.dumps(dict(color='red')), ujson.dumps([{}])):
            with self.assertRaisesRegex(JsonableError, 'Invalid data type for recipients'):
                extract_private_recipients(bad_payload)

        # An empty list is allowed.
        self.assertEqual(extract_private_recipients('[]'), [])

        # Heterogeneous lists are not supported.
        mixed_payload = ujson.dumps(['eeshan@example.com', 3, 4])
        with self.assertRaisesRegex(JsonableError, 'Recipient lists may contain emails or user IDs, but not both.'):
            extract_private_recipients(mixed_payload)

    def test_extract_recipient_ids(self) -> None:
        """extract_private_recipients also accepts lists of user ids,
        de-duplicating them."""
        # JSON list with duplicates.
        id_payload = ujson.dumps([3, 3, 12])
        self.assertEqual(sorted(extract_private_recipients(id_payload)), [3, 12])

        # Invalid data type.
        with self.assertRaisesRegex(JsonableError, 'Invalid data type for recipients'):
            extract_private_recipients(ujson.dumps(dict(recipient=12)))

        # Heterogeneous lists are not supported.
        with self.assertRaisesRegex(JsonableError, 'Recipient lists may contain emails or user IDs, but not both.'):
            extract_private_recipients(ujson.dumps([3, 4, 'eeshan@example.com']))
class PersonalMessagesTest(ZulipTestCase):
    """Tests of sending and receiving private (personal) messages."""

    def test_near_pm_message_url(self) -> None:
        """near_message_url builds a pm-with narrow link from the
        recipient ids and the message id."""
        realm = get_realm('zulip')
        message = dict(
            type='personal',
            id=555,
            display_recipient=[
                dict(id=77),
                dict(id=80),
            ],
        )
        url = near_message_url(
            realm=realm,
            message=message,
        )
        self.assertEqual(url, 'http://zulip.testserver/#narrow/pm-with/77,80-pm/near/555')

    def test_is_private_flag_not_leaked(self) -> None:
        """
        Make sure `is_private` flag is not leaked to the API.
        """
        self.login('hamlet')
        self.send_personal_message(self.example_user("hamlet"),
                                   self.example_user("cordelia"),
                                   "test")

        for msg in self.get_messages():
            self.assertNotIn('is_private', msg['flags'])

    def test_auto_subbed_to_personals(self) -> None:
        """
        Newly created users are auto-subbed to the ability to receive
        personals.
        """
        test_email = self.nonreg_email('test')
        self.register(test_email, "test")
        user_profile = self.nonreg_user('test')
        old_messages_count = message_stream_count(user_profile)
        self.send_personal_message(user_profile, user_profile)
        new_messages_count = message_stream_count(user_profile)
        self.assertEqual(new_messages_count, old_messages_count + 1)

        recipient = Recipient.objects.get(type_id=user_profile.id,
                                          type=Recipient.PERSONAL)
        message = most_recent_message(user_profile)
        self.assertEqual(message.recipient, recipient)

        # Patch the display-recipient lookup so the repr checks below are
        # deterministic.
        with mock.patch('zerver.models.get_display_recipient', return_value='recip'):
            self.assertEqual(
                str(message),
                '<Message: recip / / '
                '<UserProfile: {} {}>>'.format(user_profile.email, user_profile.realm))

            user_message = most_recent_usermessage(user_profile)
            self.assertEqual(
                str(user_message),
                '<UserMessage: recip / {} ([])>'.format(user_profile.email)
            )

    @slow("checks several profiles")
    def test_personal_to_self(self) -> None:
        """
        If you send a personal to yourself, only you see it.
        """
        old_user_profiles = list(UserProfile.objects.all())
        test_email = self.nonreg_email('test1')
        self.register(test_email, "test1")

        old_messages = []
        for user_profile in old_user_profiles:
            old_messages.append(message_stream_count(user_profile))

        user_profile = self.nonreg_user('test1')
        self.send_personal_message(user_profile, user_profile)

        new_messages = []
        for user_profile in old_user_profiles:
            new_messages.append(message_stream_count(user_profile))

        # No pre-existing user's message count changed.
        self.assertEqual(old_messages, new_messages)

        user_profile = self.nonreg_user('test1')
        recipient = Recipient.objects.get(type_id=user_profile.id, type=Recipient.PERSONAL)
        self.assertEqual(most_recent_message(user_profile).recipient, recipient)

    def assert_personal(self, sender: UserProfile, receiver: UserProfile, content: str="testcontent") -> None:
        """
        Send a private message from `sender` to `receiver` and check
        that only those two parties actually received the message.
        """
        sender_messages = message_stream_count(sender)
        receiver_messages = message_stream_count(receiver)

        other_user_profiles = UserProfile.objects.filter(~Q(id=sender.id) &
                                                         ~Q(id=receiver.id))
        old_other_messages = []
        for user_profile in other_user_profiles:
            old_other_messages.append(message_stream_count(user_profile))

        self.send_personal_message(sender, receiver, content)

        # Users outside the conversation don't get the message.
        new_other_messages = []
        for user_profile in other_user_profiles:
            new_other_messages.append(message_stream_count(user_profile))

        self.assertEqual(old_other_messages, new_other_messages)

        # The personal message is in the streams of both the sender and receiver.
        self.assertEqual(message_stream_count(sender),
                         sender_messages + 1)
        self.assertEqual(message_stream_count(receiver),
                         receiver_messages + 1)

        recipient = Recipient.objects.get(type_id=receiver.id, type=Recipient.PERSONAL)
        self.assertEqual(most_recent_message(sender).recipient, recipient)
        self.assertEqual(most_recent_message(receiver).recipient, recipient)

    def test_personal(self) -> None:
        """
        If you send a personal, only you and the recipient see it.
        """
        self.login('hamlet')
        self.assert_personal(
            sender=self.example_user("hamlet"),
            receiver=self.example_user("othello")
        )

    def test_private_message_policy(self) -> None:
        """
        Tests that PRIVATE_MESSAGE_POLICY_DISABLED works correctly.
        """
        user_profile = self.example_user("hamlet")
        self.login_user(user_profile)
        do_set_realm_property(user_profile.realm, "private_message_policy",
                              Realm.PRIVATE_MESSAGE_POLICY_DISABLED)
        with self.assertRaises(JsonableError):
            self.send_personal_message(user_profile, self.example_user("cordelia"))

        # PMs involving bots are still allowed under the disabled policy.
        bot_profile = self.create_test_bot("testbot", user_profile)
        self.send_personal_message(user_profile, get_system_bot(settings.NOTIFICATION_BOT))
        self.send_personal_message(user_profile, bot_profile)
        self.send_personal_message(bot_profile, user_profile)

    def test_non_ascii_personal(self) -> None:
        """
        Sending a PM containing non-ASCII characters succeeds.
        """
        self.login('hamlet')
        self.assert_personal(
            sender=self.example_user("hamlet"),
            receiver=self.example_user("othello"),
            content="hümbüǵ"
        )
class StreamMessagesTest(ZulipTestCase):
    def assert_stream_message(self, stream_name: str, topic_name: str="test topic",
                              content: str="test content") -> None:
        """
        Check that messages sent to a stream reach all subscribers to
        that stream, and only those subscribers.
        """
        realm = get_realm('zulip')
        subscribers = self.users_subscribed_to_stream(stream_name, realm)

        # Outgoing webhook bots don't store UserMessage rows; they will be processed later.
        subscribers = [subscriber for subscriber in subscribers
                       if subscriber.bot_type != UserProfile.OUTGOING_WEBHOOK_BOT]

        old_subscriber_messages = []
        for subscriber in subscribers:
            old_subscriber_messages.append(message_stream_count(subscriber))

        non_subscribers = [user_profile for user_profile in UserProfile.objects.all()
                           if user_profile not in subscribers]
        old_non_subscriber_messages = []
        for non_subscriber in non_subscribers:
            old_non_subscriber_messages.append(message_stream_count(non_subscriber))

        non_bot_subscribers = [user_profile for user_profile in subscribers
                               if not user_profile.is_bot]
        # Send as an arbitrary human subscriber.
        a_subscriber = non_bot_subscribers[0]
        self.login_user(a_subscriber)
        self.send_stream_message(a_subscriber, stream_name,
                                 content=content, topic_name=topic_name)

        # Did all of the subscribers get the message?
        new_subscriber_messages = []
        for subscriber in subscribers:
            new_subscriber_messages.append(message_stream_count(subscriber))

        # Did non-subscribers not get the message?
        new_non_subscriber_messages = []
        for non_subscriber in non_subscribers:
            new_non_subscriber_messages.append(message_stream_count(non_subscriber))

        self.assertEqual(old_non_subscriber_messages, new_non_subscriber_messages)
        self.assertEqual(new_subscriber_messages, [elt + 1 for elt in old_subscriber_messages])
    def test_performance(self) -> None:
        '''
        This test is part of the automated test suite, but
        it is more intended as an aid to measuring the
        performance of do_send_messages() with consistent
        data setup across different commits. You can modify
        the values below and run just this test, and then
        un-comment the print statement toward the bottom
        to see the timing.
        '''
        num_messages = 2
        num_extra_users = 10

        sender = self.example_user('cordelia')
        realm = sender.realm
        message_content = 'whatever'
        stream = get_stream('Denmark', realm)
        topic_name = 'lunch'
        recipient = stream.recipient
        sending_client = make_client(name="test suite")

        for i in range(num_extra_users):
            # Make every other user be idle.
            long_term_idle = i % 2 > 0

            email = 'foo%d@example.com' % (i,)
            user = UserProfile.objects.create(
                realm=realm,
                email=email,
                pointer=0,
                long_term_idle=long_term_idle,
            )
            Subscription.objects.create(
                user_profile=user,
                recipient=recipient
            )

        def send_test_message() -> None:
            # Build and send a Message directly, bypassing the API layer.
            message = Message(
                sender=sender,
                recipient=recipient,
                content=message_content,
                date_sent=timezone_now(),
                sending_client=sending_client,
            )
            message.set_topic_name(topic_name)
            do_send_messages([dict(message=message)])

        before_um_count = UserMessage.objects.count()

        t = time.time()
        for i in range(num_messages):
            send_test_message()

        delay = time.time() - t
        assert(delay)  # quiet down lint
        # print(delay)

        after_um_count = UserMessage.objects.count()
        ums_created = after_um_count - before_um_count

        # Long-term-idle users don't get UserMessage rows eagerly, so we
        # only require rows for the active half of the extra users.
        num_active_users = num_extra_users / 2
        self.assertTrue(ums_created > (num_active_users * num_messages))
    def test_not_too_many_queries(self) -> None:
        """Sending a stream message should require a bounded number of
        database queries (a regression guard against N+1 patterns)."""
        recipient_list = [self.example_user("hamlet"), self.example_user("iago"),
                          self.example_user("cordelia"), self.example_user("othello")]
        for user_profile in recipient_list:
            self.subscribe(user_profile, "Denmark")

        sender = self.example_user('hamlet')
        sending_client = make_client(name="test suite")
        stream_name = 'Denmark'
        topic_name = 'foo'
        content = 'whatever'
        realm = sender.realm

        # To get accurate count of the queries, we should make sure that
        # caches don't come into play. If we count queries while caches are
        # filled, we will get a lower count. Caches are not supposed to be
        # persistent, so our test can also fail if cache is invalidated
        # during the course of the unit test.
        flush_per_request_caches()
        cache_delete(get_stream_cache_key(stream_name, realm.id))
        with queries_captured() as queries:
            check_send_stream_message(
                sender=sender,
                client=sending_client,
                stream_name=stream_name,
                topic=topic_name,
                body=content,
            )

        self.assert_length(queries, 15)
def test_stream_message_dict(self) -> None:
user_profile = self.example_user('iago')
self.subscribe(user_profile, "Denmark")
self.send_stream_message(self.example_user("hamlet"), "Denmark",
content="whatever", topic_name="my topic")
message = most_recent_message(user_profile)
row = MessageDict.get_raw_db_rows([message.id])[0]
dct = MessageDict.build_dict_from_raw_db_row(row)
MessageDict.post_process_dicts([dct], apply_markdown=True, client_gravatar=False)
self.assertEqual(dct['display_recipient'], 'Denmark')
stream = get_stream('Denmark', user_profile.realm)
self.assertEqual(dct['stream_id'], stream.id)
def test_stream_message_unicode(self) -> None:
receiving_user_profile = self.example_user('iago')
sender = self.example_user('hamlet')
self.subscribe(receiving_user_profile, "Denmark")
self.send_stream_message(sender, "Denmark",
content="whatever", topic_name="my topic")
message = most_recent_message(receiving_user_profile)
self.assertEqual(str(message),
'<Message: Denmark / my topic / '
'<UserProfile: {} {}>>'.format(sender.email, sender.realm))
def test_message_mentions(self) -> None:
user_profile = self.example_user('iago')
self.subscribe(user_profile, "Denmark")
self.send_stream_message(self.example_user("hamlet"), "Denmark",
content="test @**Iago** rules")
message = most_recent_message(user_profile)
assert(UserMessage.objects.get(user_profile=user_profile, message=message).flags.mentioned.is_set)
def test_is_private_flag(self) -> None:
user_profile = self.example_user('iago')
self.subscribe(user_profile, "Denmark")
self.send_stream_message(self.example_user("hamlet"), "Denmark",
content="test")
message = most_recent_message(user_profile)
self.assertFalse(UserMessage.objects.get(user_profile=user_profile, message=message).flags.is_private.is_set)
self.send_personal_message(self.example_user("hamlet"), user_profile,
content="test")
message = most_recent_message(user_profile)
self.assertTrue(UserMessage.objects.get(user_profile=user_profile, message=message).flags.is_private.is_set)
def _send_stream_message(self, user: UserProfile, stream_name: str, content: str) -> Set[int]:
with mock.patch('zerver.lib.actions.send_event') as m:
self.send_stream_message(
user,
stream_name,
content=content
)
self.assertEqual(m.call_count, 1)
users = m.call_args[0][2]
user_ids = {u['id'] for u in users}
return user_ids
    def test_unsub_mention(self) -> None:
        """Mentioning an unsubscribed user must not notify them or create
        a UserMessage row for them."""
        cordelia = self.example_user('cordelia')
        hamlet = self.example_user('hamlet')

        stream_name = 'Test Stream'

        self.subscribe(hamlet, stream_name)

        # Start Cordelia with a clean slate of UserMessage rows.
        UserMessage.objects.filter(
            user_profile=cordelia
        ).delete()

        def mention_cordelia() -> Set[int]:
            content = 'test @**Cordelia Lear** rules'

            user_ids = self._send_stream_message(
                user=hamlet,
                stream_name=stream_name,
                content=content
            )
            return user_ids

        def num_cordelia_messages() -> int:
            return UserMessage.objects.filter(
                user_profile=cordelia
            ).count()

        user_ids = mention_cordelia()
        self.assertEqual(0, num_cordelia_messages())
        self.assertNotIn(cordelia.id, user_ids)

        # Make sure the test isn't too brittle: subscribing Cordelia and
        # mentioning her should give her a message.
        self.subscribe(cordelia, stream_name)
        user_ids = mention_cordelia()
        self.assertIn(cordelia.id, user_ids)
        self.assertEqual(1, num_cordelia_messages())
    def test_message_bot_mentions(self) -> None:
        # Mentioning a (non-subscribed) bot should still notify the bot:
        # it appears in the notified user IDs and gets a UserMessage row
        # with the `mentioned` flag set.
        cordelia = self.example_user('cordelia')
        hamlet = self.example_user('hamlet')
        realm = hamlet.realm
        stream_name = 'Test Stream'
        self.subscribe(hamlet, stream_name)
        normal_bot = do_create_user(
            email='normal-bot@zulip.com',
            password='',
            realm=realm,
            full_name='Normal Bot',
            short_name='',
            bot_type=UserProfile.DEFAULT_BOT,
            bot_owner=cordelia,
        )
        content = 'test @**Normal Bot** rules'
        user_ids = self._send_stream_message(
            user=hamlet,
            stream_name=stream_name,
            content=content
        )
        self.assertIn(normal_bot.id, user_ids)
        user_message = most_recent_usermessage(normal_bot)
        self.assertEqual(user_message.message.content, content)
        self.assertTrue(user_message.flags.mentioned)
    def test_stream_message_mirroring(self) -> None:
        # Forged zephyr_mirror stream messages are accepted only while the
        # user has the api_super_user permission; the same request fails
        # once the permission is revoked.
        user = self.mit_user('starnine')
        self.subscribe(user, 'Verona')
        do_change_is_admin(user, True, 'api_super_user')
        result = self.api_post(user, "/api/v1/messages", {"type": "stream",
                                                          "to": "Verona",
                                                          "sender": self.mit_email("sipbtest"),
                                                          "client": "zephyr_mirror",
                                                          "topic": "announcement",
                                                          "content": "Everyone knows Iago rules",
                                                          "forged": "true"},
                               subdomain="zephyr")
        self.assert_json_success(result)
        do_change_is_admin(user, False, 'api_super_user')
        result = self.api_post(user, "/api/v1/messages", {"type": "stream",
                                                          "to": "Verona",
                                                          "sender": self.mit_email("sipbtest"),
                                                          "client": "zephyr_mirror",
                                                          "topic": "announcement",
                                                          "content": "Everyone knows Iago rules",
                                                          "forged": "true"},
                               subdomain="zephyr")
        self.assert_json_error(result, "User not authorized for this query")
    def test_message_to_stream(self) -> None:
        """
        If you send a message to a stream, everyone subscribed to the stream
        receives the messages.
        """
        # The detailed assertions live in the assert_stream_message helper.
        self.assert_stream_message("Scotland")
    def test_non_ascii_stream_message(self) -> None:
        """
        Sending a stream message containing non-ASCII characters in the stream
        name, topic, or message body succeeds.
        """
        self.login('hamlet')
        # Subscribe everyone to a stream with non-ASCII characters.
        non_ascii_stream_name = "hümbüǵ"
        realm = get_realm("zulip")
        stream = self.make_stream(non_ascii_stream_name)
        # Only the first three active human users are subscribed; that is
        # enough to exercise delivery to multiple recipients.
        for user_profile in UserProfile.objects.filter(is_active=True, is_bot=False,
                                                       realm=realm)[0:3]:
            self.subscribe(user_profile, stream.name)
        self.assert_stream_message(non_ascii_stream_name, topic_name="hümbüǵ",
                                   content="hümbüǵ")
    def test_get_raw_unread_data_for_huddle_messages(self) -> None:
        # Two huddle messages to the same recipient group should both show
        # up in the unread-data huddle_dict, but collapse into a single
        # recent private conversation whose max_message_id is the newest.
        users = [
            self.example_user('hamlet'),
            self.example_user('cordelia'),
            self.example_user('iago'),
            self.example_user('prospero'),
            self.example_user('othello'),
        ]
        message1_id = self.send_huddle_message(users[0], users, "test content 1")
        message2_id = self.send_huddle_message(users[0], users, "test content 2")
        msg_data = get_raw_unread_data(users[1])
        # both the messages are present in msg_data
        self.assertIn(message1_id, msg_data["huddle_dict"].keys())
        self.assertIn(message2_id, msg_data["huddle_dict"].keys())
        # only these two messages are present in msg_data
        self.assertEqual(len(msg_data["huddle_dict"].keys()), 2)
        recent_conversations = get_recent_private_conversations(users[1])
        self.assertEqual(len(recent_conversations), 1)
        recent_conversation = list(recent_conversations.values())[0]
        # The conversation's user_ids exclude the perspective user.
        self.assertEqual(set(recent_conversation['user_ids']), {user.id for user in users if
                                                                user != users[1]})
        self.assertEqual(recent_conversation['max_message_id'], message2_id)
class MessageDictTest(ZulipTestCase):
    """Tests for MessageDict, the machinery that turns Message rows into
    the dictionaries sent to clients (both the event and fetch paths)."""

    def test_both_codepaths(self) -> None:
        '''
        We have two different codepaths that
        extract a particular shape of dictionary
        for messages to send to clients:

            events:

                These are the events we send to MANY
                clients when a message is originally
                sent.

            fetch:

                These are the messages we send to ONE
                client when they fetch messages via
                some narrow/search in the UI.

        Different clients have different needs
        when it comes to things like generating avatar
        hashes or including both rendered and unrendered
        markdown, so that explains the different shapes.

        And then the two codepaths have different
        performance needs. In the events codepath, we
        have the Django view generate a single "wide"
        dictionary that gets put on the event queue,
        and then we send events to multiple clients,
        finalizing the payload for each of them depending
        on the "shape" they want. (We also avoid
        doing extra work for any two clients who want
        the same shape dictionary, but that's out of the
        scope of this particular test).

        In the fetch scenario, the single client only needs
        a dictionary of one shape, but we need to re-hydrate
        the sender information, since the sender details
        may have changed since the message was originally
        sent.

        This test simply verifies that the two codepaths
        ultimately provide the same result.
        '''
        def reload_message(msg_id: int) -> Message:
            # Get a clean copy of the message, and
            # clear the cache.
            cache_delete(to_dict_cache_key_id(msg_id))
            msg = Message.objects.get(id=msg_id)
            return msg

        def get_send_message_payload(
                msg_id: int,
                apply_markdown: bool,
                client_gravatar: bool) -> Dict[str, Any]:
            # Events codepath: wide dict, then per-client finalization.
            msg = reload_message(msg_id)
            wide_dict = MessageDict.wide_dict(msg)

            narrow_dict = MessageDict.finalize_payload(
                wide_dict,
                apply_markdown=apply_markdown,
                client_gravatar=client_gravatar,
            )
            return narrow_dict

        def get_fetch_payload(
                msg_id: int,
                apply_markdown: bool,
                client_gravatar: bool) -> Dict[str, Any]:
            # Fetch codepath: unhydrated dict, then in-place post-processing.
            msg = reload_message(msg_id)
            unhydrated_dict = MessageDict.to_dict_uncached_helper([msg])[0]
            # The next step mutates the dict in place
            # for performance reasons.
            MessageDict.post_process_dicts(
                [unhydrated_dict],
                apply_markdown=apply_markdown,
                client_gravatar=client_gravatar,
            )
            final_dict = unhydrated_dict
            return final_dict

        def test_message_id() -> int:
            hamlet = self.example_user('hamlet')
            self.login_user(hamlet)
            msg_id = self.send_stream_message(
                hamlet,
                "Scotland",
                topic_name="editing",
                content="before edit"
            )
            return msg_id

        # All four combinations of (apply_markdown, client_gravatar).
        flag_setups = [
            [False, False],
            [False, True],
            [True, False],
            [True, True],
        ]

        msg_id = test_message_id()

        for (apply_markdown, client_gravatar) in flag_setups:
            send_message_payload = get_send_message_payload(
                msg_id,
                apply_markdown=apply_markdown,
                client_gravatar=client_gravatar,
            )

            fetch_payload = get_fetch_payload(
                msg_id,
                apply_markdown=apply_markdown,
                client_gravatar=client_gravatar,
            )

            self.assertEqual(send_message_payload, fetch_payload)

    @slow('builds lots of messages')
    def test_bulk_message_fetching(self) -> None:
        # Builds 600 messages (plus reactions) and checks that bulk
        # hydration stays within time and query-count budgets.
        sender = self.example_user('othello')
        receiver = self.example_user('hamlet')
        pm_recipient = Recipient.objects.get(type_id=receiver.id, type=Recipient.PERSONAL)
        stream_name = 'Çiğdem'
        stream = self.make_stream(stream_name)
        stream_recipient = Recipient.objects.get(type_id=stream.id, type=Recipient.STREAM)
        sending_client = make_client(name="test suite")

        ids = []
        for i in range(300):
            for recipient in [pm_recipient, stream_recipient]:
                message = Message(
                    sender=sender,
                    recipient=recipient,
                    content='whatever %d' % (i,),
                    rendered_content='DOES NOT MATTER',
                    rendered_content_version=bugdown.version,
                    date_sent=timezone_now(),
                    sending_client=sending_client,
                    last_edit_time=timezone_now(),
                    edit_history='[]'
                )
                message.set_topic_name('whatever')
                message.save()
                ids.append(message.id)

                Reaction.objects.create(user_profile=sender, message=message,
                                        emoji_name='simple_smile')

        num_ids = len(ids)
        self.assertTrue(num_ids >= 600)

        flush_per_request_caches()
        t = time.time()
        with queries_captured() as queries:
            rows = list(MessageDict.get_raw_db_rows(ids))

            objs = [
                MessageDict.build_dict_from_raw_db_row(row)
                for row in rows
            ]
            MessageDict.post_process_dicts(objs, apply_markdown=False, client_gravatar=False)

        delay = time.time() - t
        # Make sure we don't take longer than 1.5ms per message to
        # extract messages.  Note that we increased this from 1ms to
        # 1.5ms to handle tests running in parallel being a bit
        # slower.
        error_msg = "Number of ids: {}. Time delay: {}".format(num_ids, delay)
        self.assertTrue(delay < 0.0015 * num_ids, error_msg)
        self.assert_length(queries, 7)
        self.assertEqual(len(rows), num_ids)

    def test_applying_markdown(self) -> None:
        # A message saved without rendered_content gets rendered lazily
        # when hydrated, and the rendering is persisted back to the row.
        sender = self.example_user('othello')
        receiver = self.example_user('hamlet')
        recipient = Recipient.objects.get(type_id=receiver.id, type=Recipient.PERSONAL)
        sending_client = make_client(name="test suite")
        message = Message(
            sender=sender,
            recipient=recipient,
            content='hello **world**',
            date_sent=timezone_now(),
            sending_client=sending_client,
            last_edit_time=timezone_now(),
            edit_history='[]'
        )
        message.set_topic_name('whatever')
        message.save()

        # An important part of this test is to get the message through this exact code path,
        # because there is an ugly hack we need to cover.  So don't just say "row = message".
        row = MessageDict.get_raw_db_rows([message.id])[0]
        dct = MessageDict.build_dict_from_raw_db_row(row)
        expected_content = '<p>hello <strong>world</strong></p>'
        self.assertEqual(dct['rendered_content'], expected_content)
        message = Message.objects.get(id=message.id)
        self.assertEqual(message.rendered_content, expected_content)
        self.assertEqual(message.rendered_content_version, bugdown.version)

    @mock.patch("zerver.lib.message.bugdown.convert")
    def test_applying_markdown_invalid_format(self, convert_mock: Any) -> None:
        # pretend the converter returned an invalid message without raising an exception
        convert_mock.return_value = None
        sender = self.example_user('othello')
        receiver = self.example_user('hamlet')
        recipient = Recipient.objects.get(type_id=receiver.id, type=Recipient.PERSONAL)
        sending_client = make_client(name="test suite")
        message = Message(
            sender=sender,
            recipient=recipient,
            content='hello **world**',
            date_sent=timezone_now(),
            sending_client=sending_client,
            last_edit_time=timezone_now(),
            edit_history='[]'
        )
        message.set_topic_name('whatever')
        message.save()

        # An important part of this test is to get the message through this exact code path,
        # because there is an ugly hack we need to cover.  So don't just say "row = message".
        row = MessageDict.get_raw_db_rows([message.id])[0]
        dct = MessageDict.build_dict_from_raw_db_row(row)
        # When rendering fails, a placeholder error message is shown instead.
        error_content = '<p>[Zulip note: Sorry, we could not understand the formatting of your message]</p>'
        self.assertEqual(dct['rendered_content'], error_content)

    def test_topic_links_use_stream_realm(self) -> None:
        # Set up a realm filter on 'zulip' and assert that messages
        # sent to a stream on 'zulip' have the topic linkified from
        # senders in both the 'zulip' and 'lear' realms as well as
        # the notification bot.
        zulip_realm = get_realm('zulip')
        url_format_string = r"https://trac.zulip.net/ticket/%(id)s"
        url = 'https://trac.zulip.net/ticket/123'
        topic_name = 'test #123'

        realm_filter = RealmFilter(realm=zulip_realm,
                                   pattern=r"#(?P<id>[0-9]{2,8})",
                                   url_format_string=url_format_string)
        self.assertEqual(
            realm_filter.__str__(),
            '<RealmFilter(zulip): #(?P<id>[0-9]{2,8})'
            ' https://trac.zulip.net/ticket/%(id)s>')

        def get_message(sender: UserProfile) -> Message:
            msg_id = self.send_stream_message(sender, 'Denmark', 'hello world', topic_name,
                                              zulip_realm)
            return Message.objects.get(id=msg_id)

        def assert_topic_links(links: List[str], msg: Message) -> None:
            dct = MessageDict.to_dict_uncached_helper([msg])[0]
            self.assertEqual(dct[TOPIC_LINKS], links)

        # Send messages before and after saving the realm filter from each user.
        assert_topic_links([], get_message(self.example_user('othello')))
        assert_topic_links([], get_message(self.lear_user('cordelia')))
        assert_topic_links([], get_message(self.notification_bot()))
        realm_filter.save()
        assert_topic_links([url], get_message(self.example_user('othello')))
        assert_topic_links([url], get_message(self.lear_user('cordelia')))
        assert_topic_links([url], get_message(self.notification_bot()))

    def test_reaction(self) -> None:
        # Reactions attached to a message are hydrated into the message
        # dict with both legacy fields and a nested 'user' block.
        sender = self.example_user('othello')
        receiver = self.example_user('hamlet')
        recipient = Recipient.objects.get(type_id=receiver.id, type=Recipient.PERSONAL)
        sending_client = make_client(name="test suite")
        message = Message(
            sender=sender,
            recipient=recipient,
            content='hello **world**',
            date_sent=timezone_now(),
            sending_client=sending_client,
            last_edit_time=timezone_now(),
            edit_history='[]'
        )
        message.set_topic_name('whatever')
        message.save()

        reaction = Reaction.objects.create(
            message=message, user_profile=sender,
            emoji_name='simple_smile')
        row = MessageDict.get_raw_db_rows([message.id])[0]
        msg_dict = MessageDict.build_dict_from_raw_db_row(row)
        self.assertEqual(msg_dict['reactions'][0]['emoji_name'],
                         reaction.emoji_name)
        self.assertEqual(msg_dict['reactions'][0]['user_id'], sender.id)
        self.assertEqual(msg_dict['reactions'][0]['user']['id'],
                         sender.id)
        self.assertEqual(msg_dict['reactions'][0]['user']['email'],
                         sender.email)
        self.assertEqual(msg_dict['reactions'][0]['user']['full_name'],
                         sender.full_name)

    def test_missing_anchor(self) -> None:
        # GET /json/messages requires an 'anchor' argument when
        # use_first_unread_anchor is false.
        self.login('hamlet')
        result = self.client_get(
            '/json/messages?use_first_unread_anchor=false&num_before=1&num_after=1')

        self.assert_json_error(
            result, "Missing 'anchor' argument.")

    def test_invalid_anchor(self) -> None:
        # A non-numeric anchor value is rejected.
        self.login('hamlet')
        result = self.client_get(
            '/json/messages?use_first_unread_anchor=false&num_before=1&num_after=1&anchor=chocolate')

        self.assert_json_error(
            result, "Invalid anchor")
class SewMessageAndReactionTest(ZulipTestCase):
    """Tests for sew_messages_and_reactions, which joins raw message rows
    with their raw reaction rows."""

    def test_sew_messages_and_reaction(self) -> None:
        # Create 10 messages (5 PMs + 5 stream messages), each with one
        # reaction, and verify every sewn row carries its reaction.
        sender = self.example_user('othello')
        receiver = self.example_user('hamlet')
        pm_recipient = Recipient.objects.get(type_id=receiver.id, type=Recipient.PERSONAL)
        stream_name = 'Çiğdem'
        stream = self.make_stream(stream_name)
        stream_recipient = Recipient.objects.get(type_id=stream.id, type=Recipient.STREAM)
        sending_client = make_client(name="test suite")

        needed_ids = []
        for i in range(5):
            for recipient in [pm_recipient, stream_recipient]:
                message = Message(
                    sender=sender,
                    recipient=recipient,
                    content='whatever %d' % (i,),
                    date_sent=timezone_now(),
                    sending_client=sending_client,
                    last_edit_time=timezone_now(),
                    edit_history='[]'
                )
                message.set_topic_name('whatever')
                message.save()
                needed_ids.append(message.id)
                reaction = Reaction(user_profile=sender, message=message,
                                    emoji_name='simple_smile')
                reaction.save()

        messages = Message.objects.filter(id__in=needed_ids).values(
            *['id', 'content'])
        reactions = Reaction.get_raw_db_rows(needed_ids)
        tied_data = sew_messages_and_reactions(messages, reactions)
        for data in tied_data:
            self.assertEqual(len(data['reactions']), 1)
            self.assertEqual(data['reactions'][0]['emoji_name'],
                             'simple_smile')
            self.assertTrue(data['id'])
            self.assertTrue(data['content'])
class MessagePOSTTest(ZulipTestCase):
    def _send_and_verify_message(self, user: UserProfile, stream_name: str, error_msg: Optional[str]=None) -> None:
        """Send a stream message as `user`.

        If `error_msg` is None, expect success and verify the message is
        fetchable; otherwise expect a JsonableError matching `error_msg`.
        """
        if error_msg is None:
            msg_id = self.send_stream_message(user, stream_name)
            result = self.api_get(user, '/json/messages/' + str(msg_id))
            self.assert_json_success(result)
        else:
            with self.assertRaisesRegex(JsonableError, error_msg):
                self.send_stream_message(user, stream_name)
    def test_message_to_self(self) -> None:
        """
        Sending a message to a stream to which you are subscribed is
        successful.
        """
        self.login('hamlet')
        result = self.client_post("/json/messages", {"type": "stream",
                                                     "to": "Verona",
                                                     "client": "test suite",
                                                     "content": "Test message",
                                                     "topic": "Test topic"})
        self.assert_json_success(result)
    def test_api_message_to_self(self) -> None:
        """
        Same as above, but for the API view
        """
        user = self.example_user('hamlet')
        result = self.api_post(user, "/api/v1/messages", {"type": "stream",
                                                          "to": "Verona",
                                                          "client": "test suite",
                                                          "content": "Test message",
                                                          "topic": "Test topic"})
        self.assert_json_success(result)
    def test_message_to_stream_with_nonexistent_id(self) -> None:
        # A bot sending to an unknown stream ID gets a JSON error, and the
        # bot's owner is notified via a PM from the system.
        cordelia = self.example_user('cordelia')
        bot = self.create_test_bot(
            short_name='whatever',
            user_profile=cordelia,
        )
        result = self.api_post(
            bot, "/api/v1/messages",
            {
                "type": "stream",
                "to": ujson.dumps([99999]),
                "client": "test suite",
                "content": "Stream message by ID.",
                "topic": "Test topic for stream ID message"
            }
        )
        self.assert_json_error(result, "Stream with ID '99999' does not exist")

        msg = self.get_last_message()
        expected = ("Your bot `whatever-bot@zulip.testserver` tried to send a message to "
                    "stream ID 99999, but there is no stream with that ID.")
        self.assertEqual(msg.content, expected)
    def test_message_to_stream_by_id(self) -> None:
        """
        Sending a message to a stream (by stream ID) to which you are
        subscribed is successful.
        """
        self.login('hamlet')
        realm = get_realm('zulip')
        stream = get_stream('Verona', realm)
        # "to" takes a JSON-encoded list of stream IDs.
        result = self.client_post("/json/messages", {"type": "stream",
                                                     "to": ujson.dumps([stream.id]),
                                                     "client": "test suite",
                                                     "content": "Stream message by ID.",
                                                     "topic": "Test topic for stream ID message"})
        self.assert_json_success(result)
        sent_message = self.get_last_message()
        self.assertEqual(sent_message.content, "Stream message by ID.")
    def test_sending_message_as_stream_post_policy_admins(self) -> None:
        """
        Sending messages to streams which only the admins can create and post to.
        """
        admin_profile = self.example_user("iago")
        self.login_user(admin_profile)

        stream_name = "Verona"
        stream = get_stream(stream_name, admin_profile.realm)
        do_change_stream_post_policy(stream, Stream.STREAM_POST_POLICY_ADMINS)

        # Admins and their owned bots can send to STREAM_POST_POLICY_ADMINS streams
        self._send_and_verify_message(admin_profile, stream_name)
        admin_owned_bot = self.create_test_bot(
            short_name='whatever1',
            full_name='whatever1',
            user_profile=admin_profile,
        )
        self._send_and_verify_message(admin_owned_bot, stream_name)

        non_admin_profile = self.example_user("hamlet")
        self.login_user(non_admin_profile)

        # Non admins and their owned bots cannot send to STREAM_POST_POLICY_ADMINS streams
        self._send_and_verify_message(non_admin_profile, stream_name,
                                      "Only organization administrators can send to this stream.")
        non_admin_owned_bot = self.create_test_bot(
            short_name='whatever2',
            full_name='whatever2',
            user_profile=non_admin_profile,
        )
        self._send_and_verify_message(non_admin_owned_bot, stream_name,
                                      "Only organization administrators can send to this stream.")

        # Bots without owner (except cross realm bot) cannot send to announcement only streams
        bot_without_owner = do_create_user(
            email='free-bot@zulip.testserver',
            password='',
            realm=non_admin_profile.realm,
            full_name='freebot',
            short_name='freebot',
            bot_type=UserProfile.DEFAULT_BOT,
        )
        self._send_and_verify_message(bot_without_owner, stream_name,
                                      "Only organization administrators can send to this stream.")

        # Cross realm bots should be allowed
        notification_bot = get_system_bot("notification-bot@zulip.com")
        internal_send_stream_message(stream.realm, notification_bot, stream,
                                     'Test topic', 'Test message by notification bot')
        self.assertEqual(self.get_last_message().content, 'Test message by notification bot')
    def test_sending_message_as_stream_post_policy_restrict_new_members(self) -> None:
        """
        Sending messages to streams which new members cannot create and post to.
        """
        admin_profile = self.example_user("iago")
        self.login_user(admin_profile)

        # Make both test users count as "new members" (joined within the
        # waiting period threshold).
        do_set_realm_property(admin_profile.realm, 'waiting_period_threshold', 10)
        admin_profile.date_joined = timezone_now() - datetime.timedelta(days=9)
        admin_profile.save()
        self.assertTrue(admin_profile.is_new_member)
        self.assertTrue(admin_profile.is_realm_admin)

        stream_name = "Verona"
        stream = get_stream(stream_name, admin_profile.realm)
        do_change_stream_post_policy(stream, Stream.STREAM_POST_POLICY_RESTRICT_NEW_MEMBERS)

        # Admins and their owned bots can send to STREAM_POST_POLICY_RESTRICT_NEW_MEMBERS streams,
        # even if the admin is a new user
        self._send_and_verify_message(admin_profile, stream_name)
        admin_owned_bot = self.create_test_bot(
            short_name='whatever1',
            full_name='whatever1',
            user_profile=admin_profile,
        )
        self._send_and_verify_message(admin_owned_bot, stream_name)

        non_admin_profile = self.example_user("hamlet")
        self.login_user(non_admin_profile)

        non_admin_profile.date_joined = timezone_now() - datetime.timedelta(days=9)
        non_admin_profile.save()
        self.assertTrue(non_admin_profile.is_new_member)
        self.assertFalse(non_admin_profile.is_realm_admin)

        # Non admins who are still new members (and their owned bots) cannot
        # send to STREAM_POST_POLICY_RESTRICT_NEW_MEMBERS streams
        self._send_and_verify_message(non_admin_profile, stream_name,
                                      "New members cannot send to this stream.")
        non_admin_owned_bot = self.create_test_bot(
            short_name='whatever2',
            full_name='whatever2',
            user_profile=non_admin_profile,
        )
        self._send_and_verify_message(non_admin_owned_bot, stream_name,
                                      "New members cannot send to this stream.")

        # Bots without owner (except cross realm bot) cannot send to announcement only stream
        bot_without_owner = do_create_user(
            email='free-bot@zulip.testserver',
            password='',
            realm=non_admin_profile.realm,
            full_name='freebot',
            short_name='freebot',
            bot_type=UserProfile.DEFAULT_BOT,
        )
        self._send_and_verify_message(bot_without_owner, stream_name,
                                      "New members cannot send to this stream.")

        # Cross realm bots should be allowed
        notification_bot = get_system_bot("notification-bot@zulip.com")
        internal_send_stream_message(stream.realm, notification_bot, stream,
                                     'Test topic', 'Test message by notification bot')
        self.assertEqual(self.get_last_message().content, 'Test message by notification bot')
    def test_api_message_with_default_to(self) -> None:
        """
        Sending messages without a to field should be sent to the default
        stream for the user_profile.
        """
        user = self.example_user('hamlet')
        user.default_sending_stream_id = get_stream('Verona', user.realm).id
        user.save()
        # Note: no "to" key in the payload.
        result = self.api_post(user, "/api/v1/messages", {"type": "stream",
                                                          "client": "test suite",
                                                          "content": "Test message no to",
                                                          "topic": "Test topic"})
        self.assert_json_success(result)

        sent_message = self.get_last_message()
        self.assertEqual(sent_message.content, "Test message no to")
    def test_message_to_nonexistent_stream(self) -> None:
        """
        Sending a message to a nonexistent stream fails.
        """
        self.login('hamlet')
        self.assertFalse(Stream.objects.filter(name="nonexistent_stream"))
        result = self.client_post("/json/messages", {"type": "stream",
                                                     "to": "nonexistent_stream",
                                                     "client": "test suite",
                                                     "content": "Test message",
                                                     "topic": "Test topic"})
        self.assert_json_error(result, "Stream 'nonexistent_stream' does not exist")
def test_message_to_nonexistent_stream_with_bad_characters(self) -> None:
"""
Nonexistent stream name with bad characters should be escaped properly.
"""
self.login('hamlet')
self.assertFalse(Stream.objects.filter(name="""&<"'><non-existent>"""))
result = self.client_post("/json/messages", {"type": "stream",
"to": """&<"'><non-existent>""",
"client": "test suite",
"content": "Test message",
"topic": "Test topic"})
self.assert_json_error(result, "Stream '&<"'><non-existent>' does not exist")
    def test_personal_message(self) -> None:
        """
        Sending a personal message to a valid username is successful.
        """
        user_profile = self.example_user("hamlet")
        self.login_user(user_profile)
        othello = self.example_user('othello')
        result = self.client_post("/json/messages", {"type": "private",
                                                     "content": "Test message",
                                                     "client": "test suite",
                                                     "to": othello.email})
        self.assert_json_success(result)
        message_id = ujson.loads(result.content.decode())['id']

        # The PM should appear as exactly one recent conversation with
        # Othello (the perspective user is excluded from user_ids).
        recent_conversations = get_recent_private_conversations(user_profile)
        self.assertEqual(len(recent_conversations), 1)
        recent_conversation = list(recent_conversations.values())[0]
        recipient_id = list(recent_conversations.keys())[0]
        self.assertEqual(set(recent_conversation['user_ids']), {othello.id})
        self.assertEqual(recent_conversation['max_message_id'], message_id)

        # Now send a message to yourself and see how that interacts with the data structure
        result = self.client_post("/json/messages", {"type": "private",
                                                     "content": "Test message",
                                                     "client": "test suite",
                                                     "to": user_profile.email})
        self.assert_json_success(result)
        self_message_id = ujson.loads(result.content.decode())['id']

        recent_conversations = get_recent_private_conversations(user_profile)
        self.assertEqual(len(recent_conversations), 2)
        recent_conversation = recent_conversations[recipient_id]
        self.assertEqual(set(recent_conversation['user_ids']), {othello.id})
        self.assertEqual(recent_conversation['max_message_id'], message_id)

        # Now verify we have the appropriate self-pm data structure
        del recent_conversations[recipient_id]
        recent_conversation = list(recent_conversations.values())[0]
        recipient_id = list(recent_conversations.keys())[0]
        # A self-PM conversation has an empty user_ids set.
        self.assertEqual(set(recent_conversation['user_ids']), set())
        self.assertEqual(recent_conversation['max_message_id'], self_message_id)
    def test_personal_message_by_id(self) -> None:
        """
        Sending a personal message to a valid user ID is successful.
        """
        self.login('hamlet')
        # "to" takes a JSON-encoded list of user IDs.
        result = self.client_post(
            "/json/messages",
            {
                "type": "private",
                "content": "Test message",
                "client": "test suite",
                "to": ujson.dumps([self.example_user("othello").id])
            }
        )
        self.assert_json_success(result)

        msg = self.get_last_message()
        self.assertEqual("Test message", msg.content)
        self.assertEqual(msg.recipient_id, self.example_user("othello").id)
    def test_group_personal_message_by_id(self) -> None:
        """
        Sending a personal message to a valid user ID is successful.
        """
        self.login('hamlet')
        result = self.client_post(
            "/json/messages",
            {
                "type": "private",
                "content": "Test message",
                "client": "test suite",
                "to": ujson.dumps([self.example_user("othello").id,
                                   self.example_user("cordelia").id])
            }
        )
        self.assert_json_success(result)

        msg = self.get_last_message()
        self.assertEqual("Test message", msg.content)
        # A group PM's recipient is the huddle containing the sender plus
        # all recipients.
        self.assertEqual(msg.recipient_id, get_huddle_recipient(
            {self.example_user("hamlet").id,
             self.example_user("othello").id,
             self.example_user("cordelia").id}).id
        )
    def test_personal_message_copying_self(self) -> None:
        """
        Sending a personal message to yourself plus another user is successful,
        and counts as a message just to that user.
        """
        hamlet = self.example_user('hamlet')
        othello = self.example_user('othello')
        self.login_user(hamlet)
        result = self.client_post("/json/messages", {
            "type": "private",
            "content": "Test message",
            "client": "test suite",
            "to": ujson.dumps([hamlet.id, othello.id])})
        self.assert_json_success(result)

        msg = self.get_last_message()
        # Verify that we're not actually on the "recipient list"
        self.assertNotIn("Hamlet", str(msg.recipient))
    def test_personal_message_to_nonexistent_user(self) -> None:
        """
        Sending a personal message to an invalid email returns error JSON.
        """
        self.login('hamlet')
        result = self.client_post("/json/messages", {"type": "private",
                                                     "content": "Test message",
                                                     "client": "test suite",
                                                     "to": "nonexistent"})
        self.assert_json_error(result, "Invalid email 'nonexistent'")
    def test_personal_message_to_deactivated_user(self) -> None:
        """
        Sending a personal message to a deactivated user returns error JSON.
        """
        othello = self.example_user('othello')
        cordelia = self.example_user('cordelia')
        do_deactivate_user(othello)
        self.login('hamlet')

        # Direct PM to the deactivated user fails.
        result = self.client_post("/json/messages", {
            "type": "private",
            "content": "Test message",
            "client": "test suite",
            "to": ujson.dumps([othello.id])})
        self.assert_json_error(result, "'{}' is no longer using Zulip.".format(othello.email))

        # A group PM including the deactivated user fails the same way.
        result = self.client_post("/json/messages", {
            "type": "private",
            "content": "Test message",
            "client": "test suite",
            "to": ujson.dumps([othello.id, cordelia.id])})
        self.assert_json_error(result, "'{}' is no longer using Zulip.".format(othello.email))
    def test_invalid_type(self) -> None:
        """
        Sending a message of unknown type returns error JSON.
        """
        self.login('hamlet')
        othello = self.example_user('othello')
        result = self.client_post("/json/messages", {"type": "invalid type",
                                                     "content": "Test message",
                                                     "client": "test suite",
                                                     "to": othello.email})
        self.assert_json_error(result, "Invalid message type")
    def test_empty_message(self) -> None:
        """
        Sending a message that is empty or only whitespace should fail
        """
        self.login('hamlet')
        othello = self.example_user('othello')
        result = self.client_post("/json/messages", {"type": "private",
                                                     "content": " ",
                                                     "client": "test suite",
                                                     "to": othello.email})
        self.assert_json_error(result, "Message must not be empty")
    def test_empty_string_topic(self) -> None:
        """
        Sending a message that has empty string topic should fail
        """
        self.login('hamlet')
        result = self.client_post("/json/messages", {"type": "stream",
                                                     "to": "Verona",
                                                     "client": "test suite",
                                                     "content": "Test message",
                                                     "topic": ""})
        self.assert_json_error(result, "Topic can't be empty")
    def test_missing_topic(self) -> None:
        """
        Sending a message without topic should fail
        """
        self.login('hamlet')
        result = self.client_post("/json/messages", {"type": "stream",
                                                     "to": "Verona",
                                                     "client": "test suite",
                                                     "content": "Test message"})
        self.assert_json_error(result, "Missing topic")
    def test_invalid_message_type(self) -> None:
        """
        Messages other than the type of "private" or "stream" are considered as invalid
        """
        self.login('hamlet')
        result = self.client_post("/json/messages", {"type": "invalid",
                                                     "to": "Verona",
                                                     "client": "test suite",
                                                     "content": "Test message",
                                                     "topic": "Test topic"})
        self.assert_json_error(result, "Invalid message type")
    def test_private_message_without_recipients(self) -> None:
        """
        Sending private message without recipients should fail
        """
        self.login('hamlet')
        result = self.client_post("/json/messages", {"type": "private",
                                                     "content": "Test content",
                                                     "client": "test suite",
                                                     "to": ""})
        self.assert_json_error(result, "Message must have recipients")
    def test_mirrored_huddle(self) -> None:
        """
        Sending a mirrored huddle message works
        """
        result = self.api_post(self.mit_user("starnine"),
                               "/json/messages", {"type": "private",
                                                  "sender": self.mit_email("sipbtest"),
                                                  "content": "Test message",
                                                  "client": "zephyr_mirror",
                                                  "to": ujson.dumps([self.mit_email("starnine"),
                                                                     self.mit_email("espuser")])},
                               subdomain="zephyr")
        self.assert_json_success(result)
    def test_mirrored_personal(self) -> None:
        """
        Sending a mirrored personal message works
        """
        result = self.api_post(self.mit_user("starnine"),
                               "/json/messages", {"type": "private",
                                                  "sender": self.mit_email("sipbtest"),
                                                  "content": "Test message",
                                                  "client": "zephyr_mirror",
                                                  "to": self.mit_email("starnine")},
                               subdomain="zephyr")
        self.assert_json_success(result)
    def test_mirrored_personal_browser(self) -> None:
        """
        Sending a mirrored personal message via the browser should not work.
        """
        # client_post uses a session-authenticated (browser) request rather
        # than API-key auth, which mirroring requires.
        user = self.mit_user('starnine')
        self.login_user(user)
        result = self.client_post("/json/messages",
                                  {"type": "private",
                                   "sender": self.mit_email("sipbtest"),
                                   "content": "Test message",
                                   "client": "zephyr_mirror",
                                   "to": self.mit_email("starnine")},
                                  subdomain="zephyr")
        self.assert_json_error(result, "Invalid mirrored message")
    def test_mirrored_personal_to_someone_else(self) -> None:
        """
        Sending a mirrored personal message to someone else is not allowed.
        """
        result = self.api_post(self.mit_user("starnine"), "/api/v1/messages",
                               {"type": "private",
                                "sender": self.mit_email("sipbtest"),
                                "content": "Test message",
                                "client": "zephyr_mirror",
                                "to": self.mit_email("espuser")},
                               subdomain="zephyr")
        self.assert_json_error(result, "User not authorized for this query")
    def test_duplicated_mirrored_huddle(self) -> None:
        """
        Sending two mirrored huddles in the row return the same ID
        """
        msg = {"type": "private",
               "sender": self.mit_email("sipbtest"),
               "content": "Test message",
               "client": "zephyr_mirror",
               "to": ujson.dumps([self.mit_email("espuser"),
                                  self.mit_email("starnine")])}

        # DNS.dnslookup is mocked because mirroring validates zephyr users
        # against Hesiod-style passwd entries.
        with mock.patch('DNS.dnslookup', return_value=[['starnine:*:84233:101:Athena Consulting Exchange User,,,:/mit/starnine:/bin/bash']]):
            result1 = self.api_post(self.mit_user("starnine"), "/api/v1/messages", msg,
                                    subdomain="zephyr")
            self.assert_json_success(result1)

        with mock.patch('DNS.dnslookup', return_value=[['espuser:*:95494:101:Esp Classroom,,,:/mit/espuser:/bin/athena/bash']]):
            result2 = self.api_post(self.mit_user("espuser"), "/api/v1/messages", msg,
                                    subdomain="zephyr")
            self.assert_json_success(result2)

        # The second send is deduplicated to the same message ID.
        self.assertEqual(ujson.loads(result1.content)['id'],
                         ujson.loads(result2.content)['id'])
def test_message_with_null_bytes(self) -> None:
"""
A message with null bytes in it is handled.
"""
self.login('hamlet')
post_data = {"type": "stream", "to": "Verona", "client": "test suite",
"content": " I like null bytes \x00 in my content", "topic": "Test topic"}
result = self.client_post("/json/messages", post_data)
self.assert_json_error(result, "Message must not contain null bytes")
def test_strip_message(self) -> None:
"""
A message with mixed whitespace at the end is cleaned up.
"""
self.login('hamlet')
post_data = {"type": "stream", "to": "Verona", "client": "test suite",
"content": " I like whitespace at the end! \n\n \n", "topic": "Test topic"}
result = self.client_post("/json/messages", post_data)
self.assert_json_success(result)
sent_message = self.get_last_message()
self.assertEqual(sent_message.content, " I like whitespace at the end!")
def test_long_message(self) -> None:
"""
Sending a message longer than the maximum message length succeeds but is
truncated.
"""
self.login('hamlet')
long_message = "A" * (MAX_MESSAGE_LENGTH + 1)
post_data = {"type": "stream", "to": "Verona", "client": "test suite",
"content": long_message, "topic": "Test topic"}
result = self.client_post("/json/messages", post_data)
self.assert_json_success(result)
sent_message = self.get_last_message()
self.assertEqual(sent_message.content,
"A" * (MAX_MESSAGE_LENGTH - 20) + "\n[message truncated]")
def test_long_topic(self) -> None:
"""
Sending a message with a topic longer than the maximum topic length
succeeds, but the topic is truncated.
"""
self.login('hamlet')
long_topic = "A" * (MAX_TOPIC_NAME_LENGTH + 1)
post_data = {"type": "stream", "to": "Verona", "client": "test suite",
"content": "test content", "topic": long_topic}
result = self.client_post("/json/messages", post_data)
self.assert_json_success(result)
sent_message = self.get_last_message()
self.assertEqual(sent_message.topic_name(),
"A" * (MAX_TOPIC_NAME_LENGTH - 3) + "...")
def test_send_forged_message_as_not_superuser(self) -> None:
self.login('hamlet')
result = self.client_post("/json/messages", {"type": "stream",
"to": "Verona",
"client": "test suite",
"content": "Test message",
"topic": "Test topic",
"forged": "true"})
self.assert_json_error(result, "User not authorized for this query")
def test_send_message_as_not_superuser_to_different_domain(self) -> None:
self.login('hamlet')
result = self.client_post("/json/messages", {"type": "stream",
"to": "Verona",
"client": "test suite",
"content": "Test message",
"topic": "Test topic",
"realm_str": "mit"})
self.assert_json_error(result, "User not authorized for this query")
def test_send_message_as_superuser_to_domain_that_dont_exist(self) -> None:
user = self.example_user("default_bot")
password = "test_password"
user.set_password(password)
user.is_api_super_user = True
user.save()
result = self.api_post(user,
"/api/v1/messages", {"type": "stream",
"to": "Verona",
"client": "test suite",
"content": "Test message",
"topic": "Test topic",
"realm_str": "non-existing"})
user.is_api_super_user = False
user.save()
self.assert_json_error(result, "Unknown organization 'non-existing'")
def test_send_message_when_sender_is_not_set(self) -> None:
result = self.api_post(self.mit_user("starnine"), "/api/v1/messages",
{"type": "private",
"content": "Test message",
"client": "zephyr_mirror",
"to": self.mit_email("starnine")},
subdomain="zephyr")
self.assert_json_error(result, "Missing sender")
def test_send_message_as_not_superuser_when_type_is_not_private(self) -> None:
result = self.api_post(self.mit_user("starnine"), "/api/v1/messages",
{"type": "not-private",
"sender": self.mit_email("sipbtest"),
"content": "Test message",
"client": "zephyr_mirror",
"to": self.mit_email("starnine")},
subdomain="zephyr")
self.assert_json_error(result, "User not authorized for this query")
    @mock.patch("zerver.views.messages.create_mirrored_message_users")
    def test_send_message_create_mirrored_message_user_returns_invalid_input(
            self, create_mirrored_message_users_mock: Any) -> None:
        """If the mirrored-user creation helper raises InvalidMirrorInput,
        the API reports the message as an invalid mirrored message."""
        # Simulate the mirroring helper rejecting the request's users.
        create_mirrored_message_users_mock.side_effect = InvalidMirrorInput()
        result = self.api_post(self.mit_user("starnine"), "/api/v1/messages",
                               {"type": "private",
                                "sender": self.mit_email("sipbtest"),
                                "content": "Test message",
                                "client": "zephyr_mirror",
                                "to": self.mit_email("starnine")},
                               subdomain="zephyr")
        self.assert_json_error(result, "Invalid mirrored message")
    @mock.patch("zerver.views.messages.create_mirrored_message_users")
    def test_send_message_when_client_is_zephyr_mirror_but_string_id_is_not_zephyr(
            self, create_mirrored_message_users_mock: Any) -> None:
        """Zephyr mirroring is only permitted for the zephyr realm."""
        create_mirrored_message_users_mock.return_value = mock.Mock()
        user = self.mit_user("starnine")
        # Move the user's realm off the zephyr string_id so the
        # realm-specific mirroring check fires.
        user.realm.string_id = 'notzephyr'
        user.realm.save()
        result = self.api_post(user, "/api/v1/messages",
                               {"type": "private",
                                "sender": self.mit_email("sipbtest"),
                                "content": "Test message",
                                "client": "zephyr_mirror",
                                "to": user.email},
                               subdomain="notzephyr")
        self.assert_json_error(result, "Zephyr mirroring is not allowed in this organization")
    @mock.patch("zerver.views.messages.create_mirrored_message_users")
    def test_send_message_when_client_is_zephyr_mirror_but_recipient_is_user_id(
            self, create_mirrored_message_users_mock: Any) -> None:
        """Mirrored messages must address recipients by email; numeric
        user IDs are rejected."""
        create_mirrored_message_users_mock.return_value = mock.Mock()
        user = self.mit_user("starnine")
        self.login_user(user)
        result = self.api_post(user, "/api/v1/messages",
                               {"type": "private",
                                "sender": self.mit_email("sipbtest"),
                                "content": "Test message",
                                "client": "zephyr_mirror",
                                # A user-ID list instead of emails triggers the error.
                                "to": ujson.dumps([user.id])},
                               subdomain="zephyr")
        self.assert_json_error(result, "Mirroring not allowed with recipient user IDs")
def test_send_message_irc_mirror(self) -> None:
reset_emails_in_zulip_realm()
self.login('hamlet')
bot_info = {
'full_name': 'IRC bot',
'short_name': 'irc',
}
result = self.client_post("/json/bots", bot_info)
self.assert_json_success(result)
email = "irc-bot@zulip.testserver"
user = get_user(email, get_realm('zulip'))
user.is_api_super_user = True
user.save()
user = get_user(email, get_realm('zulip'))
self.subscribe(user, "IRCland")
# Simulate a mirrored message with a slightly old timestamp.
fake_date_sent = timezone_now() - datetime.timedelta(minutes=37)
fake_timestamp = datetime_to_timestamp(fake_date_sent)
result = self.api_post(user, "/api/v1/messages", {"type": "stream",
"forged": "true",
"time": fake_timestamp,
"sender": "irc-user@irc.zulip.com",
"content": "Test message",
"client": "irc_mirror",
"topic": "from irc",
"to": "IRCLand"})
self.assert_json_success(result)
msg = self.get_last_message()
self.assertEqual(int(datetime_to_timestamp(msg.date_sent)), int(fake_timestamp))
# Now test again using forged=yes
fake_date_sent = timezone_now() - datetime.timedelta(minutes=22)
fake_timestamp = datetime_to_timestamp(fake_date_sent)
result = self.api_post(user, "/api/v1/messages", {"type": "stream",
"forged": "yes",
"time": fake_timestamp,
"sender": "irc-user@irc.zulip.com",
"content": "Test message",
"client": "irc_mirror",
"topic": "from irc",
"to": "IRCLand"})
self.assert_json_success(result)
msg = self.get_last_message()
self.assertEqual(int(datetime_to_timestamp(msg.date_sent)), int(fake_timestamp))
    def test_unsubscribed_api_super_user(self) -> None:
        """An API super user may post to any stream — even an invite-only
        stream she is not subscribed to — both as herself and on behalf of
        a mirrored user; without the super user bit the same posts fail."""
        reset_emails_in_zulip_realm()

        cordelia = self.example_user('cordelia')
        stream_name = 'private_stream'
        self.make_stream(stream_name, invite_only=True)

        self.unsubscribe(cordelia, stream_name)

        # As long as Cordelia is a super_user, she can send messages
        # to ANY stream, even one she is not subscribed to, and
        # she can do it for herself or on behalf of a mirrored user.

        def test_with(sender_email: str, client: str, forged: bool) -> None:
            payload = dict(
                type="stream",
                to=stream_name,
                client=client,
                topic='whatever',
                content='whatever',
                forged=ujson.dumps(forged),
            )

            # Only pass the 'sender' property when doing mirroring behavior.
            if forged:
                payload['sender'] = sender_email

            # Without super user rights the post is rejected...
            cordelia.is_api_super_user = False
            cordelia.save()

            result = self.api_post(cordelia, "/api/v1/messages", payload)
            self.assert_json_error_contains(result, 'authorized')

            # ...and with them, the identical post succeeds.
            cordelia.is_api_super_user = True
            cordelia.save()

            result = self.api_post(cordelia, "/api/v1/messages", payload)
            self.assert_json_success(result)

        # Case 1: sending as herself (no mirroring).
        test_with(
            sender_email=cordelia.email,
            client='test suite',
            forged=False,
        )

        # Case 2: mirroring on behalf of an IRC user.
        test_with(
            sender_email='irc_person@zulip.com',
            client='irc_mirror',
            forged=True,
        )
def test_bot_can_send_to_owner_stream(self) -> None:
cordelia = self.example_user('cordelia')
bot = self.create_test_bot(
short_name='whatever',
user_profile=cordelia,
)
stream_name = 'private_stream'
self.make_stream(stream_name, invite_only=True)
payload = dict(
type="stream",
to=stream_name,
client='test suite',
topic='whatever',
content='whatever',
)
result = self.api_post(bot, "/api/v1/messages", payload)
self.assert_json_error_contains(result, 'Not authorized to send')
# We subscribe the bot owner! (aka cordelia)
self.subscribe(bot.bot_owner, stream_name)
result = self.api_post(bot, "/api/v1/messages", payload)
self.assert_json_success(result)
    def test_cross_realm_bots_can_use_api_on_own_subdomain(self) -> None:
        """A cross-realm bot can post via the API on its own realm's
        subdomain."""
        # Cross realm bots should use internal_send_*_message, not the API:
        notification_bot = self.notification_bot()
        stream = self.make_stream("notify_channel", get_realm("zulipinternal"))

        result = self.api_post(notification_bot,
                               "/api/v1/messages",
                               {"type": "stream",
                                "to": "notify_channel",
                                "client": "test suite",
                                "content": "Test message",
                                "topic": "Test topic"},
                               subdomain='zulipinternal')

        self.assert_json_success(result)
        message = self.get_last_message()

        # The message lands in the internal realm's stream, sent by the bot.
        self.assertEqual(message.content, "Test message")
        self.assertEqual(message.sender, notification_bot)
        self.assertEqual(message.recipient.type_id, stream.id)
    def test_create_mirror_user_despite_race(self) -> None:
        """If another process creates the mirror user between our lookup
        and our INSERT, create_mirror_user_if_needed recovers from the
        IntegrityError and returns the already-created user."""
        realm = get_realm('zulip')

        email = 'fred@example.com'

        email_to_full_name = lambda email: 'fred'

        def create_user(**kwargs: Any) -> UserProfile:
            # Sanity-check the arguments the helper passes through.
            self.assertEqual(kwargs['full_name'], 'fred')
            self.assertEqual(kwargs['email'], email)
            self.assertEqual(kwargs['active'], False)
            self.assertEqual(kwargs['is_mirror_dummy'], True)
            # We create an actual user here to simulate a race.
            # We use the minimal, un-mocked function.
            kwargs['bot_type'] = None
            kwargs['bot_owner'] = None
            kwargs['tos_version'] = None
            kwargs['timezone'] = timezone_now()
            create_user_profile(**kwargs).save()
            # Then fail as a real racing INSERT would.
            raise IntegrityError()

        with mock.patch('zerver.lib.actions.create_user',
                        side_effect=create_user) as m:
            mirror_fred_user = create_mirror_user_if_needed(
                realm,
                email,
                email_to_full_name,
            )

        # Despite the simulated race, we get back the created user.
        self.assertEqual(mirror_fred_user.delivery_email, email)
        m.assert_called()
def test_guest_user(self) -> None:
sender = self.example_user('polonius')
stream_name = 'public stream'
self.make_stream(stream_name, invite_only=False)
payload = dict(
type="stream",
to=stream_name,
client='test suite',
topic='whatever',
content='whatever',
)
# Guest user can't send message to unsubscribed public streams
result = self.api_post(sender, "/api/v1/messages", payload)
self.assert_json_error(result, "Not authorized to send to stream 'public stream'")
self.subscribe(sender, stream_name)
# Guest user can send message to subscribed public streams
result = self.api_post(sender, "/api/v1/messages", payload)
self.assert_json_success(result)
class ScheduledMessageTest(ZulipTestCase):
    """Exercises scheduled message delivery ("send later" and reminders)
    via the /json/messages endpoint."""

    def last_scheduled_message(self) -> ScheduledMessage:
        # The most recently created ScheduledMessage row.
        return ScheduledMessage.objects.all().order_by('-id')[0]

    def do_schedule_message(self, msg_type: str, to: str, msg: str,
                            defer_until: str='', tz_guess: str='',
                            delivery_type: str='send_later',
                            realm_str: str='zulip') -> HttpResponse:
        """POST a message as Hamlet with the given scheduling parameters
        and return the raw HTTP response."""
        self.login('hamlet')

        topic_name = ''
        if msg_type == 'stream':
            topic_name = 'Test topic'

        payload = {"type": msg_type,
                   "to": to,
                   "client": "test suite",
                   "content": msg,
                   "topic": topic_name,
                   "realm_str": realm_str,
                   "delivery_type": delivery_type,
                   "tz_guess": tz_guess}
        # Omit deliver_at entirely when no defer time was given, so the
        # missing-parameter error path can be tested.
        if defer_until:
            payload["deliver_at"] = defer_until

        result = self.client_post("/json/messages", payload)
        return result

    def test_schedule_message(self) -> None:
        content = "Test message"
        defer_until = timezone_now().replace(tzinfo=None) + datetime.timedelta(days=1)
        defer_until_str = str(defer_until)

        # Scheduling a message to a stream you are subscribed is successful.
        result = self.do_schedule_message('stream', 'Verona',
                                          content + ' 1', defer_until_str)
        message = self.last_scheduled_message()
        self.assert_json_success(result)
        self.assertEqual(message.content, 'Test message 1')
        self.assertEqual(message.topic_name(), 'Test topic')
        self.assertEqual(message.scheduled_timestamp, convert_to_UTC(defer_until))
        self.assertEqual(message.delivery_type, ScheduledMessage.SEND_LATER)

        # Scheduling a message for reminders.
        result = self.do_schedule_message('stream', 'Verona',
                                          content + ' 2', defer_until_str,
                                          delivery_type='remind')
        message = self.last_scheduled_message()
        self.assert_json_success(result)
        self.assertEqual(message.delivery_type, ScheduledMessage.REMIND)

        # Scheduling a private message is successful.
        othello = self.example_user('othello')
        hamlet = self.example_user('hamlet')
        result = self.do_schedule_message('private', othello.email,
                                          content + ' 3', defer_until_str)
        message = self.last_scheduled_message()
        self.assert_json_success(result)
        self.assertEqual(message.content, 'Test message 3')
        self.assertEqual(message.scheduled_timestamp, convert_to_UTC(defer_until))
        self.assertEqual(message.delivery_type, ScheduledMessage.SEND_LATER)

        # Setting a reminder in PM's to other users causes an error.
        result = self.do_schedule_message('private', othello.email,
                                          content + ' 4', defer_until_str,
                                          delivery_type='remind')
        self.assert_json_error(result, 'Reminders can only be set for streams.')

        # Setting a reminder in PM's to ourself is successful.
        # Required by reminders from message actions popover caret feature.
        result = self.do_schedule_message('private', hamlet.email,
                                          content + ' 5', defer_until_str,
                                          delivery_type='remind')
        message = self.last_scheduled_message()
        self.assert_json_success(result)
        self.assertEqual(message.content, 'Test message 5')
        self.assertEqual(message.delivery_type, ScheduledMessage.REMIND)

        # Scheduling a message while guessing timezone.
        tz_guess = 'Asia/Kolkata'
        result = self.do_schedule_message('stream', 'Verona', content + ' 6',
                                          defer_until_str, tz_guess=tz_guess)
        message = self.last_scheduled_message()
        self.assert_json_success(result)
        self.assertEqual(message.content, 'Test message 6')
        local_tz = get_timezone(tz_guess)
        # Since mypy is not able to recognize localize and normalize as attributes of tzinfo we use ignore.
        utz_defer_until = local_tz.normalize(local_tz.localize(defer_until))  # type: ignore[attr-defined]  # Reason in comment on previous line.
        self.assertEqual(message.scheduled_timestamp,
                         convert_to_UTC(utz_defer_until))
        self.assertEqual(message.delivery_type, ScheduledMessage.SEND_LATER)

        # Test with users timezone setting as set to some timezone rather than
        # empty. This will help interpret timestamp in users local timezone.
        user = self.example_user("hamlet")
        user.timezone = 'US/Pacific'
        user.save(update_fields=['timezone'])
        result = self.do_schedule_message('stream', 'Verona',
                                          content + ' 7', defer_until_str)
        message = self.last_scheduled_message()
        self.assert_json_success(result)
        self.assertEqual(message.content, 'Test message 7')
        local_tz = get_timezone(user.timezone)
        # Since mypy is not able to recognize localize and normalize as attributes of tzinfo we use ignore.
        utz_defer_until = local_tz.normalize(local_tz.localize(defer_until))  # type: ignore[attr-defined]  # Reason in comment on previous line.
        self.assertEqual(message.scheduled_timestamp,
                         convert_to_UTC(utz_defer_until))
        self.assertEqual(message.delivery_type, ScheduledMessage.SEND_LATER)

    def test_scheduling_in_past(self) -> None:
        # Scheduling a message in past should fail.
        content = "Test message"
        defer_until = timezone_now()
        defer_until_str = str(defer_until)

        result = self.do_schedule_message('stream', 'Verona',
                                          content + ' 1', defer_until_str)
        self.assert_json_error(result, 'Time must be in the future.')

    def test_invalid_timestamp(self) -> None:
        # Scheduling a message from which timestamp couldn't be parsed
        # successfully should fail.
        content = "Test message"
        defer_until = 'Missed the timestamp'

        result = self.do_schedule_message('stream', 'Verona',
                                          content + ' 1', defer_until)
        self.assert_json_error(result, 'Invalid time format')

    def test_missing_deliver_at(self) -> None:
        # Requesting delayed delivery without a deliver_at parameter fails.
        content = "Test message"

        result = self.do_schedule_message('stream', 'Verona',
                                          content + ' 1')
        self.assert_json_error(result, 'Missing deliver_at in a request for delayed message delivery')
class EditMessageTest(ZulipTestCase):
def check_topic(self,
msg_id: int,
topic_name: str) -> None:
msg = Message.objects.get(id=msg_id)
self.assertEqual(msg.topic_name(), topic_name)
    def check_message(self,
                      msg_id: int,
                      topic_name: str,
                      content: str) -> None:
        """Verify the message's topic and content both in the database row
        and in the cached message dict served by messages_for_ids."""
        # Make sure we saved the message correctly to the DB.
        msg = Message.objects.get(id=msg_id)
        self.assertEqual(msg.topic_name(), topic_name)
        self.assertEqual(msg.content, content)

        '''
        Next, we will make sure we properly cached the
        messages.  We still have to do 2 queries to
        hydrate sender/recipient info, but we won't need
        to hit the zerver_message table.
        '''

        with queries_captured() as queries:
            (fetch_message_dict,) = messages_for_ids(
                message_ids = [msg.id],
                user_message_flags={msg_id: []},
                search_fields=dict(),
                apply_markdown=False,
                client_gravatar=False,
                allow_edit_history=True,
            )

        # Exactly the two hydration queries — and none against zerver_message.
        self.assertEqual(len(queries), 2)
        for query in queries:
            self.assertNotIn('message', query['sql'])

        self.assertEqual(
            fetch_message_dict[TOPIC_NAME],
            msg.topic_name()
        )
        self.assertEqual(
            fetch_message_dict['content'],
            msg.content
        )
        self.assertEqual(
            fetch_message_dict['sender_id'],
            msg.sender_id
        )

        # Edit history, when present, must round-trip through the cache.
        if msg.edit_history:
            self.assertEqual(
                fetch_message_dict['edit_history'],
                ujson.loads(msg.edit_history)
            )
    def test_query_count_on_to_dict_uncached(self) -> None:
        """Pin the number of queries MessageDict.to_dict_uncached issues."""
        # `to_dict_uncached` method is used by the mechanisms
        # tested in this class. Hence, its performance is tested here.
        # Generate 2 messages
        user = self.example_user("hamlet")
        self.login_user(user)
        stream_name = "public_stream"
        self.subscribe(user, stream_name)
        message_one_id = self.send_stream_message(user,
                                                  stream_name, "Message one")
        # Subscribe a second user between the sends so the two messages
        # have different audiences.
        later_subscribed_user = self.example_user("cordelia")
        self.subscribe(later_subscribed_user, stream_name)
        message_two_id = self.send_stream_message(user,
                                                  stream_name, "Message two")
        message_ids = [message_one_id, message_two_id]
        messages = [Message.objects.select_related().get(id=message_id)
                    for message_id in message_ids]

        # Check number of queries performed
        with queries_captured() as queries:
            MessageDict.to_dict_uncached(messages)
        # 1 query for realm_id per message = 2
        # 1 query each for reactions & submessage for all messages = 2
        self.assertEqual(len(queries), 4)
def test_save_message(self) -> None:
"""This is also tested by a client test, but here we can verify
the cache against the database"""
self.login('hamlet')
msg_id = self.send_stream_message(self.example_user("hamlet"), "Scotland",
topic_name="editing", content="before edit")
result = self.client_patch("/json/messages/" + str(msg_id), {
'message_id': msg_id,
'content': 'after edit'
})
self.assert_json_success(result)
self.check_message(msg_id, topic_name="editing", content="after edit")
result = self.client_patch("/json/messages/" + str(msg_id), {
'message_id': msg_id,
'topic': 'edited'
})
self.assert_json_success(result)
self.check_topic(msg_id, topic_name="edited")
def test_fetch_raw_message(self) -> None:
self.login('hamlet')
msg_id = self.send_personal_message(
from_user=self.example_user("hamlet"),
to_user=self.example_user("cordelia"),
content="**before** edit",
)
result = self.client_get('/json/messages/' + str(msg_id))
self.assert_json_success(result)
self.assertEqual(result.json()['raw_content'], '**before** edit')
# Test error cases
result = self.client_get('/json/messages/999999')
self.assert_json_error(result, 'Invalid message(s)')
self.login('cordelia')
result = self.client_get('/json/messages/' + str(msg_id))
self.assert_json_success(result)
self.login('othello')
result = self.client_get('/json/messages/' + str(msg_id))
self.assert_json_error(result, 'Invalid message(s)')
def test_fetch_raw_message_stream_wrong_realm(self) -> None:
user_profile = self.example_user("hamlet")
self.login_user(user_profile)
stream = self.make_stream('public_stream')
self.subscribe(user_profile, stream.name)
msg_id = self.send_stream_message(user_profile, stream.name,
topic_name="test", content="test")
result = self.client_get('/json/messages/' + str(msg_id))
self.assert_json_success(result)
mit_user = self.mit_user('sipbtest')
self.login_user(mit_user)
result = self.client_get('/json/messages/' + str(msg_id), subdomain="zephyr")
self.assert_json_error(result, 'Invalid message(s)')
def test_fetch_raw_message_private_stream(self) -> None:
user_profile = self.example_user("hamlet")
self.login_user(user_profile)
stream = self.make_stream('private_stream', invite_only=True)
self.subscribe(user_profile, stream.name)
msg_id = self.send_stream_message(user_profile, stream.name,
topic_name="test", content="test")
result = self.client_get('/json/messages/' + str(msg_id))
self.assert_json_success(result)
self.login('othello')
result = self.client_get('/json/messages/' + str(msg_id))
self.assert_json_error(result, 'Invalid message(s)')
def test_edit_message_no_permission(self) -> None:
self.login('hamlet')
msg_id = self.send_stream_message(self.example_user("iago"), "Scotland",
topic_name="editing", content="before edit")
result = self.client_patch("/json/messages/" + str(msg_id), {
'message_id': msg_id,
'content': 'content after edit',
})
self.assert_json_error(result, "You don't have permission to edit this message")
def test_edit_message_no_changes(self) -> None:
self.login('hamlet')
msg_id = self.send_stream_message(self.example_user("hamlet"), "Scotland",
topic_name="editing", content="before edit")
result = self.client_patch("/json/messages/" + str(msg_id), {
'message_id': msg_id,
})
self.assert_json_error(result, "Nothing to change")
def test_edit_message_no_topic(self) -> None:
self.login('hamlet')
msg_id = self.send_stream_message(self.example_user("hamlet"), "Scotland",
topic_name="editing", content="before edit")
result = self.client_patch("/json/messages/" + str(msg_id), {
'message_id': msg_id,
'topic': ' '
})
self.assert_json_error(result, "Topic can't be empty")
def test_edit_message_no_content(self) -> None:
self.login('hamlet')
msg_id = self.send_stream_message(self.example_user("hamlet"), "Scotland",
topic_name="editing", content="before edit")
result = self.client_patch("/json/messages/" + str(msg_id), {
'message_id': msg_id,
'content': ' '
})
self.assert_json_success(result)
content = Message.objects.filter(id=msg_id).values_list('content', flat = True)[0]
self.assertEqual(content, "(deleted)")
    def test_edit_message_history_disabled(self) -> None:
        """When the realm disables allow_edit_history, the /history
        endpoint errors and fetched messages carry no edit_history."""
        user_profile = self.example_user("hamlet")
        do_set_realm_property(user_profile.realm, "allow_edit_history", False)
        self.login('hamlet')

        # Single-line edit
        msg_id_1 = self.send_stream_message(self.example_user("hamlet"),
                                            "Denmark",
                                            topic_name="editing",
                                            content="content before edit")
        new_content_1 = 'content after edit'
        result_1 = self.client_patch("/json/messages/" + str(msg_id_1), {
            'message_id': msg_id_1, 'content': new_content_1
        })
        self.assert_json_success(result_1)

        # The history endpoint must refuse outright.
        result = self.client_get(
            "/json/messages/" + str(msg_id_1) + "/history")
        self.assert_json_error(result, "Message edit history is disabled in this organization")

        # Now verify that if we fetch the message directly, there's no
        # edit history data attached.
        messages_result = self.client_get("/json/messages",
                                          {"anchor": msg_id_1, "num_before": 0, "num_after": 10})
        self.assert_json_success(messages_result)
        json_messages = ujson.loads(
            messages_result.content.decode('utf-8'))
        for msg in json_messages['messages']:
            self.assertNotIn("edit_history", msg)
    def test_edit_message_history(self) -> None:
        """The /history endpoint returns each revision's rendered content,
        the previous rendering, and an HTML diff between them."""
        self.login('hamlet')

        # Single-line edit
        msg_id_1 = self.send_stream_message(
            self.example_user("hamlet"),
            "Scotland",
            topic_name="editing",
            content="content before edit")
        new_content_1 = 'content after edit'
        result_1 = self.client_patch("/json/messages/" + str(msg_id_1), {
            'message_id': msg_id_1, 'content': new_content_1
        })
        self.assert_json_success(result_1)

        message_edit_history_1 = self.client_get(
            "/json/messages/" + str(msg_id_1) + "/history")
        json_response_1 = ujson.loads(
            message_edit_history_1.content.decode('utf-8'))
        message_history_1 = json_response_1['message_history']

        # Check content of message after edit.
        self.assertEqual(message_history_1[0]['rendered_content'],
                         '<p>content before edit</p>')
        self.assertEqual(message_history_1[1]['rendered_content'],
                         '<p>content after edit</p>')
        # The diff highlights inserted and deleted spans.
        self.assertEqual(message_history_1[1]['content_html_diff'],
                         ('<p>content '
                          '<span class="highlight_text_inserted">after</span> '
                          '<span class="highlight_text_deleted">before</span>'
                          ' edit</p>'))
        # Check content of message before edit.
        self.assertEqual(message_history_1[1]['prev_rendered_content'],
                         '<p>content before edit</p>')

        # Edits on new lines
        msg_id_2 = self.send_stream_message(
            self.example_user("hamlet"),
            "Scotland",
            topic_name="editing",
            content=('content before edit, line 1\n'
                     '\n'
                     'content before edit, line 3'))
        new_content_2 = ('content before edit, line 1\n'
                         'content after edit, line 2\n'
                         'content before edit, line 3')
        result_2 = self.client_patch("/json/messages/" + str(msg_id_2), {
            'message_id': msg_id_2, 'content': new_content_2
        })
        self.assert_json_success(result_2)

        message_edit_history_2 = self.client_get(
            "/json/messages/" + str(msg_id_2) + "/history")
        json_response_2 = ujson.loads(
            message_edit_history_2.content.decode('utf-8'))
        message_history_2 = json_response_2['message_history']

        self.assertEqual(message_history_2[0]['rendered_content'],
                         ('<p>content before edit, line 1</p>\n'
                          '<p>content before edit, line 3</p>'))
        self.assertEqual(message_history_2[1]['rendered_content'],
                         ('<p>content before edit, line 1<br>\n'
                          'content after edit, line 2<br>\n'
                          'content before edit, line 3</p>'))
        self.assertEqual(message_history_2[1]['content_html_diff'],
                         ('<p>content before edit, line 1<br> '
                          'content <span class="highlight_text_inserted">after edit, line 2<br> '
                          'content</span> before edit, line 3</p>'))
        self.assertEqual(message_history_2[1]['prev_rendered_content'],
                         ('<p>content before edit, line 1</p>\n'
                          '<p>content before edit, line 3</p>'))
    def test_edit_link(self) -> None:
        """Editing a link in a message re-renders it and the HTML diff
        annotates the old and new link targets."""
        # Link editing
        self.login('hamlet')
        msg_id_1 = self.send_stream_message(
            self.example_user("hamlet"),
            "Scotland",
            topic_name="editing",
            content="Here is a link to [zulip](www.zulip.org).")
        new_content_1 = 'Here is a link to [zulip](www.zulipchat.com).'
        result_1 = self.client_patch("/json/messages/" + str(msg_id_1), {
            'message_id': msg_id_1, 'content': new_content_1
        })
        self.assert_json_success(result_1)

        message_edit_history_1 = self.client_get(
            "/json/messages/" + str(msg_id_1) + "/history")
        json_response_1 = ujson.loads(
            message_edit_history_1.content.decode('utf-8'))
        message_history_1 = json_response_1['message_history']

        # Check content of message after edit.
        self.assertEqual(message_history_1[0]['rendered_content'],
                         '<p>Here is a link to '
                         '<a href="http://www.zulip.org">zulip</a>.</p>')
        self.assertEqual(message_history_1[1]['rendered_content'],
                         '<p>Here is a link to '
                         '<a href="http://www.zulipchat.com">zulip</a>.</p>')
        # The diff spells out both URLs with inserted/deleted highlighting.
        self.assertEqual(message_history_1[1]['content_html_diff'],
                         ('<p>Here is a link to <a href="http://www.zulipchat.com"'
                          '>zulip '
                          '<span class="highlight_text_inserted"> Link: http://www.zulipchat.com .'
                          '</span> <span class="highlight_text_deleted"> Link: http://www.zulip.org .'
                          '</span> </a></p>'))
def test_edit_history_unedited(self) -> None:
self.login('hamlet')
msg_id = self.send_stream_message(
self.example_user('hamlet'),
'Scotland',
topic_name='editing',
content='This message has not been edited.')
result = self.client_get('/json/messages/{}/history'.format(msg_id))
self.assert_json_success(result)
message_history = result.json()['message_history']
self.assert_length(message_history, 1)
def test_user_info_for_updates(self) -> None:
hamlet = self.example_user('hamlet')
cordelia = self.example_user('cordelia')
self.login_user(hamlet)
self.subscribe(hamlet, 'Scotland')
self.subscribe(cordelia, 'Scotland')
msg_id = self.send_stream_message(hamlet, 'Scotland',
content='@**Cordelia Lear**')
user_info = get_user_info_for_message_updates(msg_id)
message_user_ids = user_info['message_user_ids']
self.assertIn(hamlet.id, message_user_ids)
self.assertIn(cordelia.id, message_user_ids)
mention_user_ids = user_info['mention_user_ids']
self.assertEqual(mention_user_ids, {cordelia.id})
    def test_edit_cases(self) -> None:
        """This test verifies the accuracy of construction of Zulip's edit
        history data structures."""
        self.login('hamlet')
        hamlet = self.example_user('hamlet')
        msg_id = self.send_stream_message(self.example_user("hamlet"), "Scotland",
                                          topic_name="topic 1", content="content 1")

        # Edit 1: content only.
        result = self.client_patch("/json/messages/" + str(msg_id), {
            'message_id': msg_id,
            'content': 'content 2',
        })
        self.assert_json_success(result)
        history = ujson.loads(Message.objects.get(id=msg_id).edit_history)
        self.assertEqual(history[0]['prev_content'], 'content 1')
        self.assertEqual(history[0]['user_id'], hamlet.id)
        self.assertEqual(set(history[0].keys()),
                         {'timestamp', 'prev_content', 'user_id',
                          'prev_rendered_content', 'prev_rendered_content_version'})

        # Edit 2: topic only — the entry carries only the topic keys.
        result = self.client_patch("/json/messages/" + str(msg_id), {
            'message_id': msg_id,
            'topic': 'topic 2',
        })
        self.assert_json_success(result)
        history = ujson.loads(Message.objects.get(id=msg_id).edit_history)
        self.assertEqual(history[0][LEGACY_PREV_TOPIC], 'topic 1')
        self.assertEqual(history[0]['user_id'], hamlet.id)
        self.assertEqual(set(history[0].keys()), {'timestamp', LEGACY_PREV_TOPIC, 'user_id'})

        # Edit 3: content and topic together — both key sets appear.
        result = self.client_patch("/json/messages/" + str(msg_id), {
            'message_id': msg_id,
            'content': 'content 3',
            'topic': 'topic 3',
        })
        self.assert_json_success(result)
        history = ujson.loads(Message.objects.get(id=msg_id).edit_history)
        self.assertEqual(history[0]['prev_content'], 'content 2')
        self.assertEqual(history[0][LEGACY_PREV_TOPIC], 'topic 2')
        self.assertEqual(history[0]['user_id'], hamlet.id)
        self.assertEqual(set(history[0].keys()),
                         {'timestamp', LEGACY_PREV_TOPIC, 'prev_content', 'user_id',
                          'prev_rendered_content', 'prev_rendered_content_version'})

        # Edit 4: content only, again by Hamlet.
        result = self.client_patch("/json/messages/" + str(msg_id), {
            'message_id': msg_id,
            'content': 'content 4',
        })
        self.assert_json_success(result)
        history = ujson.loads(Message.objects.get(id=msg_id).edit_history)
        self.assertEqual(history[0]['prev_content'], 'content 3')
        self.assertEqual(history[0]['user_id'], hamlet.id)

        # Edit 5: topic only, by a different editor (Iago) — the entry
        # records his user_id.
        self.login('iago')
        result = self.client_patch("/json/messages/" + str(msg_id), {
            'message_id': msg_id,
            'topic': 'topic 4',
        })
        self.assert_json_success(result)
        history = ujson.loads(Message.objects.get(id=msg_id).edit_history)
        self.assertEqual(history[0][LEGACY_PREV_TOPIC], 'topic 3')
        self.assertEqual(history[0]['user_id'], self.example_user('iago').id)

        # Entries are newest-first; spot-check the whole stack.
        history = ujson.loads(Message.objects.get(id=msg_id).edit_history)
        self.assertEqual(history[0][LEGACY_PREV_TOPIC], 'topic 3')
        self.assertEqual(history[2][LEGACY_PREV_TOPIC], 'topic 2')
        self.assertEqual(history[3][LEGACY_PREV_TOPIC], 'topic 1')
        self.assertEqual(history[1]['prev_content'], 'content 3')
        self.assertEqual(history[2]['prev_content'], 'content 2')
        self.assertEqual(history[4]['prev_content'], 'content 1')

        # Now, we verify that the edit history data sent back has the
        # correct filled-out fields
        message_edit_history = self.client_get("/json/messages/" + str(msg_id) + "/history")

        json_response = ujson.loads(message_edit_history.content.decode('utf-8'))

        # We reverse the message history view output so that the IDs line up with the above.
        message_history = list(reversed(json_response['message_history']))
        i = 0
        for entry in message_history:
            expected_entries = {'content', 'rendered_content', 'topic', 'timestamp', 'user_id'}
            if i in {0, 2, 3}:
                expected_entries.add('prev_topic')
            if i in {1, 2, 4}:
                expected_entries.add('prev_content')
                expected_entries.add('prev_rendered_content')
                expected_entries.add('content_html_diff')
            i += 1
            self.assertEqual(expected_entries, set(entry.keys()))
        self.assertEqual(len(message_history), 6)
        self.assertEqual(message_history[0]['prev_topic'], 'topic 3')
        self.assertEqual(message_history[0]['topic'], 'topic 4')
        self.assertEqual(message_history[1]['topic'], 'topic 3')
        self.assertEqual(message_history[2]['topic'], 'topic 3')
        self.assertEqual(message_history[2]['prev_topic'], 'topic 2')
        self.assertEqual(message_history[3]['topic'], 'topic 2')
        self.assertEqual(message_history[3]['prev_topic'], 'topic 1')
        self.assertEqual(message_history[4]['topic'], 'topic 1')

        self.assertEqual(message_history[0]['content'], 'content 4')
        self.assertEqual(message_history[1]['content'], 'content 4')
        self.assertEqual(message_history[1]['prev_content'], 'content 3')
        self.assertEqual(message_history[2]['content'], 'content 3')
        self.assertEqual(message_history[2]['prev_content'], 'content 2')
        self.assertEqual(message_history[3]['content'], 'content 2')
        self.assertEqual(message_history[4]['content'], 'content 2')
        self.assertEqual(message_history[4]['prev_content'], 'content 1')
        self.assertEqual(message_history[5]['content'], 'content 1')
        self.assertEqual(message_history[5]['topic'], 'topic 1')
def test_edit_message_content_limit(self) -> None:
def set_message_editing_params(allow_message_editing: bool,
message_content_edit_limit_seconds: int,
allow_community_topic_editing: bool) -> None:
result = self.client_patch("/json/realm", {
'allow_message_editing': ujson.dumps(allow_message_editing),
'message_content_edit_limit_seconds': message_content_edit_limit_seconds,
'allow_community_topic_editing': ujson.dumps(allow_community_topic_editing),
})
self.assert_json_success(result)
def do_edit_message_assert_success(id_: int, unique_str: str, topic_only: bool=False) -> None:
new_topic = 'topic' + unique_str
new_content = 'content' + unique_str
params_dict = {'message_id': id_, 'topic': new_topic}
if not topic_only:
params_dict['content'] = new_content
result = self.client_patch("/json/messages/" + str(id_), params_dict)
self.assert_json_success(result)
if topic_only:
self.check_topic(id_, topic_name=new_topic)
else:
self.check_message(id_, topic_name=new_topic, content=new_content)
def do_edit_message_assert_error(id_: int, unique_str: str, error: str,
topic_only: bool=False) -> None:
message = Message.objects.get(id=id_)
old_topic = message.topic_name()
old_content = message.content
new_topic = 'topic' + unique_str
new_content = 'content' + unique_str
params_dict = {'message_id': id_, 'topic': new_topic}
if not topic_only:
params_dict['content'] = new_content
result = self.client_patch("/json/messages/" + str(id_), params_dict)
message = Message.objects.get(id=id_)
self.assert_json_error(result, error)
msg = Message.objects.get(id=id_)
self.assertEqual(msg.topic_name(), old_topic)
self.assertEqual(msg.content, old_content)
self.login('iago')
# send a message in the past
id_ = self.send_stream_message(self.example_user("iago"), "Scotland",
content="content", topic_name="topic")
message = Message.objects.get(id=id_)
message.date_sent = message.date_sent - datetime.timedelta(seconds=180)
message.save()
# test the various possible message editing settings
# high enough time limit, all edits allowed
set_message_editing_params(True, 240, False)
do_edit_message_assert_success(id_, 'A')
# out of time, only topic editing allowed
set_message_editing_params(True, 120, False)
do_edit_message_assert_success(id_, 'B', True)
do_edit_message_assert_error(id_, 'C', "The time limit for editing this message has passed")
# infinite time, all edits allowed
set_message_editing_params(True, 0, False)
do_edit_message_assert_success(id_, 'D')
# without allow_message_editing, nothing is allowed
set_message_editing_params(False, 240, False)
do_edit_message_assert_error(id_, 'E', "Your organization has turned off message editing", True)
set_message_editing_params(False, 120, False)
do_edit_message_assert_error(id_, 'F', "Your organization has turned off message editing", True)
set_message_editing_params(False, 0, False)
do_edit_message_assert_error(id_, 'G', "Your organization has turned off message editing", True)
def test_allow_community_topic_editing(self) -> None:
def set_message_editing_params(allow_message_editing: bool,
message_content_edit_limit_seconds: int,
allow_community_topic_editing: bool) -> None:
result = self.client_patch("/json/realm", {
'allow_message_editing': ujson.dumps(allow_message_editing),
'message_content_edit_limit_seconds': message_content_edit_limit_seconds,
'allow_community_topic_editing': ujson.dumps(allow_community_topic_editing),
})
self.assert_json_success(result)
def do_edit_message_assert_success(id_: int, unique_str: str) -> None:
new_topic = 'topic' + unique_str
params_dict = {'message_id': id_, 'topic': new_topic}
result = self.client_patch("/json/messages/" + str(id_), params_dict)
self.assert_json_success(result)
self.check_topic(id_, topic_name=new_topic)
def do_edit_message_assert_error(id_: int, unique_str: str, error: str) -> None:
message = Message.objects.get(id=id_)
old_topic = message.topic_name()
old_content = message.content
new_topic = 'topic' + unique_str
params_dict = {'message_id': id_, 'topic': new_topic}
result = self.client_patch("/json/messages/" + str(id_), params_dict)
message = Message.objects.get(id=id_)
self.assert_json_error(result, error)
msg = Message.objects.get(id=id_)
self.assertEqual(msg.topic_name(), old_topic)
self.assertEqual(msg.content, old_content)
self.login('iago')
# send a message in the past
id_ = self.send_stream_message(self.example_user("hamlet"), "Scotland",
content="content", topic_name="topic")
message = Message.objects.get(id=id_)
message.date_sent = message.date_sent - datetime.timedelta(seconds=180)
message.save()
# any user can edit the topic of a message
set_message_editing_params(True, 0, True)
# log in as a new user
self.login('cordelia')
do_edit_message_assert_success(id_, 'A')
# only admins can edit the topics of messages
self.login('iago')
set_message_editing_params(True, 0, False)
do_edit_message_assert_success(id_, 'B')
self.login('cordelia')
do_edit_message_assert_error(id_, 'C', "You don't have permission to edit this message")
# users cannot edit topics if allow_message_editing is False
self.login('iago')
set_message_editing_params(False, 0, True)
self.login('cordelia')
do_edit_message_assert_error(id_, 'D', "Your organization has turned off message editing")
# non-admin users cannot edit topics sent > 24 hrs ago
message.date_sent = message.date_sent - datetime.timedelta(seconds=90000)
message.save()
self.login('iago')
set_message_editing_params(True, 0, True)
do_edit_message_assert_success(id_, 'E')
self.login('cordelia')
do_edit_message_assert_error(id_, 'F', "The time limit for editing this message has passed")
# anyone should be able to edit "no topic" indefinitely
message.set_topic_name("(no topic)")
message.save()
self.login('cordelia')
do_edit_message_assert_success(id_, 'D')
    @mock.patch("zerver.lib.actions.send_event")
    def test_edit_topic_public_history_stream(self, mock_send_event: mock.MagicMock) -> None:
        """Verify exactly who is notified (via send_event) when a message's
        topic is edited in a stream whose history is public to subscribers:
        subscribers and UserMessage-row holders, except long-term-idle users.
        """
        stream_name = "Macbeth"
        hamlet = self.example_user("hamlet")
        cordelia = self.example_user("cordelia")
        self.make_stream(stream_name, history_public_to_subscribers=True)
        self.subscribe(hamlet, stream_name)
        self.login_user(hamlet)
        # Hamlet sends the message before Cordelia subscribes, so only
        # Hamlet has a UserMessage row for it.
        message_id = self.send_stream_message(hamlet, stream_name, "Where am I?")
        self.login_user(cordelia)
        self.subscribe(cordelia, stream_name)
        message = Message.objects.get(id=message_id)
        def do_update_message_topic_success(user_profile: UserProfile, message: Message,
                                            topic_name: str, users_to_be_notified: List[Dict[str, Any]]) -> None:
            # Edit the topic directly via do_update_message and assert
            # send_event was called with exactly the expected user list.
            do_update_message(
                user_profile=user_profile,
                message=message,
                new_stream=None,
                topic_name=topic_name,
                propagate_mode="change_later",
                content=None,
                rendered_content=None,
                prior_mention_user_ids=set(),
                mention_user_ids=set(),
                mention_data=None,
            )
            mock_send_event.assert_called_with(mock.ANY, mock.ANY, users_to_be_notified)
        # Returns the users that need to be notified when a message topic is changed
        def notify(user_id: int) -> Dict[str, Any]:
            um = UserMessage.objects.get(message=message_id)
            if um.user_profile_id == user_id:
                # The UserMessage owner gets their real flags.
                return {
                    "id": user_id,
                    "flags": um.flags_list()
                }
            else:
                # Everyone else is treated as having read the message.
                return {
                    "id": user_id,
                    "flags": ["read"]
                }
        users_to_be_notified = list(map(notify, [hamlet.id, cordelia.id]))
        # Edit topic of a message sent before Cordelia subscribed the stream
        do_update_message_topic_success(cordelia, message, "Othello eats apple", users_to_be_notified)
        # If Cordelia is long-term idle, she doesn't get a notification.
        cordelia.long_term_idle = True
        cordelia.save()
        users_to_be_notified = list(map(notify, [hamlet.id]))
        do_update_message_topic_success(cordelia, message, "Another topic idle", users_to_be_notified)
        cordelia.long_term_idle = False
        cordelia.save()
        # Even if Hamlet unsubscribes the stream, he should be notified when the topic is changed
        # because he has a UserMessage row.
        self.unsubscribe(hamlet, stream_name)
        users_to_be_notified = list(map(notify, [hamlet.id, cordelia.id]))
        do_update_message_topic_success(cordelia, message, "Another topic", users_to_be_notified)
        # Hamlet subscribes to the stream again and Cordelia unsubscribes, then Hamlet changes
        # the message topic. Cordelia won't receive any updates when a message on that stream is
        # changed because she is not a subscriber and doesn't have a UserMessage row.
        self.subscribe(hamlet, stream_name)
        self.unsubscribe(cordelia, stream_name)
        self.login_user(hamlet)
        users_to_be_notified = list(map(notify, [hamlet.id]))
        do_update_message_topic_success(hamlet, message, "Change again", users_to_be_notified)
@mock.patch("zerver.lib.actions.send_event")
def test_wildcard_mention(self, mock_send_event: mock.MagicMock) -> None:
stream_name = "Macbeth"
hamlet = self.example_user("hamlet")
cordelia = self.example_user("cordelia")
self.make_stream(stream_name, history_public_to_subscribers=True)
self.subscribe(hamlet, stream_name)
self.subscribe(cordelia, stream_name)
self.login_user(hamlet)
message_id = self.send_stream_message(hamlet, stream_name, "Hello everyone")
def notify(user_id: int) -> Dict[str, Any]:
return {
"id": user_id,
"flags": ["wildcard_mentioned"]
}
users_to_be_notified = sorted(map(notify, [cordelia.id, hamlet.id]), key=itemgetter("id"))
result = self.client_patch("/json/messages/" + str(message_id), {
'message_id': message_id,
'content': 'Hello @**everyone**',
})
self.assert_json_success(result)
# Extract the send_event call where event type is 'update_message'.
# Here we assert wildcard_mention_user_ids has been set properly.
called = False
for call_args in mock_send_event.call_args_list:
(arg_realm, arg_event, arg_notified_users) = call_args[0]
if arg_event['type'] == 'update_message':
self.assertEqual(arg_event['type'], 'update_message')
self.assertEqual(arg_event['wildcard_mention_user_ids'], [cordelia.id, hamlet.id])
self.assertEqual(sorted(arg_notified_users, key=itemgetter("id")), users_to_be_notified)
called = True
self.assertTrue(called)
def test_propagate_topic_forward(self) -> None:
self.login('hamlet')
id1 = self.send_stream_message(self.example_user("hamlet"), "Scotland",
topic_name="topic1")
id2 = self.send_stream_message(self.example_user("iago"), "Scotland",
topic_name="topic1")
id3 = self.send_stream_message(self.example_user("iago"), "Rome",
topic_name="topic1")
id4 = self.send_stream_message(self.example_user("hamlet"), "Scotland",
topic_name="topic2")
id5 = self.send_stream_message(self.example_user("iago"), "Scotland",
topic_name="topic1")
result = self.client_patch("/json/messages/" + str(id1), {
'message_id': id1,
'topic': 'edited',
'propagate_mode': 'change_later'
})
self.assert_json_success(result)
self.check_topic(id1, topic_name="edited")
self.check_topic(id2, topic_name="edited")
self.check_topic(id3, topic_name="topic1")
self.check_topic(id4, topic_name="topic2")
self.check_topic(id5, topic_name="edited")
    def test_propagate_all_topics(self) -> None:
        """With propagate_mode='change_all', renaming a topic renames every
        message in the same stream/topic (before and after the edited one),
        but not messages in other streams or other topics.
        """
        self.login('hamlet')
        id1 = self.send_stream_message(self.example_user("hamlet"), "Scotland",
                                       topic_name="topic1")
        id2 = self.send_stream_message(self.example_user("hamlet"), "Scotland",
                                       topic_name="topic1")
        id3 = self.send_stream_message(self.example_user("iago"), "Rome",
                                       topic_name="topic1")
        id4 = self.send_stream_message(self.example_user("hamlet"), "Scotland",
                                       topic_name="topic2")
        id5 = self.send_stream_message(self.example_user("iago"), "Scotland",
                                       topic_name="topic1")
        id6 = self.send_stream_message(self.example_user("iago"), "Scotland",
                                       topic_name="topic3")
        # Edit the middle message (id2); change_all should also rename the
        # earlier id1 and later id5.
        result = self.client_patch("/json/messages/" + str(id2), {
            'message_id': id2,
            'topic': 'edited',
            'propagate_mode': 'change_all'
        })
        self.assert_json_success(result)
        # id3 is in another stream; id4/id6 are other topics: all unchanged.
        self.check_topic(id1, topic_name="edited")
        self.check_topic(id2, topic_name="edited")
        self.check_topic(id3, topic_name="topic1")
        self.check_topic(id4, topic_name="topic2")
        self.check_topic(id5, topic_name="edited")
        self.check_topic(id6, topic_name="topic3")
def test_propagate_invalid(self) -> None:
self.login('hamlet')
id1 = self.send_stream_message(self.example_user("hamlet"), "Scotland",
topic_name="topic1")
result = self.client_patch("/json/messages/" + str(id1), {
'topic': 'edited',
'propagate_mode': 'invalid',
})
self.assert_json_error(result, 'Invalid propagate_mode')
self.check_topic(id1, topic_name="topic1")
result = self.client_patch("/json/messages/" + str(id1), {
'content': 'edited',
'propagate_mode': 'change_all',
})
self.assert_json_error(result, 'Invalid propagate_mode without topic edit')
self.check_topic(id1, topic_name="topic1")
def prepare_move_topics(self, user_email: str, old_stream: str, new_stream: str, topic: str) -> Tuple[UserProfile, Stream, Stream, int, int]:
user_profile = self.example_user(user_email)
self.login(user_email)
stream = self.make_stream(old_stream)
new_stream = self.make_stream(new_stream)
self.subscribe(user_profile, stream.name)
self.subscribe(user_profile, new_stream.name)
msg_id = self.send_stream_message(user_profile, stream.name,
topic_name=topic, content="First")
msg_id_lt = self.send_stream_message(user_profile, stream.name,
topic_name=topic, content="Second")
self.send_stream_message(user_profile, stream.name,
topic_name=topic, content="third")
return (user_profile, stream, new_stream, msg_id, msg_id_lt)
    def test_move_message_to_stream(self) -> None:
        """Moving a whole topic (propagate_mode='change_all') to another
        stream leaves only a breadcrumb notice in the old topic and places
        the messages plus a "moved here" notice in the target stream.
        """
        (user_profile, old_stream, new_stream, msg_id, msg_id_lt) = self.prepare_move_topics(
            "iago", "test move stream", "new stream", "test")
        result = self.client_patch("/json/messages/" + str(msg_id), {
            'message_id': msg_id,
            'stream_id': new_stream.id,
            'propagate_mode': 'change_all'
        })
        self.assert_json_success(result)
        # Only the "moved" breadcrumb remains in the old topic.
        messages = get_topic_messages(user_profile, old_stream, "test")
        self.assertEqual(len(messages), 1)
        self.assertEqual(messages[0].content, "This topic was moved by @_**Iago|%s** to #**new stream>test**" % (user_profile.id,))
        # All three messages plus a "moved here" notice are in the new stream.
        messages = get_topic_messages(user_profile, new_stream, "test")
        self.assertEqual(len(messages), 4)
        self.assertEqual(messages[3].content, "This topic was moved here from #**test move stream>test** by @_**Iago|%s**" % (user_profile.id,))
def test_move_message_to_stream_change_later(self) -> None:
(user_profile, old_stream, new_stream, msg_id, msg_id_later) = self.prepare_move_topics(
"iago", "test move stream", "new stream", "test")
result = self.client_patch("/json/messages/" + str(msg_id_later), {
'message_id': msg_id_later,
'stream_id': new_stream.id,
'propagate_mode': 'change_later'
})
self.assert_json_success(result)
messages = get_topic_messages(user_profile, old_stream, "test")
self.assertEqual(len(messages), 2)
self.assertEqual(messages[0].id, msg_id)
self.assertEqual(messages[1].content, "This topic was moved by @_**Iago|%s** to #**new stream>test**" % (user_profile.id,))
messages = get_topic_messages(user_profile, new_stream, "test")
self.assertEqual(len(messages), 3)
self.assertEqual(messages[0].id, msg_id_later)
self.assertEqual(messages[2].content, "This topic was moved here from #**test move stream>test** by @_**Iago|%d**" % (user_profile.id,))
def test_move_message_to_stream_no_allowed(self) -> None:
(user_profile, old_stream, new_stream, msg_id, msg_id_later) = self.prepare_move_topics(
"aaron", "test move stream", "new stream", "test")
result = self.client_patch("/json/messages/" + str(msg_id), {
'message_id': msg_id,
'stream_id': new_stream.id,
'propagate_mode': 'change_all'
})
self.assert_json_error(result, "You don't have permission to move this message")
messages = get_topic_messages(user_profile, old_stream, "test")
self.assertEqual(len(messages), 3)
messages = get_topic_messages(user_profile, new_stream, "test")
self.assertEqual(len(messages), 0)
def test_move_message_to_stream_with_content(self) -> None:
(user_profile, old_stream, new_stream, msg_id, msg_id_later) = self.prepare_move_topics(
"iago", "test move stream", "new stream", "test")
result = self.client_patch("/json/messages/" + str(msg_id), {
'message_id': msg_id,
'stream_id': new_stream.id,
'propagate_mode': 'change_all',
'content': 'Not allowed'
})
self.assert_json_error(result, "Cannot change message content while changing stream")
messages = get_topic_messages(user_profile, old_stream, "test")
self.assertEqual(len(messages), 3)
messages = get_topic_messages(user_profile, new_stream, "test")
self.assertEqual(len(messages), 0)
def test_move_message_to_stream_and_topic(self) -> None:
(user_profile, old_stream, new_stream, msg_id, msg_id_later) = self.prepare_move_topics(
"iago", "test move stream", "new stream", "test")
result = self.client_patch("/json/messages/" + str(msg_id), {
'message_id': msg_id,
'stream_id': new_stream.id,
'propagate_mode': 'change_all',
'topic': 'new topic'
})
messages = get_topic_messages(user_profile, old_stream, "test")
self.assertEqual(len(messages), 1)
self.assertEqual(messages[0].content, "This topic was moved by @_**Iago|%s** to #**new stream>new topic**" % (user_profile.id,))
messages = get_topic_messages(user_profile, new_stream, "new topic")
self.assertEqual(len(messages), 4)
self.assertEqual(messages[3].content, "This topic was moved here from #**test move stream>test** by @_**Iago|%s**" % (user_profile.id,))
self.assert_json_success(result)
    def test_move_message_to_stream_to_private_stream(self) -> None:
        """Moving messages into an invite-only (private) stream is rejected."""
        user_profile = self.example_user("iago")
        self.login("iago")
        stream = self.make_stream("test move stream")
        # Third positional argument makes the target stream invite-only.
        new_stream = self.make_stream("new stream", None, True)
        self.subscribe(user_profile, stream.name)
        self.subscribe(user_profile, new_stream.name)
        msg_id = self.send_stream_message(user_profile, stream.name,
                                          topic_name="test", content="First")
        self.send_stream_message(user_profile, stream.name,
                                 topic_name="test", content="Second")
        result = self.client_patch("/json/messages/" + str(msg_id), {
            'message_id': msg_id,
            'stream_id': new_stream.id,
            'propagate_mode': 'change_all',
        })
        self.assert_json_error(result, "Streams must be public")
        # We expect the messages to remain in the original stream/topic
        messages = get_topic_messages(user_profile, stream, "test")
        self.assertEqual(len(messages), 2)
        messages = get_topic_messages(user_profile, new_stream, "test")
        self.assertEqual(len(messages), 0)
class MirroredMessageUsersTest(ZulipTestCase):
    """Tests for create_mirrored_message_users(), which validates incoming
    mirroring requests (zephyr/irc/jabber) and creates "mirror dummy"
    accounts for senders/recipients that do not exist yet.
    """
    def test_invalid_sender(self) -> None:
        """A mirror request whose POST data lacks a 'sender' is rejected."""
        user = self.example_user('hamlet')
        recipients: List[str] = []
        Request = namedtuple('Request', ['POST'])
        request = Request(POST=dict())  # no sender
        with self.assertRaises(InvalidMirrorInput):
            create_mirrored_message_users(request, user, recipients)
    def test_invalid_client(self) -> None:
        """Only recognized mirroring clients may create mirrored messages."""
        client = get_client(name='banned_mirror')  # Invalid!!!
        user = self.example_user('hamlet')
        sender = user
        recipients: List[str] = []
        Request = namedtuple('Request', ['POST', 'client'])
        request = Request(POST = dict(sender=sender.email, type='private'),
                          client = client)
        with self.assertRaises(InvalidMirrorInput):
            create_mirrored_message_users(request, user, recipients)
    def test_invalid_email(self) -> None:
        """A malformed recipient email is rejected for every mirror client."""
        invalid_email = 'alice AT example.com'
        recipients = [invalid_email]
        # We use an MIT user here to maximize code coverage
        user = self.mit_user('starnine')
        sender = user
        Request = namedtuple('Request', ['POST', 'client'])
        for client_name in ['zephyr_mirror', 'irc_mirror', 'jabber_mirror']:
            client = get_client(name=client_name)
            request = Request(POST = dict(sender=sender.email, type='private'),
                              client = client)
            with self.assertRaises(InvalidMirrorInput):
                create_mirrored_message_users(request, user, recipients)
    @mock.patch('DNS.dnslookup', return_value=[['sipbtest:*:20922:101:Fred Sipb,,,:/mit/sipbtest:/bin/athena/tcsh']])
    def test_zephyr_mirror_new_recipient(self, ignored: object) -> None:
        """Test mirror dummy user creation for PM recipients"""
        client = get_client(name='zephyr_mirror')
        user = self.mit_user('starnine')
        sender = self.mit_user('sipbtest')
        new_user_email = 'bob_the_new_user@mit.edu'
        new_user_realm = get_realm("zephyr")
        recipients = [user.email, new_user_email]
        # Now make the request.
        Request = namedtuple('Request', ['POST', 'client'])
        request = Request(POST = dict(sender=sender.email, type='private'),
                          client = client)
        mirror_sender = create_mirrored_message_users(request, user, recipients)
        self.assertEqual(mirror_sender, sender)
        realm_users = UserProfile.objects.filter(realm=sender.realm)
        realm_emails = {user.email for user in realm_users}
        self.assertIn(user.email, realm_emails)
        self.assertIn(new_user_email, realm_emails)
        # The unknown recipient was auto-created as a mirror dummy account.
        bob = get_user(new_user_email, new_user_realm)
        self.assertTrue(bob.is_mirror_dummy)
    @mock.patch('DNS.dnslookup', return_value=[['sipbtest:*:20922:101:Fred Sipb,,,:/mit/sipbtest:/bin/athena/tcsh']])
    def test_zephyr_mirror_new_sender(self, ignored: object) -> None:
        """Test mirror dummy user creation for sender when sending to stream"""
        client = get_client(name='zephyr_mirror')
        user = self.mit_user('starnine')
        sender_email = 'new_sender@mit.edu'
        recipients = ['stream_name']
        # Now make the request.
        Request = namedtuple('Request', ['POST', 'client'])
        request = Request(POST = dict(sender=sender_email, type='stream'),
                          client = client)
        mirror_sender = create_mirrored_message_users(request, user, recipients)
        # The unknown sender was auto-created as a mirror dummy account.
        assert(mirror_sender is not None)
        self.assertEqual(mirror_sender.email, sender_email)
        self.assertTrue(mirror_sender.is_mirror_dummy)
    def test_irc_mirror(self) -> None:
        """IRC mirroring auto-creates mirror dummies for unknown recipients."""
        reset_emails_in_zulip_realm()
        client = get_client(name='irc_mirror')
        sender = self.example_user('hamlet')
        recipients = [self.nonreg_email('alice'), 'bob@irc.zulip.com', self.nonreg_email('cordelia')]
        # Now make the request.
        Request = namedtuple('Request', ['POST', 'client'])
        request = Request(POST = dict(sender=sender.email, type='private'),
                          client = client)
        mirror_sender = create_mirrored_message_users(request, sender, recipients)
        self.assertEqual(mirror_sender, sender)
        realm_users = UserProfile.objects.filter(realm=sender.realm)
        realm_emails = {user.email for user in realm_users}
        self.assertIn(self.nonreg_email('alice'), realm_emails)
        self.assertIn('bob@irc.zulip.com', realm_emails)
        bob = get_user('bob@irc.zulip.com', sender.realm)
        self.assertTrue(bob.is_mirror_dummy)
    def test_jabber_mirror(self) -> None:
        """Jabber mirroring auto-creates mirror dummies for unknown recipients."""
        reset_emails_in_zulip_realm()
        client = get_client(name='jabber_mirror')
        sender = self.example_user('hamlet')
        user = sender
        recipients = [self.nonreg_email('alice'), self.nonreg_email('bob'), self.nonreg_email('cordelia')]
        # Now make the request.
        Request = namedtuple('Request', ['POST', 'client'])
        request = Request(POST = dict(sender=sender.email, type='private'),
                          client = client)
        mirror_sender = create_mirrored_message_users(request, user, recipients)
        self.assertEqual(mirror_sender, sender)
        realm_users = UserProfile.objects.filter(realm=sender.realm)
        realm_emails = {user.email for user in realm_users}
        self.assertIn(self.nonreg_email('alice'), realm_emails)
        self.assertIn(self.nonreg_email('bob'), realm_emails)
        bob = get_user(self.nonreg_email('bob'), sender.realm)
        self.assertTrue(bob.is_mirror_dummy)
class MessageAccessTests(ZulipTestCase):
def test_update_invalid_flags(self) -> None:
message = self.send_personal_message(
self.example_user("cordelia"),
self.example_user("hamlet"),
"hello",
)
self.login('hamlet')
result = self.client_post("/json/messages/flags",
{"messages": ujson.dumps([message]),
"op": "add",
"flag": "invalid"})
self.assert_json_error(result, "Invalid flag: 'invalid'")
result = self.client_post("/json/messages/flags",
{"messages": ujson.dumps([message]),
"op": "add",
"flag": "is_private"})
self.assert_json_error(result, "Invalid flag: 'is_private'")
result = self.client_post("/json/messages/flags",
{"messages": ujson.dumps([message]),
"op": "add",
"flag": "active_mobile_push_notification"})
self.assert_json_error(result, "Invalid flag: 'active_mobile_push_notification'")
result = self.client_post("/json/messages/flags",
{"messages": ujson.dumps([message]),
"op": "add",
"flag": "mentioned"})
self.assert_json_error(result, "Flag not editable: 'mentioned'")
def change_star(self, messages: List[int], add: bool=True, **kwargs: Any) -> HttpResponse:
return self.client_post("/json/messages/flags",
{"messages": ujson.dumps(messages),
"op": "add" if add else "remove",
"flag": "starred"},
**kwargs)
    def test_change_star(self) -> None:
        """
        You can set a message as starred/un-starred through
        POST /json/messages/flags.
        """
        self.login('hamlet')
        message_ids = [self.send_personal_message(self.example_user("hamlet"),
                                                  self.example_user("hamlet"),
                                                  "test")]
        # Star a message.
        result = self.change_star(message_ids)
        self.assert_json_success(result)
        # Only the starred message should carry the 'starred' flag.
        for msg in self.get_messages():
            if msg['id'] in message_ids:
                self.assertEqual(msg['flags'], ['starred'])
            else:
                self.assertEqual(msg['flags'], ['read'])
        result = self.change_star(message_ids, False)
        self.assert_json_success(result)
        # Verify the stars were removed.
        for msg in self.get_messages():
            if msg['id'] in message_ids:
                self.assertEqual(msg['flags'], [])
def test_change_star_public_stream_historical(self) -> None:
"""
You can set a message as starred/un-starred through
POST /json/messages/flags.
"""
stream_name = "new_stream"
self.subscribe(self.example_user("hamlet"), stream_name)
self.login('hamlet')
message_ids = [
self.send_stream_message(self.example_user("hamlet"), stream_name, "test"),
]
# Send a second message so we can verify it isn't modified
other_message_ids = [
self.send_stream_message(self.example_user("hamlet"), stream_name, "test_unused"),
]
received_message_ids = [
self.send_personal_message(
self.example_user("hamlet"),
self.example_user("cordelia"),
"test_received"
),
]
# Now login as another user who wasn't on that stream
self.login('cordelia')
# Send a message to yourself to make sure we have at least one with the read flag
sent_message_ids = [
self.send_personal_message(
self.example_user("cordelia"),
self.example_user("cordelia"),
"test_read_message",
),
]
result = self.client_post("/json/messages/flags",
{"messages": ujson.dumps(sent_message_ids),
"op": "add",
"flag": "read"})
# We can't change flags other than "starred" on historical messages:
result = self.client_post("/json/messages/flags",
{"messages": ujson.dumps(message_ids),
"op": "add",
"flag": "read"})
self.assert_json_error(result, 'Invalid message(s)')
# Trying to change a list of more than one historical message fails
result = self.change_star(message_ids * 2)
self.assert_json_error(result, 'Invalid message(s)')
# Confirm that one can change the historical flag now
result = self.change_star(message_ids)
self.assert_json_success(result)
for msg in self.get_messages():
if msg['id'] in message_ids:
self.assertEqual(set(msg['flags']), {'starred', 'historical', 'read'})
elif msg['id'] in received_message_ids:
self.assertEqual(msg['flags'], [])
else:
self.assertEqual(msg['flags'], ['read'])
self.assertNotIn(msg['id'], other_message_ids)
result = self.change_star(message_ids, False)
self.assert_json_success(result)
# But it still doesn't work if you're in another realm
user = self.mit_user('sipbtest')
self.login_user(user)
result = self.change_star(message_ids, subdomain="zephyr")
self.assert_json_error(result, 'Invalid message(s)')
def test_change_star_private_message_security(self) -> None:
"""
You can set a message as starred/un-starred through
POST /json/messages/flags.
"""
self.login('hamlet')
message_ids = [
self.send_personal_message(
self.example_user("hamlet"),
self.example_user("hamlet"),
"test",
),
]
# Starring private messages you didn't receive fails.
self.login('cordelia')
result = self.change_star(message_ids)
self.assert_json_error(result, 'Invalid message(s)')
    def test_change_star_private_stream_security(self) -> None:
        """Starring private-stream messages requires access to the message:
        having received it, or (when history is public to subscribers)
        being subscribed to the stream.
        """
        stream_name = "private_stream"
        self.make_stream(stream_name, invite_only=True)
        self.subscribe(self.example_user("hamlet"), stream_name)
        self.login('hamlet')
        message_ids = [
            self.send_stream_message(self.example_user("hamlet"), stream_name, "test"),
        ]
        # Starring private stream messages you received works
        result = self.change_star(message_ids)
        self.assert_json_success(result)
        # Starring private stream messages you didn't receive fails.
        self.login('cordelia')
        result = self.change_star(message_ids)
        self.assert_json_error(result, 'Invalid message(s)')
        stream_name = "private_stream_2"
        self.make_stream(stream_name, invite_only=True,
                         history_public_to_subscribers=True)
        self.subscribe(self.example_user("hamlet"), stream_name)
        self.login('hamlet')
        message_ids = [
            self.send_stream_message(self.example_user("hamlet"), stream_name, "test"),
        ]
        # With stream.history_public_to_subscribers = True, you still
        # can't see it if you didn't receive the message and are
        # not subscribed.
        self.login('cordelia')
        result = self.change_star(message_ids)
        self.assert_json_error(result, 'Invalid message(s)')
        # But if you subscribe, then you can star the message
        self.subscribe(self.example_user("cordelia"), stream_name)
        result = self.change_star(message_ids)
        self.assert_json_success(result)
def test_new_message(self) -> None:
"""
New messages aren't starred.
"""
sender = self.example_user('hamlet')
self.login_user(sender)
content = "Test message for star"
self.send_stream_message(sender, "Verona",
content=content)
sent_message = UserMessage.objects.filter(
user_profile=self.example_user('hamlet')
).order_by("id").reverse()[0]
self.assertEqual(sent_message.message.content, content)
self.assertFalse(sent_message.flags.starred)
    def test_change_star_public_stream_security_for_guest_user(self) -> None:
        """Guests may star public-stream messages only once subscribed,
        including messages sent before they joined.
        """
        # Guest user can't access(star) unsubscribed public stream messages
        normal_user = self.example_user("hamlet")
        stream_name = "public_stream"
        self.make_stream(stream_name)
        self.subscribe(normal_user, stream_name)
        self.login_user(normal_user)
        message_id = [
            self.send_stream_message(normal_user, stream_name, "test 1")
        ]
        guest_user = self.example_user('polonius')
        self.login_user(guest_user)
        result = self.change_star(message_id)
        self.assert_json_error(result, 'Invalid message(s)')
        # Subscribed guest users can access public stream messages sent before they join
        self.subscribe(guest_user, stream_name)
        result = self.change_star(message_id)
        self.assert_json_success(result)
        # And messages sent after they join
        self.login_user(normal_user)
        message_id = [
            self.send_stream_message(normal_user, stream_name, "test 2")
        ]
        self.login_user(guest_user)
        result = self.change_star(message_id)
        self.assert_json_success(result)
    def test_change_star_private_stream_security_for_guest_user(self) -> None:
        """Guests may star private-stream messages only when the stream's
        history is public to subscribers, or for messages sent after they
        subscribed.
        """
        # Guest users can't access(star) unsubscribed private stream messages
        normal_user = self.example_user("hamlet")
        stream_name = "private_stream"
        stream = self.make_stream(stream_name, invite_only=True)
        self.subscribe(normal_user, stream_name)
        self.login_user(normal_user)
        message_id = [
            self.send_stream_message(normal_user, stream_name, "test 1")
        ]
        guest_user = self.example_user('polonius')
        self.login_user(guest_user)
        result = self.change_star(message_id)
        self.assert_json_error(result, 'Invalid message(s)')
        # Guest user can't access messages of subscribed private streams if
        # history is not public to subscribers
        self.subscribe(guest_user, stream_name)
        result = self.change_star(message_id)
        self.assert_json_error(result, 'Invalid message(s)')
        # Guest user can access messages of subscribed private streams if
        # history is public to subscribers
        do_change_stream_invite_only(stream, True, history_public_to_subscribers=True)
        result = self.change_star(message_id)
        self.assert_json_success(result)
        # With history not public to subscribers, they can still see new messages
        do_change_stream_invite_only(stream, True, history_public_to_subscribers=False)
        self.login_user(normal_user)
        message_id = [
            self.send_stream_message(normal_user, stream_name, "test 2")
        ]
        self.login_user(guest_user)
        result = self.change_star(message_id)
        self.assert_json_success(result)
def test_bulk_access_messages_private_stream(self) -> None:
    """bulk_access_messages() should filter out private-stream messages
    sent before a user subscribed while the stream's history is
    protected, return them once history is public to subscribers, and
    return nothing for a user who never subscribed."""
    user = self.example_user("hamlet")
    self.login_user(user)
    stream_name = "private_stream"
    stream = self.make_stream(stream_name, invite_only=True,
                              history_public_to_subscribers=False)
    self.subscribe(user, stream_name)
    # Send a message before subscribing a new user to stream
    message_one_id = self.send_stream_message(user,
                                              stream_name, "Message one")
    later_subscribed_user = self.example_user("cordelia")
    # Subscribe a user to private-protected history stream
    self.subscribe(later_subscribed_user, stream_name)
    # Send a message after subscribing a new user to stream
    message_two_id = self.send_stream_message(user,
                                              stream_name, "Message two")
    message_ids = [message_one_id, message_two_id]
    messages = [Message.objects.select_related().get(id=message_id)
                for message_id in message_ids]
    filtered_messages = bulk_access_messages(later_subscribed_user, messages)
    # A message sent before subscribing isn't accessible to the later
    # subscribed user, as the stream has protected history.
    self.assertEqual(len(filtered_messages), 1)
    self.assertEqual(filtered_messages[0].id, message_two_id)
    do_change_stream_invite_only(stream, True, history_public_to_subscribers=True)
    filtered_messages = bulk_access_messages(later_subscribed_user, messages)
    # Messages sent before subscribing are accessible to the user once
    # the stream no longer has protected history.
    self.assertEqual(len(filtered_messages), 2)
    # Testing message accessibility for an unsubscribed user.
    unsubscribed_user = self.example_user("ZOE")
    filtered_messages = bulk_access_messages(unsubscribed_user, messages)
    self.assertEqual(len(filtered_messages), 0)
def test_bulk_access_messages_public_stream(self) -> None:
    """Public-stream messages are always accessible: bulk_access_messages()
    filters nothing, regardless of when (or whether) the querying user
    subscribed."""
    sender = self.example_user("hamlet")
    self.login_user(sender)
    # Testing message accessibility including a public stream message.
    stream_name = "public_stream"
    self.subscribe(sender, stream_name)
    early_msg_id = self.send_stream_message(sender,
                                            stream_name, "Message one")
    late_subscriber = self.example_user("cordelia")
    self.subscribe(late_subscriber, stream_name)
    # This message goes out after the second user has subscribed.
    late_msg_id = self.send_stream_message(sender,
                                           stream_name, "Message two")
    messages = [
        Message.objects.select_related().get(id=msg_id)
        for msg_id in (early_msg_id, late_msg_id)
    ]
    # Both messages are visible to the late subscriber...
    visible = bulk_access_messages(late_subscriber, messages)
    self.assertEqual(len(visible), 2)
    # ...and even to a user who never subscribed at all.
    outsider = self.example_user("ZOE")
    visible = bulk_access_messages(outsider, messages)
    self.assertEqual(len(visible), 2)
class MessageHasKeywordsTest(ZulipTestCase):
    '''Test for keywords like has_link, has_image, has_attachment.'''

    def setup_dummy_attachments(self, user_profile: UserProfile) -> List[str]:
        """Create three Attachment rows owned by user_profile and return
        their path ids (the path component used in /user_uploads/ URLs)."""
        sample_size = 10
        realm_id = user_profile.realm_id
        dummy_files = [
            ('zulip.txt', '%s/31/4CBjtTLYZhk66pZrF8hnYGwc/zulip.txt' % (realm_id,), sample_size),
            ('temp_file.py', '%s/31/4CBjtTLYZhk66pZrF8hnYGwc/temp_file.py' % (realm_id,), sample_size),
            ('abc.py', '%s/31/4CBjtTLYZhk66pZrF8hnYGwc/abc.py' % (realm_id,), sample_size)
        ]
        for file_name, path_id, size in dummy_files:
            create_attachment(file_name, path_id, user_profile, size)
        # return path ids
        return [x[1] for x in dummy_files]

    def test_claim_attachment(self) -> None:
        """An attachment becomes 'claimed' only when a message references
        it in a form that Bugdown actually parses as a link (so code
        spans and malformed references do not claim it)."""
        user_profile = self.example_user('hamlet')
        dummy_path_ids = self.setup_dummy_attachments(user_profile)
        dummy_urls = ["http://zulip.testserver/user_uploads/{}".format(x) for x in dummy_path_ids]

        # Send message referring the attachment
        self.subscribe(user_profile, "Denmark")

        def assert_attachment_claimed(path_id: str, claimed: bool) -> None:
            attachment = Attachment.objects.get(path_id=path_id)
            self.assertEqual(attachment.is_claimed(), claimed)

        # This message should claim attachments 1 only because attachment 2
        # is not being parsed as a link by Bugdown.
        body = ("Some files here ...[zulip.txt]({})" +
                "{}.... Some more...." +
                "{}").format(dummy_urls[0], dummy_urls[1], dummy_urls[1])
        self.send_stream_message(user_profile, "Denmark", body, "test")
        assert_attachment_claimed(dummy_path_ids[0], True)
        assert_attachment_claimed(dummy_path_ids[1], False)

        # This message tries to claim the third attachment but fails because
        # Bugdown would not set has_attachments = True here.
        body = "Link in code: `{}`".format(dummy_urls[2])
        self.send_stream_message(user_profile, "Denmark", body, "test")
        assert_attachment_claimed(dummy_path_ids[2], False)

        # Another scenario where we wouldn't parse the link.
        body = "Link to not parse: .{}.`".format(dummy_urls[2])
        self.send_stream_message(user_profile, "Denmark", body, "test")
        assert_attachment_claimed(dummy_path_ids[2], False)

        # Finally, claim attachment 3.
        body = "Link: {}".format(dummy_urls[2])
        self.send_stream_message(user_profile, "Denmark", body, "test")
        assert_attachment_claimed(dummy_path_ids[2], True)
        assert_attachment_claimed(dummy_path_ids[1], False)

    def test_finds_all_links(self) -> None:
        """has_link is set for bare domains, markdown links, and explicit
        http URLs alike."""
        msg_ids = []
        msg_contents = ["foo.org", "[bar](baz.gov)", "http://quux.ca"]
        for msg_content in msg_contents:
            msg_ids.append(self.send_stream_message(self.example_user('hamlet'),
                                                    'Denmark', content=msg_content))
        msgs = [Message.objects.get(id=id) for id in msg_ids]
        self.assertTrue(all([msg.has_link for msg in msgs]))

    def test_finds_only_links(self) -> None:
        """URLs inside code spans / math, and plain words, should not all
        count as links (at least one of these must have has_link False)."""
        msg_ids = []
        msg_contents = ["`example.org`", '``example.org```', '$$https://example.org$$', "foo"]
        for msg_content in msg_contents:
            msg_ids.append(self.send_stream_message(self.example_user('hamlet'),
                                                    'Denmark', content=msg_content))
        msgs = [Message.objects.get(id=id) for id in msg_ids]
        self.assertFalse(all([msg.has_link for msg in msgs]))

    def update_message(self, msg: Message, content: str) -> None:
        """Edit msg's content through do_update_message(), re-rendering
        the markdown so the has_* flags are recomputed."""
        hamlet = self.example_user('hamlet')
        realm_id = hamlet.realm.id
        rendered_content = render_markdown(msg, content)
        mention_data = bugdown.MentionData(realm_id, content)
        do_update_message(hamlet, msg, None, None, "change_one", content,
                          rendered_content, set(), set(), mention_data=mention_data)

    def test_finds_link_after_edit(self) -> None:
        """has_link is recomputed on every edit, in both directions."""
        hamlet = self.example_user('hamlet')
        msg_id = self.send_stream_message(hamlet, 'Denmark', content='a')
        msg = Message.objects.get(id=msg_id)

        self.assertFalse(msg.has_link)
        self.update_message(msg, 'a http://foo.com')
        self.assertTrue(msg.has_link)
        self.update_message(msg, 'a')
        self.assertFalse(msg.has_link)
        # Check in blockquotes work
        self.update_message(msg, '> http://bar.com')
        self.assertTrue(msg.has_link)
        # A URL inside a code span is not a link.
        self.update_message(msg, 'a `http://foo.com`')
        self.assertFalse(msg.has_link)

    def test_has_image(self) -> None:
        """has_image is set only for URLs with image extensions (e.g. .png),
        whether bare or inside a markdown link; .pdf does not count."""
        msg_ids = []
        msg_contents = ["Link: foo.org",
                        "Image: https://www.google.com/images/srpr/logo4w.png",
                        "Image: https://www.google.com/images/srpr/logo4w.pdf",
                        "[Google Link](https://www.google.com/images/srpr/logo4w.png)"]
        for msg_content in msg_contents:
            msg_ids.append(self.send_stream_message(self.example_user('hamlet'),
                                                    'Denmark', content=msg_content))
        msgs = [Message.objects.get(id=id) for id in msg_ids]
        self.assertEqual([False, True, False, True], [msg.has_image for msg in msgs])

        # Like has_link, has_image is recomputed on edit.
        self.update_message(msgs[0], 'https://www.google.com/images/srpr/logo4w.png')
        self.assertTrue(msgs[0].has_image)
        self.update_message(msgs[0], 'No Image Again')
        self.assertFalse(msgs[0].has_image)

    def test_has_attachment(self) -> None:
        """has_attachment tracks /user_uploads/ links through edits, and
        the attachment_set M2M stays consistent with the rendered links."""
        hamlet = self.example_user('hamlet')
        dummy_path_ids = self.setup_dummy_attachments(hamlet)
        dummy_urls = ["http://zulip.testserver/user_uploads/{}".format(x) for x in dummy_path_ids]
        self.subscribe(hamlet, "Denmark")

        body = ("Files ...[zulip.txt]({}) {} {}").format(dummy_urls[0], dummy_urls[1], dummy_urls[2])
        msg_id = self.send_stream_message(hamlet, "Denmark", body, "test")
        msg = Message.objects.get(id=msg_id)
        self.assertTrue(msg.has_attachment)
        self.update_message(msg, 'No Attachments')
        self.assertFalse(msg.has_attachment)
        self.update_message(msg, body)
        self.assertTrue(msg.has_attachment)
        # A code span is not a parsed link, so no attachment.
        self.update_message(msg, 'Link in code: `{}`'.format(dummy_urls[1]))
        self.assertFalse(msg.has_attachment)
        # Test blockquotes
        self.update_message(msg, '> {}'.format(dummy_urls[1]))
        self.assertTrue(msg.has_attachment)

        # Additional test to check has_attachment is being set is due to the correct attachment.
        self.update_message(msg, 'Outside: {}. In code: `{}`.'.format(dummy_urls[0], dummy_urls[1]))
        self.assertTrue(msg.has_attachment)
        self.assertTrue(msg.attachment_set.filter(path_id=dummy_path_ids[0]))
        self.assertEqual(msg.attachment_set.count(), 1)

        self.update_message(msg, 'Outside: {}. In code: `{}`.'.format(dummy_urls[1], dummy_urls[0]))
        self.assertTrue(msg.has_attachment)
        self.assertTrue(msg.attachment_set.filter(path_id=dummy_path_ids[1]))
        self.assertEqual(msg.attachment_set.count(), 1)

        self.update_message(msg, 'Both in code: `{} {}`.'.format(dummy_urls[1], dummy_urls[0]))
        self.assertFalse(msg.has_attachment)
        self.assertEqual(msg.attachment_set.count(), 0)

    def test_potential_attachment_path_ids(self) -> None:
        """do_claim_attachments() should be invoked only for absolute or
        root-relative /user_uploads/ links that are new to the message."""
        hamlet = self.example_user('hamlet')
        self.subscribe(hamlet, "Denmark")
        dummy_path_ids = self.setup_dummy_attachments(hamlet)

        body = "Hello"
        msg_id = self.send_stream_message(hamlet, "Denmark", body, "test")
        msg = Message.objects.get(id=msg_id)

        with mock.patch("zerver.lib.actions.do_claim_attachments",
                        wraps=do_claim_attachments) as m:
            # Absolute URL on the realm's own host: claimed.
            self.update_message(msg, '[link](http://{}/user_uploads/{})'.format(
                hamlet.realm.host, dummy_path_ids[0]))
            self.assertTrue(m.called)
            m.reset_mock()

            # Root-relative URL: claimed.
            self.update_message(msg, '[link](/user_uploads/{})'.format(dummy_path_ids[1]))
            self.assertTrue(m.called)
            m.reset_mock()

            # Same attachment already linked; editing only the link text
            # should not re-claim it.
            self.update_message(msg, '[new text link](/user_uploads/{})'.format(dummy_path_ids[1]))
            self.assertFalse(m.called)
            m.reset_mock()

            # It's not clear this is correct behavior
            self.update_message(msg, '[link](user_uploads/{})'.format(dummy_path_ids[2]))
            self.assertFalse(m.called)
            m.reset_mock()

            # A /user_uploads/ path on a foreign host is not ours to claim.
            self.update_message(msg, '[link](https://github.com/user_uploads/{})'.format(
                dummy_path_ids[0]))
            self.assertFalse(m.called)
            m.reset_mock()
class MissedMessageTest(ZulipTestCase):
    def test_presence_idle_user_ids(self) -> None:
        """get_active_presence_idle_user_ids() should report the recipients
        who would care about the message (all PM recipients; only
        mentioned users for stream messages) and who lack recent
        'active' presence.

        NOTE: the closed-over message_type and user_flags are mutated
        between assert_missing() calls below, so statement order matters.
        """
        UserPresence.objects.all().delete()

        sender = self.example_user('cordelia')
        realm = sender.realm
        hamlet = self.example_user('hamlet')
        othello = self.example_user('othello')
        recipient_ids = {hamlet.id, othello.id}
        message_type = 'stream'
        user_flags: Dict[int, List[str]] = {}

        def assert_missing(user_ids: List[int]) -> None:
            # Sorted comparison: the function's ordering is unspecified.
            presence_idle_user_ids = get_active_presence_idle_user_ids(
                realm=realm,
                sender_id=sender.id,
                message_type=message_type,
                active_user_ids=recipient_ids,
                user_flags=user_flags,
            )
            self.assertEqual(sorted(user_ids), sorted(presence_idle_user_ids))

        def set_presence(user: UserProfile, client_name: str, ago: int) -> None:
            # Record a presence for `user` `ago` seconds in the past.
            when = timezone_now() - datetime.timedelta(seconds=ago)
            UserPresence.objects.create(
                user_profile_id=user.id,
                realm_id=user.realm_id,
                client=get_client(client_name),
                timestamp=when,
            )

        # For PMs, both recipients (neither has presence) are idle.
        message_type = 'private'
        assert_missing([hamlet.id, othello.id])

        # For stream messages, only the mentioned user is considered.
        message_type = 'stream'
        user_flags[hamlet.id] = ['mentioned']
        assert_missing([hamlet.id])

        # A stale iPhone presence from 5000s ago doesn't make Hamlet active...
        set_presence(hamlet, 'iPhone', ago=5000)
        assert_missing([hamlet.id])

        # ...but a fresh webapp presence does.
        set_presence(hamlet, 'webapp', ago=15)
        assert_missing([])

        # Othello still has no presence, so he remains idle for PMs.
        message_type = 'private'
        assert_missing([othello.id])
class LogDictTest(ZulipTestCase):
    def test_to_log_dict(self) -> None:
        """Message.to_log_dict() should capture sender, recipient, topic,
        content, and client metadata for a stream message."""
        sender = self.example_user('hamlet')
        message_id = self.send_stream_message(sender, 'Denmark',
                                              topic_name='Copenhagen',
                                              content='find me some good coffee shops')
        message = Message.objects.get(id=message_id)
        log_dict = message.to_log_dict()

        self.assertIn('timestamp', log_dict)
        # Every remaining field should round-trip exactly.
        expected_fields = {
            'content': 'find me some good coffee shops',
            'id': message.id,
            'recipient': 'Denmark',
            'sender_realm_str': 'zulip',
            'sender_email': sender.email,
            'sender_full_name': 'King Hamlet',
            'sender_id': sender.id,
            'sender_short_name': 'hamlet',
            'sending_client': 'test suite',
            DB_TOPIC_NAME: 'Copenhagen',
            'type': 'stream',
        }
        for field, expected_value in expected_fields.items():
            self.assertEqual(log_dict[field], expected_value)
class CheckMessageTest(ZulipTestCase):
    def test_basic_check_message_call(self) -> None:
        """check_message() on a valid stream/topic returns a dict whose
        'message' entry carries the sender."""
        sender = self.example_user('othello')
        client = make_client(name="test suite")
        stream_name = 'España y Francia'
        self.make_stream(stream_name)
        topic_name = 'issue'
        message_content = 'whatever'
        addressee = Addressee.for_stream_name(stream_name, topic_name)
        ret = check_message(sender, client, addressee, message_content)
        self.assertEqual(ret['message'].sender.id, sender.id)

    def test_bot_pm_feature(self) -> None:
        """We send a PM to a bot's owner if their bot sends a message to
        an unsubscribed stream"""
        parent = self.example_user('othello')
        bot = do_create_user(
            email='othello-bot@zulip.com',
            password='',
            realm=parent.realm,
            full_name='',
            short_name='',
            bot_type=UserProfile.DEFAULT_BOT,
            bot_owner=parent
        )
        bot.last_reminder = None

        sender = bot
        client = make_client(name="test suite")
        stream_name = 'Россия'
        topic_name = 'issue'
        addressee = Addressee.for_stream_name(stream_name, topic_name)
        message_content = 'whatever'
        old_count = message_stream_count(parent)

        # Sending to a stream that doesn't exist raises, and sends a
        # reminder PM to the bot's owner.
        with self.assertRaises(JsonableError):
            check_message(sender, client, addressee, message_content)

        new_count = message_stream_count(parent)
        self.assertEqual(new_count, old_count + 1)
        self.assertIn("that stream does not exist.", most_recent_message(parent).content)

        # Try sending to stream that exists with no subscribers soon
        # after; due to rate-limiting, this should send nothing.
        self.make_stream(stream_name)
        ret = check_message(sender, client, addressee, message_content)
        new_count = message_stream_count(parent)
        self.assertEqual(new_count, old_count + 1)

        # Try sending to stream that exists with no subscribers longer
        # after (simulated by backdating last_reminder one hour); this
        # should send a "no subscribers" notification to the bot owner.
        assert(sender.last_reminder is not None)
        sender.last_reminder = sender.last_reminder - datetime.timedelta(hours=1)
        sender.save(update_fields=["last_reminder"])
        ret = check_message(sender, client, addressee, message_content)

        new_count = message_stream_count(parent)
        self.assertEqual(new_count, old_count + 2)
        self.assertEqual(ret['message'].sender.email, 'othello-bot@zulip.com')
        self.assertIn("does not have any subscribers", most_recent_message(parent).content)

    def test_bot_pm_error_handling(self) -> None:
        # This just tests some defensive code: in each mismatch case below,
        # no reminder should be recorded on the bot.
        cordelia = self.example_user('cordelia')
        test_bot = self.create_test_bot(
            short_name='test',
            user_profile=cordelia,
        )
        content = 'whatever'
        good_realm = test_bot.realm
        wrong_realm = get_realm("zephyr")
        wrong_sender = cordelia

        # Realm mismatch: no reminder recorded.
        send_rate_limited_pm_notification_to_bot_owner(test_bot, wrong_realm, content)
        self.assertEqual(test_bot.last_reminder, None)

        # Sender is not the bot: test_bot's reminder stays unset.
        send_rate_limited_pm_notification_to_bot_owner(wrong_sender, good_realm, content)
        self.assertEqual(test_bot.last_reminder, None)

        # Deactivated realm: no reminder recorded.
        test_bot.realm.deactivated = True
        send_rate_limited_pm_notification_to_bot_owner(test_bot, good_realm, content)
        self.assertEqual(test_bot.last_reminder, None)
class DeleteMessageTest(ZulipTestCase):
    def test_delete_message_invalid_request_format(self) -> None:
        """The message id must come from the URL path: a mismatched path id
        is rejected even if the real id appears in the request body."""
        self.login('iago')
        hamlet = self.example_user('hamlet')
        msg_id = self.send_stream_message(hamlet, "Scotland")
        # Wrong id in the path, real id in the body: rejected.
        result = self.client_delete('/json/messages/{msg_id}'.format(msg_id=msg_id + 1),
                                    {'message_id': msg_id})
        self.assert_json_error(result, "Invalid message(s)")
        # Correct id in the path: accepted.
        result = self.client_delete('/json/messages/{msg_id}'.format(msg_id=msg_id))
        self.assert_json_success(result)

    def test_delete_message_by_user(self) -> None:
        """Exercise the realm settings that govern message deletion:
        admins may always delete; message owners only when deletion is
        enabled and (if a limit is set) within the time limit; other
        users never."""
        def set_message_deleting_params(allow_message_deleting: bool,
                                        message_content_delete_limit_seconds: int) -> None:
            # Update the realm's deletion policy as an admin.
            self.login('iago')
            result = self.client_patch("/json/realm", {
                'allow_message_deleting': ujson.dumps(allow_message_deleting),
                'message_content_delete_limit_seconds': message_content_delete_limit_seconds
            })
            self.assert_json_success(result)

        def test_delete_message_by_admin(msg_id: int) -> HttpResponse:
            self.login('iago')
            result = self.client_delete('/json/messages/{msg_id}'.format(msg_id=msg_id))
            return result

        def test_delete_message_by_owner(msg_id: int) -> HttpResponse:
            self.login('hamlet')
            result = self.client_delete('/json/messages/{msg_id}'.format(msg_id=msg_id))
            return result

        def test_delete_message_by_other_user(msg_id: int) -> HttpResponse:
            self.login('cordelia')
            result = self.client_delete('/json/messages/{msg_id}'.format(msg_id=msg_id))
            return result

        # Test when message deleting is not allowed (the default).
        set_message_deleting_params(False, 0)
        hamlet = self.example_user('hamlet')
        self.login_user(hamlet)
        msg_id = self.send_stream_message(hamlet, "Scotland")

        result = test_delete_message_by_owner(msg_id=msg_id)
        self.assert_json_error(result, "You don't have permission to delete this message")

        result = test_delete_message_by_other_user(msg_id=msg_id)
        self.assert_json_error(result, "You don't have permission to delete this message")

        # Admins can delete regardless of the setting.
        result = test_delete_message_by_admin(msg_id=msg_id)
        self.assert_json_success(result)

        # Test when message deleting is allowed.
        # Test when the time limit is zero (no limit).
        set_message_deleting_params(True, 0)
        msg_id = self.send_stream_message(hamlet, "Scotland")
        # Backdate the message; with no limit it's still deletable by owner.
        message = Message.objects.get(id=msg_id)
        message.date_sent = message.date_sent - datetime.timedelta(seconds=600)
        message.save()

        result = test_delete_message_by_other_user(msg_id=msg_id)
        self.assert_json_error(result, "You don't have permission to delete this message")

        result = test_delete_message_by_owner(msg_id=msg_id)
        self.assert_json_success(result)

        # Test when the time limit is non-zero (240s): msg 1 is 120s old
        # (inside the limit), msg 2 is 360s old (past it).
        set_message_deleting_params(True, 240)
        msg_id_1 = self.send_stream_message(hamlet, "Scotland")
        message = Message.objects.get(id=msg_id_1)
        message.date_sent = message.date_sent - datetime.timedelta(seconds=120)
        message.save()

        msg_id_2 = self.send_stream_message(hamlet, "Scotland")
        message = Message.objects.get(id=msg_id_2)
        message.date_sent = message.date_sent - datetime.timedelta(seconds=360)
        message.save()

        result = test_delete_message_by_other_user(msg_id=msg_id_1)
        self.assert_json_error(result, "You don't have permission to delete this message")

        result = test_delete_message_by_owner(msg_id=msg_id_1)
        self.assert_json_success(result)
        result = test_delete_message_by_owner(msg_id=msg_id_2)
        self.assert_json_error(result, "The time limit for deleting this message has passed")

        # No limit for admin.
        result = test_delete_message_by_admin(msg_id=msg_id_2)
        self.assert_json_success(result)

        # Test multiple delete requests with no latency issues
        msg_id = self.send_stream_message(hamlet, "Scotland")
        result = test_delete_message_by_owner(msg_id=msg_id)
        self.assert_json_success(result)
        result = test_delete_message_by_owner(msg_id=msg_id)
        self.assert_json_error(result, "Invalid message(s)")

        # Test handling of 500 error caused by multiple delete requests due to latency.
        # see issue #11219.
        with mock.patch("zerver.views.messages.do_delete_messages") as m, \
                mock.patch("zerver.views.messages.validate_can_delete_message", return_value=None), \
                mock.patch("zerver.views.messages.access_message", return_value=(None, None)):
            # A database IntegrityError from the race maps to a friendly error.
            m.side_effect = IntegrityError()
            result = test_delete_message_by_owner(msg_id=msg_id)
            self.assert_json_error(result, "Message already deleted")
            # So does the row having vanished between check and delete.
            m.side_effect = Message.DoesNotExist()
            result = test_delete_message_by_owner(msg_id=msg_id)
            self.assert_json_error(result, "Message already deleted")
class SoftDeactivationMessageTest(ZulipTestCase):
def test_reactivate_user_if_soft_deactivated(self) -> None:
    """Reactivating a soft-deactivated user should clear long_term_idle,
    record a USER_SOFT_ACTIVATED audit-log entry, backfill the missing
    UserMessage rows, and advance last_active_message_id."""
    recipient_list = [self.example_user("hamlet"), self.example_user("iago")]
    for user_profile in recipient_list:
        self.subscribe(user_profile, "Denmark")

    sender = self.example_user('iago')
    stream_name = 'Denmark'
    topic_name = 'foo'

    def last_realm_audit_log_entry(event_type: int) -> RealmAuditLog:
        # Most recent RealmAuditLog row of the given event type.
        return RealmAuditLog.objects.filter(
            event_type=event_type
        ).order_by('-event_time')[0]

    long_term_idle_user = self.example_user('hamlet')
    # We are sending this message to ensure that long_term_idle_user has
    # at least one UserMessage row.
    self.send_stream_message(long_term_idle_user, stream_name)
    do_soft_deactivate_users([long_term_idle_user])

    message = 'Test Message 1'
    message_id = self.send_stream_message(sender, stream_name,
                                          message, topic_name)
    idle_user_msg_list = get_user_messages(long_term_idle_user)
    idle_user_msg_count = len(idle_user_msg_list)
    # While soft-deactivated, no UserMessage row was created for this message.
    self.assertNotEqual(idle_user_msg_list[-1].content, message)
    with queries_captured() as queries:
        reactivate_user_if_soft_deactivated(long_term_idle_user)
    # Pin the query count so regressions in reactivation cost are caught.
    self.assert_length(queries, 8)
    self.assertFalse(long_term_idle_user.long_term_idle)
    self.assertEqual(last_realm_audit_log_entry(
        RealmAuditLog.USER_SOFT_ACTIVATED).modified_user, long_term_idle_user)
    # The missing UserMessage row was backfilled.
    idle_user_msg_list = get_user_messages(long_term_idle_user)
    self.assertEqual(len(idle_user_msg_list), idle_user_msg_count + 1)
    self.assertEqual(idle_user_msg_list[-1].content, message)
    long_term_idle_user.refresh_from_db()
    self.assertEqual(long_term_idle_user.last_active_message_id, message_id)
def test_add_missing_messages(self) -> None:
    """End-to-end checks for add_missing_messages(): UserMessage rows are
    created only for messages sent while the soft-deactivated user was
    subscribed, across subscribe/unsubscribe churn, for both public and
    private streams; last_active_message_id is kept current."""
    recipient_list = [self.example_user("hamlet"), self.example_user("iago")]
    for user_profile in recipient_list:
        self.subscribe(user_profile, "Denmark")

    sender = self.example_user('iago')
    realm = sender.realm
    sending_client = make_client(name="test suite")
    stream_name = 'Denmark'
    stream = get_stream(stream_name, realm)
    topic_name = 'foo'

    def send_fake_message(message_content: str, stream: Stream) -> Message:
        # Save a Message row directly (bypassing do_send_messages), so
        # that no UserMessage rows are created as a side effect.
        recipient = stream.recipient
        message = Message(sender = sender,
                          recipient = recipient,
                          content = message_content,
                          date_sent = timezone_now(),
                          sending_client = sending_client)
        message.set_topic_name(topic_name)
        message.save()
        return message

    long_term_idle_user = self.example_user('hamlet')
    self.send_stream_message(long_term_idle_user, stream_name)
    do_soft_deactivate_users([long_term_idle_user])

    # Test add_missing_messages() in the simplest case: adding a
    # message for which no UserMessage row exists for this user.
    sent_message = send_fake_message('Test Message 1', stream)
    idle_user_msg_list = get_user_messages(long_term_idle_user)
    idle_user_msg_count = len(idle_user_msg_list)
    self.assertNotEqual(idle_user_msg_list[-1], sent_message)
    with queries_captured() as queries:
        add_missing_messages(long_term_idle_user)
    self.assert_length(queries, 6)
    idle_user_msg_list = get_user_messages(long_term_idle_user)
    self.assertEqual(len(idle_user_msg_list), idle_user_msg_count + 1)
    self.assertEqual(idle_user_msg_list[-1], sent_message)
    long_term_idle_user.refresh_from_db()
    self.assertEqual(long_term_idle_user.last_active_message_id, sent_message.id)

    # Test that add_missing_messages() only adds messages that aren't
    # already present in the UserMessage table. This test works on the
    # fact that the previous test just above added a message but didn't
    # update the last_active_message_id field for the user.
    sent_message = send_fake_message('Test Message 2', stream)
    idle_user_msg_list = get_user_messages(long_term_idle_user)
    idle_user_msg_count = len(idle_user_msg_list)
    self.assertNotEqual(idle_user_msg_list[-1], sent_message)
    with queries_captured() as queries:
        add_missing_messages(long_term_idle_user)
    self.assert_length(queries, 7)
    idle_user_msg_list = get_user_messages(long_term_idle_user)
    self.assertEqual(len(idle_user_msg_list), idle_user_msg_count + 1)
    self.assertEqual(idle_user_msg_list[-1], sent_message)
    long_term_idle_user.refresh_from_db()
    self.assertEqual(long_term_idle_user.last_active_message_id, sent_message.id)

    # Test UserMessage rows are created correctly in case of stream
    # subscription being altered by an admin while the user was away.

    # Test for a public stream.
    sent_message_list = []
    sent_message_list.append(send_fake_message('Test Message 3', stream))
    # Alter subscription to stream: message 4 is sent while unsubscribed
    # and must NOT get a UserMessage row.
    self.unsubscribe(long_term_idle_user, stream_name)
    send_fake_message('Test Message 4', stream)
    self.subscribe(long_term_idle_user, stream_name)
    sent_message_list.append(send_fake_message('Test Message 5', stream))
    sent_message_list.reverse()
    idle_user_msg_list = get_user_messages(long_term_idle_user)
    idle_user_msg_count = len(idle_user_msg_list)
    for sent_message in sent_message_list:
        self.assertNotEqual(idle_user_msg_list.pop(), sent_message)
    with queries_captured() as queries:
        add_missing_messages(long_term_idle_user)
    self.assert_length(queries, 6)
    idle_user_msg_list = get_user_messages(long_term_idle_user)
    # Exactly the two messages sent while subscribed were added.
    self.assertEqual(len(idle_user_msg_list), idle_user_msg_count + 2)
    for sent_message in sent_message_list:
        self.assertEqual(idle_user_msg_list.pop(), sent_message)
    long_term_idle_user.refresh_from_db()
    self.assertEqual(long_term_idle_user.last_active_message_id, sent_message_list[0].id)

    # Test consecutive subscribe/unsubscribe in a public stream
    sent_message_list = []

    sent_message_list.append(send_fake_message('Test Message 6', stream))
    # Unsubscribe from stream and then immediately subscribe back again.
    self.unsubscribe(long_term_idle_user, stream_name)
    self.subscribe(long_term_idle_user, stream_name)
    sent_message_list.append(send_fake_message('Test Message 7', stream))
    # Again unsubscribe from stream and send a message.
    # This will make sure that if initially in a unsubscribed state
    # a consecutive subscribe/unsubscribe doesn't misbehave.
    self.unsubscribe(long_term_idle_user, stream_name)
    send_fake_message('Test Message 8', stream)
    # Do a subscribe and unsubscribe immediately.
    self.subscribe(long_term_idle_user, stream_name)
    self.unsubscribe(long_term_idle_user, stream_name)

    sent_message_list.reverse()
    idle_user_msg_list = get_user_messages(long_term_idle_user)
    idle_user_msg_count = len(idle_user_msg_list)
    for sent_message in sent_message_list:
        self.assertNotEqual(idle_user_msg_list.pop(), sent_message)
    with queries_captured() as queries:
        add_missing_messages(long_term_idle_user)
    self.assert_length(queries, 6)
    idle_user_msg_list = get_user_messages(long_term_idle_user)
    self.assertEqual(len(idle_user_msg_list), idle_user_msg_count + 2)
    for sent_message in sent_message_list:
        self.assertEqual(idle_user_msg_list.pop(), sent_message)
    long_term_idle_user.refresh_from_db()
    self.assertEqual(long_term_idle_user.last_active_message_id, sent_message_list[0].id)

    # Test for when user unsubscribes before soft deactivation
    # (must reactivate them in order to do this).

    do_soft_activate_users([long_term_idle_user])
    self.subscribe(long_term_idle_user, stream_name)
    # Send a real message to update last_active_message_id
    sent_message_id = self.send_stream_message(
        sender, stream_name, 'Test Message 9')
    self.unsubscribe(long_term_idle_user, stream_name)
    # Soft deactivate and send another message to the unsubscribed stream.
    do_soft_deactivate_users([long_term_idle_user])
    send_fake_message('Test Message 10', stream)

    idle_user_msg_list = get_user_messages(long_term_idle_user)
    idle_user_msg_count = len(idle_user_msg_list)
    self.assertEqual(idle_user_msg_list[-1].id, sent_message_id)
    with queries_captured() as queries:
        add_missing_messages(long_term_idle_user)
    # There are no streams to fetch missing messages from, so
    # the Message.objects query will be avoided.
    self.assert_length(queries, 4)
    idle_user_msg_list = get_user_messages(long_term_idle_user)
    # No new UserMessage rows should have been created.
    self.assertEqual(len(idle_user_msg_list), idle_user_msg_count)

    # Note: At this point in this test we have long_term_idle_user
    # unsubscribed from the 'Denmark' stream.

    # Test for a private stream: only messages 12 and 14 (sent while the
    # idle user was subscribed) should be backfilled.
    stream_name = "Core"
    private_stream = self.make_stream('Core', invite_only=True)
    self.subscribe(self.example_user("iago"), stream_name)
    sent_message_list = []
    send_fake_message('Test Message 11', private_stream)
    self.subscribe(self.example_user("hamlet"), stream_name)
    sent_message_list.append(send_fake_message('Test Message 12', private_stream))
    self.unsubscribe(long_term_idle_user, stream_name)
    send_fake_message('Test Message 13', private_stream)
    self.subscribe(long_term_idle_user, stream_name)
    sent_message_list.append(send_fake_message('Test Message 14', private_stream))
    sent_message_list.reverse()
    idle_user_msg_list = get_user_messages(long_term_idle_user)
    idle_user_msg_count = len(idle_user_msg_list)
    for sent_message in sent_message_list:
        self.assertNotEqual(idle_user_msg_list.pop(), sent_message)
    with queries_captured() as queries:
        add_missing_messages(long_term_idle_user)
    self.assert_length(queries, 6)
    idle_user_msg_list = get_user_messages(long_term_idle_user)
    self.assertEqual(len(idle_user_msg_list), idle_user_msg_count + 2)
    for sent_message in sent_message_list:
        self.assertEqual(idle_user_msg_list.pop(), sent_message)
    long_term_idle_user.refresh_from_db()
    self.assertEqual(long_term_idle_user.last_active_message_id, sent_message_list[0].id)
@mock.patch('zerver.lib.soft_deactivation.BULK_CREATE_BATCH_SIZE', 2)
def test_add_missing_messages_pagination(self) -> None:
    """With the bulk-create batch size patched down to 2, backfilling
    five missing messages forces multiple insert batches; verify the
    resulting query count and that every row still gets created."""
    stream_name = 'Denmark'
    subscribers = [self.example_user("hamlet"), self.example_user("iago")]
    for subscriber in subscribers:
        self.subscribe(subscriber, stream_name)

    sender = self.example_user('iago')
    idle_user = self.example_user('hamlet')
    # Guarantee at least one UserMessage row, then soft-deactivate.
    self.send_stream_message(idle_user, stream_name)
    do_soft_deactivate_users([idle_user])

    num_new_messages = 5
    message_ids = [
        self.send_stream_message(sender, stream_name)
        for _ in range(num_new_messages)
    ]

    baseline_count = len(get_user_messages(idle_user))
    with queries_captured() as queries:
        add_missing_messages(idle_user)
    self.assert_length(queries, 10)

    backfilled = get_user_messages(idle_user)
    self.assertEqual(len(backfilled), baseline_count + num_new_messages)
    idle_user.refresh_from_db()
    self.assertEqual(idle_user.last_active_message_id, message_ids[-1])
    def test_user_message_filter(self) -> None:
        """Check which messages create UserMessage rows for a soft-deactivated user.

        Mirrors the filtering logic of do_send_messages() (zerver/lib/actions.py):
        ordinary stream traffic creates no row for the soft-deactivated user,
        while private messages, personal mentions, wildcard mentions, alert
        words, and streams with push notifications enabled still do.
        """
        recipient_list = [
            self.example_user("hamlet"),
            self.example_user("iago"),
            self.example_user('cordelia')
        ]
        for user_profile in recipient_list:
            self.subscribe(user_profile, "Denmark")
        cordelia = self.example_user('cordelia')
        sender = self.example_user('iago')
        stream_name = 'Denmark'
        topic_name = 'foo'
        # Small wrappers so each scenario below reads as a one-liner.
        def send_stream_message(content: str) -> None:
            self.send_stream_message(sender, stream_name,
                                     content, topic_name)
        def send_personal_message(content: str) -> None:
            self.send_personal_message(sender, self.example_user("hamlet"), content)
        long_term_idle_user = self.example_user('hamlet')
        # Send one message while hamlet is still active, then soft-deactivate him.
        self.send_stream_message(long_term_idle_user, stream_name)
        do_soft_deactivate_users([long_term_idle_user])
        # Assertion helpers: `assert_um_count` checks the number of UserMessage
        # rows; `assert_last_um_content` checks the content of the newest row.
        # With negate=True the last row must NOT match `content`, i.e. no row
        # was created for the message just sent.
        def assert_um_count(user: UserProfile, count: int) -> None:
            user_messages = get_user_messages(user)
            self.assertEqual(len(user_messages), count)
        def assert_last_um_content(user: UserProfile, content: str, negate: bool=False) -> None:
            user_messages = get_user_messages(user)
            if negate:
                self.assertNotEqual(user_messages[-1].content, content)
            else:
                self.assertEqual(user_messages[-1].content, content)
        # Test that sending a message to a stream with soft deactivated user
        # doesn't end up creating UserMessage row for deactivated user, while
        # an active subscriber (cordelia) still gets one as usual.
        general_user_msg_count = len(get_user_messages(cordelia))
        soft_deactivated_user_msg_count = len(get_user_messages(long_term_idle_user))
        message = 'Test Message 1'
        send_stream_message(message)
        assert_last_um_content(long_term_idle_user, message, negate=True)
        assert_um_count(long_term_idle_user, soft_deactivated_user_msg_count)
        assert_um_count(cordelia, general_user_msg_count + 1)
        assert_last_um_content(cordelia, message)
        # Test that sending a message to a stream with soft deactivated user
        # and push/email notifications on creates a UserMessage row for the
        # deactivated user.
        sub = get_subscription(stream_name, long_term_idle_user)
        sub.push_notifications = True
        sub.save()
        general_user_msg_count = len(get_user_messages(cordelia))
        soft_deactivated_user_msg_count = len(get_user_messages(long_term_idle_user))
        message = 'Test private stream message'
        send_stream_message(message)
        assert_um_count(long_term_idle_user, soft_deactivated_user_msg_count + 1)
        assert_last_um_content(long_term_idle_user, message)
        sub.push_notifications = False
        sub.save()
        # Test sending a private message to soft deactivated user creates
        # UserMessage row.
        soft_deactivated_user_msg_count = len(get_user_messages(long_term_idle_user))
        message = 'Test PM'
        send_personal_message(message)
        assert_um_count(long_term_idle_user, soft_deactivated_user_msg_count + 1)
        assert_last_um_content(long_term_idle_user, message)
        # Test UserMessage row is created while user is deactivated if
        # user itself is mentioned.
        general_user_msg_count = len(get_user_messages(cordelia))
        soft_deactivated_user_msg_count = len(get_user_messages(long_term_idle_user))
        message = 'Test @**King Hamlet** mention'
        send_stream_message(message)
        assert_last_um_content(long_term_idle_user, message)
        assert_um_count(long_term_idle_user, soft_deactivated_user_msg_count + 1)
        assert_um_count(cordelia, general_user_msg_count + 1)
        assert_last_um_content(cordelia, message)
        # Test UserMessage row is not created while user is deactivated if
        # anyone is mentioned but the user.
        general_user_msg_count = len(get_user_messages(cordelia))
        soft_deactivated_user_msg_count = len(get_user_messages(long_term_idle_user))
        message = 'Test @**Cordelia Lear** mention'
        send_stream_message(message)
        assert_last_um_content(long_term_idle_user, message, negate=True)
        assert_um_count(long_term_idle_user, soft_deactivated_user_msg_count)
        assert_um_count(cordelia, general_user_msg_count + 1)
        assert_last_um_content(cordelia, message)
        # Test UserMessage row is created while user is deactivated if
        # there is a wildcard mention such as @**all**, @**everyone** or
        # @**stream** (all three variants are exercised below).
        general_user_msg_count = len(get_user_messages(cordelia))
        soft_deactivated_user_msg_count = len(get_user_messages(long_term_idle_user))
        message = 'Test @**all** mention'
        send_stream_message(message)
        assert_last_um_content(long_term_idle_user, message)
        assert_um_count(long_term_idle_user, soft_deactivated_user_msg_count + 1)
        assert_um_count(cordelia, general_user_msg_count + 1)
        assert_last_um_content(cordelia, message)
        general_user_msg_count = len(get_user_messages(cordelia))
        soft_deactivated_user_msg_count = len(get_user_messages(long_term_idle_user))
        message = 'Test @**everyone** mention'
        send_stream_message(message)
        assert_last_um_content(long_term_idle_user, message)
        assert_um_count(long_term_idle_user, soft_deactivated_user_msg_count + 1)
        assert_um_count(cordelia, general_user_msg_count + 1)
        assert_last_um_content(cordelia, message)
        general_user_msg_count = len(get_user_messages(cordelia))
        soft_deactivated_user_msg_count = len(get_user_messages(long_term_idle_user))
        message = 'Test @**stream** mention'
        send_stream_message(message)
        assert_last_um_content(long_term_idle_user, message)
        assert_um_count(long_term_idle_user, soft_deactivated_user_msg_count + 1)
        assert_um_count(cordelia, general_user_msg_count + 1)
        assert_last_um_content(cordelia, message)
        # Test UserMessage row IS created while user is deactivated if there
        # is an alert word of theirs in the message.  (The assertions below
        # check that the row exists and carries the message content.)
        do_add_alert_words(long_term_idle_user, ['test_alert_word'])
        general_user_msg_count = len(get_user_messages(cordelia))
        soft_deactivated_user_msg_count = len(get_user_messages(long_term_idle_user))
        message = 'Testing test_alert_word'
        send_stream_message(message)
        assert_last_um_content(long_term_idle_user, message)
        assert_um_count(long_term_idle_user, soft_deactivated_user_msg_count + 1)
        assert_um_count(cordelia, general_user_msg_count + 1)
        assert_last_um_content(cordelia, message)
        # Test that a plain /me message does NOT create a UserMessage row for
        # the soft-deactivated user (negate=True and an unchanged count below).
        general_user_msg_count = len(get_user_messages(cordelia))
        soft_deactivated_user_msg_count = len(get_user_messages(long_term_idle_user))
        message = '/me says test'
        send_stream_message(message)
        assert_last_um_content(long_term_idle_user, message, negate=True)
        assert_um_count(long_term_idle_user, soft_deactivated_user_msg_count)
        assert_um_count(cordelia, general_user_msg_count + 1)
        assert_last_um_content(cordelia, message)
class MessageHydrationTest(ZulipTestCase):
    """Tests for hydrating cached message dicts with display_recipient data."""
    def test_hydrate_stream_recipient_info(self) -> None:
        """For a stream message, hydrate_recipient_info sets display_recipient
        to the stream name and type to 'stream'."""
        realm = get_realm('zulip')
        cordelia = self.example_user('cordelia')
        stream_id = get_stream('Verona', realm).id
        obj = dict(
            recipient_type=Recipient.STREAM,
            recipient_type_id=stream_id,
            sender_is_mirror_dummy=False,
            sender_email=cordelia.email,
            sender_full_name=cordelia.full_name,
            sender_short_name=cordelia.short_name,
            sender_id=cordelia.id,
        )
        MessageDict.hydrate_recipient_info(obj, 'Verona')
        self.assertEqual(obj['display_recipient'], 'Verona')
        self.assertEqual(obj['type'], 'stream')
    def test_hydrate_pm_recipient_info(self) -> None:
        """For a private message, hydrate_recipient_info appends the sender's
        entry to the display_recipient list and sets type to 'private'."""
        cordelia = self.example_user('cordelia')
        display_recipient: List[UserDisplayRecipient] = [
            dict(
                email='aaron@example.com',
                full_name='Aaron Smith',
                short_name='Aaron',
                id=999,
                is_mirror_dummy=False
            ),
        ]
        obj = dict(
            recipient_type=Recipient.PERSONAL,
            recipient_type_id=None,
            sender_is_mirror_dummy=False,
            sender_email=cordelia.email,
            sender_full_name=cordelia.full_name,
            sender_short_name=cordelia.short_name,
            sender_id=cordelia.id,
        )
        MessageDict.hydrate_recipient_info(obj, display_recipient)
        # The sender (cordelia) is added alongside the original recipient.
        self.assertEqual(
            obj['display_recipient'],
            [
                dict(
                    email='aaron@example.com',
                    full_name='Aaron Smith',
                    short_name='Aaron',
                    id=999,
                    is_mirror_dummy=False
                ),
                dict(
                    email=cordelia.email,
                    full_name=cordelia.full_name,
                    id=cordelia.id,
                    short_name=cordelia.short_name,
                    is_mirror_dummy=False,
                ),
            ],
        )
        self.assertEqual(obj['type'], 'private')
    def test_messages_for_ids(self) -> None:
        """messages_for_ids renders markdown content and attaches the
        per-message flags supplied by the caller."""
        hamlet = self.example_user('hamlet')
        cordelia = self.example_user('cordelia')
        stream_name = 'test stream'
        self.subscribe(cordelia, stream_name)
        old_message_id = self.send_stream_message(cordelia, stream_name, content='foo')
        self.subscribe(hamlet, stream_name)
        content = 'hello @**King Hamlet**'
        new_message_id = self.send_stream_message(cordelia, stream_name, content=content)
        user_message_flags = {
            old_message_id: ['read', 'historical'],
            new_message_id: ['mentioned'],
        }
        messages = messages_for_ids(
            message_ids=[old_message_id, new_message_id],
            user_message_flags=user_message_flags,
            search_fields={},
            apply_markdown=True,
            client_gravatar=True,
            allow_edit_history=False,
        )
        self.assertEqual(len(messages), 2)
        # messages_for_ids does not guarantee ordering; match by id.
        for message in messages:
            if message['id'] == old_message_id:
                old_message = message
            elif message['id'] == new_message_id:
                new_message = message
        self.assertEqual(old_message['content'], '<p>foo</p>')
        self.assertEqual(old_message['flags'], ['read', 'historical'])
        self.assertIn('class="user-mention"', new_message['content'])
        self.assertEqual(new_message['flags'], ['mentioned'])
    def test_display_recipient_up_to_date(self) -> None:
        """
        This is a test for a bug where due to caching of message_dicts,
        after updating a user's information, fetching those cached messages
        via messages_for_ids would return message_dicts with display_recipient
        still having the old information. The returned message_dicts should have
        up-to-date display_recipients and we check for that here.
        """
        hamlet = self.example_user('hamlet')
        cordelia = self.example_user('cordelia')
        message_id = self.send_personal_message(hamlet, cordelia, 'test')
        cordelia_recipient = cordelia.recipient
        # Cause the display_recipient to get cached:
        get_display_recipient(cordelia_recipient)
        # Change cordelia's email:
        cordelia_new_email = 'new-cordelia@zulip.com'
        cordelia.email = cordelia_new_email
        cordelia.save()
        # Local display_recipient cache needs to be flushed.
        # flush_per_request_caches() is called after every request,
        # so it makes sense to run it here.
        flush_per_request_caches()
        messages = messages_for_ids(
            message_ids=[message_id],
            user_message_flags={message_id: ['read']},
            search_fields={},
            apply_markdown=True,
            client_gravatar=True,
            allow_edit_history=False,
        )
        message = messages[0]
        # Find which display_recipient in the list is cordelia:
        # (if none matched, the assertion below would fail with a NameError)
        for display_recipient in message['display_recipient']:
            if display_recipient['short_name'] == 'cordelia':
                cordelia_display_recipient = display_recipient
        # Make sure the email is up-to-date.
        self.assertEqual(cordelia_display_recipient['email'], cordelia_new_email)
class TestMessageForIdsDisplayRecipientFetching(ZulipTestCase):
    """Verify messages_for_ids hydrates display_recipient correctly for
    personal, stream, and huddle messages — individually and mixed."""
    def _verify_display_recipient(self, display_recipient: DisplayRecipientT,
                                  expected_recipient_objects: Union[Stream, List[UserProfile]]) -> None:
        """Assert display_recipient is the stream's name, or (for PMs and
        huddles) contains an entry for every expected user profile."""
        if isinstance(expected_recipient_objects, Stream):
            self.assertEqual(display_recipient, expected_recipient_objects.name)
            return
        for profile in expected_recipient_objects:
            expected_entry: UserDisplayRecipient = {
                'email': profile.email,
                'full_name': profile.full_name,
                'short_name': profile.short_name,
                'id': profile.id,
                'is_mirror_dummy': profile.is_mirror_dummy,
            }
            self.assertTrue(expected_entry in display_recipient)
    def _fetch_rendered_messages(self, message_ids: List[int]) -> List[Dict[str, Any]]:
        """Run messages_for_ids over message_ids with every message marked read."""
        flags = {message_id: ['read'] for message_id in message_ids}
        return messages_for_ids(
            message_ids=message_ids,
            user_message_flags=flags,
            search_fields={},
            apply_markdown=True,
            client_gravatar=True,
            allow_edit_history=False,
        )
    def test_display_recipient_personal(self) -> None:
        hamlet = self.example_user('hamlet')
        cordelia = self.example_user('cordelia')
        othello = self.example_user('othello')
        message_ids = [
            self.send_personal_message(hamlet, cordelia, 'test'),
            self.send_personal_message(cordelia, othello, 'test'),
        ]
        messages = self._fetch_rendered_messages(message_ids)
        self._verify_display_recipient(messages[0]['display_recipient'], [hamlet, cordelia])
        self._verify_display_recipient(messages[1]['display_recipient'], [cordelia, othello])
    def test_display_recipient_stream(self) -> None:
        cordelia = self.example_user('cordelia')
        message_ids = [
            self.send_stream_message(cordelia, "Verona", content='test'),
            self.send_stream_message(cordelia, "Denmark", content='test'),
        ]
        messages = self._fetch_rendered_messages(message_ids)
        self._verify_display_recipient(messages[0]['display_recipient'], get_stream("Verona", cordelia.realm))
        self._verify_display_recipient(messages[1]['display_recipient'], get_stream("Denmark", cordelia.realm))
    def test_display_recipient_huddle(self) -> None:
        hamlet = self.example_user('hamlet')
        cordelia = self.example_user('cordelia')
        othello = self.example_user('othello')
        iago = self.example_user('iago')
        message_ids = [
            self.send_huddle_message(hamlet, [cordelia, othello], 'test'),
            self.send_huddle_message(cordelia, [hamlet, othello, iago], 'test'),
        ]
        messages = self._fetch_rendered_messages(message_ids)
        self._verify_display_recipient(messages[0]['display_recipient'], [hamlet, cordelia, othello])
        self._verify_display_recipient(messages[1]['display_recipient'], [hamlet, cordelia, othello, iago])
    def test_display_recipient_various_types(self) -> None:
        hamlet = self.example_user('hamlet')
        cordelia = self.example_user('cordelia')
        othello = self.example_user('othello')
        iago = self.example_user('iago')
        message_ids = [
            self.send_huddle_message(hamlet, [cordelia, othello], 'test'),
            self.send_stream_message(cordelia, "Verona", content='test'),
            self.send_personal_message(hamlet, cordelia, 'test'),
            self.send_stream_message(cordelia, "Denmark", content='test'),
            self.send_huddle_message(cordelia, [hamlet, othello, iago], 'test'),
            self.send_personal_message(cordelia, othello, 'test'),
        ]
        messages = self._fetch_rendered_messages(message_ids)
        self._verify_display_recipient(messages[0]['display_recipient'], [hamlet, cordelia, othello])
        self._verify_display_recipient(messages[1]['display_recipient'], get_stream("Verona", hamlet.realm))
        self._verify_display_recipient(messages[2]['display_recipient'], [hamlet, cordelia])
        self._verify_display_recipient(messages[3]['display_recipient'], get_stream("Denmark", hamlet.realm))
        self._verify_display_recipient(messages[4]['display_recipient'], [hamlet, cordelia, othello, iago])
        self._verify_display_recipient(messages[5]['display_recipient'], [cordelia, othello])
class MessageVisibilityTest(ZulipTestCase):
    """Tests for the realm message-visibility-limit machinery."""
    def test_update_first_visible_message_id(self) -> None:
        """update_first_visible_message_id sets first_visible_message_id from
        message_visibility_limit (0 means everything is visible)."""
        Message.objects.all().delete()
        message_ids = [self.send_stream_message(self.example_user("othello"), "Scotland") for i in range(15)]
        # If message_visibility_limit is None update_first_visible_message_id
        # should set first_visible_message_id to 0
        realm = get_realm("zulip")
        realm.message_visibility_limit = None
        # Setting to a random value other than 0 as the default value of
        # first_visible_message_id is 0
        realm.first_visible_message_id = 5
        realm.save()
        update_first_visible_message_id(realm)
        self.assertEqual(get_first_visible_message_id(realm), 0)
        # With a limit of 10 and 15 messages sent, only the last 10 stay
        # visible, so the first visible message is the 6th one (index 5).
        realm.message_visibility_limit = 10
        realm.save()
        expected_message_id = message_ids[5]
        update_first_visible_message_id(realm)
        self.assertEqual(get_first_visible_message_id(realm), expected_message_id)
        # If the message_visibility_limit is greater than number of messages
        # get_first_visible_message_id should return 0
        realm.message_visibility_limit = 50
        realm.save()
        update_first_visible_message_id(realm)
        self.assertEqual(get_first_visible_message_id(realm), 0)
    def test_maybe_update_first_visible_message_id(self) -> None:
        """maybe_update_first_visible_message_id only recomputes when a limit
        is set AND there is recorded sending activity (RealmCount rows)."""
        realm = get_realm("zulip")
        lookback_hours = 30
        realm.message_visibility_limit = None
        realm.save()
        # end_time is 25 hours ago — presumably inside the 30-hour lookback
        # window this function inspects (TODO: confirm in zerver.lib.message).
        end_time = timezone_now() - datetime.timedelta(hours=lookback_hours - 5)
        stat = COUNT_STATS['messages_sent:is_bot:hour']
        RealmCount.objects.create(realm=realm, property=stat.property,
                                  end_time=end_time, value=5)
        # No limit configured: no recomputation even though activity exists.
        with mock.patch("zerver.lib.message.update_first_visible_message_id") as m:
            maybe_update_first_visible_message_id(realm, lookback_hours)
        m.assert_not_called()
        realm.message_visibility_limit = 10
        realm.save()
        RealmCount.objects.all().delete()
        # Limit set but no recorded activity: still no recomputation.
        with mock.patch("zerver.lib.message.update_first_visible_message_id") as m:
            maybe_update_first_visible_message_id(realm, lookback_hours)
        m.assert_not_called()
        RealmCount.objects.create(realm=realm, property=stat.property,
                                  end_time=end_time, value=5)
        # Limit set and recent activity: recomputation happens exactly once.
        with mock.patch("zerver.lib.message.update_first_visible_message_id") as m:
            maybe_update_first_visible_message_id(realm, lookback_hours)
        m.assert_called_once_with(realm)
class TestBulkGetHuddleUserIds(ZulipTestCase):
    """bulk_get_huddle_user_ids must agree with per-recipient lookups."""
    def test_bulk_get_huddle_user_ids(self) -> None:
        hamlet = self.example_user('hamlet')
        cordelia = self.example_user('cordelia')
        othello = self.example_user('othello')
        iago = self.example_user('iago')
        message_ids = [
            self.send_huddle_message(hamlet, [cordelia, othello], 'test'),
            self.send_huddle_message(cordelia, [hamlet, othello, iago], 'test'),
        ]
        huddle_recipients = [
            message.recipient
            for message in Message.objects.filter(id__in=message_ids).order_by("id")
        ]
        bulk_user_ids = bulk_get_huddle_user_ids(huddle_recipients)
        # The bulk result, keyed by recipient id, must match what the
        # one-at-a-time API returns for each huddle.
        for recipient in huddle_recipients:
            self.assertEqual(bulk_user_ids[recipient.id],
                             list(get_huddle_user_ids(recipient)))
    def test_bulk_get_huddle_user_ids_empty_list(self) -> None:
        # An empty recipient list should simply produce an empty mapping.
        self.assertEqual(bulk_get_huddle_user_ids([]), {})
class NoRecipientIDsTest(ZulipTestCase):
    def test_no_recipient_ids(self) -> None:
        """gather_subscriptions_helper must not crash for a user with no
        stream subscriptions at all — a rare corner case where there are
        zero recipients to look up."""
        user_profile = self.example_user('cordelia')
        # Strip every stream subscription from cordelia.
        Subscription.objects.filter(user_profile=user_profile,
                                    recipient__type=Recipient.STREAM).delete()
        sub_data = gather_subscriptions_helper(user_profile)
        # Nothing to gather, and no exception raised along the way.
        self.assertEqual(len(sub_data[0]), 0)
| 44.023764 | 145 | 0.613132 | from django.db import IntegrityError
from django.db.models import Q
from django.conf import settings
from django.http import HttpResponse
from django.test import TestCase, override_settings
from django.utils.timezone import now as timezone_now
from zerver.lib import bugdown
from zerver.decorator import JsonableError
from zerver.lib.test_runner import slow
from zerver.lib.addressee import Addressee
from zerver.lib.actions import (
check_message,
check_send_stream_message,
create_mirror_user_if_needed,
do_add_alert_words,
do_change_is_admin,
do_change_stream_invite_only,
do_change_stream_post_policy,
do_claim_attachments,
do_create_user,
do_deactivate_user,
do_send_messages,
do_update_message,
do_set_realm_property,
extract_private_recipients,
extract_stream_indicator,
gather_subscriptions_helper,
get_active_presence_idle_user_ids,
get_client,
get_last_message_id,
get_topic_messages,
get_user_info_for_message_updates,
internal_prep_private_message,
internal_prep_stream_message_by_name,
internal_send_huddle_message,
internal_send_private_message,
internal_send_stream_message,
internal_send_stream_message_by_name,
send_rate_limited_pm_notification_to_bot_owner,
)
from zerver.lib.cache import (
cache_delete,
get_stream_cache_key,
to_dict_cache_key_id,
)
from zerver.lib.create_user import (
create_user_profile,
)
from zerver.lib.message import (
MessageDict,
bulk_access_messages,
get_first_visible_message_id,
get_raw_unread_data,
get_recent_private_conversations,
maybe_update_first_visible_message_id,
messages_for_ids,
render_markdown,
sew_messages_and_reactions,
update_first_visible_message_id,
)
from zerver.lib.test_helpers import (
get_subscription,
get_user_messages,
make_client,
message_stream_count,
most_recent_message,
most_recent_usermessage,
queries_captured,
reset_emails_in_zulip_realm,
)
from zerver.lib.test_classes import (
ZulipTestCase,
)
from zerver.lib.topic import (
LEGACY_PREV_TOPIC,
DB_TOPIC_NAME,
TOPIC_LINKS,
TOPIC_NAME,
)
from zerver.lib.types import DisplayRecipientT, UserDisplayRecipient
from zerver.lib.soft_deactivation import (
add_missing_messages,
do_soft_activate_users,
do_soft_deactivate_users,
reactivate_user_if_soft_deactivated,
)
from zerver.models import (
MAX_MESSAGE_LENGTH, MAX_TOPIC_NAME_LENGTH,
Message, Realm, Recipient, Stream, UserMessage, UserProfile, Attachment,
RealmAuditLog, RealmDomain, get_realm, UserPresence, Subscription,
get_stream, get_system_bot, get_user, Reaction,
flush_per_request_caches, ScheduledMessage, get_huddle_recipient,
bulk_get_huddle_user_ids, get_huddle_user_ids,
get_display_recipient, RealmFilter
)
from zerver.lib.timestamp import convert_to_UTC, datetime_to_timestamp
from zerver.lib.timezone import get_timezone
from zerver.lib.upload import create_attachment
from zerver.lib.url_encoding import near_message_url
from zerver.views.messages import create_mirrored_message_users, InvalidMirrorInput
from analytics.lib.counts import COUNT_STATS
from analytics.models import RealmCount
import datetime
import mock
from operator import itemgetter
import time
import ujson
from typing import Any, Dict, List, Set, Union, Tuple
from collections import namedtuple
class MiscMessageTest(ZulipTestCase):
def test_get_last_message_id(self) -> None:
self.assertEqual(
get_last_message_id(),
Message.objects.latest('id').id
)
Message.objects.all().delete()
self.assertEqual(get_last_message_id(), -1)
class TopicHistoryTest(ZulipTestCase):
def test_topics_history_zephyr_mirror(self) -> None:
user_profile = self.mit_user('sipbtest')
stream_name = 'new_stream'
self.subscribe(self.mit_user("starnine"), stream_name)
stream = get_stream(stream_name, user_profile.realm)
self.send_stream_message(self.mit_user("starnine"), stream_name,
topic_name="secret topic")
self.login_user(user_profile)
self.subscribe(user_profile, stream_name)
endpoint = '/json/users/me/%d/topics' % (stream.id,)
result = self.client_get(endpoint, dict(), subdomain="zephyr")
self.assert_json_success(result)
history = result.json()['topics']
self.assertEqual(history, [])
def test_topics_history(self) -> None:
user_profile = self.example_user('iago')
self.login_user(user_profile)
stream_name = 'Verona'
stream = get_stream(stream_name, user_profile.realm)
recipient = stream.recipient
def create_test_message(topic: str) -> int:
hamlet = self.example_user('hamlet')
message = Message(
sender=hamlet,
recipient=recipient,
content='whatever',
date_sent=timezone_now(),
sending_client=get_client('whatever'),
)
message.set_topic_name(topic)
message.save()
UserMessage.objects.create(
user_profile=user_profile,
message=message,
flags=0,
)
return message.id
create_test_message('topic2')
create_test_message('toPIc1')
create_test_message('toPIc0')
create_test_message('topic2')
create_test_message('topic2')
create_test_message('Topic2')
topic2_msg_id = create_test_message('topic2')
create_test_message('topic1')
create_test_message('topic1')
topic1_msg_id = create_test_message('topic1')
topic0_msg_id = create_test_message('topic0')
endpoint = '/json/users/me/%d/topics' % (stream.id,)
result = self.client_get(endpoint, dict())
self.assert_json_success(result)
history = result.json()['topics']
history = history[:3]
self.assertEqual([topic['name'] for topic in history], [
'topic0',
'topic1',
'topic2',
])
self.assertEqual([topic['max_id'] for topic in history], [
topic0_msg_id,
topic1_msg_id,
topic2_msg_id,
])
# same results for a public stream.
self.login('cordelia')
result = self.client_get(endpoint, dict())
self.assert_json_success(result)
history = result.json()['topics']
# We only look at the most recent three topics, because
# the prior fixture data may be unreliable.
history = history[:3]
self.assertEqual([topic['name'] for topic in history], [
'topic0',
'topic1',
'topic2',
])
self.assertIn('topic0', [topic['name'] for topic in history])
self.assertEqual([topic['max_id'] for topic in history], [
topic0_msg_id,
topic1_msg_id,
topic2_msg_id,
])
# Now make stream private, but subscribe cordelia
do_change_stream_invite_only(stream, True)
self.subscribe(self.example_user("cordelia"), stream.name)
result = self.client_get(endpoint, dict())
self.assert_json_success(result)
history = result.json()['topics']
history = history[:3]
# Cordelia doesn't have these recent history items when we
self.assertNotIn('topic0', [topic['name'] for topic in history])
self.assertNotIn('topic1', [topic['name'] for topic in history])
self.assertNotIn('topic2', [topic['name'] for topic in history])
def test_bad_stream_id(self) -> None:
self.login('iago')
# non-sensible stream id
endpoint = '/json/users/me/9999999999/topics'
result = self.client_get(endpoint, dict())
self.assert_json_error(result, 'Invalid stream id')
# out of realm
bad_stream = self.make_stream(
'mit_stream',
realm=get_realm('zephyr')
)
endpoint = '/json/users/me/%s/topics' % (bad_stream.id,)
result = self.client_get(endpoint, dict())
self.assert_json_error(result, 'Invalid stream id')
# private stream to which I am not subscribed
private_stream = self.make_stream(
'private_stream',
invite_only=True
)
endpoint = '/json/users/me/%s/topics' % (private_stream.id,)
result = self.client_get(endpoint, dict())
self.assert_json_error(result, 'Invalid stream id')
class TopicDeleteTest(ZulipTestCase):
def test_topic_delete(self) -> None:
initial_last_msg_id = self.get_last_message().id
stream_name = 'new_stream'
topic_name = 'new topic 2'
# NON-ADMIN USER
user_profile = self.example_user('hamlet')
self.subscribe(user_profile, stream_name)
# Send message
stream = get_stream(stream_name, user_profile.realm)
self.send_stream_message(user_profile, stream_name, topic_name=topic_name)
last_msg_id = self.send_stream_message(user_profile, stream_name, topic_name=topic_name)
# Deleting the topic
self.login_user(user_profile)
endpoint = '/json/streams/' + str(stream.id) + '/delete_topic'
result = self.client_post(endpoint, {
"topic_name": topic_name
})
self.assert_json_error(result, "Must be an organization administrator")
self.assertEqual(self.get_last_message().id, last_msg_id)
# Make stream private with limited history
do_change_stream_invite_only(stream, invite_only=True,
history_public_to_subscribers=False)
# ADMIN USER subscribed now
user_profile = self.example_user('iago')
self.subscribe(user_profile, stream_name)
self.login_user(user_profile)
new_last_msg_id = self.send_stream_message(user_profile, stream_name, topic_name=topic_name)
# Now admin deletes all messages in topic -- which should only
# delete new_last_msg_id, i.e. the one sent since they joined.
self.assertEqual(self.get_last_message().id, new_last_msg_id)
result = self.client_post(endpoint, {
"topic_name": topic_name
})
self.assert_json_success(result)
self.assertEqual(self.get_last_message().id, last_msg_id)
# Try to delete all messages in the topic again. There are no messages accessible
# to the administrator, so this should do nothing.
result = self.client_post(endpoint, {
"topic_name": topic_name
})
self.assert_json_success(result)
self.assertEqual(self.get_last_message().id, last_msg_id)
# Make the stream's history public to subscribers
do_change_stream_invite_only(stream, invite_only=True,
history_public_to_subscribers=True)
result = self.client_post(endpoint, {
"topic_name": topic_name
})
self.assert_json_success(result)
self.assertEqual(self.get_last_message().id, initial_last_msg_id)
result = self.client_post(endpoint, {
"topic_name": topic_name
})
self.assert_json_success(result)
self.assertEqual(self.get_last_message().id, initial_last_msg_id)
class TestCrossRealmPMs(ZulipTestCase):
def make_realm(self, domain: str) -> Realm:
realm = Realm.objects.create(string_id=domain, invite_required=False)
RealmDomain.objects.create(realm=realm, domain=domain)
return realm
def create_user(self, email: str) -> UserProfile:
subdomain = email.split("@")[1]
self.register(email, 'test', subdomain=subdomain)
return get_user(email, get_realm(subdomain))
@slow("Sends a large number of messages")
@override_settings(CROSS_REALM_BOT_EMAILS=['notification-bot@zulip.com',
'welcome-bot@zulip.com',
'support@3.example.com'])
def test_realm_scenarios(self) -> None:
self.make_realm('1.example.com')
r2 = self.make_realm('2.example.com')
self.make_realm('3.example.com')
def assert_message_received(to_user: UserProfile, from_user: UserProfile) -> None:
messages = get_user_messages(to_user)
self.assertEqual(messages[-1].sender.id, from_user.id)
def assert_invalid_user() -> Any:
return self.assertRaisesRegex(
JsonableError,
'Invalid user ID ')
user1_email = 'user1@1.example.com'
user1a_email = 'user1a@1.example.com'
user2_email = 'user2@2.example.com'
user3_email = 'user3@3.example.com'
notification_bot_email = 'notification-bot@zulip.com'
support_email = 'support@3.example.com'
user1 = self.create_user(user1_email)
user1a = self.create_user(user1a_email)
user2 = self.create_user(user2_email)
user3 = self.create_user(user3_email)
notification_bot = get_system_bot(notification_bot_email)
with self.settings(CROSS_REALM_BOT_EMAILS=['notification-bot@zulip.com', 'welcome-bot@zulip.com']):
# cross-realm email, we need to hide this for now.
support_bot = self.create_user(support_email)
# Users can PM themselves
self.send_personal_message(user1, user1)
assert_message_received(user1, user1)
# Users on the same realm can PM each other
self.send_personal_message(user1, user1a)
assert_message_received(user1a, user1)
# Cross-realm bots in the zulip.com realm can PM any realm
# (They need lower level APIs to do this.)
internal_send_private_message(
realm=r2,
sender=get_system_bot(notification_bot_email),
recipient_user=get_user(user2_email, r2),
content='bla',
)
assert_message_received(user2, notification_bot)
# All users can PM cross-realm bots in the zulip.com realm
self.send_personal_message(user1, notification_bot)
assert_message_received(notification_bot, user1)
# Users can PM cross-realm bots on non-zulip realms.
# (The support bot represents some theoretical bot that we may
# create in the future that does not have zulip.com as its realm.)
self.send_personal_message(user1, support_bot)
assert_message_received(support_bot, user1)
# Allow sending PMs to two different cross-realm bots simultaneously.
# (We don't particularly need this feature, but since users can
# prevent them from sending multiple bots at once. We may revisit
# this if it's a nuisance for huddles.)
self.send_huddle_message(user1, [notification_bot, support_bot])
assert_message_received(notification_bot, user1)
assert_message_received(support_bot, user1)
with assert_invalid_user():
self.send_huddle_message(user1, [user3, support_bot])
# even if one of the users is a cross-realm bot.
with assert_invalid_user():
self.send_huddle_message(user1, [user2, notification_bot])
with assert_invalid_user():
self.send_huddle_message(notification_bot, [user1, user2])
# Users on the different realms cannot PM each other
with assert_invalid_user():
self.send_personal_message(user1, user2)
# Users on non-zulip realms can't PM "ordinary" Zulip users
with assert_invalid_user():
self.send_personal_message(user1, self.example_user('hamlet'))
with assert_invalid_user():
self.send_huddle_message(user1, [user2, user3])
class TestAddressee(ZulipTestCase):
def test_addressee_for_user_ids(self) -> None:
realm = get_realm('zulip')
user_ids = [self.example_user('cordelia').id,
self.example_user('hamlet').id,
self.example_user('othello').id]
result = Addressee.for_user_ids(user_ids=user_ids, realm=realm)
user_profiles = result.user_profiles()
result_user_ids = [user_profiles[0].id, user_profiles[1].id,
user_profiles[2].id]
self.assertEqual(set(result_user_ids), set(user_ids))
def test_addressee_for_user_ids_nonexistent_id(self) -> None:
def assert_invalid_user_id() -> Any:
return self.assertRaisesRegex(
JsonableError,
'Invalid user ID ')
with assert_invalid_user_id():
Addressee.for_user_ids(user_ids=[779], realm=get_realm('zulip'))
def test_addressee_legacy_build_for_user_ids(self) -> None:
realm = get_realm('zulip')
self.login('hamlet')
user_ids = [self.example_user('cordelia').id,
self.example_user('othello').id]
result = Addressee.legacy_build(
sender=self.example_user('hamlet'), message_type_name='private',
message_to=user_ids, topic_name='random_topic',
realm=realm
)
user_profiles = result.user_profiles()
result_user_ids = [user_profiles[0].id, user_profiles[1].id]
self.assertEqual(set(result_user_ids), set(user_ids))
def test_addressee_legacy_build_for_stream_id(self) -> None:
realm = get_realm('zulip')
self.login('iago')
sender = self.example_user('iago')
self.subscribe(sender, "Denmark")
stream = get_stream('Denmark', realm)
result = Addressee.legacy_build(
sender=sender, message_type_name='stream',
message_to=[stream.id], topic_name='random_topic',
realm=realm
)
stream_id = result.stream_id()
self.assertEqual(stream.id, stream_id)
class InternalPrepTest(ZulipTestCase):
    """Tests for the internal_send_* / internal_prep_* helpers that the
    server uses to send messages on its own behalf."""
    def test_returns_for_internal_sends(self) -> None:
        """Invalid content in an internal send is logged, not raised."""
        # Empty content fails message validation.
        bad_content = ''
        realm = get_realm('zulip')
        cordelia = self.example_user('cordelia')
        hamlet = self.example_user('hamlet')
        othello = self.example_user('othello')
        stream = get_stream('Verona', realm)
        with mock.patch('logging.exception') as m:
            internal_send_private_message(
                realm=realm,
                sender=cordelia,
                recipient_user=hamlet,
                content=bad_content,
            )
        # The first positional arg of the first logging.exception call is
        # the error message.
        arg = m.call_args_list[0][0][0]
        self.assertIn('Message must not be empty', arg)
        with mock.patch('logging.exception') as m:
            internal_send_huddle_message(
                realm=realm,
                sender=cordelia,
                emails=[hamlet.email, othello.email],
                content=bad_content,
            )
        arg = m.call_args_list[0][0][0]
        self.assertIn('Message must not be empty', arg)
        with mock.patch('logging.exception') as m:
            internal_send_stream_message(
                realm=realm,
                sender=cordelia,
                topic='whatever',
                content=bad_content,
                stream=stream
            )
        arg = m.call_args_list[0][0][0]
        self.assertIn('Message must not be empty', arg)
        with mock.patch('logging.exception') as m:
            internal_send_stream_message_by_name(
                realm=realm,
                sender=cordelia,
                stream_name=stream.name,
                topic='whatever',
                content=bad_content
            )
        arg = m.call_args_list[0][0][0]
        self.assertIn('Message must not be empty', arg)
    def test_error_handling(self) -> None:
        """Over-long content is truncated; a None realm raises; cross-realm
        recipients are logged as queueing errors."""
        realm = get_realm('zulip')
        sender = self.example_user('cordelia')
        recipient_user = self.example_user('hamlet')
        # Longer than the maximum message length, so it gets truncated.
        content = 'x' * 15000
        result = internal_prep_private_message(
            realm=realm,
            sender=sender,
            recipient_user=recipient_user,
            content=content)
        message = result['message']
        self.assertIn('message was too long', message.content)
        with self.assertRaises(RuntimeError):
            internal_prep_private_message(
                realm=None,  # should cause error
                sender=sender,
                recipient_user=recipient_user,
                content=content)
        # Simulate sending a message to somebody not in the
        # realm of the sender.
        recipient_user = self.mit_user('starnine')
        with mock.patch('logging.exception') as logging_mock:
            result = internal_prep_private_message(
                realm=realm,
                sender=sender,
                recipient_user=recipient_user,
                content=content)
        arg = logging_mock.call_args_list[0][0][0]
        prefix = "Error queueing internal message by cordelia@zulip.com: You can't send private messages outside of your organization."
        self.assertTrue(arg.startswith(prefix))
    def test_ensure_stream_gets_called(self) -> None:
        """internal_prep_stream_message_by_name creates a missing stream."""
        realm = get_realm('zulip')
        sender = self.example_user('cordelia')
        stream_name = 'test_stream'
        topic = 'whatever'
        content = 'hello'
        internal_prep_stream_message_by_name(
            realm=realm,
            sender=sender,
            stream_name=stream_name,
            topic=topic,
            content=content)
        # This lookup would raise Stream.DoesNotExist if the stream had
        # not been auto-created by the prep call above.
        Stream.objects.get(name=stream_name, realm_id=realm.id)
class ExtractTest(TestCase):
    """Tests for the helpers that parse the `to` parameter of the
    message-send API (stream indicators and private recipients)."""
    def test_extract_stream_indicator(self) -> None:
        """A stream indicator may be a name, a numeric ID, or a
        one-element JSON list containing either."""
        self.assertEqual(
            extract_stream_indicator('development'),
            "development",
        )
        # Commas are legal inside a stream name.
        self.assertEqual(
            extract_stream_indicator('commas,are,fine'),
            "commas,are,fine",
        )
        # A JSON-quoted name is decoded.
        self.assertEqual(
            extract_stream_indicator('"Who hasn\'t done this?"'),
            "Who hasn't done this?",
        )
        # A bare number is treated as a stream ID.
        self.assertEqual(
            extract_stream_indicator("999"),
            999,
        )
        # For legacy reasons it's plausible that users will
        # wrap a single stream name or ID in a JSON list.
        self.assertEqual(
            extract_stream_indicator('["social"]'),
            'social',
        )
        self.assertEqual(
            extract_stream_indicator("[123]"),
            123,
        )
        with self.assertRaisesRegex(JsonableError, 'Invalid data type for stream'):
            extract_stream_indicator('{}')
        with self.assertRaisesRegex(JsonableError, 'Invalid data type for stream'):
            extract_stream_indicator('[{}]')
        with self.assertRaisesRegex(JsonableError, 'Expected exactly one stream'):
            extract_stream_indicator('[1,2,"general"]')
    def test_extract_private_recipients_emails(self) -> None:
        """Emails are stripped and de-duplicated across input formats."""
        # JSON list with duplicates, an all-whitespace entry, and padding.
        s = ujson.dumps([' alice@zulip.com ', ' bob@zulip.com ', ' ', 'bob@zulip.com'])
        # For testing, ignorance here is better than manual casting
        result = sorted(extract_private_recipients(s))
        self.assertEqual(result, ['alice@zulip.com', 'bob@zulip.com'])
        # simple string with one name
        s = 'alice@zulip.com '
        self.assertEqual(extract_private_recipients(s), ['alice@zulip.com'])
        # JSON-encoded string
        s = '"alice@zulip.com"'
        self.assertEqual(extract_private_recipients(s), ['alice@zulip.com'])
        # bare comma-delimited string
        s = 'bob@zulip.com, alice@zulip.com'
        result = sorted(extract_private_recipients(s))
        self.assertEqual(result, ['alice@zulip.com', 'bob@zulip.com'])
        # JSON-encoded, comma-delimited string
        s = '"bob@zulip.com,alice@zulip.com"'
        result = sorted(extract_private_recipients(s))
        self.assertEqual(result, ['alice@zulip.com', 'bob@zulip.com'])
        # Invalid data
        s = ujson.dumps(dict(color='red'))
        with self.assertRaisesRegex(JsonableError, 'Invalid data type for recipients'):
            extract_private_recipients(s)
        s = ujson.dumps([{}])
        with self.assertRaisesRegex(JsonableError, 'Invalid data type for recipients'):
            extract_private_recipients(s)
        # Empty list
        self.assertEqual(extract_private_recipients('[]'), [])
        # Heterogeneous lists are not supported
        mixed = ujson.dumps(['eeshan@example.com', 3, 4])
        with self.assertRaisesRegex(JsonableError, 'Recipient lists may contain emails or user IDs, but not both.'):
            extract_private_recipients(mixed)
    def test_extract_recipient_ids(self) -> None:
        """Numeric recipient lists are de-duplicated; mixed lists fail."""
        # JSON list w/dups
        s = ujson.dumps([3, 3, 12])
        result = sorted(extract_private_recipients(s))
        self.assertEqual(result, [3, 12])
        # Invalid data
        ids = ujson.dumps(dict(recipient=12))
        with self.assertRaisesRegex(JsonableError, 'Invalid data type for recipients'):
            extract_private_recipients(ids)
        # Heterogeneous lists are not supported
        mixed = ujson.dumps([3, 4, 'eeshan@example.com'])
        with self.assertRaisesRegex(JsonableError, 'Recipient lists may contain emails or user IDs, but not both.'):
            extract_private_recipients(mixed)
class PersonalMessagesTest(ZulipTestCase):
    """Tests for one-on-one private message delivery and bookkeeping."""
    def test_near_pm_message_url(self) -> None:
        """near_message_url() builds a /#narrow/pm-with/.../near/<id> link."""
        realm = get_realm('zulip')
        message = dict(
            type='personal',
            id=555,
            display_recipient=[
                dict(id=77),
                dict(id=80),
            ],
        )
        url = near_message_url(
            realm=realm,
            message=message,
        )
        # FIX: the expected-URL literal was truncated (unterminated string);
        # the narrow fragment with both recipient IDs and the message ID
        # has been restored.
        self.assertEqual(url, 'http://zulip.testserver/#narrow/pm-with/77,80-pm/near/555')
    def test_is_private_flag_not_leaked(self) -> None:
        """The server-internal is_private flag must not be sent to clients."""
        self.login('hamlet')
        self.send_personal_message(self.example_user("hamlet"),
                                   self.example_user("cordelia"),
                                   "test")
        for msg in self.get_messages():
            self.assertNotIn('is_private', msg['flags'])
    def test_auto_subbed_to_personals(self) -> None:
        """A newly registered user can immediately send/receive personals."""
        test_email = self.nonreg_email('test')
        self.register(test_email, "test")
        user_profile = self.nonreg_user('test')
        old_messages_count = message_stream_count(user_profile)
        # Send a personal to oneself; exactly one new message should land.
        self.send_personal_message(user_profile, user_profile)
        new_messages_count = message_stream_count(user_profile)
        self.assertEqual(new_messages_count, old_messages_count + 1)
        recipient = Recipient.objects.get(type_id=user_profile.id,
                                          type=Recipient.PERSONAL)
        message = most_recent_message(user_profile)
        self.assertEqual(message.recipient, recipient)
        # Patch get_display_recipient so the __str__ output is predictable.
        with mock.patch('zerver.models.get_display_recipient', return_value='recip'):
            self.assertEqual(
                str(message),
                '<Message: recip / / '
                '<UserProfile: {} {}>>'.format(user_profile.email, user_profile.realm))
            user_message = most_recent_usermessage(user_profile)
            self.assertEqual(
                str(user_message),
                '<UserMessage: recip / {} ([])>'.format(user_profile.email)
            )
    @slow("checks several profiles")
    def test_personal_to_self(self) -> None:
        """A personal sent to oneself is visible only to that user."""
        old_user_profiles = list(UserProfile.objects.all())
        test_email = self.nonreg_email('test1')
        self.register(test_email, "test1")
        old_messages = []
        for user_profile in old_user_profiles:
            old_messages.append(message_stream_count(user_profile))
        user_profile = self.nonreg_user('test1')
        self.send_personal_message(user_profile, user_profile)
        # No pre-existing user should have gained a message.
        new_messages = []
        for user_profile in old_user_profiles:
            new_messages.append(message_stream_count(user_profile))
        self.assertEqual(old_messages, new_messages)
        user_profile = self.nonreg_user('test1')
        recipient = Recipient.objects.get(type_id=user_profile.id, type=Recipient.PERSONAL)
        self.assertEqual(most_recent_message(user_profile).recipient, recipient)
    def assert_personal(self, sender: UserProfile, receiver: UserProfile, content: str="testcontent") -> None:
        """Send a PM from sender to receiver and verify that exactly those
        two users (and no one else) received it."""
        sender_messages = message_stream_count(sender)
        receiver_messages = message_stream_count(receiver)
        other_user_profiles = UserProfile.objects.filter(~Q(id=sender.id) &
                                                         ~Q(id=receiver.id))
        old_other_messages = []
        for user_profile in other_user_profiles:
            old_other_messages.append(message_stream_count(user_profile))
        self.send_personal_message(sender, receiver, content)
        # Users outside the conversation don't get the message.
        new_other_messages = []
        for user_profile in other_user_profiles:
            new_other_messages.append(message_stream_count(user_profile))
        self.assertEqual(old_other_messages, new_other_messages)
        # Both participants gained exactly one message.
        self.assertEqual(message_stream_count(sender),
                         sender_messages + 1)
        self.assertEqual(message_stream_count(receiver),
                         receiver_messages + 1)
        recipient = Recipient.objects.get(type_id=receiver.id, type=Recipient.PERSONAL)
        self.assertEqual(most_recent_message(sender).recipient, recipient)
        self.assertEqual(most_recent_message(receiver).recipient, recipient)
    def test_personal(self) -> None:
        """A personal message is seen only by sender and recipient."""
        self.login('hamlet')
        self.assert_personal(
            sender=self.example_user("hamlet"),
            receiver=self.example_user("othello")
        )
    def test_private_message_policy(self) -> None:
        """With PMs disabled realm-wide, user-to-user PMs fail but PMs
        involving bots are still allowed."""
        user_profile = self.example_user("hamlet")
        self.login_user(user_profile)
        do_set_realm_property(user_profile.realm, "private_message_policy",
                              Realm.PRIVATE_MESSAGE_POLICY_DISABLED)
        with self.assertRaises(JsonableError):
            self.send_personal_message(user_profile, self.example_user("cordelia"))
        # Bots (system or owned) are exempt from the policy in both directions.
        bot_profile = self.create_test_bot("testbot", user_profile)
        self.send_personal_message(user_profile, get_system_bot(settings.NOTIFICATION_BOT))
        self.send_personal_message(user_profile, bot_profile)
        self.send_personal_message(bot_profile, user_profile)
    def test_non_ascii_personal(self) -> None:
        """PM delivery works with non-ASCII content."""
        self.login('hamlet')
        self.assert_personal(
            sender=self.example_user("hamlet"),
            receiver=self.example_user("othello"),
            content="hümbüǵ"
        )
class StreamMessagesTest(ZulipTestCase):
    """Tests for stream message delivery, fan-out, mentions, and flags."""
    def assert_stream_message(self, stream_name: str, topic_name: str="test topic",
                              content: str="test content") -> None:
        """Send a message to stream_name and verify every subscriber (and
        only subscribers) gained exactly one message."""
        realm = get_realm('zulip')
        subscribers = self.users_subscribed_to_stream(stream_name, realm)
        # Outgoing webhook bots are excluded from the fan-out count here.
        subscribers = [subscriber for subscriber in subscribers
                       if subscriber.bot_type != UserProfile.OUTGOING_WEBHOOK_BOT]
        old_subscriber_messages = []
        for subscriber in subscribers:
            old_subscriber_messages.append(message_stream_count(subscriber))
        non_subscribers = [user_profile for user_profile in UserProfile.objects.all()
                           if user_profile not in subscribers]
        old_non_subscriber_messages = []
        for non_subscriber in non_subscribers:
            old_non_subscriber_messages.append(message_stream_count(non_subscriber))
        # Send as the first human subscriber.
        non_bot_subscribers = [user_profile for user_profile in subscribers
                               if not user_profile.is_bot]
        a_subscriber = non_bot_subscribers[0]
        self.login_user(a_subscriber)
        self.send_stream_message(a_subscriber, stream_name,
                                 content=content, topic_name=topic_name)
        # Did all of the subscribers get the message?
        new_subscriber_messages = []
        for subscriber in subscribers:
            new_subscriber_messages.append(message_stream_count(subscriber))
        # Did non-subscribers not get the message?
        new_non_subscriber_messages = []
        for non_subscriber in non_subscribers:
            new_non_subscriber_messages.append(message_stream_count(non_subscriber))
        self.assertEqual(old_non_subscriber_messages, new_non_subscriber_messages)
        self.assertEqual(new_subscriber_messages, [elt + 1 for elt in old_subscriber_messages])
    def test_performance(self) -> None:
        """Smoke-test fan-out cost with a mix of active and long_term_idle
        subscribers; idle users still get UserMessage rows on send."""
        num_messages = 2
        num_extra_users = 10
        sender = self.example_user('cordelia')
        realm = sender.realm
        message_content = 'whatever'
        stream = get_stream('Denmark', realm)
        topic_name = 'lunch'
        recipient = stream.recipient
        sending_client = make_client(name="test suite")
        for i in range(num_extra_users):
            # Make every other user be idle.
            long_term_idle = i % 2 > 0
            email = 'foo%d@example.com' % (i,)
            user = UserProfile.objects.create(
                realm=realm,
                email=email,
                pointer=0,
                long_term_idle=long_term_idle,
            )
            Subscription.objects.create(
                user_profile=user,
                recipient=recipient
            )
        def send_test_message() -> None:
            # Build and send one message through the real send pipeline.
            message = Message(
                sender=sender,
                recipient=recipient,
                content=message_content,
                date_sent=timezone_now(),
                sending_client=sending_client,
            )
            message.set_topic_name(topic_name)
            do_send_messages([dict(message=message)])
        before_um_count = UserMessage.objects.count()
        t = time.time()
        for i in range(num_messages):
            send_test_message()
        delay = time.time() - t
        assert(delay)  # quiet down lint
        # print(delay)
        after_um_count = UserMessage.objects.count()
        ums_created = after_um_count - before_um_count
        num_active_users = num_extra_users / 2
        self.assertTrue(ums_created > (num_active_users * num_messages))
    def test_not_too_many_queries(self) -> None:
        """Guard against query-count regressions in check_send_stream_message."""
        recipient_list = [self.example_user("hamlet"), self.example_user("iago"),
                          self.example_user("cordelia"), self.example_user("othello")]
        for user_profile in recipient_list:
            self.subscribe(user_profile, "Denmark")
        sender = self.example_user('hamlet')
        sending_client = make_client(name="test suite")
        stream_name = 'Denmark'
        topic_name = 'foo'
        content = 'whatever'
        realm = sender.realm
        # To get accurate count of the queries, we should make sure that
        # caches don't come into play. If we count queries while caches are
        # filled, we will get a lower count.
        flush_per_request_caches()
        cache_delete(get_stream_cache_key(stream_name, realm.id))
        with queries_captured() as queries:
            check_send_stream_message(
                sender=sender,
                client=sending_client,
                stream_name=stream_name,
                topic=topic_name,
                body=content,
            )
        self.assert_length(queries, 15)
    def test_stream_message_dict(self) -> None:
        """MessageDict rows for stream messages carry recipient and stream_id."""
        user_profile = self.example_user('iago')
        self.subscribe(user_profile, "Denmark")
        self.send_stream_message(self.example_user("hamlet"), "Denmark",
                                 content="whatever", topic_name="my topic")
        message = most_recent_message(user_profile)
        row = MessageDict.get_raw_db_rows([message.id])[0]
        dct = MessageDict.build_dict_from_raw_db_row(row)
        MessageDict.post_process_dicts([dct], apply_markdown=True, client_gravatar=False)
        self.assertEqual(dct['display_recipient'], 'Denmark')
        stream = get_stream('Denmark', user_profile.realm)
        self.assertEqual(dct['stream_id'], stream.id)
    def test_stream_message_unicode(self) -> None:
        """Message.__str__ renders stream / topic / sender."""
        receiving_user_profile = self.example_user('iago')
        sender = self.example_user('hamlet')
        self.subscribe(receiving_user_profile, "Denmark")
        self.send_stream_message(sender, "Denmark",
                                 content="whatever", topic_name="my topic")
        message = most_recent_message(receiving_user_profile)
        self.assertEqual(str(message),
                         '<Message: Denmark / my topic / '
                         '<UserProfile: {} {}>>'.format(sender.email, sender.realm))
    def test_message_mentions(self) -> None:
        """An @-mention sets the mentioned flag on the UserMessage row."""
        user_profile = self.example_user('iago')
        self.subscribe(user_profile, "Denmark")
        self.send_stream_message(self.example_user("hamlet"), "Denmark",
                                 content="test @**Iago** rules")
        message = most_recent_message(user_profile)
        assert(UserMessage.objects.get(user_profile=user_profile, message=message).flags.mentioned.is_set)
    def test_is_private_flag(self) -> None:
        """is_private is set on PMs and unset on stream messages."""
        user_profile = self.example_user('iago')
        self.subscribe(user_profile, "Denmark")
        self.send_stream_message(self.example_user("hamlet"), "Denmark",
                                 content="test")
        message = most_recent_message(user_profile)
        self.assertFalse(UserMessage.objects.get(user_profile=user_profile, message=message).flags.is_private.is_set)
        self.send_personal_message(self.example_user("hamlet"), user_profile,
                                   content="test")
        message = most_recent_message(user_profile)
        self.assertTrue(UserMessage.objects.get(user_profile=user_profile, message=message).flags.is_private.is_set)
    def _send_stream_message(self, user: UserProfile, stream_name: str, content: str) -> Set[int]:
        """Send a stream message and return the set of user IDs that the
        send_event call targeted (captured via mock)."""
        with mock.patch('zerver.lib.actions.send_event') as m:
            self.send_stream_message(
                user,
                stream_name,
                content=content
            )
        self.assertEqual(m.call_count, 1)
        # send_event's third positional argument is the list of target users.
        users = m.call_args[0][2]
        user_ids = {u['id'] for u in users}
        return user_ids
    def test_unsub_mention(self) -> None:
        """Mentioning an unsubscribed user does not deliver a message."""
        cordelia = self.example_user('cordelia')
        hamlet = self.example_user('hamlet')
        stream_name = 'Test Stream'
        self.subscribe(hamlet, stream_name)
        # Start from a clean slate for Cordelia's UserMessage rows.
        UserMessage.objects.filter(
            user_profile=cordelia
        ).delete()
        def mention_cordelia() -> Set[int]:
            content = 'test @**Cordelia Lear** rules'
            user_ids = self._send_stream_message(
                user=hamlet,
                stream_name=stream_name,
                content=content
            )
            return user_ids
        def num_cordelia_messages() -> int:
            return UserMessage.objects.filter(
                user_profile=cordelia
            ).count()
        user_ids = mention_cordelia()
        self.assertEqual(0, num_cordelia_messages())
        self.assertNotIn(cordelia.id, user_ids)
        # Subscribing Cordelia and then mentioning her should give her a
        # message.
        self.subscribe(cordelia, stream_name)
        user_ids = mention_cordelia()
        self.assertIn(cordelia.id, user_ids)
        self.assertEqual(1, num_cordelia_messages())
    def test_message_bot_mentions(self) -> None:
        """An unsubscribed bot that is mentioned still gets the message."""
        cordelia = self.example_user('cordelia')
        hamlet = self.example_user('hamlet')
        realm = hamlet.realm
        stream_name = 'Test Stream'
        self.subscribe(hamlet, stream_name)
        normal_bot = do_create_user(
            email='normal-bot@zulip.com',
            password='',
            realm=realm,
            full_name='Normal Bot',
            short_name='',
            bot_type=UserProfile.DEFAULT_BOT,
            bot_owner=cordelia,
        )
        content = 'test @**Normal Bot** rules'
        user_ids = self._send_stream_message(
            user=hamlet,
            stream_name=stream_name,
            content=content
        )
        self.assertIn(normal_bot.id, user_ids)
        user_message = most_recent_usermessage(normal_bot)
        self.assertEqual(user_message.message.content, content)
        self.assertTrue(user_message.flags.mentioned)
    def test_stream_message_mirroring(self) -> None:
        """Forged zephyr-mirror sends require the api_super_user permission."""
        user = self.mit_user('starnine')
        self.subscribe(user, 'Verona')
        do_change_is_admin(user, True, 'api_super_user')
        result = self.api_post(user, "/api/v1/messages", {"type": "stream",
                                                          "to": "Verona",
                                                          "sender": self.mit_email("sipbtest"),
                                                          "client": "zephyr_mirror",
                                                          "topic": "announcement",
                                                          "content": "Everyone knows Iago rules",
                                                          "forged": "true"},
                               subdomain="zephyr")
        self.assert_json_success(result)
        # Without the permission, the same request must be rejected.
        do_change_is_admin(user, False, 'api_super_user')
        result = self.api_post(user, "/api/v1/messages", {"type": "stream",
                                                          "to": "Verona",
                                                          "sender": self.mit_email("sipbtest"),
                                                          "client": "zephyr_mirror",
                                                          "topic": "announcement",
                                                          "content": "Everyone knows Iago rules",
                                                          "forged": "true"},
                               subdomain="zephyr")
        self.assert_json_error(result, "User not authorized for this query")
    def test_message_to_stream(self) -> None:
        """Basic stream fan-out check on an existing stream."""
        self.assert_stream_message("Scotland")
    def test_non_ascii_stream_message(self) -> None:
        """Fan-out works for non-ASCII stream name, topic, and content."""
        self.login('hamlet')
        # Subscribe everyone to a stream with non-ASCII characters.
        non_ascii_stream_name = "hümbüǵ"
        realm = get_realm("zulip")
        stream = self.make_stream(non_ascii_stream_name)
        for user_profile in UserProfile.objects.filter(is_active=True, is_bot=False,
                                                       realm=realm)[0:3]:
            self.subscribe(user_profile, stream.name)
        self.assert_stream_message(non_ascii_stream_name, topic_name="hümbüǵ",
                                   content="hümbüǵ")
    def test_get_raw_unread_data_for_huddle_messages(self) -> None:
        """Huddle messages show up in huddle_dict and in recent
        private conversations for each participant."""
        users = [
            self.example_user('hamlet'),
            self.example_user('cordelia'),
            self.example_user('iago'),
            self.example_user('prospero'),
            self.example_user('othello'),
        ]
        message1_id = self.send_huddle_message(users[0], users, "test content 1")
        message2_id = self.send_huddle_message(users[0], users, "test content 2")
        msg_data = get_raw_unread_data(users[1])
        # both the messages are present in msg_data
        self.assertIn(message1_id, msg_data["huddle_dict"].keys())
        self.assertIn(message2_id, msg_data["huddle_dict"].keys())
        # only these two messages are present in msg_data
        self.assertEqual(len(msg_data["huddle_dict"].keys()), 2)
        recent_conversations = get_recent_private_conversations(users[1])
        self.assertEqual(len(recent_conversations), 1)
        recent_conversation = list(recent_conversations.values())[0]
        # The conversation lists every participant except the viewer.
        self.assertEqual(set(recent_conversation['user_ids']), {user.id for user in users if
                                                                user != users[1]})
        self.assertEqual(recent_conversation['max_message_id'], message2_id)
class MessageDictTest(ZulipTestCase):
    """Tests for MessageDict serialization: the send-message path and the
    fetch path must agree, and rendering/reactions must round-trip."""
    def test_both_codepaths(self) -> None:
        """The wide_dict/finalize_payload path (send) and the
        to_dict_uncached_helper path (fetch) must produce identical
        payloads for every apply_markdown/client_gravatar combination."""
        def reload_message(msg_id: int) -> Message:
            # Get a clean copy of the message, and
            # clear the cache.
            cache_delete(to_dict_cache_key_id(msg_id))
            msg = Message.objects.get(id=msg_id)
            return msg
        def get_send_message_payload(
                msg_id: int,
                apply_markdown: bool,
                client_gravatar: bool) -> Dict[str, Any]:
            # Payload as produced at send time.
            msg = reload_message(msg_id)
            wide_dict = MessageDict.wide_dict(msg)
            narrow_dict = MessageDict.finalize_payload(
                wide_dict,
                apply_markdown=apply_markdown,
                client_gravatar=client_gravatar,
            )
            return narrow_dict
        def get_fetch_payload(
                msg_id: int,
                apply_markdown: bool,
                client_gravatar: bool) -> Dict[str, Any]:
            # Payload as produced when fetching message history.
            msg = reload_message(msg_id)
            unhydrated_dict = MessageDict.to_dict_uncached_helper([msg])[0]
            # The next step mutates the dict in place
            # for performance reasons.
            MessageDict.post_process_dicts(
                [unhydrated_dict],
                apply_markdown=apply_markdown,
                client_gravatar=client_gravatar,
            )
            final_dict = unhydrated_dict
            return final_dict
        def test_message_id() -> int:
            hamlet = self.example_user('hamlet')
            self.login_user(hamlet)
            msg_id = self.send_stream_message(
                hamlet,
                "Scotland",
                topic_name="editing",
                content="before edit"
            )
            return msg_id
        # All four combinations of the two boolean rendering options.
        flag_setups = [
            [False, False],
            [False, True],
            [True, False],
            [True, True],
        ]
        msg_id = test_message_id()
        for (apply_markdown, client_gravatar) in flag_setups:
            send_message_payload = get_send_message_payload(
                msg_id,
                apply_markdown=apply_markdown,
                client_gravatar=client_gravatar,
            )
            fetch_payload = get_fetch_payload(
                msg_id,
                apply_markdown=apply_markdown,
                client_gravatar=client_gravatar,
            )
            self.assertEqual(send_message_payload, fetch_payload)
    @slow('builds lots of messages')
    def test_bulk_message_fetching(self) -> None:
        """Bulk fetching many messages stays within time and query budgets."""
        sender = self.example_user('othello')
        receiver = self.example_user('hamlet')
        pm_recipient = Recipient.objects.get(type_id=receiver.id, type=Recipient.PERSONAL)
        stream_name = 'Çiğdem'
        stream = self.make_stream(stream_name)
        stream_recipient = Recipient.objects.get(type_id=stream.id, type=Recipient.STREAM)
        sending_client = make_client(name="test suite")
        ids = []
        for i in range(300):
            for recipient in [pm_recipient, stream_recipient]:
                message = Message(
                    sender=sender,
                    recipient=recipient,
                    content='whatever %d' % (i,),
                    rendered_content='DOES NOT MATTER',
                    rendered_content_version=bugdown.version,
                    date_sent=timezone_now(),
                    sending_client=sending_client,
                    last_edit_time=timezone_now(),
                    edit_history='[]'
                )
                message.set_topic_name('whatever')
                message.save()
                ids.append(message.id)
                Reaction.objects.create(user_profile=sender, message=message,
                                        emoji_name='simple_smile')
        num_ids = len(ids)
        self.assertTrue(num_ids >= 600)
        flush_per_request_caches()
        t = time.time()
        with queries_captured() as queries:
            rows = list(MessageDict.get_raw_db_rows(ids))
            objs = [
                MessageDict.build_dict_from_raw_db_row(row)
                for row in rows
            ]
            MessageDict.post_process_dicts(objs, apply_markdown=False, client_gravatar=False)
        delay = time.time() - t
        # Make sure we don't take longer than 1.5ms per message to
        # extract messages.
        error_msg = "Number of ids: {}. Time delay: {}".format(num_ids, delay)
        self.assertTrue(delay < 0.0015 * num_ids, error_msg)
        self.assert_length(queries, 7)
        self.assertEqual(len(rows), num_ids)
    def test_applying_markdown(self) -> None:
        """Fetching an unrendered message renders it and saves the result."""
        sender = self.example_user('othello')
        receiver = self.example_user('hamlet')
        recipient = Recipient.objects.get(type_id=receiver.id, type=Recipient.PERSONAL)
        sending_client = make_client(name="test suite")
        message = Message(
            sender=sender,
            recipient=recipient,
            content='hello **world**',
            date_sent=timezone_now(),
            sending_client=sending_client,
            last_edit_time=timezone_now(),
            edit_history='[]'
        )
        message.set_topic_name('whatever')
        message.save()
        # The fetch path should lazily render the markdown...
        row = MessageDict.get_raw_db_rows([message.id])[0]
        dct = MessageDict.build_dict_from_raw_db_row(row)
        expected_content = '<p>hello <strong>world</strong></p>'
        self.assertEqual(dct['rendered_content'], expected_content)
        # ...and persist the rendered content plus renderer version.
        message = Message.objects.get(id=message.id)
        self.assertEqual(message.rendered_content, expected_content)
        self.assertEqual(message.rendered_content_version, bugdown.version)
    @mock.patch("zerver.lib.message.bugdown.convert")
    def test_applying_markdown_invalid_format(self, convert_mock: Any) -> None:
        # pretend the converter returned an invalid message without raising an exception
        convert_mock.return_value = None
        sender = self.example_user('othello')
        receiver = self.example_user('hamlet')
        recipient = Recipient.objects.get(type_id=receiver.id, type=Recipient.PERSONAL)
        sending_client = make_client(name="test suite")
        message = Message(
            sender=sender,
            recipient=recipient,
            content='hello **world**',
            date_sent=timezone_now(),
            sending_client=sending_client,
            last_edit_time=timezone_now(),
            edit_history='[]'
        )
        message.set_topic_name('whatever')
        message.save()
        # An important part of this test is to get the message through this exact code path,
        # because there is an ugly hack we need to cover.  So don't just say "row = message".
        row = MessageDict.get_raw_db_rows([message.id])[0]
        dct = MessageDict.build_dict_from_raw_db_row(row)
        error_content = '<p>[Zulip note: Sorry, we could not understand the formatting of your message]</p>'
        self.assertEqual(dct['rendered_content'], error_content)
    def test_topic_links_use_stream_realm(self) -> None:
        """Topic links are computed from the stream's realm filters,
        regardless of the sender's realm."""
        # Set up a realm filter in 'zulip' and assert that messages to a stream
        # in that realm have topic links computed with it, for any sender.
        zulip_realm = get_realm('zulip')
        url_format_string = r"https://trac.zulip.net/ticket/%(id)s"
        url = 'https://trac.zulip.net/ticket/123'
        topic_name = 'test #123'
        realm_filter = RealmFilter(realm=zulip_realm,
                                   pattern=r"#(?P<id>[0-9]{2,8})",
                                   url_format_string=url_format_string)
        self.assertEqual(
            realm_filter.__str__(),
            '<RealmFilter(zulip): #(?P<id>[0-9]{2,8})'
            ' https://trac.zulip.net/ticket/%(id)s>')
        def get_message(sender: UserProfile) -> Message:
            msg_id = self.send_stream_message(sender, 'Denmark', 'hello world', topic_name,
                                              zulip_realm)
            return Message.objects.get(id=msg_id)
        def assert_topic_links(links: List[str], msg: Message) -> None:
            dct = MessageDict.to_dict_uncached_helper([msg])[0]
            self.assertEqual(dct[TOPIC_LINKS], links)
        # Before the filter is saved, no topic links are generated.
        assert_topic_links([], get_message(self.example_user('othello')))
        assert_topic_links([], get_message(self.lear_user('cordelia')))
        assert_topic_links([], get_message(self.notification_bot()))
        # After saving, the stream realm's filter applies to every sender.
        realm_filter.save()
        assert_topic_links([url], get_message(self.example_user('othello')))
        assert_topic_links([url], get_message(self.lear_user('cordelia')))
        assert_topic_links([url], get_message(self.notification_bot()))
    def test_reaction(self) -> None:
        """Reactions on a message are embedded in its MessageDict payload."""
        sender = self.example_user('othello')
        receiver = self.example_user('hamlet')
        recipient = Recipient.objects.get(type_id=receiver.id, type=Recipient.PERSONAL)
        sending_client = make_client(name="test suite")
        message = Message(
            sender=sender,
            recipient=recipient,
            content='hello **world**',
            date_sent=timezone_now(),
            sending_client=sending_client,
            last_edit_time=timezone_now(),
            edit_history='[]'
        )
        message.set_topic_name('whatever')
        message.save()
        reaction = Reaction.objects.create(
            message=message, user_profile=sender,
            emoji_name='simple_smile')
        row = MessageDict.get_raw_db_rows([message.id])[0]
        msg_dict = MessageDict.build_dict_from_raw_db_row(row)
        self.assertEqual(msg_dict['reactions'][0]['emoji_name'],
                         reaction.emoji_name)
        self.assertEqual(msg_dict['reactions'][0]['user_id'], sender.id)
        # Reactor identity is exposed both flat and nested under 'user'.
        self.assertEqual(msg_dict['reactions'][0]['user']['id'],
                         sender.id)
        self.assertEqual(msg_dict['reactions'][0]['user']['email'],
                         sender.email)
        self.assertEqual(msg_dict['reactions'][0]['user']['full_name'],
                         sender.full_name)
    def test_missing_anchor(self) -> None:
        """GET /json/messages without an anchor is a client error."""
        self.login('hamlet')
        result = self.client_get(
            '/json/messages?use_first_unread_anchor=false&num_before=1&num_after=1')
        self.assert_json_error(
            result, "Missing 'anchor' argument.")
    def test_invalid_anchor(self) -> None:
        """GET /json/messages with a non-numeric anchor is a client error."""
        self.login('hamlet')
        result = self.client_get(
            '/json/messages?use_first_unread_anchor=false&num_before=1&num_after=1&anchor=chocolate')
        self.assert_json_error(
            result, "Invalid anchor")
class SewMessageAndReactionTest(ZulipTestCase):
    """Verify that sew_messages_and_reactions() attaches each reaction
    row to the dict of the message it belongs to."""

    def test_sew_messages_and_reaction(self) -> None:
        reactor = self.example_user('othello')
        pm_receiver = self.example_user('hamlet')
        personal_recipient = Recipient.objects.get(type_id=pm_receiver.id, type=Recipient.PERSONAL)
        stream_name = 'Çiğdem'
        stream = self.make_stream(stream_name)
        stream_recipient = Recipient.objects.get(type_id=stream.id, type=Recipient.STREAM)
        client = make_client(name="test suite")

        # Create five PM/stream message pairs, each with one reaction.
        message_ids = []
        for n in range(5):
            for target in (personal_recipient, stream_recipient):
                msg = Message(
                    sender=reactor,
                    recipient=target,
                    content='whatever %d' % (n,),
                    date_sent=timezone_now(),
                    sending_client=client,
                    last_edit_time=timezone_now(),
                    edit_history='[]'
                )
                msg.set_topic_name('whatever')
                msg.save()
                message_ids.append(msg.id)
                Reaction(user_profile=reactor, message=msg,
                         emoji_name='simple_smile').save()

        message_rows = Message.objects.filter(id__in=message_ids).values(
            'id', 'content')
        reaction_rows = Reaction.get_raw_db_rows(message_ids)
        # Every sewn message dict should carry exactly its one reaction.
        for sewn in sew_messages_and_reactions(message_rows, reaction_rows):
            self.assertEqual(len(sewn['reactions']), 1)
            self.assertEqual(sewn['reactions'][0]['emoji_name'],
                             'simple_smile')
            self.assertTrue(sewn['id'])
            self.assertTrue(sewn['content'])
class MessagePOSTTest(ZulipTestCase):
def _send_and_verify_message(self, user: UserProfile, stream_name: str, error_msg: str=None) -> None:
if error_msg is None:
msg_id = self.send_stream_message(user, stream_name)
result = self.api_get(user, '/json/messages/' + str(msg_id))
self.assert_json_success(result)
else:
with self.assertRaisesRegex(JsonableError, error_msg):
self.send_stream_message(user, stream_name)
def test_message_to_self(self) -> None:
self.login('hamlet')
result = self.client_post("/json/messages", {"type": "stream",
"to": "Verona",
"client": "test suite",
"content": "Test message",
"topic": "Test topic"})
self.assert_json_success(result)
def test_api_message_to_self(self) -> None:
user = self.example_user('hamlet')
result = self.api_post(user, "/api/v1/messages", {"type": "stream",
"to": "Verona",
"client": "test suite",
"content": "Test message",
"topic": "Test topic"})
self.assert_json_success(result)
def test_message_to_stream_with_nonexistent_id(self) -> None:
cordelia = self.example_user('cordelia')
bot = self.create_test_bot(
short_name='whatever',
user_profile=cordelia,
)
result = self.api_post(
bot, "/api/v1/messages",
{
"type": "stream",
"to": ujson.dumps([99999]),
"client": "test suite",
"content": "Stream message by ID.",
"topic": "Test topic for stream ID message"
}
)
self.assert_json_error(result, "Stream with ID '99999' does not exist")
msg = self.get_last_message()
expected = ("Your bot `whatever-bot@zulip.testserver` tried to send a message to "
"stream ID 99999, but there is no stream with that ID.")
self.assertEqual(msg.content, expected)
def test_message_to_stream_by_id(self) -> None:
self.login('hamlet')
realm = get_realm('zulip')
stream = get_stream('Verona', realm)
result = self.client_post("/json/messages", {"type": "stream",
"to": ujson.dumps([stream.id]),
"client": "test suite",
"content": "Stream message by ID.",
"topic": "Test topic for stream ID message"})
self.assert_json_success(result)
sent_message = self.get_last_message()
self.assertEqual(sent_message.content, "Stream message by ID.")
    def test_sending_message_as_stream_post_policy_admins(self) -> None:
        """With STREAM_POST_POLICY_ADMINS, only admins (and their bots)
        may post; system bots are always allowed."""
        admin_profile = self.example_user("iago")
        self.login_user(admin_profile)
        stream_name = "Verona"
        stream = get_stream(stream_name, admin_profile.realm)
        do_change_stream_post_policy(stream, Stream.STREAM_POST_POLICY_ADMINS)
        # Admins and bots owned by admins can post.
        self._send_and_verify_message(admin_profile, stream_name)
        admin_owned_bot = self.create_test_bot(
            short_name='whatever1',
            full_name='whatever1',
            user_profile=admin_profile,
        )
        self._send_and_verify_message(admin_owned_bot, stream_name)
        # Non-admins (and their bots, and ownerless bots) cannot post.
        non_admin_profile = self.example_user("hamlet")
        self.login_user(non_admin_profile)
        self._send_and_verify_message(non_admin_profile, stream_name,
                                      "Only organization administrators can send to this stream.")
        non_admin_owned_bot = self.create_test_bot(
            short_name='whatever2',
            full_name='whatever2',
            user_profile=non_admin_profile,
        )
        self._send_and_verify_message(non_admin_owned_bot, stream_name,
                                      "Only organization administrators can send to this stream.")
        bot_without_owner = do_create_user(
            email='free-bot@zulip.testserver',
            password='',
            realm=non_admin_profile.realm,
            full_name='freebot',
            short_name='freebot',
            bot_type=UserProfile.DEFAULT_BOT,
        )
        self._send_and_verify_message(bot_without_owner, stream_name,
                                      "Only organization administrators can send to this stream.")
        # System bots are exempt from stream post policies.
        notification_bot = get_system_bot("notification-bot@zulip.com")
        internal_send_stream_message(stream.realm, notification_bot, stream,
                                     'Test topic', 'Test message by notification bot')
        self.assertEqual(self.get_last_message().content, 'Test message by notification bot')
def test_sending_message_as_stream_post_policy_restrict_new_members(self) -> None:
admin_profile = self.example_user("iago")
self.login_user(admin_profile)
do_set_realm_property(admin_profile.realm, 'waiting_period_threshold', 10)
admin_profile.date_joined = timezone_now() - datetime.timedelta(days=9)
admin_profile.save()
self.assertTrue(admin_profile.is_new_member)
self.assertTrue(admin_profile.is_realm_admin)
stream_name = "Verona"
stream = get_stream(stream_name, admin_profile.realm)
do_change_stream_post_policy(stream, Stream.STREAM_POST_POLICY_RESTRICT_NEW_MEMBERS)
self._send_and_verify_message(admin_profile, stream_name)
admin_owned_bot = self.create_test_bot(
short_name='whatever1',
full_name='whatever1',
user_profile=admin_profile,
)
self._send_and_verify_message(admin_owned_bot, stream_name)
non_admin_profile = self.example_user("hamlet")
self.login_user(non_admin_profile)
non_admin_profile.date_joined = timezone_now() - datetime.timedelta(days=9)
non_admin_profile.save()
self.assertTrue(non_admin_profile.is_new_member)
self.assertFalse(non_admin_profile.is_realm_admin)
self._send_and_verify_message(non_admin_profile, stream_name,
"New members cannot send to this stream.")
non_admin_owned_bot = self.create_test_bot(
short_name='whatever2',
full_name='whatever2',
user_profile=non_admin_profile,
)
self._send_and_verify_message(non_admin_owned_bot, stream_name,
"New members cannot send to this stream.")
bot_without_owner = do_create_user(
email='free-bot@zulip.testserver',
password='',
realm=non_admin_profile.realm,
full_name='freebot',
short_name='freebot',
bot_type=UserProfile.DEFAULT_BOT,
)
self._send_and_verify_message(bot_without_owner, stream_name,
"New members cannot send to this stream.")
notification_bot = get_system_bot("notification-bot@zulip.com")
internal_send_stream_message(stream.realm, notification_bot, stream,
'Test topic', 'Test message by notification bot')
self.assertEqual(self.get_last_message().content, 'Test message by notification bot')
def test_api_message_with_default_to(self) -> None:
user = self.example_user('hamlet')
user.default_sending_stream_id = get_stream('Verona', user.realm).id
user.save()
result = self.api_post(user, "/api/v1/messages", {"type": "stream",
"client": "test suite",
"content": "Test message no to",
"topic": "Test topic"})
self.assert_json_success(result)
sent_message = self.get_last_message()
self.assertEqual(sent_message.content, "Test message no to")
def test_message_to_nonexistent_stream(self) -> None:
self.login('hamlet')
self.assertFalse(Stream.objects.filter(name="nonexistent_stream"))
result = self.client_post("/json/messages", {"type": "stream",
"to": "nonexistent_stream",
"client": "test suite",
"content": "Test message",
"topic": "Test topic"})
self.assert_json_error(result, "Stream 'nonexistent_stream' does not exist")
def test_message_to_nonexistent_stream_with_bad_characters(self) -> None:
self.login('hamlet')
self.assertFalse(Stream.objects.filter(name="""&<"'><non-existent>"""))
result = self.client_post("/json/messages", {"type": "stream",
"to": """&<"'><non-existent>""",
"client": "test suite",
"content": "Test message",
"topic": "Test topic"})
self.assert_json_error(result, "Stream '&<"'><non-existent>' does not exist")
    # A 1:1 PM updates get_recent_private_conversations; a self-PM creates a
    # second conversation whose user_ids set is empty (the sender is excluded).
    def test_personal_message(self) -> None:
        user_profile = self.example_user("hamlet")
        self.login_user(user_profile)
        othello = self.example_user('othello')
        result = self.client_post("/json/messages", {"type": "private",
                                                     "content": "Test message",
                                                     "client": "test suite",
                                                     "to": othello.email})
        self.assert_json_success(result)
        message_id = ujson.loads(result.content.decode())['id']

        recent_conversations = get_recent_private_conversations(user_profile)
        self.assertEqual(len(recent_conversations), 1)
        recent_conversation = list(recent_conversations.values())[0]
        recipient_id = list(recent_conversations.keys())[0]
        self.assertEqual(set(recent_conversation['user_ids']), {othello.id})
        self.assertEqual(recent_conversation['max_message_id'], message_id)

        # Now send a message to yourself and see how that interacts with the data structure.
        result = self.client_post("/json/messages", {"type": "private",
                                                     "content": "Test message",
                                                     "client": "test suite",
                                                     "to": user_profile.email})
        self.assert_json_success(result)
        self_message_id = ujson.loads(result.content.decode())['id']

        recent_conversations = get_recent_private_conversations(user_profile)
        self.assertEqual(len(recent_conversations), 2)
        recent_conversation = recent_conversations[recipient_id]
        self.assertEqual(set(recent_conversation['user_ids']), {othello.id})
        self.assertEqual(recent_conversation['max_message_id'], message_id)

        # The self-PM data structure should return a list of user_ids, which is empty.
        del recent_conversations[recipient_id]
        recent_conversation = list(recent_conversations.values())[0]
        recipient_id = list(recent_conversations.keys())[0]
        self.assertEqual(set(recent_conversation['user_ids']), set())
        self.assertEqual(recent_conversation['max_message_id'], self_message_id)

    # PM recipients can be given as a JSON list of user IDs.
    def test_personal_message_by_id(self) -> None:
        self.login('hamlet')
        result = self.client_post(
            "/json/messages",
            {
                "type": "private",
                "content": "Test message",
                "client": "test suite",
                "to": ujson.dumps([self.example_user("othello").id])
            }
        )
        self.assert_json_success(result)

        msg = self.get_last_message()
        self.assertEqual("Test message", msg.content)
        self.assertEqual(msg.recipient_id, self.example_user("othello").id)

    # Multiple recipient IDs produce a huddle recipient including the sender.
    def test_group_personal_message_by_id(self) -> None:
        self.login('hamlet')
        result = self.client_post(
            "/json/messages",
            {
                "type": "private",
                "content": "Test message",
                "client": "test suite",
                "to": ujson.dumps([self.example_user("othello").id,
                                   self.example_user("cordelia").id])
            }
        )
        self.assert_json_success(result)

        msg = self.get_last_message()
        self.assertEqual("Test message", msg.content)
        self.assertEqual(msg.recipient_id, get_huddle_recipient(
            {self.example_user("hamlet").id,
             self.example_user("othello").id,
             self.example_user("cordelia").id}).id
        )

    # Including yourself in a group PM collapses to a plain PM to the other user.
    def test_personal_message_copying_self(self) -> None:
        hamlet = self.example_user('hamlet')
        othello = self.example_user('othello')
        self.login_user(hamlet)
        result = self.client_post("/json/messages", {
            "type": "private",
            "content": "Test message",
            "client": "test suite",
            "to": ujson.dumps([hamlet.id, othello.id])})
        self.assert_json_success(result)
        msg = self.get_last_message()
        # Verify that we're not actually on the "recipient list"
        self.assertNotIn("Hamlet", str(msg.recipient))

    # An unknown email address in "to" is rejected with an invalid-email error.
    def test_personal_message_to_nonexistent_user(self) -> None:
        self.login('hamlet')
        result = self.client_post("/json/messages", {"type": "private",
                                                     "content": "Test message",
                                                     "client": "test suite",
                                                     "to": "nonexistent"})
        self.assert_json_error(result, "Invalid email 'nonexistent'")

    # PMs to a deactivated user fail whether sent alone or in a group.
    def test_personal_message_to_deactivated_user(self) -> None:
        othello = self.example_user('othello')
        cordelia = self.example_user('cordelia')
        do_deactivate_user(othello)
        self.login('hamlet')

        result = self.client_post("/json/messages", {
            "type": "private",
            "content": "Test message",
            "client": "test suite",
            "to": ujson.dumps([othello.id])})
        self.assert_json_error(result, "'{}' is no longer using Zulip.".format(othello.email))

        result = self.client_post("/json/messages", {
            "type": "private",
            "content": "Test message",
            "client": "test suite",
            "to": ujson.dumps([othello.id, cordelia.id])})
        self.assert_json_error(result, "'{}' is no longer using Zulip.".format(othello.email))

    # An unknown "type" value is rejected.
    def test_invalid_type(self) -> None:
        self.login('hamlet')
        othello = self.example_user('othello')
        result = self.client_post("/json/messages", {"type": "invalid type",
                                                     "content": "Test message",
                                                     "client": "test suite",
                                                     "to": othello.email})
        self.assert_json_error(result, "Invalid message type")

    # Whitespace-only content is rejected as empty.
    def test_empty_message(self) -> None:
        self.login('hamlet')
        othello = self.example_user('othello')
        result = self.client_post("/json/messages", {"type": "private",
                                                     "content": " ",
                                                     "client": "test suite",
                                                     "to": othello.email})
        self.assert_json_error(result, "Message must not be empty")

    # A stream message with an empty topic string is rejected.
    def test_empty_string_topic(self) -> None:
        self.login('hamlet')
        result = self.client_post("/json/messages", {"type": "stream",
                                                     "to": "Verona",
                                                     "client": "test suite",
                                                     "content": "Test message",
                                                     "topic": ""})
        self.assert_json_error(result, "Topic can't be empty")

    # Omitting the topic parameter entirely is a distinct error from an empty one.
    def test_missing_topic(self) -> None:
        self.login('hamlet')
        result = self.client_post("/json/messages", {"type": "stream",
                                                     "to": "Verona",
                                                     "client": "test suite",
                                                     "content": "Test message"})
        self.assert_json_error(result, "Missing topic")

    # "type": "invalid" with otherwise-valid stream fields is still rejected.
    def test_invalid_message_type(self) -> None:
        self.login('hamlet')
        result = self.client_post("/json/messages", {"type": "invalid",
                                                     "to": "Verona",
                                                     "client": "test suite",
                                                     "content": "Test message",
                                                     "topic": "Test topic"})
        self.assert_json_error(result, "Invalid message type")

    # A private message with an empty recipient list is rejected.
    def test_private_message_without_recipients(self) -> None:
        self.login('hamlet')
        result = self.client_post("/json/messages", {"type": "private",
                                                     "content": "Test content",
                                                     "client": "test suite",
                                                     "to": ""})
        self.assert_json_error(result, "Message must have recipients")
    # zephyr_mirror clients may forge a "sender" when posting on the zephyr
    # subdomain; a mirrored huddle including the requester succeeds.
    def test_mirrored_huddle(self) -> None:
        result = self.api_post(self.mit_user("starnine"),
                               "/json/messages", {"type": "private",
                                                  "sender": self.mit_email("sipbtest"),
                                                  "content": "Test message",
                                                  "client": "zephyr_mirror",
                                                  "to": ujson.dumps([self.mit_email("starnine"),
                                                                     self.mit_email("espuser")])},
                               subdomain="zephyr")
        self.assert_json_success(result)

    # A mirrored 1:1 PM addressed to the requester succeeds.
    def test_mirrored_personal(self) -> None:
        result = self.api_post(self.mit_user("starnine"),
                               "/json/messages", {"type": "private",
                                                  "sender": self.mit_email("sipbtest"),
                                                  "content": "Test message",
                                                  "client": "zephyr_mirror",
                                                  "to": self.mit_email("starnine")},
                               subdomain="zephyr")
        self.assert_json_success(result)

    # The same mirrored payload through a browser session (client_post, not API
    # key) is rejected as an invalid mirrored message.
    def test_mirrored_personal_browser(self) -> None:
        user = self.mit_user('starnine')
        self.login_user(user)
        result = self.client_post("/json/messages",
                                  {"type": "private",
                                   "sender": self.mit_email("sipbtest"),
                                   "content": "Test message",
                                   "client": "zephyr_mirror",
                                   "to": self.mit_email("starnine")},
                                  subdomain="zephyr")
        self.assert_json_error(result, "Invalid mirrored message")

    # Mirroring a PM whose recipient is someone other than the requester is
    # not authorized.
    def test_mirrored_personal_to_someone_else(self) -> None:
        result = self.api_post(self.mit_user("starnine"), "/api/v1/messages",
                               {"type": "private",
                                "sender": self.mit_email("sipbtest"),
                                "content": "Test message",
                                "client": "zephyr_mirror",
                                "to": self.mit_email("espuser")},
                               subdomain="zephyr")
        self.assert_json_error(result, "User not authorized for this query")

    # The same mirrored huddle sent by two different mirrors deduplicates to a
    # single message (both posts return the same message id).
    def test_duplicated_mirrored_huddle(self) -> None:
        msg = {"type": "private",
               "sender": self.mit_email("sipbtest"),
               "content": "Test message",
               "client": "zephyr_mirror",
               "to": ujson.dumps([self.mit_email("espuser"),
                                  self.mit_email("starnine")])}

        with mock.patch('DNS.dnslookup', return_value=[['starnine:*:84233:101:Athena Consulting Exchange User,,,:/mit/starnine:/bin/bash']]):
            result1 = self.api_post(self.mit_user("starnine"), "/api/v1/messages", msg,
                                    subdomain="zephyr")
            self.assert_json_success(result1)

        with mock.patch('DNS.dnslookup', return_value=[['espuser:*:95494:101:Esp Classroom,,,:/mit/espuser:/bin/athena/bash']]):
            result2 = self.api_post(self.mit_user("espuser"), "/api/v1/messages", msg,
                                    subdomain="zephyr")
            self.assert_json_success(result2)

        self.assertEqual(ujson.loads(result1.content)['id'],
                         ujson.loads(result2.content)['id'])

    # NUL bytes in message content are rejected outright.
    def test_message_with_null_bytes(self) -> None:
        self.login('hamlet')
        post_data = {"type": "stream", "to": "Verona", "client": "test suite",
                     "content": "  I like null bytes \x00 in my content", "topic": "Test topic"}
        result = self.client_post("/json/messages", post_data)
        self.assert_json_error(result, "Message must not contain null bytes")

    # Trailing whitespace/newlines are stripped from content before saving.
    def test_strip_message(self) -> None:
        self.login('hamlet')
        post_data = {"type": "stream", "to": "Verona", "client": "test suite",
                     "content": "  I like whitespace at the end! \n\n \n", "topic": "Test topic"}
        result = self.client_post("/json/messages", post_data)
        self.assert_json_success(result)
        sent_message = self.get_last_message()
        self.assertEqual(sent_message.content, "  I like whitespace at the end!")

    # Over-length content is accepted but truncated with a "[message truncated]"
    # marker (20 chars reserved for the marker).
    def test_long_message(self) -> None:
        self.login('hamlet')
        long_message = "A" * (MAX_MESSAGE_LENGTH + 1)
        post_data = {"type": "stream", "to": "Verona", "client": "test suite",
                     "content": long_message, "topic": "Test topic"}
        result = self.client_post("/json/messages", post_data)
        self.assert_json_success(result)

        sent_message = self.get_last_message()
        self.assertEqual(sent_message.content,
                         "A" * (MAX_MESSAGE_LENGTH - 20) + "\n[message truncated]")

    # Over-length topics are truncated with a "..." suffix (3 chars reserved).
    def test_long_topic(self) -> None:
        self.login('hamlet')
        long_topic = "A" * (MAX_TOPIC_NAME_LENGTH + 1)
        post_data = {"type": "stream", "to": "Verona", "client": "test suite",
                     "content": "test content", "topic": long_topic}
        result = self.client_post("/json/messages", post_data)
        self.assert_json_success(result)

        sent_message = self.get_last_message()
        self.assertEqual(sent_message.topic_name(),
                         "A" * (MAX_TOPIC_NAME_LENGTH - 3) + "...")
    # Only API super users may set forged=true; a regular user is rejected.
    def test_send_forged_message_as_not_superuser(self) -> None:
        self.login('hamlet')
        result = self.client_post("/json/messages", {"type": "stream",
                                                     "to": "Verona",
                                                     "client": "test suite",
                                                     "content": "Test message",
                                                     "topic": "Test topic",
                                                     "forged": "true"})
        self.assert_json_error(result, "User not authorized for this query")

    # A regular user cannot target another realm via realm_str.
    def test_send_message_as_not_superuser_to_different_domain(self) -> None:
        self.login('hamlet')
        result = self.client_post("/json/messages", {"type": "stream",
                                                     "to": "Verona",
                                                     "client": "test suite",
                                                     "content": "Test message",
                                                     "topic": "Test topic",
                                                     "realm_str": "mit"})
        self.assert_json_error(result, "User not authorized for this query")

    # Even a super user gets an error for a realm_str that matches no realm.
    def test_send_message_as_superuser_to_domain_that_dont_exist(self) -> None:
        user = self.example_user("default_bot")
        password = "test_password"
        user.set_password(password)
        user.is_api_super_user = True
        user.save()
        result = self.api_post(user,
                               "/api/v1/messages", {"type": "stream",
                                                    "to": "Verona",
                                                    "client": "test suite",
                                                    "content": "Test message",
                                                    "topic": "Test topic",
                                                    "realm_str": "non-existing"})
        # Restore the flag so later tests see the fixture unchanged.
        user.is_api_super_user = False
        user.save()
        self.assert_json_error(result, "Unknown organization 'non-existing'")

    # A zephyr_mirror message without a "sender" field is rejected.
    def test_send_message_when_sender_is_not_set(self) -> None:
        result = self.api_post(self.mit_user("starnine"), "/api/v1/messages",
                               {"type": "private",
                                "content": "Test message",
                                "client": "zephyr_mirror",
                                "to": self.mit_email("starnine")},
                               subdomain="zephyr")
        self.assert_json_error(result, "Missing sender")

    # Mirrored sends may only be of type "private"; other types need super user.
    def test_send_message_as_not_superuser_when_type_is_not_private(self) -> None:
        result = self.api_post(self.mit_user("starnine"), "/api/v1/messages",
                               {"type": "not-private",
                                "sender": self.mit_email("sipbtest"),
                                "content": "Test message",
                                "client": "zephyr_mirror",
                                "to": self.mit_email("starnine")},
                               subdomain="zephyr")
        self.assert_json_error(result, "User not authorized for this query")

    # If create_mirrored_message_users raises InvalidMirrorInput, the request
    # surfaces "Invalid mirrored message".
    @mock.patch("zerver.views.messages.create_mirrored_message_users")
    def test_send_message_create_mirrored_message_user_returns_invalid_input(
            self, create_mirrored_message_users_mock: Any) -> None:
        create_mirrored_message_users_mock.side_effect = InvalidMirrorInput()
        result = self.api_post(self.mit_user("starnine"), "/api/v1/messages",
                               {"type": "private",
                                "sender": self.mit_email("sipbtest"),
                                "content": "Test message",
                                "client": "zephyr_mirror",
                                "to": self.mit_email("starnine")},
                               subdomain="zephyr")
        self.assert_json_error(result, "Invalid mirrored message")

    # Zephyr mirroring is only permitted for realms actually named "zephyr".
    @mock.patch("zerver.views.messages.create_mirrored_message_users")
    def test_send_message_when_client_is_zephyr_mirror_but_string_id_is_not_zephyr(
            self, create_mirrored_message_users_mock: Any) -> None:
        create_mirrored_message_users_mock.return_value = mock.Mock()
        user = self.mit_user("starnine")
        user.realm.string_id = 'notzephyr'
        user.realm.save()
        result = self.api_post(user, "/api/v1/messages",
                               {"type": "private",
                                "sender": self.mit_email("sipbtest"),
                                "content": "Test message",
                                "client": "zephyr_mirror",
                                "to": user.email},
                               subdomain="notzephyr")
        self.assert_json_error(result, "Zephyr mirroring is not allowed in this organization")

    # Mirrored messages must address recipients by email, not user ID.
    @mock.patch("zerver.views.messages.create_mirrored_message_users")
    def test_send_message_when_client_is_zephyr_mirror_but_recipient_is_user_id(
            self, create_mirrored_message_users_mock: Any) -> None:
        create_mirrored_message_users_mock.return_value = mock.Mock()
        user = self.mit_user("starnine")
        self.login_user(user)
        result = self.api_post(user, "/api/v1/messages",
                               {"type": "private",
                                "sender": self.mit_email("sipbtest"),
                                "content": "Test message",
                                "client": "zephyr_mirror",
                                "to": ujson.dumps([user.id])},
                               subdomain="zephyr")
        self.assert_json_error(result, "Mirroring not allowed with recipient user IDs")
    # An irc_mirror super-user bot can forge sender and timestamp; the stored
    # message keeps the forged date_sent.  Both "true" and "yes" are accepted
    # for the "forged" flag.
    def test_send_message_irc_mirror(self) -> None:
        reset_emails_in_zulip_realm()
        self.login('hamlet')
        bot_info = {
            'full_name': 'IRC bot',
            'short_name': 'irc',
        }
        result = self.client_post("/json/bots", bot_info)
        self.assert_json_success(result)

        email = "irc-bot@zulip.testserver"
        user = get_user(email, get_realm('zulip'))
        user.is_api_super_user = True
        user.save()
        user = get_user(email, get_realm('zulip'))
        self.subscribe(user, "IRCland")

        # Simulate a mirrored message with a slightly old timestamp.
        fake_date_sent = timezone_now() - datetime.timedelta(minutes=37)
        fake_timestamp = datetime_to_timestamp(fake_date_sent)

        result = self.api_post(user, "/api/v1/messages", {"type": "stream",
                                                          "forged": "true",
                                                          "time": fake_timestamp,
                                                          "sender": "irc-user@irc.zulip.com",
                                                          "content": "Test message",
                                                          "client": "irc_mirror",
                                                          "topic": "from irc",
                                                          "to": "IRCLand"})
        self.assert_json_success(result)

        msg = self.get_last_message()
        self.assertEqual(int(datetime_to_timestamp(msg.date_sent)), int(fake_timestamp))

        # Now test again using forged=yes
        fake_date_sent = timezone_now() - datetime.timedelta(minutes=22)
        fake_timestamp = datetime_to_timestamp(fake_date_sent)

        result = self.api_post(user, "/api/v1/messages", {"type": "stream",
                                                          "forged": "yes",
                                                          "time": fake_timestamp,
                                                          "sender": "irc-user@irc.zulip.com",
                                                          "content": "Test message",
                                                          "client": "irc_mirror",
                                                          "topic": "from irc",
                                                          "to": "IRCLand"})
        self.assert_json_success(result)

        msg = self.get_last_message()
        self.assertEqual(int(datetime_to_timestamp(msg.date_sent)), int(fake_timestamp))

    # A super user can post (plain or forged) to a private stream they are not
    # subscribed to; the same payloads fail without the super-user bit.
    def test_unsubscribed_api_super_user(self) -> None:
        reset_emails_in_zulip_realm()

        cordelia = self.example_user('cordelia')
        stream_name = 'private_stream'
        self.make_stream(stream_name, invite_only=True)

        self.unsubscribe(cordelia, stream_name)

        # Exercise one payload with the super-user bit off (expect an
        # authorization error) and then on (expect success).
        def test_with(sender_email: str, client: str, forged: bool) -> None:
            payload = dict(
                type="stream",
                to=stream_name,
                client=client,
                topic='whatever',
                content='whatever',
                forged=ujson.dumps(forged),
            )

            # Only pass the 'sender' property when doing mirroring behavior.
            if forged:
                payload['sender'] = sender_email

            cordelia.is_api_super_user = False
            cordelia.save()

            result = self.api_post(cordelia, "/api/v1/messages", payload)
            self.assert_json_error_contains(result, 'authorized')

            cordelia.is_api_super_user = True
            cordelia.save()

            result = self.api_post(cordelia, "/api/v1/messages", payload)
            self.assert_json_success(result)

        test_with(
            sender_email=cordelia.email,
            client='test suite',
            forged=False,
        )

        test_with(
            sender_email='irc_person@zulip.com',
            client='irc_mirror',
            forged=True,
        )

    # A bot may post to a private stream iff its owner is subscribed.
    def test_bot_can_send_to_owner_stream(self) -> None:
        cordelia = self.example_user('cordelia')
        bot = self.create_test_bot(
            short_name='whatever',
            user_profile=cordelia,
        )

        stream_name = 'private_stream'
        self.make_stream(stream_name, invite_only=True)

        payload = dict(
            type="stream",
            to=stream_name,
            client='test suite',
            topic='whatever',
            content='whatever',
        )

        result = self.api_post(bot, "/api/v1/messages", payload)
        self.assert_json_error_contains(result, 'Not authorized to send')

        # We subscribe the bot owner! (not the bot itself)
        self.subscribe(bot.bot_owner, stream_name)

        result = self.api_post(bot, "/api/v1/messages", payload)
        self.assert_json_success(result)

    # Cross-realm system bots can use the API on their own (zulipinternal)
    # subdomain and the stored message is attributed to them.
    def test_cross_realm_bots_can_use_api_on_own_subdomain(self) -> None:
        # Cross realm bots should use internal_send_*_message, not the API:
        notification_bot = self.notification_bot()
        stream = self.make_stream("notify_channel", get_realm("zulipinternal"))

        result = self.api_post(notification_bot,
                               "/api/v1/messages",
                               {"type": "stream",
                                "to": "notify_channel",
                                "client": "test suite",
                                "content": "Test message",
                                "topic": "Test topic"},
                               subdomain='zulipinternal')

        self.assert_json_success(result)
        message = self.get_last_message()

        self.assertEqual(message.content, "Test message")
        self.assertEqual(message.sender, notification_bot)
        self.assertEqual(message.recipient.type_id, stream.id)

    # If two processes race to create the same mirror dummy user, the loser's
    # IntegrityError is handled and the existing profile is returned.
    def test_create_mirror_user_despite_race(self) -> None:
        realm = get_realm('zulip')

        email = 'fred@example.com'

        email_to_full_name = lambda email: 'fred'

        # Stand-in for zerver.lib.actions.create_user that saves the profile
        # itself and then raises, simulating a concurrent insert.
        def create_user(**kwargs: Any) -> UserProfile:
            self.assertEqual(kwargs['full_name'], 'fred')
            self.assertEqual(kwargs['email'], email)
            self.assertEqual(kwargs['active'], False)
            self.assertEqual(kwargs['is_mirror_dummy'], True)
            # We create an actual user here to simulate a race.
            # We use the minimal, un-mocked function.
            kwargs['bot_type'] = None
            kwargs['bot_owner'] = None
            kwargs['tos_version'] = None
            kwargs['timezone'] = timezone_now()
            create_user_profile(**kwargs).save()
            raise IntegrityError()

        with mock.patch('zerver.lib.actions.create_user',
                        side_effect=create_user) as m:
            mirror_fred_user = create_mirror_user_if_needed(
                realm,
                email,
                email_to_full_name,
            )

        self.assertEqual(mirror_fred_user.delivery_email, email)
        m.assert_called()

    # Guests cannot post to public streams they are not subscribed to, but can
    # post once subscribed.
    def test_guest_user(self) -> None:
        sender = self.example_user('polonius')

        stream_name = 'public stream'
        self.make_stream(stream_name, invite_only=False)
        payload = dict(
            type="stream",
            to=stream_name,
            client='test suite',
            topic='whatever',
            content='whatever',
        )

        # Guest user can't send message to unsubscribed public streams
        result = self.api_post(sender, "/api/v1/messages", payload)
        self.assert_json_error(result, "Not authorized to send to stream 'public stream'")

        self.subscribe(sender, stream_name)
        # Guest user can send message to subscribed public streams
        result = self.api_post(sender, "/api/v1/messages", payload)
        self.assert_json_success(result)
class ScheduledMessageTest(ZulipTestCase):
    """Tests for scheduling messages (send-later and reminders) via the
    deliver_at / delivery_type parameters of the send-message endpoint."""

    # Most recently created ScheduledMessage row.
    def last_scheduled_message(self) -> ScheduledMessage:
        return ScheduledMessage.objects.all().order_by('-id')[0]

    # Helper: POST a scheduled message as hamlet.  deliver_at is only
    # included when defer_until is non-empty.
    def do_schedule_message(self, msg_type: str, to: str, msg: str,
                            defer_until: str='', tz_guess: str='',
                            delivery_type: str='send_later',
                            realm_str: str='zulip') -> HttpResponse:
        self.login('hamlet')

        topic_name = ''
        if msg_type == 'stream':
            topic_name = 'Test topic'

        payload = {"type": msg_type,
                   "to": to,
                   "client": "test suite",
                   "content": msg,
                   "topic": topic_name,
                   "realm_str": realm_str,
                   "delivery_type": delivery_type,
                   "tz_guess": tz_guess}
        if defer_until:
            payload["deliver_at"] = defer_until

        result = self.client_post("/json/messages", payload)
        return result

    def test_schedule_message(self) -> None:
        content = "Test message"
        defer_until = timezone_now().replace(tzinfo=None) + datetime.timedelta(days=1)
        defer_until_str = str(defer_until)

        # Scheduling a message to a stream you are subscribed is successful.
        result = self.do_schedule_message('stream', 'Verona',
                                          content + ' 1', defer_until_str)
        message = self.last_scheduled_message()
        self.assert_json_success(result)
        self.assertEqual(message.content, 'Test message 1')
        self.assertEqual(message.topic_name(), 'Test topic')
        self.assertEqual(message.scheduled_timestamp, convert_to_UTC(defer_until))
        self.assertEqual(message.delivery_type, ScheduledMessage.SEND_LATER)

        # Scheduling a message for reminders.
        result = self.do_schedule_message('stream', 'Verona',
                                          content + ' 2', defer_until_str,
                                          delivery_type='remind')
        message = self.last_scheduled_message()
        self.assert_json_success(result)
        self.assertEqual(message.delivery_type, ScheduledMessage.REMIND)

        # Scheduling a private message is successful.
        othello = self.example_user('othello')
        hamlet = self.example_user('hamlet')
        result = self.do_schedule_message('private', othello.email,
                                          content + ' 3', defer_until_str)
        message = self.last_scheduled_message()
        self.assert_json_success(result)
        self.assertEqual(message.content, 'Test message 3')
        self.assertEqual(message.scheduled_timestamp, convert_to_UTC(defer_until))
        self.assertEqual(message.delivery_type, ScheduledMessage.SEND_LATER)

        # Setting a reminder in PM's to other users causes a error.
        result = self.do_schedule_message('private', othello.email,
                                          content + ' 4', defer_until_str,
                                          delivery_type='remind')
        self.assert_json_error(result, 'Reminders can only be set for streams.')

        # Setting a reminder in PM's to ourself is successful.
        # Required by reminders from message actions popover caret feature.
        result = self.do_schedule_message('private', hamlet.email,
                                          content + ' 5', defer_until_str,
                                          delivery_type='remind')
        message = self.last_scheduled_message()
        self.assert_json_success(result)
        self.assertEqual(message.content, 'Test message 5')
        self.assertEqual(message.delivery_type, ScheduledMessage.REMIND)

        # Scheduling a message while guessing timezone.
        tz_guess = 'Asia/Kolkata'
        result = self.do_schedule_message('stream', 'Verona', content + ' 6',
                                          defer_until_str, tz_guess=tz_guess)
        message = self.last_scheduled_message()
        self.assert_json_success(result)
        self.assertEqual(message.content, 'Test message 6')
        local_tz = get_timezone(tz_guess)
        # Since mypy is not able to recognize localize and normalize as attributes of tzinfo we use ignore.
        utz_defer_until = local_tz.normalize(local_tz.localize(defer_until))  # type: ignore[attr-defined] # Reason in comment on previous line.
        self.assertEqual(message.scheduled_timestamp,
                         convert_to_UTC(utz_defer_until))
        self.assertEqual(message.delivery_type, ScheduledMessage.SEND_LATER)

        # Test with users timezone setting as set to some timezone rather than
        # empty. This will help interpret timestamp in users local timezone.
        user = self.example_user("hamlet")
        user.timezone = 'US/Pacific'
        user.save(update_fields=['timezone'])
        result = self.do_schedule_message('stream', 'Verona',
                                          content + ' 7', defer_until_str)
        message = self.last_scheduled_message()
        self.assert_json_success(result)
        self.assertEqual(message.content, 'Test message 7')
        local_tz = get_timezone(user.timezone)
        # Since mypy is not able to recognize localize and normalize as attributes of tzinfo we use ignore.
        utz_defer_until = local_tz.normalize(local_tz.localize(defer_until))  # type: ignore[attr-defined] # Reason in comment on previous line.
        self.assertEqual(message.scheduled_timestamp,
                         convert_to_UTC(utz_defer_until))
        self.assertEqual(message.delivery_type, ScheduledMessage.SEND_LATER)

    def test_scheduling_in_past(self) -> None:
        # Scheduling a message in past should fail.
        content = "Test message"
        defer_until = timezone_now()
        defer_until_str = str(defer_until)

        result = self.do_schedule_message('stream', 'Verona',
                                          content + ' 1', defer_until_str)
        self.assert_json_error(result, 'Time must be in the future.')

    def test_invalid_timestamp(self) -> None:
        # Scheduling a message from which timestamp couldn't be parsed
        # successfully should fail.
        content = "Test message"
        defer_until = 'Missed the timestamp'

        result = self.do_schedule_message('stream', 'Verona',
                                          content + ' 1', defer_until)
        self.assert_json_error(result, 'Invalid time format')

    def test_missing_deliver_at(self) -> None:
        # delivery_type is present but deliver_at is absent => error.
        content = "Test message"

        result = self.do_schedule_message('stream', 'Verona',
                                          content + ' 1')
        self.assert_json_error(result, 'Missing deliver_at in a request for delayed message delivery')
class EditMessageTest(ZulipTestCase):
    # Assert that the stored message's topic matches topic_name.
    def check_topic(self,
                    msg_id: int,
                    topic_name: str) -> None:
        msg = Message.objects.get(id=msg_id)
        self.assertEqual(msg.topic_name(), topic_name)

    # Assert topic and content both directly on the Message row and via the
    # messages_for_ids fetch path, verifying the fetch stays at 2 queries and
    # never touches the message table itself.
    def check_message(self,
                      msg_id: int,
                      topic_name: str,
                      content: str) -> None:
        # Make sure we saved the message correctly to the DB.
        msg = Message.objects.get(id=msg_id)
        self.assertEqual(msg.topic_name(), topic_name)
        self.assertEqual(msg.content, content)

        '''
        We assume our caller just edited a message.

        Next, we will make sure we properly cached the
        messages.  We still have to do a query to hydrate
        recipient info, but we won't need to hit the zerver_message
        table.
        '''

        with queries_captured() as queries:
            (fetch_message_dict,) = messages_for_ids(
                message_ids = [msg.id],
                user_message_flags={msg_id: []},
                search_fields=dict(),
                apply_markdown=False,
                client_gravatar=False,
                allow_edit_history=True,
            )

        self.assertEqual(len(queries), 2)
        for query in queries:
            self.assertNotIn('message', query['sql'])

        self.assertEqual(
            fetch_message_dict[TOPIC_NAME],
            msg.topic_name()
        )
        self.assertEqual(
            fetch_message_dict['content'],
            msg.content
        )
        self.assertEqual(
            fetch_message_dict['sender_id'],
            msg.sender_id
        )

        if msg.edit_history:
            self.assertEqual(
                fetch_message_dict['edit_history'],
                ujson.loads(msg.edit_history)
            )

    # MessageDict.to_dict_uncached for two messages should cost exactly 4
    # queries regardless of differing subscriber sets.
    def test_query_count_on_to_dict_uncached(self) -> None:
        # `to_dict_uncached` method is used by the mechanisms
        # tested in this class. Hence, its performance is tested here.
        # Generate 2 messages
        user = self.example_user("hamlet")
        self.login_user(user)
        stream_name = "public_stream"
        self.subscribe(user, stream_name)
        message_one_id = self.send_stream_message(user,
                                                  stream_name, "Message one")

        later_subscribed_user = self.example_user("cordelia")
        self.subscribe(later_subscribed_user, stream_name)
        message_two_id = self.send_stream_message(user,
                                                  stream_name, "Message two")

        message_ids = [message_one_id, message_two_id]
        messages = [Message.objects.select_related().get(id=message_id)
                    for message_id in message_ids]

        with queries_captured() as queries:
            MessageDict.to_dict_uncached(messages)

        self.assertEqual(len(queries), 4)

    # Editing content and topic separately via PATCH both persist.
    def test_save_message(self) -> None:
        """This is also tested by a client test, but here we can verify
        the cache against the database"""
        self.login('hamlet')
        msg_id = self.send_stream_message(self.example_user("hamlet"), "Scotland",
                                          topic_name="editing", content="before edit")
        result = self.client_patch("/json/messages/" + str(msg_id), {
            'message_id': msg_id,
            'content': 'after edit'
        })
        self.assert_json_success(result)
        self.check_message(msg_id, topic_name="editing", content="after edit")

        result = self.client_patch("/json/messages/" + str(msg_id), {
            'message_id': msg_id,
            'topic': 'edited'
        })
        self.assert_json_success(result)
        self.check_topic(msg_id, topic_name="edited")

    # GET /json/messages/<id> returns raw_content for sender and recipient
    # of a PM, but not for third parties or unknown ids.
    def test_fetch_raw_message(self) -> None:
        self.login('hamlet')
        msg_id = self.send_personal_message(
            from_user=self.example_user("hamlet"),
            to_user=self.example_user("cordelia"),
            content="**before** edit",
        )
        result = self.client_get('/json/messages/' + str(msg_id))
        self.assert_json_success(result)
        self.assertEqual(result.json()['raw_content'], '**before** edit')

        # Test error cases
        result = self.client_get('/json/messages/999999')
        self.assert_json_error(result, 'Invalid message(s)')

        self.login('cordelia')
        result = self.client_get('/json/messages/' + str(msg_id))
        self.assert_json_success(result)

        self.login('othello')
        result = self.client_get('/json/messages/' + str(msg_id))
        self.assert_json_error(result, 'Invalid message(s)')

    # A public-stream message is not fetchable from a different realm.
    def test_fetch_raw_message_stream_wrong_realm(self) -> None:
        user_profile = self.example_user("hamlet")
        self.login_user(user_profile)
        stream = self.make_stream('public_stream')
        self.subscribe(user_profile, stream.name)
        msg_id = self.send_stream_message(user_profile, stream.name,
                                          topic_name="test", content="test")
        result = self.client_get('/json/messages/' + str(msg_id))
        self.assert_json_success(result)

        mit_user = self.mit_user('sipbtest')
        self.login_user(mit_user)
        result = self.client_get('/json/messages/' + str(msg_id), subdomain="zephyr")
        self.assert_json_error(result, 'Invalid message(s)')

    # A private-stream message is not fetchable by a non-subscriber.
    def test_fetch_raw_message_private_stream(self) -> None:
        user_profile = self.example_user("hamlet")
        self.login_user(user_profile)
        stream = self.make_stream('private_stream', invite_only=True)
        self.subscribe(user_profile, stream.name)
        msg_id = self.send_stream_message(user_profile, stream.name,
                                          topic_name="test", content="test")
        result = self.client_get('/json/messages/' + str(msg_id))
        self.assert_json_success(result)
        self.login('othello')
        result = self.client_get('/json/messages/' + str(msg_id))
        self.assert_json_error(result, 'Invalid message(s)')

    # Editing someone else's message content is forbidden.
    def test_edit_message_no_permission(self) -> None:
        self.login('hamlet')
        msg_id = self.send_stream_message(self.example_user("iago"), "Scotland",
                                          topic_name="editing", content="before edit")
        result = self.client_patch("/json/messages/" + str(msg_id), {
            'message_id': msg_id,
            'content': 'content after edit',
        })
        self.assert_json_error(result, "You don't have permission to edit this message")

    # A PATCH with neither content nor topic is rejected.
    def test_edit_message_no_changes(self) -> None:
        self.login('hamlet')
        msg_id = self.send_stream_message(self.example_user("hamlet"), "Scotland",
                                          topic_name="editing", content="before edit")
        result = self.client_patch("/json/messages/" + str(msg_id), {
            'message_id': msg_id,
        })
        self.assert_json_error(result, "Nothing to change")

    # A whitespace-only topic on edit is rejected.
    def test_edit_message_no_topic(self) -> None:
        self.login('hamlet')
        msg_id = self.send_stream_message(self.example_user("hamlet"), "Scotland",
                                          topic_name="editing", content="before edit")
        result = self.client_patch("/json/messages/" + str(msg_id), {
            'message_id': msg_id,
            'topic': ' '
        })
        self.assert_json_error(result, "Topic can't be empty")

    # Editing content to whitespace-only deletes the message content,
    # replacing it with "(deleted)".
    def test_edit_message_no_content(self) -> None:
        self.login('hamlet')
        msg_id = self.send_stream_message(self.example_user("hamlet"), "Scotland",
                                          topic_name="editing", content="before edit")
        result = self.client_patch("/json/messages/" + str(msg_id), {
            'message_id': msg_id,
            'content': ' '
        })
        self.assert_json_success(result)
        content = Message.objects.filter(id=msg_id).values_list('content', flat = True)[0]
        self.assertEqual(content, "(deleted)")

    # With allow_edit_history off, the history endpoint errors and fetched
    # messages omit the edit_history field.
    def test_edit_message_history_disabled(self) -> None:
        user_profile = self.example_user("hamlet")
        do_set_realm_property(user_profile.realm, "allow_edit_history", False)
        self.login('hamlet')

        # Single-line edit
        msg_id_1 = self.send_stream_message(self.example_user("hamlet"),
                                            "Denmark",
                                            topic_name="editing",
                                            content="content before edit")

        new_content_1 = 'content after edit'
        result_1 = self.client_patch("/json/messages/" + str(msg_id_1), {
            'message_id': msg_id_1, 'content': new_content_1
        })
        self.assert_json_success(result_1)

        result = self.client_get(
            "/json/messages/" + str(msg_id_1) + "/history")
        self.assert_json_error(result, "Message edit history is disabled in this organization")

        # Now verify that if we fetch the message directly, there's no
        # edit history data attached.
        messages_result = self.client_get("/json/messages",
                                          {"anchor": msg_id_1, "num_before": 0, "num_after": 10})
        self.assert_json_success(messages_result)
        json_messages = ujson.loads(
            messages_result.content.decode('utf-8'))
        for msg in json_messages['messages']:
            self.assertNotIn("edit_history", msg)
    def test_edit_message_history(self) -> None:
        """Verify the /history payload for content edits: the rendered
        content before and after each edit, and the content_html_diff
        markup that highlights insertions and deletions."""
        self.login('hamlet')

        # Single-line edit
        msg_id_1 = self.send_stream_message(
            self.example_user("hamlet"),
            "Scotland",
            topic_name="editing",
            content="content before edit")
        new_content_1 = 'content after edit'
        result_1 = self.client_patch("/json/messages/" + str(msg_id_1), {
            'message_id': msg_id_1, 'content': new_content_1
        })
        self.assert_json_success(result_1)

        message_edit_history_1 = self.client_get(
            "/json/messages/" + str(msg_id_1) + "/history")
        json_response_1 = ujson.loads(
            message_edit_history_1.content.decode('utf-8'))
        # message_history[0] is the original message; [1] is the edit.
        message_history_1 = json_response_1['message_history']

        # Check content of message after edit.
        self.assertEqual(message_history_1[0]['rendered_content'],
                         '<p>content before edit</p>')
        self.assertEqual(message_history_1[1]['rendered_content'],
                         '<p>content after edit</p>')
        # The diff marks the inserted and deleted words with spans.
        self.assertEqual(message_history_1[1]['content_html_diff'],
                         ('<p>content '
                          '<span class="highlight_text_inserted">after</span> '
                          '<span class="highlight_text_deleted">before</span>'
                          ' edit</p>'))
        # Check content of message before edit.
        self.assertEqual(message_history_1[1]['prev_rendered_content'],
                         '<p>content before edit</p>')

        # Edits on new lines
        msg_id_2 = self.send_stream_message(
            self.example_user("hamlet"),
            "Scotland",
            topic_name="editing",
            content=('content before edit, line 1\n'
                     '\n'
                     'content before edit, line 3'))
        new_content_2 = ('content before edit, line 1\n'
                         'content after edit, line 2\n'
                         'content before edit, line 3')
        result_2 = self.client_patch("/json/messages/" + str(msg_id_2), {
            'message_id': msg_id_2, 'content': new_content_2
        })
        self.assert_json_success(result_2)

        message_edit_history_2 = self.client_get(
            "/json/messages/" + str(msg_id_2) + "/history")
        json_response_2 = ujson.loads(
            message_edit_history_2.content.decode('utf-8'))
        message_history_2 = json_response_2['message_history']

        # Filling the blank middle line merges the original two
        # paragraphs into a single <br>-joined paragraph, so the diff
        # spans the rewrapped region.
        self.assertEqual(message_history_2[0]['rendered_content'],
                         ('<p>content before edit, line 1</p>\n'
                          '<p>content before edit, line 3</p>'))
        self.assertEqual(message_history_2[1]['rendered_content'],
                         ('<p>content before edit, line 1<br>\n'
                          'content after edit, line 2<br>\n'
                          'content before edit, line 3</p>'))
        self.assertEqual(message_history_2[1]['content_html_diff'],
                         ('<p>content before edit, line 1<br> '
                          'content <span class="highlight_text_inserted">after edit, line 2<br> '
                          'content</span> before edit, line 3</p>'))
        self.assertEqual(message_history_2[1]['prev_rendered_content'],
                         ('<p>content before edit, line 1</p>\n'
                          '<p>content before edit, line 3</p>'))
    def test_edit_link(self) -> None:
        """Editing a markdown link: edit history shows both renderings,
        and the content_html_diff surfaces the old and new raw URLs."""
        # Link editing
        self.login('hamlet')
        msg_id_1 = self.send_stream_message(
            self.example_user("hamlet"),
            "Scotland",
            topic_name="editing",
            content="Here is a link to [zulip](www.zulip.org).")
        new_content_1 = 'Here is a link to [zulip](www.zulipchat.com).'
        result_1 = self.client_patch("/json/messages/" + str(msg_id_1), {
            'message_id': msg_id_1, 'content': new_content_1
        })
        self.assert_json_success(result_1)

        message_edit_history_1 = self.client_get(
            "/json/messages/" + str(msg_id_1) + "/history")
        json_response_1 = ujson.loads(
            message_edit_history_1.content.decode('utf-8'))
        # message_history[0] is the original message; [1] is the edit.
        message_history_1 = json_response_1['message_history']

        # Check content of message after edit.
        self.assertEqual(message_history_1[0]['rendered_content'],
                         '<p>Here is a link to '
                         '<a href="http://www.zulip.org">zulip</a>.</p>')
        self.assertEqual(message_history_1[1]['rendered_content'],
                         '<p>Here is a link to '
                         '<a href="http://www.zulipchat.com">zulip</a>.</p>')
        # The diff inlines the raw link targets so the URL change is visible.
        self.assertEqual(message_history_1[1]['content_html_diff'],
                         ('<p>Here is a link to <a href="http://www.zulipchat.com"'
                          '>zulip '
                          '<span class="highlight_text_inserted"> Link: http://www.zulipchat.com .'
                          '</span> <span class="highlight_text_deleted"> Link: http://www.zulip.org .'
                          '</span> </a></p>'))
def test_edit_history_unedited(self) -> None:
self.login('hamlet')
msg_id = self.send_stream_message(
self.example_user('hamlet'),
'Scotland',
topic_name='editing',
content='This message has not been edited.')
result = self.client_get('/json/messages/{}/history'.format(msg_id))
self.assert_json_success(result)
message_history = result.json()['message_history']
self.assert_length(message_history, 1)
def test_user_info_for_updates(self) -> None:
hamlet = self.example_user('hamlet')
cordelia = self.example_user('cordelia')
self.login_user(hamlet)
self.subscribe(hamlet, 'Scotland')
self.subscribe(cordelia, 'Scotland')
msg_id = self.send_stream_message(hamlet, 'Scotland',
content='@**Cordelia Lear**')
user_info = get_user_info_for_message_updates(msg_id)
message_user_ids = user_info['message_user_ids']
self.assertIn(hamlet.id, message_user_ids)
self.assertIn(cordelia.id, message_user_ids)
mention_user_ids = user_info['mention_user_ids']
self.assertEqual(mention_user_ids, {cordelia.id})
    def test_edit_cases(self) -> None:
        """Verify the stored edit_history structure across a sequence of
        content-only, topic-only, and combined edits — including edits by
        a second user — and the /history API payload built from it."""
        self.login('hamlet')
        hamlet = self.example_user('hamlet')
        msg_id = self.send_stream_message(self.example_user("hamlet"), "Scotland",
                                          topic_name="topic 1", content="content 1")

        # Edit 1: content only.  The newest history entry records the
        # previous content plus its rendered form and version.
        result = self.client_patch("/json/messages/" + str(msg_id), {
            'message_id': msg_id,
            'content': 'content 2',
        })
        self.assert_json_success(result)
        history = ujson.loads(Message.objects.get(id=msg_id).edit_history)
        self.assertEqual(history[0]['prev_content'], 'content 1')
        self.assertEqual(history[0]['user_id'], hamlet.id)
        self.assertEqual(set(history[0].keys()),
                         {'timestamp', 'prev_content', 'user_id',
                          'prev_rendered_content', 'prev_rendered_content_version'})

        # Edit 2: topic only — no content keys appear in the entry.
        result = self.client_patch("/json/messages/" + str(msg_id), {
            'message_id': msg_id,
            'topic': 'topic 2',
        })
        self.assert_json_success(result)
        history = ujson.loads(Message.objects.get(id=msg_id).edit_history)
        self.assertEqual(history[0][LEGACY_PREV_TOPIC], 'topic 1')
        self.assertEqual(history[0]['user_id'], hamlet.id)
        self.assertEqual(set(history[0].keys()), {'timestamp', LEGACY_PREV_TOPIC, 'user_id'})

        # Edit 3: content and topic together — one entry with both.
        result = self.client_patch("/json/messages/" + str(msg_id), {
            'message_id': msg_id,
            'content': 'content 3',
            'topic': 'topic 3',
        })
        self.assert_json_success(result)
        history = ujson.loads(Message.objects.get(id=msg_id).edit_history)
        self.assertEqual(history[0]['prev_content'], 'content 2')
        self.assertEqual(history[0][LEGACY_PREV_TOPIC], 'topic 2')
        self.assertEqual(history[0]['user_id'], hamlet.id)
        self.assertEqual(set(history[0].keys()),
                         {'timestamp', LEGACY_PREV_TOPIC, 'prev_content', 'user_id',
                          'prev_rendered_content', 'prev_rendered_content_version'})

        # Edit 4: content only, again.
        result = self.client_patch("/json/messages/" + str(msg_id), {
            'message_id': msg_id,
            'content': 'content 4',
        })
        self.assert_json_success(result)
        history = ujson.loads(Message.objects.get(id=msg_id).edit_history)
        self.assertEqual(history[0]['prev_content'], 'content 3')
        self.assertEqual(history[0]['user_id'], hamlet.id)

        # Edit 5: topic only, performed by a different user (Iago, an
        # admin) — the entry carries the editor's user_id.
        self.login('iago')
        result = self.client_patch("/json/messages/" + str(msg_id), {
            'message_id': msg_id,
            'topic': 'topic 4',
        })
        self.assert_json_success(result)
        history = ujson.loads(Message.objects.get(id=msg_id).edit_history)
        self.assertEqual(history[0][LEGACY_PREV_TOPIC], 'topic 3')
        self.assertEqual(history[0]['user_id'], self.example_user('iago').id)

        # Spot-check the complete stored history (newest entry first).
        history = ujson.loads(Message.objects.get(id=msg_id).edit_history)
        self.assertEqual(history[0][LEGACY_PREV_TOPIC], 'topic 3')
        self.assertEqual(history[2][LEGACY_PREV_TOPIC], 'topic 2')
        self.assertEqual(history[3][LEGACY_PREV_TOPIC], 'topic 1')
        self.assertEqual(history[1]['prev_content'], 'content 3')
        self.assertEqual(history[2]['prev_content'], 'content 2')
        self.assertEqual(history[4]['prev_content'], 'content 1')

        # Now, we verify that the edit history data sent back has the
        # correct filled-out fields
        message_edit_history = self.client_get("/json/messages/" + str(msg_id) + "/history")

        json_response = ujson.loads(message_edit_history.content.decode('utf-8'))

        # We reverse the message history view output so that the IDs line up with the above.
        message_history = list(reversed(json_response['message_history']))
        i = 0
        for entry in message_history:
            expected_entries = {'content', 'rendered_content', 'topic', 'timestamp', 'user_id'}
            # Entries 0, 2, 3 were topic edits; 1, 2, 4 were content edits
            # (entry 2 was both); entry 5 is the original message.
            if i in {0, 2, 3}:
                expected_entries.add('prev_topic')
            if i in {1, 2, 4}:
                expected_entries.add('prev_content')
                expected_entries.add('prev_rendered_content')
                expected_entries.add('content_html_diff')
            i += 1
            self.assertEqual(expected_entries, set(entry.keys()))
        self.assertEqual(len(message_history), 6)
        # Each snapshot pairs the post-edit state with the prev_* values.
        self.assertEqual(message_history[0]['prev_topic'], 'topic 3')
        self.assertEqual(message_history[0]['topic'], 'topic 4')
        self.assertEqual(message_history[1]['topic'], 'topic 3')
        self.assertEqual(message_history[2]['topic'], 'topic 3')
        self.assertEqual(message_history[2]['prev_topic'], 'topic 2')
        self.assertEqual(message_history[3]['topic'], 'topic 2')
        self.assertEqual(message_history[3]['prev_topic'], 'topic 1')
        self.assertEqual(message_history[4]['topic'], 'topic 1')
        self.assertEqual(message_history[0]['content'], 'content 4')
        self.assertEqual(message_history[1]['content'], 'content 4')
        self.assertEqual(message_history[1]['prev_content'], 'content 3')
        self.assertEqual(message_history[2]['content'], 'content 3')
        self.assertEqual(message_history[2]['prev_content'], 'content 2')
        self.assertEqual(message_history[3]['content'], 'content 2')
        self.assertEqual(message_history[4]['content'], 'content 2')
        self.assertEqual(message_history[4]['prev_content'], 'content 1')
        self.assertEqual(message_history[5]['content'], 'content 1')
        self.assertEqual(message_history[5]['topic'], 'topic 1')
def test_edit_message_content_limit(self) -> None:
def set_message_editing_params(allow_message_editing: bool,
message_content_edit_limit_seconds: int,
allow_community_topic_editing: bool) -> None:
result = self.client_patch("/json/realm", {
'allow_message_editing': ujson.dumps(allow_message_editing),
'message_content_edit_limit_seconds': message_content_edit_limit_seconds,
'allow_community_topic_editing': ujson.dumps(allow_community_topic_editing),
})
self.assert_json_success(result)
def do_edit_message_assert_success(id_: int, unique_str: str, topic_only: bool=False) -> None:
new_topic = 'topic' + unique_str
new_content = 'content' + unique_str
params_dict = {'message_id': id_, 'topic': new_topic}
if not topic_only:
params_dict['content'] = new_content
result = self.client_patch("/json/messages/" + str(id_), params_dict)
self.assert_json_success(result)
if topic_only:
self.check_topic(id_, topic_name=new_topic)
else:
self.check_message(id_, topic_name=new_topic, content=new_content)
def do_edit_message_assert_error(id_: int, unique_str: str, error: str,
topic_only: bool=False) -> None:
message = Message.objects.get(id=id_)
old_topic = message.topic_name()
old_content = message.content
new_topic = 'topic' + unique_str
new_content = 'content' + unique_str
params_dict = {'message_id': id_, 'topic': new_topic}
if not topic_only:
params_dict['content'] = new_content
result = self.client_patch("/json/messages/" + str(id_), params_dict)
message = Message.objects.get(id=id_)
self.assert_json_error(result, error)
msg = Message.objects.get(id=id_)
self.assertEqual(msg.topic_name(), old_topic)
self.assertEqual(msg.content, old_content)
self.login('iago')
# send a message in the past
id_ = self.send_stream_message(self.example_user("iago"), "Scotland",
content="content", topic_name="topic")
message = Message.objects.get(id=id_)
message.date_sent = message.date_sent - datetime.timedelta(seconds=180)
message.save()
# test the various possible message editing settings
# high enough time limit, all edits allowed
set_message_editing_params(True, 240, False)
do_edit_message_assert_success(id_, 'A')
# out of time, only topic editing allowed
set_message_editing_params(True, 120, False)
do_edit_message_assert_success(id_, 'B', True)
do_edit_message_assert_error(id_, 'C', "The time limit for editing this message has passed")
# infinite time, all edits allowed
set_message_editing_params(True, 0, False)
do_edit_message_assert_success(id_, 'D')
# without allow_message_editing, nothing is allowed
set_message_editing_params(False, 240, False)
do_edit_message_assert_error(id_, 'E', "Your organization has turned off message editing", True)
set_message_editing_params(False, 120, False)
do_edit_message_assert_error(id_, 'F', "Your organization has turned off message editing", True)
set_message_editing_params(False, 0, False)
do_edit_message_assert_error(id_, 'G', "Your organization has turned off message editing", True)
def test_allow_community_topic_editing(self) -> None:
def set_message_editing_params(allow_message_editing: bool,
message_content_edit_limit_seconds: int,
allow_community_topic_editing: bool) -> None:
result = self.client_patch("/json/realm", {
'allow_message_editing': ujson.dumps(allow_message_editing),
'message_content_edit_limit_seconds': message_content_edit_limit_seconds,
'allow_community_topic_editing': ujson.dumps(allow_community_topic_editing),
})
self.assert_json_success(result)
def do_edit_message_assert_success(id_: int, unique_str: str) -> None:
new_topic = 'topic' + unique_str
params_dict = {'message_id': id_, 'topic': new_topic}
result = self.client_patch("/json/messages/" + str(id_), params_dict)
self.assert_json_success(result)
self.check_topic(id_, topic_name=new_topic)
def do_edit_message_assert_error(id_: int, unique_str: str, error: str) -> None:
message = Message.objects.get(id=id_)
old_topic = message.topic_name()
old_content = message.content
new_topic = 'topic' + unique_str
params_dict = {'message_id': id_, 'topic': new_topic}
result = self.client_patch("/json/messages/" + str(id_), params_dict)
message = Message.objects.get(id=id_)
self.assert_json_error(result, error)
msg = Message.objects.get(id=id_)
self.assertEqual(msg.topic_name(), old_topic)
self.assertEqual(msg.content, old_content)
self.login('iago')
# send a message in the past
id_ = self.send_stream_message(self.example_user("hamlet"), "Scotland",
content="content", topic_name="topic")
message = Message.objects.get(id=id_)
message.date_sent = message.date_sent - datetime.timedelta(seconds=180)
message.save()
# any user can edit the topic of a message
set_message_editing_params(True, 0, True)
# log in as a new user
self.login('cordelia')
do_edit_message_assert_success(id_, 'A')
# only admins can edit the topics of messages
self.login('iago')
set_message_editing_params(True, 0, False)
do_edit_message_assert_success(id_, 'B')
self.login('cordelia')
do_edit_message_assert_error(id_, 'C', "You don't have permission to edit this message")
self.login('iago')
set_message_editing_params(False, 0, True)
self.login('cordelia')
do_edit_message_assert_error(id_, 'D', "Your organization has turned off message editing")
message.date_sent = message.date_sent - datetime.timedelta(seconds=90000)
message.save()
self.login('iago')
set_message_editing_params(True, 0, True)
do_edit_message_assert_success(id_, 'E')
self.login('cordelia')
do_edit_message_assert_error(id_, 'F', "The time limit for editing this message has passed")
message.set_topic_name("(no topic)")
message.save()
self.login('cordelia')
do_edit_message_assert_success(id_, 'D')
    @mock.patch("zerver.lib.actions.send_event")
    def test_edit_topic_public_history_stream(self, mock_send_event: mock.MagicMock) -> None:
        """Topic edits on a history-public stream notify current
        subscribers plus any user holding a UserMessage row, while
        skipping long-term-idle users who have no such row."""
        stream_name = "Macbeth"
        hamlet = self.example_user("hamlet")
        cordelia = self.example_user("cordelia")
        self.make_stream(stream_name, history_public_to_subscribers=True)
        self.subscribe(hamlet, stream_name)
        self.login_user(hamlet)
        message_id = self.send_stream_message(hamlet, stream_name, "Where am I?")

        # Cordelia subscribes only after the message was sent, so she has
        # no UserMessage row for it.
        self.login_user(cordelia)
        self.subscribe(cordelia, stream_name)
        message = Message.objects.get(id=message_id)

        def do_update_message_topic_success(user_profile: UserProfile, message: Message,
                                            topic_name: str, users_to_be_notified: List[Dict[str, Any]]) -> None:
            # Perform the topic edit and assert that exactly this set of
            # users was passed to send_event.
            do_update_message(
                user_profile=user_profile,
                message=message,
                new_stream=None,
                topic_name=topic_name,
                propagate_mode="change_later",
                content=None,
                rendered_content=None,
                prior_mention_user_ids=set(),
                mention_user_ids=set(),
                mention_data=None,
            )

            mock_send_event.assert_called_with(mock.ANY, mock.ANY, users_to_be_notified)

        def notify(user_id: int) -> Dict[str, Any]:
            # Build the expected per-user notification payload: only the
            # user owning the UserMessage row carries its real flags; for
            # everyone else the flags default to ["read"].
            um = UserMessage.objects.get(message=message_id)
            if um.user_profile_id == user_id:
                return {
                    "id": user_id,
                    "flags": um.flags_list()
                }
            else:
                return {
                    "id": user_id,
                    "flags": ["read"]
                }

        # Both subscribers are notified for a normal topic edit.
        users_to_be_notified = list(map(notify, [hamlet.id, cordelia.id]))
        do_update_message_topic_success(cordelia, message, "Othello eats apple", users_to_be_notified)

        # A long-term-idle subscriber without a UserMessage row is skipped.
        cordelia.long_term_idle = True
        cordelia.save()
        users_to_be_notified = list(map(notify, [hamlet.id]))
        do_update_message_topic_success(cordelia, message, "Another topic idle", users_to_be_notified)
        cordelia.long_term_idle = False
        cordelia.save()

        # Even if Hamlet unsubscribes the stream, he should be notified when the topic is changed
        # because he has a UserMessage row.
        self.unsubscribe(hamlet, stream_name)
        users_to_be_notified = list(map(notify, [hamlet.id, cordelia.id]))
        do_update_message_topic_success(cordelia, message, "Another topic", users_to_be_notified)

        # Hamlet subscribes to the stream again and Cordelia unsubscribes, then Hamlet changes
        # the message topic. Cordelia won't receive any updates when a message on that stream is
        # changed, since she is no longer subscribed and has no UserMessage row.
        self.subscribe(hamlet, stream_name)
        self.unsubscribe(cordelia, stream_name)
        self.login_user(hamlet)
        users_to_be_notified = list(map(notify, [hamlet.id]))
        do_update_message_topic_success(hamlet, message, "Change again", users_to_be_notified)
    @mock.patch("zerver.lib.actions.send_event")
    def test_wildcard_mention(self, mock_send_event: mock.MagicMock) -> None:
        """Editing a message to add @**everyone** sets
        wildcard_mention_user_ids on the update_message event and flags
        all subscribers as wildcard_mentioned."""
        stream_name = "Macbeth"
        hamlet = self.example_user("hamlet")
        cordelia = self.example_user("cordelia")
        self.make_stream(stream_name, history_public_to_subscribers=True)
        self.subscribe(hamlet, stream_name)
        self.subscribe(cordelia, stream_name)
        self.login_user(hamlet)
        message_id = self.send_stream_message(hamlet, stream_name, "Hello everyone")

        def notify(user_id: int) -> Dict[str, Any]:
            # Expected per-user notification payload for a wildcard mention.
            return {
                "id": user_id,
                "flags": ["wildcard_mentioned"]
            }

        users_to_be_notified = sorted(map(notify, [cordelia.id, hamlet.id]), key=itemgetter("id"))
        result = self.client_patch("/json/messages/" + str(message_id), {
            'message_id': message_id,
            'content': 'Hello @**everyone**',
        })
        self.assert_json_success(result)

        # Extract the send_event call where event type is 'update_message'.
        # Here we assert wildcard_mention_user_ids has been set properly.
        called = False
        for call_args in mock_send_event.call_args_list:
            (arg_realm, arg_event, arg_notified_users) = call_args[0]
            if arg_event['type'] == 'update_message':
                self.assertEqual(arg_event['type'], 'update_message')
                self.assertEqual(arg_event['wildcard_mention_user_ids'], [cordelia.id, hamlet.id])
                self.assertEqual(sorted(arg_notified_users, key=itemgetter("id")), users_to_be_notified)
                called = True
        # Guard against the loop above silently matching no event at all.
        self.assertTrue(called)
def test_propagate_topic_forward(self) -> None:
self.login('hamlet')
id1 = self.send_stream_message(self.example_user("hamlet"), "Scotland",
topic_name="topic1")
id2 = self.send_stream_message(self.example_user("iago"), "Scotland",
topic_name="topic1")
id3 = self.send_stream_message(self.example_user("iago"), "Rome",
topic_name="topic1")
id4 = self.send_stream_message(self.example_user("hamlet"), "Scotland",
topic_name="topic2")
id5 = self.send_stream_message(self.example_user("iago"), "Scotland",
topic_name="topic1")
result = self.client_patch("/json/messages/" + str(id1), {
'message_id': id1,
'topic': 'edited',
'propagate_mode': 'change_later'
})
self.assert_json_success(result)
self.check_topic(id1, topic_name="edited")
self.check_topic(id2, topic_name="edited")
self.check_topic(id3, topic_name="topic1")
self.check_topic(id4, topic_name="topic2")
self.check_topic(id5, topic_name="edited")
def test_propagate_all_topics(self) -> None:
self.login('hamlet')
id1 = self.send_stream_message(self.example_user("hamlet"), "Scotland",
topic_name="topic1")
id2 = self.send_stream_message(self.example_user("hamlet"), "Scotland",
topic_name="topic1")
id3 = self.send_stream_message(self.example_user("iago"), "Rome",
topic_name="topic1")
id4 = self.send_stream_message(self.example_user("hamlet"), "Scotland",
topic_name="topic2")
id5 = self.send_stream_message(self.example_user("iago"), "Scotland",
topic_name="topic1")
id6 = self.send_stream_message(self.example_user("iago"), "Scotland",
topic_name="topic3")
result = self.client_patch("/json/messages/" + str(id2), {
'message_id': id2,
'topic': 'edited',
'propagate_mode': 'change_all'
})
self.assert_json_success(result)
self.check_topic(id1, topic_name="edited")
self.check_topic(id2, topic_name="edited")
self.check_topic(id3, topic_name="topic1")
self.check_topic(id4, topic_name="topic2")
self.check_topic(id5, topic_name="edited")
self.check_topic(id6, topic_name="topic3")
def test_propagate_invalid(self) -> None:
self.login('hamlet')
id1 = self.send_stream_message(self.example_user("hamlet"), "Scotland",
topic_name="topic1")
result = self.client_patch("/json/messages/" + str(id1), {
'topic': 'edited',
'propagate_mode': 'invalid',
})
self.assert_json_error(result, 'Invalid propagate_mode')
self.check_topic(id1, topic_name="topic1")
result = self.client_patch("/json/messages/" + str(id1), {
'content': 'edited',
'propagate_mode': 'change_all',
})
self.assert_json_error(result, 'Invalid propagate_mode without topic edit')
self.check_topic(id1, topic_name="topic1")
def prepare_move_topics(self, user_email: str, old_stream: str, new_stream: str, topic: str) -> Tuple[UserProfile, Stream, Stream, int, int]:
user_profile = self.example_user(user_email)
self.login(user_email)
stream = self.make_stream(old_stream)
new_stream = self.make_stream(new_stream)
self.subscribe(user_profile, stream.name)
self.subscribe(user_profile, new_stream.name)
msg_id = self.send_stream_message(user_profile, stream.name,
topic_name=topic, content="First")
msg_id_lt = self.send_stream_message(user_profile, stream.name,
topic_name=topic, content="Second")
self.send_stream_message(user_profile, stream.name,
topic_name=topic, content="third")
return (user_profile, stream, new_stream, msg_id, msg_id_lt)
def test_move_message_to_stream(self) -> None:
(user_profile, old_stream, new_stream, msg_id, msg_id_lt) = self.prepare_move_topics(
"iago", "test move stream", "new stream", "test")
result = self.client_patch("/json/messages/" + str(msg_id), {
'message_id': msg_id,
'stream_id': new_stream.id,
'propagate_mode': 'change_all'
})
self.assert_json_success(result)
messages = get_topic_messages(user_profile, old_stream, "test")
self.assertEqual(len(messages), 1)
self.assertEqual(messages[0].content, "This topic was moved by @_**Iago|%s** to #**new stream>test**" % (user_profile.id,))
messages = get_topic_messages(user_profile, new_stream, "test")
self.assertEqual(len(messages), 4)
self.assertEqual(messages[3].content, "This topic was moved here from #**test move stream>test** by @_**Iago|%s**" % (user_profile.id,))
def test_move_message_to_stream_change_later(self) -> None:
(user_profile, old_stream, new_stream, msg_id, msg_id_later) = self.prepare_move_topics(
"iago", "test move stream", "new stream", "test")
result = self.client_patch("/json/messages/" + str(msg_id_later), {
'message_id': msg_id_later,
'stream_id': new_stream.id,
'propagate_mode': 'change_later'
})
self.assert_json_success(result)
messages = get_topic_messages(user_profile, old_stream, "test")
self.assertEqual(len(messages), 2)
self.assertEqual(messages[0].id, msg_id)
self.assertEqual(messages[1].content, "This topic was moved by @_**Iago|%s** to #**new stream>test**" % (user_profile.id,))
messages = get_topic_messages(user_profile, new_stream, "test")
self.assertEqual(len(messages), 3)
self.assertEqual(messages[0].id, msg_id_later)
self.assertEqual(messages[2].content, "This topic was moved here from #**test move stream>test** by @_**Iago|%d**" % (user_profile.id,))
def test_move_message_to_stream_no_allowed(self) -> None:
(user_profile, old_stream, new_stream, msg_id, msg_id_later) = self.prepare_move_topics(
"aaron", "test move stream", "new stream", "test")
result = self.client_patch("/json/messages/" + str(msg_id), {
'message_id': msg_id,
'stream_id': new_stream.id,
'propagate_mode': 'change_all'
})
self.assert_json_error(result, "You don't have permission to move this message")
messages = get_topic_messages(user_profile, old_stream, "test")
self.assertEqual(len(messages), 3)
messages = get_topic_messages(user_profile, new_stream, "test")
self.assertEqual(len(messages), 0)
def test_move_message_to_stream_with_content(self) -> None:
(user_profile, old_stream, new_stream, msg_id, msg_id_later) = self.prepare_move_topics(
"iago", "test move stream", "new stream", "test")
result = self.client_patch("/json/messages/" + str(msg_id), {
'message_id': msg_id,
'stream_id': new_stream.id,
'propagate_mode': 'change_all',
'content': 'Not allowed'
})
self.assert_json_error(result, "Cannot change message content while changing stream")
messages = get_topic_messages(user_profile, old_stream, "test")
self.assertEqual(len(messages), 3)
messages = get_topic_messages(user_profile, new_stream, "test")
self.assertEqual(len(messages), 0)
def test_move_message_to_stream_and_topic(self) -> None:
(user_profile, old_stream, new_stream, msg_id, msg_id_later) = self.prepare_move_topics(
"iago", "test move stream", "new stream", "test")
result = self.client_patch("/json/messages/" + str(msg_id), {
'message_id': msg_id,
'stream_id': new_stream.id,
'propagate_mode': 'change_all',
'topic': 'new topic'
})
messages = get_topic_messages(user_profile, old_stream, "test")
self.assertEqual(len(messages), 1)
self.assertEqual(messages[0].content, "This topic was moved by @_**Iago|%s** to #**new stream>new topic**" % (user_profile.id,))
messages = get_topic_messages(user_profile, new_stream, "new topic")
self.assertEqual(len(messages), 4)
self.assertEqual(messages[3].content, "This topic was moved here from #**test move stream>test** by @_**Iago|%s**" % (user_profile.id,))
self.assert_json_success(result)
def test_move_message_to_stream_to_private_stream(self) -> None:
user_profile = self.example_user("iago")
self.login("iago")
stream = self.make_stream("test move stream")
new_stream = self.make_stream("new stream", None, True)
self.subscribe(user_profile, stream.name)
self.subscribe(user_profile, new_stream.name)
msg_id = self.send_stream_message(user_profile, stream.name,
topic_name="test", content="First")
self.send_stream_message(user_profile, stream.name,
topic_name="test", content="Second")
result = self.client_patch("/json/messages/" + str(msg_id), {
'message_id': msg_id,
'stream_id': new_stream.id,
'propagate_mode': 'change_all',
})
self.assert_json_error(result, "Streams must be public")
messages = get_topic_messages(user_profile, stream, "test")
self.assertEqual(len(messages), 2)
messages = get_topic_messages(user_profile, new_stream, "test")
self.assertEqual(len(messages), 0)
class MirroredMessageUsersTest(ZulipTestCase):
    """Tests for create_mirrored_message_users, which validates mirrored
    (zephyr/IRC/jabber) requests and auto-creates mirror-dummy users for
    unknown senders and recipients."""

    def test_invalid_sender(self) -> None:
        # A request whose POST carries no sender is rejected.
        user = self.example_user('hamlet')
        recipients: List[str] = []

        Request = namedtuple('Request', ['POST'])
        request = Request(POST=dict())

        with self.assertRaises(InvalidMirrorInput):
            create_mirrored_message_users(request, user, recipients)

    def test_invalid_client(self) -> None:
        # Only recognized mirroring clients may create mirrored users.
        client = get_client(name='banned_mirror')

        user = self.example_user('hamlet')
        sender = user
        recipients: List[str] = []

        Request = namedtuple('Request', ['POST', 'client'])
        request = Request(POST = dict(sender=sender.email, type='private'),
                          client = client)

        with self.assertRaises(InvalidMirrorInput):
            create_mirrored_message_users(request, user, recipients)

    def test_invalid_email(self) -> None:
        # Malformed recipient emails are rejected for every mirror client.
        invalid_email = 'alice AT example.com'
        recipients = [invalid_email]

        user = self.mit_user('starnine')
        sender = user

        Request = namedtuple('Request', ['POST', 'client'])

        for client_name in ['zephyr_mirror', 'irc_mirror', 'jabber_mirror']:
            client = get_client(name=client_name)

            request = Request(POST = dict(sender=sender.email, type='private'),
                              client = client)

            with self.assertRaises(InvalidMirrorInput):
                create_mirrored_message_users(request, user, recipients)

    @mock.patch('DNS.dnslookup', return_value=[['sipbtest:*:20922:101:Fred Sipb,,,:/mit/sipbtest:/bin/athena/tcsh']])
    def test_zephyr_mirror_new_recipient(self, ignored: object) -> None:
        """A zephyr-mirrored message addressed to an unknown recipient
        creates a mirror-dummy user for that address."""
        client = get_client(name='zephyr_mirror')

        user = self.mit_user('starnine')
        sender = self.mit_user('sipbtest')
        new_user_email = 'bob_the_new_user@mit.edu'
        new_user_realm = get_realm("zephyr")

        recipients = [user.email, new_user_email]

        Request = namedtuple('Request', ['POST', 'client'])
        request = Request(POST = dict(sender=sender.email, type='private'),
                          client = client)

        mirror_sender = create_mirrored_message_users(request, user, recipients)

        # The existing sender is resolved, not re-created.
        self.assertEqual(mirror_sender, sender)

        realm_users = UserProfile.objects.filter(realm=sender.realm)
        realm_emails = {user.email for user in realm_users}
        self.assertIn(user.email, realm_emails)
        self.assertIn(new_user_email, realm_emails)

        # The new recipient was created as a mirror dummy.
        bob = get_user(new_user_email, new_user_realm)
        self.assertTrue(bob.is_mirror_dummy)

    @mock.patch('DNS.dnslookup', return_value=[['sipbtest:*:20922:101:Fred Sipb,,,:/mit/sipbtest:/bin/athena/tcsh']])
    def test_zephyr_mirror_new_sender(self, ignored: object) -> None:
        """An unknown zephyr sender is likewise created as a mirror dummy."""
        client = get_client(name='zephyr_mirror')

        user = self.mit_user('starnine')
        sender_email = 'new_sender@mit.edu'

        recipients = ['stream_name']

        Request = namedtuple('Request', ['POST', 'client'])
        request = Request(POST = dict(sender=sender_email, type='stream'),
                          client = client)

        mirror_sender = create_mirrored_message_users(request, user, recipients)

        assert(mirror_sender is not None)
        self.assertEqual(mirror_sender.email, sender_email)
        self.assertTrue(mirror_sender.is_mirror_dummy)

    def test_irc_mirror(self) -> None:
        """IRC mirroring creates mirror-dummy users for unknown recipients."""
        reset_emails_in_zulip_realm()
        client = get_client(name='irc_mirror')

        sender = self.example_user('hamlet')

        recipients = [self.nonreg_email('alice'), 'bob@irc.zulip.com', self.nonreg_email('cordelia')]

        Request = namedtuple('Request', ['POST', 'client'])
        request = Request(POST = dict(sender=sender.email, type='private'),
                          client = client)

        mirror_sender = create_mirrored_message_users(request, sender, recipients)

        self.assertEqual(mirror_sender, sender)

        realm_users = UserProfile.objects.filter(realm=sender.realm)
        realm_emails = {user.email for user in realm_users}
        self.assertIn(self.nonreg_email('alice'), realm_emails)
        self.assertIn('bob@irc.zulip.com', realm_emails)

        bob = get_user('bob@irc.zulip.com', sender.realm)
        self.assertTrue(bob.is_mirror_dummy)

    def test_jabber_mirror(self) -> None:
        """Jabber mirroring creates mirror-dummy users for unknown recipients."""
        reset_emails_in_zulip_realm()
        client = get_client(name='jabber_mirror')

        sender = self.example_user('hamlet')
        user = sender

        recipients = [self.nonreg_email('alice'), self.nonreg_email('bob'), self.nonreg_email('cordelia')]

        Request = namedtuple('Request', ['POST', 'client'])
        request = Request(POST = dict(sender=sender.email, type='private'),
                          client = client)

        mirror_sender = create_mirrored_message_users(request, user, recipients)

        self.assertEqual(mirror_sender, sender)

        realm_users = UserProfile.objects.filter(realm=sender.realm)
        realm_emails = {user.email for user in realm_users}
        self.assertIn(self.nonreg_email('alice'), realm_emails)
        self.assertIn(self.nonreg_email('bob'), realm_emails)

        bob = get_user(self.nonreg_email('bob'), sender.realm)
        self.assertTrue(bob.is_mirror_dummy)
class MessageAccessTests(ZulipTestCase):
    """Tests for message access control: editable flags, starring across
    public/private streams and guest users, and bulk_access_messages()."""

    def test_update_invalid_flags(self) -> None:
        """Unknown flags and non-client-editable flags are rejected."""
        message = self.send_personal_message(
            self.example_user("cordelia"),
            self.example_user("hamlet"),
            "hello",
        )
        self.login('hamlet')
        result = self.client_post("/json/messages/flags",
                                  {"messages": ujson.dumps([message]),
                                   "op": "add",
                                   "flag": "invalid"})
        self.assert_json_error(result, "Invalid flag: 'invalid'")
        result = self.client_post("/json/messages/flags",
                                  {"messages": ujson.dumps([message]),
                                   "op": "add",
                                   "flag": "is_private"})
        self.assert_json_error(result, "Invalid flag: 'is_private'")
        result = self.client_post("/json/messages/flags",
                                  {"messages": ujson.dumps([message]),
                                   "op": "add",
                                   "flag": "active_mobile_push_notification"})
        self.assert_json_error(result, "Invalid flag: 'active_mobile_push_notification'")
        result = self.client_post("/json/messages/flags",
                                  {"messages": ujson.dumps([message]),
                                   "op": "add",
                                   "flag": "mentioned"})
        self.assert_json_error(result, "Flag not editable: 'mentioned'")

    def change_star(self, messages: List[int], add: bool=True, **kwargs: Any) -> HttpResponse:
        """Helper: add (or remove, if add=False) the 'starred' flag on messages."""
        return self.client_post("/json/messages/flags",
                                {"messages": ujson.dumps(messages),
                                 "op": "add" if add else "remove",
                                 "flag": "starred"},
                                **kwargs)

    def test_change_star(self) -> None:
        """Starring and unstarring one's own message round-trips correctly."""
        self.login('hamlet')
        message_ids = [self.send_personal_message(self.example_user("hamlet"),
                                                  self.example_user("hamlet"),
                                                  "test")]
        result = self.change_star(message_ids)
        self.assert_json_success(result)
        # Marked message should have "starred" flag; all others just "read".
        for msg in self.get_messages():
            if msg['id'] in message_ids:
                self.assertEqual(msg['flags'], ['starred'])
            else:
                self.assertEqual(msg['flags'], ['read'])
        result = self.change_star(message_ids, False)
        self.assert_json_success(result)
        for msg in self.get_messages():
            if msg['id'] in message_ids:
                self.assertEqual(msg['flags'], [])

    def test_change_star_public_stream_historical(self) -> None:
        """Users can star public-stream messages sent before they saw them
        (the 'historical' flag), one message at a time; other realms cannot."""
        stream_name = "new_stream"
        self.subscribe(self.example_user("hamlet"), stream_name)
        self.login('hamlet')
        message_ids = [
            self.send_stream_message(self.example_user("hamlet"), stream_name, "test"),
        ]
        # Send a second message so we can verify it isn't modified
        other_message_ids = [
            self.send_stream_message(self.example_user("hamlet"), stream_name, "test_unused"),
        ]
        received_message_ids = [
            self.send_personal_message(
                self.example_user("hamlet"),
                self.example_user("cordelia"),
                "test_received"
            ),
        ]
        # Now login as another user who wasn't on that stream
        self.login('cordelia')
        sent_message_ids = [
            self.send_personal_message(
                self.example_user("cordelia"),
                self.example_user("cordelia"),
                "test_read_message",
            ),
        ]
        result = self.client_post("/json/messages/flags",
                                  {"messages": ujson.dumps(sent_message_ids),
                                   "op": "add",
                                   "flag": "read"})
        # Fix: previously this response was silently discarded; verify it worked.
        self.assert_json_success(result)
        # Marking messages of a stream we never received as read must fail.
        result = self.client_post("/json/messages/flags",
                                  {"messages": ujson.dumps(message_ids),
                                   "op": "add",
                                   "flag": "read"})
        self.assert_json_error(result, 'Invalid message(s)')
        # Trying to change a list of more than one historical message fails
        result = self.change_star(message_ids * 2)
        self.assert_json_error(result, 'Invalid message(s)')
        # Confirm that one can change the historical flag now
        result = self.change_star(message_ids)
        self.assert_json_success(result)
        for msg in self.get_messages():
            if msg['id'] in message_ids:
                self.assertEqual(set(msg['flags']), {'starred', 'historical', 'read'})
            elif msg['id'] in received_message_ids:
                self.assertEqual(msg['flags'], [])
            else:
                self.assertEqual(msg['flags'], ['read'])
            self.assertNotIn(msg['id'], other_message_ids)
        result = self.change_star(message_ids, False)
        self.assert_json_success(result)
        # But it still doesn't work if you're in another realm
        user = self.mit_user('sipbtest')
        self.login_user(user)
        result = self.change_star(message_ids, subdomain="zephyr")
        self.assert_json_error(result, 'Invalid message(s)')

    def test_change_star_private_message_security(self) -> None:
        """Starring a private message you are not a party to fails."""
        self.login('hamlet')
        message_ids = [
            self.send_personal_message(
                self.example_user("hamlet"),
                self.example_user("hamlet"),
                "test",
            ),
        ]
        # Starring private messages you didn't receive fails.
        self.login('cordelia')
        result = self.change_star(message_ids)
        self.assert_json_error(result, 'Invalid message(s)')

    def test_change_star_private_stream_security(self) -> None:
        """Private-stream messages can only be starred by users with access;
        history_public_to_subscribers grants access to later subscribers."""
        stream_name = "private_stream"
        self.make_stream(stream_name, invite_only=True)
        self.subscribe(self.example_user("hamlet"), stream_name)
        self.login('hamlet')
        message_ids = [
            self.send_stream_message(self.example_user("hamlet"), stream_name, "test"),
        ]
        result = self.change_star(message_ids)
        self.assert_json_success(result)
        # Starring private stream messages you received works
        self.login('cordelia')
        result = self.change_star(message_ids)
        self.assert_json_error(result, 'Invalid message(s)')
        stream_name = "private_stream_2"
        self.make_stream(stream_name, invite_only=True,
                         history_public_to_subscribers=True)
        self.subscribe(self.example_user("hamlet"), stream_name)
        self.login('hamlet')
        message_ids = [
            self.send_stream_message(self.example_user("hamlet"), stream_name, "test"),
        ]
        # With stream.history_public_to_subscribers = True, you still
        # can't see it if you didn't receive the message and are
        # not subscribed.
        self.login('cordelia')
        result = self.change_star(message_ids)
        self.assert_json_error(result, 'Invalid message(s)')
        # But if you subscribe, then you can star the message
        self.subscribe(self.example_user("cordelia"), stream_name)
        result = self.change_star(message_ids)
        self.assert_json_success(result)

    def test_new_message(self) -> None:
        """A newly sent message is not starred by default."""
        sender = self.example_user('hamlet')
        self.login_user(sender)
        content = "Test message for star"
        self.send_stream_message(sender, "Verona",
                                 content=content)
        sent_message = UserMessage.objects.filter(
            user_profile=self.example_user('hamlet')
        ).order_by("id").reverse()[0]
        self.assertEqual(sent_message.message.content, content)
        self.assertFalse(sent_message.flags.starred)

    def test_change_star_public_stream_security_for_guest_user(self) -> None:
        """Guests can star public-stream messages only once subscribed."""
        # Guest user can't access(star) unsubscribed public stream messages
        normal_user = self.example_user("hamlet")
        stream_name = "public_stream"
        self.make_stream(stream_name)
        self.subscribe(normal_user, stream_name)
        self.login_user(normal_user)
        message_id = [
            self.send_stream_message(normal_user, stream_name, "test 1")
        ]
        guest_user = self.example_user('polonius')
        self.login_user(guest_user)
        result = self.change_star(message_id)
        self.assert_json_error(result, 'Invalid message(s)')
        # Subscribed guest users can star public stream messages
        self.subscribe(guest_user, stream_name)
        result = self.change_star(message_id)
        self.assert_json_success(result)
        # Messages sent after the guest subscribed can also be starred.
        self.login_user(normal_user)
        message_id = [
            self.send_stream_message(normal_user, stream_name, "test 2")
        ]
        self.login_user(guest_user)
        result = self.change_star(message_id)
        self.assert_json_success(result)

    def test_change_star_private_stream_security_for_guest_user(self) -> None:
        """Guests need subscription AND public history to star old private-stream
        messages; messages sent after subscription are always starrable."""
        normal_user = self.example_user("hamlet")
        stream_name = "private_stream"
        stream = self.make_stream(stream_name, invite_only=True)
        self.subscribe(normal_user, stream_name)
        self.login_user(normal_user)
        message_id = [
            self.send_stream_message(normal_user, stream_name, "test 1")
        ]
        # Guests cannot star messages of unsubscribed private streams.
        guest_user = self.example_user('polonius')
        self.login_user(guest_user)
        result = self.change_star(message_id)
        self.assert_json_error(result, 'Invalid message(s)')
        # Guest user can't access messages of subscribed private streams if
        # history is not public to subscribers.
        self.subscribe(guest_user, stream_name)
        result = self.change_star(message_id)
        self.assert_json_error(result, 'Invalid message(s)')
        # Guest user can access messages of subscribed private streams if
        # history is public to subscribers.
        do_change_stream_invite_only(stream, True, history_public_to_subscribers=True)
        result = self.change_star(message_id)
        self.assert_json_success(result)
        # With history not public to subscribers, they can still star new
        # messages they actually received.
        do_change_stream_invite_only(stream, True, history_public_to_subscribers=False)
        self.login_user(normal_user)
        message_id = [
            self.send_stream_message(normal_user, stream_name, "test 2")
        ]
        self.login_user(guest_user)
        result = self.change_star(message_id)
        self.assert_json_success(result)

    def test_bulk_access_messages_private_stream(self) -> None:
        """bulk_access_messages() honors private-stream history protection."""
        user = self.example_user("hamlet")
        self.login_user(user)
        stream_name = "private_stream"
        stream = self.make_stream(stream_name, invite_only=True,
                                  history_public_to_subscribers=False)
        self.subscribe(user, stream_name)
        message_one_id = self.send_stream_message(user,
                                                  stream_name, "Message one")
        later_subscribed_user = self.example_user("cordelia")
        self.subscribe(later_subscribed_user, stream_name)
        message_two_id = self.send_stream_message(user,
                                                  stream_name, "Message two")
        message_ids = [message_one_id, message_two_id]
        messages = [Message.objects.select_related().get(id=message_id)
                    for message_id in message_ids]
        filtered_messages = bulk_access_messages(later_subscribed_user, messages)
        # Message sent before subscribing is not accessible by the later
        # subscribed user as the stream has protected history.
        self.assertEqual(len(filtered_messages), 1)
        self.assertEqual(filtered_messages[0].id, message_two_id)
        do_change_stream_invite_only(stream, True, history_public_to_subscribers=True)
        filtered_messages = bulk_access_messages(later_subscribed_user, messages)
        # Message sent before subscribing are accessible by user as stream
        # doesn't have protected history.
        self.assertEqual(len(filtered_messages), 2)
        # Unsubscribed users get nothing from a private stream.
        unsubscribed_user = self.example_user("ZOE")
        filtered_messages = bulk_access_messages(unsubscribed_user, messages)
        self.assertEqual(len(filtered_messages), 0)

    def test_bulk_access_messages_public_stream(self) -> None:
        """bulk_access_messages() allows everyone to read public-stream history."""
        user = self.example_user("hamlet")
        self.login_user(user)
        stream_name = "public_stream"
        self.subscribe(user, stream_name)
        message_one_id = self.send_stream_message(user,
                                                  stream_name, "Message one")
        later_subscribed_user = self.example_user("cordelia")
        self.subscribe(later_subscribed_user, stream_name)
        message_two_id = self.send_stream_message(user,
                                                  stream_name, "Message two")
        message_ids = [message_one_id, message_two_id]
        messages = [Message.objects.select_related().get(id=message_id)
                    for message_id in message_ids]
        # All public stream messages are accessible, even those sent before
        # the user subscribed, and even to unsubscribed users.
        filtered_messages = bulk_access_messages(later_subscribed_user, messages)
        self.assertEqual(len(filtered_messages), 2)
        unsubscribed_user = self.example_user("ZOE")
        filtered_messages = bulk_access_messages(unsubscribed_user, messages)
        self.assertEqual(len(filtered_messages), 2)
class MessageHasKeywordsTest(ZulipTestCase):
    """Tests for the denormalized content-derived fields on Message
    (has_link, has_image, has_attachment) and attachment claiming."""

    def setup_dummy_attachments(self, user_profile: UserProfile) -> List[str]:
        """Create three unclaimed Attachment rows for user_profile.

        Returns the list of their path_ids (the second element of each tuple).
        """
        sample_size = 10
        realm_id = user_profile.realm_id
        dummy_files = [
            ('zulip.txt', '%s/31/4CBjtTLYZhk66pZrF8hnYGwc/zulip.txt' % (realm_id,), sample_size),
            ('temp_file.py', '%s/31/4CBjtTLYZhk66pZrF8hnYGwc/temp_file.py' % (realm_id,), sample_size),
            ('abc.py', '%s/31/4CBjtTLYZhk66pZrF8hnYGwc/abc.py' % (realm_id,), sample_size)
        ]
        for file_name, path_id, size in dummy_files:
            create_attachment(file_name, path_id, user_profile, size)
        # Return only the path_ids.
        return [x[1] for x in dummy_files]

    def test_claim_attachment(self) -> None:
        """An attachment is claimed only when its URL appears as a real link
        in a sent message — not inside inline code or malformed markup."""
        user_profile = self.example_user('hamlet')
        dummy_path_ids = self.setup_dummy_attachments(user_profile)
        dummy_urls = ["http://zulip.testserver/user_uploads/{}".format(x) for x in dummy_path_ids]
        self.subscribe(user_profile, "Denmark")
        def assert_attachment_claimed(path_id: str, claimed: bool) -> None:
            attachment = Attachment.objects.get(path_id=path_id)
            self.assertEqual(attachment.is_claimed(), claimed)
        # A proper markdown link claims attachment 0; the bare run-on
        # occurrences of dummy_urls[1] do not claim attachment 1.
        body = ("Some files here ...[zulip.txt]({})" +
                "{}.... Some more...." +
                "{}").format(dummy_urls[0], dummy_urls[1], dummy_urls[1])
        self.send_stream_message(user_profile, "Denmark", body, "test")
        assert_attachment_claimed(dummy_path_ids[0], True)
        assert_attachment_claimed(dummy_path_ids[1], False)
        # A URL inside inline code does not claim the attachment.
        body = "Link in code: `{}`".format(dummy_urls[2])
        self.send_stream_message(user_profile, "Denmark", body, "test")
        assert_attachment_claimed(dummy_path_ids[2], False)
        # A URL embedded in unparseable punctuation does not claim either.
        body = "Link to not parse: .{}.`".format(dummy_urls[2])
        self.send_stream_message(user_profile, "Denmark", body, "test")
        assert_attachment_claimed(dummy_path_ids[2], False)
        # Finally, claim attachment 3.
        body = "Link: {}".format(dummy_urls[2])
        self.send_stream_message(user_profile, "Denmark", body, "test")
        assert_attachment_claimed(dummy_path_ids[2], True)
        assert_attachment_claimed(dummy_path_ids[1], False)

    def test_finds_all_links(self) -> None:
        """Bare domains, markdown links, and explicit URLs all set has_link."""
        msg_ids = []
        msg_contents = ["foo.org", "[bar](baz.gov)", "http://quux.ca"]
        for msg_content in msg_contents:
            msg_ids.append(self.send_stream_message(self.example_user('hamlet'),
                                                    'Denmark', content=msg_content))
        msgs = [Message.objects.get(id=id) for id in msg_ids]
        self.assertTrue(all([msg.has_link for msg in msgs]))

    def test_finds_only_links(self) -> None:
        """URLs in code spans / math and plain words should not set has_link."""
        msg_ids = []
        msg_contents = ["`example.org`", '``example.org```', '$$https://example.org$$', "foo"]
        for msg_content in msg_contents:
            msg_ids.append(self.send_stream_message(self.example_user('hamlet'),
                                                    'Denmark', content=msg_content))
        msgs = [Message.objects.get(id=id) for id in msg_ids]
        # NOTE(review): assertFalse(all(...)) passes as long as ANY message
        # lacks a link ("foo" always does).  assertFalse(any(...)) would be the
        # stricter check that none of these count as links — confirm markdown
        # behavior for the code/math cases before tightening.
        self.assertFalse(all([msg.has_link for msg in msgs]))

    def update_message(self, msg: Message, content: str) -> None:
        """Helper: edit msg's content in place via do_update_message."""
        hamlet = self.example_user('hamlet')
        realm_id = hamlet.realm.id
        rendered_content = render_markdown(msg, content)
        mention_data = bugdown.MentionData(realm_id, content)
        do_update_message(hamlet, msg, None, None, "change_one", content,
                          rendered_content, set(), set(), mention_data=mention_data)

    def test_finds_link_after_edit(self) -> None:
        """has_link is recomputed on every edit."""
        hamlet = self.example_user('hamlet')
        msg_id = self.send_stream_message(hamlet, 'Denmark', content='a')
        msg = Message.objects.get(id=msg_id)
        self.assertFalse(msg.has_link)
        self.update_message(msg, 'a http://foo.com')
        self.assertTrue(msg.has_link)
        self.update_message(msg, 'a')
        self.assertFalse(msg.has_link)
        # Check in blockquotes work
        self.update_message(msg, '> http://bar.com')
        self.assertTrue(msg.has_link)
        # URLs in inline code don't count as links.
        self.update_message(msg, 'a `http://foo.com`')
        self.assertFalse(msg.has_link)

    def test_has_image(self) -> None:
        """has_image is set for image URLs (not PDFs), and recomputed on edit."""
        msg_ids = []
        msg_contents = ["Link: foo.org",
                        "Image: https://www.google.com/images/srpr/logo4w.png",
                        "Image: https://www.google.com/images/srpr/logo4w.pdf",
                        "[Google Link](https://www.google.com/images/srpr/logo4w.png)"]
        for msg_content in msg_contents:
            msg_ids.append(self.send_stream_message(self.example_user('hamlet'),
                                                    'Denmark', content=msg_content))
        msgs = [Message.objects.get(id=id) for id in msg_ids]
        self.assertEqual([False, True, False, True], [msg.has_image for msg in msgs])
        self.update_message(msgs[0], 'https://www.google.com/images/srpr/logo4w.png')
        self.assertTrue(msgs[0].has_image)
        self.update_message(msgs[0], 'No Image Again')
        self.assertFalse(msgs[0].has_image)

    def test_has_attachment(self) -> None:
        """has_attachment and the attachment_set follow the links present in
        the current content, including across edits."""
        hamlet = self.example_user('hamlet')
        dummy_path_ids = self.setup_dummy_attachments(hamlet)
        dummy_urls = ["http://zulip.testserver/user_uploads/{}".format(x) for x in dummy_path_ids]
        self.subscribe(hamlet, "Denmark")
        body = ("Files ...[zulip.txt]({}) {} {}").format(dummy_urls[0], dummy_urls[1], dummy_urls[2])
        msg_id = self.send_stream_message(hamlet, "Denmark", body, "test")
        msg = Message.objects.get(id=msg_id)
        self.assertTrue(msg.has_attachment)
        self.update_message(msg, 'No Attachments')
        self.assertFalse(msg.has_attachment)
        self.update_message(msg, body)
        self.assertTrue(msg.has_attachment)
        # A URL inside inline code doesn't count as an attachment link.
        self.update_message(msg, 'Link in code: `{}`'.format(dummy_urls[1]))
        self.assertFalse(msg.has_attachment)
        # Test blockquotes
        self.update_message(msg, '> {}'.format(dummy_urls[1]))
        self.assertTrue(msg.has_attachment)
        # Additional test to check has_attachment is being set is due to the correct attachment.
        self.update_message(msg, 'Outside: {}. In code: `{}`.'.format(dummy_urls[0], dummy_urls[1]))
        self.assertTrue(msg.has_attachment)
        self.assertTrue(msg.attachment_set.filter(path_id=dummy_path_ids[0]))
        self.assertEqual(msg.attachment_set.count(), 1)
        self.update_message(msg, 'Outside: {}. In code: `{}`.'.format(dummy_urls[1], dummy_urls[0]))
        self.assertTrue(msg.has_attachment)
        self.assertTrue(msg.attachment_set.filter(path_id=dummy_path_ids[1]))
        self.assertEqual(msg.attachment_set.count(), 1)
        self.update_message(msg, 'Both in code: `{} {}`.'.format(dummy_urls[1], dummy_urls[0]))
        self.assertFalse(msg.has_attachment)
        self.assertEqual(msg.attachment_set.count(), 0)

    def test_potential_attachment_path_ids(self) -> None:
        """do_claim_attachments is invoked only when an edit introduces a NEW
        user_uploads link (absolute-with-host or root-relative forms)."""
        hamlet = self.example_user('hamlet')
        self.subscribe(hamlet, "Denmark")
        dummy_path_ids = self.setup_dummy_attachments(hamlet)
        body = "Hello"
        msg_id = self.send_stream_message(hamlet, "Denmark", body, "test")
        msg = Message.objects.get(id=msg_id)
        with mock.patch("zerver.lib.actions.do_claim_attachments",
                        wraps=do_claim_attachments) as m:
            # Absolute URL on the realm's own host: claimed.
            self.update_message(msg, '[link](http://{}/user_uploads/{})'.format(
                hamlet.realm.host, dummy_path_ids[0]))
            self.assertTrue(m.called)
            m.reset_mock()
            # Root-relative user_uploads URL: claimed.
            self.update_message(msg, '[link](/user_uploads/{})'.format(dummy_path_ids[1]))
            self.assertTrue(m.called)
            m.reset_mock()
            # Same attachment as before — no new claim needed.
            self.update_message(msg, '[new text link](/user_uploads/{})'.format(dummy_path_ids[1]))
            self.assertFalse(m.called)
            m.reset_mock()
            # It's not clear this is correct behavior
            self.update_message(msg, '[link](user_uploads/{})'.format(dummy_path_ids[2]))
            self.assertFalse(m.called)
            m.reset_mock()
            # user_uploads path on a foreign host: not claimed.
            self.update_message(msg, '[link](https://github.com/user_uploads/{})'.format(
                dummy_path_ids[0]))
            self.assertFalse(m.called)
            m.reset_mock()
class MissedMessageTest(ZulipTestCase):
    """Tests for get_active_presence_idle_user_ids()."""

    def test_presence_idle_user_ids(self) -> None:
        """Recipients are 'presence idle' unless they have recent presence;
        for stream messages, only mentioned users are considered."""
        UserPresence.objects.all().delete()
        sender = self.example_user('cordelia')
        realm = sender.realm
        hamlet = self.example_user('hamlet')
        othello = self.example_user('othello')
        recipient_ids = {hamlet.id, othello.id}
        message_type = 'stream'
        user_flags: Dict[int, List[str]] = {}
        # NOTE: both closures below read message_type/user_flags from the
        # enclosing scope, so reassigning those names changes their behavior.
        def assert_missing(user_ids: List[int]) -> None:
            # Assert exactly user_ids are reported as presence-idle.
            presence_idle_user_ids = get_active_presence_idle_user_ids(
                realm=realm,
                sender_id=sender.id,
                message_type=message_type,
                active_user_ids=recipient_ids,
                user_flags=user_flags,
            )
            self.assertEqual(sorted(user_ids), sorted(presence_idle_user_ids))
        def set_presence(user: UserProfile, client_name: str, ago: int) -> None:
            # Record a presence event for `user` from `ago` seconds in the past.
            when = timezone_now() - datetime.timedelta(seconds=ago)
            UserPresence.objects.create(
                user_profile_id=user.id,
                realm_id=user.realm_id,
                client=get_client(client_name),
                timestamp=when,
            )
        # Private message: both recipients lack presence, so both are idle.
        message_type = 'private'
        assert_missing([hamlet.id, othello.id])
        # Stream message: only the mentioned user (hamlet) is considered.
        message_type = 'stream'
        user_flags[hamlet.id] = ['mentioned']
        assert_missing([hamlet.id])
        # Stale presence (5000s ago) still counts as idle.
        set_presence(hamlet, 'iPhone', ago=5000)
        assert_missing([hamlet.id])
        # Fresh presence (15s ago) makes hamlet active.
        set_presence(hamlet, 'webapp', ago=15)
        assert_missing([])
        # Private message again: othello still has no presence at all.
        message_type = 'private'
        assert_missing([othello.id])
class LogDictTest(ZulipTestCase):
    """Tests for Message.to_log_dict()."""

    def test_to_log_dict(self) -> None:
        """The log dict of a stream message carries every expected field."""
        sender = self.example_user('hamlet')
        content = 'find me some good coffee shops'
        message_id = self.send_stream_message(
            sender, 'Denmark', topic_name='Copenhagen', content=content)
        log_dict = Message.objects.get(id=message_id).to_log_dict()
        self.assertIn('timestamp', log_dict)
        self.assertEqual(log_dict['content'], 'find me some good coffee shops')
        self.assertEqual(log_dict['id'], message_id)
        self.assertEqual(log_dict['recipient'], 'Denmark')
        self.assertEqual(log_dict['sender_realm_str'], 'zulip')
        self.assertEqual(log_dict['sender_email'], sender.email)
        self.assertEqual(log_dict['sender_full_name'], 'King Hamlet')
        self.assertEqual(log_dict['sender_id'], sender.id)
        self.assertEqual(log_dict['sender_short_name'], 'hamlet')
        self.assertEqual(log_dict['sending_client'], 'test suite')
        self.assertEqual(log_dict[DB_TOPIC_NAME], 'Copenhagen')
        self.assertEqual(log_dict['type'], 'stream')
class CheckMessageTest(ZulipTestCase):
    """Tests for check_message() and the bot-owner error-notification path."""

    def test_basic_check_message_call(self) -> None:
        """check_message() returns a message dict attributed to the sender."""
        sender = self.example_user('othello')
        client = make_client(name="test suite")
        stream_name = 'España y Francia'
        self.make_stream(stream_name)
        topic_name = 'issue'
        message_content = 'whatever'
        addressee = Addressee.for_stream_name(stream_name, topic_name)
        ret = check_message(sender, client, addressee, message_content)
        self.assertEqual(ret['message'].sender.id, sender.id)

    def test_bot_pm_feature(self) -> None:
        """When a bot's message cannot be delivered, the bot owner gets a PM;
        repeat notifications are rate-limited via bot.last_reminder."""
        parent = self.example_user('othello')
        bot = do_create_user(
            email='othello-bot@zulip.com',
            password='',
            realm=parent.realm,
            full_name='',
            short_name='',
            bot_type=UserProfile.DEFAULT_BOT,
            bot_owner=parent
        )
        bot.last_reminder = None
        sender = bot
        client = make_client(name="test suite")
        stream_name = 'Россия'
        topic_name = 'issue'
        addressee = Addressee.for_stream_name(stream_name, topic_name)
        message_content = 'whatever'
        old_count = message_stream_count(parent)
        # Sending to a nonexistent stream raises, and the bot owner (parent)
        # receives a notification PM about it.
        with self.assertRaises(JsonableError):
            check_message(sender, client, addressee, message_content)
        new_count = message_stream_count(parent)
        self.assertEqual(new_count, old_count + 1)
        self.assertIn("that stream does not exist.", most_recent_message(parent).content)
        # Try sending to stream that exists with no subscribers soon
        # after; due to rate-limiting, this should send nothing.
        self.make_stream(stream_name)
        # Fix: the return value was previously bound to an unused `ret`.
        check_message(sender, client, addressee, message_content)
        new_count = message_stream_count(parent)
        self.assertEqual(new_count, old_count + 1)
        # Try sending to stream that exists with no subscribers longer
        # after; this should send an error to the bot owner that the
        # stream doesn't exist
        assert(sender.last_reminder is not None)
        sender.last_reminder = sender.last_reminder - datetime.timedelta(hours=1)
        sender.save(update_fields=["last_reminder"])
        ret = check_message(sender, client, addressee, message_content)
        new_count = message_stream_count(parent)
        self.assertEqual(new_count, old_count + 2)
        self.assertEqual(ret['message'].sender.email, 'othello-bot@zulip.com')
        self.assertIn("does not have any subscribers", most_recent_message(parent).content)

    def test_bot_pm_error_handling(self) -> None:
        """The notification helper is a silent no-op for a non-bot sender, a
        realm mismatch, or a deactivated realm (last_reminder stays unset)."""
        cordelia = self.example_user('cordelia')
        test_bot = self.create_test_bot(
            short_name='test',
            user_profile=cordelia,
        )
        content = 'whatever'
        good_realm = test_bot.realm
        wrong_realm = get_realm("zephyr")
        wrong_sender = cordelia
        send_rate_limited_pm_notification_to_bot_owner(test_bot, wrong_realm, content)
        self.assertEqual(test_bot.last_reminder, None)
        send_rate_limited_pm_notification_to_bot_owner(wrong_sender, good_realm, content)
        self.assertEqual(test_bot.last_reminder, None)
        test_bot.realm.deactivated = True
        send_rate_limited_pm_notification_to_bot_owner(test_bot, good_realm, content)
        self.assertEqual(test_bot.last_reminder, None)
class DeleteMessageTest(ZulipTestCase):
    """Tests for the message-deletion endpoint and its permission model."""

    def test_delete_message_invalid_request_format(self) -> None:
        """Deleting a nonexistent message id fails; a valid one succeeds."""
        self.login('iago')
        hamlet = self.example_user('hamlet')
        msg_id = self.send_stream_message(hamlet, "Scotland")
        # msg_id + 1 does not exist yet, so this must fail.
        result = self.client_delete('/json/messages/{msg_id}'.format(msg_id=msg_id + 1),
                                    {'message_id': msg_id})
        self.assert_json_error(result, "Invalid message(s)")
        result = self.client_delete('/json/messages/{msg_id}'.format(msg_id=msg_id))
        self.assert_json_success(result)

    def test_delete_message_by_user(self) -> None:
        """Exercises who may delete a message under each combination of the
        realm's allow_message_deleting / delete-time-limit settings."""
        def set_message_deleting_params(allow_message_deleting: bool,
                                        message_content_delete_limit_seconds: int) -> None:
            # Configure the realm's message-deletion policy (as admin iago).
            self.login('iago')
            result = self.client_patch("/json/realm", {
                'allow_message_deleting': ujson.dumps(allow_message_deleting),
                'message_content_delete_limit_seconds': message_content_delete_limit_seconds
            })
            self.assert_json_success(result)
        def test_delete_message_by_admin(msg_id: int) -> HttpResponse:
            # iago is a realm admin.
            self.login('iago')
            result = self.client_delete('/json/messages/{msg_id}'.format(msg_id=msg_id))
            return result
        def test_delete_message_by_owner(msg_id: int) -> HttpResponse:
            # hamlet is the sender of every message in this test.
            self.login('hamlet')
            result = self.client_delete('/json/messages/{msg_id}'.format(msg_id=msg_id))
            return result
        def test_delete_message_by_other_user(msg_id: int) -> HttpResponse:
            # cordelia is neither sender nor admin.
            self.login('cordelia')
            result = self.client_delete('/json/messages/{msg_id}'.format(msg_id=msg_id))
            return result
        # Deleting disabled: only admins may delete.
        set_message_deleting_params(False, 0)
        hamlet = self.example_user('hamlet')
        self.login_user(hamlet)
        msg_id = self.send_stream_message(hamlet, "Scotland")
        result = test_delete_message_by_owner(msg_id=msg_id)
        self.assert_json_error(result, "You don't have permission to delete this message")
        result = test_delete_message_by_other_user(msg_id=msg_id)
        self.assert_json_error(result, "You don't have permission to delete this message")
        result = test_delete_message_by_admin(msg_id=msg_id)
        self.assert_json_success(result)
        # Deleting enabled with no time limit: owner may delete; others may not.
        set_message_deleting_params(True, 0)
        msg_id = self.send_stream_message(hamlet, "Scotland")
        message = Message.objects.get(id=msg_id)
        message.date_sent = message.date_sent - datetime.timedelta(seconds=600)
        message.save()
        result = test_delete_message_by_other_user(msg_id=msg_id)
        self.assert_json_error(result, "You don't have permission to delete this message")
        result = test_delete_message_by_owner(msg_id=msg_id)
        self.assert_json_success(result)
        # Test if time limit is non-zero.
        set_message_deleting_params(True, 240)
        msg_id_1 = self.send_stream_message(hamlet, "Scotland")
        message = Message.objects.get(id=msg_id_1)
        # msg_id_1 is 120s old — within the 240s limit.
        message.date_sent = message.date_sent - datetime.timedelta(seconds=120)
        message.save()
        msg_id_2 = self.send_stream_message(hamlet, "Scotland")
        message = Message.objects.get(id=msg_id_2)
        # msg_id_2 is 360s old — past the 240s limit.
        message.date_sent = message.date_sent - datetime.timedelta(seconds=360)
        message.save()
        result = test_delete_message_by_other_user(msg_id=msg_id_1)
        self.assert_json_error(result, "You don't have permission to delete this message")
        result = test_delete_message_by_owner(msg_id=msg_id_1)
        self.assert_json_success(result)
        result = test_delete_message_by_owner(msg_id=msg_id_2)
        self.assert_json_error(result, "The time limit for deleting this message has passed")
        # Admins are exempt from the time limit.
        result = test_delete_message_by_admin(msg_id=msg_id_2)
        self.assert_json_success(result)
        # Deleting an already-deleted message fails.
        msg_id = self.send_stream_message(hamlet, "Scotland")
        result = test_delete_message_by_owner(msg_id=msg_id)
        self.assert_json_success(result)
        result = test_delete_message_by_owner(msg_id=msg_id)
        self.assert_json_error(result, "Invalid message(s)")
        # Simulate a race: the message disappears between the access check and
        # the actual deletion — both failure modes report "already deleted".
        with mock.patch("zerver.views.messages.do_delete_messages") as m, \
                mock.patch("zerver.views.messages.validate_can_delete_message", return_value=None), \
                mock.patch("zerver.views.messages.access_message", return_value=(None, None)):
            m.side_effect = IntegrityError()
            result = test_delete_message_by_owner(msg_id=msg_id)
            self.assert_json_error(result, "Message already deleted")
            m.side_effect = Message.DoesNotExist()
            result = test_delete_message_by_owner(msg_id=msg_id)
            self.assert_json_error(result, "Message already deleted")
class SoftDeactivationMessageTest(ZulipTestCase):
    def test_reactivate_user_if_soft_deactivated(self) -> None:
        """Reactivating a soft-deactivated user backfills missed UserMessage
        rows, clears long_term_idle, and writes a RealmAuditLog entry."""
        recipient_list = [self.example_user("hamlet"), self.example_user("iago")]
        for user_profile in recipient_list:
            self.subscribe(user_profile, "Denmark")
        sender = self.example_user('iago')
        stream_name = 'Denmark'
        topic_name = 'foo'
        def last_realm_audit_log_entry(event_type: int) -> RealmAuditLog:
            # Most recent audit-log row of the given type.
            return RealmAuditLog.objects.filter(
                event_type=event_type
            ).order_by('-event_time')[0]
        long_term_idle_user = self.example_user('hamlet')
        # Send a message to ensure that the user  has a last_active_message_id,
        # then soft-deactivate them.
        self.send_stream_message(long_term_idle_user, stream_name)
        do_soft_deactivate_users([long_term_idle_user])
        message = 'Test Message 1'
        message_id = self.send_stream_message(sender, stream_name,
                                              message, topic_name)
        idle_user_msg_list = get_user_messages(long_term_idle_user)
        idle_user_msg_count = len(idle_user_msg_list)
        # While soft-deactivated, no UserMessage row was created for the message.
        self.assertNotEqual(idle_user_msg_list[-1].content, message)
        with queries_captured() as queries:
            reactivate_user_if_soft_deactivated(long_term_idle_user)
        # Guard against query-count regressions in the reactivation path.
        self.assert_length(queries, 8)
        self.assertFalse(long_term_idle_user.long_term_idle)
        self.assertEqual(last_realm_audit_log_entry(
            RealmAuditLog.USER_SOFT_ACTIVATED).modified_user, long_term_idle_user)
        # The missed message is now present as the latest UserMessage row.
        idle_user_msg_list = get_user_messages(long_term_idle_user)
        self.assertEqual(len(idle_user_msg_list), idle_user_msg_count + 1)
        self.assertEqual(idle_user_msg_list[-1].content, message)
        long_term_idle_user.refresh_from_db()
        self.assertEqual(long_term_idle_user.last_active_message_id, message_id)
    def test_add_missing_messages(self) -> None:
        """add_missing_messages() backfills UserMessage rows for messages a
        soft-deactivated user missed, correctly handling subscribe/unsubscribe
        churn on both public and private streams."""
        recipient_list = [self.example_user("hamlet"), self.example_user("iago")]
        for user_profile in recipient_list:
            self.subscribe(user_profile, "Denmark")
        sender = self.example_user('iago')
        realm = sender.realm
        sending_client = make_client(name="test suite")
        stream_name = 'Denmark'
        stream = get_stream(stream_name, realm)
        topic_name = 'foo'
        def send_fake_message(message_content: str, stream: Stream) -> Message:
            # Save a Message row directly, bypassing the normal send path so
            # no UserMessage rows are created for anyone.
            recipient = stream.recipient
            message = Message(sender = sender,
                              recipient = recipient,
                              content = message_content,
                              date_sent = timezone_now(),
                              sending_client = sending_client)
            message.set_topic_name(topic_name)
            message.save()
            return message
        long_term_idle_user = self.example_user('hamlet')
        self.send_stream_message(long_term_idle_user, stream_name)
        do_soft_deactivate_users([long_term_idle_user])
        # Basic case: one missed message gets backfilled.
        sent_message = send_fake_message('Test Message 1', stream)
        idle_user_msg_list = get_user_messages(long_term_idle_user)
        idle_user_msg_count = len(idle_user_msg_list)
        self.assertNotEqual(idle_user_msg_list[-1], sent_message)
        with queries_captured() as queries:
            add_missing_messages(long_term_idle_user)
        self.assert_length(queries, 6)
        idle_user_msg_list = get_user_messages(long_term_idle_user)
        self.assertEqual(len(idle_user_msg_list), idle_user_msg_count + 1)
        self.assertEqual(idle_user_msg_list[-1], sent_message)
        long_term_idle_user.refresh_from_db()
        self.assertEqual(long_term_idle_user.last_active_message_id, sent_message.id)
        # A second run backfills only the newly missed message and advances
        # last_active_message_id — earlier messages are not re-added.
        sent_message = send_fake_message('Test Message 2', stream)
        idle_user_msg_list = get_user_messages(long_term_idle_user)
        idle_user_msg_count = len(idle_user_msg_list)
        self.assertNotEqual(idle_user_msg_list[-1], sent_message)
        with queries_captured() as queries:
            add_missing_messages(long_term_idle_user)
        self.assert_length(queries, 7)
        idle_user_msg_list = get_user_messages(long_term_idle_user)
        self.assertEqual(len(idle_user_msg_list), idle_user_msg_count + 1)
        self.assertEqual(idle_user_msg_list[-1], sent_message)
        long_term_idle_user.refresh_from_db()
        self.assertEqual(long_term_idle_user.last_active_message_id, sent_message.id)
        # Test UserMessage rows are created correctly in case of stream
        # Subscription was altered by admin while user was away.
        # Test for a public stream.
        sent_message_list = []
        sent_message_list.append(send_fake_message('Test Message 3', stream))
        # Alter subscription to stream.
        self.unsubscribe(long_term_idle_user, stream_name)
        # Message 4 arrives while unsubscribed, so it must NOT be backfilled.
        send_fake_message('Test Message 4', stream)
        self.subscribe(long_term_idle_user, stream_name)
        sent_message_list.append(send_fake_message('Test Message 5', stream))
        sent_message_list.reverse()
        idle_user_msg_list = get_user_messages(long_term_idle_user)
        idle_user_msg_count = len(idle_user_msg_list)
        for sent_message in sent_message_list:
            self.assertNotEqual(idle_user_msg_list.pop(), sent_message)
        with queries_captured() as queries:
            add_missing_messages(long_term_idle_user)
        self.assert_length(queries, 6)
        idle_user_msg_list = get_user_messages(long_term_idle_user)
        self.assertEqual(len(idle_user_msg_list), idle_user_msg_count + 2)
        for sent_message in sent_message_list:
            self.assertEqual(idle_user_msg_list.pop(), sent_message)
        long_term_idle_user.refresh_from_db()
        self.assertEqual(long_term_idle_user.last_active_message_id, sent_message_list[0].id)
        # Test consecutive subscribe/unsubscribe in a public stream
        sent_message_list = []
        sent_message_list.append(send_fake_message('Test Message 6', stream))
        # Unsubscribe from stream and then immediately subscribe back again.
        self.unsubscribe(long_term_idle_user, stream_name)
        self.subscribe(long_term_idle_user, stream_name)
        sent_message_list.append(send_fake_message('Test Message 7', stream))
        # Again unsubscribe from stream and send a message.
        # This will make sure that if initially in a unsubscribed state
        # a consecutive subscribe/unsubscribe doesn't misbehave.
        self.unsubscribe(long_term_idle_user, stream_name)
        send_fake_message('Test Message 8', stream)
        self.subscribe(long_term_idle_user, stream_name)
        self.unsubscribe(long_term_idle_user, stream_name)
        sent_message_list.reverse()
        idle_user_msg_list = get_user_messages(long_term_idle_user)
        idle_user_msg_count = len(idle_user_msg_list)
        for sent_message in sent_message_list:
            self.assertNotEqual(idle_user_msg_list.pop(), sent_message)
        with queries_captured() as queries:
            add_missing_messages(long_term_idle_user)
        self.assert_length(queries, 6)
        idle_user_msg_list = get_user_messages(long_term_idle_user)
        self.assertEqual(len(idle_user_msg_list), idle_user_msg_count + 2)
        for sent_message in sent_message_list:
            self.assertEqual(idle_user_msg_list.pop(), sent_message)
        long_term_idle_user.refresh_from_db()
        self.assertEqual(long_term_idle_user.last_active_message_id, sent_message_list[0].id)
        # Messages sent while soft-activated (9) or while unsubscribed (10)
        # need no backfill — add_missing_messages() is a near no-op.
        do_soft_activate_users([long_term_idle_user])
        self.subscribe(long_term_idle_user, stream_name)
        sent_message_id = self.send_stream_message(
            sender, stream_name, 'Test Message 9')
        self.unsubscribe(long_term_idle_user, stream_name)
        do_soft_deactivate_users([long_term_idle_user])
        send_fake_message('Test Message 10', stream)
        idle_user_msg_list = get_user_messages(long_term_idle_user)
        idle_user_msg_count = len(idle_user_msg_list)
        self.assertEqual(idle_user_msg_list[-1].id, sent_message_id)
        with queries_captured() as queries:
            add_missing_messages(long_term_idle_user)
        self.assert_length(queries, 4)
        idle_user_msg_list = get_user_messages(long_term_idle_user)
        self.assertEqual(len(idle_user_msg_list), idle_user_msg_count)
        # Same subscribe/unsubscribe churn, but on a private stream: only
        # messages 12 and 14 (sent while subscribed) are backfilled.
        stream_name = "Core"
        private_stream = self.make_stream('Core', invite_only=True)
        self.subscribe(self.example_user("iago"), stream_name)
        sent_message_list = []
        send_fake_message('Test Message 11', private_stream)
        self.subscribe(self.example_user("hamlet"), stream_name)
        sent_message_list.append(send_fake_message('Test Message 12', private_stream))
        self.unsubscribe(long_term_idle_user, stream_name)
        send_fake_message('Test Message 13', private_stream)
        self.subscribe(long_term_idle_user, stream_name)
        sent_message_list.append(send_fake_message('Test Message 14', private_stream))
        sent_message_list.reverse()
        idle_user_msg_list = get_user_messages(long_term_idle_user)
        idle_user_msg_count = len(idle_user_msg_list)
        for sent_message in sent_message_list:
            self.assertNotEqual(idle_user_msg_list.pop(), sent_message)
        with queries_captured() as queries:
            add_missing_messages(long_term_idle_user)
        self.assert_length(queries, 6)
        idle_user_msg_list = get_user_messages(long_term_idle_user)
        self.assertEqual(len(idle_user_msg_list), idle_user_msg_count + 2)
        for sent_message in sent_message_list:
            self.assertEqual(idle_user_msg_list.pop(), sent_message)
        long_term_idle_user.refresh_from_db()
        self.assertEqual(long_term_idle_user.last_active_message_id, sent_message_list[0].id)
    @mock.patch('zerver.lib.soft_deactivation.BULK_CREATE_BATCH_SIZE', 2)
    def test_add_missing_messages_pagination(self) -> None:
        """Verify that backfilling UserMessage rows for a soft-deactivated user
        works across multiple bulk-create batches (batch size patched to 2
        above, so 5 new messages require several batches)."""
        recipient_list = [self.example_user("hamlet"), self.example_user("iago")]
        stream_name = 'Denmark'
        for user_profile in recipient_list:
            self.subscribe(user_profile, stream_name)
        sender = self.example_user('iago')
        long_term_idle_user = self.example_user('hamlet')
        # Hamlet sends once so he has a last_active_message_id, then is
        # soft-deactivated; messages sent afterwards get no UserMessage rows.
        self.send_stream_message(long_term_idle_user, stream_name)
        do_soft_deactivate_users([long_term_idle_user])
        num_new_messages = 5
        message_ids = []
        for _ in range(num_new_messages):
            message_id = self.send_stream_message(sender, stream_name)
            message_ids.append(message_id)
        idle_user_msg_list = get_user_messages(long_term_idle_user)
        idle_user_msg_count = len(idle_user_msg_list)
        # The backfill should stay within a fixed query budget even though
        # the rows are created in multiple batches.
        with queries_captured() as queries:
            add_missing_messages(long_term_idle_user)
            self.assert_length(queries, 10)
        idle_user_msg_list = get_user_messages(long_term_idle_user)
        self.assertEqual(len(idle_user_msg_list), idle_user_msg_count + num_new_messages)
        long_term_idle_user.refresh_from_db()
        # After backfill the user is "caught up" to the newest message.
        self.assertEqual(long_term_idle_user.last_active_message_id, message_ids[-1])
    def test_user_message_filter(self) -> None:
        """Check exactly which messages create UserMessage rows for a
        soft-deactivated user at send time (rather than being backfilled
        later): notification-enabled streams, PMs, personal mentions,
        wildcard mentions and alert words do; ordinary stream messages,
        mentions of other users, and /me messages do not."""
        recipient_list = [
            self.example_user("hamlet"),
            self.example_user("iago"),
            self.example_user('cordelia')
        ]
        for user_profile in recipient_list:
            self.subscribe(user_profile, "Denmark")
        cordelia = self.example_user('cordelia')
        sender = self.example_user('iago')
        stream_name = 'Denmark'
        topic_name = 'foo'
        def send_stream_message(content: str) -> None:
            self.send_stream_message(sender, stream_name,
                                     content, topic_name)
        def send_personal_message(content: str) -> None:
            self.send_personal_message(sender, self.example_user("hamlet"), content)
        long_term_idle_user = self.example_user('hamlet')
        self.send_stream_message(long_term_idle_user, stream_name)
        do_soft_deactivate_users([long_term_idle_user])
        def assert_um_count(user: UserProfile, count: int) -> None:
            # Assert the user's total number of UserMessage rows.
            user_messages = get_user_messages(user)
            self.assertEqual(len(user_messages), count)
        def assert_last_um_content(user: UserProfile, content: str, negate: bool=False) -> None:
            # Assert (or with negate=True, deny) that the user's newest
            # UserMessage row is for the given message content.
            user_messages = get_user_messages(user)
            if negate:
                self.assertNotEqual(user_messages[-1].content, content)
            else:
                self.assertEqual(user_messages[-1].content, content)
        # Baseline: a plain stream message creates a row for active cordelia
        # but NOT for the soft-deactivated user.
        general_user_msg_count = len(get_user_messages(cordelia))
        soft_deactivated_user_msg_count = len(get_user_messages(long_term_idle_user))
        message = 'Test Message 1'
        send_stream_message(message)
        assert_last_um_content(long_term_idle_user, message, negate=True)
        assert_um_count(long_term_idle_user, soft_deactivated_user_msg_count)
        assert_um_count(cordelia, general_user_msg_count + 1)
        assert_last_um_content(cordelia, message)
        # Test that sending a message to a stream with soft deactivated user
        # and push/email notifications on creates a UserMessage row for the
        # deactivated user.
        sub = get_subscription(stream_name, long_term_idle_user)
        sub.push_notifications = True
        sub.save()
        general_user_msg_count = len(get_user_messages(cordelia))
        soft_deactivated_user_msg_count = len(get_user_messages(long_term_idle_user))
        message = 'Test private stream message'
        send_stream_message(message)
        assert_um_count(long_term_idle_user, soft_deactivated_user_msg_count + 1)
        assert_last_um_content(long_term_idle_user, message)
        sub.push_notifications = False
        sub.save()
        # Test sending a private message to soft deactivated user creates
        # UserMessage row.
        soft_deactivated_user_msg_count = len(get_user_messages(long_term_idle_user))
        message = 'Test PM'
        send_personal_message(message)
        assert_um_count(long_term_idle_user, soft_deactivated_user_msg_count + 1)
        assert_last_um_content(long_term_idle_user, message)
        # Test UserMessage row is created while user is deactivated if
        # user itself is mentioned.
        general_user_msg_count = len(get_user_messages(cordelia))
        soft_deactivated_user_msg_count = len(get_user_messages(long_term_idle_user))
        message = 'Test @**King Hamlet** mention'
        send_stream_message(message)
        assert_last_um_content(long_term_idle_user, message)
        assert_um_count(long_term_idle_user, soft_deactivated_user_msg_count + 1)
        assert_um_count(cordelia, general_user_msg_count + 1)
        assert_last_um_content(cordelia, message)
        # Test UserMessage row is not created while user is deactivated if
        # anyone is mentioned but the user.
        general_user_msg_count = len(get_user_messages(cordelia))
        soft_deactivated_user_msg_count = len(get_user_messages(long_term_idle_user))
        message = 'Test @**Cordelia Lear** mention'
        send_stream_message(message)
        assert_last_um_content(long_term_idle_user, message, negate=True)
        assert_um_count(long_term_idle_user, soft_deactivated_user_msg_count)
        assert_um_count(cordelia, general_user_msg_count + 1)
        assert_last_um_content(cordelia, message)
        # Test UserMessage row is created while user is deactivated if
        # there is a wildcard mention such as @all or @everyone
        general_user_msg_count = len(get_user_messages(cordelia))
        soft_deactivated_user_msg_count = len(get_user_messages(long_term_idle_user))
        message = 'Test @**all** mention'
        send_stream_message(message)
        assert_last_um_content(long_term_idle_user, message)
        assert_um_count(long_term_idle_user, soft_deactivated_user_msg_count + 1)
        assert_um_count(cordelia, general_user_msg_count + 1)
        assert_last_um_content(cordelia, message)
        general_user_msg_count = len(get_user_messages(cordelia))
        soft_deactivated_user_msg_count = len(get_user_messages(long_term_idle_user))
        message = 'Test @**everyone** mention'
        send_stream_message(message)
        assert_last_um_content(long_term_idle_user, message)
        assert_um_count(long_term_idle_user, soft_deactivated_user_msg_count + 1)
        assert_um_count(cordelia, general_user_msg_count + 1)
        assert_last_um_content(cordelia, message)
        general_user_msg_count = len(get_user_messages(cordelia))
        soft_deactivated_user_msg_count = len(get_user_messages(long_term_idle_user))
        message = 'Test @**stream** mention'
        send_stream_message(message)
        assert_last_um_content(long_term_idle_user, message)
        assert_um_count(long_term_idle_user, soft_deactivated_user_msg_count + 1)
        assert_um_count(cordelia, general_user_msg_count + 1)
        assert_last_um_content(cordelia, message)
        # NOTE(review): comment fixed — the assertions below show the row IS
        # created while the user is deactivated when the message contains one
        # of the user's alert words.
        do_add_alert_words(long_term_idle_user, ['test_alert_word'])
        general_user_msg_count = len(get_user_messages(cordelia))
        soft_deactivated_user_msg_count = len(get_user_messages(long_term_idle_user))
        message = 'Testing test_alert_word'
        send_stream_message(message)
        assert_last_um_content(long_term_idle_user, message)
        assert_um_count(long_term_idle_user, soft_deactivated_user_msg_count + 1)
        assert_um_count(cordelia, general_user_msg_count + 1)
        assert_last_um_content(cordelia, message)
        # NOTE(review): comment fixed — the assertions below show the row is
        # NOT created while the user is deactivated when the message is a
        # /me message.
        general_user_msg_count = len(get_user_messages(cordelia))
        soft_deactivated_user_msg_count = len(get_user_messages(long_term_idle_user))
        message = '/me says test'
        send_stream_message(message)
        assert_last_um_content(long_term_idle_user, message, negate=True)
        assert_um_count(long_term_idle_user, soft_deactivated_user_msg_count)
        assert_um_count(cordelia, general_user_msg_count + 1)
        assert_last_um_content(cordelia, message)
class MessageHydrationTest(ZulipTestCase):
    """Tests for filling in display_recipient data on message dicts."""
    def test_hydrate_stream_recipient_info(self) -> None:
        """For a stream message, hydration sets display_recipient to the
        stream name and type to 'stream'."""
        realm = get_realm('zulip')
        cordelia = self.example_user('cordelia')
        stream_id = get_stream('Verona', realm).id
        obj = dict(
            recipient_type=Recipient.STREAM,
            recipient_type_id=stream_id,
            sender_is_mirror_dummy=False,
            sender_email=cordelia.email,
            sender_full_name=cordelia.full_name,
            sender_short_name=cordelia.short_name,
            sender_id=cordelia.id,
        )
        MessageDict.hydrate_recipient_info(obj, 'Verona')
        self.assertEqual(obj['display_recipient'], 'Verona')
        self.assertEqual(obj['type'], 'stream')
    def test_hydrate_pm_recipient_info(self) -> None:
        """For a private message, hydration appends the sender to the
        recipient list and sets type to 'private'."""
        cordelia = self.example_user('cordelia')
        display_recipient: List[UserDisplayRecipient] = [
            dict(
                email='aaron@example.com',
                full_name='Aaron Smith',
                short_name='Aaron',
                id=999,
                is_mirror_dummy=False
            ),
        ]
        obj = dict(
            recipient_type=Recipient.PERSONAL,
            recipient_type_id=None,
            sender_is_mirror_dummy=False,
            sender_email=cordelia.email,
            sender_full_name=cordelia.full_name,
            sender_short_name=cordelia.short_name,
            sender_id=cordelia.id,
        )
        MessageDict.hydrate_recipient_info(obj, display_recipient)
        # The sender (cordelia) is added to the display_recipient list.
        self.assertEqual(
            obj['display_recipient'],
            [
                dict(
                    email='aaron@example.com',
                    full_name='Aaron Smith',
                    short_name='Aaron',
                    id=999,
                    is_mirror_dummy=False
                ),
                dict(
                    email=cordelia.email,
                    full_name=cordelia.full_name,
                    id=cordelia.id,
                    short_name=cordelia.short_name,
                    is_mirror_dummy=False,
                ),
            ],
        )
        self.assertEqual(obj['type'], 'private')
    def test_messages_for_ids(self) -> None:
        """messages_for_ids renders content, applies per-user flags, and
        marks mentions in the rendered HTML."""
        hamlet = self.example_user('hamlet')
        cordelia = self.example_user('cordelia')
        stream_name = 'test stream'
        self.subscribe(cordelia, stream_name)
        old_message_id = self.send_stream_message(cordelia, stream_name, content='foo')
        self.subscribe(hamlet, stream_name)
        content = 'hello @**King Hamlet**'
        new_message_id = self.send_stream_message(cordelia, stream_name, content=content)
        user_message_flags = {
            old_message_id: ['read', 'historical'],
            new_message_id: ['mentioned'],
        }
        messages = messages_for_ids(
            message_ids=[old_message_id, new_message_id],
            user_message_flags=user_message_flags,
            search_fields={},
            apply_markdown=True,
            client_gravatar=True,
            allow_edit_history=False,
        )
        self.assertEqual(len(messages), 2)
        for message in messages:
            if message['id'] == old_message_id:
                old_message = message
            elif message['id'] == new_message_id:
                new_message = message
        self.assertEqual(old_message['content'], '<p>foo</p>')
        self.assertEqual(old_message['flags'], ['read', 'historical'])
        self.assertIn('class="user-mention"', new_message['content'])
        self.assertEqual(new_message['flags'], ['mentioned'])
    def test_display_recipient_up_to_date(self) -> None:
        """A cached display_recipient must not go stale when a recipient's
        email changes."""
        hamlet = self.example_user('hamlet')
        cordelia = self.example_user('cordelia')
        message_id = self.send_personal_message(hamlet, cordelia, 'test')
        cordelia_recipient = cordelia.recipient
        # Cause the display_recipient to get cached:
        get_display_recipient(cordelia_recipient)
        # Change cordelia's email:
        cordelia_new_email = 'new-cordelia@zulip.com'
        cordelia.email = cordelia_new_email
        cordelia.save()
        flush_per_request_caches()
        messages = messages_for_ids(
            message_ids=[message_id],
            user_message_flags={message_id: ['read']},
            search_fields={},
            apply_markdown=True,
            client_gravatar=True,
            allow_edit_history=False,
        )
        message = messages[0]
        for display_recipient in message['display_recipient']:
            if display_recipient['short_name'] == 'cordelia':
                cordelia_display_recipient = display_recipient
        # Check that the email is up-to-date, not the cached value.
        self.assertEqual(cordelia_display_recipient['email'], cordelia_new_email)
class TestMessageForIdsDisplayRecipientFetching(ZulipTestCase):
    """Tests that messages_for_ids produces correct display_recipient values
    for personal, stream, and huddle messages, including mixed batches."""
    def _verify_display_recipient(self, display_recipient: DisplayRecipientT,
                                  expected_recipient_objects: Union[Stream, List[UserProfile]]) -> None:
        # A stream message's display_recipient is the stream name; otherwise
        # it is a list of user dicts, one per recipient.
        if isinstance(expected_recipient_objects, Stream):
            self.assertEqual(display_recipient, expected_recipient_objects.name)
        else:
            for user_profile in expected_recipient_objects:
                recipient_dict: UserDisplayRecipient = {
                    'email': user_profile.email,
                    'full_name': user_profile.full_name,
                    'short_name': user_profile.short_name,
                    'id': user_profile.id,
                    'is_mirror_dummy': user_profile.is_mirror_dummy,
                }
                self.assertTrue(recipient_dict in display_recipient)
    def test_display_recipient_personal(self) -> None:
        """Personal messages include both participants as recipients."""
        hamlet = self.example_user('hamlet')
        cordelia = self.example_user('cordelia')
        othello = self.example_user('othello')
        message_ids = [
            self.send_personal_message(hamlet, cordelia, 'test'),
            self.send_personal_message(cordelia, othello, 'test')
        ]
        messages = messages_for_ids(
            message_ids=message_ids,
            user_message_flags={message_id: ['read'] for message_id in message_ids},
            search_fields={},
            apply_markdown=True,
            client_gravatar=True,
            allow_edit_history=False,
        )
        self._verify_display_recipient(messages[0]['display_recipient'], [hamlet, cordelia])
        self._verify_display_recipient(messages[1]['display_recipient'], [cordelia, othello])
    def test_display_recipient_stream(self) -> None:
        """Stream messages use the stream name as display_recipient."""
        cordelia = self.example_user('cordelia')
        message_ids = [
            self.send_stream_message(cordelia, "Verona", content='test'),
            self.send_stream_message(cordelia, "Denmark", content='test')
        ]
        messages = messages_for_ids(
            message_ids=message_ids,
            user_message_flags={message_id: ['read'] for message_id in message_ids},
            search_fields={},
            apply_markdown=True,
            client_gravatar=True,
            allow_edit_history=False,
        )
        self._verify_display_recipient(messages[0]['display_recipient'], get_stream("Verona", cordelia.realm))
        self._verify_display_recipient(messages[1]['display_recipient'], get_stream("Denmark", cordelia.realm))
    def test_display_recipient_huddle(self) -> None:
        """Huddle messages list every participant as a recipient."""
        hamlet = self.example_user('hamlet')
        cordelia = self.example_user('cordelia')
        othello = self.example_user('othello')
        iago = self.example_user('iago')
        message_ids = [
            self.send_huddle_message(hamlet, [cordelia, othello], 'test'),
            self.send_huddle_message(cordelia, [hamlet, othello, iago], 'test')
        ]
        messages = messages_for_ids(
            message_ids=message_ids,
            user_message_flags={message_id: ['read'] for message_id in message_ids},
            search_fields={},
            apply_markdown=True,
            client_gravatar=True,
            allow_edit_history=False,
        )
        self._verify_display_recipient(messages[0]['display_recipient'], [hamlet, cordelia, othello])
        self._verify_display_recipient(messages[1]['display_recipient'], [hamlet, cordelia, othello, iago])
    def test_display_recipient_various_types(self) -> None:
        """A mixed batch of huddle/stream/personal messages hydrates each
        message's display_recipient correctly."""
        hamlet = self.example_user('hamlet')
        cordelia = self.example_user('cordelia')
        othello = self.example_user('othello')
        iago = self.example_user('iago')
        message_ids = [
            self.send_huddle_message(hamlet, [cordelia, othello], 'test'),
            self.send_stream_message(cordelia, "Verona", content='test'),
            self.send_personal_message(hamlet, cordelia, 'test'),
            self.send_stream_message(cordelia, "Denmark", content='test'),
            self.send_huddle_message(cordelia, [hamlet, othello, iago], 'test'),
            self.send_personal_message(cordelia, othello, 'test')
        ]
        messages = messages_for_ids(
            message_ids=message_ids,
            user_message_flags={message_id: ['read'] for message_id in message_ids},
            search_fields={},
            apply_markdown=True,
            client_gravatar=True,
            allow_edit_history=False,
        )
        self._verify_display_recipient(messages[0]['display_recipient'], [hamlet, cordelia, othello])
        self._verify_display_recipient(messages[1]['display_recipient'], get_stream("Verona", hamlet.realm))
        self._verify_display_recipient(messages[2]['display_recipient'], [hamlet, cordelia])
        self._verify_display_recipient(messages[3]['display_recipient'], get_stream("Denmark", hamlet.realm))
        self._verify_display_recipient(messages[4]['display_recipient'], [hamlet, cordelia, othello, iago])
        self._verify_display_recipient(messages[5]['display_recipient'], [cordelia, othello])
class MessageVisibilityTest(ZulipTestCase):
    """Tests for the realm message-visibility limit (history truncation)."""
    def test_update_first_visible_message_id(self) -> None:
        """first_visible_message_id is 0 (everything visible) when the limit
        is unset or exceeds the message count; otherwise it is the id of the
        oldest message within the limit."""
        Message.objects.all().delete()
        message_ids = [self.send_stream_message(self.example_user("othello"), "Scotland") for i in range(15)]
        # If message_visible_limit is None update_first_visible_message_id
        # should set first_visible_message_id to 0
        realm = get_realm("zulip")
        realm.message_visibility_limit = None
        # Setting to a random value other than 0
        realm.first_visible_message_id = 5
        realm.save()
        update_first_visible_message_id(realm)
        self.assertEqual(get_first_visible_message_id(realm), 0)
        realm.message_visibility_limit = 10
        realm.save()
        # With 15 messages and a limit of 10, the 6th message (index 5) is
        # the oldest visible one.
        expected_message_id = message_ids[5]
        update_first_visible_message_id(realm)
        self.assertEqual(get_first_visible_message_id(realm), expected_message_id)
        # If the message_visibility_limit is greater than number of messages
        # get_first_visible_message_id should return 0
        realm.message_visibility_limit = 50
        realm.save()
        update_first_visible_message_id(realm)
        self.assertEqual(get_first_visible_message_id(realm), 0)
    def test_maybe_update_first_visible_message_id(self) -> None:
        """The update only runs when a limit is set AND there was recent
        message traffic (a RealmCount row inside the lookback window)."""
        realm = get_realm("zulip")
        lookback_hours = 30
        realm.message_visibility_limit = None
        realm.save()
        end_time = timezone_now() - datetime.timedelta(hours=lookback_hours - 5)
        stat = COUNT_STATS['messages_sent:is_bot:hour']
        RealmCount.objects.create(realm=realm, property=stat.property,
                                  end_time=end_time, value=5)
        # No limit set: nothing to update even with recent traffic.
        with mock.patch("zerver.lib.message.update_first_visible_message_id") as m:
            maybe_update_first_visible_message_id(realm, lookback_hours)
        m.assert_not_called()
        realm.message_visibility_limit = 10
        realm.save()
        RealmCount.objects.all().delete()
        # Limit set but no recent traffic: still skipped.
        with mock.patch("zerver.lib.message.update_first_visible_message_id") as m:
            maybe_update_first_visible_message_id(realm, lookback_hours)
        m.assert_not_called()
        RealmCount.objects.create(realm=realm, property=stat.property,
                                  end_time=end_time, value=5)
        # Limit set and recent traffic present: update runs exactly once.
        with mock.patch("zerver.lib.message.update_first_visible_message_id") as m:
            maybe_update_first_visible_message_id(realm, lookback_hours)
        m.assert_called_once_with(realm)
class TestBulkGetHuddleUserIds(ZulipTestCase):
    """Tests for bulk_get_huddle_user_ids, which maps huddle recipients to
    their member user-id lists in a single batched lookup."""
    def test_bulk_get_huddle_user_ids(self) -> None:
        """The bulk result must match per-recipient get_huddle_user_ids."""
        hamlet = self.example_user('hamlet')
        cordelia = self.example_user('cordelia')
        othello = self.example_user('othello')
        iago = self.example_user('iago')
        message_ids = [
            self.send_huddle_message(hamlet, [cordelia, othello], 'test'),
            self.send_huddle_message(cordelia, [hamlet, othello, iago], 'test')
        ]
        messages = Message.objects.filter(id__in=message_ids).order_by("id")
        first_huddle_recipient = messages[0].recipient
        first_huddle_user_ids = list(get_huddle_user_ids(first_huddle_recipient))
        second_huddle_recipient = messages[1].recipient
        second_huddle_user_ids = list(get_huddle_user_ids(second_huddle_recipient))
        huddle_user_ids = bulk_get_huddle_user_ids([first_huddle_recipient, second_huddle_recipient])
        self.assertEqual(huddle_user_ids[first_huddle_recipient.id], first_huddle_user_ids)
        self.assertEqual(huddle_user_ids[second_huddle_recipient.id], second_huddle_user_ids)
    def test_bulk_get_huddle_user_ids_empty_list(self) -> None:
        """An empty recipient list yields an empty mapping."""
        self.assertEqual(bulk_get_huddle_user_ids([]), {})
class NoRecipientIDsTest(ZulipTestCase):
    def test_no_recipient_ids(self) -> None:
        """gather_subscriptions_helper must cope with a user who has no
        stream subscriptions at all, returning an empty subscribed list."""
        cordelia = self.example_user('cordelia')
        # Strip every stream subscription from the user.
        Subscription.objects.filter(
            user_profile=cordelia, recipient__type=Recipient.STREAM).delete()
        sub_info = gather_subscriptions_helper(cordelia)
        # First element of the helper's result is the subscribed-streams list.
        self.assertEqual(len(sub_info[0]), 0)
| true | true |
f71c2d89d64d953f36a10a9d27d38da71f45ea05 | 436 | py | Python | Python/python-practice/chapter7-while/sandwich_orders.py | jiaoqiyuan/Tests | a3595b0e4b430d910f90e428d6b6b4465f67a059 | [
"Apache-2.0"
] | null | null | null | Python/python-practice/chapter7-while/sandwich_orders.py | jiaoqiyuan/Tests | a3595b0e4b430d910f90e428d6b6b4465f67a059 | [
"Apache-2.0"
] | null | null | null | Python/python-practice/chapter7-while/sandwich_orders.py | jiaoqiyuan/Tests | a3595b0e4b430d910f90e428d6b6b4465f67a059 | [
"Apache-2.0"
] | null | null | null | sandwich_orders = ['aaa', 'pastrami', 'bbb', 'pastrami', 'ccc','pastrami']
finished_sandwiches = []
print("\nAll pastrami had been sold!")
while 'pastrami' in sandwich_orders:
sandwich_orders.remove('pastrami')
while sandwich_orders:
sandwich_order = sandwich_orders.pop()
print("\nI made your tuna sanwichi " + sandwich_order)
finished_sandwiches.append(sandwich_order)
print("\nI have finished all sandwiches!")
| 33.538462 | 74 | 0.729358 | sandwich_orders = ['aaa', 'pastrami', 'bbb', 'pastrami', 'ccc','pastrami']
finished_sandwiches = []
print("\nAll pastrami had been sold!")
while 'pastrami' in sandwich_orders:
sandwich_orders.remove('pastrami')
while sandwich_orders:
sandwich_order = sandwich_orders.pop()
print("\nI made your tuna sanwichi " + sandwich_order)
finished_sandwiches.append(sandwich_order)
print("\nI have finished all sandwiches!")
| true | true |
f71c2e710a36a4f82f3feb6e43398072d821ab5d | 219 | py | Python | olc_webportalv2/cowbat/admin.py | OLC-Bioinformatics/olc_genomics_portal | d70ec669a3a49106f8290fff5dee089726259a23 | [
"MIT"
] | 3 | 2019-01-03T21:22:21.000Z | 2019-04-23T15:47:29.000Z | olc_webportalv2/cowbat/admin.py | lowandrew/olc_webportalv2 | e75ba1b7af85bb25b59138d31e268ecde6616208 | [
"MIT"
] | 49 | 2019-01-03T18:15:12.000Z | 2022-03-11T23:37:20.000Z | olc_webportalv2/cowbat/admin.py | OLC-Bioinformatics/olc_webportalv2 | d70ec669a3a49106f8290fff5dee089726259a23 | [
"MIT"
] | 58 | 2019-01-03T21:21:59.000Z | 2021-11-02T18:00:20.000Z | from django.contrib import admin
from .models import DataFile, SequencingRun, InterOpFile
# Register your models here.
admin.site.register(DataFile)
admin.site.register(SequencingRun)
admin.site.register(InterOpFile)
| 24.333333 | 56 | 0.826484 | from django.contrib import admin
from .models import DataFile, SequencingRun, InterOpFile
admin.site.register(DataFile)
admin.site.register(SequencingRun)
admin.site.register(InterOpFile)
| true | true |
f71c3158cd7fa547b538702286ea7c2954416084 | 2,113 | py | Python | nipype/interfaces/afni/tests/test_auto_ROIStats.py | PAmcconnell/nipype | 39fbd5411a844ce7c023964d3295eb7643b95af5 | [
"Apache-2.0"
] | null | null | null | nipype/interfaces/afni/tests/test_auto_ROIStats.py | PAmcconnell/nipype | 39fbd5411a844ce7c023964d3295eb7643b95af5 | [
"Apache-2.0"
] | 2 | 2018-04-26T12:09:32.000Z | 2018-04-27T06:36:49.000Z | nipype/interfaces/afni/tests/test_auto_ROIStats.py | PAmcconnell/nipype | 39fbd5411a844ce7c023964d3295eb7643b95af5 | [
"Apache-2.0"
] | 1 | 2019-11-14T14:16:57.000Z | 2019-11-14T14:16:57.000Z | # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
from ..preprocess import ROIStats
def test_ROIStats_inputs():
    """Check that the ROIStats input spec declares exactly the expected
    trait metadata (argstr, position, xor, etc.).

    NOTE: this file is auto-generated by tools/checkspecs.py (see header);
    regenerate rather than hand-edit when the interface changes.
    """
    # Expected trait metadata, keyed by input name.
    input_map = dict(
        args=dict(argstr='%s', ),
        debug=dict(argstr='-debug', ),
        environ=dict(
            nohash=True,
            usedefault=True,
        ),
        format1D=dict(
            argstr='-1Dformat',
            xor=['format1DR'],
        ),
        format1DR=dict(
            argstr='-1DRformat',
            xor=['format1D'],
        ),
        in_file=dict(
            argstr='%s',
            extensions=None,
            mandatory=True,
            position=-2,
        ),
        mask=dict(
            argstr='-mask %s',
            deprecated='1.1.4',
            extensions=None,
            new_name='mask_file',
            position=3,
        ),
        mask_f2short=dict(argstr='-mask_f2short', ),
        mask_file=dict(
            argstr='-mask %s',
            extensions=None,
        ),
        nobriklab=dict(argstr='-nobriklab', ),
        nomeanout=dict(argstr='-nomeanout', ),
        num_roi=dict(argstr='-numroi %s', ),
        out_file=dict(
            argstr='> %s',
            extensions=None,
            keep_extension=False,
            name_source='in_file',
            name_template='%s_roistat.1D',
            position=-1,
        ),
        quiet=dict(argstr='-quiet', ),
        roisel=dict(
            argstr='-roisel %s',
            extensions=None,
        ),
        stat=dict(argstr='%s...', ),
        zerofill=dict(
            argstr='-zerofill %s',
            requires=['num_roi'],
        ),
    )
    inputs = ROIStats.input_spec()
    # Every expected metadata key/value must match the live trait definition.
    for key, metadata in list(input_map.items()):
        for metakey, value in list(metadata.items()):
            assert getattr(inputs.traits()[key], metakey) == value
def test_ROIStats_outputs():
    """Check the ROIStats output spec's trait metadata.

    NOTE: auto-generated by tools/checkspecs.py (see file header).
    """
    output_map = dict(out_file=dict(extensions=None, ), )
    outputs = ROIStats.output_spec()
    for key, metadata in list(output_map.items()):
        for metakey, value in list(metadata.items()):
            assert getattr(outputs.traits()[key], metakey) == value
| 28.945205 | 67 | 0.510648 |
from ..preprocess import ROIStats
def test_ROIStats_inputs():
input_map = dict(
args=dict(argstr='%s', ),
debug=dict(argstr='-debug', ),
environ=dict(
nohash=True,
usedefault=True,
),
format1D=dict(
argstr='-1Dformat',
xor=['format1DR'],
),
format1DR=dict(
argstr='-1DRformat',
xor=['format1D'],
),
in_file=dict(
argstr='%s',
extensions=None,
mandatory=True,
position=-2,
),
mask=dict(
argstr='-mask %s',
deprecated='1.1.4',
extensions=None,
new_name='mask_file',
position=3,
),
mask_f2short=dict(argstr='-mask_f2short', ),
mask_file=dict(
argstr='-mask %s',
extensions=None,
),
nobriklab=dict(argstr='-nobriklab', ),
nomeanout=dict(argstr='-nomeanout', ),
num_roi=dict(argstr='-numroi %s', ),
out_file=dict(
argstr='> %s',
extensions=None,
keep_extension=False,
name_source='in_file',
name_template='%s_roistat.1D',
position=-1,
),
quiet=dict(argstr='-quiet', ),
roisel=dict(
argstr='-roisel %s',
extensions=None,
),
stat=dict(argstr='%s...', ),
zerofill=dict(
argstr='-zerofill %s',
requires=['num_roi'],
),
)
inputs = ROIStats.input_spec()
for key, metadata in list(input_map.items()):
for metakey, value in list(metadata.items()):
assert getattr(inputs.traits()[key], metakey) == value
def test_ROIStats_outputs():
output_map = dict(out_file=dict(extensions=None, ), )
outputs = ROIStats.output_spec()
for key, metadata in list(output_map.items()):
for metakey, value in list(metadata.items()):
assert getattr(outputs.traits()[key], metakey) == value
| true | true |
f71c315a2c062c492837c96d407768a4e6981339 | 2,392 | py | Python | libs/cherrypy/tutorial/tut06_default_method.py | scambra/HTPC-Manager | 1a1440db84ae1b6e7a2610c7f3bd5b6adf0aab1d | [
"MIT"
] | 674 | 2015-11-06T04:22:47.000Z | 2022-02-26T17:31:43.000Z | libs/cherrypy/tutorial/tut06_default_method.py | scambra/HTPC-Manager | 1a1440db84ae1b6e7a2610c7f3bd5b6adf0aab1d | [
"MIT"
] | 713 | 2015-11-06T10:48:58.000Z | 2018-11-27T16:32:18.000Z | libs/cherrypy/tutorial/tut06_default_method.py | scambra/HTPC-Manager | 1a1440db84ae1b6e7a2610c7f3bd5b6adf0aab1d | [
"MIT"
] | 115 | 2015-01-08T14:41:00.000Z | 2022-02-13T12:31:17.000Z | """
Tutorial - The default method
Request handler objects can implement a method called "default" that
is called when no other suitable method/object could be found.
Essentially, if CherryPy2 can't find a matching request handler object
for the given request URI, it will use the default method of the object
located deepest on the URI path.
Using this mechanism you can easily simulate virtual URI structures
by parsing the extra URI string, which you can access through
cherrypy.request.virtualPath.
The application in this tutorial simulates an URI structure looking
like /users/<username>. Since the <username> bit will not be found (as
there are no matching methods), it is handled by the default method.
"""
import cherrypy
class UsersPage:

    def index(self):
        """Render a hard-coded list of links to user pages.

        In a real application this list would come from a database.
        """
        return '''
            <a href="./remi">Remi Delon</a><br/>
            <a href="./hendrik">Hendrik Mans</a><br/>
            <a href="./lorenzo">Lorenzo Lamas</a><br/>
        '''
    index.exposed = True

    def default(self, user):
        """Handle /users/<username> via CherryPy's default-method dispatch.

        The unmapped path segment arrives as ``user``; a real application
        would look it up in a database instead of this table.
        """
        descriptions = {
            'remi': "Remi Delon, CherryPy lead developer",
            'hendrik': "Hendrik Mans, CherryPy co-developer & crazy German",
            'lorenzo': "Lorenzo Lamas, famous actor and singer!",
        }
        out = descriptions.get(user, "Unknown user. :-(")
        return '%s (<a href="./">back</a>)' % out
    default.exposed = True
import os.path
# Configuration file shipped next to this tutorial module.
tutconf = os.path.join(os.path.dirname(__file__), 'tutorial.conf')
if __name__ == '__main__':
    # CherryPy always starts with app.root when trying to map request URIs
    # to objects, so we need to mount a request handler root. A request
    # to '/' will be mapped to HelloWorld().index().
    cherrypy.quickstart(UsersPage(), config=tutconf)
else:
    # This branch is for the test suite; you can ignore it.
    cherrypy.tree.mount(UsersPage(), config=tutconf)
| 37.375 | 74 | 0.669732 |
import cherrypy
class UsersPage:
def index(self):
# display a list of links to random, made-up users. In a real
# application, this could be generated from a database result set.
return '''
<a href="./remi">Remi Delon</a><br/>
<a href="./hendrik">Hendrik Mans</a><br/>
<a href="./lorenzo">Lorenzo Lamas</a><br/>
'''
index.exposed = True
def default(self, user):
# Here we react depending on the virtualPath -- the part of the
# path that could not be mapped to an object method. In a real
# application, we would probably do some database lookups here
# instead of the silly if/elif/else construct.
if user == 'remi':
out = "Remi Delon, CherryPy lead developer"
elif user == 'hendrik':
out = "Hendrik Mans, CherryPy co-developer & crazy German"
elif user == 'lorenzo':
out = "Lorenzo Lamas, famous actor and singer!"
else:
out = "Unknown user. :-("
return '%s (<a href="./">back</a>)' % out
default.exposed = True
import os.path
tutconf = os.path.join(os.path.dirname(__file__), 'tutorial.conf')
if __name__ == '__main__':
# CherryPy always starts with app.root when trying to map request URIs
# to objects, so we need to mount a request handler root. A request
# to '/' will be mapped to HelloWorld().index().
cherrypy.quickstart(UsersPage(), config=tutconf)
else:
# This branch is for the test suite; you can ignore it.
cherrypy.tree.mount(UsersPage(), config=tutconf)
| true | true |
f71c31685adf10f4b09b39fca342aeaf980132fc | 9,541 | py | Python | image-classifier/image-classifier.py | subhadip7879/neural-net | 04aacf7cdec89ea0f58f2c7397c72adefa8c2d4e | [
"MIT"
] | null | null | null | image-classifier/image-classifier.py | subhadip7879/neural-net | 04aacf7cdec89ea0f58f2c7397c72adefa8c2d4e | [
"MIT"
] | 3 | 2017-10-29T17:39:20.000Z | 2017-10-29T18:35:08.000Z | image-classifier/image-classifier.py | subhadip7879/neural-net | 04aacf7cdec89ea0f58f2c7397c72adefa8c2d4e | [
"MIT"
] | 6 | 2017-10-29T17:32:46.000Z | 2018-10-05T09:49:31.000Z | from IPython.display import Image
Image('images/02_network_flowchart.png')
Image('images/02_convolution.png')
import matplotlib.pyplot as plt
import tensorflow as tf
import numpy as np
from sklearn.metrics import confusion_matrix
import time
from datetime import timedelta
import math
from tensorflow.examples.tutorials.mnist import input_data
tf.__version__
#Convolutional Layer 1.
# will connect each neuron to only a local region of the input volume
# Convolution filters are 5 x 5 pixels.
filter_size1 = 5
num_filters1 = 16
# Convolutional Layer 2.
filter_size2 = 5
num_filters2 = 36
# Fully-connected layer.
fc_size = 128
data = input_data.read_data_sets('data/MNIST/', one_hot=True)
print("Size of:")
print("- Training-set:\t\t{}".format(len(data.train.labels)))
print("- Test-set:\t\t{}".format(len(data.test.labels)))
print("- Validation-set:\t{}".format(len(data.validation.labels)))
data.test.cls = np.argmax(data.test.labels, axis=1)
img_size = 28
# Images are stored in 1d array of this length.
img_size_flat = img_size * img_size
img_shape = (img_size, img_size)
num_channels = 1
num_classes = 10
def plot_images(images, cls_true, cls_pred=None):
    """Plot 9 images in a 3x3 grid with their true (and optionally
    predicted) class labels.

    images: sequence of 9 flat image arrays, reshaped via the module-level
        ``img_shape``.
    cls_true: 9 true class labels.
    cls_pred: optional 9 predicted class labels.
    """
    assert len(images) == 9
    # Bug fix: the original wrote `len(cls_true) == 9` as a bare
    # expression, so the check was silently discarded.
    assert len(cls_true) == 9
    fig, axes = plt.subplots(3, 3)
    fig.subplots_adjust(hspace=0.3, wspace=0.3)
    for i, ax in enumerate(axes.flat):
        # Plot image.
        ax.imshow(images[i].reshape(img_shape), cmap='binary')
        # Show true label alone, or true + predicted when available.
        if cls_pred is None:
            xlabel = "True: {0}".format(cls_true[i])
        else:
            xlabel = "True: {0}, Pred: {1}".format(cls_true[i], cls_pred[i])
        ax.set_xlabel(xlabel)
        # Remove tick marks for a cleaner image grid.
        ax.set_xticks([])
        ax.set_yticks([])
    plt.show()
# first images from mnist
images = data.test.images[0:9]
cls_true = data.test.cls[0:9]
# Plot the images and labels (sanity check of the data pipeline).
plot_images(images=images, cls_true=cls_true)
def new_weights(shape):
    """Create a TF weight variable of the given shape, initialized from a
    truncated normal distribution (stddev 0.05)."""
    initial_values = tf.truncated_normal(shape, stddev=0.05)
    return tf.Variable(initial_values)
def new_biases(length):
    """Create a TF bias variable of the given length, initialized to 0.05."""
    initial_values = tf.constant(0.05, shape=[length])
    return tf.Variable(initial_values)
def new_conv_layer(input,num_input_channels, filter_size,num_filters,use_pooling=True):
    """Create a convolutional layer: conv2d + bias, optional 2x2 max-pool,
    then ReLU. Returns (output tensor, filter weights).

    input: 4-D tensor [batch, height, width, num_input_channels].
    """
    # Filter shape expected by tf.nn.conv2d:
    # [filter_height, filter_width, in_channels, out_channels].
    shape = [filter_size, filter_size, num_input_channels, num_filters]
    weights = new_weights(shape=shape)
    biases = new_biases(length=num_filters)
    # Stride 1 in every dimension with SAME padding keeps the spatial size.
    layer = tf.nn.conv2d(input=input,
                         filter=weights,
                         strides=[1, 1, 1, 1],
                         padding='SAME')
    layer += biases
    if use_pooling:
        # 2x2 max-pooling with stride 2 halves the spatial resolution.
        layer = tf.nn.max_pool(value=layer,
                               ksize=[1, 2, 2, 1],
                               strides=[1, 2, 2, 1],
                               padding='SAME')
    # ReLU after pooling: since ReLU is monotonic,
    # relu(max_pool(x)) == max_pool(relu(x)), and pooling first means
    # fewer activations to compute.
    layer = tf.nn.relu(layer)
    # Weights are returned as well so the filters can be visualized later.
    return layer, weights
def flatten_layer(layer):
    """Flatten a 4-D conv output [batch, h, w, channels] into
    [batch, h * w * channels] for a fully-connected layer.

    Returns (flattened tensor, number of features per example).
    """
    layer_shape = layer.get_shape()
    # Number of features = h * w * channels (dimensions 1..3).
    num_features = layer_shape[1:4].num_elements()
    # -1 lets TF infer the batch dimension.
    layer_flat = tf.reshape(layer, [-1, num_features])
    return layer_flat, num_features
def new_fc_layer(input, num_inputs,num_outputs,use_relu=True):
    """Create a fully-connected layer: output = input @ W + b, optionally through ReLU."""
    w = new_weights(shape=[num_inputs, num_outputs])
    b = new_biases(length=num_outputs)
    out = tf.matmul(input, w) + b
    return tf.nn.relu(out) if use_relu else out
# Placeholder for flattened input images; None allows any batch size.
x = tf.placeholder(tf.float32, shape=[None, img_size_flat], name='x')
# Reshape to the 4-d layout the convolutional layers expect.
x_image = tf.reshape(x, [-1, img_size, img_size, num_channels])
# Placeholder for the one-hot encoded true labels.
y_true = tf.placeholder(tf.float32, shape=[None, 10], name='y_true')
# True class as an integer; dimension= is the deprecated TF1 spelling of axis=.
y_true_cls = tf.argmax(y_true, dimension=1)
# First convolutional layer (with 2x2 max-pooling).
layer_conv1, weights_conv1 = \
    new_conv_layer(input=x_image,
                   num_input_channels=num_channels,
                   filter_size=filter_size1,
                   num_filters=num_filters1,
                   use_pooling=True)
# Bare expression: notebook artifact that displayed the tensor; a no-op in a script.
layer_conv1
# Second convolutional layer, fed from the first.
layer_conv2, weights_conv2 = \
    new_conv_layer(input=layer_conv1,
                   num_input_channels=num_filters1,
                   filter_size=filter_size2,
                   num_filters=num_filters2,
                   use_pooling=True)
layer_conv2
# Flatten the conv output so it can feed the fully-connected layers.
layer_flat, num_features = flatten_layer(layer_conv2)
layer_flat
num_features
# First fully-connected layer, with ReLU.
layer_fc1 = new_fc_layer(input=layer_flat,
                         num_inputs=num_features,
                         num_outputs=fc_size,
                         use_relu=True)
layer_fc1
# Output layer: one logit per class, no ReLU (softmax is applied below).
layer_fc2 = new_fc_layer(input=layer_fc1,
                         num_inputs=fc_size,
                         num_outputs=num_classes,
                         use_relu=False)
layer_fc2
# Predicted class probabilities and predicted class number.
y_pred = tf.nn.softmax(layer_fc2)
y_pred_cls = tf.argmax(y_pred, dimension=1)
# Cross-entropy is computed from the raw logits (not y_pred) for numerical stability.
cross_entropy = tf.nn.softmax_cross_entropy_with_logits(logits=layer_fc2,
                                                        labels=y_true)
cost = tf.reduce_mean(cross_entropy)
optimizer = tf.train.AdamOptimizer(learning_rate=1e-4).minimize(cost)
# Accuracy: fraction of samples where prediction equals the true class.
correct_prediction = tf.equal(y_pred_cls, y_true_cls)
accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
# TF1 graph-mode session; variables must be initialized before first use.
session = tf.Session()
session.run(tf.global_variables_initializer())
train_batch_size = 64
# Running count of training iterations performed so far (updated by optimize()).
total_iterations = 0
def optimize(num_iterations):
    """Run `num_iterations` training steps, printing batch accuracy every 100 steps.

    Uses the module-level session, optimizer, data and placeholders; updates
    the global total_iterations counter so repeated calls continue the count.

    :param num_iterations: number of additional optimization steps to run
    """
    global total_iterations
    start_time = time.time()
    for i in range(total_iterations, total_iterations + num_iterations):
        # Fetch a batch of training images and their one-hot labels.
        x_batch, y_true_batch = data.train.next_batch(train_batch_size)
        feed_dict_train = {x: x_batch, y_true: y_true_batch}
        session.run(optimizer, feed_dict=feed_dict_train)
        # Report training-batch (not test) accuracy every 100 iterations.
        if i % 100 == 0:
            acc = session.run(accuracy, feed_dict=feed_dict_train)
            msg = "Optimization Iteration: {0:>6}, Training Accuracy: {1:>6.1%}"
            print(msg.format(i + 1, acc))
    total_iterations += num_iterations
    end_time = time.time()
    time_dif = end_time - start_time
    print("Time usage: " + str(timedelta(seconds=int(round(time_dif)))))
def plot_example_errors(cls_pred, correct):
    """Show the first nine misclassified test images with true and predicted labels.

    :param cls_pred: predicted class for every test image
    :param correct: boolean array, True where the prediction was right
    """
    wrong = (correct == False)
    bad_images = data.test.images[wrong]
    bad_pred = cls_pred[wrong]
    bad_true = data.test.cls[wrong]
    plot_images(images=bad_images[0:9], cls_true=bad_true[0:9], cls_pred=bad_pred[0:9])
def plot_confusion_matrix(cls_pred):
    """Print and draw the confusion matrix for predictions over the whole test set.

    :param cls_pred: predicted class for every test image
    """
    true_classes = data.test.cls
    matrix = confusion_matrix(y_true=true_classes, y_pred=cls_pred)
    print(matrix)
    plt.matshow(matrix)
    plt.colorbar()
    ticks = np.arange(num_classes)
    plt.xticks(ticks, range(num_classes))
    plt.yticks(ticks, range(num_classes))
    plt.xlabel('Predicted')
    plt.ylabel('True')
    plt.show()
test_batch_size = 256

def print_test_accuracy(show_example_errors=False, show_confusion_matrix=False):
    """Classify the entire test set in batches and report overall accuracy.

    :param show_example_errors: also plot nine misclassified images
    :param show_confusion_matrix: also print and plot the confusion matrix
    """
    num_test = len(data.test.images)
    # np.int was a deprecated alias for the builtin int and was removed in
    # NumPy 1.24; use the builtin directly.
    cls_pred = np.zeros(shape=num_test, dtype=int)
    i = 0
    # Classify in chunks of test_batch_size to bound memory use.
    while i < num_test:
        j = min(i + test_batch_size, num_test)
        images = data.test.images[i:j, :]
        labels = data.test.labels[i:j, :]
        feed_dict = {x: images, y_true: labels}
        cls_pred[i:j] = session.run(y_pred_cls, feed_dict=feed_dict)
        i = j
    cls_true = data.test.cls
    correct = (cls_true == cls_pred)
    correct_sum = correct.sum()
    acc = float(correct_sum) / num_test
    msg = "Accuracy on Test-Set: {0:.1%} ({1} / {2})"
    print(msg.format(acc, correct_sum, num_test))
    if show_example_errors:
        print("Example errors:")
        plot_example_errors(cls_pred=cls_pred, correct=correct)
    if show_confusion_matrix:
        print("Confusion Matrix:")
        plot_confusion_matrix(cls_pred=cls_pred)
# Accuracy before any training (should be near chance, ~10%).
print_test_accuracy()
# Train progressively longer: 1, 100, 1000 and 10000 iterations in total,
# printing accuracy (and eventually error examples / confusion matrix) each time.
optimize(num_iterations=1)
print_test_accuracy()
optimize(num_iterations=99)
print_test_accuracy(show_example_errors=True)
optimize(num_iterations=900)
print_test_accuracy(show_example_errors=True)
optimize(num_iterations=9000)
print_test_accuracy(show_example_errors=True, show_confusion_matrix=True)
def plot_conv_weights(weights, input_channel=0):
    """Plot a conv layer's filters for one input channel on a shared colour scale.

    :param weights: 4-d TF variable [filter_h, filter_w, in_channels, out_channels]
    :param input_channel: which input channel's filter slice to show
    """
    # Evaluate the variable to obtain the filter values as a numpy array.
    w = session.run(weights)
    # Common colour range so filters can be compared against each other.
    w_min = np.min(w)
    w_max = np.max(w)
    num_filters = w.shape[3]
    # Smallest square grid that fits all filters.
    num_grids = math.ceil(math.sqrt(num_filters))
    fig, axes = plt.subplots(num_grids, num_grids)
    for i, ax in enumerate(axes.flat):
        # The grid may have more cells than filters; leave the extras blank.
        if i<num_filters:
            img = w[:, :, input_channel, i]
            ax.imshow(img, vmin=w_min, vmax=w_max,
                      interpolation='nearest', cmap='seismic')
        ax.set_xticks([])
        ax.set_yticks([])
    plt.show()
def plot_conv_layer(layer, image):
    """Plot the per-filter activations of a conv layer for one input image.

    :param layer: conv-layer output tensor to evaluate
    :param image: one flattened input image
    """
    # Only the image placeholder needs to be fed for this forward pass.
    feed_dict = {x: [image]}
    # Activation values; indexed below as [batch, height, width, filter].
    values = session.run(layer, feed_dict=feed_dict)
    num_filters = values.shape[3]
    # Smallest square grid that fits all filters.
    num_grids = math.ceil(math.sqrt(num_filters))
    fig, axes = plt.subplots(num_grids, num_grids)
    for i, ax in enumerate(axes.flat):
        # The grid may have more cells than filters; leave the extras blank.
        if i<num_filters:
            img = values[0, :, :, i]
            ax.imshow(img, interpolation='nearest', cmap='binary')
        ax.set_xticks([])
        ax.set_yticks([])
    plt.show()
def plot_image(image):
    """Display one flattened MNIST image as a 2-d grayscale picture."""
    pixels = image.reshape(img_shape)
    plt.imshow(pixels, interpolation='nearest', cmap='binary')
    plt.show()
# Two sample test images used to visualize layer activations below.
image1 = data.test.images[0]
plot_image(image1)
image2 = data.test.images[13]
plot_image(image2)
# Filters of the first conv layer, then its activations for both images.
plot_conv_weights(weights=weights_conv1)
plot_conv_layer(layer=layer_conv1, image=image1)
plot_conv_layer(layer=layer_conv1, image=image2)
# Second conv layer: filters for the first two input channels, then activations.
plot_conv_weights(weights=weights_conv2, input_channel=0)
plot_conv_weights(weights=weights_conv2, input_channel=1)
plot_conv_layer(layer=layer_conv2, image=image1)
plot_conv_layer(layer=layer_conv2, image=image2)
| 30.097792 | 87 | 0.659889 | from IPython.display import Image
Image('images/02_network_flowchart.png')
Image('images/02_convolution.png')
import matplotlib.pyplot as plt
import tensorflow as tf
import numpy as np
from sklearn.metrics import confusion_matrix
import time
from datetime import timedelta
import math
from tensorflow.examples.tutorials.mnist import input_data
tf.__version__
filter_size1 = 5
num_filters1 = 16
filter_size2 = 5
num_filters2 = 36
fc_size = 128
data = input_data.read_data_sets('data/MNIST/', one_hot=True)
print("Size of:")
print("- Training-set:\t\t{}".format(len(data.train.labels)))
print("- Test-set:\t\t{}".format(len(data.test.labels)))
print("- Validation-set:\t{}".format(len(data.validation.labels)))
data.test.cls = np.argmax(data.test.labels, axis=1)
img_size = 28
img_size_flat = img_size * img_size
img_shape = (img_size, img_size)
num_channels = 1
num_classes = 10
def plot_images(images, cls_true, cls_pred=None):
assert len(images) == 9
len(cls_true) == 9
fig, axes = plt.subplots(3, 3)
fig.subplots_adjust(hspace=0.3, wspace=0.3)
for i, ax in enumerate(axes.flat):
ax.imshow(images[i].reshape(img_shape), cmap='binary')
if cls_pred is None:
xlabel = "True: {0}".format(cls_true[i])
else:
xlabel = "True: {0}, Pred: {1}".format(cls_true[i], cls_pred[i])
ax.set_xlabel(xlabel)
ax.set_xticks([])
ax.set_yticks([])
plt.show()
images = data.test.images[0:9]
cls_true = data.test.cls[0:9]
plot_images(images=images, cls_true=cls_true)
def new_weights(shape):
return tf.Variable(tf.truncated_normal(shape, stddev=0.05))
def new_biases(length):
return tf.Variable(tf.constant(0.05, shape=[length]))
def new_conv_layer(input,num_input_channels, filter_size,num_filters,use_pooling=True):
shape = [filter_size, filter_size, num_input_channels, num_filters]
weights = new_weights(shape=shape)
biases = new_biases(length=num_filters)
layer = tf.nn.conv2d(input=input,
filter=weights,
strides=[1, 1, 1, 1],
padding='SAME')
layer += biases
if use_pooling:
layer = tf.nn.max_pool(value=layer,
ksize=[1, 2, 2, 1],
strides=[1, 2, 2, 1],
padding='SAME')
layer = tf.nn.relu(layer)
return layer, weights
def flatten_layer(layer):
layer_shape = layer.get_shape()
num_features = layer_shape[1:4].num_elements()
layer_flat = tf.reshape(layer, [-1, num_features])
return layer_flat, num_features
def new_fc_layer(input, num_inputs,num_outputs,use_relu=True):
weights = new_weights(shape=[num_inputs, num_outputs])
biases = new_biases(length=num_outputs)
layer = tf.matmul(input, weights) + biases
if use_relu:
layer = tf.nn.relu(layer)
return layer
x = tf.placeholder(tf.float32, shape=[None, img_size_flat], name='x')
x_image = tf.reshape(x, [-1, img_size, img_size, num_channels])
y_true = tf.placeholder(tf.float32, shape=[None, 10], name='y_true')
y_true_cls = tf.argmax(y_true, dimension=1)
layer_conv1, weights_conv1 = \
new_conv_layer(input=x_image,
num_input_channels=num_channels,
filter_size=filter_size1,
num_filters=num_filters1,
use_pooling=True)
layer_conv1
layer_conv2, weights_conv2 = \
new_conv_layer(input=layer_conv1,
num_input_channels=num_filters1,
filter_size=filter_size2,
num_filters=num_filters2,
use_pooling=True)
layer_conv2
layer_flat, num_features = flatten_layer(layer_conv2)
layer_flat
num_features
layer_fc1 = new_fc_layer(input=layer_flat,
num_inputs=num_features,
num_outputs=fc_size,
use_relu=True)
layer_fc1
layer_fc2 = new_fc_layer(input=layer_fc1,
num_inputs=fc_size,
num_outputs=num_classes,
use_relu=False)
layer_fc2
y_pred = tf.nn.softmax(layer_fc2)
y_pred_cls = tf.argmax(y_pred, dimension=1)
cross_entropy = tf.nn.softmax_cross_entropy_with_logits(logits=layer_fc2,
labels=y_true)
cost = tf.reduce_mean(cross_entropy)
optimizer = tf.train.AdamOptimizer(learning_rate=1e-4).minimize(cost)
correct_prediction = tf.equal(y_pred_cls, y_true_cls)
accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
session = tf.Session()
session.run(tf.global_variables_initializer())
train_batch_size = 64
total_iterations = 0
def optimize(num_iterations):
global total_iterations
start_time = time.time()
for i in range(total_iterations, total_iterations + num_iterations):
x_batch, y_true_batch = data.train.next_batch(train_batch_size)
feed_dict_train = {x: x_batch, y_true: y_true_batch}
session.run(optimizer, feed_dict=feed_dict_train)
if i % 100 == 0:
acc = session.run(accuracy, feed_dict=feed_dict_train)
msg = "Optimization Iteration: {0:>6}, Training Accuracy: {1:>6.1%}"
print(msg.format(i + 1, acc))
total_iterations += num_iterations
end_time = time.time()
time_dif = end_time - start_time
print("Time usage: " + str(timedelta(seconds=int(round(time_dif)))))
def plot_example_errors(cls_pred, correct):
incorrect = (correct == False)
images = data.test.images[incorrect]
cls_pred = cls_pred[incorrect]
cls_true = data.test.cls[incorrect]
plot_images(images=images[0:9], cls_true=cls_true[0:9], cls_pred=cls_pred[0:9])
def plot_confusion_matrix(cls_pred):
cls_true = data.test.cls
cm = confusion_matrix(y_true=cls_true, y_pred=cls_pred)
print(cm)
plt.matshow(cm)
plt.colorbar()
tick_marks = np.arange(num_classes)
plt.xticks(tick_marks, range(num_classes))
plt.yticks(tick_marks, range(num_classes))
plt.xlabel('Predicted')
plt.ylabel('True')
plt.show()
test_batch_size = 256
def print_test_accuracy(show_example_errors=False, show_confusion_matrix=False):
num_test = len(data.test.images)
cls_pred = np.zeros(shape=num_test, dtype=np.int)
i = 0
while i < num_test:
j = min(i + test_batch_size, num_test)
images = data.test.images[i:j, :]
labels = data.test.labels[i:j, :]
feed_dict = {x: images, y_true: labels}
cls_pred[i:j] = session.run(y_pred_cls, feed_dict=feed_dict)
i = j
cls_true = data.test.cls
correct = (cls_true == cls_pred)
correct_sum = correct.sum()
acc = float(correct_sum) / num_test
msg = "Accuracy on Test-Set: {0:.1%} ({1} / {2})"
print(msg.format(acc, correct_sum, num_test))
if show_example_errors:
print("Example errors:")
plot_example_errors(cls_pred=cls_pred, correct=correct)
if show_confusion_matrix:
print("Confusion Matrix:")
plot_confusion_matrix(cls_pred=cls_pred)
print_test_accuracy()
optimize(num_iterations=1)
print_test_accuracy()
optimize(num_iterations=99)
print_test_accuracy(show_example_errors=True)
optimize(num_iterations=900)
print_test_accuracy(show_example_errors=True)
optimize(num_iterations=9000)
print_test_accuracy(show_example_errors=True, show_confusion_matrix=True)
def plot_conv_weights(weights, input_channel=0):
w = session.run(weights)
w_min = np.min(w)
w_max = np.max(w)
num_filters = w.shape[3]
num_grids = math.ceil(math.sqrt(num_filters))
fig, axes = plt.subplots(num_grids, num_grids)
for i, ax in enumerate(axes.flat):
if i<num_filters:
img = w[:, :, input_channel, i]
ax.imshow(img, vmin=w_min, vmax=w_max,
interpolation='nearest', cmap='seismic')
ax.set_xticks([])
ax.set_yticks([])
plt.show()
def plot_conv_layer(layer, image):
feed_dict = {x: [image]}
values = session.run(layer, feed_dict=feed_dict)
num_filters = values.shape[3]
num_grids = math.ceil(math.sqrt(num_filters))
fig, axes = plt.subplots(num_grids, num_grids)
for i, ax in enumerate(axes.flat):
if i<num_filters:
img = values[0, :, :, i]
ax.imshow(img, interpolation='nearest', cmap='binary')
ax.set_xticks([])
ax.set_yticks([])
plt.show()
def plot_image(image):
plt.imshow(image.reshape(img_shape),
interpolation='nearest',
cmap='binary')
plt.show()
image1 = data.test.images[0]
plot_image(image1)
image2 = data.test.images[13]
plot_image(image2)
plot_conv_weights(weights=weights_conv1)
plot_conv_layer(layer=layer_conv1, image=image1)
plot_conv_layer(layer=layer_conv1, image=image2)
plot_conv_weights(weights=weights_conv2, input_channel=0)
plot_conv_weights(weights=weights_conv2, input_channel=1)
plot_conv_layer(layer=layer_conv2, image=image1)
plot_conv_layer(layer=layer_conv2, image=image2)
| true | true |
f71c319545eacca0285b17d4ce54f5d246ae71d3 | 17,588 | py | Python | twilio/rest/trunking/v1/trunk/phone_number.py | neetaramaswamy/twilio-python | 28472ffab1a170824ba17f12a6c1692a5e849439 | [
"MIT"
] | 30 | 2018-06-12T12:00:53.000Z | 2021-05-02T01:27:16.000Z | venv/lib/python3.6/site-packages/twilio/rest/trunking/v1/trunk/phone_number.py | ostar0816/mc-crypto | 80ad9896aed1dc952f819a404a458ccfad207d8e | [
"MIT"
] | 10 | 2020-06-06T01:10:07.000Z | 2022-03-12T00:12:22.000Z | venv/lib/python3.6/site-packages/twilio/rest/trunking/v1/trunk/phone_number.py | ostar0816/mc-crypto | 80ad9896aed1dc952f819a404a458ccfad207d8e | [
"MIT"
] | 4 | 2018-06-12T14:14:20.000Z | 2018-06-19T16:01:49.000Z | # coding=utf-8
"""
This code was generated by
\ / _ _ _| _ _
| (_)\/(_)(_|\/| |(/_ v1.0.0
/ /
"""
from twilio.base import deserialize
from twilio.base import values
from twilio.base.instance_context import InstanceContext
from twilio.base.instance_resource import InstanceResource
from twilio.base.list_resource import ListResource
from twilio.base.page import Page
class PhoneNumberList(ListResource):
    """Collection of the phone numbers associated with one SIP trunk."""

    def __init__(self, version, trunk_sid):
        """Build the list resource for the given trunk.

        :param Version version: Version that contains the resource
        :param trunk_sid: The trunk_sid

        :returns: twilio.rest.trunking.v1.trunk.phone_number.PhoneNumberList
        :rtype: twilio.rest.trunking.v1.trunk.phone_number.PhoneNumberList
        """
        super(PhoneNumberList, self).__init__(version)

        # Path solution used to expand the resource URI template.
        self._solution = {'trunk_sid': trunk_sid, }
        self._uri = '/Trunks/{trunk_sid}/PhoneNumbers'.format(**self._solution)

    def create(self, phone_number_sid):
        """Associate an existing phone number with this trunk.

        :param unicode phone_number_sid: The phone_number_sid

        :returns: Newly created PhoneNumberInstance
        :rtype: twilio.rest.trunking.v1.trunk.phone_number.PhoneNumberInstance
        """
        payload = self._version.create(
            'POST',
            self._uri,
            data=values.of({'PhoneNumberSid': phone_number_sid, }),
        )

        return PhoneNumberInstance(self._version, payload, trunk_sid=self._solution['trunk_sid'], )

    def stream(self, limit=None, page_size=None):
        """Lazily yield PhoneNumberInstance records, fetching pages on demand.

        Memory-efficient: records are loaded only as the generator is consumed.

        :param int limit: max records to yield; None means no limit
        :param int page_size: records per request (defaults to 50); when only
                              limit is given, the most efficient size is used,
                              i.e. min(limit, 1000)

        :returns: Generator that will yield up to limit results
        :rtype: list[twilio.rest.trunking.v1.trunk.phone_number.PhoneNumberInstance]
        """
        limits = self._version.read_limits(limit, page_size)

        first_page = self.page(page_size=limits['page_size'], )

        return self._version.stream(first_page, limits['limit'], limits['page_limit'])

    def list(self, limit=None, page_size=None):
        """Eagerly read PhoneNumberInstance records into a list.

        Unlike stream(), loads up to `limit` records into memory before returning.

        :param int limit: max records to return; None means no limit
        :param int page_size: records per request (defaults to 50)

        :returns: Generator that will yield up to limit results
        :rtype: list[twilio.rest.trunking.v1.trunk.phone_number.PhoneNumberInstance]
        """
        return list(self.stream(limit=limit, page_size=page_size, ))

    def page(self, page_token=values.unset, page_number=values.unset,
             page_size=values.unset):
        """Fetch one page of PhoneNumberInstance records; the request runs immediately.

        :param str page_token: PageToken provided by the API
        :param int page_number: Page Number, this value is simply for client state
        :param int page_size: Number of records to return, defaults to 50

        :returns: Page of PhoneNumberInstance
        :rtype: twilio.rest.trunking.v1.trunk.phone_number.PhoneNumberPage
        """
        params = values.of({
            'PageToken': page_token,
            'Page': page_number,
            'PageSize': page_size,
        })

        response = self._version.page(
            'GET',
            self._uri,
            params=params,
        )

        return PhoneNumberPage(self._version, response, self._solution)

    def get_page(self, target_url):
        """Fetch a specific page of PhoneNumberInstance records by its full URL.

        :param str target_url: API-generated URL for the requested results page

        :returns: Page of PhoneNumberInstance
        :rtype: twilio.rest.trunking.v1.trunk.phone_number.PhoneNumberPage
        """
        response = self._version.domain.twilio.request(
            'GET',
            target_url,
        )

        return PhoneNumberPage(self._version, response, self._solution)

    def get(self, sid):
        """Construct a PhoneNumberContext for one number on this trunk.

        :param sid: The sid

        :returns: twilio.rest.trunking.v1.trunk.phone_number.PhoneNumberContext
        :rtype: twilio.rest.trunking.v1.trunk.phone_number.PhoneNumberContext
        """
        return PhoneNumberContext(self._version, trunk_sid=self._solution['trunk_sid'], sid=sid, )

    def __call__(self, sid):
        """Construct a PhoneNumberContext (shorthand for get()).

        :param sid: The sid

        :returns: twilio.rest.trunking.v1.trunk.phone_number.PhoneNumberContext
        :rtype: twilio.rest.trunking.v1.trunk.phone_number.PhoneNumberContext
        """
        return PhoneNumberContext(self._version, trunk_sid=self._solution['trunk_sid'], sid=sid, )

    def __repr__(self):
        """Provide a friendly representation.

        :returns: Machine friendly representation
        :rtype: str
        """
        return '<Twilio.Trunking.V1.PhoneNumberList>'
class PhoneNumberPage(Page):
    """One page of PhoneNumber records returned by the API."""

    def __init__(self, version, response, solution):
        """Wrap one API response page.

        :param Version version: Version that contains the resource
        :param Response response: Response from the API
        :param solution: path solution carrying the trunk_sid

        :returns: twilio.rest.trunking.v1.trunk.phone_number.PhoneNumberPage
        :rtype: twilio.rest.trunking.v1.trunk.phone_number.PhoneNumberPage
        """
        super(PhoneNumberPage, self).__init__(version, response)

        # Path solution shared with the owning list resource.
        self._solution = solution

    def get_instance(self, payload):
        """Turn one payload dict from this page into a PhoneNumberInstance.

        :param dict payload: Payload response from the API

        :returns: twilio.rest.trunking.v1.trunk.phone_number.PhoneNumberInstance
        :rtype: twilio.rest.trunking.v1.trunk.phone_number.PhoneNumberInstance
        """
        return PhoneNumberInstance(self._version, payload, trunk_sid=self._solution['trunk_sid'], )

    def __repr__(self):
        """Provide a friendly representation.

        :returns: Machine friendly representation
        :rtype: str
        """
        return '<Twilio.Trunking.V1.PhoneNumberPage>'
class PhoneNumberContext(InstanceContext):
    """Context for fetching or deleting one phone number on a trunk."""

    def __init__(self, version, trunk_sid, sid):
        """Build the context for a single trunk phone number.

        :param Version version: Version that contains the resource
        :param trunk_sid: The trunk_sid
        :param sid: The sid

        :returns: twilio.rest.trunking.v1.trunk.phone_number.PhoneNumberContext
        :rtype: twilio.rest.trunking.v1.trunk.phone_number.PhoneNumberContext
        """
        super(PhoneNumberContext, self).__init__(version)

        # Path solution used to expand the resource URI template.
        self._solution = {'trunk_sid': trunk_sid, 'sid': sid, }
        self._uri = '/Trunks/{trunk_sid}/PhoneNumbers/{sid}'.format(**self._solution)

    def fetch(self):
        """Fetch the current state of this phone number from the API.

        :returns: Fetched PhoneNumberInstance
        :rtype: twilio.rest.trunking.v1.trunk.phone_number.PhoneNumberInstance
        """
        payload = self._version.fetch(
            'GET',
            self._uri,
            params=values.of({}),
        )

        return PhoneNumberInstance(
            self._version,
            payload,
            trunk_sid=self._solution['trunk_sid'],
            sid=self._solution['sid'],
        )

    def delete(self):
        """Remove this phone number from the trunk.

        :returns: True if delete succeeds, False otherwise
        :rtype: bool
        """
        return self._version.delete('delete', self._uri)

    def __repr__(self):
        """Provide a friendly representation.

        :returns: Machine friendly representation
        :rtype: str
        """
        details = ' '.join('{}={}'.format(k, v) for k, v in self._solution.items())
        return '<Twilio.Trunking.V1.PhoneNumberContext {}>'.format(details)
class PhoneNumberInstance(InstanceResource):
    """A phone number that belongs to a SIP trunk."""

    class AddressRequirement(object):
        NONE = "none"
        ANY = "any"
        LOCAL = "local"
        FOREIGN = "foreign"

    def __init__(self, version, payload, trunk_sid, sid=None):
        """Marshal one API payload into a PhoneNumberInstance.

        :returns: twilio.rest.trunking.v1.trunk.phone_number.PhoneNumberInstance
        :rtype: twilio.rest.trunking.v1.trunk.phone_number.PhoneNumberInstance
        """
        super(PhoneNumberInstance, self).__init__(version)

        # Keys copied from the payload verbatim; the two timestamp fields are
        # deserialized into datetimes separately below.
        plain_keys = (
            'account_sid',
            'address_requirements',
            'api_version',
            'beta',
            'capabilities',
            'friendly_name',
            'links',
            'phone_number',
            'sid',
            'sms_application_sid',
            'sms_fallback_method',
            'sms_fallback_url',
            'sms_method',
            'sms_url',
            'status_callback',
            'status_callback_method',
            'trunk_sid',
            'url',
            'voice_application_sid',
            'voice_caller_id_lookup',
            'voice_fallback_method',
            'voice_fallback_url',
            'voice_method',
            'voice_url',
        )
        self._properties = {key: payload[key] for key in plain_keys}
        self._properties['date_created'] = deserialize.iso8601_datetime(payload['date_created'])
        self._properties['date_updated'] = deserialize.iso8601_datetime(payload['date_updated'])

        # Context is created lazily by _proxy on first use.
        self._context = None
        self._solution = {'trunk_sid': trunk_sid, 'sid': sid or self._properties['sid'], }

    @property
    def _proxy(self):
        """Lazily build (and cache) the PhoneNumberContext that performs API calls.

        :returns: PhoneNumberContext for this PhoneNumberInstance
        :rtype: twilio.rest.trunking.v1.trunk.phone_number.PhoneNumberContext
        """
        if self._context is None:
            self._context = PhoneNumberContext(
                self._version,
                trunk_sid=self._solution['trunk_sid'],
                sid=self._solution['sid'],
            )
        return self._context

    @property
    def account_sid(self):
        """unicode: The account_sid."""
        return self._properties['account_sid']

    @property
    def address_requirements(self):
        """PhoneNumberInstance.AddressRequirement: The address_requirements."""
        return self._properties['address_requirements']

    @property
    def api_version(self):
        """unicode: The api_version."""
        return self._properties['api_version']

    @property
    def beta(self):
        """bool: The beta flag."""
        return self._properties['beta']

    @property
    def capabilities(self):
        """unicode: The capabilities."""
        return self._properties['capabilities']

    @property
    def date_created(self):
        """datetime: The date_created."""
        return self._properties['date_created']

    @property
    def date_updated(self):
        """datetime: The date_updated."""
        return self._properties['date_updated']

    @property
    def friendly_name(self):
        """unicode: The friendly_name."""
        return self._properties['friendly_name']

    @property
    def links(self):
        """unicode: The links."""
        return self._properties['links']

    @property
    def phone_number(self):
        """unicode: The phone_number."""
        return self._properties['phone_number']

    @property
    def sid(self):
        """unicode: The sid."""
        return self._properties['sid']

    @property
    def sms_application_sid(self):
        """unicode: The sms_application_sid."""
        return self._properties['sms_application_sid']

    @property
    def sms_fallback_method(self):
        """unicode: The sms_fallback_method."""
        return self._properties['sms_fallback_method']

    @property
    def sms_fallback_url(self):
        """unicode: The sms_fallback_url."""
        return self._properties['sms_fallback_url']

    @property
    def sms_method(self):
        """unicode: The sms_method."""
        return self._properties['sms_method']

    @property
    def sms_url(self):
        """unicode: The sms_url."""
        return self._properties['sms_url']

    @property
    def status_callback(self):
        """unicode: The status_callback."""
        return self._properties['status_callback']

    @property
    def status_callback_method(self):
        """unicode: The status_callback_method."""
        return self._properties['status_callback_method']

    @property
    def trunk_sid(self):
        """unicode: The trunk_sid."""
        return self._properties['trunk_sid']

    @property
    def url(self):
        """unicode: The url."""
        return self._properties['url']

    @property
    def voice_application_sid(self):
        """unicode: The voice_application_sid."""
        return self._properties['voice_application_sid']

    @property
    def voice_caller_id_lookup(self):
        """bool: The voice_caller_id_lookup flag."""
        return self._properties['voice_caller_id_lookup']

    @property
    def voice_fallback_method(self):
        """unicode: The voice_fallback_method."""
        return self._properties['voice_fallback_method']

    @property
    def voice_fallback_url(self):
        """unicode: The voice_fallback_url."""
        return self._properties['voice_fallback_url']

    @property
    def voice_method(self):
        """unicode: The voice_method."""
        return self._properties['voice_method']

    @property
    def voice_url(self):
        """unicode: The voice_url."""
        return self._properties['voice_url']

    def fetch(self):
        """Fetch the current state of this phone number from the API.

        :returns: Fetched PhoneNumberInstance
        :rtype: twilio.rest.trunking.v1.trunk.phone_number.PhoneNumberInstance
        """
        return self._proxy.fetch()

    def delete(self):
        """Remove this phone number from the trunk.

        :returns: True if delete succeeds, False otherwise
        :rtype: bool
        """
        return self._proxy.delete()

    def __repr__(self):
        """Provide a friendly representation.

        :returns: Machine friendly representation
        :rtype: str
        """
        details = ' '.join('{}={}'.format(k, v) for k, v in self._solution.items())
        return '<Twilio.Trunking.V1.PhoneNumberInstance {}>'.format(details)
| 30.641115 | 99 | 0.609108 |
from twilio.base import deserialize
from twilio.base import values
from twilio.base.instance_context import InstanceContext
from twilio.base.instance_resource import InstanceResource
from twilio.base.list_resource import ListResource
from twilio.base.page import Page
class PhoneNumberList(ListResource):
def __init__(self, version, trunk_sid):
super(PhoneNumberList, self).__init__(version)
self._solution = {'trunk_sid': trunk_sid, }
self._uri = '/Trunks/{trunk_sid}/PhoneNumbers'.format(**self._solution)
def create(self, phone_number_sid):
data = values.of({'PhoneNumberSid': phone_number_sid, })
payload = self._version.create(
'POST',
self._uri,
data=data,
)
return PhoneNumberInstance(self._version, payload, trunk_sid=self._solution['trunk_sid'], )
def stream(self, limit=None, page_size=None):
limits = self._version.read_limits(limit, page_size)
page = self.page(page_size=limits['page_size'], )
return self._version.stream(page, limits['limit'], limits['page_limit'])
def list(self, limit=None, page_size=None):
return list(self.stream(limit=limit, page_size=page_size, ))
def page(self, page_token=values.unset, page_number=values.unset,
page_size=values.unset):
params = values.of({'PageToken': page_token, 'Page': page_number, 'PageSize': page_size, })
response = self._version.page(
'GET',
self._uri,
params=params,
)
return PhoneNumberPage(self._version, response, self._solution)
def get_page(self, target_url):
response = self._version.domain.twilio.request(
'GET',
target_url,
)
return PhoneNumberPage(self._version, response, self._solution)
def get(self, sid):
return PhoneNumberContext(self._version, trunk_sid=self._solution['trunk_sid'], sid=sid, )
def __call__(self, sid):
return PhoneNumberContext(self._version, trunk_sid=self._solution['trunk_sid'], sid=sid, )
def __repr__(self):
return '<Twilio.Trunking.V1.PhoneNumberList>'
class PhoneNumberPage(Page):
def __init__(self, version, response, solution):
super(PhoneNumberPage, self).__init__(version, response)
self._solution = solution
def get_instance(self, payload):
return PhoneNumberInstance(self._version, payload, trunk_sid=self._solution['trunk_sid'], )
def __repr__(self):
return '<Twilio.Trunking.V1.PhoneNumberPage>'
class PhoneNumberContext(InstanceContext):
def __init__(self, version, trunk_sid, sid):
super(PhoneNumberContext, self).__init__(version)
self._solution = {'trunk_sid': trunk_sid, 'sid': sid, }
self._uri = '/Trunks/{trunk_sid}/PhoneNumbers/{sid}'.format(**self._solution)
def fetch(self):
params = values.of({})
payload = self._version.fetch(
'GET',
self._uri,
params=params,
)
return PhoneNumberInstance(
self._version,
payload,
trunk_sid=self._solution['trunk_sid'],
sid=self._solution['sid'],
)
def delete(self):
return self._version.delete('delete', self._uri)
def __repr__(self):
context = ' '.join('{}={}'.format(k, v) for k, v in self._solution.items())
return '<Twilio.Trunking.V1.PhoneNumberContext {}>'.format(context)
class PhoneNumberInstance(InstanceResource):
    """Read-only wrapper around a trunk phone-number REST payload.

    Every payload field is exposed as a property; fetch() and delete()
    are delegated to a lazily-created PhoneNumberContext (see _proxy).
    """
    class AddressRequirement(object):
        """Possible values of the 'address_requirements' payload field."""
        NONE = "none"
        ANY = "any"
        LOCAL = "local"
        FOREIGN = "foreign"
    def __init__(self, version, payload, trunk_sid, sid=None):
        """Capture the payload fields, eagerly deserializing date strings.

        :param version: API version object used for follow-up requests.
        :param payload: dict of attributes returned by the REST API;
            keys are indexed directly, so a missing key raises KeyError.
        :param trunk_sid: SID of the trunk that owns this phone number.
        :param sid: resource SID; defaults to the payload's 'sid' field.
        """
        super(PhoneNumberInstance, self).__init__(version)
        self._properties = {
            'account_sid': payload['account_sid'],
            'address_requirements': payload['address_requirements'],
            'api_version': payload['api_version'],
            'beta': payload['beta'],
            'capabilities': payload['capabilities'],
            'date_created': deserialize.iso8601_datetime(payload['date_created']),
            'date_updated': deserialize.iso8601_datetime(payload['date_updated']),
            'friendly_name': payload['friendly_name'],
            'links': payload['links'],
            'phone_number': payload['phone_number'],
            'sid': payload['sid'],
            'sms_application_sid': payload['sms_application_sid'],
            'sms_fallback_method': payload['sms_fallback_method'],
            'sms_fallback_url': payload['sms_fallback_url'],
            'sms_method': payload['sms_method'],
            'sms_url': payload['sms_url'],
            'status_callback': payload['status_callback'],
            'status_callback_method': payload['status_callback_method'],
            'trunk_sid': payload['trunk_sid'],
            'url': payload['url'],
            'voice_application_sid': payload['voice_application_sid'],
            'voice_caller_id_lookup': payload['voice_caller_id_lookup'],
            'voice_fallback_method': payload['voice_fallback_method'],
            'voice_fallback_url': payload['voice_fallback_url'],
            'voice_method': payload['voice_method'],
            'voice_url': payload['voice_url'],
        }
        # Context for fetch/delete is built lazily in _proxy.
        self._context = None
        self._solution = {'trunk_sid': trunk_sid, 'sid': sid or self._properties['sid'], }
    @property
    def _proxy(self):
        """Lazily create (and cache) the PhoneNumberContext for this record."""
        if self._context is None:
            self._context = PhoneNumberContext(
                self._version,
                trunk_sid=self._solution['trunk_sid'],
                sid=self._solution['sid'],
            )
        return self._context
    # --- read-only accessors over the raw payload -------------------------
    @property
    def account_sid(self):
        return self._properties['account_sid']
    @property
    def address_requirements(self):
        return self._properties['address_requirements']
    @property
    def api_version(self):
        return self._properties['api_version']
    @property
    def beta(self):
        return self._properties['beta']
    @property
    def capabilities(self):
        return self._properties['capabilities']
    @property
    def date_created(self):
        return self._properties['date_created']
    @property
    def date_updated(self):
        return self._properties['date_updated']
    @property
    def friendly_name(self):
        return self._properties['friendly_name']
    @property
    def links(self):
        return self._properties['links']
    @property
    def phone_number(self):
        return self._properties['phone_number']
    @property
    def sid(self):
        return self._properties['sid']
    @property
    def sms_application_sid(self):
        return self._properties['sms_application_sid']
    @property
    def sms_fallback_method(self):
        return self._properties['sms_fallback_method']
    @property
    def sms_fallback_url(self):
        return self._properties['sms_fallback_url']
    @property
    def sms_method(self):
        return self._properties['sms_method']
    @property
    def sms_url(self):
        return self._properties['sms_url']
    @property
    def status_callback(self):
        return self._properties['status_callback']
    @property
    def status_callback_method(self):
        return self._properties['status_callback_method']
    @property
    def trunk_sid(self):
        return self._properties['trunk_sid']
    @property
    def url(self):
        return self._properties['url']
    @property
    def voice_application_sid(self):
        return self._properties['voice_application_sid']
    @property
    def voice_caller_id_lookup(self):
        return self._properties['voice_caller_id_lookup']
    @property
    def voice_fallback_method(self):
        return self._properties['voice_fallback_method']
    @property
    def voice_fallback_url(self):
        return self._properties['voice_fallback_url']
    @property
    def voice_method(self):
        return self._properties['voice_method']
    @property
    def voice_url(self):
        return self._properties['voice_url']
    def fetch(self):
        """Re-fetch this resource from the API via the lazily-built context."""
        return self._proxy.fetch()
    def delete(self):
        """Delete this resource via the lazily-built context."""
        return self._proxy.delete()
    def __repr__(self):
        """Developer-facing representation listing the solution keys."""
        context = ' '.join('{}={}'.format(k, v) for k, v in self._solution.items())
        return '<Twilio.Trunking.V1.PhoneNumberInstance {}>'.format(context)
| true | true |
f71c31b5f216202481da86a95da2f3bc59155e05 | 20,865 | py | Python | autotest/osr/osr_ct.py | bnordgren/gdal | 8b9bff36d6e2c4eceb56e8f596286be7f3fc9f24 | [
"MIT"
] | 1 | 2018-11-29T10:15:53.000Z | 2018-11-29T10:15:53.000Z | autotest/osr/osr_ct.py | a0x8o/gdal | 54aa47ee60eea48fa4989d6ce41fdae4e02c6458 | [
"MIT"
] | 1 | 2017-12-30T02:12:01.000Z | 2017-12-30T02:12:01.000Z | autotest/osr/osr_ct.py | notcaremath/gdal | 2c1586ffda1e49d170b96f8f0d87bc7516554047 | [
"MIT"
] | null | null | null | #!/usr/bin/env pytest
# -*- coding: utf-8 -*-
###############################################################################
# $Id$
#
# Project: GDAL/OGR Test Suite
# Purpose: Test coordinate transformations.
# Author: Frank Warmerdam <warmerdam@pobox.com>
#
###############################################################################
# Copyright (c) 2003, Frank Warmerdam <warmerdam@pobox.com>
# Copyright (c) 2009-2013, Even Rouault <even dot rouault at spatialys.com>
# Copyright (c) 2014, Google
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
###############################################################################
import math
import sys
from osgeo import gdal
from osgeo import osr
from osgeo import ogr
import gdaltest
import pytest
###############################################################################
# Verify that we have PROJ.4 available.
def test_osr_ct_1():
    """Verify that PROJ.4 is available by creating a simple WGS84->UTM transform."""
    utm_srs = osr.SpatialReference()
    utm_srs.SetUTM(11)
    utm_srs.SetWellKnownGeogCS('WGS84')
    ll_srs = osr.SpatialReference()
    ll_srs.SetWellKnownGeogCS('WGS84')
    try:
        # Silence the expected error while probing for PROJ.4 support.
        gdal.PushErrorHandler('CPLQuietErrorHandler')
        ct = osr.CoordinateTransformation(ll_srs, utm_srs)
        gdal.PopErrorHandler()
        if gdal.GetLastErrorMsg().find('Unable to load PROJ.4') != -1:
            pytest.skip('PROJ.4 missing, transforms not available.')
    except ValueError:
        # Some bindings raise instead of returning an unusable object.
        gdal.PopErrorHandler()
        if gdal.GetLastErrorMsg().find('Unable to load PROJ.4') != -1:
            pytest.skip('PROJ.4 missing, transforms not available.')
        pytest.fail(gdal.GetLastErrorMsg())
    assert not (ct is None or ct.this is None), \
        'Unable to create simple CoordinateTransformat.'
###############################################################################
# Actually perform a simple LL to UTM conversion.
def test_osr_ct_2():
    """Perform a simple lat/long to UTM zone 11 conversion and check the result."""
    src = osr.SpatialReference()
    src.SetWellKnownGeogCS('WGS84')
    dst = osr.SpatialReference()
    dst.SetUTM(11)
    dst.SetWellKnownGeogCS('WGS84')
    transform = osr.CoordinateTransformation(src, dst)
    x, y, z = transform.TransformPoint(32.0, -117.5, 0.0)
    assert (x, y, z) == pytest.approx((452772.06, 3540544.89, 0.0), abs=0.01), \
        'Wrong LL to UTM result'
###############################################################################
# Transform an OGR geometry ... this is mostly aimed at ensuring that
# the OGRCoordinateTransformation target SRS isn't deleted till the output
# geometry which also uses it is deleted.
def test_osr_ct_3():
    """Transform an OGR geometry; the transformation's target SRS must stay
    alive (ref-counted) until the output geometry itself is released."""
    utm_srs = osr.SpatialReference()
    utm_srs.SetUTM(11)
    utm_srs.SetWellKnownGeogCS('WGS84')
    ll_srs = osr.SpatialReference()
    ll_srs.SetWellKnownGeogCS('WGS84')
    ll_srs.SetAxisMappingStrategy(osr.OAMS_TRADITIONAL_GIS_ORDER)
    ct = osr.CoordinateTransformation(ll_srs, utm_srs)
    pnt = ogr.CreateGeometryFromWkt('POINT(-117.5 32.0)', ll_srs)
    result = pnt.Transform(ct)
    assert result == 0
    # Drop our own references: the geometry alone must keep the SRS alive.
    ll_srs = None
    ct = None
    utm_srs = None
    out_srs = pnt.GetSpatialReference().ExportToPrettyWkt()
    assert out_srs[0:6] == 'PROJCS', 'output srs corrupt, ref counting issue?'
    pnt = None
###############################################################################
# Actually perform a simple LL to UTM conversion.
# Works for both OG and NG bindings
def test_osr_ct_4():
    """TransformPoints on a list mixing 3-D and 2-D tuples (OG and NG bindings)."""
    dst = osr.SpatialReference()
    dst.SetUTM(11)
    dst.SetWellKnownGeogCS('WGS84')
    src = osr.SpatialReference()
    src.SetWellKnownGeogCS('WGS84')
    src.SetAxisMappingStrategy(osr.OAMS_TRADITIONAL_GIS_ORDER)
    transform = osr.CoordinateTransformation(src, dst)
    result = transform.TransformPoints([(-117.5, 32.0, 0.0), (-117.5, 32.0)])
    assert len(result) == 2
    assert len(result[0]) == 3
    # Both input points are the same location, so both outputs must match.
    for point in result:
        assert point == pytest.approx((452772.06, 3540544.89, 0.0), abs=0.01), \
            'Wrong LL to UTM result'
###############################################################################
# Same test, but with any sequence of tuples instead of a tuple of tuple
# New in NG bindings (#3020)
def test_osr_ct_5():
    """Same as test_osr_ct_4 but feeding a tuple of tuples (NG bindings, #3020)."""
    dst = osr.SpatialReference()
    dst.SetUTM(11)
    dst.SetWellKnownGeogCS('WGS84')
    src = osr.SpatialReference()
    src.SetWellKnownGeogCS('WGS84')
    src.SetAxisMappingStrategy(osr.OAMS_TRADITIONAL_GIS_ORDER)
    transform = osr.CoordinateTransformation(src, dst)
    for point in transform.TransformPoints(((-117.5, 32.0, 0.0), (-117.5, 32.0))):
        assert point == pytest.approx((452772.06, 3540544.89, 0.0), abs=0.01), \
            'Wrong LL to UTM result'
###############################################################################
# Test osr.CreateCoordinateTransformation() method
def test_osr_ct_6():
    """osr.CreateCoordinateTransformation(): None inputs yield None, a valid
    SRS pair yields a working transformation."""
    with gdaltest.error_handler():
        bad_ct = osr.CreateCoordinateTransformation(None, None)
    assert bad_ct is None
    dst = osr.SpatialReference()
    dst.SetUTM(11)
    dst.SetWellKnownGeogCS('WGS84')
    src = osr.SpatialReference()
    src.SetWellKnownGeogCS('WGS84')
    src.SetAxisMappingStrategy(osr.OAMS_TRADITIONAL_GIS_ORDER)
    ct = osr.CreateCoordinateTransformation(src, dst)
    assert ct is not None
    for point in ct.TransformPoints(((-117.5, 32.0, 0.0), (-117.5, 32.0))):
        assert point == pytest.approx((452772.06, 3540544.89, 0.0), abs=0.01), \
            'Wrong LL to UTM result'
###############################################################################
# Actually perform a simple Pseudo Mercator to LL conversion.
def test_osr_ct_7():
    """Pseudo Mercator (EPSG:3857) to WGS84 lat/long, both on raw coordinates
    and through an OGR geometry."""
    pm_srs = osr.SpatialReference()
    pm_srs.ImportFromEPSG(3857)
    ll_srs = osr.SpatialReference()
    ll_srs.SetWellKnownGeogCS('WGS84')
    ll_srs.SetAxisMappingStrategy(osr.OAMS_TRADITIONAL_GIS_ORDER)
    ct = osr.CoordinateTransformation(pm_srs, ll_srs)
    (x, y, z) = ct.TransformPoint(7000000, 7000000, 0)
    (exp_x, exp_y, exp_z) = (62.8820698884, 53.0918187696, 0.0)
    if (exp_x != pytest.approx(x, abs=0.00001) or
        exp_y != pytest.approx(y, abs=0.00001) or
            exp_z != pytest.approx(z, abs=0.00001)):
        print('Got:      (%f, %f, %f)' % (x, y, z))
        print('Expected: (%f, %f, %f)' % (exp_x, exp_y, exp_z))
        pytest.fail('Wrong LL for Pseudo Mercator result')
    # Same transformation applied to a geometry object must agree.
    pnt = ogr.CreateGeometryFromWkt('POINT(%g %g)' % (7000000, 7000000),
                                    pm_srs)
    expected_pnt = ogr.CreateGeometryFromWkt('POINT(%.10f %.10f)' % (exp_x, exp_y),
                                             ll_srs)
    result = pnt.Transform(ct)
    assert result == 0
    if (expected_pnt.GetX() != pytest.approx(pnt.GetX(), abs=0.00001) or
            expected_pnt.GetY() != pytest.approx(pnt.GetY(), abs=0.00001) or
            expected_pnt.GetZ() != pytest.approx(pnt.GetZ(), abs=0.00001)):
        print('Got:      %s' % pnt.ExportToWkt())
        print('Expected: %s' % expected_pnt.ExportToWkt())
        pytest.fail('Failed to transform from Pseudo Mercator to LL')
###############################################################################
# Test WebMercator -> WGS84 optimized transform
def test_osr_ct_8():
    """Test the WebMercator -> WGS84 optimized transform path.

    Checks two batches of points: one on the exact same latitude (exercising
    the optimized path) and one with a slightly offset second point.  The
    duplicated compare/print/fail loop of the original is factored into a
    single helper so the two batches cannot drift apart.
    """
    def _expect_points(ct, pnts, expected_result):
        # Element-wise comparison with a tight tolerance; dump both sides
        # before failing so a mismatch is easy to diagnose.
        result = ct.TransformPoints(pnts)
        for i in range(2):
            for j in range(3):
                if result[i][j] != pytest.approx(expected_result[i][j], abs=1e-10):
                    print('Got:      %s' % str(result))
                    print('Expected: %s' % str(expected_result))
                    pytest.fail('Failed to transform from Pseudo Mercator to LL')

    src_srs = osr.SpatialReference()
    src_srs.ImportFromEPSG(3857)
    dst_srs = osr.SpatialReference()
    dst_srs.SetWellKnownGeogCS('WGS84')
    dst_srs.SetAxisMappingStrategy(osr.OAMS_TRADITIONAL_GIS_ORDER)
    ct = osr.CoordinateTransformation(src_srs, dst_srs)
    _expect_points(
        ct,
        [(0, 6274861.39400658), (1, 6274861.39400658)],
        [(0.0, 49.000000000000007, 0.0),
         (8.9831528411952125e-06, 49.000000000000007, 0.0)])
    _expect_points(
        ct,
        [(0, 6274861.39400658), (1 + 0, 1 + 6274861.39400658)],
        [(0.0, 49.000000000000007, 0.0),
         (8.9831528411952125e-06, 49.000005893478189, 0.0)])
###############################################################################
# Test coordinate transformation where only one CRS has a towgs84 clause (#1156)
def test_osr_ct_towgs84_only_one_side():
    """A towgs84 clause on only one side must yield an identity result (#1156)."""
    srs_towgs84 = osr.SpatialReference()
    srs_towgs84.SetFromUserInput("+proj=longlat +ellps=GRS80 +towgs84=100,200,300")
    srs_just_ellps = osr.SpatialReference()
    srs_just_ellps.SetFromUserInput('+proj=longlat +ellps=GRS80')
    # Both directions must leave the origin untouched.
    for src, dst in ((srs_towgs84, srs_just_ellps),
                     (srs_just_ellps, srs_towgs84)):
        x, y, z = osr.CoordinateTransformation(src, dst).TransformPoint(0, 0, 0)
        assert x == 0
        assert y == 0
        assert z == 0
###############################################################################
# Test coordinate transformation where both side have towgs84/datum clause (#1156)
def test_osr_ct_towgs84_both_side():
    """When both CRS carry a towgs84/datum clause, the shift is applied (#1156)."""
    srs_towgs84 = osr.SpatialReference()
    srs_towgs84.SetFromUserInput("+proj=longlat +ellps=GRS80 +towgs84=100,200,300")
    srs_other_towgs84 = osr.SpatialReference()
    srs_other_towgs84.SetFromUserInput("+proj=longlat +ellps=GRS80 +towgs84=0,0,0")
    srs_datum_wgs84 = osr.SpatialReference()
    srs_datum_wgs84.SetFromUserInput("+proj=longlat +datum=WGS84")
    pairs = (
        (srs_towgs84, srs_other_towgs84),
        (srs_towgs84, srs_datum_wgs84),
        (srs_datum_wgs84, srs_towgs84),
    )
    for src, dst in pairs:
        # A horizontal shift must be applied while the height stays intact.
        x, y, z = osr.CoordinateTransformation(src, dst).TransformPoint(0, 0, 20)
        assert x != 0
        assert y != 0
        assert z == 20
###############################################################################
# Test coordinate transformation with custom operation
def test_osr_ct_options_operation():
    """A custom PROJ operation supplied via SetOperation drives the transform."""
    options = osr.CoordinateTransformationOptions()
    assert options.SetOperation('+proj=affine +s11=-1')
    ct = osr.CoordinateTransformation(None, None, options)
    assert ct
    # s11=-1 negates the first coordinate and leaves the other two untouched.
    assert ct.TransformPoint(1, 2, 3) == (-1, 2, 3)
###############################################################################
# Test coordinate transformation with area of interest
def test_osr_ct_options_area_of_interest():
    """Area-of-interest bounds validation and its effect on NAD27->WGS84."""
    srs_nad27 = osr.SpatialReference()
    srs_nad27.SetFromUserInput("NAD27")
    srs_wgs84 = osr.SpatialReference()
    srs_wgs84.SetFromUserInput("WGS84")
    options = osr.CoordinateTransformationOptions()
    # Out-of-range bounding boxes must be rejected ...
    assert not options.SetAreaOfInterest(-200,40,-99,41)
    assert not options.SetAreaOfInterest(-100,-100,-99,41)
    assert not options.SetAreaOfInterest(-100,40,200,41)
    assert not options.SetAreaOfInterest(-100,40,-99,100)
    # ... while a valid one is accepted.
    assert options.SetAreaOfInterest(-100,40,-99,41)
    ct = osr.CoordinateTransformation(srs_nad27, srs_wgs84, options)
    assert ct
    # Inside the area of interest a small but non-zero datum shift applies.
    x, y, z = ct.TransformPoint(40.5,-99.5,0)
    assert x != 40.5
    assert x == pytest.approx(40.5, abs=1e-3)
    x, y, z = ct.TransformPoint(0,0,0)
    if sys.platform == 'darwin':
        print("ct.TransformPoint(0,0,0) doesn't return expected result on MacOSX. Not sure why.")
    else:
        # Far outside the area of interest the transform yields infinity.
        assert x == float('inf')
###############################################################################
# Test 4D transformations
def test_osr_ct_4D():
    """4D (x, y, z, t) transformation through a time-dependent Helmert pipeline."""
    options = osr.CoordinateTransformationOptions()
    assert options.SetOperation('+proj=pipeline +step +proj=unitconvert +xy_in=deg +xy_out=rad +step +proj=cart +step +proj=helmert +convention=position_vector +x=0.0127 +dx=-0.0029 +rx=-0.00039 +drx=-0.00011 +y=0.0065 +dy=-0.0002 +ry=0.00080 +dry=-0.00019 +z=-0.0209 +dz=-0.0006 +rz=-0.00114 +drz=0.00007 +s=0.00195 +ds=0.00001 +t_epoch=1988.0 +step +proj=cart +inv +step +proj=unitconvert +xy_in=rad +xy_out=deg')
    ct = osr.CoordinateTransformation(None, None, options)
    assert ct
    # Single 4D point at epoch 2000.
    x, y, z, t = ct.TransformPoint(2, 49, 0, 2000)
    assert x == pytest.approx(2.0000005420366, abs=1e-10), x
    assert y == pytest.approx(49.0000003766711, abs=1e-10), y
    assert z == pytest.approx(-0.0222802283242345, abs=1e-8), z
    assert t == pytest.approx(2000, abs=1e-10), t
    # Batch interface; the epoch (4th coordinate) must be honoured per point.
    ret = ct.TransformPoints([[2, 49, 0, 2000], [2, 49, 0, 1988]])
    assert len(ret) == 2, ret
    assert len(ret[0]) == 4, ret
    x, y, z, t = ret[0]
    assert x == pytest.approx(2.0000005420366, abs=1e-10), x
    assert y == pytest.approx(49.0000003766711, abs=1e-10), y
    assert z == pytest.approx(-0.0222802283242345, abs=1e-8), z
    assert t == pytest.approx(2000, abs=1e-10), t
    assert len(ret[1]) == 4, ret
    x, y, z, t = ret[1]
    assert x == pytest.approx(1.9999998809056305, abs=1e-10), x
    assert y == pytest.approx(48.9999995630005, abs=1e-10), y
    assert z == pytest.approx(0.005032399669289589, abs=1e-8), z
    assert t == pytest.approx(1988, abs=1e-10), t
###############################################################################
# Test geocentric transformations
def test_osr_ct_geocentric():
    """Geocentric (cartesian) transformation between two IGNF-registered CRS."""
    src = osr.SpatialReference()
    src.SetFromUserInput("IGNF:RGR92")
    dst = osr.SpatialReference()
    dst.SetFromUserInput("IGNF:REUN47")
    ct = osr.CoordinateTransformation(src, dst)
    assert ct
    got = ct.TransformPoint(3356123.5400, 1303218.3090, 5247430.6050)
    assert got == pytest.approx((3353420.949, 1304075.021, 5248935.144), abs=1e-1)
###############################################################################
# Test with +lon_wrap=180
def test_osr_ct_lon_wrap():
    """+lon_wrap=180 shifts a negative longitude by +360 degrees."""
    proj_version = (osr.GetPROJVersionMajor() * 10000 +
                    osr.GetPROJVersionMinor() * 100 +
                    osr.GetPROJVersionMicro())
    if proj_version < 70001:
        # Issue before PROJ 7.0.1
        pytest.skip()
    src = osr.SpatialReference()
    src.SetFromUserInput("+proj=longlat +ellps=GRS80")
    dst = osr.SpatialReference()
    dst.SetFromUserInput("+proj=longlat +ellps=GRS80 +lon_wrap=180")
    ct = osr.CoordinateTransformation(src, dst)
    assert ct
    x, y, _ = ct.TransformPoint(-25, 60, 0)
    assert x == pytest.approx(-25 + 360, abs=1e-12)
    assert y == pytest.approx(60, abs=1e-12)
###############################################################################
# Test ct.TransformPointWithErrorCode
def test_osr_ct_transformpointwitherrorcode():
    """TransformPointWithErrorCode returns (x, y, z, t, error_code)."""
    if osr.GetPROJVersionMajor() < 8:
        # Issue before PROJ 8
        pytest.skip()
    s = osr.SpatialReference()
    s.SetFromUserInput("+proj=longlat +ellps=GRS80")
    t = osr.SpatialReference()
    t.SetFromUserInput("+proj=tmerc +ellps=GRS80")
    ct = osr.CoordinateTransformation(s, t)
    assert ct
    x, y, z, t, error_code = ct.TransformPointWithErrorCode(1, 2, 3, 4)
    assert x == pytest.approx(111257.80439304397, rel=1e-10)
    assert y == pytest.approx(221183.3401672801, rel=1e-10)
    assert z == 3
    assert t == 4
    # Success path reports error code 0.
    assert error_code == 0
    # Latitude 90 is outside the transverse mercator projection domain.
    x, y, z, t, error_code = ct.TransformPointWithErrorCode(90, 0, 0, 0)
    assert math.isinf(x)
    assert error_code == osr.PROJ_ERR_COORD_TRANSFM_OUTSIDE_PROJECTION_DOMAIN
###############################################################################
# Test CoordinateTransformationOptions.SetDesiredAccuracy
def test_osr_ct_options_accuracy():
    """SetDesiredAccuracy(0.05) must make the WGS84 -> ETRS89 transformation
    fail: TransformPoint is expected to raise.

    BUG FIX: the original wrapped ``TransformPoint(); assert False`` in a
    bare ``try/except: pass``, which also swallowed the AssertionError from
    ``assert False`` — the test could never fail.  pytest.raises expresses
    the intended "must raise" semantics without masking failures.
    """
    s = osr.SpatialReference()
    s.SetFromUserInput("EPSG:4326")
    t = osr.SpatialReference()
    t.SetFromUserInput("EPSG:4258")  # ETRS89
    options = osr.CoordinateTransformationOptions()
    options.SetDesiredAccuracy(0.05)
    with gdaltest.error_handler():
        ct = osr.CoordinateTransformation(s, t, options)
    with pytest.raises(Exception):
        ct.TransformPoint(49, 2, 0)
###############################################################################
# Test CoordinateTransformationOptions.SetBallparkAllowed
def test_osr_ct_options_ballpark_disallowed():
    """With SetBallparkAllowed(False), the NAD27 -> ETRS89 transformation must
    fail: TransformPoint is expected to raise.

    BUG FIX: the original wrapped ``TransformPoint(); assert False`` in a
    bare ``try/except: pass``, which also swallowed the AssertionError from
    ``assert False`` — the test could never fail.  pytest.raises expresses
    the intended "must raise" semantics without masking failures.
    """
    s = osr.SpatialReference()
    s.SetFromUserInput("EPSG:4267")  # NAD27
    t = osr.SpatialReference()
    t.SetFromUserInput("EPSG:4258")  # ETRS89
    options = osr.CoordinateTransformationOptions()
    options.SetBallparkAllowed(False)
    with gdaltest.error_handler():
        ct = osr.CoordinateTransformation(s, t, options)
    with pytest.raises(Exception):
        ct.TransformPoint(49, 2, 0)
###############################################################################
# Test that we pass a neutral time when not explicitly specified
def test_osr_ct_non_specified_time_with_time_dependent_transformation():
    """When no time is supplied, a time-dependent Helmert must act neutrally."""
    options = osr.CoordinateTransformationOptions()
    options.SetOperation('+proj=pipeline +step +proj=axisswap +order=2,1 +step +proj=unitconvert +xy_in=deg +z_in=m +xy_out=rad +z_out=m +step +proj=cart +ellps=GRS80 +step +inv +proj=helmert +dx=0.0008 +dy=-0.0006 +dz=-0.0014 +drx=6.67e-05 +dry=-0.0007574 +drz=-5.13e-05 +ds=-7e-05 +t_epoch=2010 +convention=coordinate_frame +step +inv +proj=cart +ellps=GRS80 +step +proj=unitconvert +xy_in=rad +z_in=m +xy_out=deg +z_out=m +step +proj=axisswap +order=2,1')
    ct = osr.CoordinateTransformation(None, None, options)
    assert ct
    # 3D input (no time): the point must come back essentially unchanged.
    x, y, _ = ct.TransformPoint(50, -40, 0)
    assert (x, y) == pytest.approx((50, -40), abs=1e-10)
###############################################################################
# Test using OGRSpatialReference::CoordinateEpoch()
def test_osr_ct_take_into_account_srs_coordinate_epoch():
    """GDA2020 <-> ITRF2014 results must depend on the target's coordinate epoch."""
    if osr.GetPROJVersionMajor() * 100 + osr.GetPROJVersionMinor() < 702:
        pytest.skip('requires PROJ 7.2 or later')
    s = osr.SpatialReference()
    s.SetFromUserInput("EPSG:7844")  # GDA2020
    t_2020 = osr.SpatialReference()
    t_2020.SetFromUserInput("EPSG:9000")  # ITRF2014
    t_2020.SetCoordinateEpoch(2020)
    # 2020 is the central epoch of the transformation, so no coordinate
    # change is expected
    ct = osr.CoordinateTransformation(s, t_2020)
    x, y, _ = ct.TransformPoint(-30, 150, 0)
    assert x == pytest.approx(-30, abs=1e-10)
    assert y == pytest.approx(150, abs=1e-10)
    # A different epoch (2030) must yield a small shift, in both directions.
    t_2030 = osr.SpatialReference()
    t_2030.SetFromUserInput("EPSG:9000")  # ITRF2014
    t_2030.SetCoordinateEpoch(2030)
    ct = osr.CoordinateTransformation(s, t_2030)
    x, y, _ = ct.TransformPoint(-30, 150, 0)
    assert x == pytest.approx(-29.9999950478, abs=1e-10)
    assert y == pytest.approx(150.0000022212, abs=1e-10)
    ct = osr.CoordinateTransformation(t_2030, s)
    x, y, _ = ct.TransformPoint(-29.9999950478, 150.0000022212, 0)
    assert x == pytest.approx(-30, abs=1e-10)
    assert y == pytest.approx(150, abs=1e-10)
    # Not properly supported currently
    gdal.ErrorReset()
    with gdaltest.error_handler():
        ct = osr.CoordinateTransformation(t_2020, t_2030)
    assert gdal.GetLastErrorMsg() != ''
| 36.161179 | 458 | 0.620129 | true | true | |
f71c32e40c090845453a2573a352eecf99ded05c | 2,896 | py | Python | openml/__init__.py | Rong-Inspur/openml-python | 07d429c843cf589d8096db76d520317acf7a99ab | [
"BSD-3-Clause"
] | null | null | null | openml/__init__.py | Rong-Inspur/openml-python | 07d429c843cf589d8096db76d520317acf7a99ab | [
"BSD-3-Clause"
] | null | null | null | openml/__init__.py | Rong-Inspur/openml-python | 07d429c843cf589d8096db76d520317acf7a99ab | [
"BSD-3-Clause"
] | null | null | null | """
The OpenML module implements a python interface to
`OpenML <https://www.openml.org>`_, a collaborative platform for machine
learning. OpenML can be used to
* store, download and analyze datasets
* make experiments and their results (e.g. models, predictions)
  accessible and reproducible for everybody
* analyze experiments (uploaded by you and other collaborators) and conduct
meta studies
In particular, this module implements a python interface for the
`OpenML REST API <https://www.openml.org/guide#!rest_services>`_
(`REST on wikipedia
<http://en.wikipedia.org/wiki/Representational_state_transfer>`_).
"""
# License: BSD 3-Clause
from . import _api_calls
from . import config
from .datasets import OpenMLDataset, OpenMLDataFeature
from . import datasets
from . import evaluations
from .evaluations import OpenMLEvaluation
from . import extensions
from . import exceptions
from . import tasks
from .tasks import (
OpenMLTask,
OpenMLSplit,
OpenMLSupervisedTask,
OpenMLClassificationTask,
OpenMLRegressionTask,
OpenMLClusteringTask,
OpenMLLearningCurveTask,
)
from . import runs
from .runs import OpenMLRun
from . import flows
from .flows import OpenMLFlow
from . import study
from .study import OpenMLStudy, OpenMLBenchmarkSuite
from . import utils
from . import setups
from .setups import OpenMLSetup, OpenMLParameter
from .__version__ import __version__
def populate_cache(task_ids=None, dataset_ids=None, flow_ids=None,
                   run_ids=None):
    """
    Populate a cache for offline and parallel usage of the OpenML connector.

    Each id iterable is optional; ``None`` means there is nothing to fetch
    for that entity type.

    Parameters
    ----------
    task_ids : iterable
    dataset_ids : iterable
    flow_ids : iterable
    run_ids : iterable

    Returns
    -------
    None
    """
    # Pair every id collection with the getter that downloads (and thereby
    # caches) the corresponding entity.
    fetchers = (
        (task_ids, tasks.functions.get_task),
        (dataset_ids, datasets.functions.get_dataset),
        (flow_ids, flows.functions.get_flow),
        (run_ids, runs.functions.get_run),
    )
    for entity_ids, fetch in fetchers:
        if entity_ids is None:
            continue
        for entity_id in entity_ids:
            fetch(entity_id)
# Explicit public API of the top-level ``openml`` package (consumed by
# ``from openml import *`` and documentation tooling).
__all__ = [
    'OpenMLDataset',
    'OpenMLDataFeature',
    'OpenMLRun',
    'OpenMLSplit',
    'OpenMLEvaluation',
    'OpenMLSetup',
    'OpenMLParameter',
    'OpenMLTask',
    'OpenMLSupervisedTask',
    'OpenMLClusteringTask',
    'OpenMLLearningCurveTask',
    'OpenMLRegressionTask',
    'OpenMLClassificationTask',
    'OpenMLFlow',
    'OpenMLStudy',
    'OpenMLBenchmarkSuite',
    'datasets',
    'evaluations',
    'exceptions',
    'extensions',
    'config',
    'runs',
    'flows',
    'tasks',
    'setups',
    'study',
    'utils',
    '_api_calls',
    '__version__',
]
# Load the scikit-learn extension by default
import openml.extensions.sklearn  # noqa: F401
| 23.737705 | 76 | 0.699931 |
from . import _api_calls
from . import config
from .datasets import OpenMLDataset, OpenMLDataFeature
from . import datasets
from . import evaluations
from .evaluations import OpenMLEvaluation
from . import extensions
from . import exceptions
from . import tasks
from .tasks import (
OpenMLTask,
OpenMLSplit,
OpenMLSupervisedTask,
OpenMLClassificationTask,
OpenMLRegressionTask,
OpenMLClusteringTask,
OpenMLLearningCurveTask,
)
from . import runs
from .runs import OpenMLRun
from . import flows
from .flows import OpenMLFlow
from . import study
from .study import OpenMLStudy, OpenMLBenchmarkSuite
from . import utils
from . import setups
from .setups import OpenMLSetup, OpenMLParameter
from .__version__ import __version__
def populate_cache(task_ids=None, dataset_ids=None, flow_ids=None,
run_ids=None):
if task_ids is not None:
for task_id in task_ids:
tasks.functions.get_task(task_id)
if dataset_ids is not None:
for dataset_id in dataset_ids:
datasets.functions.get_dataset(dataset_id)
if flow_ids is not None:
for flow_id in flow_ids:
flows.functions.get_flow(flow_id)
if run_ids is not None:
for run_id in run_ids:
runs.functions.get_run(run_id)
__all__ = [
'OpenMLDataset',
'OpenMLDataFeature',
'OpenMLRun',
'OpenMLSplit',
'OpenMLEvaluation',
'OpenMLSetup',
'OpenMLParameter',
'OpenMLTask',
'OpenMLSupervisedTask',
'OpenMLClusteringTask',
'OpenMLLearningCurveTask',
'OpenMLRegressionTask',
'OpenMLClassificationTask',
'OpenMLFlow',
'OpenMLStudy',
'OpenMLBenchmarkSuite',
'datasets',
'evaluations',
'exceptions',
'extensions',
'config',
'runs',
'flows',
'tasks',
'setups',
'study',
'utils',
'_api_calls',
'__version__',
]
import openml.extensions.sklearn
| true | true |
f71c33cccf1872a2d06d40b32de68921437e9c87 | 540 | py | Python | src/bos_consensus/middlewares/blockchain/base.py | LuffyEMonkey/isaac-consensus-protocol | 806d967d56ef8862a477b2515c7854af289c10a0 | [
"Apache-2.0"
] | 1 | 2018-04-10T11:00:59.000Z | 2018-04-10T11:00:59.000Z | src/bos_consensus/middlewares/blockchain/base.py | LuffyEMonkey/isaac-consensus-protocol | 806d967d56ef8862a477b2515c7854af289c10a0 | [
"Apache-2.0"
] | null | null | null | src/bos_consensus/middlewares/blockchain/base.py | LuffyEMonkey/isaac-consensus-protocol | 806d967d56ef8862a477b2515c7854af289c10a0 | [
"Apache-2.0"
] | null | null | null | from bos_consensus.util import LoggingMixin
class NoFurtherBlockchainMiddlewares(Exception):
    """Marker exception; by its name it ends middleware chaining.  Raised
    outside this module — confirm usage at the call sites."""
    pass
class StopReceiveBallot(Exception):
    """Marker exception; by its name it aborts processing of an incoming
    ballot.  Raised outside this module — confirm usage at the call sites."""
    pass
class BaseBlockchainMiddleware(LoggingMixin):
    """Base class for blockchain middlewares: wires up logging and provides
    no-op ballot hooks for subclasses to override."""
    # Blockchain this middleware observes; assigned in __init__.
    blockchain = None
    def __init__(self, blockchain):
        self.blockchain = blockchain
        super(BaseBlockchainMiddleware, self).__init__()
        # Tag log records with the owning consensus node's name.
        self.set_logging('middleware', node=self.blockchain.consensus.node.name)
    def received_ballot(self, ballot):
        """Override to react to a received ballot; base implementation does nothing."""
        pass
    def finished_ballot(self, ballot):
        """Override to react to a finished ballot; base implementation does nothing."""
        pass
| 20.769231 | 80 | 0.724074 | from bos_consensus.util import LoggingMixin
class NoFurtherBlockchainMiddlewares(Exception):
pass
class StopReceiveBallot(Exception):
pass
class BaseBlockchainMiddleware(LoggingMixin):
blockchain = None
def __init__(self, blockchain):
self.blockchain = blockchain
super(BaseBlockchainMiddleware, self).__init__()
self.set_logging('middleware', node=self.blockchain.consensus.node.name)
def received_ballot(self, ballot):
pass
def finished_ballot(self, ballot):
pass
| true | true |
f71c347fa097a89874460ad438e91b290e146610 | 6,795 | py | Python | Materials/views.py | Gguidini/artheart-db-explorer | 8e854248ff799f74f2702f767e5614e154e4a7f8 | [
"MIT"
] | null | null | null | Materials/views.py | Gguidini/artheart-db-explorer | 8e854248ff799f74f2702f767e5614e154e4a7f8 | [
"MIT"
] | null | null | null | Materials/views.py | Gguidini/artheart-db-explorer | 8e854248ff799f74f2702f767e5614e154e4a7f8 | [
"MIT"
] | null | null | null | """
This file defines functions to manipulate user interaction with the web-interface.
Responsible for views related to the models defined in Materials/models.py.
"""
import os
from django.core.paginator import Paginator
from django.http import HttpResponseForbidden
from django.shortcuts import HttpResponse, redirect, render
from django.urls import reverse_lazy
from ArtHeart.settings import MEDIA_ROOT, MEDIA_URL
from .forms import ApostilaUpload, CategoryUpload, ProjectUpload
from .models import Apostila, Categoria, Project
# Create your views here.
def search(request):
    """
    List all available Apostilas, optionally filtered by title substring,
    project and categories (GET parameters), 30 entries per page.
    Template for this view is 'Materials/search.html'.  Links to detail view.
    """
    context = {
        'projects': Project.objects.all().order_by('name'),
        'categories': Categoria.objects.all().order_by('category'),
        'media': MEDIA_URL,
    }
    apostilas = Apostila.objects.all()
    # Narrow the queryset according to any submitted GET filters.
    if request.method == 'GET' and request.GET:
        title = request.GET.get('search')
        if title is not None:
            apostilas = apostilas.filter(title__icontains=title)
        project_pk = request.GET.get('project')
        if project_pk:
            apostilas = apostilas.filter(project=project_pk)
        selected_categories = request.GET.getlist('categories')
        if selected_categories:
            apostilas = apostilas.filter(categories__in=selected_categories)
    # Paginate; Paginator.get_page copes with a missing/invalid page number.
    paginator = Paginator(apostilas, 30)
    context['entries'] = paginator.get_page(request.GET.get('page'))
    return render(request, 'Materials/search.html', context)
def detail(request, pk):
    """
    Display, create (pk == -1) or edit an existing Apostila.
    On a successful POST redirects back to the search view; on validation
    errors re-renders the form with its error messages.
    Template for this view is 'Materials/detail.html'.
    """
    pk = int(pk)
    if request.method == 'GET':
        if pk != -1:
            ap = Apostila.objects.get(pk=pk)
            form = ApostilaUpload(instance=ap)
            data = {'doc': ap, 'form': form, 'edit': True}
        else:
            form = ApostilaUpload()
            data = {'form': form, 'edit': False}
        return render(request, 'Materials/detail.html', data)
    else:
        if pk != -1:
            # Replacing or clearing the stored file: remove the old file first.
            if 'file-clear' in request.POST or 'file' in request.FILES:
                deleteFile(pk)
            form = ApostilaUpload(
                request.POST or None, request.FILES or None, instance=Apostila.objects.get(pk=pk))
        else:
            form = ApostilaUpload(
                request.POST or None, request.FILES or None)
        if form.is_valid():
            entry = form.save()
            # Many-to-many fields must be set after the initial save.
            selected = [Categoria.objects.get(pk=c)
                        for c in request.POST.getlist('categories')]
            entry.categories.set(selected)
            selected = [Project.objects.get(pk=p)
                        for p in request.POST.getlist('project')]
            entry.project.set(selected)
            entry.save()
            return redirect(reverse_lazy('url_search'))
        else:
            data = {'form': form}
            if pk != -1:
                data['doc'] = Apostila.objects.get(pk=pk)
                data['edit'] = True
            else:
                data['edit'] = False
            # BUG FIX: the original rendered 'Materials/details.html' (typo,
            # the GET branch uses 'Materials/detail.html') and omitted the
            # context dict, so form errors never reached the template.
            return render(request, 'Materials/detail.html', data)
def projects(request):
    """
    Shows available Projects, ordered by name, and lets the user add a
    new Project via POST.
    Template for this view is 'Materials/projects.html'.
    Links to edit_project view.
    """
    data = {
        'projects': Project.objects.all().order_by('name'),
        'form': ProjectUpload(),
    }
    if request.method == 'POST':
        form = ProjectUpload(request.POST or None)
        if form.is_valid():
            form.save()
            # Refresh the listing so the newly created Project shows up.
            data['projects'] = Project.objects.all()
        # Re-display the bound form (with errors, if any).
        data['form'] = form
    return render(request, 'Materials/projects.html', data)
def edit_project(request, pk):
    """
    Handles editing of an existing Project.
    On successful operation returns to projects view.
    Template for this view is 'Materials/edit_project.html'.
    """
    project = Project.objects.get(pk=pk)
    data = {
        'doc': project,
        'form': ProjectUpload(instance=project),
        'entries': project.apostila_set.all(),
    }
    if request.method == 'POST':
        bound = ProjectUpload(request.POST, instance=project)
        if bound.is_valid():
            entry = bound.save(commit=False)
            # 'completed' is read straight from POST — presumably a
            # checkbox outside the form fields; verify against template.
            if 'completed' in request.POST:
                entry.completed = True
            entry.save()
            return redirect(reverse_lazy('url_projects'))
        data['form'] = bound
    return render(request, 'Materials/edit_project.html', data)
def category(request, pk):
    """
    Manages creation and deletion of Categoria.
    GET with a pk deletes that Categoria; POST creates a new one.
    Template for this view is 'Materials/category.html'.
    """
    if request.method == 'GET':
        if pk is not None:
            # A stale/bookmarked link may reference an already-deleted
            # category; ignore that instead of raising a 500. (This
            # restores the handling the commented-out try/except hinted at.)
            try:
                Categoria.objects.get(pk=pk).delete()
            except Categoria.DoesNotExist:
                pass
    else:
        form = CategoryUpload(request.POST or None)
        if form.is_valid():
            form.save()
    cats = Categoria.objects.all().order_by('name')
    form = CategoryUpload()
    data = {'cat': cats, 'form': form}
    return render(request, 'Materials/category.html', data)
def deleteFile(pk):
    """
    Deletes the file referenced by an Apostila, if any is on disk.
    Has no template or URL.
    """
    ap = Apostila.objects.get(pk=pk)
    try:
        os.remove(os.path.join(MEDIA_ROOT, str(ap.file)))
    except OSError:
        # The Apostila may have no stored file (or it was already removed).
        # Catch only filesystem errors: the previous bare `except:` would
        # also have silenced unrelated bugs.
        pass
def delete(request, pk):
    """
    Deletes an Apostila from the database along with any file it
    references on disk.
    Has no template.
    """
    deleteFile(pk)
    Apostila.objects.get(pk=pk).delete()
    return redirect('url_search')
def delete_project(request, pk):
    """
    Deletes a Project. Every Apostila related to it is first detached
    from the project (the Apostila rows themselves are kept).
    Has no template.
    """
    project = Project.objects.get(pk=pk)
    # Dissociate each Apostila from this project before removing it.
    for apostila in project.apostila_set.all():
        apostila.project.remove(project)
    project.delete()
    return redirect('url_projects')
| 31.027397 | 98 | 0.607653 |
import os
from django.core.paginator import Paginator
from django.http import HttpResponseForbidden
from django.shortcuts import HttpResponse, redirect, render
from django.urls import reverse_lazy
from ArtHeart.settings import MEDIA_ROOT, MEDIA_URL
from .forms import ApostilaUpload, CategoryUpload, ProjectUpload
from .models import Apostila, Categoria, Project
def search(request):
    """
    Lists Apostilas, 30 per page, optionally filtered by a free-text
    'search' term, a 'project' and/or 'categories' GET parameters.
    Template for this view is 'Materials/search.html'.
    """
    data = {}
    projs = Project.objects.all().order_by('name')
    cats = Categoria.objects.all().order_by('category')
    data['projects'] = projs
    data['categories'] = cats
    data['media'] = MEDIA_URL
    aps = Apostila.objects.all()
    # filter entries to be displayed
    if request.method == 'GET' and not request.GET == {}:
        if 'search' in request.GET:
            search = request.GET['search']
            aps = aps.filter(title__icontains=search)
        if 'project' in request.GET:
            proj = request.GET['project']
            if proj != '':
                aps = aps.filter(project=proj)
        if 'categories' in request.GET:
            cats = request.GET.getlist('categories')
            aps = aps.filter(categories__in=cats)
    # paginate the filtered entries, 30 per page
    paginator = Paginator(aps, 30)
    page = request.GET.get('page')
    aps = paginator.get_page(page)
    data['entries'] = aps
    return render(request, 'Materials/search.html', data)
def detail(request, pk):
    """
    Edit an existing Apostila or add a new one.
    On successful operation, returns to search view.
    Template for this view is 'Materials/detail.html'.

    pk == -1 means "create new"; any other pk edits that Apostila.
    """
    pk = int(pk)
    if request.method == 'GET':
        if pk != -1:
            ap = Apostila.objects.get(pk=pk)
            form = ApostilaUpload(instance=ap)
            data = {'doc': ap, 'form': form, 'edit': True}
        else:
            form = ApostilaUpload()
            data = {'form': form, 'edit': False}
        return render(request, 'Materials/detail.html', data)
    else:
        if pk != -1:
            # Replacing or clearing the stored file: remove the old one
            # from disk first so it does not become orphaned.
            if 'file-clear' in request.POST or 'file' in request.FILES:
                deleteFile(pk)
            form = ApostilaUpload(
                request.POST or None, request.FILES or None, instance=Apostila.objects.get(pk=pk))
        else:
            form = ApostilaUpload(
                request.POST or None, request.FILES or None)
        if form.is_valid():
            entry = form.save()
            # M2M fields are set explicitly from the raw POST lists.
            cats = request.POST.getlist('categories')
            projects = request.POST.getlist('project')
            selected = []
            for c in cats:
                selected.append(Categoria.objects.get(pk=c))
            entry.categories.set(selected)
            selected = []
            for p in projects:
                selected.append(Project.objects.get(pk=p))
            entry.project.set(selected)
            entry.save()
            return redirect(reverse_lazy('url_search'))
        else:
            data = {'form': form}
            if pk != -1:
                data['doc'] = Apostila.objects.get(pk=pk)
                data['edit'] = True
            else:
                data['edit'] = False
            # Bug fix: this branch previously rendered 'Materials/details.html'
            # (a template name used nowhere else) and omitted the context, so
            # validation errors were never displayed. Use the same template
            # and context as the GET branch.
            return render(request, 'Materials/detail.html', data)
def projects(request):
    """
    Lists Projects ordered by name and allows creating a new one via POST.
    Template for this view is 'Materials/projects.html'.
    """
    data = {}
    data['projects'] = Project.objects.all().order_by('name')
    data['form'] = ProjectUpload()
    if request.method == 'POST':
        form = ProjectUpload(request.POST or None)
        if form.is_valid():
            form.save()
            # refresh the listing so the new Project is included
            data['projects'] = Project.objects.all()
        data['form'] = form
        return render(request, 'Materials/projects.html', data)
    else:
        return render(request, 'Materials/projects.html', data)
def edit_project(request, pk):
    """
    Handles editing of an existing Project.
    On success redirects back to the projects view.
    Template for this view is 'Materials/edit_project.html'.
    """
    p = Project.objects.get(pk=pk)
    form = ProjectUpload(instance=p)
    entries = p.apostila_set.all()
    data = {'doc': p, 'form': form, 'entries':entries}
    if request.method == 'POST':
        form = ProjectUpload(request.POST, instance=p)
        if form.is_valid():
            entry = form.save(commit=False)
            # 'completed' is read straight from POST — presumably a
            # checkbox outside the form fields; verify against template.
            if 'completed' in request.POST:
                entry.completed = True
            entry.save()
            return redirect(reverse_lazy('url_projects'))
        else:
            data['form'] = form
            return render(request, 'Materials/edit_project.html', data)
    return render(request, 'Materials/edit_project.html', data)
def category(request, pk):
    """
    Creates (POST) and deletes (GET with pk) Categoria objects.
    Template for this view is 'Materials/category.html'.
    """
    if request.method == 'GET':
        if pk is not None:
            # NOTE(review): raises Categoria.DoesNotExist for a stale pk —
            # consider catching it.
            Categoria.objects.get(pk=pk).delete()
    else:
        form = CategoryUpload(request.POST or None)
        if form.is_valid():
            form.save()
    cats = Categoria.objects.all().order_by('name')
    form = CategoryUpload()
    data = {'cat': cats, 'form': form}
    return render(request, 'Materials/category.html', data)
def deleteFile(pk):
    """
    Deletes the file referenced by an Apostila, if any is on disk.
    Has no template or URL.
    """
    ap = Apostila.objects.get(pk=pk)
    try:
        os.remove(os.path.join(MEDIA_ROOT, str(ap.file)))
    except OSError:
        # No stored file (or it was already removed). Catch only
        # filesystem errors: the previous bare `except:` would also
        # have silenced unrelated bugs.
        pass
def delete(request, pk):
    """
    Deletes an Apostila and any file it references, then returns to search.
    Has no template.
    """
    doc = Apostila.objects.get(pk=pk)
    deleteFile(pk)
    doc.delete()
    return redirect('url_search')
def delete_project(request, pk):
    """
    Deletes a Project. Related Apostilas are first detached from it
    (the Apostila rows themselves are kept).
    Has no template.
    """
    p = Project.objects.get(pk=pk)
    aps = p.apostila_set.all()
    for a in aps:
        # dissociate this project from the Apostila before deleting
        a.project.remove(p)
    p.delete()
    return redirect('url_projects')
| true | true |
f71c352600ea4dd69dce91611e638d4ac22561c3 | 285 | py | Python | utils/__init__.py | 1219521375/bottom-up-attention.pytorch | 4a2e64383f024cc56728dd2a0ee63c8a171663c8 | [
"Apache-2.0"
] | 3 | 2022-02-18T13:38:47.000Z | 2022-03-30T11:30:35.000Z | utils/__init__.py | 1219521375/bottom-up-attention.pytorch | 4a2e64383f024cc56728dd2a0ee63c8a171663c8 | [
"Apache-2.0"
] | null | null | null | utils/__init__.py | 1219521375/bottom-up-attention.pytorch | 4a2e64383f024cc56728dd2a0ee63c8a171663c8 | [
"Apache-2.0"
] | null | null | null | from .utils import save_features
from .extract_features_faster import extract_feat_faster_start
from .extract_features_multigpu import extract_feat_multigpu_start
from .extract_features_singlegpu import extract_feat_singlegpu_start
from .extract_d2features import extract_feat_d2_start | 57 | 68 | 0.915789 | from .utils import save_features
from .extract_features_faster import extract_feat_faster_start
from .extract_features_multigpu import extract_feat_multigpu_start
from .extract_features_singlegpu import extract_feat_singlegpu_start
from .extract_d2features import extract_feat_d2_start | true | true |
f71c359d2395a2f0e5c51625afa87da5d3779d1a | 296,654 | py | Python | tests/test_core.py | Matheus28/emscripten | c1d3ca07227607043a92d55dee65a733851d6813 | [
"MIT"
] | null | null | null | tests/test_core.py | Matheus28/emscripten | c1d3ca07227607043a92d55dee65a733851d6813 | [
"MIT"
] | null | null | null | tests/test_core.py | Matheus28/emscripten | c1d3ca07227607043a92d55dee65a733851d6813 | [
"MIT"
] | null | null | null | # Copyright 2013 The Emscripten Authors. All rights reserved.
# Emscripten is available under two separate licenses, the MIT license and the
# University of Illinois/NCSA Open Source License. Both these licenses can be
# found in the LICENSE file.
import glob
import hashlib
import json
import logging
import os
import random
import re
import shutil
import sys
import time
import unittest
from pathlib import Path
from functools import wraps
if __name__ == '__main__':
raise Exception('do not run this file directly; do something like: tests/runner')
from tools.shared import try_delete, PIPE
from tools.shared import PYTHON, EMCC, EMAR
from tools.utils import WINDOWS, MACOS
from tools import shared, building, config, webassembly
from common import RunnerCore, path_from_root, requires_native_clang, test_file, create_file
from common import skip_if, needs_dylink, no_windows, no_mac, is_slow_test, parameterized
from common import env_modify, with_env_modify, disabled, node_pthreads
from common import read_file, read_binary, require_node, require_v8
from common import NON_ZERO, WEBIDL_BINDER, EMBUILDER
import clang_native
# decorators for limiting which modes a test can run in
logger = logging.getLogger("test_core")
def wasm_simd(f):
  """Run a test with wasm SIMD enabled (requires V8 with SIMD support)."""
  # Preserve the wrapped test's metadata (name/docstring), matching what
  # no_asan/no_lsan in this file already do via @wraps.
  @wraps(f)
  def decorated(self):
    self.require_v8()
    if not self.is_wasm():
      self.skipTest('wasm2js only supports MVP for now')
    if '-O3' in self.emcc_args:
      self.skipTest('SIMD tests are too slow with -O3 in the new LLVM pass manager, https://github.com/emscripten-core/emscripten/issues/13427')
    self.emcc_args.append('-msimd128')
    self.emcc_args.append('-fno-lax-vector-conversions')
    self.v8_args.append('--experimental-wasm-simd')
    f(self)
  return decorated
def wasm_relaxed_simd(f):
  """Run a test with relaxed SIMD enabled (wasm only)."""
  # Preserve the wrapped test's metadata, as no_asan/no_lsan do.
  @wraps(f)
  def decorated(self):
    # We don't actually run any tests yet, so don't require any engines.
    if not self.is_wasm():
      self.skipTest('wasm2js only supports MVP for now')
    self.emcc_args.append('-mrelaxed-simd')
    f(self)
  return decorated
def needs_non_trapping_float_to_int(f):
  """Skip under wasm2js, which only supports the wasm MVP feature set."""
  # Preserve the wrapped test's metadata, as no_asan/no_lsan do.
  @wraps(f)
  def decorated(self):
    if not self.is_wasm():
      self.skipTest('wasm2js only supports MVP for now')
    f(self)
  return decorated
def also_with_wasm_bigint(f):
  """Run the test twice: with WASM_BIGINT=0, and (wasm only) with
  WASM_BIGINT enabled under node's --experimental-wasm-bigint."""
  # Preserve the wrapped test's metadata, as no_asan/no_lsan do.
  @wraps(f)
  def decorated(self):
    self.set_setting('WASM_BIGINT', 0)
    f(self)
    if self.is_wasm():
      self.set_setting('WASM_BIGINT')
      self.require_node()
      self.node_args.append('--experimental-wasm-bigint')
      f(self)
  return decorated
# without EMTEST_ALL_ENGINES set we only run tests in a single VM by
# default. in some tests we know that cross-VM differences may happen and
# so are worth testing, and they should be marked with this decorator
def all_engines(f):
  """Force the test to run in all configured JS engines, not just one."""
  # Preserve the wrapped test's metadata, as no_asan/no_lsan do.
  @wraps(f)
  def decorated(self):
    old = self.use_all_engines
    self.use_all_engines = True
    self.set_setting('ENVIRONMENT', 'web,node,shell')
    try:
      f(self)
    finally:
      # always restore, even if the test raised/skipped
      self.use_all_engines = old
  return decorated
# Tests exception handling in emscripten exception handling mode, and if
# possible, new wasm EH mode.
def with_both_exception_handling(f):
  """Parameterize the test to run with emscripten (JS-based) exception
  handling, and, where the engine supports it, native wasm EH."""
  assert callable(f)
  def metafunc(self, native_exceptions):
    if native_exceptions:
      # Wasm EH is currently supported only in wasm backend and V8
      if not self.is_wasm():
        self.skipTest('wasm2js does not support wasm exceptions')
      self.require_v8()
      # FIXME Temporarily disabled. Enable this later when the bug is fixed.
      if '-fsanitize=address' in self.emcc_args:
        self.skipTest('Wasm EH does not work with asan yet')
      self.emcc_args.append('-fwasm-exceptions')
      self.v8_args.append('--experimental-wasm-eh')
      f(self)
    else:
      self.set_setting('DISABLE_EXCEPTION_CATCHING', 0)
      f(self)
  # The test runner expands this into two tests: '' (emscripten EH)
  # and 'wasm_eh' (native wasm EH).
  metafunc._parameterize = {'': (False,),
                            'wasm_eh': (True,)}
  return metafunc
def no_wasm2js(note=''):
  """Skip this test when running under wasm2js (WASM=0)."""
  assert not callable(note)
  def decorator(func):
    return skip_if(func, 'is_wasm2js', note)
  return decorator
def also_with_noderawfs(func):
  """Run the test twice: normally, and again with NODERAWFS (node only,
  since NODERAWFS accesses the real filesystem)."""
  # Preserve the wrapped test's metadata, as no_asan/no_lsan do.
  @wraps(func)
  def decorated(self):
    orig_args = self.emcc_args.copy()
    func(self)
    print('noderawfs')
    self.emcc_args = orig_args + ['-DNODERAWFS']
    self.set_setting('NODERAWFS')
    self.js_engines = [config.NODE_JS]
    func(self)
  return decorated
def can_do_standalone(self):
  """Return whether the current configuration can build a standalone wasm
  (wasm output, no heavy stack checking, no minimal runtime, no safe
  heap, no asan)."""
  if not self.is_wasm():
    return False
  if self.get_setting('STACK_OVERFLOW_CHECK', 0) >= 2:
    return False
  if self.get_setting('MINIMAL_RUNTIME'):
    return False
  if self.get_setting('SAFE_HEAP'):
    return False
  return '-fsanitize=address' not in self.emcc_args
def also_with_wasmfs(func):
  """Run the test twice: normally, and again with WASMFS enabled."""
  # Preserve the wrapped test's metadata, as no_asan/no_lsan do.
  @wraps(func)
  def decorated(self):
    func(self)
    print('wasmfs')
    if self.get_setting('STANDALONE_WASM'):
      self.skipTest("test currently cannot run both with WASMFS and STANDALONE_WASM")
    self.set_setting('WASMFS')
    func(self)
  return decorated
# Impure means a test that cannot run in a wasm VM yet, as it is not 100%
# standalone. We can still run them with the JS code though.
def also_with_standalone_wasm(wasm2c=False, impure=False):
  """Parameterize the test to also run in STANDALONE_WASM mode (and,
  with wasm2c=True, additionally through WASM2C).

  impure: the test is not 100% standalone (see comment above), so only
  run it via node/JS rather than in pure wasm VMs.
  """
  def decorated(func):
    def metafunc(self, standalone):
      if not standalone:
        func(self)
      else:
        if can_do_standalone(self):
          self.set_setting('STANDALONE_WASM')
          # we will not legalize the JS ffi interface, so we must use BigInt
          # support in order for JS to have a chance to run this without trapping
          # when it sees an i64 on the ffi.
          self.set_setting('WASM_BIGINT')
          self.emcc_args.append('-Wno-unused-command-line-argument')
          # if we are impure, disallow all wasm engines
          if impure:
            self.wasm_engines = []
          self.js_engines = [config.NODE_JS]
          self.node_args.append('--experimental-wasm-bigint')
          func(self)
          if wasm2c:
            print('wasm2c')
            self.set_setting('WASM2C')
            self.wasm_engines = []
            func(self)
    # The test runner expands this into two tests: '' and 'standalone'.
    metafunc._parameterize = {'': (False,),
                              'standalone': (True,)}
    return metafunc
  return decorated
def no_optimize(note=''):
  """Skip the test when any optimization level is enabled."""
  assert not callable(note)
  def decorator(func):
    assert callable(func)
    # Preserve the wrapped test's metadata, as no_asan/no_lsan do.
    @wraps(func)
    def decorated(self):
      if self.is_optimizing():
        self.skipTest(note)
      func(self)
    return decorated
  return decorator
def needs_make(note=''):
  """Skip the test on Windows, where the needed build tool (e.g. make)
  is not available on the test bots."""
  assert not callable(note)
  if WINDOWS:
    return unittest.skip('Tool not available on Windows bots (%s)' % note)
  # on other platforms, a no-op decorator
  return lambda f: f
def no_asan(note):
  """Skip the decorated test when the build uses AddressSanitizer."""
  assert not callable(note)
  def wrap(fn):
    assert callable(fn)
    @wraps(fn)
    def wrapper(self, *args, **kwargs):
      if '-fsanitize=address' in self.emcc_args:
        self.skipTest(note)
      fn(self, *args, **kwargs)
    return wrapper
  return wrap
def no_lsan(note):
  """Skip the decorated test when the build uses LeakSanitizer."""
  assert not callable(note)
  def wrap(fn):
    assert callable(fn)
    @wraps(fn)
    def wrapper(self, *args, **kwargs):
      if '-fsanitize=leak' in self.emcc_args:
        self.skipTest(note)
      fn(self, *args, **kwargs)
    return wrapper
  return wrap
def make_no_decorator_for_setting(name):
  """Build a 'no_<setting>' decorator factory: the produced decorator
  skips the test when `name` is enabled, either via the settings or via
  a literal '<name>=1' flag in emcc_args."""
  def factory(note):
    assert not callable(note)
    def deco(f):
      assert callable(f)
      flag = name + '=1'
      @wraps(f)
      def wrapper(self, *args, **kwargs):
        if flag in self.emcc_args or self.get_setting(name):
          self.skipTest(note)
        f(self, *args, **kwargs)
      return wrapper
    return deco
  return factory
no_minimal_runtime = make_no_decorator_for_setting('MINIMAL_RUNTIME')
no_safe_heap = make_no_decorator_for_setting('SAFE_HEAP')
def is_sanitizing(args):
  """Return True when the argument list contains any -fsanitize= flag."""
  return str(args).find('-fsanitize=') != -1
class TestCoreBase(RunnerCore):
  def is_wasm2js(self):
    # True when compiling to JS (wasm2js) rather than to wasm.
    return self.get_setting('WASM') == 0
  # A simple check whether the compiler arguments cause optimization.
  def is_optimizing(self):
    return '-O' in str(self.emcc_args) and '-O0' not in self.emcc_args
  def can_use_closure(self):
    # Closure is incompatible with debug info / profiling flags, and is
    # only exercised at -O2/-Os.
    return '-g' not in self.emcc_args and '--profiling' not in self.emcc_args and ('-O2' in self.emcc_args or '-Os' in self.emcc_args)
  # Use closure in some tests for some additional coverage
  def maybe_closure(self):
    # Returns True if closure was enabled, so callers can branch on it.
    if '--closure=1' not in self.emcc_args and self.can_use_closure():
      self.emcc_args += ['--closure=1']
      logger.debug('using closure compiler..')
      return True
    return False
  def assertStartswith(self, output, prefix):
    # Compare via assertEqual on the sliced prefix for a readable diff.
    self.assertEqual(prefix, output[:len(prefix)])
  def verify_in_strict_mode(self, filename):
    # Re-run the generated JS with "use strict" prepended, to catch
    # constructs that only work in sloppy mode.
    js = read_file(filename)
    filename += '.strict.js'
    with open(filename, 'w') as outfile:
      outfile.write('"use strict";\n' + js)
    self.run_js(filename)
  def do_core_test(self, testname, **kwargs):
    # Convenience wrapper: run a test from tests/core/, comparing output
    # against its expected-output file.
    self.do_run_in_out_file_test(Path('core', testname), **kwargs)
  def get_bullet_library(self, use_cmake):
    """Build (or fetch from cache) the third-party Bullet physics
    libraries, via either cmake or an autoconf-style configure."""
    if use_cmake:
      configure_commands = ['cmake', '.']
      configure_args = ['-DBUILD_DEMOS=OFF', '-DBUILD_EXTRAS=OFF', '-DUSE_GLUT=OFF']
      # Depending on whether 'configure' or 'cmake' is used to build, Bullet
      # places output files in different directory structures.
      generated_libs = [Path('src/BulletDynamics/libBulletDynamics.a'),
                        Path('src/BulletCollision/libBulletCollision.a'),
                        Path('src/LinearMath/libLinearMath.a')]
    else:
      configure_commands = ['sh', './configure']
      # Force a nondefault --host= so that the configure script will interpret
      # that we are doing cross-compilation
      # and skip attempting to run the generated executable with './a.out',
      # which would fail since we are building a .js file.
      configure_args = ['--disable-shared', '--host=i686-pc-linux-gnu',
                        '--disable-demos', '--disable-dependency-tracking']
      generated_libs = [Path('src/.libs/libBulletDynamics.a'),
                        Path('src/.libs/libBulletCollision.a'),
                        Path('src/.libs/libLinearMath.a')]
    return self.get_library('third_party/bullet', generated_libs,
                            configure=configure_commands,
                            configure_args=configure_args,
                            cache_name_extra=configure_commands[0])
@also_with_standalone_wasm()
@also_with_wasmfs
def test_hello_world(self):
self.do_core_test('test_hello_world.c')
# must not emit this unneeded internal thing
self.assertNotContained('EMSCRIPTEN_GENERATED_FUNCTIONS', read_file('test_hello_world.js'))
def test_wasm_synchronous_compilation(self):
self.set_setting('STRICT_JS')
self.set_setting('WASM_ASYNC_COMPILATION', 0)
self.do_core_test('test_hello_world.c')
@also_with_standalone_wasm()
def test_hello_argc(self):
self.do_core_test('test_hello_argc.c')
def test_intvars(self):
self.do_core_test('test_intvars.cpp')
def test_sintvars(self):
self.do_core_test('test_sintvars.c')
def test_int53(self):
self.emcc_args += ['-s', 'DEFAULT_LIBRARY_FUNCS_TO_INCLUDE=[$convertI32PairToI53,$convertU32PairToI53,$readI53FromU64,$readI53FromI64,$writeI53ToI64,$writeI53ToI64Clamped,$writeI53ToU64Clamped,$writeI53ToI64Signaling,$writeI53ToU64Signaling]']
self.do_core_test('test_int53.c', interleaved_output=False)
def test_i64(self):
self.do_core_test('test_i64.c')
def test_i64_2(self):
self.do_core_test('test_i64_2.cpp')
def test_i64_3(self):
self.do_core_test('test_i64_3.cpp')
def test_i64_4(self):
# stuff that also needs sign corrections
self.do_core_test('test_i64_4.c')
def test_i64_b(self):
self.do_core_test('test_i64_b.cpp')
def test_i64_cmp(self):
self.do_core_test('test_i64_cmp.cpp')
def test_i64_cmp2(self):
self.do_core_test('test_i64_cmp2.c')
def test_i64_double(self):
self.do_core_test('test_i64_double.cpp')
def test_i64_umul(self):
self.do_core_test('test_i64_umul.c')
@also_with_standalone_wasm()
def test_i64_precise(self):
self.do_core_test('test_i64_precise.c')
def test_i64_precise_needed(self):
self.do_core_test('test_i64_precise_needed.c')
def test_i64_llabs(self):
self.do_core_test('test_i64_llabs.c')
def test_i64_zextneg(self):
self.do_core_test('test_i64_zextneg.c')
def test_i64_7z(self):
# needs to flush stdio streams
self.set_setting('EXIT_RUNTIME')
self.do_core_test('test_i64_7z.c', args=['hallo'])
def test_i64_i16(self):
self.do_core_test('test_i64_i16.c')
def test_i64_qdouble(self):
self.do_core_test('test_i64_qdouble.c')
def test_i64_varargs(self):
self.do_core_test('test_i64_varargs.c', args='waka fleefl asdfasdfasdfasdf'.split())
@no_wasm2js('wasm_bigint')
def test_i64_invoke_bigint(self):
self.set_setting('WASM_BIGINT')
self.emcc_args += ['-fexceptions']
self.node_args += ['--experimental-wasm-bigint']
self.do_core_test('test_i64_invoke_bigint.cpp', js_engines=[config.NODE_JS])
def test_vararg_copy(self):
self.do_run_in_out_file_test('va_arg/test_va_copy.c')
def test_llvm_fabs(self):
self.do_core_test('test_llvm_fabs.c')
def test_double_varargs(self):
self.do_core_test('test_double_varargs.c')
def test_trivial_struct_varargs(self):
self.do_core_test('test_trivial_struct_varargs.c')
def test_struct_varargs(self):
self.do_core_test('test_struct_varargs.c')
def test_zero_struct_varargs(self):
self.do_core_test('test_zero_struct_varargs.c')
def zzztest_nested_struct_varargs(self):
self.do_core_test('test_nested_struct_varargs.c')
def test_i32_mul_precise(self):
self.do_core_test('test_i32_mul_precise.c')
def test_i16_emcc_intrinsic(self):
# needs to flush stdio streams
self.set_setting('EXIT_RUNTIME')
self.do_core_test('test_i16_emcc_intrinsic.c')
def test_double_i64_conversion(self):
self.do_core_test('test_double_i64_conversion.c')
def test_float32_precise(self):
self.do_core_test('test_float32_precise.c')
def test_negative_zero(self):
self.do_core_test('test_negative_zero.c')
def test_literal_negative_zero(self):
self.do_core_test('test_literal_negative_zero.c')
@also_with_standalone_wasm()
def test_bswap64(self):
self.do_core_test('test_bswap64.cpp')
def test_sha1(self):
self.do_runf(test_file('sha1.c'), 'SHA1=15dd99a1991e0b3826fede3deffc1feba42278e6')
def test_wasm32_unknown_emscripten(self):
# No other configuration is supported, so always run this.
self.do_runf(test_file('wasm32-unknown-emscripten.c'), '')
def test_cube2md5(self):
self.emcc_args += ['--embed-file', 'cube2md5.txt']
shutil.copyfile(test_file('cube2md5.txt'), 'cube2md5.txt')
self.do_run_from_file(test_file('cube2md5.cpp'), test_file('cube2md5.ok'), assert_returncode=NON_ZERO)
@also_with_standalone_wasm(wasm2c=True)
@needs_make('make')
def test_cube2hash(self):
# A good test of i64 math
self.do_run('// empty file', 'Usage: hashstring <seed>',
libraries=self.get_library('third_party/cube2hash', ['libcube2hash.a'], configure=None),
includes=[test_file('third_party/cube2hash')], assert_returncode=NON_ZERO)
for text, output in [('fleefl', '892BDB6FD3F62E863D63DA55851700FDE3ACF30204798CE9'),
('fleefl2', 'AA2CC5F96FC9D540CA24FDAF1F71E2942753DB83E8A81B61'),
('64bitisslow', '64D8470573635EC354FEE7B7F87C566FCAF1EFB491041670')]:
self.do_run('src.js', 'hash value: ' + output, args=[text], no_build=True)
def test_unaligned(self):
self.skipTest('LLVM marks the reads of s as fully aligned, making this test invalid')
src = r'''
#include <stdio.h>
struct S {
double x;
int y;
};
int main() {
// the 64-bit value here will not be 8-byte aligned
S s0[3] = { {0x12a751f430142, 22}, {0x17a5c85bad144, 98}, {1, 1}};
char buffer[10*sizeof(S)];
int b = int(buffer);
S *s = (S*)(b + 4-b%8);
s[0] = s0[0];
s[1] = s0[1];
s[2] = s0[2];
printf("*%d : %d : %d\n", sizeof(S), ((unsigned int)&s[0]) % 8 != ((unsigned int)&s[1]) % 8,
((unsigned int)&s[1]) - ((unsigned int)&s[0]));
s[0].x++;
s[0].y++;
s[1].x++;
s[1].y++;
printf("%.1f,%d,%.1f,%d\n", s[0].x, s[0].y, s[1].x, s[1].y);
return 0;
}
'''
# TODO: A version of this with int64s as well
self.do_run(src, '*12 : 1 : 12\n328157500735811.0,23,416012775903557.0,99\n')
return # TODO: continue to the next part here
# Test for undefined behavior in C. This is not legitimate code, but does exist
src = r'''
#include <stdio.h>
int main()
{
int x[10];
char *p = (char*)&x[0];
p++;
short *q = (short*)p;
*q = 300;
printf("*%d:%ld*\n", *q, ((long)q)%2);
int *r = (int*)p;
*r = 515559;
printf("*%d*\n", *r);
long long *t = (long long*)p;
*t = 42949672960;
printf("*%lld*\n", *t);
return 0;
}
'''
try:
self.do_run(src, '*300:1*\n*515559*\n*42949672960*\n')
except Exception as e:
assert 'must be aligned' in str(e), e # expected to fail without emulation
def test_align64(self):
src = r'''
#include <stdio.h>
// inspired by poppler
enum Type {
A = 10,
B = 20
};
struct Object {
Type type;
union {
int intg;
double real;
char *name;
};
};
struct Principal {
double x;
Object a;
double y;
};
int main(int argc, char **argv)
{
int base = argc-1;
Object *o = NULL;
printf("%zu,%zu\n", sizeof(Object), sizeof(Principal));
printf("%ld,%ld,%ld,%ld\n", (long)&o[base].type, (long)&o[base].intg, (long)&o[base].real, (long)&o[base].name);
printf("%ld,%ld,%ld,%ld\n", (long)&o[base+1].type, (long)&o[base+1].intg, (long)&o[base+1].real, (long)&o[base+1].name);
Principal p, q;
p.x = p.y = q.x = q.y = 0;
p.a.type = A;
p.a.real = 123.456;
*(&q.a) = p.a;
printf("%.2f,%d,%.2f,%.2f : %.2f,%d,%.2f,%.2f\n", p.x, p.a.type, p.a.real, p.y, q.x, q.a.type, q.a.real, q.y);
return 0;
}
'''
self.do_run(src, '''16,32
0,8,8,8
16,24,24,24
0.00,10,123.46,0.00 : 0.00,10,123.46,0.00
''')
@no_asan('asan errors on corner cases we check')
def test_aligned_alloc(self):
self.do_runf(test_file('test_aligned_alloc.c'), '',
emcc_args=['-Wno-non-power-of-two-alignment'])
def test_unsigned(self):
src = '''
#include <stdio.h>
const signed char cvals[2] = { -1, -2 }; // compiler can store this is a string, so -1 becomes \\FF, and needs re-signing
int main()
{
{
unsigned char x = 200;
printf("*%d*\\n", x);
unsigned char y = -22;
printf("*%d*\\n", y);
}
int varey = 100;
unsigned int MAXEY = -1, MAXEY2 = -77;
printf("*%u,%d,%u*\\n", MAXEY, varey >= MAXEY, MAXEY2); // 100 >= -1? not in unsigned!
int y = cvals[0];
printf("*%d,%d,%d,%d*\\n", cvals[0], cvals[0] < 0, y, y < 0);
y = cvals[1];
printf("*%d,%d,%d,%d*\\n", cvals[1], cvals[1] < 0, y, y < 0);
// zext issue - see mathop in jsifier
unsigned char x8 = -10;
unsigned long hold = 0;
hold += x8;
int y32 = hold+50;
printf("*%lu,%d*\\n", hold, y32);
// Comparisons
x8 = 0;
for (int i = 0; i < 254; i++) x8++; // make it an actual 254 in JS - not a -2
printf("*%d,%d*\\n", x8+1 == 0xff, x8+1 != 0xff); // 0xff may be '-1' in the bitcode
return 0;
}
'''
self.do_run(src, '*4294967295,0,4294967219*\n*-1,1,-1,1*\n*-2,1,-2,1*\n*246,296*\n*1,0*')
self.emcc_args.append('-Wno-constant-conversion')
src = '''
#include <stdio.h>
int main()
{
{
unsigned char x;
unsigned char *y = &x;
*y = -1;
printf("*%d*\\n", x);
}
{
unsigned short x;
unsigned short *y = &x;
*y = -1;
printf("*%d*\\n", x);
}
/*{ // This case is not checked. The hint for unsignedness is just the %u in printf, and we do not analyze that
unsigned int x;
unsigned int *y = &x;
*y = -1;
printf("*%u*\\n", x);
}*/
{
char x;
char *y = &x;
*y = 255;
printf("*%d*\\n", x);
}
{
char x;
char *y = &x;
*y = 65535;
printf("*%d*\\n", x);
}
{
char x;
char *y = &x;
*y = 0xffffffff;
printf("*%d*\\n", x);
}
return 0;
}
'''
self.do_run(src, '*255*\n*65535*\n*-1*\n*-1*\n*-1*')
def test_bitfields(self):
self.do_core_test('test_bitfields.c')
def test_floatvars(self):
self.do_core_test('test_floatvars.cpp')
def test_closebitcasts(self):
self.do_core_test('closebitcasts.c')
def test_fast_math(self):
self.emcc_args += ['-ffast-math']
self.do_core_test('test_fast_math.c', args=['5', '6', '8'])
def test_zerodiv(self):
self.do_core_test('test_zerodiv.c')
def test_zero_multiplication(self):
# needs to flush stdio streams
self.set_setting('EXIT_RUNTIME')
self.do_core_test('test_zero_multiplication.c')
def test_isnan(self):
self.do_core_test('test_isnan.c')
def test_globaldoubles(self):
self.do_core_test('test_globaldoubles.c')
def test_math(self):
self.do_core_test('test_math.c')
def test_erf(self):
self.do_core_test('test_erf.c')
def test_math_hyperbolic(self):
self.do_core_test('test_math_hyperbolic.c')
def test_math_lgamma(self):
self.do_run_in_out_file_test('math/lgamma.c', assert_returncode=NON_ZERO)
def test_math_fmodf(self):
self.do_run_in_out_file_test('math/fmodf.c')
def test_frexp(self):
self.do_core_test('test_frexp.c')
def test_rounding(self):
# needs to flush stdio streams
self.set_setting('EXIT_RUNTIME')
self.do_core_test('test_rounding.c')
def test_fcvt(self):
self.do_core_test('test_fcvt.cpp')
def test_llrint(self):
self.do_core_test('test_llrint.c')
def test_getgep(self):
# Generated code includes getelementptr (getelementptr, 0, 1), i.e., GEP as the first param to GEP
self.do_core_test('test_getgep.c')
  def test_multiply_defined_symbols(self):
    # Two archives each provide a definition of f(); the linker must take
    # the definition from the archive listed first (liba.a), so main
    # prints 1, not 2.
    create_file('a1.c', 'int f() { return 1; }')
    create_file('a2.c', 'void x() {}')
    create_file('b1.c', 'int f() { return 2; }')
    create_file('b2.c', 'void y() {}')
    create_file('main.c', r'''
      #include <stdio.h>
      int f();
      int main() {
        printf("result: %d\n", f());
        return 0;
      }
    ''')
    self.emcc('a1.c', ['-c'])
    self.emcc('a2.c', ['-c'])
    self.emcc('b1.c', ['-c'])
    self.emcc('b2.c', ['-c'])
    self.emcc('main.c', ['-c'])
    building.emar('cr', 'liba.a', ['a1.c.o', 'a2.c.o'])
    building.emar('cr', 'libb.a', ['b1.c.o', 'b2.c.o'])
    # link objects + archives into one relocatable object, then to JS
    building.link_to_object(['main.c.o', 'liba.a', 'libb.a'], 'all.o')
    self.emcc('all.o', self.get_emcc_args(), 'all.js')
    self.do_run('all.js', 'result: 1', no_build=True)
def test_if(self):
self.do_core_test('test_if.c')
def test_if_else(self):
self.do_core_test('test_if_else.c')
def test_loop(self):
self.do_core_test('test_loop.c')
def test_stack(self):
self.set_setting('INLINING_LIMIT')
# some extra coverage in all test suites for stack checks
self.set_setting('STACK_OVERFLOW_CHECK', 2)
self.do_core_test('test_stack.c')
def test_stack_align(self):
src = test_file('core/test_stack_align.cpp')
def test():
self.do_runf(src, ['''align 4: 0
align 8: 0
align 16: 0
align 32: 0
base align: 0, 0, 0, 0'''])
test()
@no_asan('stack size is too low for asan to work properly')
def test_stack_placement(self):
self.set_setting('TOTAL_STACK', 1024)
self.do_core_test('test_stack_placement.c')
self.set_setting('GLOBAL_BASE', 102400)
self.do_core_test('test_stack_placement.c')
@no_asan('asan does not support main modules')
@no_wasm2js('MAIN_MODULE support')
def test_stack_placement_pic(self):
self.set_setting('TOTAL_STACK', 1024)
self.set_setting('MAIN_MODULE')
self.do_core_test('test_stack_placement.c')
self.set_setting('GLOBAL_BASE', 102400)
self.do_core_test('test_stack_placement.c')
def test_strings(self):
self.do_core_test('test_strings.c', args=['wowie', 'too', '74'])
def test_strcmp_uni(self):
self.do_core_test('test_strcmp_uni.c')
def test_strndup(self):
self.do_core_test('test_strndup.c')
def test_errar(self):
self.do_core_test('test_errar.c')
def test_mainenv(self):
self.do_core_test('test_mainenv.c')
def test_funcs(self):
self.do_core_test('test_funcs.c')
def test_structs(self):
self.do_core_test('test_structs.c')
gen_struct_src = '''
#include <stdio.h>
#include <stdlib.h>
#include "emscripten.h"
struct S
{
int x, y;
};
int main()
{
S* a = {{gen_struct}};
a->x = 51; a->y = 62;
printf("*%d,%d*\\n", a->x, a->y);
{{del_struct}}(a);
return 0;
}
'''
def test_mallocstruct(self):
self.do_run(self.gen_struct_src.replace('{{gen_struct}}', '(S*)malloc(sizeof(S))').replace('{{del_struct}}', 'free'), '*51,62*')
@no_asan('ASan does not support custom memory allocators')
@no_lsan('LSan does not support custom memory allocators')
@parameterized({
'normal': [],
'memvalidate': ['-DEMMALLOC_MEMVALIDATE'],
'memvalidate_verbose': ['-DEMMALLOC_MEMVALIDATE', '-DEMMALLOC_VERBOSE', '-DRANDOM_ITERS=130'],
})
def test_emmalloc(self, *args):
# in newer clang+llvm, the internal calls to malloc in emmalloc may be optimized under
# the assumption that they are external, so like in system_libs.py where we build
# malloc, we need to disable builtin here too
self.set_setting('MALLOC', 'none')
self.emcc_args += ['-fno-builtin'] + list(args)
self.do_run(read_file(path_from_root('system/lib/emmalloc.c')) +
read_file(path_from_root('system/lib/sbrk.c')) +
read_file(test_file('core/test_emmalloc.c')),
read_file(test_file('core/test_emmalloc.out')), force_c=True)
@no_asan('ASan does not support custom memory allocators')
@no_lsan('LSan does not support custom memory allocators')
def test_emmalloc_usable_size(self, *args):
self.set_setting('MALLOC', 'emmalloc')
self.emcc_args += list(args)
self.do_core_test('test_malloc_usable_size.c')
@no_optimize('output is sensitive to optimization flags, so only test unoptimized builds')
@no_asan('ASan does not support custom memory allocators')
@no_lsan('LSan does not support custom memory allocators')
def test_emmalloc_memory_statistics(self, *args):
self.set_setting('MALLOC', 'emmalloc')
self.emcc_args += ['-s', 'INITIAL_MEMORY=128MB', '-g'] + list(args)
self.do_core_test('test_emmalloc_memory_statistics.cpp')
@no_optimize('output is sensitive to optimization flags, so only test unoptimized builds')
@no_asan('ASan does not support custom memory allocators')
@no_lsan('LSan does not support custom memory allocators')
def test_emmalloc_trim(self, *args):
self.set_setting('MALLOC', 'emmalloc')
self.emcc_args += ['-s', 'INITIAL_MEMORY=128MB', '-s', 'ALLOW_MEMORY_GROWTH', '-s', 'MAXIMUM_MEMORY=2147418112'] + list(args)
self.do_core_test('test_emmalloc_trim.cpp')
# Test case against https://github.com/emscripten-core/emscripten/issues/10363
def test_emmalloc_memalign_corruption(self, *args):
self.set_setting('MALLOC', 'emmalloc')
self.do_core_test('emmalloc_memalign_corruption.cpp')
def test_newstruct(self):
self.do_run(self.gen_struct_src.replace('{{gen_struct}}', 'new S').replace('{{del_struct}}', 'delete'), '*51,62*')
def test_addr_of_stacked(self):
self.do_core_test('test_addr_of_stacked.c')
def test_globals(self):
self.do_core_test('test_globals.c')
def test_linked_list(self):
self.do_core_test('test_linked_list.c')
def test_sup(self):
self.do_run_in_out_file_test(test_file('core/test_sup.cpp'))
@also_with_standalone_wasm()
def test_assert(self):
self.do_core_test('test_assert.cpp', assert_returncode=NON_ZERO)
def test_wcslen(self):
self.do_core_test('test_wcslen.c')
def test_regex(self):
self.do_core_test('test_regex.c')
@also_with_standalone_wasm(wasm2c=True, impure=True)
def test_longjmp(self):
self.do_core_test('test_longjmp.c')
def test_longjmp2(self):
self.do_core_test('test_longjmp2.c')
@needs_dylink
def test_longjmp2_main_module(self):
# Test for binaryen regression:
# https://github.com/WebAssembly/binaryen/issues/2180
self.set_setting('MAIN_MODULE')
self.do_core_test('test_longjmp2.c')
def test_longjmp3(self):
self.do_core_test('test_longjmp3.c')
def test_longjmp4(self):
self.do_core_test('test_longjmp4.c')
def test_longjmp_funcptr(self):
self.do_core_test('test_longjmp_funcptr.c')
def test_longjmp_repeat(self):
self.do_core_test('test_longjmp_repeat.c')
def test_longjmp_stacked(self):
self.do_core_test('test_longjmp_stacked.c', assert_returncode=NON_ZERO)
def test_longjmp_exc(self):
self.do_core_test('test_longjmp_exc.c', assert_returncode=NON_ZERO)
def test_longjmp_throw(self):
for disable_throw in [0, 1]:
print(disable_throw)
self.set_setting('DISABLE_EXCEPTION_CATCHING', disable_throw)
self.do_core_test('test_longjmp_throw.cpp')
def test_longjmp_unwind(self):
self.do_core_test('test_longjmp_unwind.c', assert_returncode=NON_ZERO)
def test_longjmp_i64(self):
self.emcc_args += ['-g']
self.do_core_test('test_longjmp_i64.c', assert_returncode=NON_ZERO)
def test_siglongjmp(self):
self.do_core_test('test_siglongjmp.c')
  def test_setjmp_many(self):
    """Stress setjmp with many jmp_buf uses in one function.

    The longjmp branch is intentionally unreachable (argc is never 1131),
    so every setjmp returns 0; the expected output is one '0' line per
    iteration.
    """
    src = r'''
      #include <stdio.h>
      #include <setjmp.h>
      int main(int argc, char** argv) {
        jmp_buf buf;
        for (int i = 0; i < NUM; i++) printf("%d\n", setjmp(buf));
        if (argc-- == 1131) longjmp(buf, 11);
        return 0;
      }
    '''
    # NUM is substituted textually into the C source for each size.
    for num in [1, 5, 20, 1000]:
      print('NUM=%d' % num)
      self.do_run(src.replace('NUM', str(num)), '0\n' * num)
def test_setjmp_many_2(self):
src = r'''
#include <setjmp.h>
#include <stdio.h>
jmp_buf env;
void luaWork(int d){
int x;
printf("d is at %d\n", d);
longjmp(env, 1);
}
int main()
{
const int ITERATIONS=25;
for(int i = 0; i < ITERATIONS; i++){
if(!setjmp(env)){
luaWork(i);
}
}
return 0;
}
'''
self.do_run(src, r'''d is at 24''')
def test_setjmp_noleak(self):
self.do_runf(test_file('core/test_setjmp_noleak.c'), 'ok.')
  @with_both_exception_handling
  def test_exceptions(self):
    """Run the basic exception-catching test under both SUPPORT_LONGJMP
    modes, expecting the 'caught' output each time."""
    self.set_setting('EXCEPTION_DEBUG')
    self.maybe_closure()
    for support_longjmp in [0, 1]:
      self.set_setting('SUPPORT_LONGJMP', support_longjmp)
      self.do_run_from_file(test_file('core/test_exceptions.cpp'), test_file('core/test_exceptions_caught.out'))
def test_exceptions_off(self):
for support_longjmp in [0, 1]:
self.set_setting('DISABLE_EXCEPTION_CATCHING')
self.do_run_from_file(test_file('core/test_exceptions.cpp'), test_file('core/test_exceptions_uncaught.out'), assert_returncode=NON_ZERO)
@no_asan('TODO: ASan support in minimal runtime')
def test_exceptions_minimal_runtime(self):
self.set_setting('EXCEPTION_DEBUG')
self.set_setting('EXIT_RUNTIME')
self.maybe_closure()
self.set_setting('MINIMAL_RUNTIME')
self.emcc_args += ['--pre-js', test_file('minimal_runtime_exit_handling.js')]
for support_longjmp in [0, 1]:
self.set_setting('SUPPORT_LONGJMP', support_longjmp)
self.set_setting('DISABLE_EXCEPTION_CATCHING', 0)
self.do_run_from_file(test_file('core/test_exceptions.cpp'), test_file('core/test_exceptions_caught.out'))
self.set_setting('DISABLE_EXCEPTION_CATCHING')
self.do_run_from_file(test_file('core/test_exceptions.cpp'), test_file('core/test_exceptions_uncaught.out'), assert_returncode=NON_ZERO)
@with_both_exception_handling
def test_exceptions_custom(self):
self.set_setting('EXCEPTION_DEBUG')
# needs to flush stdio streams
self.set_setting('EXIT_RUNTIME')
self.maybe_closure()
src = '''
#include <iostream>
class MyException
{
public:
MyException(){ std::cout << "Construct..."; }
MyException( const MyException & ) { std::cout << "Copy..."; }
~MyException(){ std::cout << "Destruct..."; }
};
int function()
{
std::cout << "Throw...";
throw MyException();
}
int function2()
{
return function();
}
int main()
{
try
{
function2();
}
catch (MyException & e)
{
std::cout << "Caught...";
}
try
{
function2();
}
catch (MyException e)
{
std::cout << "Caught...";
}
return 0;
}
'''
self.do_run(src, 'Throw...Construct...Caught...Destruct...Throw...Construct...Copy...Caught...Destruct...Destruct...')
@with_both_exception_handling
def test_exceptions_2(self):
for safe in [0, 1]:
print(safe)
if safe and '-fsanitize=address' in self.emcc_args:
# Can't use safe heap with ASan
continue
self.set_setting('SAFE_HEAP', safe)
self.do_core_test('test_exceptions_2.cpp')
@with_both_exception_handling
def test_exceptions_3(self):
src = r'''
#include <iostream>
#include <stdexcept>
int main(int argc, char **argv)
{
if (argc != 2) {
std::cout << "need an arg" << std::endl;
return 1;
}
int arg = argv[1][0] - '0';
try {
if (arg == 0) throw "a c string";
if (arg == 1) throw std::exception();
if (arg == 2) throw std::runtime_error("Hello");
} catch(const char * ex) {
std::cout << "Caught C string: " << ex << std::endl;
} catch(const std::exception &ex) {
std::cout << "Caught exception: " << ex.what() << std::endl;
} catch(...) {
std::cout << "Caught something else" << std::endl;
}
std::cout << "Done.\n";
}
'''
print('0')
self.do_run(src, 'Caught C string: a c string\nDone.', args=['0'])
print('1')
self.do_run('src.js', 'Caught exception: std::exception\nDone.', args=['1'], no_build=True)
print('2')
self.do_run('src.js', 'Caught exception: Hello\nDone.', args=['2'], no_build=True)
  def test_exceptions_allowed(self):
    """Build with an EXCEPTION_CATCHING_ALLOWED list and compare output
    sizes across configurations.

    Checks that an empty allow list produces exactly the same size as
    fully disabling the setting, and that allowing a real function costs
    measurably more code size than allowing none.
    """
    self.set_setting('EXCEPTION_CATCHING_ALLOWED', ["_Z12somefunctionv"])
    # otherwise it is inlined and not identified
    self.set_setting('INLINING_LIMIT')
    self.do_core_test('test_exceptions_allowed.cpp')
    # baseline size: JS plus (for wasm builds) the wasm binary
    size = os.path.getsize('test_exceptions_allowed.js')
    if self.is_wasm():
      size += os.path.getsize('test_exceptions_allowed.wasm')
    shutil.copyfile('test_exceptions_allowed.js', 'orig.js')
    # check that an empty allow list works properly (as in, same as exceptions disabled)
    src = test_file('core/test_exceptions_allowed.cpp')
    empty_output = test_file('core/test_exceptions_allowed_empty.out')
    self.set_setting('EXCEPTION_CATCHING_ALLOWED', [])
    self.do_run_from_file(src, empty_output, assert_returncode=NON_ZERO)
    empty_size = os.path.getsize('test_exceptions_allowed.js')
    if self.is_wasm():
      empty_size += os.path.getsize('test_exceptions_allowed.wasm')
    shutil.copyfile('test_exceptions_allowed.js', 'empty.js')
    # an allow list naming only a nonexistent function should behave like disabled
    self.set_setting('EXCEPTION_CATCHING_ALLOWED', ['fake'])
    self.do_run_from_file(src, empty_output, assert_returncode=NON_ZERO)
    fake_size = os.path.getsize('test_exceptions_allowed.js')
    if self.is_wasm():
      fake_size += os.path.getsize('test_exceptions_allowed.wasm')
    shutil.copyfile('test_exceptions_allowed.js', 'fake.js')
    # final baseline: no allow list at all (setting cleared entirely)
    self.clear_setting('EXCEPTION_CATCHING_ALLOWED')
    self.do_run_from_file(src, empty_output, assert_returncode=NON_ZERO)
    disabled_size = os.path.getsize('test_exceptions_allowed.js')
    if self.is_wasm():
      disabled_size += os.path.getsize('test_exceptions_allowed.wasm')
    shutil.copyfile('test_exceptions_allowed.js', 'disabled.js')
    print('size: %d' % size)
    print('empty_size: %d' % empty_size)
    print('fake_size: %d' % fake_size)
    print('disabled_size: %d' % disabled_size)
    # empty list acts the same as fully disabled
    self.assertEqual(empty_size, disabled_size)
    # big change when we disable exception catching of the function
    self.assertGreater(size - empty_size, 0.01 * size)
    # full disable can remove a little bit more
    self.assertLess(disabled_size, fake_size)
def test_exceptions_allowed_2(self):
self.set_setting('EXCEPTION_CATCHING_ALLOWED', ["main"])
# otherwise it is inlined and not identified
self.set_setting('INLINING_LIMIT')
self.do_core_test('test_exceptions_allowed_2.cpp')
# When 'main' function does not have a signature, its contents will be
# outlined to '__original_main'. Check if we can handle that case.
self.emcc_args += ['-DMAIN_NO_SIGNATURE']
self.do_core_test('test_exceptions_allowed_2.cpp')
def test_exceptions_allowed_uncaught(self):
self.emcc_args += ['-std=c++11']
self.set_setting('EXCEPTION_CATCHING_ALLOWED', ["_Z4testv"])
# otherwise it is inlined and not identified
self.set_setting('INLINING_LIMIT')
self.do_core_test('test_exceptions_allowed_uncaught.cpp')
  def test_exceptions_allowed_misuse(self):
    """Verify the diagnostics emitted when EXCEPTION_CATCHING_ALLOWED is
    combined with the various DISABLE_EXCEPTION_CATCHING modes: =2 and =0
    are deprecated (errors under -Werror here), =1 is mutually exclusive.
    """
    self.set_setting('EXCEPTION_CATCHING_ALLOWED', ['foo'])
    # Test old =2 setting for DISABLE_EXCEPTION_CATCHING
    self.set_setting('DISABLE_EXCEPTION_CATCHING', 2)
    err = self.expect_fail([EMCC, test_file('hello_world.c')] + self.get_emcc_args())
    self.assertContained('error: DISABLE_EXCEPTION_CATCHING=X is no longer needed when specifying EXCEPTION_CATCHING_ALLOWED [-Wdeprecated] [-Werror]', err)
    # =0 should also be a warning
    self.set_setting('DISABLE_EXCEPTION_CATCHING', 0)
    err = self.expect_fail([EMCC, test_file('hello_world.c')] + self.get_emcc_args())
    self.assertContained('error: DISABLE_EXCEPTION_CATCHING=X is no longer needed when specifying EXCEPTION_CATCHING_ALLOWED [-Wdeprecated] [-Werror]', err)
    # =1 should be a hard error
    self.set_setting('DISABLE_EXCEPTION_CATCHING', 1)
    err = self.expect_fail([EMCC, test_file('hello_world.c')] + self.get_emcc_args())
    self.assertContained('error: DISABLE_EXCEPTION_CATCHING and EXCEPTION_CATCHING_ALLOWED are mutually exclusive', err)
    # even setting an empty list should trigger the error
    self.set_setting('EXCEPTION_CATCHING_ALLOWED', [])
    err = self.expect_fail([EMCC, test_file('hello_world.c')] + self.get_emcc_args())
    self.assertContained('error: DISABLE_EXCEPTION_CATCHING and EXCEPTION_CATCHING_ALLOWED are mutually exclusive', err)
@with_both_exception_handling
def test_exceptions_uncaught(self):
# needs to flush stdio streams
self.set_setting('EXIT_RUNTIME')
src = r'''
#include <stdio.h>
#include <exception>
struct X {
~X() {
printf("exception? %s\n", std::uncaught_exception() ? "yes" : "no");
}
};
int main() {
printf("exception? %s\n", std::uncaught_exception() ? "yes" : "no");
try {
X x;
throw 1;
} catch(...) {
printf("exception? %s\n", std::uncaught_exception() ? "yes" : "no");
}
printf("exception? %s\n", std::uncaught_exception() ? "yes" : "no");
return 0;
}
'''
self.do_run(src, 'exception? no\nexception? yes\nexception? no\nexception? no\n')
src = r'''
#include <fstream>
#include <iostream>
int main() {
std::ofstream os("test");
os << std::unitbuf << "foo"; // trigger a call to std::uncaught_exception from
// std::basic_ostream::sentry::~sentry
std::cout << "success";
}
'''
self.do_run(src, 'success')
@with_both_exception_handling
def test_exceptions_uncaught_2(self):
# needs to flush stdio streams
self.set_setting('EXIT_RUNTIME')
src = r'''
#include <iostream>
#include <exception>
int main() {
try {
throw std::exception();
} catch(std::exception) {
try {
throw;
} catch(std::exception) {}
}
if (std::uncaught_exception())
std::cout << "ERROR: uncaught_exception still set.";
else
std::cout << "OK";
}
'''
self.do_run(src, 'OK\n')
@with_both_exception_handling
def test_exceptions_typed(self):
# needs to flush stdio streams
self.set_setting('EXIT_RUNTIME')
self.clear_setting('SAFE_HEAP') # Throwing null will cause an ignorable null pointer access.
self.do_core_test('test_exceptions_typed.cpp')
@with_both_exception_handling
def test_exceptions_virtual_inheritance(self):
self.do_core_test('test_exceptions_virtual_inheritance.cpp')
@with_both_exception_handling
def test_exceptions_convert(self):
self.do_core_test('test_exceptions_convert.cpp')
# TODO Make setjmp-longjmp also use Wasm exception handling
@with_both_exception_handling
def test_exceptions_multi(self):
self.do_core_test('test_exceptions_multi.cpp')
@with_both_exception_handling
def test_exceptions_std(self):
self.clear_setting('SAFE_HEAP')
self.do_core_test('test_exceptions_std.cpp')
@with_both_exception_handling
def test_exceptions_alias(self):
self.do_core_test('test_exceptions_alias.cpp')
@with_both_exception_handling
def test_exceptions_rethrow(self):
self.do_core_test('test_exceptions_rethrow.cpp')
@with_both_exception_handling
def test_exceptions_uncaught_count(self):
self.do_core_test('test_exceptions_uncaught_count.cpp')
@with_both_exception_handling
def test_exceptions_resume(self):
self.set_setting('EXCEPTION_DEBUG')
self.do_core_test('test_exceptions_resume.cpp')
@with_both_exception_handling
def test_exceptions_destroy_virtual(self):
self.do_core_test('test_exceptions_destroy_virtual.cpp')
@with_both_exception_handling
def test_exceptions_refcount(self):
self.do_core_test('test_exceptions_refcount.cpp')
@with_both_exception_handling
def test_exceptions_primary(self):
self.do_core_test('test_exceptions_primary.cpp')
@with_both_exception_handling
def test_exceptions_simplify_cfg(self):
self.do_core_test('test_exceptions_simplify_cfg.cpp')
@with_both_exception_handling
def test_exceptions_libcxx(self):
self.do_core_test('test_exceptions_libcxx.cpp')
@with_both_exception_handling
def test_exceptions_multiple_inherit(self):
self.do_core_test('test_exceptions_multiple_inherit.cpp')
@with_both_exception_handling
def test_exceptions_multiple_inherit_rethrow(self):
self.do_core_test('test_exceptions_multiple_inherit_rethrow.cpp')
@with_both_exception_handling
def test_exceptions_rethrow_missing(self):
create_file('main.cpp', 'int main() { throw; }')
self.do_runf('main.cpp', None, assert_returncode=NON_ZERO)
@with_both_exception_handling
def test_bad_typeid(self):
self.do_run(r'''
// exception example
#include <iostream> // std::cerr
#include <typeinfo> // operator typeid
#include <exception> // std::exception
class Polymorphic {virtual void member(){}};
int main () {
try
{
Polymorphic * pb = 0;
const std::type_info& ti = typeid(*pb); // throws a bad_typeid exception
}
catch (std::exception& e)
{
std::cerr << "exception caught: " << e.what() << '\n';
}
return 0;
}
''', 'exception caught: std::bad_typeid')
def test_iostream_ctors(self):
# iostream stuff must be globally constructed before user global
# constructors, so iostream works in global constructors
self.do_run(r'''
#include <iostream>
struct A {
A() { std::cout << "bug"; }
};
A a;
int main() {
std::cout << "free code" << std::endl;
return 0;
}
''', 'bugfree code')
def test_exceptions_longjmp1(self):
self.set_setting('SUPPORT_LONGJMP')
self.set_setting('DISABLE_EXCEPTION_CATCHING', 0)
self.do_core_test('test_exceptions_longjmp1.cpp')
def test_exceptions_longjmp2(self):
self.set_setting('SUPPORT_LONGJMP')
self.set_setting('DISABLE_EXCEPTION_CATCHING', 0)
self.do_core_test('test_exceptions_longjmp2.cpp')
def test_exceptions_longjmp3(self):
self.set_setting('SUPPORT_LONGJMP')
self.set_setting('DISABLE_EXCEPTION_CATCHING', 0)
self.do_core_test('test_exceptions_longjmp3.cpp')
  # Marked as impure since the WASI reactor modules (modules without main)
  # are not yet supported by the wasm engines we test against.
  @also_with_standalone_wasm(impure=True)
  def test_ctors_no_main(self):
    """Build with --no-entry (no main) and check global constructors run."""
    self.emcc_args.append('--no-entry')
    self.do_core_test('test_ctors_no_main.cpp')
def test_class(self):
self.do_core_test('test_class.cpp')
def test_inherit(self):
self.do_core_test('test_inherit.cpp')
def test_isdigit_l(self):
# needs to flush stdio streams
self.set_setting('EXIT_RUNTIME')
self.do_core_test('test_isdigit_l.cpp')
def test_iswdigit(self):
# needs to flush stdio streams
self.set_setting('EXIT_RUNTIME')
self.do_core_test('test_iswdigit.cpp')
def test_polymorph(self):
self.do_core_test('test_polymorph.cpp')
def test_complex(self):
self.do_core_test('test_complex.c')
def test_float_builtins(self):
# tests wasm_libc_rt
self.do_core_test('test_float_builtins.c')
  @no_asan('SAFE_HEAP cannot be used with ASan')
  def test_segfault(self):
    """Check SAFE_HEAP behavior on a virtual call through two pointers:
    a null pointer (must report a segmentation fault and exit non-zero)
    and a valid object (must run normally)."""
    self.set_setting('SAFE_HEAP')
    # %s below is substituted with each pointer expression in turn.
    for addr in ['get_null()', 'new D2()']:
      print(addr)
      src = r'''
        #include <stdio.h>
        #include <emscripten.h>

        struct Classey {
          virtual void doIt() = 0;
        };

        struct D1 : Classey {
          virtual void doIt() { printf("fleefl\n"); }
        };

        struct D2 : Classey {
          virtual void doIt() { printf("marfoosh\n"); }
        };

        EM_JS(Classey*, get_null, (), {
          return 0;
        });

        int main(int argc, char **argv)
        {
          Classey *p = argc == 100 ? new D1() : (Classey*)%s;
          p->doIt();
          return 0;
        }
      ''' % addr
    # the null-pointer case must trap under SAFE_HEAP; the valid object
    # must dispatch to D2::doIt
      if 'get_null' in addr:
        self.do_run(src, 'segmentation fault', assert_returncode=NON_ZERO)
      else:
        self.do_run(src, 'marfoosh')
def test_dynamic_cast(self):
self.do_core_test('test_dynamic_cast.cpp')
def test_dynamic_cast_b(self):
self.do_core_test('test_dynamic_cast_b.cpp')
def test_dynamic_cast_2(self):
self.do_core_test('test_dynamic_cast_2.cpp')
def test_funcptr(self):
self.do_core_test('test_funcptr.c')
def test_mathfuncptr(self):
self.do_core_test('test_mathfuncptr.c')
def test_funcptrfunc(self):
self.do_core_test('test_funcptrfunc.c')
def test_funcptr_namecollide(self):
self.do_core_test('test_funcptr_namecollide.c')
def test_emptyclass(self):
self.do_core_test('test_emptyclass.cpp')
def test_alloca(self):
self.do_core_test('test_alloca.c')
def test_rename(self):
self.do_run_in_out_file_test('stdio/test_rename.c')
def test_remove(self):
# needs to flush stdio streams
self.set_setting('EXIT_RUNTIME')
self.do_run_in_out_file_test('cstdio/test_remove.cpp')
def test_alloca_stack(self):
self.do_core_test('test_alloca_stack.c')
def test_stack_byval(self):
self.do_core_test('test_stack_byval.cpp')
def test_stack_varargs(self):
# in node.js we allocate argv[0] on the stack, which means the length
# of the program directory influences how much stack we need, and so
# long random temp dir names can lead to random failures. The stack
# size was increased here to avoid that.
self.set_setting('INLINING_LIMIT')
self.set_setting('TOTAL_STACK', 8 * 1024)
self.do_core_test('test_stack_varargs.c')
def test_stack_varargs2(self):
# in node.js we allocate argv[0] on the stack, which means the length
# of the program directory influences how much stack we need, and so
# long random temp dir names can lead to random failures. The stack
# size was increased here to avoid that.
self.set_setting('TOTAL_STACK', 8 * 1024)
src = r'''
#include <stdio.h>
#include <stdlib.h>
void func(int i) {
}
int main() {
for (int i = 0; i < 7000; i++) {
printf("%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d\n",
i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i);
}
printf("ok!\n");
return 0;
}
'''
self.do_run(src, 'ok!')
print('with return')
src = r'''
#include <stdio.h>
#include <stdlib.h>
int main() {
for (int i = 0; i < 7000; i++) {
int j = printf("%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d",
i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i);
printf(" (%d)\n", j);
}
printf("ok!\n");
return 0;
}
'''
self.do_run(src, 'ok!')
print('with definitely no return')
src = r'''
#include <stdio.h>
#include <stdlib.h>
#include <stdarg.h>
void vary(const char *s, ...)
{
va_list v;
va_start(v, s);
char d[20];
vsnprintf(d, 20, s, v);
puts(d);
// Try it with copying
va_list tempva;
va_copy(tempva, v);
vsnprintf(d, 20, s, tempva);
puts(d);
va_end(v);
}
int main() {
for (int i = 0; i < 7000; i++) {
int j = printf("%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d",
i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i);
printf(" (%d)\n", j);
vary("*cheez: %d+%d*", 99, 24);
vary("*albeit*");
}
printf("ok!\n");
return 0;
}
'''
self.do_run(src, 'ok!')
def test_stack_void(self):
self.emcc_args.append('-Wno-format-extra-args')
self.set_setting('INLINING_LIMIT')
self.do_core_test('test_stack_void.c')
def test_life(self):
self.emcc_args += ['-std=c99']
self.do_run_in_out_file_test('life.c', args=['2'])
def test_array2(self):
self.do_core_test('test_array2.c')
def test_array2b(self):
self.do_core_test('test_array2b.c')
def test_constglobalstructs(self):
self.do_core_test('test_constglobalstructs.c')
def test_conststructs(self):
self.do_core_test('test_conststructs.c')
def test_bigarray(self):
self.do_core_test('test_bigarray.c')
def test_mod_globalstruct(self):
self.do_core_test('test_mod_globalstruct.c')
def test_sizeof(self):
self.do_core_test('test_sizeof.cpp')
def test_llvm_used(self):
self.do_core_test('test_llvm_used.c')
@no_asan('SAFE_HEAP cannot be used with ASan')
def test_set_align(self):
self.set_setting('SAFE_HEAP')
self.do_core_test('test_set_align.c')
def test_emscripten_api(self):
self.set_setting('EXPORTED_FUNCTIONS', ['_main', '_save_me_aimee'])
self.do_core_test('test_emscripten_api.cpp')
if '-fsanitize=address' not in self.emcc_args:
# test EXPORT_ALL (this is not compatible with asan, which doesn't
# support dynamic linking at all or the LINKING flag)
self.set_setting('EXPORTED_FUNCTIONS', [])
self.set_setting('EXPORT_ALL')
self.set_setting('LINKABLE')
self.do_core_test('test_emscripten_api.cpp')
def test_emscripten_run_script_string_int(self):
src = r'''
#include <stdio.h>
#include <emscripten.h>
int main() {
const char *str = emscripten_run_script_string("1+1");
printf("got string: %s\n", str);
return 0;
}
'''
self.do_run(src, '''got string: 2''')
def test_emscripten_run_script_string_utf8(self):
src = r'''
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <emscripten.h>
int main() {
const char *str = emscripten_run_script_string("'\\u2603 \\u2603 \\u2603 Hello!'");
printf("length of returned string: %zu. Position of substring 'Hello': %zu\n", strlen(str), strstr(str, "Hello")-str);
return 0;
}
'''
self.do_run(src, '''length of returned string: 18. Position of substring 'Hello': 12''')
def test_emscripten_run_script_string_null(self):
src = r'''
#include <stdio.h>
#include <emscripten.h>
int main() {
const char *str = emscripten_run_script_string("void(0)");
if (str) {
printf("got string: %s\n", str);
} else {
puts("got null");
}
return 0;
}
'''
self.do_run(src, 'got null')
def test_emscripten_get_now(self):
self.banned_js_engines = [config.V8_ENGINE] # timer limitations in v8 shell
# needs to flush stdio streams
self.set_setting('EXIT_RUNTIME')
self.maybe_closure()
self.do_runf(test_file('emscripten_get_now.cpp'), 'Timer resolution is good')
  def test_emscripten_get_compiler_setting(self):
    """Without RETAIN_COMPILER_SETTINGS the API must abort with a helpful
    message (when ASSERTIONS is on); with it, settings are queryable and
    the expected output (with 'waka' replaced by the real version) matches.
    """
    src = test_file('core/emscripten_get_compiler_setting.c')
    output = shared.replace_suffix(src, '.out')
    # with assertions, a nice message is shown
    self.set_setting('ASSERTIONS')
    self.do_runf(src, 'You must build with -s RETAIN_COMPILER_SETTINGS=1', assert_returncode=NON_ZERO)
    self.clear_setting('ASSERTIONS')
    self.set_setting('RETAIN_COMPILER_SETTINGS')
    # the .out file uses the placeholder 'waka' for the version string
    self.do_runf(src, read_file(output).replace('waka', shared.EMSCRIPTEN_VERSION))
def test_emscripten_has_asyncify(self):
src = r'''
#include <stdio.h>
#include <emscripten.h>
int main() {
printf("%d\n", emscripten_has_asyncify());
return 0;
}
'''
self.set_setting('ASYNCIFY', 0)
self.do_run(src, '0')
self.set_setting('ASYNCIFY')
self.do_run(src, '1')
# TODO: test only worked in non-fastcomp
def test_inlinejs(self):
self.skipTest('non-fastcomp is deprecated and fails in 3.5') # only supports EM_ASM
self.do_core_test('test_inlinejs.c')
if self.emcc_args == []:
# opts will eliminate the comments
out = read_file('src.js')
for i in range(1, 5):
assert ('comment%d' % i) in out
# TODO: test only worked in non-fastcomp
def test_inlinejs2(self):
self.skipTest('non-fastcomp is deprecated and fails in 3.5') # only supports EM_ASM
self.do_core_test('test_inlinejs2.c')
def test_inlinejs3(self):
if self.is_wasm():
self.skipTest('wasm requires a proper asm module')
src = test_file('core/test_inlinejs3.c')
output = shared.unsuffixed(src) + '.out'
self.do_core_test('test_inlinejs3.c')
print('no debugger, check validation')
src = read_file(src).replace('emscripten_debugger();', '')
self.do_run(src, read_file(output))
def test_inlinejs4(self):
self.do_run(r'''
#include <emscripten.h>
#define TO_STRING_INNER(x) #x
#define TO_STRING(x) TO_STRING_INNER(x)
#define assert_msg(msg, file, line) EM_ASM( throw 'Assert (' + msg + ') failed in ' + file + ':' + line + '!'; )
#define assert(expr) { \
if (!(expr)) { \
assert_msg(#expr, TO_STRING(__FILE__), TO_STRING(__LINE__)); \
} \
}
int main(int argc, char **argv) {
assert(argc != 17);
assert(false);
return 0;
}
''', 'false', assert_returncode=NON_ZERO)
def test_em_asm(self):
self.do_core_test('test_em_asm.cpp')
self.emcc_args.append('-std=gnu89')
self.do_core_test('test_em_asm.cpp', force_c=True)
# Tests various different ways to invoke the EM_ASM(), EM_ASM_INT()
# and EM_ASM_DOUBLE() macros.
def test_em_asm_2(self):
self.do_core_test('test_em_asm_2.cpp')
self.emcc_args.append('-std=gnu89')
self.do_core_test('test_em_asm_2.cpp', force_c=True)
# Tests various different ways to invoke the MAIN_THREAD_EM_ASM(), MAIN_THREAD_EM_ASM_INT() and MAIN_THREAD_EM_ASM_DOUBLE() macros.
# This test is identical to test_em_asm_2, just search-replaces EM_ASM to MAIN_THREAD_EM_ASM on the test file. That way if new
# test cases are added to test_em_asm_2.cpp for EM_ASM, they will also get tested in MAIN_THREAD_EM_ASM form.
@no_asan('Cannot use ASan: test depends exactly on heap size')
def test_main_thread_em_asm(self):
src = read_file(test_file('core/test_em_asm_2.cpp'))
create_file('src.cpp', src.replace('EM_ASM', 'MAIN_THREAD_EM_ASM'))
expected_result = read_file(test_file('core/test_em_asm_2.out'))
create_file('result.out', expected_result.replace('EM_ASM', 'MAIN_THREAD_EM_ASM'))
self.do_run_from_file('src.cpp', 'result.out')
self.do_run_from_file('src.cpp', 'result.out', force_c=True)
def test_main_thread_async_em_asm(self):
self.do_core_test('test_main_thread_async_em_asm.cpp')
self.do_core_test('test_main_thread_async_em_asm.cpp', force_c=True)
# Tests MAIN_THREAD_EM_ASM_INT() function call with different signatures.
def test_main_thread_em_asm_signatures(self):
self.do_core_test('test_em_asm_signatures.cpp', assert_returncode=NON_ZERO)
def test_em_asm_unicode(self):
self.do_core_test('test_em_asm_unicode.cpp')
self.do_core_test('test_em_asm_unicode.cpp', force_c=True)
def test_em_asm_types(self):
self.do_core_test('test_em_asm_types.cpp')
self.do_core_test('test_em_asm_types.cpp', force_c=True)
def test_em_asm_unused_arguments(self):
self.do_core_test('test_em_asm_unused_arguments.cpp')
# Verify that EM_ASM macros support getting called with multiple arities.
# Maybe tests will later be joined into larger compilation units?
# Then this must still be compiled separately from other code using EM_ASM
# macros with arities 1-3. Otherwise this may incorrectly report a success.
def test_em_asm_parameter_pack(self):
self.do_core_test('test_em_asm_parameter_pack.cpp')
def test_em_asm_arguments_side_effects(self):
self.do_core_test('test_em_asm_arguments_side_effects.cpp')
self.do_core_test('test_em_asm_arguments_side_effects.cpp', force_c=True)
def test_em_asm_direct(self):
self.do_core_test('test_em_asm_direct.c')
  @parameterized({
    '': ([], False),
    'c': ([], True),
    'linked': (['-s', 'MAIN_MODULE'], False),
    'linked_c': (['-s', 'MAIN_MODULE'], True),
  })
  def test_em_js(self, args, force_c):
    """Run the EM_JS test as C++ and C, with and without MAIN_MODULE.

    args: extra emcc flags; force_c: compile the source as C.
    """
    if 'MAIN_MODULE' in args and not self.is_wasm():
      self.skipTest('main module support for non-wasm')
    if '-fsanitize=address' in self.emcc_args:
      self.skipTest('no dynamic library support in asan yet')
    self.emcc_args += args + ['-s', 'EXPORTED_FUNCTIONS=_main,_malloc']
    self.do_core_test('test_em_js.cpp', force_c=force_c)
    # the EM_JS body text should survive into the emitted JS
    self.assertContained("no args returning int", read_file('test_em_js.js'))
def test_runtime_stacksave(self):
self.do_runf(test_file('core/test_runtime_stacksave.c'), 'success')
# Tests that -s MINIMAL_RUNTIME=1 builds can utilize -s ALLOW_MEMORY_GROWTH=1 option.
def test_minimal_runtime_memorygrowth(self):
if self.has_changed_setting('ALLOW_MEMORY_GROWTH'):
self.skipTest('test needs to modify memory growth')
self.set_setting('MINIMAL_RUNTIME')
src = test_file('core/test_memorygrowth.c')
# Fail without memory growth
self.do_runf(src, 'OOM', assert_returncode=NON_ZERO)
# Win with it
self.set_setting('ALLOW_MEMORY_GROWTH')
self.do_runf(src, '*pre: hello,4.955*\n*hello,4.955*\n*hello,4.955*')
  def test_memorygrowth(self):
    """Run a program that allocates beyond INITIAL_MEMORY: it must OOM
    without ALLOW_MEMORY_GROWTH and succeed with it. For non-wasm -O2
    builds, also check growth produces different (larger) generated code,
    and finally re-run with --tracing enabled.
    """
    if self.has_changed_setting('ALLOW_MEMORY_GROWTH'):
      self.skipTest('test needs to modify memory growth')
    if self.maybe_closure():
      # verify NO_DYNAMIC_EXECUTION is compatible with closure
      self.set_setting('DYNAMIC_EXECUTION', 0)
    # With typed arrays in particular, it is dangerous to use more memory than INITIAL_MEMORY,
    # since we then need to enlarge the heap(s).
    src = test_file('core/test_memorygrowth.c')
    # Fail without memory growth
    self.do_runf(src, 'OOM', assert_returncode=NON_ZERO)
    fail = read_file('test_memorygrowth.js')
    # Win with it
    self.set_setting('ALLOW_MEMORY_GROWTH')
    self.do_runf(src, '*pre: hello,4.955*\n*hello,4.955*\n*hello,4.955*')
    win = read_file('test_memorygrowth.js')
    if '-O2' in self.emcc_args and not self.is_wasm():
      # Make sure ALLOW_MEMORY_GROWTH generates different code (should be less optimized)
      possible_starts = ['// EMSCRIPTEN_START_FUNCS', 'var TOTAL_STACK']
      code_start = None
      for s in possible_starts:
        if fail.find(s) >= 0:
          code_start = s
          break
      assert code_start is not None, 'Generated code must contain one of ' + str(possible_starts)
      # compare only the code sections, skipping the preambles
      fail = fail[fail.find(code_start):]
      win = win[win.find(code_start):]
      assert len(fail) < len(win), 'failing code - without memory growth on - is more optimized, and smaller' + str([len(fail), len(win)])
    # Tracing of memory growths should work
    # (SAFE_HEAP would instrument the tracing code itself, leading to recursion)
    if not self.get_setting('SAFE_HEAP'):
      self.emcc_args += ['--tracing']
      self.do_runf(src, '*pre: hello,4.955*\n*hello,4.955*\n*hello,4.955*')
def test_memorygrowth_2(self):
if self.has_changed_setting('ALLOW_MEMORY_GROWTH'):
self.skipTest('test needs to modify memory growth')
# With typed arrays in particular, it is dangerous to use more memory than INITIAL_MEMORY,
# since we then need to enlarge the heap(s).
src = test_file('core/test_memorygrowth_2.c')
# Fail without memory growth
self.do_runf(src, 'OOM', assert_returncode=NON_ZERO)
fail = read_file('test_memorygrowth_2.js')
# Win with it
self.set_setting('ALLOW_MEMORY_GROWTH')
self.do_runf(src, '*pre: hello,4.955*\n*hello,4.955*\n*hello,4.955*')
win = read_file('test_memorygrowth_2.js')
if '-O2' in self.emcc_args and not self.is_wasm():
# Make sure ALLOW_MEMORY_GROWTH generates different code (should be less optimized)
assert len(fail) < len(win), 'failing code - without memory growth on - is more optimized, and smaller' + str([len(fail), len(win)])
def test_memorygrowth_3(self):
if self.has_changed_setting('ALLOW_MEMORY_GROWTH'):
self.skipTest('test needs to modify memory growth')
# checks handling of malloc failure properly
self.set_setting('ABORTING_MALLOC', 0)
self.set_setting('SAFE_HEAP')
self.do_core_test('test_memorygrowth_3.c')
@also_with_standalone_wasm(impure=True)
def test_memorygrowth_MAXIMUM_MEMORY(self):
  """Verify growth stops at the MAXIMUM_MEMORY limit (wasm-only)."""
  if self.has_changed_setting('ALLOW_MEMORY_GROWTH'):
    self.skipTest('test needs to modify memory growth')
  if not self.is_wasm():
    self.skipTest('wasm memory specific test')

  # check that memory growth does not exceed the wasm mem max limit
  self.emcc_args += ['-s', 'ALLOW_MEMORY_GROWTH', '-s', 'INITIAL_MEMORY=64Mb', '-s', 'MAXIMUM_MEMORY=100Mb']
  self.do_core_test('test_memorygrowth_wasm_mem_max.c')
def test_memorygrowth_linear_step(self):
  """Verify MEMORY_GROWTH_LINEAR_STEP growth stays within MAXIMUM_MEMORY (wasm-only)."""
  if self.has_changed_setting('ALLOW_MEMORY_GROWTH'):
    self.skipTest('test needs to modify memory growth')
  if not self.is_wasm():
    self.skipTest('wasm memory specific test')

  # check that memory growth does not exceed the wasm mem max limit and is exactly or one step below the wasm mem max
  self.emcc_args += ['-s', 'ALLOW_MEMORY_GROWTH', '-s', 'TOTAL_STACK=1Mb', '-s', 'INITIAL_MEMORY=64Mb', '-s', 'MAXIMUM_MEMORY=130Mb', '-s', 'MEMORY_GROWTH_LINEAR_STEP=1Mb']
  self.do_core_test('test_memorygrowth_memory_growth_step.c')
def test_memorygrowth_geometric_step(self):
  """Exercise geometric growth policy (MEMORY_GROWTH_GEOMETRIC_STEP/CAP), wasm-only."""
  if self.has_changed_setting('ALLOW_MEMORY_GROWTH'):
    self.skipTest('test needs to modify memory growth')
  if not self.is_wasm():
    self.skipTest('wasm memory specific test')

  self.emcc_args += ['-s', 'ALLOW_MEMORY_GROWTH', '-s', 'MEMORY_GROWTH_GEOMETRIC_STEP=8.5', '-s', 'MEMORY_GROWTH_GEOMETRIC_CAP=32MB']
  self.do_core_test('test_memorygrowth_geometric_step.c')
def test_memorygrowth_3_force_fail_reallocBuffer(self):
  """Same as test_memorygrowth_3, but force the growth (reallocBuffer) path to fail."""
  if self.has_changed_setting('ALLOW_MEMORY_GROWTH'):
    self.skipTest('test needs to modify memory growth')

  self.set_setting('ALLOW_MEMORY_GROWTH')
  self.set_setting('TEST_MEMORY_GROWTH_FAILS')
  self.do_core_test('test_memorygrowth_3.c')
@parameterized({
  'nogrow': ([],),
  'grow': (['-sALLOW_MEMORY_GROWTH', '-sMAXIMUM_MEMORY=18MB'],)
})
@no_asan('requires more memory when growing')
def test_aborting_new(self, args):
  """C++ `new` must error properly when malloc fails, with and without growth."""
  # test that C++ new properly errors if we fail to malloc when growth is
  # enabled, with or without growth
  self.emcc_args += args
  self.do_core_test('test_aborting_new.cpp')
@no_wasm2js('no WebAssembly.Memory()')
@no_asan('ASan alters the memory size')
def test_module_wasm_memory(self):
  """Supply the wasm memory via the Module object (IMPORTED_MEMORY) through a pre-js."""
  self.emcc_args += ['--pre-js', test_file('core/test_module_wasm_memory.js')]
  self.set_setting('IMPORTED_MEMORY')
  self.do_runf(test_file('core/test_module_wasm_memory.c'), 'success')
def test_ssr(self): # struct self-ref
  """Struct containing pointers to its own type: check size and member offsets."""
  src = '''
#include <stdio.h>
// see related things in openjpeg
typedef struct opj_mqc_state {
unsigned int qeval;
int mps;
struct opj_mqc_state *nmps;
struct opj_mqc_state *nlps;
} opj_mqc_state_t;
static opj_mqc_state_t mqc_states[4] = {
{0x5600, 0, &mqc_states[2], &mqc_states[3]},
{0x5602, 1, &mqc_states[3], &mqc_states[2]},
};
int main() {
printf("*%ld*\\n", (long)(mqc_states+1)-(long)mqc_states);
for (int i = 0; i < 2; i++)
printf("%d:%d,%d,%ld,%ld\\n", i, mqc_states[i].qeval, mqc_states[i].mps,
(long)mqc_states[i].nmps-(long)mqc_states, (long)mqc_states[i].nlps-(long)mqc_states);
return 0;
}
'''
  self.do_run(src, '''*16*\n0:22016,0,32,48\n1:22018,1,48,32\n''')
def test_tinyfuncstr(self):
  """Run core/test_tinyfuncstr.cpp and compare against its expected output."""
  self.do_core_test('test_tinyfuncstr.cpp')
def test_llvmswitch(self):
  """Run core/test_llvmswitch.c (LLVM switch lowering) and compare output."""
  self.do_core_test('test_llvmswitch.c')
def test_cxx_version(self):
  """Run core/test_cxx_version.cpp (checks the __cplusplus value) and compare output."""
  self.do_core_test('test_cxx_version.cpp')
@no_wasm2js('massive switches can break js engines')
def test_bigswitch(self):
  """A large C switch statement, driven by several command-line argument values."""
  self.do_runf(test_file('bigswitch.cpp'), '''34962: GL_ARRAY_BUFFER (0x8892)
26214: what?
35040: GL_STREAM_DRAW (0x88E0)
3060: what?
''', args=['34962', '26214', '35040', str(0xbf4)])
@no_wasm2js('massive switches can break js engines')
@is_slow_test
def test_biggerswitch(self):
  """A generated switch with 20000 cases (stress test for codegen and engines)."""
  if not self.is_optimizing():
    self.skipTest('nodejs takes >6GB to compile this if the wasm is not optimized, which OOMs, see https://github.com/emscripten-core/emscripten/issues/7928#issuecomment-458308453')
  if '-Os' in self.emcc_args:
    self.skipTest('hangs in recent upstream clang, see https://bugs.llvm.org/show_bug.cgi?id=43468')
  num_cases = 20000
  # gen_large_switchcase.py emits the C source for the giant switch on stdout
  switch_case = self.run_process([PYTHON, test_file('gen_large_switchcase.py'), str(num_cases)], stdout=PIPE, stderr=PIPE).stdout
  self.do_run(switch_case, '''58996: 589965899658996
59297: 592975929759297
59598: default
59899: 598995989959899
Success!''')
def test_indirectbr(self):
  """Indirect branches (computed goto); strip -g since it affects this codegen path."""
  self.emcc_args = [x for x in self.emcc_args if x != '-g']
  self.do_core_test('test_indirectbr.c')
@no_asan('local count too large for VMs')
@no_wasm2js('extremely deep nesting, hits stack limit on some VMs')
def test_indirectbr_many(self):
  """Indirect branches with a very large number of targets."""
  self.do_core_test('test_indirectbr_many.c')
def test_pack(self):
  """#pragma pack(1): packed struct must be smaller than the unpacked twin."""
  src = '''
#include <stdio.h>
#include <string.h>
#pragma pack(push,1)
typedef struct header
{
unsigned char id;
unsigned short colour;
unsigned char desc;
} header;
#pragma pack(pop)
typedef struct fatheader
{
unsigned char id;
unsigned short colour;
unsigned char desc;
} fatheader;
int main( int argc, const char *argv[] ) {
header h, *ph = 0;
fatheader fh, *pfh = 0;
printf("*%zu,%ld,%ld*\\n", sizeof(header), (long)((long)&h.desc - (long)&h.id), (long)(&ph[1])-(long)(&ph[0]));
printf("*%zu,%ld,%ld*\\n", sizeof(fatheader), (long)((long)&fh.desc - (long)&fh.id), (long)(&pfh[1])-(long)(&pfh[0]));
return 0;
}
'''
  self.do_run(src, '*4,3,4*\n*6,4,6*')
def test_varargs(self):
  """Run core/test_varargs.c (basic varargs handling) and compare output."""
  self.do_core_test('test_varargs.c')
def test_varargs_multi(self):
  """Run core/test_varargs_multi.c (multiple varargs calls) and compare output."""
  self.do_core_test('test_varargs_multi.c')
@unittest.skip('clang cannot compile this code with that target yet')
def test_varargs_byval(self):
  """Pass structs by value through varargs (currently skipped; clang limitation)."""
  src = r'''
#include <stdio.h>
#include <stdarg.h>
typedef struct type_a {
union {
double f;
void *p;
int i;
short sym;
} value;
} type_a;
enum mrb_vtype {
MRB_TT_FALSE = 0, /* 0 */
MRB_TT_CLASS = 9 /* 9 */
};
typedef struct type_b {
enum mrb_vtype tt:8;
} type_b;
void print_type_a(int argc, ...);
void print_type_b(int argc, ...);
int main(int argc, char *argv[])
{
type_a a;
type_b b;
a.value.p = (void*) 0x12345678;
b.tt = MRB_TT_CLASS;
printf("The original address of a is: %p\n", a.value.p);
printf("The original type of b is: %d\n", b.tt);
print_type_a(1, a);
print_type_b(1, b);
return 0;
}
void print_type_a(int argc, ...) {
va_list ap;
type_a a;
va_start(ap, argc);
a = va_arg(ap, type_a);
va_end(ap);
printf("The current address of a is: %p\n", a.value.p);
}
void print_type_b(int argc, ...) {
va_list ap;
type_b b;
va_start(ap, argc);
b = va_arg(ap, type_b);
va_end(ap);
printf("The current type of b is: %d\n", b.tt);
}
'''
  self.do_run(src, '''The original address of a is: 0x12345678
The original type of b is: 9
The current address of a is: 0x12345678
The current type of b is: 9
''')
def test_functionpointer_libfunc_varargs(self):
  """Call a varargs library function through a function pointer."""
  self.do_core_test('test_functionpointer_libfunc_varargs.c')
def test_structbyval(self):
  """Passing structs by value must copy: callee mutations must not leak to the caller."""
  self.set_setting('INLINING_LIMIT')

  # part 1: make sure that normally, passing structs by value works

  src = r'''
#include <stdio.h>
struct point
{
int x, y;
};
void dump(struct point p) {
p.x++; // should not modify
p.y++; // anything in the caller!
printf("dump: %d,%d\n", p.x, p.y);
}
void dumpmod(struct point *p) {
p->x++; // should not modify
p->y++; // anything in the caller!
printf("dump: %d,%d\n", p->x, p->y);
}
int main( int argc, const char *argv[] ) {
point p = { 54, 2 };
printf("pre: %d,%d\n", p.x, p.y);
dump(p);
void (*dp)(point p) = dump; // And, as a function pointer
dp(p);
printf("post: %d,%d\n", p.x, p.y);
dumpmod(&p);
dumpmod(&p);
printf("last: %d,%d\n", p.x, p.y);
return 0;
}
'''
  self.do_run(src, 'pre: 54,2\ndump: 55,3\ndump: 55,3\npost: 54,2\ndump: 55,3\ndump: 56,4\nlast: 56,4')
def test_stdlibs(self):
  """Smoke test for various C standard library functions."""
  # safe heap prints a warning that messes up our output.
  self.set_setting('SAFE_HEAP', 0)
  # needs atexit
  self.set_setting('EXIT_RUNTIME')
  self.do_core_test('test_stdlibs.c')
def test_stdbool(self):
  """<stdbool.h> true/false must compare as expected."""
  create_file('test_stdbool.c', r'''
#include <stdio.h>
#include <stdbool.h>
int main() {
bool x = true;
bool y = false;
printf("*%d*\n", x != y);
return 0;
}
''')
  self.do_runf('test_stdbool.c', '*1*')
def test_strtoll_hex(self):
  """strtoll on hexadecimal strings ("0x...")."""
  # tests strtoll for hex strings (0x...)
  self.do_core_test('test_strtoll_hex.c')
def test_strtoll_dec(self):
  """strtoll on decimal strings."""
  # tests strtoll for decimal strings (the old "(0x...)" note was a copy/paste from the hex test)
  self.do_core_test('test_strtoll_dec.c')
def test_strtoll_bin(self):
  """strtoll on binary strings."""
  # tests strtoll for binary strings
  self.do_core_test('test_strtoll_bin.c')
def test_strtoll_oct(self):
  """strtoll on octal strings."""
  # tests strtoll for octal strings (the old comment said "decimal"; the test name says octal)
  self.do_core_test('test_strtoll_oct.c')
def test_strtol_hex(self):
  """strtol on hexadecimal strings ("0x...")."""
  # tests strtol for hex strings (0x...)
  self.do_core_test('test_strtol_hex.c')
def test_strtol_dec(self):
  """strtol on decimal strings."""
  # tests strtol for decimal strings
  self.do_core_test('test_strtol_dec.c')
def test_strtol_bin(self):
  """strtol on binary strings."""
  # tests strtol for binary strings
  self.do_core_test('test_strtol_bin.c')
def test_strtol_oct(self):
  """strtol on octal strings."""
  # tests strtol for octal strings
  self.do_core_test('test_strtol_oct.c')
@also_with_standalone_wasm()
def test_atexit(self):
  """atexit handlers must run in proper reverse (LIFO) order."""
  # Confirms they are called in the proper reverse order
  if not self.get_setting('STANDALONE_WASM'):
    # STANDALONE_WASM mode always sets EXIT_RUNTIME if main exists
    self.set_setting('EXIT_RUNTIME')
  self.do_core_test('test_atexit.c')
def test_atexit_threads(self):
  """atexit with threads; also covers thread exit (__cxa_thread_atexit)."""
  # also tests thread exit (__cxa_thread_atexit)
  self.set_setting('EXIT_RUNTIME')
  self.do_core_test('test_atexit_threads.c')
@no_asan('test relies on null pointer reads')
def test_pthread_specific(self):
  """pthread_setspecific/pthread_getspecific thread-specific data."""
  self.do_run_in_out_file_test('pthread/specific.c')
def test_pthread_equal(self):
  """pthread_equal comparison of thread handles."""
  self.do_run_in_out_file_test('pthread/test_pthread_equal.cpp')
@node_pthreads
def test_pthread_dispatch_after_exit(self):
  """Dispatching work to a pthread after exit; output may interleave across threads."""
  self.do_run_in_out_file_test('pthread/test_pthread_dispatch_after_exit.c', interleaved_output=False)
@node_pthreads
def test_pthread_atexit(self):
  """Threads must still be running when atexit-registered functions execute."""
  # Test to ensure threads are still running when atexit-registered functions are called
  self.set_setting('EXIT_RUNTIME')
  self.set_setting('PTHREAD_POOL_SIZE', 1)
  self.do_run_in_out_file_test('pthread/test_pthread_atexit.c')
@node_pthreads
def test_pthread_nested_work_queue(self):
  """Nested work queued from within pthread work items."""
  self.set_setting('EXIT_RUNTIME')
  self.set_setting('PTHREAD_POOL_SIZE', 1)
  self.do_run_in_out_file_test('pthread/test_pthread_nested_work_queue.c')
@node_pthreads
def test_pthread_thread_local_storage(self):
  """Thread-local storage under PROXY_TO_PTHREAD; needs a large initial heap."""
  self.set_setting('PROXY_TO_PTHREAD')
  self.set_setting('EXIT_RUNTIME')
  self.set_setting('INITIAL_MEMORY', '300mb')
  self.do_run_in_out_file_test('pthread/test_pthread_thread_local_storage.cpp')
@node_pthreads
def test_pthread_cleanup(self):
  """pthread_cleanup_push/pop handlers with a pool of worker threads."""
  self.set_setting('EXIT_RUNTIME')
  self.set_setting('PTHREAD_POOL_SIZE', 4)
  self.do_run_in_out_file_test('pthread/test_pthread_cleanup.cpp')
@node_pthreads
def test_pthread_setspecific_mainthread(self):
  """pthread_setspecific on the main thread, across three exit styles.

  Runs the same source three times: returning from main (-DRETURN),
  calling exit() (-DEXIT), and calling pthread_exit() (default build).
  """
  self.set_setting('EXIT_RUNTIME')
  print('.. return')
  self.do_runf(test_file('pthread/test_pthread_setspecific_mainthread.c'), 'done!', emcc_args=['-DRETURN'])
  print('.. exit')
  self.do_runf(test_file('pthread/test_pthread_setspecific_mainthread.c'), 'done!', emcc_args=['-DEXIT'])
  print('.. pthread_exit')
  self.do_run_in_out_file_test('pthread/test_pthread_setspecific_mainthread.c')
@node_pthreads
@no_mac('https://github.com/emscripten-core/emscripten/issues/15014')
def test_pthread_abort(self):
  """Module.onAbort must fire on the main thread when a pthread build aborts."""
  self.set_setting('PROXY_TO_PTHREAD')
  # Add the onAbort handler at runtime during preRun.  This means that onAbort
  # handler will only be present in the main thread (much like it would if it
  # was passed in by pre-populating the module object on prior to loading).
  self.add_pre_run("Module.onAbort = function() { console.log('onAbort called'); }")
  self.do_run_in_out_file_test('pthread/test_pthread_abort.c', assert_returncode=NON_ZERO)
@no_asan('ASan does not support custom memory allocators')
@no_lsan('LSan does not support custom memory allocators')
@node_pthreads
def test_pthread_emmalloc(self):
  """Run the emmalloc allocator test in a pthread (PROXY_TO_PTHREAD) build.

  Fix: the original called `self.set_setting('ASSERTIONS=2')`, which defines
  a setting literally named 'ASSERTIONS=2' instead of setting ASSERTIONS to 2
  (compare `self.set_setting('ASSERTIONS', 2)` used elsewhere in this file).
  """
  self.emcc_args += ['-fno-builtin']
  self.set_setting('PROXY_TO_PTHREAD')
  self.set_setting('EXIT_RUNTIME')
  self.set_setting('ASSERTIONS', 2)
  self.set_setting('MALLOC', 'emmalloc')
  self.do_core_test('test_emmalloc.c')
def test_tcgetattr(self):
  """termios tcgetattr smoke test."""
  self.do_runf(test_file('termios/test_tcgetattr.c'), 'success')
def test_time(self):
  """time() and friends, re-run under a couple of TZ environment settings."""
  self.do_core_test('test_time.cpp')
  for zone in ('EST+05EDT', 'UTC+0'):
    print('extra tz test:', zone)
    with env_modify({'TZ': zone}):
      # Run the test with different time zone settings if
      # possible. It seems that the TZ environment variable does not
      # work all the time (at least it's not well respected by
      # Node.js on Windows), but it does no harm either.
      self.do_core_test('test_time.cpp')
def test_timeb(self):
  """Run core/test_timeb.c (struct timeb / ftime) and compare output."""
  # NOTE(review): the previous comment here ("Confirms they are called in
  # reverse order") was a copy/paste from test_atexit and did not apply.
  self.do_core_test('test_timeb.c')
def test_time_c(self):
  """Run core/test_time_c.c and compare output."""
  self.do_core_test('test_time_c.c')
def test_gmtime(self):
  """Run core/test_gmtime.c and compare output."""
  self.do_core_test('test_gmtime.c')
def test_strptime_tm(self):
  """strptime filling a struct tm."""
  self.do_core_test('test_strptime_tm.c')
def test_strptime_days(self):
  """strptime day-of-week / day-of-year handling."""
  self.do_core_test('test_strptime_days.c')
def test_strptime_reentrant(self):
  """Reentrant strptime usage."""
  # needs to flush stdio streams
  self.set_setting('EXIT_RUNTIME')
  self.do_core_test('test_strptime_reentrant.c')
def test_strftime(self):
  """Run core/test_strftime.cpp and compare output."""
  self.do_core_test('test_strftime.cpp')
def test_trickystring(self):
  """Run core/test_trickystring.c (awkward string contents) and compare output."""
  self.do_core_test('test_trickystring.c')
def test_statics(self):
  """Run core/test_statics.cpp (static variables) and compare output."""
  self.do_core_test('test_statics.cpp')
def test_copyop(self):
  """Struct copy via assignment (memcpy-based lowering)."""
  # clang generated code is vulnerable to this, as it uses
  # memcpy for assignments, with hardcoded numbers of bytes
  # (llvm-gcc copies items one by one).
  self.do_core_test('test_copyop.cpp')
def test_memcpy_memcmp(self):
  """memcpy/memcmp stress test; output is hashed so a huge dump compares cheaply."""
  self.banned_js_engines = [config.V8_ENGINE] # Currently broken under V8_ENGINE but not node

  def digest(out):
    out = out.replace('\n \n', '\n') # remove extra node output
    return hashlib.sha1(out.encode('utf-8')).hexdigest()

  self.do_core_test('test_memcpy_memcmp.c', output_nicerizer=digest)
def test_memcpy2(self):
  """Run core/test_memcpy2.c and compare output."""
  self.do_core_test('test_memcpy2.c')
def test_memcpy3(self):
  """Run core/test_memcpy3.c and compare output."""
  self.do_core_test('test_memcpy3.c')
@also_with_standalone_wasm()
def test_memcpy_alignment(self):
  """memcpy with various source/destination alignments."""
  self.do_runf(test_file('test_memcpy_alignment.cpp'), 'OK.')
def test_memset_alignment(self):
  """memset with various destination alignments."""
  self.do_runf(test_file('test_memset_alignment.cpp'), 'OK.')
def test_memset(self):
  """Run core/test_memset.c and compare output."""
  self.do_core_test('test_memset.c')
def test_getopt(self):
  """getopt short-option parsing."""
  self.do_core_test('test_getopt.c', args=['-t', '12', '-n', 'foobar'])
def test_getopt_long(self):
  """getopt_long long-option parsing."""
  self.do_core_test('test_getopt_long.c', args=['--file', 'foobar', '-b'])
def test_memmove(self):
  """Run core/test_memmove.c and compare output."""
  self.do_core_test('test_memmove.c')
def test_memmove2(self):
  """Run core/test_memmove2.c and compare output."""
  self.do_core_test('test_memmove2.c')
def test_memmove3(self):
  """Run core/test_memmove3.c and compare output."""
  self.do_core_test('test_memmove3.c')
def test_flexarray_struct(self):
  """Struct with a flexible array member."""
  self.do_core_test('test_flexarray_struct.c')
def test_bsearch(self):
  """libc bsearch()."""
  self.do_core_test('test_bsearch.c')
def test_stack_overflow(self):
  """A stack overflow must be detected and reported with a non-zero exit code."""
  self.set_setting('ASSERTIONS', 2)
  self.do_runf(test_file('core/stack_overflow.cpp'), 'stack overflow', assert_returncode=NON_ZERO)
def test_stackAlloc(self):
  """Run core/stackAlloc.cpp (runtime stack allocation helper) and compare output."""
  self.do_core_test('stackAlloc.cpp')
def test_nestedstructs(self):
  """Layout of nested structs, unions, char arrays with padding, and bitfields.

  The expected string encodes sizeof/offset values, which must match the
  native C/C++ layout (see comment before do_run).
  """
  src = '''
#include <stdio.h>
#include "emscripten.h"
struct base {
int x;
float y;
union {
int a;
float b;
};
char c;
};
struct hashtableentry {
int key;
base data;
};
struct hashset {
typedef hashtableentry entry;
struct chain { entry elem; chain *next; };
// struct chainchunk { chain chains[100]; chainchunk *next; };
};
struct hashtable : hashset {
hashtable() {
base *b = NULL;
entry *e = NULL;
chain *c = NULL;
printf("*%zu,%ld,%ld,%ld,%ld,%ld|%zu,%ld,%ld,%ld,%ld,%ld,%ld,%ld|%zu,%ld,%ld,%ld,%ld,%ld,%ld,%ld,%ld,%ld*\\n",
sizeof(base),
long(&(b->x)), long(&(b->y)), long(&(b->a)), long(&(b->b)), long(&(b->c)),
sizeof(hashtableentry),
long(&(e->key)), long(&(e->data)), long(&(e->data.x)), long(&(e->data.y)), long(&(e->data.a)), long(&(e->data.b)), long(&(e->data.c)),
sizeof(hashset::chain),
long(&(c->elem)), long(&(c->next)), long(&(c->elem.key)), long(&(c->elem.data)), long(&(c->elem.data.x)), long(&(c->elem.data.y)), long(&(c->elem.data.a)), long(&(c->elem.data.b)), long(&(c->elem.data.c))
);
}
};
struct B { char buffer[62]; int last; char laster; char laster2; };
struct Bits {
unsigned short A : 1;
unsigned short B : 1;
unsigned short C : 1;
unsigned short D : 1;
unsigned short x1 : 1;
unsigned short x2 : 1;
unsigned short x3 : 1;
unsigned short x4 : 1;
};
int main() {
hashtable t;
// Part 2 - the char[] should be compressed, BUT have a padding space at the end so the next
// one is aligned properly. Also handle char; char; etc. properly.
B *b = NULL;
printf("*%ld,%ld,%ld,%ld,%ld,%ld,%ld,%ld,%zu*\\n", long(b), long(&(b->buffer)), long(&(b->buffer[0])), long(&(b->buffer[1])), long(&(b->buffer[2])),
long(&(b->last)), long(&(b->laster)), long(&(b->laster2)), sizeof(B));
// Part 3 - bitfields, and small structures
Bits *b2 = NULL;
printf("*%zu*\\n", sizeof(Bits));
return 0;
}
'''
  # Bloated memory; same layout as C/C++
  self.do_run(src, '*16,0,4,8,8,12|20,0,4,4,8,12,12,16|24,0,20,0,4,4,8,12,12,16*\n*0,0,0,1,2,64,68,69,72*\n*2*')
def prep_dlfcn_main(self):
  """Switch the build settings over to main-module mode (after building a side module)."""
  self.set_setting('MAIN_MODULE')
  # NODERAWFS lets the main module dlopen() 'liblib.so' straight from disk
  self.set_setting('NODERAWFS')
  self.clear_setting('SIDE_MODULE')
def build_dlfcn_lib(self, filename):
  """Build `filename` as a side module and install it as 'liblib.so'."""
  self.clear_setting('MAIN_MODULE')
  self.set_setting('SIDE_MODULE')
  # wasm2js builds emit a .js side module, hence js_outfile for non-wasm
  outfile = self.build(filename, js_outfile=not self.is_wasm())
  shutil.move(outfile, 'liblib.so')
@needs_dylink
def test_dlfcn_missing(self):
  """dlopen of a nonexistent library must return NULL and set dlerror()."""
  self.set_setting('MAIN_MODULE')
  self.set_setting('ASSERTIONS')
  src = r'''
#include <dlfcn.h>
#include <stdio.h>
#include <assert.h>
int main() {
void* lib_handle = dlopen("libfoo.so", RTLD_NOW);
assert(!lib_handle);
printf("error: %s\n", dlerror());
return 0;
}
'''
  self.do_run(src, "error: Could not load dynamic lib: libfoo.so\nError: ENOENT: no such file or directory, open 'libfoo.so'")
@needs_dylink
def test_dlfcn_basic(self):
  """Basic dlopen: the side module's global constructor runs on load."""
  create_file('liblib.cpp', '''
#include <cstdio>
class Foo {
public:
Foo() {
puts("Constructing lib object.");
}
};
Foo global;
''')
  self.build_dlfcn_lib('liblib.cpp')

  self.prep_dlfcn_main()
  src = '''
#include <cstdio>
#include <dlfcn.h>
class Bar {
public:
Bar() {
puts("Constructing main object.");
}
};
Bar global;
int main() {
dlopen("liblib.so", RTLD_NOW);
return 0;
}
'''
  self.do_run(src, 'Constructing main object.\nConstructing lib object.\n')
@needs_dylink
def test_dlfcn_i64(self):
  """dlsym a side-module function that returns a 64-bit integer."""
  create_file('liblib.c', '''
#include <inttypes.h>
int64_t foo(int x) {
return (long long)x / (long long)1234;
}
''')
  self.build_dlfcn_lib('liblib.c')

  self.prep_dlfcn_main()
  src = r'''
#include <inttypes.h>
#include <stdio.h>
#include <stdlib.h>
#include <dlfcn.h>
typedef int64_t (*int64func)(int);
int main() {
void *lib_handle = dlopen("liblib.so", RTLD_NOW);
if (!lib_handle) {
puts(dlerror());
abort();
}
printf("dll handle: %p\n", lib_handle);
int64func x = (int64func)dlsym(lib_handle, "foo");
printf("foo func handle: %p\n", x);
if (!x) {
printf("dlsym failed: %s\n", dlerror());
return 1;
}
printf("|%lld|\n", x(81234567));
return 0;
}
'''
  self.do_run(src, '|65830|')
@needs_dylink
@disabled('EM_ASM in not yet supported in SIDE_MODULE')
def test_dlfcn_em_asm(self):
  """EM_ASM inside a side module's constructor (disabled: not yet supported)."""
  create_file('liblib.cpp', '''
#include <emscripten.h>
class Foo {
public:
Foo() {
EM_ASM( out("Constructing lib object.") );
}
};
Foo global;
''')
  self.build_dlfcn_lib('liblib.cpp')

  self.prep_dlfcn_main()
  src = '''
#include <emscripten.h>
#include <dlfcn.h>
class Bar {
public:
Bar() {
EM_ASM( out("Constructing main object.") );
}
};
Bar global;
int main() {
dlopen("liblib.so", RTLD_NOW);
EM_ASM( out("All done.") );
return 0;
}
'''
  self.do_run(src, 'Constructing main object.\nConstructing lib object.\nAll done.\n')
@needs_dylink
def test_dlfcn_qsort(self):
  """Pass a comparison function obtained from a side module into qsort()."""
  self.set_setting('EXPORTED_FUNCTIONS', ['_get_cmp'])
  create_file('liblib.cpp', '''
int lib_cmp(const void* left, const void* right) {
const int* a = (const int*) left;
const int* b = (const int*) right;
if(*a > *b) return 1;
else if(*a == *b) return 0;
else return -1;
}
typedef int (*CMP_TYPE)(const void*, const void*);
extern "C" CMP_TYPE get_cmp() {
return lib_cmp;
}
''')
  self.build_dlfcn_lib('liblib.cpp')

  self.prep_dlfcn_main()
  self.set_setting('EXPORTED_FUNCTIONS', ['_main', '_malloc'])
  src = '''
#include <stdio.h>
#include <stdlib.h>
#include <dlfcn.h>
typedef int (*CMP_TYPE)(const void*, const void*);
int main_cmp(const void* left, const void* right) {
const int* a = (const int*) left;
const int* b = (const int*) right;
if(*a < *b) return 1;
else if(*a == *b) return 0;
else return -1;
}
int main() {
void* lib_handle;
CMP_TYPE (*getter_ptr)();
CMP_TYPE lib_cmp_ptr;
int arr[5] = {4, 2, 5, 1, 3};
qsort((void*)arr, 5, sizeof(int), main_cmp);
printf("Sort with main comparison: ");
for (int i = 0; i < 5; i++) {
printf("%d ", arr[i]);
}
printf("\\n");
lib_handle = dlopen("liblib.so", RTLD_NOW);
if (lib_handle == NULL) {
printf("Could not load lib.\\n");
return 1;
}
getter_ptr = (CMP_TYPE (*)()) dlsym(lib_handle, "get_cmp");
if (getter_ptr == NULL) {
printf("Could not find func.\\n");
return 1;
}
lib_cmp_ptr = getter_ptr();
qsort((void*)arr, 5, sizeof(int), lib_cmp_ptr);
printf("Sort with lib comparison: ");
for (int i = 0; i < 5; i++) {
printf("%d ", arr[i]);
}
printf("\\n");
return 0;
}
'''
  self.do_run(src, 'Sort with main comparison: 5 4 3 2 1 *Sort with lib comparison: 1 2 3 4 5 *',
              output_nicerizer=lambda x: x.replace('\n', '*'))
@needs_dylink
def test_dlfcn_data_and_fptr(self):
  """dlsym of data symbols and passing function pointers across module bounds."""
  # Failing under v8 since: https://chromium-review.googlesource.com/712595
  if self.is_wasm():
    self.banned_js_engines = [config.V8_ENGINE]

  create_file('liblib.cpp', r'''
#include <stdio.h>
int theglobal = 42;
extern void parent_func(); // a function that is defined in the parent
int* lib_get_global_addr() {
return &theglobal;
}
void lib_fptr() {
printf("Second calling lib_fptr from main.\n");
parent_func();
// call it also through a pointer, to check indexizing
void (*p_f)();
p_f = parent_func;
p_f();
}
extern "C" void (*func(int x, void(*fptr)()))() {
printf("In func: %d\n", x);
fptr();
return lib_fptr;
}
''')
  self.set_setting('EXPORTED_FUNCTIONS', ['_func'])
  self.build_dlfcn_lib('liblib.cpp')

  self.prep_dlfcn_main()
  src = r'''
#include <stdio.h>
#include <dlfcn.h>
#include <emscripten.h>
typedef void (*FUNCTYPE(int, void(*)()))();
FUNCTYPE func;
void EMSCRIPTEN_KEEPALIVE parent_func() {
printf("parent_func called from child\n");
}
void main_fptr() {
printf("First calling main_fptr from lib.\n");
}
int main() {
void* lib_handle;
FUNCTYPE* func_fptr;
// Test basic lib loading.
lib_handle = dlopen("liblib.so", RTLD_NOW);
if (lib_handle == NULL) {
printf("Could not load lib.\n");
return 1;
}
// Test looked up function.
func_fptr = (FUNCTYPE*) dlsym(lib_handle, "func");
// Load twice to test cache.
func_fptr = (FUNCTYPE*) dlsym(lib_handle, "func");
if (func_fptr == NULL) {
printf("Could not find func.\n");
return 1;
}
// Test passing function pointers across module bounds.
void (*fptr)() = func_fptr(13, main_fptr);
fptr();
// Test global data.
int* globaladdr = (int*) dlsym(lib_handle, "theglobal");
if (globaladdr == NULL) {
printf("Could not find global.\n");
return 1;
}
printf("Var: %d\n", *globaladdr);
return 0;
}
'''
  self.set_setting('EXPORTED_FUNCTIONS', ['_main'])
  self.do_run(src, '''\
In func: 13
First calling main_fptr from lib.
Second calling lib_fptr from main.
parent_func called from child
parent_func called from child
Var: 42
''')
@needs_dylink
def test_dlfcn_varargs(self):
  """Side module calling a varargs function defined in the main module."""
  # this test is not actually valid - it fails natively. the child should fail
  # to be loaded, not load and successfully see the parent print_ints func

  create_file('liblib.cpp', r'''
void print_ints(int n, ...);
extern "C" void func() {
print_ints(2, 13, 42);
}
''')
  self.set_setting('EXPORTED_FUNCTIONS', ['_func'])
  self.build_dlfcn_lib('liblib.cpp')

  self.prep_dlfcn_main()
  src = r'''
#include <stdarg.h>
#include <stdio.h>
#include <dlfcn.h>
#include <assert.h>
void print_ints(int n, ...) {
va_list args;
va_start(args, n);
for (int i = 0; i < n; i++) {
printf("%d\n", va_arg(args, int));
}
va_end(args);
}
int main() {
void* lib_handle;
void (*fptr)();
print_ints(2, 100, 200);
lib_handle = dlopen("liblib.so", RTLD_NOW);
assert(lib_handle);
fptr = (void (*)())dlsym(lib_handle, "func");
fptr();
return 0;
}
'''
  self.set_setting('EXPORTED_FUNCTIONS', ['_main'])
  self.do_run(src, '100\n200\n13\n42\n')
@needs_dylink
def test_dlfcn_alignment_and_zeroing(self):
  """Side-module data must be properly aligned and BSS zero-initialized.

  Loads ten copies of the same library into dirtied memory and checks that
  an over-aligned global keeps its alignment/value and that zero-initialized
  globals really are zero.
  """
  self.set_setting('INITIAL_MEMORY', '16mb')
  create_file('liblib.c', r'''
int prezero = 0;
__attribute__((aligned(1024))) int superAligned = 12345;
int postzero = 0;
''')
  self.build_dlfcn_lib('liblib.c')
  for i in range(10):
    curr = '%d.so' % i
    shutil.copyfile('liblib.so', curr)

  self.prep_dlfcn_main()
  self.set_setting('INITIAL_MEMORY', '128mb')
  create_file('src.c', r'''
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <dlfcn.h>
#include <assert.h>
#include <emscripten.h>
int main() {
printf("'prepare' memory with non-zero inited stuff\n");
int num = 120 * 1024 * 1024; // total is 128; we'll use 5*5 = 25 at least, so allocate pretty much all of it
void* mem = malloc(num);
assert(mem);
printf("setting this range to non-zero: %ld - %ld\n", (long)mem, ((long)mem) + num);
memset(mem, 1, num);
EM_ASM({
var value = HEAP8[64*1024*1024];
out('verify middle of memory is non-zero: ' + value);
assert(value === 1);
});
free(mem);
for (int i = 0; i < 10; i++) {
char curr[] = "?.so";
curr[0] = '0' + i;
printf("loading %s\n", curr);
void* lib_handle = dlopen(curr, RTLD_NOW);
if (!lib_handle) {
puts(dlerror());
assert(0);
}
printf("getting superAligned\n");
int* superAligned = (int*)dlsym(lib_handle, "superAligned");
assert(superAligned);
assert(((long)superAligned) % 1024 == 0); // alignment
printf("checking value of superAligned, at %p\n", superAligned);
assert(*superAligned == 12345); // value
printf("getting prezero\n");
int* prezero = (int*)dlsym(lib_handle, "prezero");
assert(prezero);
printf("checking value of prezero, at %p\n", prezero);
assert(*prezero == 0);
*prezero = 1;
assert(*prezero != 0);
printf("getting postzero\n");
int* postzero = (int*)dlsym(lib_handle, "postzero");
printf("checking value of postzero, at %p\n", postzero);
assert(postzero);
printf("checking value of postzero\n");
assert(*postzero == 0);
*postzero = 1;
assert(*postzero != 0);
}
printf("success.\n");
return 0;
}
''')
  self.do_runf('src.c', 'success.\n')
@needs_dylink
def test_dlfcn_self(self):
  """Build as MAIN_MODULE with EXPORT_ALL and sanity-check the number of data exports."""
  self.set_setting('MAIN_MODULE')
  self.set_setting('EXPORT_ALL')

  def count_exported_globals(wasm):
    # Count '(export ...)' lines in the wat that export a global (data symbols).
    wat_lines = self.get_wasm_text(wasm).splitlines()
    return sum(1 for line in wat_lines
               if line.strip().startswith('(export ') and '(global ' in line)

  self.do_core_test('test_dlfcn_self.c')
  num_data_exports = count_exported_globals('test_dlfcn_self.wasm')
  # ensure there aren't too many globals; we don't want unnamed_addr
  self.assertGreater(num_data_exports, 20)
  self.assertLess(num_data_exports, 56)
@needs_dylink
def test_dlfcn_unique_sig(self):
  """dlsym a side-module function with an unusual 13-int signature and call it."""
  create_file('liblib.c', r'''
#include <stdio.h>
int myfunc(int a, int b, int c, int d, int e, int f, int g, int h, int i, int j, int k, int l, int m) {
return 13;
}
''')
  self.set_setting('EXPORTED_FUNCTIONS', ['_myfunc'])
  self.build_dlfcn_lib('liblib.c')

  self.prep_dlfcn_main()
  create_file('main.c', r'''
#include <assert.h>
#include <stdio.h>
#include <dlfcn.h>
typedef int (*FUNCTYPE)(int, int, int, int, int, int, int, int, int, int, int, int, int);
int main() {
void *lib_handle;
FUNCTYPE func_ptr;
lib_handle = dlopen("liblib.so", RTLD_NOW);
assert(lib_handle != NULL);
func_ptr = (FUNCTYPE)dlsym(lib_handle, "myfunc");
assert(func_ptr != NULL);
assert(func_ptr(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0) == 13);
puts("success");
return 0;
}
''')
  self.set_setting('EXPORTED_FUNCTIONS', ['_main', '_malloc'])
  self.do_runf('main.c', 'success')
@needs_dylink
def test_dlfcn_info(self):
  """dladdr on a dlsym'd pointer: must not corrupt it, and Dl_info is filled sanely."""
  create_file('liblib.c', r'''
#include <stdio.h>
int myfunc(int a, int b, int c, int d, int e, int f, int g, int h, int i, int j, int k, int l, int m) {
return 13;
}
''')
  self.set_setting('EXPORTED_FUNCTIONS', ['_myfunc'])
  self.build_dlfcn_lib('liblib.c')

  self.prep_dlfcn_main()
  create_file('main.c', '''
#include <assert.h>
#include <stdio.h>
#include <string.h>
#include <dlfcn.h>
typedef int (*FUNCTYPE)(int, int, int, int, int, int, int, int, int, int, int, int, int);
int main() {
void *lib_handle;
FUNCTYPE func_ptr;
lib_handle = dlopen("liblib.so", RTLD_NOW);
assert(lib_handle != NULL);
func_ptr = (FUNCTYPE)dlsym(lib_handle, "myfunc");
assert(func_ptr != NULL);
assert(func_ptr(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0) == 13);
/* Verify that we don't corrupt func_ptr when calling dladdr.  */
Dl_info info;
memset(&info, 0, sizeof(info));
dladdr(func_ptr, &info);
assert(func_ptr != NULL);
assert(func_ptr(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0) == 13);
/* Verify something useful lives in info.  */
assert(info.dli_fname != NULL);
assert(info.dli_fbase == NULL);
assert(info.dli_sname == NULL);
assert(info.dli_saddr == NULL);
puts("success");
return 0;
}
''')
  self.set_setting('EXPORTED_FUNCTIONS', ['_main', '_malloc'])
  self.do_runf('main.c', 'success')
@needs_dylink
def test_dlfcn_stacks(self):
  """A side-module function with a large stack frame must not trample the caller's stack."""
  create_file('liblib.c', r'''
#include <assert.h>
#include <stdio.h>
#include <string.h>
int myfunc(const char *input) {
char bigstack[1024] = { 0 };
// make sure we didn't just trample the stack!
assert(!strcmp(input, "foobar"));
snprintf(bigstack, sizeof(bigstack), "%s", input);
return strlen(bigstack);
}
''')
  self.set_setting('EXPORTED_FUNCTIONS', ['_myfunc'])
  self.build_dlfcn_lib('liblib.c')

  self.prep_dlfcn_main()
  create_file('main.c', '''
#include <assert.h>
#include <stdio.h>
#include <dlfcn.h>
#include <string.h>
typedef int (*FUNCTYPE)(const char *);
int main() {
void *lib_handle;
FUNCTYPE func_ptr;
char str[128];
snprintf(str, sizeof(str), "foobar");
// HACK: Use strcmp in the main executable so that it doesn't get optimized out and the dynamic library
//       is able to use it.
assert(!strcmp(str, "foobar"));
lib_handle = dlopen("liblib.so", RTLD_NOW);
assert(lib_handle != NULL);
func_ptr = (FUNCTYPE)dlsym(lib_handle, "myfunc");
assert(func_ptr != NULL);
assert(func_ptr(str) == 6);
puts("success");
return 0;
}
''')
  self.set_setting('EXPORTED_FUNCTIONS', ['_main', '_malloc', '_strcmp'])
  self.do_runf('main.c', 'success')
@needs_dylink
def test_dlfcn_funcs(self):
  """Exchange function pointers in both directions between main and side module."""
  create_file('liblib.c', r'''
#include <assert.h>
#include <stdio.h>
#include <string.h>
typedef void (*voidfunc)();
typedef void (*intfunc)(int);
void callvoid(voidfunc f) { f(); }
void callint(voidfunc f, int x) { f(x); }
void void_0() { printf("void 0\n"); }
void void_1() { printf("void 1\n"); }
voidfunc getvoid(int i) {
switch(i) {
case 0: return void_0;
case 1: return void_1;
default: return NULL;
}
}
void int_0(int x) { printf("int 0 %d\n", x); }
void int_1(int x) { printf("int 1 %d\n", x); }
intfunc getint(int i) {
switch(i) {
case 0: return int_0;
case 1: return int_1;
default: return NULL;
}
}
''')
  self.set_setting('EXPORTED_FUNCTIONS', ['_callvoid', '_callint', '_getvoid', '_getint'])
  self.build_dlfcn_lib('liblib.c')

  self.prep_dlfcn_main()
  create_file('main.c', r'''
#include <assert.h>
#include <stdio.h>
#include <dlfcn.h>
typedef void (*voidfunc)();
typedef void (*intfunc)(int);
typedef void (*voidcaller)(voidfunc);
typedef void (*intcaller)(intfunc, int);
typedef voidfunc (*voidgetter)(int);
typedef intfunc (*intgetter)(int);
void void_main() { printf("void_main.\n"); }
void int_main(int x) { printf("int_main %d\n", x); }
int main() {
printf("go\n");
void *lib_handle;
lib_handle = dlopen("liblib.so", RTLD_NOW);
assert(lib_handle != NULL);
voidcaller callvoid = (voidcaller)dlsym(lib_handle, "callvoid");
assert(callvoid != NULL);
callvoid(void_main);
intcaller callint = (intcaller)dlsym(lib_handle, "callint");
assert(callint != NULL);
callint(int_main, 201);
voidgetter getvoid = (voidgetter)dlsym(lib_handle, "getvoid");
assert(getvoid != NULL);
callvoid(getvoid(0));
callvoid(getvoid(1));
intgetter getint = (intgetter)dlsym(lib_handle, "getint");
assert(getint != NULL);
callint(getint(0), 54);
callint(getint(1), 9000);
assert(getint(1000) == NULL);
puts("ok");
return 0;
}
''')
  self.set_setting('EXPORTED_FUNCTIONS', ['_main', '_malloc'])
  self.do_runf('main.c', '''go
void_main.
int_main 201
void 0
void 1
int 0 54
int 1 9000
ok
''')
@needs_dylink
def test_dlfcn_mallocs(self):
  """malloc/free proxied through a side module (dlmalloc_proxy.c driver)."""
  # will be exhausted without functional malloc/free
  self.set_setting('INITIAL_MEMORY', '64mb')

  create_file('liblib.c', r'''
#include <assert.h>
#include <stdio.h>
#include <string.h>
#include <stdlib.h>
void *mallocproxy(int n) { return malloc(n); }
void freeproxy(void *p) { free(p); }
''')
  self.set_setting('EXPORTED_FUNCTIONS', ['_mallocproxy', '_freeproxy'])
  self.build_dlfcn_lib('liblib.c')

  self.prep_dlfcn_main()
  self.set_setting('EXPORTED_FUNCTIONS', ['_main', '_malloc', '_free'])
  self.do_runf(test_file('dlmalloc_proxy.c'), '*294,153*')
@needs_dylink
def test_dlfcn_longjmp(self):
"""Verify longjmp from a dlopen'd side module back into the main module.

The jmp_buf is created in main and passed into the side module, which
longjmps on its 10th invocation.
"""
create_file('liblib.c', r'''
#include <setjmp.h>
#include <stdio.h>
void jumpy(jmp_buf buf) {
static int i = 0;
i++;
if (i == 10) longjmp(buf, i);
printf("pre %d\n", i);
}
''')
self.build_dlfcn_lib('liblib.c')
self.prep_dlfcn_main()
create_file('main.c', r'''
#include <assert.h>
#include <stdio.h>
#include <dlfcn.h>
#include <setjmp.h>
typedef void (*jumpfunc)(jmp_buf);
int main() {
printf("go!\n");
void *lib_handle;
lib_handle = dlopen("liblib.so", RTLD_NOW);
assert(lib_handle != NULL);
jumpfunc jumpy = (jumpfunc)dlsym(lib_handle, "jumpy");
assert(jumpy);
jmp_buf buf;
int jmpval = setjmp(buf);
if (jmpval == 0) {
while (1) jumpy(buf);
} else {
printf("out!\n");
}
return 0;
}
''')
self.set_setting('EXPORTED_FUNCTIONS', ['_main', '_malloc', '_free'])
self.do_runf('main.c', '''go!
pre 1
pre 2
pre 3
pre 4
pre 5
pre 6
pre 7
pre 8
pre 9
out!
''', force_c=True)
# TODO: make this work. need to forward tempRet0 across modules
# TODO Enable @with_both_exception_handling (the test is not working now)
@needs_dylink
def zzztest_dlfcn_exceptions(self):
"""(Disabled) C++ exceptions thrown in a side module, caught in main.

The 'zzz' prefix keeps this test from being collected; see the TODOs above.
"""
self.set_setting('DISABLE_EXCEPTION_CATCHING', 0)
create_file('liblib.cpp', r'''
extern "C" {
int ok() {
return 65;
}
int fail() {
throw 123;
}
}
''')
self.build_dlfcn_lib('liblib.cpp')
self.prep_dlfcn_main()
src = r'''
#include <assert.h>
#include <stdio.h>
#include <dlfcn.h>
typedef int (*intfunc)();
int main() {
printf("go!\n");
void *lib_handle;
lib_handle = dlopen("liblib.so", RTLD_NOW);
assert(lib_handle != NULL);
intfunc okk = (intfunc)dlsym(lib_handle, "ok");
intfunc faill = (intfunc)dlsym(lib_handle, "fail");
assert(okk && faill);
try {
printf("ok: %d\n", okk());
} catch(...) {
printf("wha\n");
}
try {
printf("fail: %d\n", faill());
} catch(int x) {
printf("int %d\n", x);
}
try {
printf("fail: %d\n", faill());
} catch(double x) {
printf("caught %f\n", x);
}
return 0;
}
'''
self.set_setting('EXPORTED_FUNCTIONS', ['_main', '_malloc', '_free'])
self.do_run(src, '''go!
ok: 65
int 123
ok
''')
@needs_dylink
def test_dlfcn_handle_alloc(self):
"""Verify dlopen handle reuse semantics across dlopen/dlclose cycles."""
# verify that dlopen does not allocate already used handles
dirname = self.get_dir()
def indir(name):
# Resolve a filename relative to the test's working directory.
return os.path.join(dirname, name)
create_file('a.cpp', r'''
#include <stdio.h>
static struct a {
a() {
puts("a: loaded");
}
} _;
''')
create_file('b.cpp', r'''
#include <stdio.h>
static struct b {
b() {
puts("b: loaded");
}
} _;
''')
self.build_dlfcn_lib('a.cpp')
shutil.move(indir('liblib.so'), indir('liba.so'))
self.build_dlfcn_lib('b.cpp')
shutil.move(indir('liblib.so'), indir('libb.so'))
self.set_setting('MAIN_MODULE')
self.set_setting('NODERAWFS')
self.clear_setting('SIDE_MODULE')
create_file('main.c', r'''
#include <dlfcn.h>
#include <assert.h>
#include <stddef.h>
int main() {
void *liba, *libb, *liba2, *libb2;
int err;
liba = dlopen("liba.so", RTLD_NOW);
assert(liba != NULL);
libb = dlopen("libb.so", RTLD_NOW);
assert(libb != NULL);
// Test that opening libb a second times gives the same handle
libb2 = dlopen("libb.so", RTLD_NOW);
assert(libb == libb2);
err = dlclose(liba);
assert(!err);
liba2 = dlopen("liba.so", RTLD_NOW);
assert(liba2 != libb);
return 0;
}
''')
self.do_runf('main.c', 'a: loaded\nb: loaded\n')
@needs_dylink
@needs_non_trapping_float_to_int
def test_dlfcn_feature_in_lib(self):
"""Verify a side module may use a wasm feature (nontrapping fptoint)."""
self.emcc_args.append('-mnontrapping-fptoint')
create_file('liblib.cpp', r'''
extern "C" int magic(float x) {
return __builtin_wasm_trunc_saturate_s_i32_f32(x);
}
''')
self.build_dlfcn_lib('liblib.cpp')
self.prep_dlfcn_main()
src = r'''
#include <dlfcn.h>
#include <stdio.h>
#include <stdlib.h>
typedef int (*fi)(float);
int main() {
void *lib_handle = dlopen("liblib.so", RTLD_NOW);
if (!lib_handle) {
puts(dlerror());
abort();
}
fi x = (fi)dlsym(lib_handle, "magic");
if (!x) {
puts(dlerror());
abort();
}
printf("float: %d.\n", x(42.99));
return 0;
}
'''
self.do_run(src, 'float: 42.\n')
def dylink_test(self, main, side, expected=None, header=None, force_c=False,
                main_module=2, **kwargs):
  """Convenience wrapper around dylink_testf that accepts source as strings.

  A string `main`/`side` is written to disk first (as C or C++ depending on
  `force_c`); list arguments (pre-built object/archive names) pass through
  untouched. An optional `header` string is written as header.h.
  """
  ext = '.c' if force_c else '.cpp'
  if not isinstance(side, list):
    side_path = 'liblib' + ext
    create_file(side_path, side)
    side = side_path
  if not isinstance(main, list):
    main_path = 'main' + ext
    create_file(main_path, main)
    main = main_path
  if header:
    create_file('header.h', header)
  return self.dylink_testf(main, side, expected, force_c, main_module=main_module, **kwargs)
def dylink_testf(self, main, side=None, expected=None, force_c=False, main_emcc_args=None,
                 main_module=2,
                 so_name='liblib.so',
                 need_reverse=True, **kwargs):
  """Build `side` as a SIDE_MODULE, link it into `main` as a MAIN_MODULE, and run.

  Same as dylink_test but takes source code as filenames on disc.

  main/side: filename, or list of pre-built inputs to link directly.
  expected: expected output; defaults to the contents of `<main>.out`.
  main_emcc_args: extra emcc flags for the main-module link only.
  need_reverse: when True, re-run with the roles of main and side flipped.
  """
  # NOTE: fixed a mutable default argument (main_emcc_args=[]); the shared
  # list could be accidentally mutated across calls.
  if main_emcc_args is None:
    main_emcc_args = []
  self.maybe_closure()
  old_args = self.emcc_args.copy()
  if not expected:
    outfile = shared.replace_suffix(main, '.out')
    expected = read_file(outfile)
  if not side:
    # Derive the side module source name from the main source: foo.c -> foo_side.c
    side, ext = os.path.splitext(main)
    side += '_side' + ext
  # side settings
  self.clear_setting('MAIN_MODULE')
  self.set_setting('SIDE_MODULE')
  side_suffix = 'wasm' if self.is_wasm() else 'js'
  if isinstance(side, list):
    out_file = 'liblib.' + side_suffix
    # side is just a library
    self.run_process([EMCC] + side + self.get_emcc_args() + ['-o', out_file])
  else:
    out_file = self.build(side, js_outfile=(side_suffix == 'js'))
  shutil.move(out_file, so_name)
  # main settings
  self.set_setting('MAIN_MODULE', main_module)
  self.clear_setting('SIDE_MODULE')
  self.emcc_args += main_emcc_args
  self.emcc_args.append(so_name)
  if force_c:
    self.emcc_args.append('-nostdlib++')
  if isinstance(main, list):
    # main is just a library
    try_delete('main.js')
    self.run_process([EMCC] + main + self.get_emcc_args() + ['-o', 'main.js'])
    self.do_run('main.js', expected, no_build=True, **kwargs)
  else:
    self.do_runf(main, expected, force_c=force_c, **kwargs)
  # Restore emcc_args so the flipped run below starts from a clean state.
  self.emcc_args = old_args
  if need_reverse:
    print('flip')
    # Test the reverse as well. There we flip the role of the side module and main module.
    # - We add --no-entry since the side module doesn't have a `main`
    self.dylink_testf(side, main, expected, force_c, main_emcc_args + ['--no-entry'],
                      need_reverse=False, **kwargs)
def do_basic_dylink_test(self, **kwargs):
"""Smallest end-to-end dylink test: main calls a function defined side-side."""
self.dylink_test(r'''
#include <stdio.h>
#include "header.h"
int main() {
printf("other says %d.\n", sidey());
return 0;
}
''', '''
#include "header.h"
int sidey() {
return 11;
}
''', 'other says 11.', 'int sidey();', force_c=True, **kwargs)
@needs_dylink
def test_dylink_basics(self):
"""Run the basic dylink test once (no flipped run) and check strict mode."""
self.do_basic_dylink_test(need_reverse=False)
self.verify_in_strict_mode('main.js')
@needs_dylink
def test_dylink_basics_no_modify(self):
"""Basic dylink with ERROR_ON_WASM_CHANGES_AFTER_LINK (requires -O0)."""
if self.is_optimizing():
self.skipTest('no modify mode only works with non-optimizing builds')
self.set_setting('WASM_BIGINT')
self.set_setting('ERROR_ON_WASM_CHANGES_AFTER_LINK')
self.do_basic_dylink_test()
@needs_dylink
def test_dylink_basics_lld_report_undefined(self):
"""Basic dylink with LLD_REPORT_UNDEFINED enabled."""
self.set_setting('LLD_REPORT_UNDEFINED')
self.do_basic_dylink_test()
@needs_dylink
def test_dylink_no_export(self):
"""Basic dylink with NO_DECLARE_ASM_MODULE_EXPORTS."""
self.set_setting('NO_DECLARE_ASM_MODULE_EXPORTS')
self.do_basic_dylink_test()
@needs_dylink
def test_dylink_memory_growth(self):
"""Basic dylink combined with ALLOW_MEMORY_GROWTH (wasm only)."""
if not self.is_wasm():
self.skipTest('wasm only')
self.set_setting('ALLOW_MEMORY_GROWTH')
self.do_basic_dylink_test()
@needs_dylink
def test_dylink_safe_heap(self):
"""Basic dylink combined with SAFE_HEAP checks."""
self.set_setting('SAFE_HEAP')
self.do_basic_dylink_test()
@needs_dylink
def test_dylink_function_pointer_equality(self):
"""The address of a function (puts) must compare equal across modules."""
self.dylink_test(r'''
#include <stdio.h>
#include "header.h"
int main() {
void* puts_side = get_address();
printf("main module address %p.\n", &puts);
printf("side module address address %p.\n", puts_side);
if (&puts == puts_side)
printf("success\n");
else
printf("failure\n");
return 0;
}
''', '''
#include <stdio.h>
#include "header.h"
void* get_address() {
return (void*)&puts;
}
''', 'success', header='void* get_address();', force_c=True)
@needs_dylink
def test_dylink_floats(self):
"""Float return values cross the module boundary correctly."""
self.dylink_test(r'''
#include <stdio.h>
extern float sidey();
int main() {
printf("other says %.2f.\n", sidey()+1);
return 0;
}
''', '''
float sidey() { return 11.5; }
''', 'other says 12.50', force_c=True)
@needs_dylink
def test_dylink_printf(self):
"""Both modules can use printf (shared libc state)."""
self.dylink_test(r'''
#include <stdio.h>
void sidey();
int main() {
printf("hello from main\n");
sidey();
return 0;
}
''', r'''
#include <stdio.h>
void sidey() {
printf("hello from side\n");
}
''', 'hello from main\nhello from side\n', force_c=True)
# Verify that a function pointer can be passed back and forth and invoked
# on both sides.
@needs_dylink
def test_dylink_funcpointer(self):
"""A function pointer passed to the side module is returned and re-invoked."""
self.dylink_test(
main=r'''
#include <stdio.h>
#include <assert.h>
#include "header.h"
intfunc sidey(intfunc f);
void a(int arg) { printf("hello from funcptr: %d\n", arg); }
int main() {
intfunc b = sidey(a);
assert(a == b);
b(0);
return 0;
}
''',
side='''
#include "header.h"
intfunc sidey(intfunc f) { f(1); return f; }
''',
expected='hello from funcptr: 1\nhello from funcptr: 0\n',
header='typedef void (*intfunc)(int );', force_c=True)
@needs_dylink
# test dynamic linking of a module with multiple function pointers, stored
# statically
def test_dylink_static_funcpointers(self):
"""Statically-initialized arrays of function pointers work across modules."""
self.dylink_test(
main=r'''
#include <stdio.h>
#include "header.h"
void areturn0() { printf("hello 0\n"); }
void areturn1() { printf("hello 1\n"); }
void areturn2() { printf("hello 2\n"); }
voidfunc func_ptrs[3] = { areturn0, areturn1, areturn2 };
int main(int argc, char **argv) {
sidey(func_ptrs[0]);
sidey(func_ptrs[1]);
sidey(func_ptrs[2]);
return 0;
}
''',
side='''
#include "header.h"
void sidey(voidfunc f) { f(); }
''',
expected='hello 0\nhello 1\nhello 2\n',
header='typedef void (*voidfunc)(); void sidey(voidfunc f);', force_c=True)
@needs_dylink
def test_dylink_funcpointers_wrapper(self):
"""Pointers to JS-library functions (emscripten_run_script) work cross-module."""
self.dylink_test(
main=r'''\
#include <stdio.h>
#include "header.h"
int main(int argc, char **argv) {
charfunc f1 = emscripten_run_script;
f1("console.log('one')");
charfunc f2 = get();
f2("console.log('two')");
return 0;
}
''',
side='''\
#include "header.h"
charfunc get() {
return emscripten_run_script;
}
''',
expected='one\ntwo\n',
header='''\
#include <emscripten.h>
typedef void (*charfunc)(const char*);
extern charfunc get();
''', force_c=True)
@needs_dylink
def test_dylink_static_funcpointer_float(self):
"""A statically-stored float-signature function pointer is callable on both sides."""
self.dylink_test(
main=r'''\
#include <stdio.h>
#include "header.h"
int sidey(floatfunc f);
float func1(float f) { printf("hello 1: %f\n", f); return 0; }
floatfunc f1 = &func1;
int main(int argc, char **argv) {
printf("got: %d\n", sidey(f1));
f1(12.34);
return 0;
}
''',
side='''\
#include "header.h"
int sidey(floatfunc f) { f(56.78); return 1; }
''',
expected='hello 1: 56.779999\ngot: 1\nhello 1: 12.340000\n',
header='typedef float (*floatfunc)(float);', force_c=True)
@needs_dylink
def test_missing_signatures(self):
"""MAIN_MODULE=1 build that takes addresses of JS-library functions."""
create_file('test_sig.c', r'''#include <emscripten.h>
int main() {
return 0 == ( (long)&emscripten_run_script_string +
(long)&emscripten_run_script );
}''')
self.set_setting('MAIN_MODULE', 1)
# also test main module with 4GB of memory. we need to emit a "maximum"
# clause then, even though 4GB is the maximum; see
# https://github.com/emscripten-core/emscripten/issues/14130
self.set_setting('ALLOW_MEMORY_GROWTH', '1')
self.set_setting('MAXIMUM_MEMORY', '4GB')
self.do_runf('test_sig.c', '')
@needs_dylink
def test_dylink_global_init(self):
"""A C++ static constructor in the main module runs when a side module is linked."""
self.dylink_test(r'''
#include <stdio.h>
struct Class {
Class() { printf("a new Class\n"); }
};
static Class c;
int main() {
return 0;
}
''', r'''
void nothing() {}
''', 'a new Class\n')
@needs_dylink
def test_dylink_global_inits(self):
"""Static constructors in both modules run (in either order); also run with ASSERTIONS=2."""
def test():
self.dylink_test(header=r'''
#include <stdio.h>
struct Class {
Class(const char *name) { printf("new %s\n", name); }
};
''', main=r'''
#include "header.h"
static Class c("main");
int main() {
return 0;
}
''', side=r'''
#include "header.h"
static Class c("side");
''', expected=['new main\nnew side\n', 'new side\nnew main\n'])
test()
print('check warnings')
self.set_setting('ASSERTIONS', 2)
test()
# TODO: this in wasm
# full = self.run_js('src.js')
# self.assertNotContained('already exists', full)
@needs_dylink
def test_dylink_i64(self):
"""An i64 return value crosses the module boundary."""
self.dylink_test(r'''
#include <stdio.h>
#include <stdint.h>
extern int64_t sidey();
int main() {
printf("other says %lld.\n", sidey());
return 0;
}
''', '''
#include <stdint.h>
int64_t sidey() {
return 42;
}
''', 'other says 42.', force_c=True)
@all_engines
@needs_dylink
def test_dylink_i64_b(self):
"""i64 values through function pointers, plus i64 arithmetic in the side module."""
self.dylink_test(r'''
#include <stdio.h>
#include <stdint.h>
extern int64_t sidey();
int64_t testAdd(int64_t a) {
return a + 1;
}
int64_t testAddB(int a) {
return a + 1;
}
typedef int64_t (*testAddHandler)(int64_t);
testAddHandler h = &testAdd;
typedef int64_t (*testAddBHandler)(int);
testAddBHandler hb = &testAddB;
int main() {
printf("other says %lld.\n", sidey());
int64_t r = h(42);
printf("my fp says: %lld.\n", r);
int64_t rb = hb(42);
printf("my second fp says: %lld.\n", r);
}
''', '''
#include <stdint.h>
int64_t sidey() {
volatile int64_t x = 0x12345678abcdef12LL;
x += x % 17;
x = 18 - x;
return x;
}
''', 'other says -1311768467750121224.\nmy fp says: 43.\nmy second fp says: 43.', force_c=True)
@needs_dylink
@also_with_wasm_bigint
def test_dylink_i64_c(self):
"""i32- and i64-returning function pointers, internal and cross-module."""
self.dylink_test(r'''
#include <stdio.h>
#include <inttypes.h>
#include "header.h"
typedef int32_t (*fp_type_32)(int32_t, int32_t, int32_t);
typedef int64_t (*fp_type_64)(int32_t, int32_t, int32_t);
int32_t internal_function_ret_32(int32_t i, int32_t j, int32_t k) {
return 32;
}
int64_t internal_function_ret_64(int32_t i, int32_t j, int32_t k) {
return 64;
}
int main() {
fp_type_32 fp32_internal = &internal_function_ret_32;
fp_type_32 fp32_external = &function_ret_32;
fp_type_64 fp64_external = &function_ret_64;
fp_type_64 fp64_internal = &internal_function_ret_64;
int32_t ires32 = fp32_internal(0,0,0);
printf("res32 - internal %d\n",ires32);
int32_t eres32 = fp32_external(0,0,0);
printf("res32 - external %d\n",eres32);
int64_t ires64 = fp64_internal(0,0,0);
printf("res64 - internal %" PRId64 "\n",ires64);
int64_t eres64 = fp64_external(0,0,0);
printf("res64 - external %" PRId64 "\n",eres64);
return 0;
}
''', '''
#include "header.h"
int32_t function_ret_32(int32_t i, int32_t j, int32_t k) {
return 32;
}
int64_t function_ret_64(int32_t i, int32_t j, int32_t k) {
return 64;
}
''', '''res32 - internal 32
res32 - external 32
res64 - internal 64
res64 - external 64\n''', header='''
#include <emscripten.h>
#include <stdint.h>
EMSCRIPTEN_KEEPALIVE int32_t function_ret_32(int32_t i, int32_t j, int32_t k);
EMSCRIPTEN_KEEPALIVE int64_t function_ret_64(int32_t i, int32_t j, int32_t k);
''', force_c=True)
@needs_dylink
@also_with_wasm_bigint
def test_dylink_i64_invoke(self):
"""i64 args/returns through invoke wrappers (exceptions enabled in the side module)."""
self.set_setting('DISABLE_EXCEPTION_CATCHING', 0)
self.dylink_test(r'''\
#include <stdio.h>
#include <stdint.h>
extern "C" int64_t sidey(int64_t arg);
int main(int argc, char *argv[]) {
int64_t temp = 42;
printf("got %lld\n", sidey(temp));
return 0;
}''', r'''\
#include <stdint.h>
#include <stdio.h>
#include <emscripten.h>
extern "C" {
EMSCRIPTEN_KEEPALIVE int64_t do_call(int64_t arg) {
if (arg == 0) {
throw;
}
return 2 * arg;
}
int64_t sidey(int64_t arg) {
try {
return do_call(arg);
} catch(...) {
return 0;
}
}
}''', 'got 84', need_reverse=False)
@needs_dylink
def test_dylink_class(self):
"""A C++ class declared in a shared header, with its constructor defined side-side."""
self.dylink_test(header=r'''
#include <stdio.h>
struct Class {
Class(const char *name);
};
''', main=r'''
#include "header.h"
int main() {
Class c("main");
return 0;
}
''', side=r'''
#include "header.h"
Class::Class(const char *name) { printf("new %s\n", name); }
''', expected=['new main\n'])
@needs_dylink
def test_dylink_global_var(self):
"""A global variable defined in the side module is readable from main."""
self.dylink_test(main=r'''
#include <stdio.h>
extern int x;
int main() {
printf("extern is %d.\n", x);
return 0;
}
''', side=r'''
int x = 123;
''', expected=['extern is 123.\n'], force_c=True)
@needs_dylink
def test_dylink_global_var_modded(self):
"""A side-module global modified by a side-module static constructor."""
self.dylink_test(main=r'''
#include <stdio.h>
extern int x;
int main() {
printf("extern is %d.\n", x);
return 0;
}
''', side=r'''
int x = 123;
struct Initter {
Initter() { x = 456; }
};
Initter initter;
''', expected=['extern is 456.\n'])
@needs_dylink
def test_dylink_stdlib(self):
"""libc/libm usage (malloc, strcpy, pow) spanning the module boundary."""
self.dylink_test(header=r'''
#include <math.h>
#include <stdlib.h>
#include <string.h>
char *side(const char *data);
double pow_two(double x);
''', main=r'''
#include <stdio.h>
#include "header.h"
int main() {
char *temp = side("hello through side\n");
char *ret = (char*)malloc(strlen(temp)+1);
strcpy(ret, temp);
temp[1] = 'x';
puts(ret);
printf("pow_two: %d.\n", (int)pow_two(5.9));
return 0;
}
''', side=r'''
#include "header.h"
char *side(const char *data) {
char *ret = (char*)malloc(strlen(data)+1);
strcpy(ret, data);
return ret;
}
double pow_two(double x) {
return pow(2, x);
}
''', expected=['hello through side\n\npow_two: 59.'], force_c=True)
@needs_dylink
def test_dylink_jslib(self):
"""A JS-library function is callable from both main and side modules."""
create_file('lib.js', r'''
mergeInto(LibraryManager.library, {
test_lib_func: function(x) {
return x + 17.2;
}
});
''')
self.dylink_test(header=r'''
extern double test_lib_func(int input);
''', main=r'''
#include <stdio.h>
#include "header.h"
extern double sidey();
int main2() { return 11; }
int main() {
int input = sidey();
double temp = test_lib_func(input);
printf("other says %.2f\n", temp);
printf("more: %.5f, %d\n", temp, input);
return 0;
}
''', side=r'''
#include <stdio.h>
#include "header.h"
extern int main2();
double sidey() {
int temp = main2();
printf("main2 sed: %d\n", temp);
printf("main2 sed: %u, %c\n", temp, temp/2);
return test_lib_func(temp);
}
''', expected='other says 45.2', main_emcc_args=['--js-library', 'lib.js'], force_c=True)
@needs_dylink
def test_dylink_many_postsets(self):
"""Stress relocation postsets with a 1234-entry static function-pointer table."""
NUM = 1234
self.dylink_test(header=r'''
#include <stdio.h>
typedef void (*voidfunc)();
static void simple() {
printf("simple.\n");
}
static volatile voidfunc funcs[''' + str(NUM) + '] = { ' + ','.join(['simple'] * NUM) + r''' };
static void test() {
volatile int i = ''' + str(NUM - 1) + r''';
funcs[i]();
i = 0;
funcs[i]();
}
extern void more();
''', main=r'''
#include "header.h"
int main() {
test();
more();
return 0;
}
''', side=r'''
#include "header.h"
void more() {
test();
}
''', expected=['simple.\nsimple.\nsimple.\nsimple.\n'], force_c=True)
@needs_dylink
def test_dylink_postsets_chunking(self):
"""Many locally-initialized globals plus one cross-module-initialized global."""
self.dylink_test(header=r'''
extern int global_var;
''', main=r'''
#include <stdio.h>
#include "header.h"
// prepare 99 global variable with local initializer
static int p = 1;
#define P(x) __attribute__((used)) int *padding##x = &p;
P(01) P(02) P(03) P(04) P(05) P(06) P(07) P(08) P(09) P(10)
P(11) P(12) P(13) P(14) P(15) P(16) P(17) P(18) P(19) P(20)
P(21) P(22) P(23) P(24) P(25) P(26) P(27) P(28) P(29) P(30)
P(31) P(32) P(33) P(34) P(35) P(36) P(37) P(38) P(39) P(40)
P(41) P(42) P(43) P(44) P(45) P(46) P(47) P(48) P(49) P(50)
P(51) P(52) P(53) P(54) P(55) P(56) P(57) P(58) P(59) P(60)
P(61) P(62) P(63) P(64) P(65) P(66) P(67) P(68) P(69) P(70)
P(71) P(72) P(73) P(74) P(75) P(76) P(77) P(78) P(79) P(80)
P(81) P(82) P(83) P(84) P(85) P(86) P(87) P(88) P(89) P(90)
P(91) P(92) P(93) P(94) P(95) P(96) P(97) P(98) P(99)
// prepare global variable with global initializer
int *ptr = &global_var;
int main(int argc, char *argv[]) {
printf("%d\n", *ptr);
}
''', side=r'''
#include "header.h"
int global_var = 12345;
''', expected=['12345\n'], force_c=True)
@needs_dylink
@parameterized({
'libcxx': ('libc,libc++,libmalloc,libc++abi',),
'all': ('1',),
'missing': ('libc,libmalloc', False, False, False),
'missing_assertions': ('libc,libmalloc', False, False, True),
})
def test_dylink_syslibs(self, syslibs, expect_pass=True, need_reverse=True, assertions=True):
"""Forced system-library combinations; 'missing' variants expect a build failure.

When the build is expected to fail, the EMCC_FORCE_STDLIBS hint message
must appear only when ASSERTIONS is enabled.
"""
# one module uses libcxx, need to force its inclusion when it isn't the main
self.emcc_args.append('-Wno-deprecated')
self.set_setting('WARN_ON_UNDEFINED_SYMBOLS', 0)
if assertions is not None:
self.set_setting('ASSERTIONS', int(assertions))
passed = True
try:
with env_modify({'EMCC_FORCE_STDLIBS': syslibs, 'EMCC_ONLY_FORCED_STDLIBS': '1'}):
self.dylink_test(main=r'''
void side();
int main() {
side();
return 0;
}
''', side=r'''
#include <iostream>
void side() { std::cout << "cout hello from side\n"; }
''', expected=['cout hello from side\n'], need_reverse=need_reverse, main_module=1)
except Exception as e:
if expect_pass:
raise
print('(seeing expected fail)')
passed = False
assertion = 'build the MAIN_MODULE with EMCC_FORCE_STDLIBS=1 in the environment'
if self.get_setting('ASSERTIONS'):
self.assertContained(assertion, str(e))
else:
self.assertNotContained(assertion, str(e))
assert passed == expect_pass, ['saw', passed, 'but expected', expect_pass]
@needs_dylink
@with_env_modify({'EMCC_FORCE_STDLIBS': 'libc++'})
def test_dylink_iostream(self):
"""std::string and iostream usage across the module boundary."""
self.dylink_test(header=r'''
#include <iostream>
#include <string>
std::string side();
''', main=r'''
#include "header.h"
int main() {
std::cout << "hello from main " << side() << std::endl;
return 0;
}
''', side=r'''
#include "header.h"
std::string side() { return "and hello from side"; }
''', expected=['hello from main and hello from side\n'])
@needs_dylink
def test_dylink_dynamic_cast(self): # issue 3465
"""dynamic_cast on an object whose vtable lives in the main module (issue 3465)."""
self.dylink_test(header=r'''
class Base {
public:
virtual void printName();
};
class Derived : public Base {
public:
void printName();
};
''', main=r'''
#include "header.h"
#include <iostream>
using namespace std;
int main() {
cout << "starting main" << endl;
Base *base = new Base();
Base *derived = new Derived();
base->printName();
derived->printName();
if (dynamic_cast<Derived*>(derived)) {
cout << "OK" << endl;
} else {
cout << "KO" << endl;
}
return 0;
}
''', side=r'''
#include "header.h"
#include <iostream>
using namespace std;
void Base::printName() {
cout << "Base" << endl;
}
void Derived::printName() {
cout << "Derived" << endl;
}
''', expected=['starting main\nBase\nDerived\nOK'])
@with_both_exception_handling
@needs_dylink
def test_dylink_raii_exceptions(self):
"""Invoke-wrapped indirect call with a signature that exists only in the side module."""
self.dylink_test(main=r'''
#include <stdio.h>
extern int side();
int main() {
printf("from side: %d.\n", side());
}
''', side=r'''
#include <stdio.h>
typedef int (*ifdi)(float, double, int);
int func_with_special_sig(float a, double b, int c) {
printf("special %f %f %d\n", a, b, c);
return 1337;
}
struct DestructorCaller {
~DestructorCaller() { printf("destroy\n"); }
};
int side() {
// d has a destructor that must be called on function
// exit, which means an invoke will be used for the
// indirect call here - and the signature of that call
// is special and not present in the main module, so
// it must be generated for the side module.
DestructorCaller d;
volatile ifdi p = func_with_special_sig;
return p(2.18281, 3.14159, 42);
}
''', expected=['special 2.182810 3.141590 42\ndestroy\nfrom side: 1337.\n'])
@needs_dylink
@disabled('https://github.com/emscripten-core/emscripten/issues/12815')
def test_dylink_hyper_dupe(self):
"""(Disabled) Three-module linking with duplicate symbols; duplicates must warn, not override."""
self.set_setting('INITIAL_MEMORY', '64mb')
self.set_setting('ASSERTIONS', 2)
# test hyper-dynamic linking, and test duplicate warnings
create_file('third.cpp', r'''
#include <stdio.h>
int sidef() { return 36; }
int sideg = 49;
int bsidef() { return 536; }
extern void only_in_second_1(int x);
extern int second_to_third;
int third_to_second = 1337;
void only_in_third_0() {
// note we access our own globals directly, so
// it doesn't matter that overriding failed
printf("only_in_third_0: %d, %d, %d\n", sidef(), sideg, second_to_third);
only_in_second_1(2112);
}
void only_in_third_1(int x) {
printf("only_in_third_1: %d, %d, %d, %d\n", sidef(), sideg, second_to_third, x);
}
''')
if self.is_wasm():
libname = 'third.wasm'
else:
libname = 'third.js'
self.run_process([EMCC, 'third.cpp', '-o', libname, '-s', 'SIDE_MODULE'] + self.get_emcc_args())
self.dylink_test(main=r'''
#include <stdio.h>
#include <emscripten.h>
extern int sidef();
extern int sideg;
extern int bsidef();
extern int bsideg;
extern void only_in_second_0();
extern void only_in_third_0();
int main() {
EM_ASM({
loadDynamicLibrary('%s'); // hyper-dynamic! works at least for functions (and consts not used in same block)
});
printf("sidef: %%d, sideg: %%d.\n", sidef(), sideg);
printf("bsidef: %%d.\n", bsidef());
only_in_second_0();
only_in_third_0();
}
''' % libname,
side=r'''
#include <stdio.h>
int sidef() { return 10; } // third will try to override these, but fail!
int sideg = 20;
extern void only_in_third_1(int x);
int second_to_third = 500;
extern int third_to_second;
void only_in_second_0() {
printf("only_in_second_0: %d, %d, %d\n", sidef(), sideg, third_to_second);
only_in_third_1(1221);
}
void only_in_second_1(int x) {
printf("only_in_second_1: %d, %d, %d, %d\n", sidef(), sideg, third_to_second, x);
}
''',
expected=['sidef: 10, sideg: 20.\nbsidef: 536.\nonly_in_second_0: 10, 20, 1337\nonly_in_third_1: 36, 49, 500, 1221\nonly_in_third_0: 36, 49, 500\nonly_in_second_1: 10, 20, 1337, 2112\n'],
# in wasm, we can't flip as the side would have an EM_ASM, which we don't support yet TODO
need_reverse=not self.is_wasm())
print('check warnings')
full = self.run_js('src.js')
self.assertContained("warning: symbol '_sideg' from '%s' already exists" % libname, full)
@needs_dylink
def test_dylink_load_compiled_side_module(self):
"""Load a side module at runtime from a pre-compiled WebAssembly.Module."""
self.set_setting('FORCE_FILESYSTEM')
self.emcc_args.append('-lnodefs.js')
self.set_setting('INITIAL_MEMORY', '64mb')
# This test loads the module at runtime with loadWebAssemblyModule so we
# want to suppress the automatic loading that would otherwise be done at
# startup.
self.set_setting('NO_AUTOLOAD_DYLIBS')
self.dylink_test(main=r'''
#include <stdio.h>
#include <emscripten.h>
extern int sidef();
int main() {
EM_ASM({
FS.mkdir('/working');
FS.mount(NODEFS,{ root: '.' }, '/working');
var libData = FS.readFile('/working/liblib.so', {encoding: 'binary'});
if (!(libData instanceof Uint8Array)) {
libData = new Uint8Array(libData);
}
var compiledModule = new WebAssembly.Module(libData);
var sideExports = loadWebAssemblyModule(compiledModule, {loadAsync: false, nodelete: true});
mergeLibSymbols(sideExports, 'liblib.so');
});
printf("sidef: %d.\n", sidef());
}
''',
side=r'''
#include <stdio.h>
int sidef() { return 10; }
''',
expected=['sidef: 10'],
# in wasm, we can't flip as the side would have an EM_ASM, which we don't support yet TODO
need_reverse=not self.is_wasm())
@needs_dylink
def test_dylink_dso_needed(self):
"""Run the shared DSO-dependency test via the common _test_dylink_dso_needed helper."""
def do_run(src, expected_output, emcc_args=[]):
# Wrap the provided source with a main() entry point and run it.
create_file('main.c', src + 'int main() { return test_main(); }')
self.do_runf('main.c', expected_output, emcc_args=emcc_args)
self._test_dylink_dso_needed(do_run)
@needs_dylink
def test_dylink_dot_a(self):
"""All objects inside a .a must be force-included when linked into a side module."""
# .a linking must force all .o files inside it, when in a shared module
create_file('third.c', 'int sidef() { return 36; }')
create_file('fourth.c', 'int sideg() { return 17; }')
self.run_process([EMCC, '-fPIC', '-c', 'third.c', '-o', 'third.o'] + self.get_emcc_args())
self.run_process([EMCC, '-fPIC', '-c', 'fourth.c', '-o', 'fourth.o'] + self.get_emcc_args())
self.run_process([EMAR, 'rc', 'libfourth.a', 'fourth.o'])
self.dylink_test(main=r'''
#include <stdio.h>
#include <emscripten.h>
int sidef();
int sideg();
int main() {
printf("sidef: %d, sideg: %d.\n", sidef(), sideg());
}
''',
# contents of libfourth.a must be included, even if they aren't referred to!
side=['libfourth.a', 'third.o'],
expected=['sidef: 36, sideg: 17.\n'], force_c=True)
@needs_dylink
def test_dylink_spaghetti(self):
"""Mutually-referencing globals and static constructors across modules (either init order is accepted)."""
self.dylink_test(main=r'''
#include <stdio.h>
int main_x = 72;
extern int side_x;
int adjust = side_x + 10;
int *ptr = &side_x;
struct Class {
Class() {
printf("main init sees %d, %d, %d.\n", adjust, *ptr, main_x);
}
};
Class cm;
int main() {
printf("main main sees %d, %d, %d.\n", adjust, *ptr, main_x);
return 0;
}
''', side=r'''
#include <stdio.h>
extern int main_x;
int side_x = -534;
int adjust2 = main_x + 10;
int *ptr2 = &main_x;
struct SideClass {
SideClass() {
printf("side init sees %d, %d, %d.\n", adjust2, *ptr2, side_x);
}
};
SideClass cs;
''', expected=['''\
side init sees 82, 72, -534.
main init sees -524, -534, 72.
main main sees -524, -534, 72.
''', '''\
main init sees -524, -534, 72.
side init sees 82, 72, -534.
main main sees -524, -534, 72.
'''])
@needs_make('mingw32-make')
@needs_dylink
def test_dylink_zlib(self):
"""Link the real zlib library as the side module and run its example program."""
self.emcc_args += ['-Wno-shift-negative-value', '-I' + test_file('third_party/zlib')]
self.set_setting('RELOCATABLE')
zlib_archive = self.get_zlib_library()
self.dylink_test(main=read_file(test_file('third_party/zlib/example.c')),
side=zlib_archive,
expected=read_file(test_file('core/test_zlib.out')),
force_c=True)
# @needs_dylink
# def test_dylink_bullet(self):
# self.emcc_args += ['-I' + test_file('bullet/src')]
# side = self.get_bullet_library(self, True)
# self.dylink_test(main=read_file(test_file('bullet/Demos/HelloWorld/HelloWorld.cpp')),
# side=side,
# expected=[read_file(test_file('bullet/output.txt')), # different roundings
# read_file(test_file('bullet/output2.txt')),
# read_file(test_file('bullet/output3.txt'))])
@needs_dylink
def test_dylink_rtti(self):
"""RTTI/typeinfo COMDAT symbols must unify across modules so dynamic_cast works."""
# Verify that objects created in one module and be dynamic_cast<> correctly
# in the another module.
# Each module will define its own copy of certain COMDAT symbols such as
# each classs's typeinfo, but at runtime they should both use the same one.
# Use LLD_REPORT_UNDEFINED to test that it works as expected with weak/COMDAT
# symbols.
self.set_setting('LLD_REPORT_UNDEFINED')
header = '''
#include <cstddef>
class Foo {
public:
virtual ~Foo() {}
};
class Bar : public Foo {
public:
virtual ~Bar() {}
};
bool is_bar(Foo* foo);
'''
main = '''
#include <stdio.h>
#include "header.h"
int main() {
Bar bar;
if (!is_bar(&bar)) {
puts("failure");
return 1;
}
puts("success");
return 0;
}
'''
side = '''
#include "header.h"
bool is_bar(Foo* foo) {
return dynamic_cast<Bar*>(foo) != nullptr;
}
'''
self.dylink_test(main=main,
side=side,
header=header,
expected='success')
@needs_dylink
def test_dylink_argv_argc(self):
"""argc/argv reach main() when main() is defined in the side module."""
# Verify that argc and argv can be sent to main when main is in a side module
self.emcc_args += ['--extern-pre-js', 'pre.js']
create_file('pre.js', '''
var Module = { arguments: ['hello', 'world!'] }
''')
self.dylink_test(
'', # main module is empty.
r'''
#include <stdio.h>
int main(int argc, char const *argv[]) {
printf("%d ", argc);
for (int i=1; i<argc; i++) printf("%s ", argv[i]);
printf("\n");
return 0;
}
''',
expected='3 hello world!',
need_reverse=False)
@needs_dylink
def test_dylink_weak(self):
"""Weak symbols defined in both modules: only one definition is used at runtime."""
# Verify that weakly defined symbols can be defined in both side module and main
# module but that only one gets used at runtime.
self.dylink_testf(test_file('core/test_dylink_weak.c'), need_reverse=False)
@node_pthreads
@needs_dylink
def test_dylink_tls(self):
"""Thread-local storage across dynamically linked modules."""
self.emcc_args.append('-Wno-experimental')
self.dylink_testf(test_file('core/test_dylink_tls.c'),
need_reverse=False)
@node_pthreads
@needs_dylink
def test_dylink_tls_export(self):
"""Exporting TLS variables from a side module."""
self.emcc_args.append('-Wno-experimental')
self.dylink_testf(test_file('core/test_dylink_tls_export.c'),
need_reverse=False)
def test_random(self):
"""srandom/random produce the expected deterministic sequence for a fixed seed."""
src = r'''#include <stdlib.h>
#include <stdio.h>
int main()
{
srandom(0xdeadbeef);
printf("%ld\n", random());
}
'''
self.do_run(src, '956867869')
def test_rand(self):
"""srand/rand and rand_r determinism, plus bit-distribution sanity checks."""
src = r'''#include <stdlib.h>
#include <stdio.h>
#include <assert.h>
int main()
{
// we need RAND_MAX to be a bitmask (power of 2 minus 1). this assertions guarantees
// if RAND_MAX changes the test failure will focus attention on that issue here.
assert(RAND_MAX == 0x7fffffff);
srand(0xdeadbeef);
for(int i = 0; i < 10; ++i)
printf("%d\n", rand());
unsigned int seed = 0xdeadbeef;
for(int i = 0; i < 10; ++i)
printf("%d\n", rand_r(&seed));
bool haveEvenAndOdd = true;
for(int i = 1; i <= 30; ++i)
{
int mask = 1 << i;
if (mask > RAND_MAX) break;
bool haveEven = false;
bool haveOdd = false;
for(int j = 0; j < 1000 && (!haveEven || !haveOdd); ++j)
{
if ((rand() & mask) == 0)
haveEven = true;
else
haveOdd = true;
}
haveEvenAndOdd = haveEvenAndOdd && haveEven && haveOdd;
}
if (haveEvenAndOdd)
printf("Have even and odd!\n");
return 0;
}
'''
expected = '''490242850
2074599277
1480056542
1912638067
931112055
2110392489
2053422194
1614832492
216117595
174823244
760368382
602359081
1121118963
1291018924
1608306807
352705809
958258461
1182561381
114276303
1481323674
Have even and odd!
'''
self.do_run(src, expected)
  # String-to-number / tokenization conversions from stdlib.h and string.h.
  def test_strtod(self):
    # string -> double conversion
    self.do_core_test('test_strtod.c')
  def test_strtold(self):
    # string -> long double conversion
    self.do_core_test('test_strtold.c')
  def test_strtok(self):
    # tokenization with strtok
    self.do_core_test('test_strtok.c')
  def test_strtol(self):
    # string -> long conversion (including base handling)
    self.do_core_test('test_strtol.c')
  def test_transtrcase(self):
    # upper/lower case transformation of strings
    self.do_core_test('test_transtrcase.c')
@no_wasm2js('very slow to compile')
@is_slow_test
def test_printf(self):
# needs to flush stdio streams
self.emcc_args.append('-Wno-format')
self.set_setting('EXIT_RUNTIME')
self.do_run_in_out_file_test('printf/test.c')
def test_printf_2(self):
self.do_core_test('test_printf_2.c')
def test_printf_float(self):
self.do_run_in_out_file_test('printf/test_float.c')
def test_printf_octal(self):
self.do_run_in_out_file_test('printf/test_octal.c')
def test_printf_macros(self):
self.do_core_test('test_printf_macros.c')
def test_vprintf(self):
self.do_core_test('test_vprintf.c')
def test_vsnprintf(self):
self.do_core_test('test_vsnprintf.c')
def test_printf_more(self):
self.do_core_test('test_printf_more.c')
def test_perrar(self):
self.do_core_test('test_perrar.c')
def test_atoX(self):
self.do_core_test('test_atoX.c')
def test_strstr(self):
self.do_core_test('test_strstr.c')
def test_fnmatch(self):
self.do_core_test('test_fnmatch.cpp')
def test_sscanf(self):
self.do_core_test('test_sscanf.c')
def test_sscanf_2(self):
# doubles
for ftype in ['float', 'double']:
src = r'''
#include <stdio.h>
int main(){
char strval1[] = "1.2345678901";
char strval2[] = "1.23456789e5";
char strval3[] = "1.23456789E5";
char strval4[] = "1.2345678e-5";
char strval5[] = "1.2345678E-5";
double dblval = 1.2345678901;
double tstval;
sscanf(strval1, "%lf", &tstval);
if(dblval != tstval) printf("FAIL: Values are not equal: %lf %lf\n", dblval, tstval);
else printf("Pass: %lf %lf\n", tstval, dblval);
sscanf(strval2, "%lf", &tstval);
dblval = 123456.789;
if(dblval != tstval) printf("FAIL: Values are not equal: %lf %lf\n", dblval, tstval);
else printf("Pass: %lf %lf\n", tstval, dblval);
sscanf(strval3, "%lf", &tstval);
dblval = 123456.789;
if(dblval != tstval) printf("FAIL: Values are not equal: %lf %lf\n", dblval, tstval);
else printf("Pass: %lf %lf\n", tstval, dblval);
sscanf(strval4, "%lf", &tstval);
dblval = 0.000012345678;
if(dblval != tstval) printf("FAIL: Values are not equal: %lf %lf\n", dblval, tstval);
else printf("Pass: %lf %lf\n", tstval, dblval);
sscanf(strval5, "%lf", &tstval);
dblval = 0.000012345678;
if(dblval != tstval) printf("FAIL: Values are not equal: %lf %lf\n", dblval, tstval);
else printf("Pass: %lf %lf\n", tstval, dblval);
return 0;
}
'''
if ftype == 'float':
self.do_run(src.replace('%lf', '%f').replace('double', 'float'), '''Pass: 1.234568 1.234568
Pass: 123456.789062 123456.789062
Pass: 123456.789062 123456.789062
Pass: 0.000012 0.000012
Pass: 0.000012 0.000012''')
else:
self.do_run(src, '''Pass: 1.234568 1.234568
Pass: 123456.789000 123456.789000
Pass: 123456.789000 123456.789000
Pass: 0.000012 0.000012
Pass: 0.000012 0.000012''')
def test_sscanf_n(self):
self.do_core_test('test_sscanf_n.c')
def test_sscanf_whitespace(self):
# needs to flush stdio streams
self.set_setting('EXIT_RUNTIME')
self.do_core_test('test_sscanf_whitespace.c')
def test_sscanf_other_whitespace(self):
# use i16s in printf
self.set_setting('SAFE_HEAP', 0)
# needs to flush stdio streams
self.set_setting('EXIT_RUNTIME')
self.do_core_test('test_sscanf_other_whitespace.c')
def test_sscanf_3(self):
self.do_core_test('test_sscanf_3.c')
def test_sscanf_4(self):
self.do_core_test('test_sscanf_4.c')
def test_sscanf_5(self):
self.do_core_test('test_sscanf_5.c')
def test_sscanf_6(self):
self.do_core_test('test_sscanf_6.c')
def test_sscanf_skip(self):
self.do_core_test('test_sscanf_skip.c')
def test_sscanf_caps(self):
self.do_core_test('test_sscanf_caps.c')
def test_sscanf_hex(self):
self.do_core_test('test_sscanf_hex.cpp')
def test_sscanf_float(self):
self.do_core_test('test_sscanf_float.c')
def test_langinfo(self):
self.do_core_test('test_langinfo.c')
  def test_files(self):
    """Broad FS smoke test: lazy files, FS_createDataFile export, an stdin
    callback, seeking, fscanf and /dev/null writes, optionally under closure.
    """
    self.banned_js_engines = [config.SPIDERMONKEY_ENGINE] # closure can generate variables called 'gc', which pick up js shell stuff
    if self.maybe_closure(): # Use closure here, to test we don't break FS stuff
      self.emcc_args = [x for x in self.emcc_args if x != '-g'] # ensure we test --closure 1 --memory-init-file 1 (-g would disable closure)
    elif '-O3' in self.emcc_args and not self.is_wasm():
      print('closure 2')
      # NOTE(review): the flags appended on the next line never take effect
      # while the skipTest below remains — dead code until re-enabled.
      self.emcc_args += ['--closure', '2'] # Use closure 2 here for some additional coverage
      return self.skipTest('TODO: currently skipped because CI runs out of memory running Closure in this test!')
    self.emcc_args += ['--pre-js', 'pre.js']
    self.set_setting('FORCE_FILESYSTEM')
    print('base', self.emcc_args)
    create_file('pre.js', '''
      /** @suppress{checkTypes}*/
      Module = {
        'noFSInit': true,
        'preRun': function() {
          FS.createLazyFile('/', 'test.file', 'test.file', true, false);
          // Test FS_* exporting
          Module['FS_createDataFile']('/', 'somefile.binary', [100, 200, 50, 25, 10, 77, 123], true, false, false); // 200 becomes -56, since signed chars are used in memory
          var test_files_input = 'hi there!';
          var test_files_input_index = 0;
          FS.init(function() {
            return test_files_input.charCodeAt(test_files_input_index++) || null;
          });
        }
      };
    ''')
    create_file('test.file', 'some data')
    mem_file = 'files.js.mem'
    try_delete(mem_file)
    def clean(out):
      # Strip configuration-dependent noise lines before comparing output.
      return '\n'.join([line for line in out.split('\n') if 'binaryen' not in line and 'wasm' not in line and 'so not running' not in line])
    # Two accepted outputs: stdout/stderr interleaving of 'texte' differs
    # between configurations.
    self.do_runf(test_file('files.cpp'), ('size: 7\ndata: 100,-56,50,25,10,77,123\nloop: 100 -56 50 25 10 77 123 \ninput:hi there!\ntexto\n$\n5 : 10,30,20,11,88\nother=some data.\nseeked=me da.\nseeked=ata.\nseeked=ta.\nfscanfed: 10 - hello\n5 bytes to dev/null: 5\nok.\ntexte\n', 'size: 7\ndata: 100,-56,50,25,10,77,123\nloop: 100 -56 50 25 10 77 123 \ninput:hi there!\ntexto\ntexte\n$\n5 : 10,30,20,11,88\nother=some data.\nseeked=me da.\nseeked=ata.\nseeked=ta.\nfscanfed: 10 - hello\n5 bytes to dev/null: 5\nok.\n'),
                 output_nicerizer=clean)
    if self.uses_memory_init_file():
      self.assertExists(mem_file)
  def test_files_m(self):
    # Test for Module.stdin etc.
    # Drives stdin from a JS callback (popping Module.data) and captures
    # stdout through Module.stdout; several output orderings are accepted
    # because stderr/stdout interleaving differs between engines.
    # needs to flush stdio streams
    self.set_setting('EXIT_RUNTIME')
    create_file('pre.js', '''
    Module = {
      data: [10, 20, 40, 30],
      stdin: function() { return Module.data.pop() || null },
      stdout: function(x) { out('got: ' + x) }
    };
    ''')
    self.emcc_args += ['--pre-js', 'pre.js']
    # NOTE(review): the C program stores fgetc() in a plain char before
    # comparing to EOF — fine for this input, but a 0xFF byte would falsely
    # terminate the loop on signed-char targets.
    src = r'''
      #include <stdio.h>
      #include <unistd.h>
      int main () {
        char c;
        fprintf(stderr, "isatty? %d,%d,%d\n", isatty(fileno(stdin)), isatty(fileno(stdout)), isatty(fileno(stderr)));
        while ((c = fgetc(stdin)) != EOF) {
          putc(c+5, stdout);
        }
        return 0;
      }
      '''
    def clean(out):
      # Drop engine/toolchain warning noise before comparing.
      return '\n'.join(l for l in out.splitlines() if 'warning' not in l and 'binaryen' not in l)
    self.do_run(src, ('got: 35\ngot: 45\ngot: 25\ngot: 15\nisatty? 0,0,1\n', 'got: 35\ngot: 45\ngot: 25\ngot: 15\nisatty? 0,0,1', 'isatty? 0,0,1\ngot: 35\ngot: 45\ngot: 25\ngot: 15'), output_nicerizer=clean)
def test_mount(self):
self.set_setting('FORCE_FILESYSTEM')
self.do_runf(test_file('fs/test_mount.c'), 'success')
def test_getdents64(self):
self.do_runf(test_file('fs/test_getdents64.cpp'), '..')
def test_getdents64_special_cases(self):
# https://bugs.chromium.org/p/v8/issues/detail?id=6881
self.banned_js_engines = [config.V8_ENGINE]
self.do_run_in_out_file_test('fs/test_getdents64_special_cases.cpp')
def test_getcwd_with_non_ascii_name(self):
# https://bugs.chromium.org/p/v8/issues/detail?id=6881
self.banned_js_engines = [config.V8_ENGINE]
self.do_run_in_out_file_test('fs/test_getcwd_with_non_ascii_name.cpp')
def test_proc_self_fd(self):
self.do_run_in_out_file_test('fs/test_proc_self_fd.c')
def test_fwrite_0(self):
self.do_core_test('test_fwrite_0.c')
def test_fgetc_ungetc(self):
print('TODO: update this test once the musl ungetc-on-EOF-stream bug is fixed upstream and reaches us')
orig_compiler_opts = self.emcc_args.copy()
for fs in ['MEMFS', 'NODEFS']:
print(fs)
self.emcc_args = orig_compiler_opts + ['-D' + fs]
if fs == 'NODEFS':
self.emcc_args += ['-lnodefs.js']
self.do_runf(test_file('stdio/test_fgetc_ungetc.c'), 'success', js_engines=[config.NODE_JS])
  def test_fgetc_unsigned(self):
    # fgetc() must return the byte as an unsigned char converted to int:
    # byte 0xEA should print as 234, not a sign-extended negative value.
    src = r'''
      #include <stdio.h>
      int main() {
        FILE *file = fopen("file_with_byte_234.txt", "rb");
        int c = fgetc(file);
        printf("*%d\n", c);
      }
    '''
    create_file('file_with_byte_234.txt', b'\xea', binary=True)
    self.emcc_args += ['--embed-file', 'file_with_byte_234.txt']
    self.do_run(src, '*234\n')
def test_fgets_eol(self):
src = r'''
#include <stdio.h>
char buf[32];
int main()
{
const char *r = "SUCCESS";
FILE *f = fopen("eol.txt", "r");
while (fgets(buf, 32, f) != NULL) {
if (buf[0] == '\0') {
r = "FAIL";
break;
}
}
printf("%s\n", r);
fclose(f);
return 0;
}
'''
open('eol.txt', 'wb').write(b'\n')
self.emcc_args += ['--embed-file', 'eol.txt']
self.do_run(src, 'SUCCESS\n')
def test_fscanf(self):
create_file('three_numbers.txt', '-1 0.1 -.1')
src = r'''
#include <stdio.h>
#include <assert.h>
#include <float.h>
int main()
{
float x = FLT_MAX, y = FLT_MAX, z = FLT_MAX;
FILE* fp = fopen("three_numbers.txt", "r");
if (fp) {
int match = fscanf(fp, " %f %f %f ", &x, &y, &z);
printf("match = %d\n", match);
printf("x = %0.1f, y = %0.1f, z = %0.1f\n", x, y, z);
} else {
printf("failed to open three_numbers.txt\n");
}
return 0;
}
'''
self.emcc_args += ['--embed-file', 'three_numbers.txt']
self.do_run(src, 'match = 3\nx = -1.0, y = 0.1, z = -0.1\n')
def test_fscanf_2(self):
create_file('a.txt', '''1/2/3 4/5/6 7/8/9
''')
self.emcc_args += ['--embed-file', 'a.txt']
self.do_run(r'''#include <cstdio>
#include <iostream>
using namespace std;
int
main( int argv, char ** argc ) {
cout << "fscanf test" << endl;
FILE * file;
file = fopen("a.txt", "rb");
int vertexIndex[4];
int normalIndex[4];
int uvIndex[4];
int matches = fscanf(file, "%d/%d/%d %d/%d/%d %d/%d/%d %d/%d/%d\n", &vertexIndex[0], &uvIndex[0], &normalIndex[0], &vertexIndex [1], &uvIndex[1], &normalIndex[1], &vertexIndex[2], &uvIndex[2], &normalIndex[2], &vertexIndex[3], &uvIndex[3], &normalIndex[3]);
cout << matches << endl;
return 0;
}
''', 'fscanf test\n9\n')
  def test_fileno(self):
    # fileno() of the first file opened after stdin/stdout/stderr should be 3.
    create_file('empty.txt', '')
    src = r'''
      #include <stdio.h>
      #include <unistd.h>
      int main()
      {
        FILE* fp = fopen("empty.txt", "r");
        if (fp) {
          printf("%d\n", fileno(fp));
        } else {
          printf("failed to open empty.txt\n");
        }
        return 0;
      }
    '''
    self.emcc_args += ['--embed-file', 'empty.txt']
    self.do_run(src, '3\n')
def test_readdir(self):
self.do_run_in_out_file_test('dirent/test_readdir.c')
def test_readdir_empty(self):
self.do_run_in_out_file_test('dirent/test_readdir_empty.c')
def test_stat(self):
self.do_runf(test_file('stat/test_stat.c'), 'success')
self.verify_in_strict_mode('test_stat.js')
def test_fstatat(self):
self.do_runf(test_file('stat/test_fstatat.c'), 'success')
def test_stat_chmod(self):
self.do_runf(test_file('stat/test_chmod.c'), 'success')
def test_stat_mknod(self):
self.do_runf(test_file('stat/test_mknod.c'), 'success')
def test_fcntl(self):
self.add_pre_run("FS.createDataFile('/', 'test', 'abcdef', true, true, false);")
self.do_run_in_out_file_test('fcntl/test_fcntl.c')
def test_fcntl_open(self):
self.do_run_in_out_file_test('fcntl/test_fcntl_open.c')
@also_with_wasm_bigint
def test_fcntl_misc(self):
self.add_pre_run("FS.createDataFile('/', 'test', 'abcdef', true, true, false);")
self.do_run_in_out_file_test('fcntl/test_fcntl_misc.c')
def test_poll(self):
self.add_pre_run('''
var dummy_device = FS.makedev(64, 0);
FS.registerDevice(dummy_device, {});
FS.createDataFile('/', 'file', 'abcdef', true, true, false);
FS.mkdev('/device', dummy_device);
''')
self.do_core_test('test_poll.c')
def test_statvfs(self):
self.do_core_test('test_statvfs.c')
def test_libgen(self):
self.do_core_test('test_libgen.c')
def test_utime(self):
self.do_runf(test_file('utime/test_utime.c'), 'success')
def test_futimens(self):
self.do_runf(test_file('utime', 'test_futimens.c'), 'success')
@no_minimal_runtime('MINIMAL_RUNTIME does not have getValue() and setValue() (TODO add it to a JS library function to get it in)')
def test_utf(self):
self.banned_js_engines = [config.SPIDERMONKEY_ENGINE] # only node handles utf well
self.set_setting('EXPORTED_FUNCTIONS', ['_main', '_malloc', '_free'])
self.set_setting('EXPORTED_RUNTIME_METHODS', ['getValue', 'setValue', 'UTF8ToString', 'stringToUTF8'])
self.do_core_test('test_utf.c')
def test_utf32(self):
if self.get_setting('MINIMAL_RUNTIME'):
self.set_setting('DEFAULT_LIBRARY_FUNCS_TO_INCLUDE', ['$UTF32ToString', '$stringToUTF32', '$lengthBytesUTF32'])
else:
self.set_setting('EXPORTED_RUNTIME_METHODS', ['UTF32ToString', 'stringToUTF32', 'lengthBytesUTF32'])
self.do_runf(test_file('utf32.cpp'), 'OK.')
self.do_runf(test_file('utf32.cpp'), 'OK.', args=['-fshort-wchar'])
def test_utf16(self):
self.do_runf(test_file('core/test_utf16.cpp'), 'OK.')
def test_utf8(self):
if self.get_setting('MINIMAL_RUNTIME'):
self.set_setting('DEFAULT_LIBRARY_FUNCS_TO_INCLUDE', ['$AsciiToString', '$stringToAscii', '$writeAsciiToMemory'])
else:
self.set_setting('EXPORTED_RUNTIME_METHODS',
['UTF8ToString', 'stringToUTF8', 'AsciiToString', 'stringToAscii'])
self.do_runf(test_file('utf8.cpp'), 'OK.')
@also_with_wasm_bigint
def test_utf8_textdecoder(self):
self.set_setting('EXPORTED_RUNTIME_METHODS', ['UTF8ToString', 'stringToUTF8'])
self.emcc_args += ['--embed-file', test_file('utf8_corpus.txt') + '@/utf8_corpus.txt']
self.do_runf(test_file('benchmark_utf8.cpp'), 'OK.')
# Test that invalid character in UTF8 does not cause decoding to crash.
def test_utf8_invalid(self):
self.set_setting('EXPORTED_RUNTIME_METHODS', ['UTF8ToString', 'stringToUTF8'])
for decoder_mode in [[], ['-s', 'TEXTDECODER']]:
self.emcc_args += decoder_mode
print(str(decoder_mode))
self.do_runf(test_file('utf8_invalid.cpp'), 'OK.')
# Test that invalid character in UTF8 does not cause decoding to crash.
@no_asan('TODO: ASan support in minimal runtime')
def test_minimal_runtime_utf8_invalid(self):
self.set_setting('EXPORTED_RUNTIME_METHODS', ['UTF8ToString', 'stringToUTF8'])
self.set_setting('MINIMAL_RUNTIME')
for decoder_mode in [False, True]:
self.set_setting('TEXTDECODER', decoder_mode)
print(str(decoder_mode))
self.do_runf(test_file('utf8_invalid.cpp'), 'OK.')
def test_utf16_textdecoder(self):
self.set_setting('EXPORTED_RUNTIME_METHODS', ['UTF16ToString', 'stringToUTF16', 'lengthBytesUTF16'])
self.emcc_args += ['--embed-file', test_file('utf16_corpus.txt') + '@/utf16_corpus.txt']
self.do_runf(test_file('benchmark_utf16.cpp'), 'OK.')
def test_wprintf(self):
self.do_core_test('test_wprintf.cpp')
def test_write_stdout_fileno(self):
self.do_core_test('test_write_stdout_fileno.c')
self.do_core_test('test_write_stdout_fileno.c', args=['-s', 'FILESYSTEM=0'])
def test_direct_string_constant_usage(self):
# needs to flush stdio streams
self.set_setting('EXIT_RUNTIME')
self.do_core_test('test_direct_string_constant_usage.cpp')
def test_std_cout_new(self):
self.do_core_test('test_std_cout_new.cpp')
def test_std_function_incomplete_return(self):
self.do_core_test('test_std_function_incomplete_return.cpp')
def test_istream(self):
# needs to flush stdio streams
self.set_setting('EXIT_RUNTIME')
for linkable in [0]: # , 1]:
print(linkable)
# regression check for issue #273
self.set_setting('LINKABLE', linkable)
self.do_core_test('test_istream.cpp')
def test_fs_base(self):
self.set_setting('DEFAULT_LIBRARY_FUNCS_TO_INCLUDE', ['$FS'])
self.uses_es6 = True
self.add_pre_run(read_file(test_file('filesystem/src.js')))
src = 'int main() {return 0;}\n'
expected = read_file(test_file('filesystem/output.txt'))
self.do_run(src, expected)
@also_with_noderawfs
@is_slow_test
def test_fs_nodefs_rw(self):
# TODO(sbc): This test exposes in issue in the way we run closure compiler and
# causes it to generate non-ES5 output.
# Remove this line once we fix: https://github.com/emscripten-core/emscripten/issues/12628
self.uses_es6 = True
self.emcc_args += ['-lnodefs.js']
self.set_setting('SYSCALL_DEBUG')
self.do_runf(test_file('fs/test_nodefs_rw.c'), 'success')
if self.maybe_closure():
self.do_runf(test_file('fs/test_nodefs_rw.c'), 'success')
@also_with_noderawfs
def test_fs_nodefs_cloexec(self):
self.emcc_args += ['-lnodefs.js']
self.do_runf(test_file('fs/test_nodefs_cloexec.c'), 'success')
def test_fs_nodefs_home(self):
self.set_setting('FORCE_FILESYSTEM')
self.emcc_args += ['-lnodefs.js']
self.do_runf(test_file('fs/test_nodefs_home.c'), 'success', js_engines=[config.NODE_JS])
def test_fs_nodefs_nofollow(self):
self.emcc_args += ['-lnodefs.js']
self.do_runf(test_file('fs/test_nodefs_nofollow.c'), 'success', js_engines=[config.NODE_JS])
def test_fs_trackingdelegate(self):
self.set_setting('FS_DEBUG')
self.do_run_in_out_file_test('fs/test_trackingdelegate.c')
@also_with_noderawfs
def test_fs_writeFile(self):
self.set_setting('DISABLE_EXCEPTION_CATCHING') # see issue 2334
self.do_run_in_out_file_test('fs/test_writeFile.cpp')
def test_fs_write(self):
self.do_run_in_out_file_test('fs/test_write.cpp')
@also_with_noderawfs
def test_fs_emptyPath(self):
self.do_run_in_out_file_test('fs/test_emptyPath.c')
@also_with_noderawfs
def test_fs_append(self):
self.do_runf(test_file('fs/test_append.c'), 'success')
def test_fs_mmap(self):
self.uses_es6 = True
orig_compiler_opts = self.emcc_args.copy()
for fs in ['MEMFS', 'NODEFS', 'NODERAWFS']:
self.emcc_args = orig_compiler_opts + ['-D' + fs]
if fs == 'NODEFS':
self.emcc_args += ['-lnodefs.js']
if fs == 'NODERAWFS':
self.emcc_args += ['-lnodefs.js', '-lnoderawfs.js']
self.do_run_in_out_file_test('fs/test_mmap.c')
  @parameterized({
    '': [],
    'minimal_runtime': ['-s', 'MINIMAL_RUNTIME=1']
  })
  def test_fs_no_main(self, *args):
    # library_fs.js uses hooks to enable ignoring of permissions up until ATMAINs are run. This
    # test verifies that they work correctly, even in programs without a main function.
    create_file('pre.js', '''
Module['preRun'] = function() {
  assert(FS.ignorePermissions, "ignorePermissions not set during preRun");
}
Module['onRuntimeInitialized'] = function() {
  assert(!FS.ignorePermissions, "ignorePermissions not unset during onRuntimeInitialized");
  assert(_foo() == 42);
}
''')
    self.set_setting('EXPORTED_FUNCTIONS', '_foo')
    self.set_setting('FORCE_FILESYSTEM')
    self.emcc_args += ['--pre-js', 'pre.js'] + list(args)
    # No main(): the exported _foo is called from JS once the runtime is up.
    self.do_run('int foo() { return 42; }', '', force_c=True)
@also_with_noderawfs
def test_fs_errorstack(self):
# Enables strict mode, which may catch some strict-mode-only errors
# so that users can safely work with strict JavaScript if enabled.
create_file('pre.js', '"use strict";')
self.emcc_args += ['--pre-js', 'pre.js']
self.set_setting('FORCE_FILESYSTEM')
self.set_setting('ASSERTIONS')
self.do_run(r'''
#include <emscripten.h>
#include <iostream>
int main(void) {
std::cout << "hello world\n"; // should work with strict mode
EM_ASM(
try {
FS.readFile('/dummy.txt');
} catch (err) {
err.stack = err.stack; // should be writable
throw err;
}
);
return 0;
}
''', 'at Object.readFile', assert_returncode=NON_ZERO) # engines has different error stack format
@also_with_noderawfs
def test_fs_llseek(self):
self.set_setting('FORCE_FILESYSTEM')
self.do_runf(test_file('fs/test_llseek.c'), 'success')
def test_fs_64bit(self):
self.do_runf(test_file('fs/test_64bit.c'), 'success')
def test_sigalrm(self):
self.do_runf(test_file('test_sigalrm.c'), 'Received alarm!')
self.set_setting('EXIT_RUNTIME')
self.do_runf(test_file('test_sigalrm.c'), 'Received alarm!')
def test_signals(self):
self.do_core_test(test_file('test_signals.c'))
@no_windows('https://github.com/emscripten-core/emscripten/issues/8882')
def test_unistd_access(self):
self.uses_es6 = True
orig_compiler_opts = self.emcc_args.copy()
for fs in ['MEMFS', 'NODEFS']:
self.emcc_args = orig_compiler_opts + ['-D' + fs]
if fs == 'NODEFS':
self.emcc_args += ['-lnodefs.js']
self.do_run_in_out_file_test('unistd/access.c', js_engines=[config.NODE_JS])
# Node.js fs.chmod is nearly no-op on Windows
if not WINDOWS:
self.emcc_args = orig_compiler_opts
self.set_setting('NODERAWFS')
self.do_run_in_out_file_test('unistd/access.c', js_engines=[config.NODE_JS])
def test_unistd_curdir(self):
self.uses_es6 = True
self.do_run_in_out_file_test('unistd/curdir.c')
@also_with_noderawfs
def test_unistd_close(self):
self.do_run_in_out_file_test('unistd/close.c')
def test_unistd_confstr(self):
self.do_run_in_out_file_test('unistd/confstr.c')
def test_unistd_ttyname(self):
self.do_runf(test_file('unistd/ttyname.c'), 'success')
@also_with_noderawfs
def test_unistd_pipe(self):
self.do_runf(test_file('unistd/pipe.c'), 'success')
@also_with_noderawfs
def test_unistd_dup(self):
self.do_run_in_out_file_test('unistd/dup.c')
def test_unistd_pathconf(self):
self.do_run_in_out_file_test('unistd/pathconf.c')
def test_unistd_truncate(self):
self.uses_es6 = True
orig_compiler_opts = self.emcc_args.copy()
for fs in ['MEMFS', 'NODEFS']:
self.emcc_args = orig_compiler_opts + ['-D' + fs]
if fs == 'NODEFS':
self.emcc_args += ['-lnodefs.js']
self.do_run_in_out_file_test('unistd/truncate.c', js_engines=[config.NODE_JS])
@no_windows("Windows throws EPERM rather than EACCES or EINVAL")
@unittest.skipIf(WINDOWS or os.geteuid() == 0, "Root access invalidates this test by being able to write on readonly files")
def test_unistd_truncate_noderawfs(self):
self.uses_es6 = True
self.set_setting('NODERAWFS')
self.maybe_closure()
self.do_run_in_out_file_test('unistd/truncate.c', js_engines=[config.NODE_JS])
def test_unistd_swab(self):
self.do_run_in_out_file_test('unistd/swab.c')
def test_unistd_isatty(self):
self.do_runf(test_file('unistd/isatty.c'), 'success')
@also_with_standalone_wasm()
def test_unistd_sysconf(self):
self.do_run_in_out_file_test('unistd/sysconf.c')
@no_asan('ASan alters memory layout')
def test_unistd_sysconf_phys_pages(self):
filename = test_file('unistd/sysconf_phys_pages.c')
if self.get_setting('ALLOW_MEMORY_GROWTH'):
expected = (2 * 1024 * 1024 * 1024) // webassembly.WASM_PAGE_SIZE
else:
expected = 16 * 1024 * 1024 // webassembly.WASM_PAGE_SIZE
self.do_runf(filename, str(expected) + ', errno: 0')
def test_unistd_login(self):
self.do_run_in_out_file_test('unistd/login.c')
  @no_windows('https://github.com/emscripten-core/emscripten/issues/8882')
  def test_unistd_unlink(self):
    # Run unistd/unlink.c under MEMFS and NODEFS, then (Linux only) NODERAWFS.
    self.clear()
    orig_compiler_opts = self.emcc_args.copy()
    for fs in ['MEMFS', 'NODEFS']:
      self.emcc_args = orig_compiler_opts + ['-D' + fs]
      # symlinks on node.js on non-linux behave differently (e.g. on Windows they require administrative privileges)
      # so skip testing those bits on that combination.
      if fs == 'NODEFS':
        self.emcc_args += ['-lnodefs.js']
        if WINDOWS:
          self.emcc_args += ['-DNO_SYMLINK=1']
        if MACOS:
          continue
      self.do_runf(test_file('unistd/unlink.c'), 'success', js_engines=[config.NODE_JS])
    # Several differences/bugs on non-linux including https://github.com/nodejs/node/issues/18014
    if not WINDOWS and not MACOS:
      self.emcc_args = orig_compiler_opts + ['-DNODERAWFS']
      # 0 if root user
      if os.geteuid() == 0:
        # root can write read-only files, which would break the access checks
        self.emcc_args += ['-DSKIP_ACCESS_TESTS']
      self.set_setting('NODERAWFS')
      self.do_runf(test_file('unistd/unlink.c'), 'success', js_engines=[config.NODE_JS])
def test_unistd_links(self):
self.clear()
orig_compiler_opts = self.emcc_args.copy()
for fs in ['MEMFS', 'NODEFS']:
if WINDOWS and fs == 'NODEFS':
print('Skipping NODEFS part of this test for test_unistd_links on Windows, since it would require administrative privileges.', file=sys.stderr)
# Also, other detected discrepancies if you do end up running this test on NODEFS:
# test expects /, but Windows gives \ as path slashes.
# Calling readlink() on a non-link gives error 22 EINVAL on Unix, but simply error 0 OK on Windows.
continue
self.emcc_args = orig_compiler_opts + ['-D' + fs]
if fs == 'NODEFS':
self.emcc_args += ['-lnodefs.js']
self.do_run_in_out_file_test('unistd/links.c', js_engines=[config.NODE_JS])
@no_windows('Skipping NODEFS test, since it would require administrative privileges.')
def test_unistd_symlink_on_nodefs(self):
# Also, other detected discrepancies if you do end up running this test on NODEFS:
# test expects /, but Windows gives \ as path slashes.
# Calling readlink() on a non-link gives error 22 EINVAL on Unix, but simply error 0 OK on Windows.
self.emcc_args += ['-lnodefs.js']
self.do_run_in_out_file_test('unistd/symlink_on_nodefs.c', js_engines=[config.NODE_JS])
def test_unistd_sleep(self):
self.do_run_in_out_file_test('unistd/sleep.c')
@also_with_wasm_bigint
def test_unistd_io(self):
self.set_setting('DEFAULT_LIBRARY_FUNCS_TO_INCLUDE', ['$ERRNO_CODES'])
orig_compiler_opts = self.emcc_args.copy()
for fs in ['MEMFS', 'NODEFS']:
self.clear()
self.emcc_args = orig_compiler_opts + ['-D' + fs]
if fs == 'NODEFS':
self.emcc_args += ['-lnodefs.js']
self.do_run_in_out_file_test('unistd/io.c')
@no_windows('https://github.com/emscripten-core/emscripten/issues/8882')
def test_unistd_misc(self):
orig_compiler_opts = self.emcc_args.copy()
for fs in ['MEMFS', 'NODEFS']:
self.emcc_args = orig_compiler_opts + ['-D' + fs]
if fs == 'NODEFS':
self.emcc_args += ['-lnodefs.js']
self.do_run_in_out_file_test('unistd/misc.c', js_engines=[config.NODE_JS], interleaved_output=False)
def test_unistd_fstatfs(self):
self.do_run_in_out_file_test('unistd/fstatfs.c')
# i64s in the API, which we'd need to legalize for JS, so in standalone mode
# all we can test is wasm VMs
@also_with_standalone_wasm(wasm2c=True)
def test_posixtime(self):
self.banned_js_engines = [config.V8_ENGINE] # v8 lacks monotonic time
self.do_core_test('test_posixtime.c')
def test_uname(self):
self.do_core_test('test_uname.c')
def test_unary_literal(self):
self.do_core_test('test_unary_literal.cpp')
def test_env(self):
expected = read_file(test_file('env/output.txt'))
self.do_runf(test_file('env/src.c'), [
expected.replace('{{{ THIS_PROGRAM }}}', self.in_dir('src.js')).replace('\\', '/'), # node, can find itself properly
expected.replace('{{{ THIS_PROGRAM }}}', './this.program') # spidermonkey, v8
])
def test_environ(self):
expected = read_file(test_file('env/output-mini.txt'))
self.do_runf(test_file('env/src-mini.c'), [
expected.replace('{{{ THIS_PROGRAM }}}', self.in_dir('src-mini.js')).replace('\\', '/'), # node, can find itself properly
expected.replace('{{{ THIS_PROGRAM }}}', './this.program') # spidermonkey, v8
])
def test_systypes(self):
self.do_core_test('test_systypes.c')
def test_stddef(self):
self.do_core_test('test_stddef.cpp')
self.do_core_test('test_stddef.cpp', force_c=True)
def test_getloadavg(self):
self.do_core_test('test_getloadavg.c')
def test_nl_types(self):
self.do_core_test('test_nl_types.c')
def test_799(self):
src = test_file('799.cpp')
self.do_runf(src, '''Set PORT family: 0, port: 3979
Get PORT family: 0
PORT: 3979
''')
def test_ctype(self):
self.do_core_test('test_ctype.c')
def test_strcasecmp(self):
self.do_core_test('test_strcasecmp.c')
def test_atomic(self):
self.do_core_test('test_atomic.c')
def test_atomic_cxx(self):
# the wasm backend has lock-free atomics, but not asm.js or asm2wasm
self.emcc_args += ['-DIS_64BIT_LOCK_FREE=1']
self.do_core_test('test_atomic_cxx.cpp')
# TODO: test with USE_PTHREADS in wasm backend as well
def test_phiundef(self):
self.do_core_test('test_phiundef.c')
def test_netinet_in(self):
self.do_run_in_out_file_test('netinet/in.cpp')
@needs_dylink
def test_main_module_static_align(self):
if self.get_setting('ALLOW_MEMORY_GROWTH'):
self.skipTest('no shared modules with memory growth')
self.set_setting('MAIN_MODULE')
self.do_core_test('test_main_module_static_align.cpp')
  # libc++ tests
  def test_iostream_and_determinism(self):
    # Build the same iostream program several times and require the emitted
    # JS (and wasm) to be byte-identical across builds.
    create_file('src.cpp', '''
      #include <iostream>
      int main()
      {
        std::cout << "hello world" << std::endl << 77 << "." << std::endl;
        return 0;
      }
    ''')
    num = 5
    for i in range(num):
      print('(iteration %d)' % i)
      # add some timing nondeterminism here, not that we need it, but whatever
      time.sleep(random.random() / (10 * num))
      self.do_runf('src.cpp', 'hello world\n77.\n')
      # Verify that this build is identical to the previous one
      if os.path.exists('src.js.previous'):
        self.assertBinaryEqual('src.js', 'src.js.previous')
      shutil.copy2('src.js', 'src.js.previous')
      # Same but for the wasm file.
      if self.is_wasm() and not self.get_setting('WASM2JS'):
        if os.path.exists('src.wasm.previous'):
          self.assertBinaryEqual('src.wasm', 'src.wasm.previous')
        shutil.copy2('src.wasm', 'src.wasm.previous')
def test_stdvec(self):
self.do_core_test('test_stdvec.cpp')
def test_random_device(self):
self.maybe_closure()
self.do_core_test('test_random_device.cpp')
def test_reinterpreted_ptrs(self):
self.do_core_test('test_reinterpreted_ptrs.cpp')
def test_js_libraries(self):
create_file('main.cpp', '''
#include <stdio.h>
extern "C" {
extern void printey();
extern int calcey(int x, int y);
}
int main() {
printey();
printf("*%d*\\n", calcey(10, 22));
return 0;
}
''')
create_file('mylib1.js', '''
mergeInto(LibraryManager.library, {
printey: function() {
out('hello from lib!');
}
});
''')
create_file('mylib2.js', '''
mergeInto(LibraryManager.library, {
calcey: function(x, y) {
return x + y;
}
});
''')
self.emcc_args += ['--js-library', 'mylib1.js', '--js-library', 'mylib2.js']
self.do_runf('main.cpp', 'hello from lib!\n*32*\n')
def test_unicode_js_library(self):
create_file('main.cpp', '''
#include <stdio.h>
extern "C" {
extern void printey();
}
int main() {
printey();
return 0;
}
''')
self.emcc_args += ['--js-library', test_file('unicode_library.js')]
self.do_runf('main.cpp', u'Unicode snowman \u2603 says hello!')
def test_funcptr_import_type(self):
self.emcc_args += ['--js-library', test_file('core/test_funcptr_import_type.js')]
self.do_core_test('test_funcptr_import_type.cpp')
@no_asan('ASan does not work with EXPORT_ALL')
def test_constglobalunion(self):
self.set_setting('EXPORT_ALL')
self.do_run(r'''
#include <stdio.h>
struct one_const {
long a;
};
struct two_consts {
long a;
long b;
};
union some_consts {
struct one_const one;
struct two_consts two;
};
union some_consts my_consts = {{
1
}};
struct one_const addr_of_my_consts = {
(long)(&my_consts)
};
int main(void) {
printf("%li\n", (long)!!addr_of_my_consts.a);
return 0;
}
''', '1')
### 'Medium' tests
def test_fannkuch(self):
results = [(1, 0), (2, 1), (3, 2), (4, 4), (5, 7), (6, 10), (7, 16), (8, 22)]
self.build(test_file('fannkuch.cpp'))
for i, j in results:
print(i, j)
self.do_run('fannkuch.js', 'Pfannkuchen(%d) = %d.' % (i, j), args=[str(i)], no_build=True)
def test_raytrace(self):
# TODO: Should we remove this test?
self.skipTest('Relies on double value rounding, extremely sensitive')
src = read_file(test_file('raytrace.cpp')).replace('double', 'float')
output = read_file(test_file('raytrace.ppm'))
self.do_run(src, output, args=['3', '16'])
def test_fasta(self):
    """Run the fasta benchmark for several input sizes, once with 'double'
    and once with every 'double' in the source replaced by 'float'."""
    results = [(1, '''GG*ctt**tgagc*'''),
(20, '''GGCCGGGCGCGGTGGCTCACGCCTGTAATCCCAGCACTTT*cttBtatcatatgctaKggNcataaaSatgtaaaDcDRtBggDtctttataattcBgtcg**tacgtgtagcctagtgtttgtgttgcgttatagtctatttgtggacacagtatggtcaaa**tgacgtcttttgatctgacggcgttaacaaagatactctg*'''),
(50, '''GGCCGGGCGCGGTGGCTCACGCCTGTAATCCCAGCACTTTGGGAGGCCGAGGCGGGCGGA*TCACCTGAGGTCAGGAGTTCGAGACCAGCCTGGCCAACAT*cttBtatcatatgctaKggNcataaaSatgtaaaDcDRtBggDtctttataattcBgtcg**tactDtDagcctatttSVHtHttKtgtHMaSattgWaHKHttttagacatWatgtRgaaa**NtactMcSMtYtcMgRtacttctWBacgaa**agatactctgggcaacacacatacttctctcatgttgtttcttcggacctttcataacct**ttcctggcacatggttagctgcacatcacaggattgtaagggtctagtggttcagtgagc**ggaatatcattcgtcggtggtgttaatctatctcggtgtagcttataaatgcatccgtaa**gaatattatgtttatttgtcggtacgttcatggtagtggtgtcgccgatttagacgtaaa**ggcatgtatg*''')]

    # save emcc_args so each sub-run starts from the same flag set
    old = self.emcc_args
    orig_src = read_file(test_file('fasta.cpp'))

    def test(extra_args):
        self.emcc_args = old + extra_args
        for t in ['float', 'double']:
            print(t)
            src = orig_src.replace('double', t)
            with open('fasta.cpp', 'w') as f:
                f.write(src)
            self.build('fasta.cpp')
            # newlines are mapped to '*' so expected outputs can be single-line
            for arg, output in results:
                self.do_run('fasta.js', output, args=[str(arg)], output_nicerizer=lambda x: x.replace('\n', '*'), no_build=True)
            shutil.copyfile('fasta.js', '%s.js' % t)

    test([])
@needs_non_trapping_float_to_int
def test_fasta_nontrapping(self):
    """Re-run test_fasta with non-trapping float-to-int conversions enabled."""
    self.emcc_args += ['-mnontrapping-fptoint']
    self.test_fasta()
def test_whets(self):
    """Build and run the Whetstone benchmark; only checks the banner is printed."""
    self.do_runf(test_file('whets.cpp'), 'Single Precision C Whetstone Benchmark')
# node is slower, and fail on 64-bit
@require_v8
@no_asan('depends on the specifics of memory size, which for asan we are forced to increase')
def test_dlmalloc_inline(self):
    """Compile dlmalloc.c concatenated directly with the test driver (rather
    than linking it) and run two allocation workloads."""
    # needed with typed arrays
    self.set_setting('INITIAL_MEMORY', '128mb')

    src = read_file(path_from_root('system/lib/dlmalloc.c')) + '\n\n\n' + read_file(test_file('dlmalloc_test.c'))
    self.do_run(src, '*1,0*', args=['200', '1'], force_c=True)
    # second run reuses the src.js produced by the first do_run
    self.do_run('src.js', '*400,0*', args=['400', '400'], force_c=True, no_build=True)
# node is slower, and fail on 64-bit
@require_v8
@no_asan('depends on the specifics of memory size, which for asan we are forced to increase')
def test_dlmalloc(self):
    """Run the dlmalloc test as a normal link; with default flags also build
    it through plain emcc, then exercise every new/delete pairing."""
    # needed with typed arrays
    self.set_setting('INITIAL_MEMORY', '128mb')

    # Linked version
    self.do_runf(test_file('dlmalloc_test.c'), '*1,0*', args=['200', '1'])
    self.do_run('dlmalloc_test.js', '*400,0*', args=['400', '400'], no_build=True)

    # TODO: do this in other passes too, passing their opts into emcc
    if self.emcc_args == []:
        # emcc should build in dlmalloc automatically, and do all the sign correction etc. for it
        try_delete('src.js')
        self.run_process([EMCC, test_file('dlmalloc_test.c'), '-s', 'INITIAL_MEMORY=128MB', '-o', 'src.js'], stdout=PIPE, stderr=self.stderr_redirect)
        self.do_run(None, '*1,0*', ['200', '1'], no_build=True)
        self.do_run(None, '*400,0*', ['400', '400'], no_build=True)

    # The same for new and all its variants
    src = read_file(test_file('new.cpp'))
    for new, delete in [
        ('malloc(100)', 'free'),
        ('new char[100]', 'delete[]'),
        ('new Structy', 'delete'),
        ('new int', 'delete'),
        ('new Structy[10]', 'delete[]'),
    ]:
        self.do_run(src.replace('{{{ NEW }}}', new).replace('{{{ DELETE }}}', delete), '*1,0*')
# Tests that a large allocation should gracefully fail
@no_asan('the memory size limit here is too small for asan')
def test_dlmalloc_large(self):
    """With ABORTING_MALLOC off and a 128MB memory cap, an oversized
    allocation should return NULL rather than abort."""
    self.emcc_args += ['-s', 'ABORTING_MALLOC=0', '-s', 'ALLOW_MEMORY_GROWTH=1', '-s', 'MAXIMUM_MEMORY=128MB']
    self.do_runf(test_file('dlmalloc_test_large.c'), '0 0 0 1')
@no_asan('asan also changes malloc, and that ends up linking in new twice')
def test_dlmalloc_partial(self):
    """Override only operator new (not the rest of dlmalloc's symbols) and
    check the user-provided definition is the one that runs."""
    # present part of the symbols of dlmalloc, not all
    src = read_file(test_file('new.cpp')).replace('{{{ NEW }}}', 'new int').replace('{{{ DELETE }}}', 'delete') + '''
#include <new>

void* operator new(size_t size) {
printf("new %zu!\\n", size);
return malloc(size);
}
'''
    self.do_run(src, 'new 4!\n*1,0*')
@no_asan('asan also changes malloc, and that ends up linking in new twice')
def test_dlmalloc_partial_2(self):
    """Partially override dlmalloc symbols via a stock core test; expects a
    non-zero exit code."""
    if 'SAFE_HEAP' in str(self.emcc_args):
        self.skipTest('we do unsafe stuff here')
    # present part of the symbols of dlmalloc, not all. malloc is harder to link than new which is weak.
    self.do_core_test('test_dlmalloc_partial_2.c', assert_returncode=NON_ZERO)
def test_libcxx(self):
    """Smoke-test libc++ containers: an unordered-map based test file plus a
    minimal inline std::set program."""
    self.do_runf(test_file('hashtest.cpp'),
                 'june -> 30\nPrevious (in alphabetical order) is july\nNext (in alphabetical order) is march')

    self.do_run('''
#include <set>
#include <stdio.h>
int main() {
std::set<int> fetchOriginatorNums;
fetchOriginatorNums.insert(171);
printf("hello world\\n");
return 0;
}
''', 'hello world')
def test_typeid(self):
    """Run the stock typeid/RTTI core test."""
    self.do_core_test('test_typeid.cpp')
def test_static_variable(self):
    """Static-variable destruction test; EXIT_RUNTIME is required so atexit
    handlers actually run."""
    # needs atexit
    self.set_setting('EXIT_RUNTIME')
    self.do_core_test('test_static_variable.cpp')
def test_fakestat(self):
    """Run the stock fakestat core test."""
    self.do_core_test('test_fakestat.c')
def test_mmap(self):
    """Run the mmap core test with enlarged memory (except under ASan, which
    configures memory elsewhere)."""
    # ASan needs more memory, but that is set up separately
    if '-fsanitize=address' not in self.emcc_args:
        self.set_setting('INITIAL_MEMORY', '128mb')

    # needs to flush stdio streams
    self.set_setting('EXIT_RUNTIME')
    self.do_core_test('test_mmap.c')
def test_mmap_file(self):
    """Embed a file of exactly 9000 bytes and verify mmap'd reads at offset 0
    and at the second page (4096) see the expected contents."""
    self.emcc_args += ['--embed-file', 'data.dat']
    filler = 'data from the file........'
    # repeat the filler as long as another full copy still fits under 9000,
    # then pad with dots to exactly 9000 bytes (same layout as before)
    contents = ''
    while len(contents) + len(filler) < 9000:
        contents += filler
    contents += '.' * (9000 - len(contents))
    assert len(contents) == 9000
    create_file('data.dat', contents)
    self.do_runf(test_file('mmap_file.c'),
                 '*\n' + contents[0:20] + '\n' + contents[4096:4096 + 20] + '\n*\n')
@no_lsan('Test code contains memory leaks')
def test_cubescript(self):
    """Build and run the third-party cubescript interpreter, then repeat the
    run with ASYNCIFY enabled for extra coverage."""
    # uses register keyword
    self.emcc_args += ['-std=c++03', '-Wno-dynamic-class-memaccess']
    self.maybe_closure()
    self.emcc_args += ['-I', test_file('third_party/cubescript')]
    # Test code contains memory leaks
    if '-fsanitize=address' in self.emcc_args:
        self.emcc_args += ['--pre-js', test_file('asan-no-leak.js')]

    def test():
        src = test_file('third_party/cubescript/command.cpp')
        self.do_runf(src, '*\nTemp is 33\n9\n5\nhello, everyone\n*')

    test()

    print('asyncify') # extra coverage
    self.set_setting('ASYNCIFY')
    test()
@needs_dylink
def test_relocatable_void_function(self):
    """Run the relocatable-void-function core test under RELOCATABLE."""
    self.set_setting('RELOCATABLE')
    self.do_core_test('test_relocatable_void_function.c')
@wasm_simd
def test_wasm_intrinsics_simd(self):
    """Exercise the wasm SIMD intrinsics header, once with default char
    signedness and once with -funsigned-char."""
    def run():
        self.do_runf(test_file('test_wasm_intrinsics_simd.c'), 'Success!')
    # Improves test readability
    self.emcc_args.append('-Wno-c++11-narrowing')
    self.emcc_args.extend(['-Wpedantic', '-Werror', '-Wall', '-xc++'])
    run()
    self.emcc_args.append('-funsigned-char')
    run()
# Tests invoking the NEON SIMD API via arm_neon.h header
@wasm_simd
def test_neon_wasm_simd(self):
    """Compile NEON intrinsics (arm_neon.h) to wasm SIMD and run."""
    self.emcc_args.append('-Wno-c++11-narrowing')
    self.emcc_args.append('-mfpu=neon')
    self.emcc_args.append('-msimd128')
    self.do_runf(test_file('neon/test_neon_wasm_simd.cpp'), 'Success!')
# Tests invoking the SIMD API via x86 SSE1 xmmintrin.h header (_mm_x() functions)
@wasm_simd
@requires_native_clang
@no_safe_heap('has unaligned 64-bit operations in wasm')
def test_sse1(self):
    """Build the SSE1 test natively with clang, capture its stdout, then
    compare the wasm build's output against it."""
    src = test_file('sse/test_sse1.cpp')
    self.run_process([shared.CLANG_CXX, src, '-msse', '-o', 'test_sse1', '-D_CRT_SECURE_NO_WARNINGS=1'] + clang_native.get_clang_native_args(), stdout=PIPE)
    native_result = self.run_process('./test_sse1', stdout=PIPE).stdout

    self.emcc_args += ['-I' + test_file('sse'), '-msse']
    self.maybe_closure()

    self.do_runf(src, native_result)
# Tests invoking the SIMD API via x86 SSE2 emmintrin.h header (_mm_x() functions)
@wasm_simd
@requires_native_clang
@no_safe_heap('has unaligned 64-bit operations in wasm')
@is_slow_test
def test_sse2(self):
    """Compare wasm SSE2 intrinsics output against a native clang build."""
    src = test_file('sse/test_sse2.cpp')
    self.run_process([shared.CLANG_CXX, src, '-msse2', '-Wno-argument-outside-range', '-o', 'test_sse2', '-D_CRT_SECURE_NO_WARNINGS=1'] + clang_native.get_clang_native_args(), stdout=PIPE)
    native_result = self.run_process('./test_sse2', stdout=PIPE).stdout

    self.emcc_args += ['-I' + test_file('sse'), '-msse2', '-Wno-argument-outside-range']
    self.maybe_closure()
    self.do_runf(src, native_result)
# Tests invoking the SIMD API via x86 SSE3 pmmintrin.h header (_mm_x() functions)
@wasm_simd
@requires_native_clang
def test_sse3(self):
    """Compare wasm SSE3 intrinsics output against a native clang build."""
    src = test_file('sse/test_sse3.cpp')
    self.run_process([shared.CLANG_CXX, src, '-msse3', '-Wno-argument-outside-range', '-o', 'test_sse3', '-D_CRT_SECURE_NO_WARNINGS=1'] + clang_native.get_clang_native_args(), stdout=PIPE)
    native_result = self.run_process('./test_sse3', stdout=PIPE).stdout

    self.emcc_args += ['-I' + test_file('sse'), '-msse3', '-Wno-argument-outside-range']
    self.maybe_closure()
    self.do_runf(src, native_result)
# Tests invoking the SIMD API via x86 SSSE3 tmmintrin.h header (_mm_x() functions)
@wasm_simd
@requires_native_clang
def test_ssse3(self):
    """Compare wasm SSSE3 intrinsics output against a native clang build."""
    src = test_file('sse/test_ssse3.cpp')
    self.run_process([shared.CLANG_CXX, src, '-mssse3', '-Wno-argument-outside-range', '-o', 'test_ssse3', '-D_CRT_SECURE_NO_WARNINGS=1'] + clang_native.get_clang_native_args(), stdout=PIPE)
    native_result = self.run_process('./test_ssse3', stdout=PIPE).stdout

    self.emcc_args += ['-I' + test_file('sse'), '-mssse3', '-Wno-argument-outside-range']
    self.maybe_closure()
    self.do_runf(src, native_result)
# Tests invoking the SIMD API via x86 SSE4.1 smmintrin.h header (_mm_x() functions)
@wasm_simd
@requires_native_clang
@is_slow_test
def test_sse4_1(self):
    """Compare wasm SSE4.1 intrinsics output against a native clang build;
    bumps to -O1 under unoptimized ASan to avoid a VM local-count limit."""
    src = test_file('sse/test_sse4_1.cpp')
    if not self.is_optimizing() and '-fsanitize=address' in self.emcc_args:
        # ASan with -O0 fails with:
        # Compiling function #69:"__original_main" failed: local count too large
        self.emcc_args.append('-O1')
    self.run_process([shared.CLANG_CXX, src, '-msse4.1', '-Wno-argument-outside-range', '-o', 'test_sse4_1', '-D_CRT_SECURE_NO_WARNINGS=1'] + clang_native.get_clang_native_args(), stdout=PIPE)
    native_result = self.run_process('./test_sse4_1', stdout=PIPE).stdout

    self.emcc_args += ['-I' + test_file('sse'), '-msse4.1', '-Wno-argument-outside-range']
    self.maybe_closure()
    self.do_runf(src, native_result)
# Tests invoking the SIMD API via x86 SSE4.2 nmmintrin.h header (_mm_x() functions)
@wasm_simd
@requires_native_clang
def test_sse4_2(self):
    """Compare wasm SSE4.2 intrinsics output against a native clang build."""
    src = test_file('sse/test_sse4_2.cpp')
    self.run_process([shared.CLANG_CXX, src, '-msse4.2', '-Wno-argument-outside-range', '-o', 'test_sse4_2', '-D_CRT_SECURE_NO_WARNINGS=1'] + clang_native.get_clang_native_args(), stdout=PIPE)
    native_result = self.run_process('./test_sse4_2', stdout=PIPE).stdout

    self.emcc_args += ['-I' + test_file('sse'), '-msse4.2', '-Wno-argument-outside-range']
    self.maybe_closure()
    self.do_runf(src, native_result)
# Tests invoking the SIMD API via x86 AVX avxintrin.h header (_mm_x() functions)
@wasm_simd
@requires_native_clang
def test_avx(self):
    """Compare wasm AVX intrinsics output against a native clang build."""
    src = test_file('sse/test_avx.cpp')
    self.run_process([shared.CLANG_CXX, src, '-mavx', '-Wno-argument-outside-range', '-o', 'test_avx', '-D_CRT_SECURE_NO_WARNINGS=1'] + clang_native.get_clang_native_args(), stdout=PIPE)
    native_result = self.run_process('./test_avx', stdout=PIPE).stdout

    self.emcc_args += ['-I' + test_file('sse'), '-mavx', '-Wno-argument-outside-range']
    self.maybe_closure()
    self.do_runf(src, native_result)
@wasm_simd
def test_sse_diagnostics(self):
    """Check that compiling with WASM_SIMD_COMPAT_SLOW emits the slow-path
    emulation diagnostic on stderr (so -Werror must be dropped)."""
    self.emcc_args.remove('-Werror')
    src = test_file('sse/test_sse_diagnostic.cpp')

    p = self.run_process(
        [shared.EMXX, src, '-msse', '-DWASM_SIMD_COMPAT_SLOW'] + self.get_emcc_args(),
        stderr=PIPE)
    self.assertContained('Instruction emulated via slow path.', p.stderr)
@requires_native_clang
@wasm_relaxed_simd
def test_relaxed_simd_implies_simd128(self):
    """Building SSE code under relaxed-SIMD should succeed, i.e. relaxed SIMD
    implies -msimd128 (build only, no run)."""
    src = test_file('sse/test_sse1.cpp')
    self.build(src, emcc_args=['-msse'])
@no_asan('call stack exceeded on some versions of node')
def test_gcc_unmangler(self):
    """Run libiberty's C++ demangler on a sample mangled name."""
    self.emcc_args += ['-I' + test_file('third_party/libiberty')]

    self.do_runf(test_file('third_party/libiberty/cp-demangle.c'), '*d_demangle(char const*, int, unsigned int*)*', args=['_ZL10d_demanglePKciPj'])
@needs_make('make')
def test_lua(self):
    """Build the Lua interpreter with its own makefile and run a small script
    through -e; duplicate newlines in the output are collapsed."""
    self.emcc_args.remove('-Werror')

    libs = self.get_library('third_party/lua', [Path('src/lua.o'), Path('src/liblua.a')], make=['make', 'generic'], configure=None)
    self.do_run('',
                'hello lua world!\n17\n1\n2\n3\n4\n7',
                args=['-e', '''print("hello lua world!");print(17);for x = 1,4 do print(x) end;print(10-3)'''],
                libraries=libs,
                includes=[test_file('lua')],
                output_nicerizer=lambda output: output.replace('\n\n', '\n').replace('\n\n', '\n'))
@no_asan('issues with freetype itself')
@needs_make('configure script')
@is_slow_test
def test_freetype(self):
    """Render text with freetype from an embedded TTF font and compare the
    rasterized output against reference files, including two issue-324
    regression cases."""
    # the font file is injected into the virtual FS as raw bytes via a pre-run hook
    self.add_pre_run("FS.createDataFile('/', 'font.ttf', %s, true, false, false);" % str(
        list(bytearray(read_binary(test_file('freetype/LiberationSansBold.ttf'))))
    ))

    # Not needed for js, but useful for debugging
    shutil.copyfile(test_file('freetype/LiberationSansBold.ttf'), 'font.ttf')

    # Main
    self.do_run_from_file(test_file('freetype/main.c'),
                          test_file('freetype/ref.txt'),
                          args=['font.ttf', 'test!', '150', '120', '25'],
                          libraries=self.get_freetype_library(),
                          includes=[test_file('third_party/freetype/include')])

    # github issue 324
    print('[issue 324]')
    self.do_run_from_file(test_file('freetype/main_2.c'),
                          test_file('freetype/ref_2.txt'),
                          args=['font.ttf', 'w', '32', '32', '25'],
                          libraries=self.get_freetype_library(),
                          includes=[test_file('third_party/freetype/include')])

    print('[issue 324 case 2]')
    self.do_run_from_file(test_file('freetype/main_3.c'),
                          test_file('freetype/ref_3.txt'),
                          args=['font.ttf', 'W', '32', '32', '0'],
                          libraries=self.get_freetype_library(),
                          includes=[test_file('third_party/freetype/include')])

    print('[issue 324 case 3]')
    # reuses main_3.js built by the previous run
    self.do_run('main_3.js',
                read_file(test_file('freetype/ref_4.txt')),
                args=['font.ttf', 'ea', '40', '32', '0'],
                no_build=True)
@no_asan('local count too large for VMs')
@is_slow_test
def test_sqlite(self):
    """Compile the full sqlite3 amalgamation concatenated with a benchmark
    driver and compare its output to a reference transcript."""
    self.set_setting('DISABLE_EXCEPTION_CATCHING')
    self.set_setting('EXPORTED_FUNCTIONS', ['_main', '_sqlite3_open', '_sqlite3_close', '_sqlite3_exec', '_sqlite3_free'])
    if '-g' in self.emcc_args:
        print("disabling inlining") # without registerize (which -g disables), we generate huge amounts of code
        self.set_setting('INLINING_LIMIT')

    # newer clang has a warning for implicit conversions that lose information,
    # which happens in sqlite (see #9138)
    self.emcc_args += ['-Wno-implicit-int-float-conversion']
    # newer clang warns about "suspicious concatenation of string literals in an
    # array initialization; did you mean to separate the elements with a comma?"
    self.emcc_args += ['-Wno-string-concatenation']
    # ignore unknown flags, which lets the above flags be used on github CI
    # before the LLVM change rolls in (the same LLVM change that adds the
    # warning also starts to warn on it)
    self.emcc_args += ['-Wno-unknown-warning-option']
    self.emcc_args += ['-Wno-pointer-bool-conversion']

    self.emcc_args += ['-I' + test_file('third_party/sqlite')]

    # configuration defines are prepended ahead of the amalgamated source
    src = '''
#define SQLITE_DISABLE_LFS
#define LONGDOUBLE_TYPE double
#define SQLITE_INT64_TYPE long long int
#define SQLITE_THREADSAFE 0
'''
    src += read_file(test_file('third_party/sqlite/sqlite3.c'))
    src += read_file(test_file('sqlite/benchmark.c'))
    self.do_run(src,
                read_file(test_file('sqlite/benchmark.txt')),
                includes=[test_file('sqlite')],
                force_c=True)
@needs_make('mingw32-make')
@is_slow_test
@parameterized({
    'cmake': (True,),
    'configure': (False,)
})
def test_zlib(self, use_cmake):
    """Build zlib with either cmake or its configure script and run its
    example program against a reference output."""
    if WINDOWS and not use_cmake:
        self.skipTest("Windows cannot run configure sh scripts")

    self.maybe_closure()
    self.emcc_args.append('-Wno-shift-negative-value')
    if '-g' in self.emcc_args:
        self.emcc_args.append('-gsource-map') # more source maps coverage

    if use_cmake:
        make_args = []
        configure = ['cmake', '.']
    else:
        make_args = ['libz.a']
        configure = ['sh', './configure']

    self.do_run_from_file(
        test_file('third_party/zlib/example.c'),
        test_file('core/test_zlib.out'),
        libraries=self.get_library('third_party/zlib', 'libz.a', make_args=make_args, configure=configure),
        includes=[test_file('third_party/zlib'), 'building', 'zlib'])
@needs_make('make')
@is_slow_test
@parameterized({
    'cmake': (True,),
    'autoconf': (False,)
})
# Called thus so it runs late in the alphabetical cycle... it is long
def test_bullet(self, use_cmake):
    """Build the Bullet physics HelloWorld demo (cmake or autoconf) and accept
    any of several reference outputs that differ only in rounding."""
    if WINDOWS and not use_cmake:
        self.skipTest("Windows cannot run configure sh scripts")

    self.emcc_args += [
        '-Wno-c++11-narrowing',
        '-Wno-deprecated-register',
        '-Wno-writable-strings',
        '-Wno-shift-negative-value',
        '-Wno-format'
    ]

    # extra testing for ASSERTIONS == 2
    if use_cmake:
        self.set_setting('ASSERTIONS', 2)
        self.emcc_args.append('-Wno-unused-command-line-argument')

    self.do_runf(test_file('third_party/bullet/Demos/HelloWorld/HelloWorld.cpp'),
                 [read_file(test_file('bullet/output.txt')), # different roundings
                  read_file(test_file('bullet/output2.txt')),
                  read_file(test_file('bullet/output3.txt')),
                  read_file(test_file('bullet/output4.txt'))],
                 libraries=self.get_bullet_library(use_cmake),
                 includes=[test_file('third_party/bullet/src')])
@unittest.skip('LLVM changes have caused this C++ to no longer compile, https://github.com/emscripten-core/emscripten/issues/14614')
@no_asan('issues with freetype itself')
@needs_make('depends on freetype')
@is_slow_test
def test_poppler(self):
    """(Currently skipped.) Render a PDF page with poppler and compare the
    resulting PPM bytes, dumped from MEMFS via a postRun hook."""
    pdf_data = read_binary(test_file('poppler/paper.pdf'))
    create_file('paper.pdf.js', str(list(bytearray(pdf_data))))

    create_file('pre.js', '''
Module.preRun = function() {
FS.createDataFile('/', 'paper.pdf', eval(read_('paper.pdf.js')), true, false, false);
};
Module.postRun = function() {
var FileData = Array.from(MEMFS.getFileDataAsTypedArray(FS.root.contents['filename-1.ppm']));
out("Data: " + JSON.stringify(FileData.map(function(x) { return unSign(x, 8) })));
};
''')

    self.emcc_args += ['--pre-js', 'pre.js', '-s', 'DEFAULT_LIBRARY_FUNCS_TO_INCLUDE=$unSign']

    ppm_data = str(list(bytearray(read_binary(test_file('poppler/ref.ppm')))))
    self.do_run('', ppm_data.replace(' ', ''),
                libraries=self.get_poppler_library(),
                args=['-scale-to', '512', 'paper.pdf', 'filename'])
@needs_make('make')
@is_slow_test
def test_openjpeg(self):
    """Decode a JPEG2000 image with openjpeg and validate the raw output by
    average per-pixel difference (JS doubles cause tiny numeric drift, so an
    exact byte comparison is not possible). Runs once, and again without
    memory growth if growth was enabled; under ASan the whole procedure is
    wrapped with a larger INITIAL_MEMORY."""
    def do_test_openjpeg():
        def line_splitter(data):
            # wrap the very long JS array literal so the generated pre.js stays readable
            out = ''
            counter = 0

            for ch in data:
                out += ch
                if ch == ' ' and counter > 60:
                    out += '\n'
                    counter = 0
                else:
                    counter += 1

            return out

        # remove -g, so we have one test without it by default
        self.emcc_args = [x for x in self.emcc_args if x != '-g']

        original_j2k = test_file('openjpeg/syntensity_lobby_s.j2k')
        image_bytes = list(bytearray(read_binary(original_j2k)))
        create_file('pre.js', """
Module.preRun = function() { FS.createDataFile('/', 'image.j2k', %s, true, false, false); };
Module.postRun = function() {
out('Data: ' + JSON.stringify(Array.from(MEMFS.getFileDataAsTypedArray(FS.analyzePath('image.raw').object))));
};
""" % line_splitter(str(image_bytes)))

        # ensure libpng is built so that openjpeg's configure step can detect it.
        # If we don't do this then we don't know what the state of the cache will be
        # and this test would different non-deterministic results based on, for example,
        # what other tests had previously run.
        self.run_process([EMBUILDER, 'build', 'libpng'])
        lib = self.get_library('third_party/openjpeg',
                               [Path('codec/CMakeFiles/j2k_to_image.dir/index.c.o'),
                                Path('codec/CMakeFiles/j2k_to_image.dir/convert.c.o'),
                                Path('codec/CMakeFiles/j2k_to_image.dir/__/common/color.c.o'),
                                Path('bin/libopenjpeg.a')],
                               configure=['cmake', '.'],
                               # configure_args=['--enable-tiff=no', '--enable-jp3d=no', '--enable-png=no'],
                               make_args=[]) # no -j 2, since parallel builds can fail

        # We use doubles in JS, so we get slightly different values than native code. So we
        # check our output by comparing the average pixel difference
        def image_compare(output):
            # Get the image generated by JS, from the JSON.stringify'd array
            m = re.search(r'\[[\d, -]*\]', output)
            self.assertIsNotNone(m, 'Failed to find proper image output in: ' + output)
            # Evaluate the output as a python array
            js_data = eval(m.group(0))

            js_data = [x if x >= 0 else 256 + x for x in js_data] # Our output may be signed, so unsign it

            # Get the correct output
            true_data = bytearray(read_binary(test_file('openjpeg/syntensity_lobby_s.raw')))

            # Compare them
            assert(len(js_data) == len(true_data))
            num = len(js_data)
            diff_total = js_total = true_total = 0
            for i in range(num):
                js_total += js_data[i]
                true_total += true_data[i]
                diff_total += abs(js_data[i] - true_data[i])
            js_mean = js_total / float(num)
            true_mean = true_total / float(num)
            diff_mean = diff_total / float(num)

            image_mean = 83.265
            # print '[image stats:', js_mean, image_mean, true_mean, diff_mean, num, ']'
            assert abs(js_mean - image_mean) < 0.01, [js_mean, image_mean]
            assert abs(true_mean - image_mean) < 0.01, [true_mean, image_mean]
            assert diff_mean < 0.01, diff_mean

            return output

        # Explictly disable EXIT_RUNTIME, since otherwise addOnPostRun does not work.
        # https://github.com/emscripten-core/emscripten/issues/15080
        self.set_setting('EXIT_RUNTIME', 0)

        self.emcc_args += ['--minify=0'] # to compare the versions
        self.emcc_args += ['--pre-js', 'pre.js']

        def do_test():
            self.do_runf(test_file('third_party/openjpeg/codec/j2k_to_image.c'),
                         'Successfully generated', # The real test for valid output is in image_compare
                         args='-i image.j2k -o image.raw'.split(),
                         emcc_args=['-sUSE_LIBPNG'],
                         libraries=lib,
                         includes=[test_file('third_party/openjpeg/libopenjpeg'),
                                   test_file('third_party/openjpeg/codec'),
                                   test_file('third_party/openjpeg/common'),
                                   Path(self.get_build_dir(), 'third_party/openjpeg')],
                         output_nicerizer=image_compare)

        do_test()

        # extra testing
        if self.get_setting('ALLOW_MEMORY_GROWTH') == 1:
            print('no memory growth', file=sys.stderr)
            self.set_setting('ALLOW_MEMORY_GROWTH', 0)
            do_test()

    if '-fsanitize=address' in self.emcc_args:
        # In ASan mode we need a large initial memory (or else wasm-ld fails).
        # The OpenJPEG CMake will build several executables (which we need parts
        # of in our testing, see above), so we must enable the flag for them all.
        with env_modify({'EMMAKEN_CFLAGS': '-sINITIAL_MEMORY=300MB'}):
            do_test_openjpeg()
    else:
        do_test_openjpeg()
@no_asan('call stack exceeded on some versions of node')
@is_slow_test
def test_fuzz(self):
    """Run every checked-in fuzzer-generated testcase against its expected
    output, first normally and then with LTO."""
    self.emcc_args += ['-I' + test_file('fuzz/include'), '-w']

    def run_all(x):
        print(x)
        for name in sorted(glob.glob(test_file('fuzz/*.c')) + glob.glob(test_file('fuzz/*.cpp'))):
            # cases that are known to fail are kept but excluded
            if 'newfail' in name:
                continue
            if os.path.basename(name).startswith('temp_fuzzcode'):
                continue
            print(name)
            if name.endswith('.cpp'):
                self.emcc_args.append('-std=c++03')
            self.do_runf(test_file('fuzz', name),
                         read_file(test_file('fuzz', name + '.txt')))
            if name.endswith('.cpp'):
                self.emcc_args.remove('-std=c++03')

    run_all('normal')

    self.emcc_args += ['-flto']

    run_all('lto')
@also_with_standalone_wasm(wasm2c=True, impure=True)
@no_asan('autodebug logging interferes with asan')
@with_env_modify({'EMCC_AUTODEBUG': '1'})
def test_autodebug_wasm(self):
    """Build with EMCC_AUTODEBUG and verify the program still succeeds while
    emitting the instrumentation log calls."""
    # test that the program both works and also emits some of the logging
    # (but without the specific output, as it is logging the actual locals
    # used and so forth, which will change between opt modes and updates of
    # llvm etc.)
    def check(out):
        for msg in ['log_execution', 'get_i32', 'set_i32', 'load_ptr', 'load_val', 'store_ptr', 'store_val']:
            self.assertIn(msg, out)
        return out
    self.do_runf(test_file('core/test_autodebug.c'),
                 'success', output_nicerizer=check)
@parameterized({
    'full': ('full',),
    'mask': ('mask',),
    'none': ('none',),
})
def test_wasm2c_sandboxing(self, mode):
    """Run hello_world through wasm2c with each sandboxing mode; requires
    standalone-wasm support."""
    if not can_do_standalone(self):
        return self.skipTest('standalone mode not supported')
    self.set_setting('STANDALONE_WASM')
    self.set_setting('WASM2C')
    self.set_setting('WASM2C_SANDBOXING', mode)
    # wasm2c output is run natively, so no wasm engines are needed
    self.wasm_engines = []
    self.do_core_test('test_hello_world.c')
### Integration tests
def test_ccall(self):
    """Exercise ccall/cwrap from a post-js script: every argument/return type
    combination, stack restoration under repeated calls, and reentrant
    ccall; repeated with closure if available."""
    self.emcc_args.append('-Wno-return-stack-address')
    self.set_setting('EXPORTED_RUNTIME_METHODS', ['ccall', 'cwrap'])
    # synchronous compilation so post.js can call immediately
    self.set_setting('WASM_ASYNC_COMPILATION', 0)
    create_file('post.js', '''
out('*');
var ret;
ret = Module['ccall']('get_int', 'number'); out([typeof ret, ret].join(','));
ret = ccall('get_float', 'number'); out([typeof ret, ret.toFixed(2)].join(','));
ret = ccall('get_bool', 'boolean'); out([typeof ret, ret].join(','));
ret = ccall('get_string', 'string'); out([typeof ret, ret].join(','));
ret = ccall('print_int', null, ['number'], [12]); out(typeof ret);
ret = ccall('print_float', null, ['number'], [14.56]); out(typeof ret);
ret = ccall('print_bool', null, ['boolean'], [true]); out(typeof ret);
ret = ccall('print_string', null, ['string'], ["cheez"]); out(typeof ret);
ret = ccall('print_string', null, ['array'], [[97, 114, 114, 45, 97, 121, 0]]); out(typeof ret); // JS array
ret = ccall('print_string', null, ['array'], [new Uint8Array([97, 114, 114, 45, 97, 121, 0])]); out(typeof ret); // typed array
ret = ccall('multi', 'number', ['number', 'number', 'number', 'string'], [2, 1.4, 3, 'more']); out([typeof ret, ret].join(','));
var p = ccall('malloc', 'pointer', ['number'], [4]);
setValue(p, 650, 'i32');
ret = ccall('pointer', 'pointer', ['pointer'], [p]); out([typeof ret, getValue(ret, 'i32')].join(','));
out('*');
// part 2: cwrap
var noThirdParam = Module['cwrap']('get_int', 'number');
out(noThirdParam());
var multi = Module['cwrap']('multi', 'number', ['number', 'number', 'number', 'string']);
out(multi(2, 1.4, 3, 'atr'));
out(multi(8, 5.4, 4, 'bret'));
out('*');
// part 3: avoid stack explosion and check it's restored correctly
for (var i = 0; i < TOTAL_STACK/60; i++) {
ccall('multi', 'number', ['number', 'number', 'number', 'string'], [0, 0, 0, '123456789012345678901234567890123456789012345678901234567890']);
}
out('stack is ok.');
ccall('call_ccall_again', null);
''')
    self.emcc_args += ['--post-js', 'post.js']

    self.set_setting('EXPORTED_FUNCTIONS', ['_get_int', '_get_float', '_get_bool', '_get_string', '_print_int', '_print_float', '_print_bool', '_print_string', '_multi', '_pointer', '_call_ccall_again', '_malloc'])
    self.do_core_test('test_ccall.cpp')

    if self.maybe_closure():
        self.do_core_test('test_ccall.cpp')
def test_EXPORTED_RUNTIME_METHODS(self):
    """Run the core test twice: once with only $dynCall force-included, then
    with several runtime methods explicitly exported."""
    self.set_setting('DEFAULT_LIBRARY_FUNCS_TO_INCLUDE', ['$dynCall'])
    self.do_core_test('EXPORTED_RUNTIME_METHODS.c')
    # test dyncall (and other runtime methods in support.js) can be exported
    self.emcc_args += ['-DEXPORTED']
    self.set_setting('EXPORTED_RUNTIME_METHODS', ['dynCall', 'addFunction', 'lengthBytesUTF8', 'getTempRet0', 'setTempRet0'])
    self.do_core_test('EXPORTED_RUNTIME_METHODS.c')
@parameterized({
    '': [],
    'minimal_runtime': ['-s', 'MINIMAL_RUNTIME=1']
})
def test_dyncall_specific(self, *args):
    """Build dyncall_specific.c under each dyncall access mode (direct,
    dynamic-signature, exported variants); the exported cases are not
    applicable under MINIMAL_RUNTIME."""
    cases = [
        ('DIRECT', []),
        ('DYNAMIC_SIG', ['-s', 'DYNCALLS=1', '-s', 'DEFAULT_LIBRARY_FUNCS_TO_INCLUDE=$dynCall']),
    ]
    if 'MINIMAL_RUNTIME=1' in args:
        self.emcc_args += ['--pre-js', test_file('minimal_runtime_exit_handling.js')]
    else:
        cases += [
            ('EXPORTED', []),
            ('EXPORTED_DYNAMIC_SIG', ['-s', 'DYNCALLS=1', '-s', 'DEFAULT_LIBRARY_FUNCS_TO_INCLUDE=$dynCall', '-s', 'EXPORTED_RUNTIME_METHODS=dynCall']),
            ('FROM_OUTSIDE', ['-s', 'EXPORTED_RUNTIME_METHODS=dynCall_iiji'])
        ]

    for which, extra_args in cases:
        print(str(args) + ' ' + which)
        self.do_core_test('dyncall_specific.c', emcc_args=['-D' + which] + list(args) + extra_args)
def test_getValue_setValue(self):
    """getValue/setValue used to be exported by default but no longer are.

    Checks three access modes: direct use (kept alive through JSDCE),
    unexported use with assertions (should fail with a clear message), and
    explicit export via EXPORTED_RUNTIME_METHODS.
    """
    def test(output_prefix='', args=None, assert_returncode=0):
        # Use None instead of a mutable [] default: do_run_from_file passes
        # the list on as emcc_args, so a shared default could be mutated and
        # leak flags between calls.
        if args is None:
            args = []
        src = test_file('core/getValue_setValue.cpp')
        expected = test_file('core/getValue_setValue' + output_prefix + '.out')
        self.do_run_from_file(src, expected, assert_returncode=assert_returncode, emcc_args=args)

    # see that direct usage (not on module) works. we don't export, but the use
    # keeps it alive through JSDCE
    test(args=['-DDIRECT'])
    # see that with assertions, we get a nice error message
    self.set_setting('EXPORTED_RUNTIME_METHODS', [])
    self.set_setting('ASSERTIONS')
    test('_assert', assert_returncode=NON_ZERO)
    self.set_setting('ASSERTIONS', 0)
    # see that when we export them, things work on the module
    self.set_setting('EXPORTED_RUNTIME_METHODS', ['getValue', 'setValue'])
    test()
@parameterized({
    '': ([],),
    '_files': (['-DUSE_FILES'],)
})
def test_FS_exports(self, extra_args):
    """FS methods used to be exported by default but no longer are.

    Checks direct use, the assertion error message when unexported, and
    explicit export of FS_createDataFile.
    """
    def test(output_prefix='', args=None, assert_returncode=0):
        # Use None instead of a mutable [] default: the original default list
        # was extended in place with `args += extra_args`, accumulating flags
        # across every call that relied on the default.
        args = list(args or []) + extra_args
        print(args)
        self.do_runf(test_file('core/FS_exports.cpp'),
                     (read_file(test_file('core/FS_exports' + output_prefix + '.out')),
                      read_file(test_file('core/FS_exports' + output_prefix + '_2.out'))),
                     assert_returncode=assert_returncode, emcc_args=args)

    # see that direct usage (not on module) works. we don't export, but the use
    # keeps it alive through JSDCE
    test(args=['-DDIRECT', '-s', 'FORCE_FILESYSTEM'])
    # see that with assertions, we get a nice error message
    self.set_setting('EXPORTED_RUNTIME_METHODS', [])
    self.set_setting('ASSERTIONS')
    test('_assert', assert_returncode=NON_ZERO)
    self.set_setting('ASSERTIONS', 0)
    # see that when we export them, things work on the module
    self.set_setting('EXPORTED_RUNTIME_METHODS', ['FS_createDataFile'])
    test(args=['-s', 'FORCE_FILESYSTEM'])
def test_legacy_exported_runtime_numbers(self):
    """Legacy runtime numbers (e.g. ALLOC_STACK) used to be exported by
    default but no longer are; check direct use, the assertion error
    message, and explicit export.
    """
    def test(output_prefix='', args=None, assert_returncode=0):
        # None default instead of a mutable [] (shared-default pitfall);
        # emcc_args is saved and restored so flags never leak between calls.
        if args is None:
            args = []
        old = self.emcc_args.copy()
        self.emcc_args += args
        src = test_file('core/legacy_exported_runtime_numbers.cpp')
        expected = test_file('core/legacy_exported_runtime_numbers%s.out' % output_prefix)
        self.do_run_from_file(src, expected, assert_returncode=assert_returncode)
        self.emcc_args = old

    # see that direct usage (not on module) works. we don't export, but the use
    # keeps it alive through JSDCE
    test(args=['-DDIRECT'])
    # see that with assertions, we get a nice error message
    self.set_setting('EXPORTED_RUNTIME_METHODS', [])
    self.set_setting('ASSERTIONS')
    test('_assert', assert_returncode=NON_ZERO)
    self.set_setting('ASSERTIONS', 0)
    # see that when we export them, things work on the module
    self.set_setting('EXPORTED_RUNTIME_METHODS', ['ALLOC_STACK'])
    test()
def test_response_file(self):
    """Pass compiler arguments via an @response-file and check both the happy
    path and the missing-file error message."""
    response_data = '-o %s/response_file.js %s' % (self.get_dir(), test_file('hello_world.cpp'))
    # double the backslashes so Windows paths survive response-file parsing
    create_file('rsp_file', response_data.replace('\\', '\\\\'))
    self.run_process([EMCC, "@rsp_file"] + self.get_emcc_args())
    self.do_run('response_file.js', 'hello, world', no_build=True)

    self.assertContained('response file not found: foo.txt', self.expect_fail([EMCC, '@foo.txt']))
def test_linker_response_file(self):
    """Pass linker arguments via -Wl,@response-file and run the result."""
    objfile = 'response_file.o'
    self.run_process([EMCC, '-c', test_file('hello_world.cpp'), '-o', objfile] + self.get_emcc_args())
    # This should expand into -Wl,--start-group <objfile> -Wl,--end-group
    response_data = '--start-group ' + objfile + ' --end-group'
    # double the backslashes so Windows paths survive response-file parsing
    create_file('rsp_file', response_data.replace('\\', '\\\\'))
    self.run_process([EMCC, "-Wl,@rsp_file", '-o', 'response_file.o.js'] + self.get_emcc_args())
    self.do_run('response_file.o.js', 'hello, world', no_build=True)
def test_exported_response(self):
    """Supply EXPORTED_FUNCTIONS via an @file and confirm the listed symbol
    survives into the generated JS."""
    src = r'''
#include <stdio.h>
#include <stdlib.h>
#include <emscripten.h>

extern "C" {
int other_function() { return 5; }
}

int main() {
int x = EM_ASM_INT({ return Module._other_function() });
emscripten_run_script_string(""); // Add a reference to a symbol that exists in src/deps_info.json to uncover issue #2836 in the test suite.
printf("waka %d!\n", x);
return 0;
}
'''
    create_file('exps', '["_main","_other_function"]')

    self.set_setting('EXPORTED_FUNCTIONS', '@exps')
    self.do_run(src, '''waka 5!''')
    assert 'other_function' in read_file('src.js')
def test_large_exported_response(self):
    """Export 5000 generated functions through a JSON @response-file and call
    the last one from EM_ASM to prove the export list was honored."""
    src = r'''
#include <stdio.h>
#include <stdlib.h>
#include <emscripten.h>

extern "C" {
'''

    num_exports = 5000
    js_funcs = []
    for idx in range(num_exports):
        src += 'int exported_func_from_response_file_%d () { return %d;}\n' % (idx, idx)
        js_funcs.append('_exported_func_from_response_file_%d' % idx)

    src += r'''
}

int main() {
int x = EM_ASM_INT({ return Module._exported_func_from_response_file_4999() });
emscripten_run_script_string(""); // Add a reference to a symbol that exists in src/deps_info.json to uncover issue #2836 in the test suite.
printf("waka %d!\n", x);
return 0;
}
'''

    js_funcs.append('_main')
    create_file('large_exported_response.json', json.dumps(js_funcs))

    self.set_setting('EXPORTED_FUNCTIONS', '@large_exported_response.json')
    self.do_run(src, 'waka 4999!')
    self.assertContained('_exported_func_from_response_file_1', read_file('src.js'))
def test_add_function(self):
    """Exercise addFunction: with reserved function pointers, with none
    reserved (table growth should fail with a clear error), and with
    ALLOW_TABLE_GROWTH enabled."""
    self.set_setting('INVOKE_RUN', 0)
    self.set_setting('WASM_ASYNC_COMPILATION', 0)
    self.set_setting('RESERVED_FUNCTION_POINTERS')
    self.set_setting('EXPORTED_RUNTIME_METHODS', ['callMain'])
    src = test_file('interop/test_add_function.cpp')
    post_js = test_file('interop/test_add_function_post.js')
    self.emcc_args += ['--post-js', post_js]

    print('basics')
    self.do_run_in_out_file_test('interop/test_add_function.cpp')

    print('with RESERVED_FUNCTION_POINTERS=0')
    self.set_setting('RESERVED_FUNCTION_POINTERS', 0)
    expected = 'Unable to grow wasm table'
    if self.is_wasm2js():
        # in wasm2js the error message doesn't come from the VM, but from our
        # emulation code. when ASSERTIONS are enabled we show a clear message, but
        # in optimized builds we don't waste code size on that, and the JS engine
        # shows a generic error.
        expected = 'wasmTable.grow is not a function'
    self.do_runf(src, expected, assert_returncode=NON_ZERO)

    print('- with table growth')
    self.set_setting('ALLOW_TABLE_GROWTH')
    self.emcc_args += ['-DGROWTH']
    # enable costly assertions to verify correct table behavior
    self.set_setting('ASSERTIONS', 2)
    self.do_run_in_out_file_test('interop/test_add_function.cpp', interleaved_output=False)
  def test_getFuncWrapper_sig_alias(self):
    """getFuncWrapper must distinguish functions whose signatures alias
    ('vi' vs 'vii') and dispatch to the right one."""
    self.set_setting('DEFAULT_LIBRARY_FUNCS_TO_INCLUDE', ['$getFuncWrapper'])
    src = r'''
    #include <stdio.h>
    #include <emscripten.h>
    void func1(int a) {
      printf("func1\n");
    }
    void func2(int a, int b) {
      printf("func2\n");
    }
    int main() {
      EM_ASM({
        getFuncWrapper($0, 'vi')(0);
        getFuncWrapper($1, 'vii')(0, 0);
      }, func1, func2);
      return 0;
    }
    '''
    self.do_run(src, 'func1\nfunc2\n')
def test_emulate_function_pointer_casts(self):
# Forcibly disable EXIT_RUNTIME due to:
# https://github.com/emscripten-core/emscripten/issues/15081
self.set_setting('EXIT_RUNTIME', 0)
self.set_setting('EMULATE_FUNCTION_POINTER_CASTS')
self.do_core_test('test_emulate_function_pointer_casts.cpp')
  @no_wasm2js('TODO: nicely printed names in wasm2js')
  @parameterized({
    'normal': ([],),
    'noexcept': (['-fno-exceptions'],)
  })
  def test_demangle_stacks(self, extra_args):
    """Aborting stack traces must show demangled C++ names; without
    assertions, a message suggesting enabling assertions is shown instead."""
    self.emcc_args += extra_args
    self.set_setting('DEMANGLE_SUPPORT')
    self.set_setting('ASSERTIONS')
    # disable aggressive inlining in binaryen
    self.set_setting('BINARYEN_EXTRA_PASSES', '--one-caller-inline-max-function-size=1')
    # ensure function names are preserved
    self.emcc_args += ['--profiling-funcs']
    self.do_core_test('test_demangle_stacks.cpp', assert_returncode=NON_ZERO)
    print('without assertions, the stack is not printed, but a message suggesting assertions is')
    self.set_setting('ASSERTIONS', 0)
    self.do_core_test('test_demangle_stacks_noassert.cpp', assert_returncode=NON_ZERO)
def test_demangle_stacks_symbol_map(self):
# disable aggressive inlining in binaryen
self.set_setting('BINARYEN_EXTRA_PASSES', '--one-caller-inline-max-function-size=1')
self.set_setting('DEMANGLE_SUPPORT')
if '-O' not in str(self.emcc_args) or '-O0' in self.emcc_args or '-O1' in self.emcc_args or '-g' in self.emcc_args:
self.skipTest("without opts, we don't emit a symbol map")
self.emcc_args += ['--emit-symbol-map']
self.do_runf(test_file('core/test_demangle_stacks.cpp'), 'Aborted', assert_returncode=NON_ZERO)
# make sure the shortened name is the right one
full_aborter = None
short_aborter = None
for line in open('test_demangle_stacks.js.symbols').readlines():
if ':' not in line:
continue
# split by the first ':' (wasm backend demangling may include more :'s later on)
short, full = line.split(':', 1)
if 'Aborter' in full:
short_aborter = short
full_aborter = full
self.assertIsNotNone(full_aborter)
self.assertIsNotNone(short_aborter)
print('full:', full_aborter, 'short:', short_aborter)
if config.SPIDERMONKEY_ENGINE and os.path.exists(config.SPIDERMONKEY_ENGINE[0]):
output = self.run_js('test_demangle_stacks.js', engine=config.SPIDERMONKEY_ENGINE, assert_returncode=NON_ZERO)
# we may see the full one, if -g, or the short one if not
if ' ' + short_aborter + ' ' not in output and ' ' + full_aborter + ' ' not in output:
# stack traces may also be ' name ' or 'name@' etc
if '\n' + short_aborter + ' ' not in output and '\n' + full_aborter + ' ' not in output and 'wasm-function[' + short_aborter + ']' not in output:
if '\n' + short_aborter + '@' not in output and '\n' + full_aborter + '@' not in output:
self.assertContained(' ' + short_aborter + ' ' + '\n' + ' ' + full_aborter + ' ', output)
@no_safe_heap('tracing from sbrk into JS leads to an infinite loop')
def test_tracing(self):
self.emcc_args += ['--tracing']
self.do_core_test('test_tracing.c')
  @disabled('https://github.com/emscripten-core/emscripten/issues/9527')
  def test_eval_ctors(self):
    """EVAL_CTORS: constructors evaluable at compile time should shrink code
    size while (usually) growing static memory data."""
    if '-O2' not in str(self.emcc_args) or '-O1' in str(self.emcc_args):
      self.skipTest('need js optimizations')
    if not self.is_wasm():
      self.skipTest('this test uses wasm binaries')
    print('leave printf in ctor')
    self.set_setting('EVAL_CTORS')
    self.do_run(r'''
      #include <stdio.h>
      struct C {
        C() { printf("constructing!\n"); } // don't remove this!
      };
      C c;
      int main() {}
    ''', "constructing!\n")

    def get_code_size():
      # measure emitted code size (function count is a proxy for wasm)
      if self.is_wasm():
        # Use number of functions as a proxy for code size
        return self.count_wasm_contents('hello_libcxx.wasm', 'funcs')
      else:
        return os.path.getsize('hello_libcxx.js')

    def get_mem_size():
      # measure static memory data size (memory-data is a proxy for wasm)
      if self.is_wasm():
        return self.count_wasm_contents('hello_libcxx.wasm', 'memory-data')
      if self.uses_memory_init_file():
        return os.path.getsize('hello_libcxx.js.mem')
      # otherwise we ignore memory size
      return 0

    def do_test(test):
      # build once with EVAL_CTORS and once without, then compare sizes:
      # evaluating ctors should reduce code and increase static memory data
      self.set_setting('EVAL_CTORS')
      test()
      ec_code_size = get_code_size()
      ec_mem_size = get_mem_size()
      self.clear_setting('EVAL_CTORS')
      test()
      code_size = get_code_size()
      mem_size = get_mem_size()
      if mem_size:
        print('mem: ', mem_size, '=>', ec_mem_size)
        self.assertGreater(ec_mem_size, mem_size)
      print('code:', code_size, '=>', ec_code_size)
      self.assertLess(ec_code_size, code_size)

    print('remove ctor of just assigns to memory')

    def test1():
      self.do_run(r'''
        #include <stdio.h>
        struct C {
          int x;
          C() {
            volatile int y = 10;
            y++;
            x = y;
          }
        };
        C c;
        int main() {
          printf("x: %d\n", c.x);
        }
      ''', "x: 11\n")
    do_test(test1)

    # The wasm backend currently exports a single initializer so the ctor
    # evaluation is all or nothing. As well as that it doesn't currently
    # do DCE of libcxx symbols (because they are marked as visibility(default)
    # and because of that we end up not being able to eval ctors unless all
    # libcxx constructors can be eval'd
    print('libcxx - remove 2 ctors from iostream code')
    output = 'hello, world!'

    def test2():
      self.do_runf(test_file('hello_libcxx.cpp'), output)
    do_test(test2)

    print('assertions too')
    self.set_setting('ASSERTIONS')
    self.do_runf(test_file('hello_libcxx.cpp'), output)
    self.set_setting('ASSERTIONS', 0)

    print('remove just some, leave others')

    def test3():
      self.do_run(r'''
        #include <iostream>
        #include <string>
        class std_string {
        public:
          std_string(): ptr(nullptr) { std::cout << "std_string()\n"; }
          std_string(const char* s): ptr(s) { std::cout << "std_string(const char* s)" << std::endl; }
          std_string(const std_string& s): ptr(s.ptr) { std::cout << "std_string(const std_string& s) " << std::endl; }
          const char* data() const { return ptr; }
        private:
          const char* ptr;
        };
        const std_string txtTestString("212121\0");
        const std::string s2text("someweirdtext");
        int main() {
          std::cout << s2text << std::endl;
          std::cout << txtTestString.data() << std::endl;
          std::cout << txtTestString.data() << std::endl;
          return 0;
        }
      ''', '''std_string(const char* s)
someweirdtext
212121
212121
''') # noqa
    do_test(test3)
  def test_embind(self):
    """Smoke-test embind's val API by calling Math.abs two different ways."""
    self.emcc_args += ['--bind']
    create_file('test_embind.cpp', r'''
      #include <stdio.h>
      #include <emscripten/val.h>
      using namespace emscripten;
      int main() {
        val Math = val::global("Math");
        // two ways to call Math.abs
        printf("abs(-10): %d\n", Math.call<int>("abs", -10));
        printf("abs(-11): %d\n", Math["abs"](-11).as<int>());
        return 0;
      }
    ''')
    self.do_runf('test_embind.cpp', 'abs(-10): 10\nabs(-11): 11')
  def test_embind_2(self):
    """Bind a C++ function with EMSCRIPTEN_BINDINGS and invoke it from JS."""
    self.emcc_args += ['--bind', '--post-js', 'post.js']
    create_file('post.js', '''
      function printLerp() {
        out('lerp ' + Module.lerp(100, 200, 66) + '.');
      }
    ''')
    create_file('test_embind_2.cpp', r'''
      #include <stdio.h>
      #include <emscripten.h>
      #include <emscripten/bind.h>
      using namespace emscripten;
      int lerp(int a, int b, int t) {
        return (100 - t) * a + t * b;
      }
      EMSCRIPTEN_BINDINGS(my_module) {
        function("lerp", &lerp);
      }
      int main(int argc, char **argv) {
        EM_ASM(printLerp());
        return 0;
      }
    ''')
    self.do_runf('test_embind_2.cpp', 'lerp 166')
  def test_embind_3(self):
    """Calling an embind function whose raw-pointer argument type is unbound
    must raise UnboundTypeError rather than crash."""
    self.emcc_args += ['--bind', '--post-js', 'post.js']
    create_file('post.js', '''
      function ready() {
        try {
          Module.compute(new Uint8Array([1,2,3]));
        } catch(e) {
          out(e);
        }
      }
    ''')
    create_file('test_embind_3.cpp', r'''
      #include <emscripten.h>
      #include <emscripten/bind.h>
      using namespace emscripten;
      int compute(int array[]) {
        return 0;
      }
      EMSCRIPTEN_BINDINGS(my_module) {
        function("compute", &compute, allow_raw_pointers());
      }
      int main(int argc, char **argv) {
        EM_ASM(ready());
        return 0;
      }
    ''')
    self.do_runf('test_embind_3.cpp', 'UnboundTypeError: Cannot call compute due to unbound types: Pi')
  def test_embind_4(self):
    """Expose a typed_memory_view over C++ memory and read it back from JS."""
    self.emcc_args += ['--bind', '--post-js', 'post.js']
    create_file('post.js', '''
      function printFirstElement() {
        out(Module.getBufferView()[0]);
      }
    ''')
    create_file('test_embind_4.cpp', r'''
      #include <emscripten.h>
      #include <emscripten/bind.h>
      #include <emscripten/val.h>
      #include <stdio.h>
      using namespace emscripten;
      const size_t kBufferSize = 1024;
      double buffer[kBufferSize];
      val getBufferView(void) {
        val v = val(typed_memory_view(kBufferSize, buffer));
        return v;
      }
      EMSCRIPTEN_BINDINGS(my_module) {
        function("getBufferView", &getBufferView);
      }
      int main(int argc, char **argv) {
        buffer[0] = 107;
        EM_ASM(printFirstElement());
        return 0;
      }
    ''')
    self.do_runf('test_embind_4.cpp', '107')
def test_embind_5(self):
self.emcc_args += ['--bind']
self.set_setting('EXIT_RUNTIME')
self.do_core_test('test_embind_5.cpp')
def test_embind_custom_marshal(self):
self.emcc_args += ['--bind', '--pre-js', test_file('embind/test_custom_marshal.js')]
self.do_run_in_out_file_test('embind/test_custom_marshal.cpp', assert_identical=True)
def test_embind_float_constants(self):
self.emcc_args += ['--bind']
self.do_run_in_out_file_test('embind/test_float_constants.cpp')
def test_embind_negative_constants(self):
self.emcc_args += ['--bind']
self.do_run_in_out_file_test('embind/test_negative_constants.cpp')
@also_with_wasm_bigint
def test_embind_unsigned(self):
self.emcc_args += ['--bind']
self.do_run_in_out_file_test('embind/test_unsigned.cpp')
def test_embind_val(self):
self.emcc_args += ['--bind']
self.do_run_in_out_file_test('embind/test_val.cpp')
@no_wasm2js('wasm_bigint')
def test_embind_i64_val(self):
self.set_setting('WASM_BIGINT')
self.emcc_args += ['--bind']
self.node_args += ['--experimental-wasm-bigint']
self.do_run_in_out_file_test('embind/test_i64_val.cpp', assert_identical=True)
@no_wasm2js('wasm_bigint')
def test_embind_i64_binding(self):
self.set_setting('WASM_BIGINT')
self.emcc_args += ['--bind']
self.node_args += ['--experimental-wasm-bigint']
self.do_run_in_out_file_test('embind/test_i64_binding.cpp', assert_identical=True)
  def test_embind_no_rtti(self):
    """embind must work with RTTI disabled, given
    EMSCRIPTEN_HAS_UNBOUND_TYPE_NAMES=0 to compile out unbound type names."""
    create_file('main.cpp', r'''
      #include <emscripten.h>
      #include <emscripten/bind.h>
      #include <emscripten/val.h>
      #include <stdio.h>
      EM_JS(void, calltest, (), {
        console.log("dotest returned: " + Module.dotest());
      });
      int main(int argc, char** argv){
        printf("418\n");
        calltest();
        return 0;
      }
      int test() {
        return 42;
      }
      EMSCRIPTEN_BINDINGS(my_module) {
        emscripten::function("dotest", &test);
      }
    ''')
    self.emcc_args += ['--bind', '-fno-rtti', '-DEMSCRIPTEN_HAS_UNBOUND_TYPE_NAMES=0']
    self.do_runf('main.cpp', '418\ndotest returned: 42\n')
def test_embind_polymorphic_class_no_rtti(self):
self.emcc_args += ['--bind', '-fno-rtti', '-DEMSCRIPTEN_HAS_UNBOUND_TYPE_NAMES=0']
self.do_core_test('test_embind_polymorphic_class_no_rtti.cpp')
  def test_embind_no_rtti_followed_by_rtti(self):
    """A later -frtti on the command line must override an earlier -fno-rtti,
    so embind works normally."""
    src = r'''
      #include <emscripten.h>
      #include <emscripten/bind.h>
      #include <emscripten/val.h>
      #include <stdio.h>
      EM_JS(void, calltest, (), {
        console.log("dotest returned: " + Module.dotest());
      });
      int main(int argc, char** argv){
        printf("418\n");
        calltest();
        return 0;
      }
      int test() {
        return 42;
      }
      EMSCRIPTEN_BINDINGS(my_module) {
        emscripten::function("dotest", &test);
      }
    '''
    self.emcc_args += ['--bind', '-fno-rtti', '-frtti']
    self.do_run(src, '418\ndotest returned: 42\n')
@parameterized({
'': (None, False),
'all': ('ALL', False),
'fast': ('FAST', False),
'default': ('DEFAULT', False),
'all_growth': ('ALL', True),
})
def test_webidl(self, mode, allow_memory_growth):
self.uses_es6 = True
self.set_setting('WASM_ASYNC_COMPILATION', 0)
if self.maybe_closure():
# avoid closure minified names competing with our test code in the global name space
self.set_setting('MODULARIZE')
else:
self.set_setting('WASM_ASYNC_COMPILATION', 0)
# Force IDL checks mode
with env_modify({'IDL_CHECKS': mode}):
self.run_process([WEBIDL_BINDER, test_file('webidl/test.idl'), 'glue'])
self.assertExists('glue.cpp')
self.assertExists('glue.js')
post_js = '\n\n'
if self.get_setting('MODULARIZE'):
post_js += 'var TheModule = Module();\n'
else:
post_js += 'var TheModule = Module;\n'
post_js += '\n\n'
if allow_memory_growth:
post_js += "var isMemoryGrowthAllowed = true;\n"
else:
post_js += "var isMemoryGrowthAllowed = false;\n"
post_js += read_file(test_file('webidl/post.js'))
post_js += '\n\n'
create_file('extern-post.js', post_js)
# Export things on "TheModule". This matches the typical use pattern of the bound library
# being used as Box2D.* or Ammo.*, and we cannot rely on "Module" being always present (closure may remove it).
self.emcc_args += ['-s', 'EXPORTED_FUNCTIONS=_malloc,_free', '--post-js=glue.js', '--extern-post-js=extern-post.js']
if allow_memory_growth:
self.set_setting('ALLOW_MEMORY_GROWTH')
if not mode:
mode = 'DEFAULT'
expected = test_file('webidl/output_%s.txt' % mode)
self.do_run_from_file(test_file('webidl/test.cpp'), expected)
### Tests for tools
@no_wasm2js('TODO: source maps in wasm2js')
@parameterized({
'': ([],),
'minimal_runtime': (['-s', 'MINIMAL_RUNTIME'],),
})
def test_source_map(self, args):
if '-g' not in self.emcc_args:
self.emcc_args.append('-g')
self.emcc_args += args
src = '''
#include <stdio.h>
#include <assert.h>
__attribute__((noinline)) int foo() {
printf("hi"); // line 6
return 1; // line 7
}
int main() {
printf("%d", foo()); // line 11
return 0; // line 12
}
'''
create_file('src.cpp', src)
out_filename = 'a.out.js'
wasm_filename = 'a.out.wasm'
no_maps_filename = 'no-maps.out.js'
assert '-gsource-map' not in self.emcc_args
self.emcc('src.cpp', self.get_emcc_args(), out_filename)
# the file name may find its way into the generated code, so make sure we
# can do an apples-to-apples comparison by compiling with the same file name
shutil.move(out_filename, no_maps_filename)
no_maps_file = read_file(no_maps_filename)
no_maps_file = re.sub(' *//[@#].*$', '', no_maps_file, flags=re.MULTILINE)
self.emcc_args.append('-gsource-map')
self.emcc(os.path.abspath('src.cpp'),
self.get_emcc_args(),
out_filename,
stderr=PIPE)
map_referent = out_filename if not self.is_wasm() else wasm_filename
# after removing the @line and @sourceMappingURL comments, the build
# result should be identical to the non-source-mapped debug version.
# this is worth checking because the parser AST swaps strings for token
# objects when generating source maps, so we want to make sure the
# optimizer can deal with both types.
map_filename = map_referent + '.map'
data = json.load(open(map_filename))
if hasattr(data, 'file'):
# the file attribute is optional, but if it is present it needs to refer
# the output file.
self.assertPathsIdentical(map_referent, data['file'])
self.assertGreater(len(data['sources']), 1)
self.assertPathsIdentical('src.cpp', data['sources'][0])
if hasattr(data, 'sourcesContent'):
# the sourcesContent attribute is optional, but if it is present it
# needs to containt valid source text.
self.assertTextDataIdentical(src, data['sourcesContent'][0])
mappings = json.loads(self.run_js(
path_from_root('tests/sourcemap2json.js'),
args=[map_filename]))
seen_lines = set()
for m in mappings:
if m['source'] == 'src.cpp':
seen_lines.add(m['originalLine'])
# ensure that all the 'meaningful' lines in the original code get mapped
# when optimizing, the binaryen optimizer may remove some of them (by inlining, etc.)
if self.is_optimizing():
self.assertTrue(seen_lines.issuperset([11, 12]), seen_lines)
else:
self.assertTrue(seen_lines.issuperset([6, 7, 11, 12]), seen_lines)
  @no_wasm2js('TODO: source maps in wasm2js')
  def test_dwarf(self):
    """Build with -g and validate the DWARF in the wasm: the expected debug
    sections exist, the line table maps each source-level call, and the DWARF
    addresses agree with the binary offsets printed in the wat."""
    self.emcc_args.append('-g')
    create_file('src.cpp', '''
      #include <emscripten.h>
      EM_JS(int, out_to_js, (int x), {})
      void foo() {
        out_to_js(0); // line 5
        out_to_js(1); // line 6
        out_to_js(2); // line 7
        // A silly possible recursion to avoid binaryen doing any inlining.
        if (out_to_js(3)) foo();
      }
      int main() {
        foo();
      }
    ''')
    js_filename = 'a.out.js'
    wasm_filename = 'a.out.wasm'
    self.emcc('src.cpp', self.get_emcc_args(), js_filename)
    out = self.run_process([shared.LLVM_DWARFDUMP, wasm_filename, '-all'], stdout=PIPE).stdout
    # parse the sections
    sections = {}
    curr_section_name = ''
    curr_section_body = ''

    def add_section():
      # flush the section currently being accumulated, if any
      if curr_section_name:
        sections[curr_section_name] = curr_section_body

    for line in out.splitlines():
      if ' contents:' in line:
        # a new section, a line like ".debug_str contents:"
        add_section()
        curr_section_name = line.split(' ')[0]
        curr_section_body = ''
      else:
        # possibly a line in a section
        if curr_section_name:
          curr_section_body += line + '\n'
    add_section()
    # make sure the right sections exist
    self.assertIn('.debug_abbrev', sections)
    self.assertIn('.debug_info', sections)
    self.assertIn('.debug_line', sections)
    self.assertIn('.debug_str', sections)
    self.assertIn('.debug_ranges', sections)
    # verify some content in the sections
    self.assertIn('"src.cpp"', sections['.debug_info'])
    # the line section looks like this:
    # Address            Line   Column File   ISA Discriminator Flags
    # ------------------ ------ ------ ------ --- ------------- -------------
    # 0x000000000000000b      5      0      3   0             0  is_stmt
    src_to_addr = {}
    found_src_cpp = False
    for line in sections['.debug_line'].splitlines():
      if 'name: "src.cpp"' in line:
        found_src_cpp = True
      if not found_src_cpp:
        continue
      if 'debug_line' in line:
        break
      if line.startswith('0x'):
        # collapse runs of spaces so the columns split cleanly
        while '  ' in line:
          line = line.replace('  ', ' ')
        addr, line, col = line.split(' ')[:3]
        key = (int(line), int(col))
        src_to_addr.setdefault(key, []).append(addr)
    # each of the calls must remain in the binary, and be mapped
    self.assertIn((5, 9), src_to_addr)
    self.assertIn((6, 9), src_to_addr)
    self.assertIn((7, 9), src_to_addr)

    def get_dwarf_addr(line, col):
      # the single binary address for a (line, col) source location
      addrs = src_to_addr[(line, col)]
      # we assume the simple calls have one address
      self.assertEqual(len(addrs), 1)
      return int(addrs[0], 0)

    # the lines must appear in sequence (as calls to JS, the optimizer cannot
    # reorder them)
    self.assertLess(get_dwarf_addr(5, 9), get_dwarf_addr(6, 9))
    self.assertLess(get_dwarf_addr(6, 9), get_dwarf_addr(7, 9))
    # Get the wat, printing with -g which has binary offsets
    wat = self.run_process([Path(building.get_binaryen_bin(), 'wasm-opt'),
                            wasm_filename, '-g', '--print'], stdout=PIPE).stdout
    # We expect to see a pattern like this in optimized builds (there isn't
    # much that can change with such calls to JS (they can't be reordered or
    # anything else):
    #
    #   ;; code offset: 0x?
    #   (drop
    #    ;; code offset: 0x?
    #    (call $out_to_js
    #     ;; code offset: 0x?
    #     (local.get ?) or (i32.const ?)
    #    )
    #   )
    #
    # In the stacky stream of instructions form, it is
    #
    #   local.get or i32.const
    #   call $out_to_js
    #   drop
    #
    # However, in an unoptimized build the constant may be assigned earlier in
    # some other manner, so stop here.
    if not self.is_optimizing():
      return

    # get_wat_addr gets the address of one of the 3 interesting calls, by its
    # index (0,1,2).
    def get_wat_addr(call_index):
      # find the call_index-th call
      call_loc = -1
      for i in range(call_index + 1):
        call_loc = wat.find('call $out_to_js', call_loc + 1)
        assert call_loc > 0
      # the call begins with the local.get/i32.const printed below it, which is
      # the first instruction in the stream, so it has the lowest address
      start_addr_loc = wat.find('0x', call_loc)
      assert start_addr_loc > 0
      start_addr_loc_end = wat.find('\n', start_addr_loc)
      start_addr = int(wat[start_addr_loc:start_addr_loc_end], 0)
      # the call ends with the drop, which is the last in the stream, at the
      # highest address
      end_addr_loc = wat.rfind('drop', 0, call_loc)
      assert end_addr_loc > 0
      end_addr_loc = wat.rfind('0x', 0, end_addr_loc)
      assert end_addr_loc > 0
      end_addr_loc_end = wat.find('\n', end_addr_loc)
      assert end_addr_loc_end > 0
      end_addr = int(wat[end_addr_loc:end_addr_loc_end], 0)
      return (start_addr, end_addr)

    # match up the DWARF and the wat
    for i in range(3):
      dwarf_addr = get_dwarf_addr(5 + i, 9)
      start_wat_addr, end_wat_addr = get_wat_addr(i)
      # the dwarf may match any of the 3 instructions that form the stream of
      # of instructions implementing the call in the source code, in theory
      self.assertLessEqual(start_wat_addr, dwarf_addr)
      self.assertLessEqual(dwarf_addr, end_wat_addr)
def test_modularize_closure_pre(self):
# test that the combination of modularize + closure + pre-js works. in that mode,
# closure should not minify the Module object in a way that the pre-js cannot use it.
create_file('post.js', 'var TheModule = Module();\n')
self.emcc_args += [
'--pre-js', test_file('core/modularize_closure_pre.js'),
'--extern-post-js=post.js',
'--closure=1',
'-g1',
'-s',
'MODULARIZE=1',
]
self.do_core_test('modularize_closure_pre.c')
  @no_wasm2js('symbol names look different wasm2js backtraces')
  def test_emscripten_log(self):
    """Exercise the emscripten_log API, both plain and under closure."""
    self.banned_js_engines = [config.V8_ENGINE] # v8 doesn't support console.log
    self.set_setting('DEMANGLE_SUPPORT')
    if '-g' not in self.emcc_args:
      self.emcc_args.append('-g')
    self.emcc_args += ['-DRUN_FROM_JS_SHELL']
    self.do_run_in_out_file_test('emscripten_log/emscripten_log.cpp', interleaved_output=False)
    # test closure compiler as well
    if self.maybe_closure():
      self.emcc_args += ['-g1'] # extra testing
      self.do_run_in_out_file_test('emscripten_log/emscripten_log_with_closure.cpp', interleaved_output=False)
def test_float_literals(self):
self.do_run_in_out_file_test('test_float_literals.cpp')
  def test_exit_status(self):
    """Verify exit-status propagation and atexit behavior for exit(), _exit()
    and _Exit(): only the normal exit() runs atexit handlers, and all three
    report status 118."""
    # needs to flush stdio streams
    self.set_setting('EXIT_RUNTIME')
    create_file('exit.c', r'''
      #include <stdio.h>
      #include <assert.h>
      #include <stdlib.h>
      #include <unistd.h>
      static void cleanup() {
      #ifndef NORMAL_EXIT
        assert(0 && "cleanup should only be called from normal exit()");
      #endif
        printf("cleanup\n");
      }
      int main() {
        atexit(cleanup); // this atexit should still be called
        printf("hello, world!\n");
        // Unusual exit status to make sure it's working!
      #ifdef CAPITAL_EXIT
        _Exit(118);
      #elif defined(UNDER_EXIT)
        _exit(118);
      #elif defined(NORMAL_EXIT)
        exit(118);
      #endif
      }
    ''')
    create_file('pre.js', '''
      Module.onExit = function() {
        out('I see exit status: ' + EXITSTATUS);
      }
    ''')
    self.emcc_args += ['--pre-js', 'pre.js']
    print('.. exit')
    self.do_runf('exit.c', 'hello, world!\ncleanup\nI see exit status: 118', assert_returncode=118, emcc_args=['-DNORMAL_EXIT'])
    print('.. _exit')
    self.do_runf('exit.c', 'hello, world!\nI see exit status: 118', assert_returncode=118, emcc_args=['-DUNDER_EXIT'])
    print('.. _Exit')
    self.do_runf('exit.c', 'hello, world!\nI see exit status: 118', assert_returncode=118, emcc_args=['-DCAPITAL_EXIT'])
  def test_noexitruntime(self):
    """Setting noExitRuntime (from a global ctor, or from main) must prevent
    global destructors from running."""
    src = r'''
      #include <emscripten.h>
      #include <stdio.h>
      static int testPre = TEST_PRE;
      struct Global {
        Global() {
          printf("in Global()\n");
          if (testPre) { EM_ASM(noExitRuntime = true;); }
        }
        ~Global() { printf("ERROR: in ~Global()\n"); }
      } global;
      int main() {
        if (!testPre) { EM_ASM(noExitRuntime = true;); }
        printf("in main()\n");
      }
    '''
    # TEST_PRE=0: set noExitRuntime in main; TEST_PRE=1: set it in the ctor
    self.do_run(src.replace('TEST_PRE', '0'), 'in Global()\nin main()')
    self.do_run(src.replace('TEST_PRE', '1'), 'in Global()\nin main()')
def test_minmax(self):
self.do_runf(test_file('test_minmax.c'), 'NAN != NAN\nSuccess!')
def test_localeconv(self):
self.do_run_in_out_file_test('core/test_localeconv.c')
def test_newlocale(self):
self.do_run_in_out_file_test('core/test_newlocale.c')
def test_setlocale(self):
self.do_run_in_out_file_test('core/test_setlocale.c')
def test_vswprintf_utf8(self):
self.do_run_in_out_file_test('vswprintf_utf8.c')
  # needs setTimeout which only node has
  @require_node
  def test_async_hello(self):
    """Basic asyncify: emscripten_sleep suspends main while an async callback
    runs and mutates a stack variable."""
    # needs to flush stdio streams
    self.set_setting('EXIT_RUNTIME')
    self.set_setting('ASYNCIFY')
    create_file('main.c', r'''
      #include <stdio.h>
      #include <emscripten.h>
      void f(void *p) {
        *(int*)p = 99;
        printf("!");
      }
      int main() {
        int i = 0;
        printf("Hello");
        emscripten_async_call(f, &i, 1);
        printf("World");
        emscripten_sleep(100);
        printf("%d\n", i);
      }
    ''')
    self.do_runf('main.c', 'HelloWorld!99')
  @require_node
  def test_async_ccall_bad(self):
    """A synchronous ccall into an asyncified main must fail with a clear
    message that the call is running asynchronously."""
    # check bad ccall use
    # needs to flush stdio streams
    self.set_setting('EXIT_RUNTIME')
    self.set_setting('ASYNCIFY')
    self.set_setting('ASSERTIONS')
    self.set_setting('INVOKE_RUN', 0)
    create_file('main.c', r'''
      #include <stdio.h>
      #include <emscripten.h>
      int main() {
        printf("Hello");
        emscripten_sleep(100);
        printf("World\n");
      }
    ''')
    create_file('pre.js', '''
      Module['onRuntimeInitialized'] = function() {
        try {
          ccall('main', 'number', ['number', 'string'], [2, 'waka']);
          var never = true;
        } catch(e) {
          out(e);
          assert(!never);
        }
      };
    ''')
    self.emcc_args += ['--pre-js', 'pre.js']
    self.do_runf('main.c', 'The call to main is running asynchronously.')
  @require_node
  def test_async_ccall_good(self):
    """ccall with async:true into an asyncified main works correctly."""
    # check reasonable ccall use
    # needs to flush stdio streams
    self.set_setting('EXIT_RUNTIME')
    self.set_setting('ASYNCIFY')
    self.set_setting('ASSERTIONS')
    self.set_setting('INVOKE_RUN', 0)
    create_file('main.c', r'''
      #include <stdio.h>
      #include <emscripten.h>
      int main() {
        printf("Hello");
        emscripten_sleep(100);
        printf("World\n");
      }
    ''')
    create_file('pre.js', '''
      Module['onRuntimeInitialized'] = function() {
        ccall('main', null, ['number', 'string'], [2, 'waka'], { async: true });
      };
    ''')
    self.emcc_args += ['--pre-js', 'pre.js']
    self.do_runf('main.c', 'HelloWorld')
@parameterized({
'': (False,),
'exit_runtime': (True,),
})
def test_async_ccall_promise(self, exit_runtime):
self.set_setting('ASYNCIFY')
self.set_setting('EXIT_RUNTIME')
self.set_setting('ASSERTIONS')
self.set_setting('INVOKE_RUN', 0)
self.set_setting('EXIT_RUNTIME', exit_runtime)
self.set_setting('EXPORTED_FUNCTIONS', ['_stringf', '_floatf'])
create_file('main.c', r'''
#include <stdio.h>
#include <emscripten.h>
const char* stringf(char* param) {
emscripten_sleep(20);
printf("stringf: %s", param);
return "second";
}
double floatf() {
emscripten_sleep(20);
emscripten_sleep(20);
return 6.4;
}
''')
create_file('pre.js', r'''
Module['onRuntimeInitialized'] = function() {
runtimeKeepalivePush();
ccall('stringf', 'string', ['string'], ['first\n'], { async: true })
.then(function(val) {
console.log(val);
ccall('floatf', 'number', null, null, { async: true }).then(function(arg) {
console.log(arg);
runtimeKeepalivePop();
maybeExit();
});
});
};
''')
self.emcc_args += ['--pre-js', 'pre.js']
self.do_runf('main.c', 'stringf: first\nsecond\n6.4')
def test_fibers_asyncify(self):
self.set_setting('ASYNCIFY')
self.maybe_closure()
self.do_runf(test_file('test_fibers.cpp'), '*leaf-0-100-1-101-1-102-2-103-3-104-5-105-8-106-13-107-21-108-34-109-*')
def test_asyncify_unused(self):
# test a program not using asyncify, but the pref is set
self.set_setting('ASYNCIFY')
self.do_core_test('test_hello_world.c')
  @parameterized({
    'normal': ([], True),
    'removelist_a': (['-s', 'ASYNCIFY_REMOVE=["foo(int, double)"]'], False),
    'removelist_b': (['-s', 'ASYNCIFY_REMOVE=["bar()"]'], True),
    'removelist_c': (['-s', 'ASYNCIFY_REMOVE=["baz()"]'], False),
    'onlylist_a': (['-s', 'ASYNCIFY_ONLY=["main","__original_main","foo(int, double)","baz()","c_baz","Structy::funcy()","bar()"]'], True),
    'onlylist_b': (['-s', 'ASYNCIFY_ONLY=["main","__original_main","foo(int, double)","baz()","c_baz","Structy::funcy()"]'], True),
    'onlylist_c': (['-s', 'ASYNCIFY_ONLY=["main","__original_main","foo(int, double)","baz()","c_baz"]'], False),
    'onlylist_d': (['-s', 'ASYNCIFY_ONLY=["foo(int, double)","baz()","c_baz","Structy::funcy()"]'], False, None, True),
    'onlylist_b_response': ([], True, '["main","__original_main","foo(int, double)","baz()","c_baz","Structy::funcy()"]'),
    'onlylist_c_response': ([], False, '["main","__original_main","foo(int, double)","baz()","c_baz"]'),
  })
  def test_asyncify_lists(self, args, should_pass, response=None, no_san=False):
    """ASYNCIFY_REMOVE/ASYNCIFY_ONLY lists: builds whose list covers the whole
    suspend path should pass; incomplete lists should trap at runtime."""
    if no_san and is_sanitizing(self.emcc_args):
      self.skipTest('remaining asyncify+sanitizer TODO')
    if response is not None:
      # also exercise passing the list via an @response-file
      create_file('response.file', response)
      self.set_setting('ASYNCIFY_ONLY', '@response.file')
    self.set_setting('ASYNCIFY')
    self.emcc_args += args
    if should_pass:
      self.do_core_test('test_asyncify_lists.cpp', assert_identical=True)
    else:
      self.do_runf(test_file('core/test_asyncify_lists.cpp'), ('RuntimeError', 'Thrown at'), assert_returncode=NON_ZERO)
    # use of ASYNCIFY_* options may require intermediate debug info. that should
    # not end up emitted in the final binary
    # (note that we can't check this if sanitizers run, as they include a lot of
    # static strings that would match the search)
    if self.is_wasm() and not is_sanitizing(self.emcc_args):
      binary = read_binary('test_asyncify_lists.wasm')
      # there should be no name section
      self.assertFalse(b'name' in binary)
      # in a fully-optimized build, imports and exports are minified too and we
      # can verify that our function names appear nowhere
      if '-O3' in self.emcc_args:
        self.assertFalse(b'main' in binary)
  @parameterized({
    'normal': ([], True),
    'ignoreindirect': (['-s', 'ASYNCIFY_IGNORE_INDIRECT'], False),
    'add': (['-s', 'ASYNCIFY_IGNORE_INDIRECT', '-s', 'ASYNCIFY_ADD=["__original_main","main","virt()"]'], True),
  })
  def test_asyncify_indirect_lists(self, args, should_pass):
    """ASYNCIFY_IGNORE_INDIRECT/ASYNCIFY_ADD: suspend through an indirect call
    should only work when the involved functions are instrumented."""
    self.set_setting('ASYNCIFY')
    self.emcc_args += args
    try:
      self.do_core_test('test_asyncify_indirect_lists.cpp', assert_identical=True)
      # the test ran cleanly; if it was expected to fail, flag that by
      # raising (and flip should_pass so the except clause re-raises it)
      if not should_pass:
        should_pass = True
        raise Exception('should not have passed')
    except Exception:
      # re-raise both "unexpectedly passed" and "unexpectedly failed" cases;
      # an expected failure (should_pass False) is swallowed here
      if should_pass:
        raise
@no_asan('asyncify stack operations confuse asan')
def test_emscripten_scan_registers(self):
self.set_setting('ASYNCIFY')
self.do_core_test('test_emscripten_scan_registers.cpp')
def test_asyncify_assertions(self):
self.set_setting('ASYNCIFY')
self.set_setting('ASYNCIFY_IMPORTS', ['suspend'])
self.set_setting('ASSERTIONS')
self.do_core_test('test_asyncify_assertions.c', assert_returncode=NON_ZERO)
  @no_lsan('leaks asyncify stack during exit')
  @no_asan('leaks asyncify stack during exit')
  def test_asyncify_during_exit(self):
    """Suspending while the runtime is exiting must abort; with the async
    path compiled out (-DNO_ASYNC) the program exits normally."""
    self.set_setting('ASYNCIFY')
    self.set_setting('ASSERTIONS')
    self.set_setting('EXIT_RUNTIME', 1)
    self.do_core_test('test_asyncify_during_exit.cpp', assert_returncode=NON_ZERO)
    print('NO_ASYNC')
    self.do_core_test('test_asyncify_during_exit.cpp', emcc_args=['-DNO_ASYNC'], out_suffix='_no_async')
@no_asan('asyncify stack operations confuse asan')
@no_wasm2js('dynamic linking support in wasm2js')
def test_asyncify_main_module(self):
self.set_setting('ASYNCIFY', 1)
self.set_setting('MAIN_MODULE', 2)
self.do_core_test('test_hello_world.c')
  @no_asan('asyncify stack operations confuse asan')
  @no_wasm2js('TODO: lazy loading in wasm2js')
  @parameterized({
    'conditional': (True,),
    'unconditional': (False,),
  })
  def test_emscripten_lazy_load_code(self, conditional):
    """ASYNCIFY_LAZY_LOAD_CODE splits the program into a first wasm plus a
    lazily-loaded second wasm; verify relative sizes, and prove which wasm
    actually executes by deliberately breaking each in turn."""
    self.set_setting('ASYNCIFY_LAZY_LOAD_CODE')
    self.set_setting('ASYNCIFY_IGNORE_INDIRECT')
    self.set_setting('MALLOC', 'emmalloc')
    self.emcc_args += ['--profiling-funcs'] # so that we can find the functions for the changes below
    if conditional:
      self.emcc_args += ['-DCONDITIONAL']
    self.do_core_test('emscripten_lazy_load_code.cpp', args=['0'])
    first_size = os.path.getsize('emscripten_lazy_load_code.wasm')
    second_size = os.path.getsize('emscripten_lazy_load_code.wasm.lazy.wasm')
    print('first wasm size', first_size)
    print('second wasm size', second_size)
    if not conditional and self.is_optimizing() and '-g' not in self.emcc_args:
      # If the call to lazy-load is unconditional, then the optimizer can dce
      # out more than half
      self.assertLess(first_size, 0.6 * second_size)
    wasm1 = read_binary('emscripten_lazy_load_code.wasm')
    wasm2 = read_binary('emscripten_lazy_load_code.wasm.lazy.wasm')
    self.assertNotEqual(wasm1, wasm2)

    # attempts to "break" the wasm by adding an unreachable in $foo_end. returns whether we found it.
    def break_wasm(name):
      wat = self.run_process([Path(building.get_binaryen_bin(), 'wasm-dis'), name], stdout=PIPE).stdout
      lines = wat.splitlines()
      wat = None
      for i in range(len(lines)):
        if '(func $foo_end ' in lines[i]:
          j = i + 1
          while '(local ' in lines[j]:
            j += 1
          # we found the first line after the local defs
          lines[j] = '(unreachable)' + lines[j]
          wat = '\n'.join(lines)
          break
      if wat is None:
        # $foo_end is not present in the wasm, nothing to break
        shutil.copyfile(name, name + '.orig')
        return False
      with open('wat.wat', 'w') as f:
        f.write(wat)
      shutil.move(name, name + '.orig')
      self.run_process([Path(building.get_binaryen_bin(), 'wasm-as'), 'wat.wat', '-o', name, '-g'])
      return True

    def verify_working(args=['0']):
      # a working build prints foo_end when it runs to completion
      self.assertContained('foo_end\n', self.run_js('emscripten_lazy_load_code.js', args=args))

    def verify_broken(args=['0']):
      # a broken build must fail before reaching foo_end
      self.assertNotContained('foo_end\n', self.run_js('emscripten_lazy_load_code.js', args=args, assert_returncode=NON_ZERO))

    # the first-loaded wasm will not reach the second call, since we call it after lazy-loading.
    # verify that by changing the first wasm to throw in that function
    found_foo_end = break_wasm('emscripten_lazy_load_code.wasm')
    if not conditional and self.is_optimizing():
      self.assertFalse(found_foo_end, 'should have optimizd out $foo_end')
    verify_working()
    # but breaking the second wasm actually breaks us
    break_wasm('emscripten_lazy_load_code.wasm.lazy.wasm')
    verify_broken()
    # restore
    shutil.copyfile('emscripten_lazy_load_code.wasm.orig', 'emscripten_lazy_load_code.wasm')
    shutil.copyfile('emscripten_lazy_load_code.wasm.lazy.wasm.orig', 'emscripten_lazy_load_code.wasm.lazy.wasm')
    verify_working()
    if conditional:
      # if we do not call the lazy load function, then we do not need the lazy wasm,
      # and we do the second call in the first wasm
      os.remove('emscripten_lazy_load_code.wasm.lazy.wasm')
      verify_broken()
      verify_working(['42'])
      break_wasm('emscripten_lazy_load_code.wasm')
      verify_broken()
# Test basic wasm2js functionality in all core compilation modes.
@no_asan('no wasm2js support yet in asan')
def test_wasm2js(self):
if not self.is_wasm():
self.skipTest('redundant to test wasm2js in wasm2js* mode')
self.set_setting('WASM', 0)
self.do_core_test('test_hello_world.c')
# a mem init file is emitted just like with JS
expect_memory_init_file = self.uses_memory_init_file()
if expect_memory_init_file:
self.assertExists('test_hello_world.js.mem')
mem = read_binary('test_hello_world.js.mem')
self.assertTrue(mem[-1] != b'\0')
else:
self.assertNotExists('test_hello_world.js.mem')
@no_asan('no wasm2js support yet in asan')
def test_maybe_wasm2js(self):
if not self.is_wasm():
self.skipTest('redundant to test wasm2js in wasm2js* mode')
self.set_setting('MAYBE_WASM2JS')
# see that running as wasm works
self.do_core_test('test_hello_world.c')
# run wasm2js, bundle the code, and use the wasm2js path
cmd = [PYTHON, path_from_root('tools/maybe_wasm2js.py'), 'test_hello_world.js', 'test_hello_world.wasm']
if self.is_optimizing():
cmd += ['-O2']
self.run_process(cmd, stdout=open('do_wasm2js.js', 'w')).stdout
# remove the wasm to make sure we never use it again
os.remove('test_hello_world.wasm')
# verify that it runs
self.assertContained('hello, world!', self.run_js('do_wasm2js.js'))
@no_asan('no wasm2js support yet in asan')
@parameterized({
'': ([],),
'minimal_runtime': (['-s', 'MINIMAL_RUNTIME'],),
})
def test_wasm2js_fallback(self, args):
if not self.is_wasm():
self.skipTest('redundant to test wasm2js in wasm2js* mode')
cmd = [EMCC, test_file('small_hello_world.c'), '-s', 'WASM=2'] + args
self.run_process(cmd)
# First run with WebAssembly support enabled
# Move the Wasm2js fallback away to test it is not accidentally getting loaded.
os.rename('a.out.wasm.js', 'a.out.wasm.js.unused')
self.assertContained('hello!', self.run_js('a.out.js'))
os.rename('a.out.wasm.js.unused', 'a.out.wasm.js')
# Then disable WebAssembly support in VM, and try again.. Should still work with Wasm2JS fallback.
open('b.out.js', 'w').write('WebAssembly = undefined;\n' + read_file('a.out.js'))
os.remove('a.out.wasm') # Also delete the Wasm file to test that it is not attempted to be loaded.
self.assertContained('hello!', self.run_js('b.out.js'))
  def test_cxx_self_assign(self):
    """Regression test: std::map self-assignment (m = m) must not lose entries."""
    # See https://github.com/emscripten-core/emscripten/pull/2688 and http://llvm.org/bugs/show_bug.cgi?id=18735
    self.do_run(r'''
      #include <map>
      #include <stdio.h>
      int main() {
        std::map<int, int> m;
        m[0] = 1;
        m = m;
        // size should still be one after self assignment
        if (m.size() == 1) {
          printf("ok.\n");
        }
      }
      ''', 'ok.')
  def test_memprof_requirements(self):
    """Check that the globals required by --memoryprofiler remain available."""
    # This test checks for the global variables required to run the memory
    # profiler. It would fail if these variables were made no longer global
    # or if their identifiers were changed.
    create_file('main.cpp', '''
      extern "C" {
        void check_memprof_requirements();
      }
      int main() {
        check_memprof_requirements();
        return 0;
      }
    ''')
    create_file('lib.js', '''
      mergeInto(LibraryManager.library, {
        check_memprof_requirements: function() {
          if (typeof _emscripten_stack_get_base === 'function' &&
              typeof _emscripten_stack_get_end === 'function' &&
              typeof _emscripten_stack_get_current === 'function' &&
              typeof Module['___heap_base'] === 'number') {
             out('able to run memprof');
           } else {
             out('missing the required variables to run memprof');
           }
        }
      });
    ''')
    self.emcc_args += ['--memoryprofiler', '--js-library', 'lib.js']
    self.do_runf('main.cpp', 'able to run memprof')
  def test_fs_dict(self):
    """With -lidbfs.js and -lnodefs.js linked, all three filesystems
    (MEMFS, IDBFS, NODEFS) should be present in FS.filesystems and as globals."""
    self.set_setting('FORCE_FILESYSTEM')
    self.emcc_args += ['-lidbfs.js']
    self.emcc_args += ['-lnodefs.js']
    create_file('pre.js', '''
      Module = {};
      Module['preRun'] = function() {
        out(typeof FS.filesystems['MEMFS']);
        out(typeof FS.filesystems['IDBFS']);
        out(typeof FS.filesystems['NODEFS']);
        // Globals
        console.log(typeof MEMFS);
        console.log(typeof IDBFS);
        console.log(typeof NODEFS);
      };
    ''')
    self.emcc_args += ['--pre-js', 'pre.js']
    self.do_run('int main() { return 0; }', 'object\nobject\nobject\nobject\nobject\nobject')
  def test_fs_dict_none(self):
    """Without -lidbfs.js/-lnodefs.js, IDBFS and NODEFS must be absent and the
    ASSERTIONS stubs must explain how to enable them."""
    # if IDBFS and NODEFS are not enabled, they are not present.
    self.set_setting('FORCE_FILESYSTEM')
    self.set_setting('ASSERTIONS')
    create_file('pre.js', '''
      Module = {};
      Module['preRun'] = function() {
        out(typeof FS.filesystems['MEMFS']);
        out(typeof FS.filesystems['IDBFS']);
        out(typeof FS.filesystems['NODEFS']);
        // Globals
        if (ASSERTIONS) {
          console.log(typeof MEMFS);
          console.log(IDBFS);
          console.log(NODEFS);
          FS.mkdir('/working1');
          try {
            FS.mount(IDBFS, {}, '/working1');
          } catch (e) {
            console.log('|' + e + '|');
          }
        }
      };
    ''')
    self.emcc_args += ['--pre-js', 'pre.js']
    expected = '''\
object
undefined
undefined
object
IDBFS is no longer included by default; build with -lidbfs.js
NODEFS is no longer included by default; build with -lnodefs.js
|IDBFS is no longer included by default; build with -lidbfs.js|'''
    self.do_run('int main() { return 0; }', expected)
  def test_stack_overflow_check(self):
    """STACK_OVERFLOW_CHECK=2 (and ASSERTIONS=2, which implies it) must abort
    on a stack overflow."""
    self.set_setting('TOTAL_STACK', 1048576)
    self.set_setting('STACK_OVERFLOW_CHECK', 2)
    self.do_runf(test_file('stack_overflow.cpp'), 'stack overflow', assert_returncode=NON_ZERO)

    self.emcc_args += ['-DONE_BIG_STRING']
    self.do_runf(test_file('stack_overflow.cpp'), 'stack overflow', assert_returncode=NON_ZERO)

    # ASSERTIONS=2 implies STACK_OVERFLOW_CHECK=2
    self.clear_setting('STACK_OVERFLOW_CHECK')
    self.set_setting('ASSERTIONS', 2)
    self.do_runf(test_file('stack_overflow.cpp'), 'stack overflow', assert_returncode=NON_ZERO)
  @node_pthreads
  def test_binaryen_2170_emscripten_atomic_cas_u8(self):
    """Regression test for binaryen issue 2170 (u8 atomic compare-and-swap)."""
    self.set_setting('USE_PTHREADS')
    self.do_run_in_out_file_test('binaryen_2170_emscripten_atomic_cas_u8.cpp')
  @also_with_standalone_wasm()
  def test_sbrk(self):
    """Exercise sbrk() (also in standalone-wasm mode)."""
    self.do_runf(test_file('sbrk_brk.cpp'), 'OK.')
  def test_brk(self):
    """Exercise brk() via the TEST_BRK branch of the shared sbrk/brk test."""
    self.emcc_args += ['-DTEST_BRK=1']
    self.do_runf(test_file('sbrk_brk.cpp'), 'OK.')
  # Tests that we can use the dlmalloc mallinfo() function to obtain information
  # about malloc()ed blocks and compute how much memory is used/freed.
  @no_asan('mallinfo is not part of ASan malloc')
  def test_mallinfo(self):
    """Exercise dlmalloc's mallinfo() accounting."""
    self.do_runf(test_file('mallinfo.cpp'), 'OK.')
  @no_asan('cannot replace malloc/free with ASan')
  def test_wrap_malloc(self):
    """Test replacing malloc/free with user-provided wrappers."""
    self.do_runf(test_file('wrap_malloc.cpp'), 'OK.')
  def test_environment(self):
    """Test the ENVIRONMENT setting: a build restricted to one environment
    must run there, fail elsewhere, and work again with a combined list."""
    self.set_setting('ASSERTIONS')

    def test(assert_returncode=0):
      # Build/run and sanity-check that require() appears only in node builds.
      self.do_core_test('test_hello_world.c', assert_returncode=assert_returncode)
      js = read_file('test_hello_world.js')
      assert ('require(' in js) == ('node' in self.get_setting('ENVIRONMENT')), 'we should have require() calls only if node js specified'

    for engine in config.JS_ENGINES:
      print(engine)
      # set us to test in just this engine
      self.banned_js_engines = [e for e in config.JS_ENGINES if e != engine]
      # tell the compiler to build with just that engine
      if engine == config.NODE_JS:
        right = 'node'
        wrong = 'shell'
      else:
        right = 'shell'
        wrong = 'node'
      # test with the right env
      self.set_setting('ENVIRONMENT', right)
      print('ENVIRONMENT =', self.get_setting('ENVIRONMENT'))
      test()
      # test with the wrong env
      self.set_setting('ENVIRONMENT', wrong)
      print('ENVIRONMENT =', self.get_setting('ENVIRONMENT'))
      try:
        test(assert_returncode=NON_ZERO)
        raise Exception('unexpected success')
      except Exception as e:
        self.assertContained('not compiled for this environment', str(e))
      # test with a combined env
      self.set_setting('ENVIRONMENT', right + ',' + wrong)
      print('ENVIRONMENT =', self.get_setting('ENVIRONMENT'))
      test()
  def test_postrun_exception(self):
    """An exception thrown from postRun must surface on stderr, not be
    reported as a compilation failure."""
    # verify that an exception thrown in postRun() will not trigger the
    # compilation failed handler, and will be printed to stderr.
    # Explictly disable EXIT_RUNTIME, since otherwise addOnPostRun does not work.
    # https://github.com/emscripten-core/emscripten/issues/15080
    self.set_setting('EXIT_RUNTIME', 0)
    self.add_post_run('ThisFunctionDoesNotExist()')
    self.build(test_file('core/test_hello_world.c'))
    output = self.run_js('test_hello_world.js', assert_returncode=NON_ZERO)
    self.assertStartswith(output, 'hello, world!')
    self.assertContained('ThisFunctionDoesNotExist is not defined', output)
# Tests that building with -s DECLARE_ASM_MODULE_EXPORTS=0 works
def test_no_declare_asm_module_exports(self):
self.set_setting('DECLARE_ASM_MODULE_EXPORTS', 0)
self.set_setting('WASM_ASYNC_COMPILATION', 0)
self.maybe_closure()
self.do_runf(test_file('declare_asm_module_exports.cpp'), 'jsFunction: 1')
js = read_file('declare_asm_module_exports.js')
occurances = js.count('cFunction')
if self.is_optimizing() and '-g' not in self.emcc_args:
# In optimized builds only the single reference cFunction that exists in the EM_ASM should exist
if self.is_wasm():
self.assertEqual(occurances, 1)
else:
# With js the asm module itself also contains a reference for the cFunction name
self.assertEqual(occurances, 2)
else:
print(occurances)
  # Tests that building with -s DECLARE_ASM_MODULE_EXPORTS=0 works
  @no_asan('TODO: ASan support in minimal runtime')
  def test_minimal_runtime_no_declare_asm_module_exports(self):
    """DECLARE_ASM_MODULE_EXPORTS=0 combined with MINIMAL_RUNTIME."""
    self.set_setting('DECLARE_ASM_MODULE_EXPORTS', 0)
    self.set_setting('WASM_ASYNC_COMPILATION', 0)
    self.maybe_closure()
    self.set_setting('MINIMAL_RUNTIME')
    self.do_runf(test_file('declare_asm_module_exports.cpp'), 'jsFunction: 1')
  # Tests that -s MINIMAL_RUNTIME=1 works well in different build modes
  @parameterized({
    'default': ([],),
    'streaming': (['-s', 'MINIMAL_RUNTIME_STREAMING_WASM_COMPILATION'],),
    'streaming_inst': (['-s', 'MINIMAL_RUNTIME_STREAMING_WASM_INSTANTIATION'],),
    'no_export': (['-s', 'DECLARE_ASM_MODULE_EXPORTS=0'],)
  })
  def test_minimal_runtime_hello_world(self, args):
    """hello-world smoke test for MINIMAL_RUNTIME in several build modes."""
    # TODO: Support for non-Node.js shells has not yet been added to MINIMAL_RUNTIME
    self.banned_js_engines = [config.V8_ENGINE, config.SPIDERMONKEY_ENGINE]
    self.emcc_args = args
    self.set_setting('MINIMAL_RUNTIME')
    self.maybe_closure()
    self.do_runf(test_file('small_hello_world.c'), 'hello')
  # Test that printf() works in MINIMAL_RUNTIME=1
  @parameterized({
    'fs': ('FORCE_FILESYSTEM',),
    'nofs': ('NO_FILESYSTEM',),
  })
  @no_asan('TODO: ASan support in minimal runtime')
  def test_minimal_runtime_hello_printf(self, extra_setting):
    """printf() under MINIMAL_RUNTIME, with and without the filesystem."""
    self.set_setting('MINIMAL_RUNTIME')
    self.set_setting(extra_setting)
    # $FS is not fully compatible with MINIMAL_RUNTIME so fails with closure
    # compiler.  lsan also pulls in $FS
    if '-fsanitize=leak' not in self.emcc_args and extra_setting != 'FORCE_FILESYSTEM':
      self.maybe_closure()
    self.do_runf(test_file('hello_world.c'), 'hello, world!')
  # Tests that -s MINIMAL_RUNTIME=1 works well with SAFE_HEAP
  @no_asan('TODO: ASan support in minimal runtime')
  def test_minimal_runtime_safe_heap(self):
    """MINIMAL_RUNTIME combined with SAFE_HEAP."""
    self.set_setting('MINIMAL_RUNTIME')
    self.set_setting('SAFE_HEAP')
    # $FS is not fully compatible with MINIMAL_RUNTIME so fails with closure
    # compiler.
    # lsan pulls in $FS
    if '-fsanitize=leak' not in self.emcc_args:
      self.maybe_closure()
    self.do_runf(test_file('small_hello_world.c'), 'hello')
  # Tests global initializer with -s MINIMAL_RUNTIME=1
  @no_asan('TODO: ASan support in minimal runtime')
  def test_minimal_runtime_global_initializer(self):
    """C++ global constructors must run under MINIMAL_RUNTIME."""
    self.set_setting('MINIMAL_RUNTIME')
    self.maybe_closure()
    self.do_runf(test_file('test_global_initializer.cpp'), 't1 > t0: 1')
  @no_wasm2js('wasm2js does not support PROXY_TO_PTHREAD (custom section support)')
  def test_return_address(self):
    """__builtin_return_address support via USE_OFFSET_CONVERTER."""
    self.set_setting('USE_OFFSET_CONVERTER')
    self.do_runf(test_file('core/test_return_address.c'), 'passed')
  @no_wasm2js('TODO: sanitizers in wasm2js')
  @no_asan('-fsanitize-minimal-runtime cannot be used with ASan')
  def test_ubsan_minimal_too_many_errors(self):
    """UBSan minimal runtime caps error output at 20 and then reports
    'too many errors'."""
    self.emcc_args += ['-fsanitize=undefined', '-fsanitize-minimal-runtime']
    if not self.is_wasm():
      if self.is_optimizing():
        self.skipTest('test can only be run without optimizations on asm.js')
      # Need to use `-g` to get proper line numbers in asm.js
      self.emcc_args += ['-g']
    self.do_runf(test_file('core/test_ubsan_minimal_too_many_errors.c'),
                 expected_output='ubsan: add-overflow\n' * 20 + 'ubsan: too many errors\n')
  @no_wasm2js('TODO: sanitizers in wasm2js')
  @no_asan('-fsanitize-minimal-runtime cannot be used with ASan')
  def test_ubsan_minimal_errors_same_place(self):
    """UBSan minimal runtime reports repeated errors at one location 5 times."""
    self.emcc_args += ['-fsanitize=undefined', '-fsanitize-minimal-runtime']
    if not self.is_wasm():
      if self.is_optimizing():
        self.skipTest('test can only be run without optimizations on asm.js')
      # Need to use `-g` to get proper line numbers in asm.js
      self.emcc_args += ['-g']
    self.do_runf(test_file('core/test_ubsan_minimal_errors_same_place.c'),
                 expected_output='ubsan: add-overflow\n' * 5)
  @parameterized({
    'fsanitize_undefined': (['-fsanitize=undefined'],),
    'fsanitize_integer': (['-fsanitize=integer'],),
    'fsanitize_overflow': (['-fsanitize=signed-integer-overflow'],),
  })
  @no_wasm2js('TODO: sanitizers in wasm2js')
  def test_ubsan_full_overflow(self, args):
    """Full UBSan runtime: signed-integer-overflow diagnostics with source
    locations."""
    self.emcc_args += args
    self.do_runf(test_file('core/test_ubsan_full_overflow.c'),
                 assert_all=True, expected_output=[
      ".c:3:5: runtime error: signed integer overflow: 2147483647 + 1 cannot be represented in type 'int'",
      ".c:7:7: runtime error: signed integer overflow: 2147483647 + 1 cannot be represented in type 'int'",
    ])
  @parameterized({
    'fsanitize_undefined': (['-fsanitize=undefined'],),
    'fsanitize_return': (['-fsanitize=return'],),
  })
  @no_wasm2js('TODO: sanitizers in wasm2js')
  def test_ubsan_full_no_return(self, args):
    """Full UBSan runtime: falling off the end of a value-returning function."""
    self.emcc_args += ['-Wno-return-type'] + args
    self.do_runf(test_file('core/test_ubsan_full_no_return.cpp'),
                 expected_output='.cpp:1:5: runtime error: execution reached the end of a value-returning function without returning a value', assert_returncode=NON_ZERO)
  @parameterized({
    'fsanitize_undefined': (['-fsanitize=undefined'],),
    'fsanitize_integer': (['-fsanitize=integer'],),
    'fsanitize_shift': (['-fsanitize=shift'],),
  })
  @no_wasm2js('TODO: sanitizers in wasm2js')
  def test_ubsan_full_left_shift(self, args):
    """Full UBSan runtime: invalid left-shift diagnostics."""
    self.emcc_args += args
    self.do_runf(test_file('core/test_ubsan_full_left_shift.c'),
                 assert_all=True, expected_output=[
      '.c:3:5: runtime error: left shift of negative value -1',
      ".c:7:5: runtime error: left shift of 16 by 29 places cannot be represented in type 'int'"
    ])
  @parameterized({
    'fsanitize_undefined': (['-fsanitize=undefined'],),
    'fsanitize_null': (['-fsanitize=null'],),
  })
  @no_wasm2js('TODO: sanitizers in wasm2js')
  def test_ubsan_full_null_ref(self, args):
    """Full UBSan runtime: binding a reference to a null pointer."""
    self.emcc_args += args
    self.do_runf(test_file('core/test_ubsan_full_null_ref.cpp'),
                 assert_all=True, expected_output=[
      ".cpp:3:12: runtime error: reference binding to null pointer of type 'int'",
      ".cpp:4:13: runtime error: reference binding to null pointer of type 'int'",
      ".cpp:5:14: runtime error: reference binding to null pointer of type 'int'",
    ])
  @parameterized({
    'fsanitize_undefined': (['-fsanitize=undefined'],),
    'fsanitize_vptr': (['-fsanitize=vptr'],),
  })
  @no_wasm2js('TODO: sanitizers in wasm2js')
  def test_ubsan_full_static_cast(self, args):
    """Full UBSan runtime: invalid downcast detected by the vptr check."""
    self.emcc_args += args
    self.do_runf(test_file('core/test_ubsan_full_static_cast.cpp'),
                 assert_all=True, expected_output=[
      ".cpp:18:10: runtime error: downcast of address",
      "which does not point to an object of type 'R'",
    ])
  @parameterized({
    'g': ('-g', [
      ".cpp:3:12: runtime error: reference binding to null pointer of type 'int'",
      'in main',
    ]),
    'g4': ('-gsource-map', [
      ".cpp:3:12: runtime error: reference binding to null pointer of type 'int'",
      'in main ',
      '.cpp:3:8'
    ]),
  })
  @no_wasm2js('TODO: sanitizers in wasm2js')
  def test_ubsan_full_stack_trace(self, g_flag, expected_output):
    """UBSAN_OPTIONS=print_stacktrace=1 must yield a symbolized stack trace;
    with -gsource-map also the original source location."""
    if g_flag == '-gsource-map':
      if not self.is_wasm():
        self.skipTest('wasm2js has no source map support')
      elif '-Oz' in self.emcc_args:
        self.skipTest('-Oz breaks stack traces')

    create_file('pre.js', 'Module = {UBSAN_OPTIONS: "print_stacktrace=1"};')

    self.emcc_args += ['-fsanitize=null', g_flag, '--pre-js=pre.js']
    self.set_setting('ALLOW_MEMORY_GROWTH')
    self.do_runf(test_file('core/test_ubsan_full_null_ref.cpp'),
                 assert_all=True, expected_output=expected_output)
  @no_wasm2js('TODO: sanitizers in wasm2js')
  def test_ubsan_typeinfo_eq(self):
    """typeid equality must work under UBSan."""
    # https://github.com/emscripten-core/emscripten/issues/13330
    src = r'''
      #include <typeinfo>
      #include <stdio.h>
      int main() {
        int mismatch = typeid(int) != typeid(int);
        printf("ok\n");
        return mismatch;
      }
      '''
    self.emcc_args.append('-fsanitize=undefined')
    self.do_run(src, 'ok\n')
  def test_template_class_deduction(self):
    """C++17 class template argument deduction."""
    self.emcc_args += ['-std=c++17']
    self.do_core_test('test_template_class_deduction.cpp')
  @no_wasm2js('TODO: ASAN in wasm2js')
  @no_safe_heap('asan does not work with SAFE_HEAP')
  @parameterized({
    'c': ['test_asan_no_error.c'],
    'cpp': ['test_asan_no_error.cpp'],
  })
  def test_asan_no_error(self, name):
    """ASan builds of defect-free programs must run without diagnostics."""
    self.emcc_args.append('-fsanitize=address')
    self.set_setting('ALLOW_MEMORY_GROWTH')
    self.set_setting('INITIAL_MEMORY', '300mb')
    self.do_runf(test_file('core', name), '', assert_returncode=NON_ZERO)
  # note: these tests have things like -fno-builtin-memset in order to avoid
  # clang optimizing things away. for example, a memset might be optimized into
  # stores, and then the stores identified as dead, which leaves nothing for
  # asan to test. here we want to test asan itself, so we work around that.
  @no_safe_heap('asan does not work with SAFE_HEAP')
  @parameterized({
    'use_after_free_c': ('test_asan_use_after_free.c', [
      'AddressSanitizer: heap-use-after-free on address',
    ]),
    'use_after_free_cpp': ('test_asan_use_after_free.cpp', [
      'AddressSanitizer: heap-use-after-free on address',
    ]),
    'use_after_return': ('test_asan_use_after_return.c', [
      'AddressSanitizer: stack-use-after-return on address',
    ], ['-Wno-return-stack-address']),
    'static_buffer_overflow': ('test_asan_static_buffer_overflow.c', [
      'AddressSanitizer: global-buffer-overflow on address',
    ], ['-fno-builtin-memset']),
    'heap_buffer_overflow_c': ('test_asan_heap_buffer_overflow.c', [
      'AddressSanitizer: heap-buffer-overflow on address',
    ], ['-fno-builtin-memset']),
    'heap_buffer_overflow_cpp': ('test_asan_heap_buffer_overflow.cpp', [
      'AddressSanitizer: heap-buffer-overflow on address',
    ], ['-fno-builtin-memset']),
    'stack_buffer_overflow': ('test_asan_stack_buffer_overflow.c', [
      'AddressSanitizer: stack-buffer-overflow'
    ], ['-fno-builtin-memset']),
    'stack_buffer_overflow_js': ('test_asan_stack_buffer_overflow_js.c', [
      'AddressSanitizer: stack-buffer-overflow'
    ], ['-fno-builtin-memset']),
    'bitfield_unround_size': ('test_asan_bitfield_unround_size.c', [
      'AddressSanitizer: stack-buffer-overflow'
    ], ['-fno-builtin-memset']),
    'bitfield_unround_offset': ('test_asan_bitfield_unround_offset.c', [
      'AddressSanitizer: stack-buffer-overflow'
    ], ['-fno-builtin-memset']),
    'bitfield_round': ('test_asan_bitfield_round.c', [
      'AddressSanitizer: stack-buffer-overflow'
    ], ['-fno-builtin-memset']),
    'memset_null': ('test_asan_memset_null.c', [
      'AddressSanitizer: null-pointer-dereference on address 0x00000001'
    ], ['-fno-builtin-memset']),
    'memset_freed': ('test_asan_memset_freed.c', [
      'AddressSanitizer: heap-use-after-free on address'
    ], ['-fno-builtin-memset']),
    'strcpy': ('test_asan_strcpy.c', [
      'AddressSanitizer: heap-buffer-overflow on address'
    ], ['-fno-builtin-strcpy']),
    'memcpy': ('test_asan_memcpy.c', [
      'AddressSanitizer: heap-buffer-overflow on address'
    ], ['-fno-builtin-memcpy']),
    'memchr': ('test_asan_memchr.c', [
      'AddressSanitizer: global-buffer-overflow on address'
    ], ['-fno-builtin-memchr']),
    'vector': ('test_asan_vector.cpp', [
      'AddressSanitizer: container-overflow on address'
    ]),
  })
  def test_asan(self, name, expected_output, cflags=None):
    """Each parameterized case triggers one class of memory error and asserts
    the matching AddressSanitizer diagnostic appears."""
    if '-Oz' in self.emcc_args:
      self.skipTest('-Oz breaks source maps')

    if not self.is_wasm():
      self.skipTest('wasm2js has no ASan support')

    self.emcc_args.append('-fsanitize=address')
    self.set_setting('ALLOW_MEMORY_GROWTH')
    self.set_setting('INITIAL_MEMORY', '300mb')
    if cflags:
      self.emcc_args += cflags
    self.do_runf(test_file('core', name),
                 expected_output=expected_output, assert_all=True,
                 check_for_error=False, assert_returncode=NON_ZERO)
  @no_safe_heap('asan does not work with SAFE_HEAP')
  @no_wasm2js('TODO: ASAN in wasm2js')
  def test_asan_js_stack_op(self):
    """JS-side stack string operations must work under ASan."""
    self.emcc_args.append('-fsanitize=address')
    self.set_setting('ALLOW_MEMORY_GROWTH')
    self.set_setting('INITIAL_MEMORY', '300mb')
    self.do_runf(test_file('core/test_asan_js_stack_op.c'),
                 expected_output='Hello, World!')
  @no_safe_heap('asan does not work with SAFE_HEAP')
  @no_wasm2js('TODO: ASAN in wasm2js')
  def test_asan_api(self):
    """Exercise the ASan interface functions (e.g. poisoning APIs)."""
    self.emcc_args.append('-fsanitize=address')
    self.set_setting('INITIAL_MEMORY', '300mb')
    self.do_core_test('test_asan_api.c')
  @no_safe_heap('asan does not work with SAFE_HEAP')
  @no_wasm2js('TODO: ASAN in wasm2js')
  def test_asan_modularized_with_closure(self):
    """MODULARIZE + closure + ASan: the factory must still return a Promise."""
    # the bug is that createModule() returns undefined, instead of the
    # proper Promise object.
    create_file('post.js', 'if (!(createModule() instanceof Promise)) throw "Promise was not returned :(";\n')
    self.emcc_args += ['-fsanitize=address', '--extern-post-js=post.js']
    self.set_setting('MODULARIZE')
    self.set_setting('EXPORT_NAME', 'createModule')
    self.set_setting('USE_CLOSURE_COMPILER')
    self.set_setting('ALLOW_MEMORY_GROWTH')
    self.set_setting('INITIAL_MEMORY', '300mb')
    self.do_runf(test_file('hello_world.c'), expected_output='hello, world!')
  @no_asan('SAFE_HEAP cannot be used with ASan')
  def test_safe_heap_user_js(self):
    """SAFE_HEAP must catch a store to address 0 triggered from user JS."""
    self.set_setting('SAFE_HEAP')
    self.do_runf(test_file('core/test_safe_heap_user_js.c'),
                 expected_output=['Aborted(segmentation fault storing 1 bytes to address 0)'], assert_returncode=NON_ZERO)
  def test_safe_stack(self):
    """STACK_OVERFLOW_CHECK=2 aborts on overflow; unoptimized builds also show
    the __handle_stack_overflow frame."""
    self.set_setting('STACK_OVERFLOW_CHECK', 2)
    self.set_setting('TOTAL_STACK', 65536)
    if self.is_optimizing():
      expected = ['Aborted(stack overflow)']
    else:
      expected = ['Aborted(stack overflow)', '__handle_stack_overflow']
    self.do_runf(test_file('core/test_safe_stack.c'),
                 expected_output=expected,
                 assert_returncode=NON_ZERO, assert_all=True)
  @node_pthreads
  def test_safe_stack_pthread(self):
    """Same as test_safe_stack, but with the overflow on a pthread
    (PROXY_TO_PTHREAD)."""
    self.set_setting('STACK_OVERFLOW_CHECK', 2)
    self.set_setting('TOTAL_STACK', 65536)
    self.set_setting('PROXY_TO_PTHREAD')
    self.set_setting('USE_PTHREADS')
    if self.is_optimizing():
      expected = ['Aborted(stack overflow)']
    else:
      expected = ['Aborted(stack overflow)', '__handle_stack_overflow']
    self.do_runf(test_file('core/test_safe_stack.c'),
                 expected_output=expected,
                 assert_returncode=NON_ZERO, assert_all=True)
  def test_safe_stack_alloca(self):
    """Same as test_safe_stack, but the overflow comes from alloca()."""
    self.set_setting('STACK_OVERFLOW_CHECK', 2)
    self.set_setting('TOTAL_STACK', 65536)
    if self.is_optimizing():
      expected = ['Aborted(stack overflow)']
    else:
      expected = ['Aborted(stack overflow)', '__handle_stack_overflow']
    self.do_runf(test_file('core/test_safe_stack_alloca.c'),
                 expected_output=expected,
                 assert_returncode=NON_ZERO, assert_all=True)
  @needs_dylink
  def test_safe_stack_dylink(self):
    """Stack overflow checking must also trigger for code in a side module."""
    self.set_setting('STACK_OVERFLOW_CHECK', 2)
    self.set_setting('TOTAL_STACK', 65536)
    self.dylink_test(r'''
      #include <stdio.h>
      extern void sidey();
      int main() {
        sidey();
      }
    ''', '''
      #include <string.h>

      static long accumulator = 0;

      int f(int *b) {
        // Infinite recursion while recording stack pointer locations
        // so that compiler can't eliminate the stack allocs.
        accumulator += (long)b;
        int a[1024];
        return f(a);
      }

      void sidey() {
        f(NULL);
      }
    ''', ['Aborted(stack overflow)', '__handle_stack_overflow'], assert_returncode=NON_ZERO, force_c=True)
  def test_fpic_static(self):
    """-fPIC must be accepted in a static (non-dylink) build."""
    self.emcc_args.append('-fPIC')
    self.do_core_test('test_hello_world.c')
  @node_pthreads
  def test_pthread_create(self):
    """Basic pthread_create under the plain 'node' environment."""
    self.set_setting('EXIT_RUNTIME')
    # test that the node environment can be specified by itself, and that still
    # works with pthreads (even though we did not specify 'node,worker')
    self.set_setting('ENVIRONMENT', 'node')
    self.do_run_in_out_file_test('core/pthread/create.cpp')
  @node_pthreads
  def test_pthread_c11_threads(self):
    """C11 <threads.h> API on top of pthreads."""
    self.set_setting('PROXY_TO_PTHREAD')
    self.set_setting('EXIT_RUNTIME')
    self.set_setting('PTHREADS_DEBUG')
    if not self.has_changed_setting('INITIAL_MEMORY'):
      self.set_setting('INITIAL_MEMORY', '64mb')
    # test that the node and worker environments can be specified
    self.set_setting('ENVIRONMENT', 'node,worker')
    self.do_run_in_out_file_test('pthread/test_pthread_c11_threads.c')
  @node_pthreads
  def test_pthread_cxx_threads(self):
    """C++ std::thread API on top of pthreads."""
    self.set_setting('PROXY_TO_PTHREAD')
    self.clear_setting('ALLOW_MEMORY_GROWTH')
    self.set_setting('INITIAL_MEMORY', '64Mb')
    self.set_setting('EXIT_RUNTIME')
    self.do_run_in_out_file_test('pthread/test_pthread_cxx_threads.cpp')
  @node_pthreads
  def test_pthread_create_pool(self):
    """pthread_create with a pre-spawned worker pool (synchronous start)."""
    # with a pool, we can synchronously depend on workers being available
    self.set_setting('PTHREAD_POOL_SIZE', 2)
    self.set_setting('EXIT_RUNTIME')
    self.emcc_args += ['-DALLOW_SYNC']
    self.do_run_in_out_file_test('core/pthread/create.cpp')
  @node_pthreads
  def test_pthread_create_proxy(self):
    """pthread_create under PROXY_TO_PTHREAD (synchronous start)."""
    # with PROXY_TO_PTHREAD, we can synchronously depend on workers being available
    self.set_setting('PROXY_TO_PTHREAD')
    self.set_setting('EXIT_RUNTIME')
    self.emcc_args += ['-DALLOW_SYNC']
    self.do_run_in_out_file_test('core/pthread/create.cpp')
  @node_pthreads
  def test_pthread_create_embind_stack_check(self):
    """pthreads + embind + STACK_OVERFLOW_CHECK=2 (regression for #12356)."""
    # embind should work with stack overflow checks (see #12356)
    self.set_setting('STACK_OVERFLOW_CHECK', 2)
    self.set_setting('EXIT_RUNTIME')
    self.emcc_args += ['--bind']
    self.do_run_in_out_file_test('core/pthread/create.cpp')
  @node_pthreads
  def test_pthread_exceptions(self):
    """C++ exceptions thrown on pthreads."""
    self.set_setting('PTHREAD_POOL_SIZE', 2)
    self.set_setting('EXIT_RUNTIME')
    self.emcc_args += ['-fexceptions']
    self.do_run_in_out_file_test('core/pthread/exceptions.cpp')
  @node_pthreads
  def test_pthread_exit_process(self):
    """Process exit (with code 42) from a pthread under PROXY_TO_PTHREAD."""
    self.set_setting('PROXY_TO_PTHREAD')
    self.set_setting('EXIT_RUNTIME')
    self.emcc_args += ['-DEXIT_RUNTIME', '--pre-js', test_file('core/pthread/test_pthread_exit_runtime.pre.js')]
    self.do_run_in_out_file_test('core/pthread/test_pthread_exit_runtime.c', assert_returncode=42)
  @node_pthreads
  @no_wasm2js('wasm2js does not support PROXY_TO_PTHREAD (custom section support)')
  def test_pthread_offset_converter(self):
    """USE_OFFSET_CONVERTER (return-address lookup) combined with pthreads."""
    self.set_setting('PROXY_TO_PTHREAD')
    self.set_setting('EXIT_RUNTIME')
    self.set_setting('USE_OFFSET_CONVERTER')
    if '-g' in self.emcc_args:
      self.emcc_args += ['-DDEBUG']
    self.do_runf(test_file('core/test_return_address.c'), 'passed')
  @node_pthreads
  @no_wasm2js('wasm2js does not support PROXY_TO_PTHREAD (custom section support)')
  def test_pthread_offset_converter_modularize(self):
    """USE_OFFSET_CONVERTER + pthreads in a MODULARIZE build invoked via
    require()."""
    self.set_setting('PROXY_TO_PTHREAD')
    self.set_setting('EXIT_RUNTIME')
    self.set_setting('USE_OFFSET_CONVERTER')
    self.set_setting('MODULARIZE')
    create_file('post.js', 'var m = require("./test_return_address.js"); m();')
    self.emcc_args += ['--extern-post-js', 'post.js', '-s', 'EXPORT_NAME=foo']
    if '-g' in self.emcc_args:
      self.emcc_args += ['-DDEBUG']
    self.do_runf(test_file('core/test_return_address.c'), 'passed')
  def test_emscripten_atomics_stub(self):
    """emscripten atomics API in a build without pthreads (stub versions)."""
    self.do_run_in_out_file_test('core/pthread/emscripten_atomics.c')
  @no_asan('incompatibility with atomics')
  @node_pthreads
  def test_emscripten_atomics(self):
    """emscripten atomics API with real pthreads enabled."""
    self.set_setting('USE_PTHREADS')
    self.do_run_in_out_file_test('core/pthread/emscripten_atomics.c')
  @no_asan('incompatibility with atomics')
  @node_pthreads
  def test_emscripten_futexes(self):
    """emscripten futex wait/wake API with pthreads enabled."""
    self.set_setting('USE_PTHREADS')
    self.do_run_in_out_file_test('core/pthread/emscripten_futexes.c')
  @node_pthreads
  def test_stdio_locking(self):
    """stdio stream locking across concurrent pthreads."""
    self.set_setting('PTHREAD_POOL_SIZE', '2')
    self.set_setting('EXIT_RUNTIME')
    self.do_run_in_out_file_test('core', 'test_stdio_locking.c')
  @needs_dylink
  @node_pthreads
  def test_pthread_dylink_basics(self):
    """Basic dynamic linking combined with PROXY_TO_PTHREAD."""
    self.emcc_args.append('-Wno-experimental')
    self.set_setting('PROXY_TO_PTHREAD')
    self.set_setting('EXIT_RUNTIME')
    self.do_basic_dylink_test()
  @needs_dylink
  @node_pthreads
  def test_pthread_dylink(self):
    """Dynamic linking with pthreads; uses a very long .so name as a
    regression test for #14833 (TextDecoder + pthreads + dylink)."""
    self.emcc_args.append('-Wno-experimental')
    self.set_setting('EXIT_RUNTIME')
    self.set_setting('USE_PTHREADS')
    self.set_setting('LLD_REPORT_UNDEFINED')
    self.set_setting('PTHREAD_POOL_SIZE', 2)
    main = test_file('core/pthread/test_pthread_dylink.c')

    # test with a long .so name, as a regression test for
    # https://github.com/emscripten-core/emscripten/issues/14833
    # where we had a bug with long names + TextDecoder + pthreads + dylink
    very_long_name = 'very_very_very_very_very_very_very_very_very_long.so'

    self.dylink_testf(main, so_name=very_long_name,
                      need_reverse=False)
@needs_dylink
@node_pthreads
def test_pthread_dylink_tls(self):
self.emcc_args.append('-Wno-experimental')
self.set_setting('EXIT_RUNTIME')
self.set_setting('USE_PTHREADS')
self.set_setting('PTHREAD_POOL_SIZE=1')
main = test_file('core/pthread/test_pthread_dylink_tls.c')
self.dylink_testf(main, need_reverse=False)
  @needs_dylink
  @node_pthreads
  def test_Module_dynamicLibraries_pthreads(self):
    """Module.dynamicLibraries set at runtime must load side modules on both
    the main thread and pthread workers."""
    # test that Module.dynamicLibraries works with pthreads
    self.emcc_args += ['-pthread', '-Wno-experimental']
    self.emcc_args += ['--extern-pre-js', 'pre.js']
    self.set_setting('PROXY_TO_PTHREAD')
    self.set_setting('EXIT_RUNTIME')

    # This test is for setting dynamicLibraries at runtime so we don't
    # want emscripten loading `liblib.so` automatically (which it would
    # do without this setting).
    self.set_setting('NO_AUTOLOAD_DYLIBS')

    create_file('pre.js', '''
      if (!global.Module) {
        // This is the initial load (not a worker)
        // Define the initial state of Module as we would
        // in the html shell file.
        // Use var to escape the scope of the if statement
        var Module = {
          dynamicLibraries: ['liblib.so']
        };
      }
    ''')

    self.dylink_test(
      r'''
        #include <stdio.h>
        int side();
        int main() {
          printf("result is %d", side());
          return 0;
        }
      ''',
      r'''
        int side() { return 42; }
      ''',
      'result is 42')
  # Tests the emscripten_get_exported_function() API.
  def test_emscripten_get_exported_function(self):
    """Look up an exported function by name at runtime via -lexports.js."""
    # Could also test with -s ALLOW_TABLE_GROWTH=1
    self.set_setting('RESERVED_FUNCTION_POINTERS', 2)
    self.emcc_args += ['-lexports.js']
    self.do_core_test('test_get_exported_function.cpp')
  # Tests the emscripten_get_exported_function() API.
  @no_asan('TODO: ASan support in minimal runtime')
  def test_minimal_runtime_emscripten_get_exported_function(self):
    """Same export-lookup test, but under MINIMAL_RUNTIME."""
    # Could also test with -s ALLOW_TABLE_GROWTH=1
    self.set_setting('RESERVED_FUNCTION_POINTERS', 2)
    self.set_setting('MINIMAL_RUNTIME')
    self.emcc_args += ['-lexports.js']
    self.do_core_test('test_get_exported_function.cpp')
  # Marked as impure since the WASI reactor modules (modules without main)
  # are not yet suppored by the wasm engines we test against.
  @also_with_standalone_wasm(impure=True)
  def test_undefined_main(self):
    """Behavior when main() is missing: error in STANDALONE_WASM, implicit
    allowance otherwise, and the effect of IGNORE_MISSING_MAIN=0 and an empty
    EXPORTED_FUNCTIONS list."""
    if self.get_setting('STANDALONE_WASM'):
      # In standalone we don't support implicitly building without main.  The user has to explicitly
      # opt out (see below).
      err = self.expect_fail([EMCC, test_file('core/test_ctors_no_main.cpp')] + self.get_emcc_args())
      self.assertContained('error: undefined symbol: main (referenced by top-level compiled C/C++ code)', err)
      self.assertContained('warning: To build in STANDALONE_WASM mode without a main(), use emcc --no-entry', err)
    elif not self.get_setting('LLD_REPORT_UNDEFINED') and not self.get_setting('STRICT'):
      # Traditionally in emscripten we allow main to be implicitly undefined.  This allows programs
      # with a main and libraries without a main to be compiled identically.
      # However we are trying to move away from that model to a more explicit opt-out model. See:
      # https://github.com/emscripten-core/emscripten/issues/9640
      self.do_core_test('test_ctors_no_main.cpp')

      # Disabling IGNORE_MISSING_MAIN should cause link to fail due to missing main
      self.set_setting('IGNORE_MISSING_MAIN', 0)
      err = self.expect_fail([EMCC, test_file('core/test_ctors_no_main.cpp')] + self.get_emcc_args())
      self.assertContained('error: entry symbol not defined (pass --no-entry to suppress): main', err)

      # In non-standalone mode exporting an empty list of functions signal that we don't
      # have a main and so should not generate an error.
      self.set_setting('EXPORTED_FUNCTIONS', [])
      self.do_core_test('test_ctors_no_main.cpp')
      self.clear_setting('EXPORTED_FUNCTIONS')
def test_undefined_main_explict(self):
  # With an explicit --no-entry flag, a program lacking main() must build
  # cleanly. (Note: the method name's typo is kept for test-history reasons.)
  self.emcc_args += ['--no-entry']
  self.do_core_test('test_ctors_no_main.cpp')
def test_undefined_main_wasm_output(self):
  # Direct-to-wasm output (-o out.wasm) goes through standalone-style
  # linking, where a missing main() is a hard error.
  if not can_do_standalone(self):
    self.skipTest('standalone mode only')
  err = self.expect_fail([EMCC, '-o', 'out.wasm', test_file('core/test_ctors_no_main.cpp')] + self.get_emcc_args())
  self.assertContained('undefined symbol: main', err)
def test_export_start(self):
  # In STANDALONE_WASM mode, `__start` can be exported as the entry point.
  if not can_do_standalone(self):
    self.skipTest('standalone mode only')
  self.set_setting('STANDALONE_WASM')
  self.set_setting('EXPORTED_FUNCTIONS', ['__start'])
  self.do_core_test('test_hello_world.c')
@unittest.skip("memory64 functionality only partially working")
def test_memory64_hello_world(self):
  # MEMORY64=2 selects the lowered (32-bit-compatible) memory64 mode.
  self.set_setting('MEMORY64', 2)
  self.do_core_test('test_hello_world.c')
# Tests the operation of API found in #include <emscripten/math.h>
def test_emscripten_math(self):
  # Plain run/compare against the recorded expected output.
  self.do_core_test('test_emscripten_math.c')
# Tests that users can pass custom JS options from command line using
# the -jsDfoo=val syntax:
# See https://github.com/emscripten-core/emscripten/issues/10580.
def test_custom_js_options(self):
  self.emcc_args += ['--js-library', test_file('core/test_custom_js_settings.js'), '-jsDCUSTOM_JS_OPTION=1']
  self.do_core_test('test_custom_js_settings.c')

  # Attempting to override a built-in setting via -jsD must be rejected.
  self.assertContained('cannot change built-in settings values with a -jsD directive', self.expect_fail([EMCC, '-jsDWASM=0']))
# Tests <emscripten/stack.h> API
@no_asan('stack allocation sizes are no longer predictable')
def test_emscripten_stack(self):
  # Fix the stack size so the free/used stack queries have known values.
  self.set_setting('TOTAL_STACK', 4 * 1024 * 1024)
  self.do_core_test('test_stack_get_free.c')
# Tests settings.ABORT_ON_WASM_EXCEPTIONS
def test_abort_on_exceptions(self):
  # Explictly disable EXIT_RUNTIME, since otherwise addOnPostRun does not work.
  # https://github.com/emscripten-core/emscripten/issues/15080
  self.set_setting('EXIT_RUNTIME', 0)
  self.set_setting('ABORT_ON_WASM_EXCEPTIONS')
  # ccall/cwrap are used by the accompanying post-js driver script.
  self.set_setting('EXPORTED_RUNTIME_METHODS', ['ccall', 'cwrap'])
  self.emcc_args += ['--bind', '--post-js', test_file('core/test_abort_on_exception_post.js')]
  self.do_core_test('test_abort_on_exception.cpp', interleaved_output=False)
@needs_dylink
def test_gl_main_module(self):
  # GL proc-address lookup must keep working under dynamic linking.
  self.set_setting('MAIN_MODULE')
  self.do_runf(test_file('core/test_gl_get_proc_address.c'))
@needs_dylink
def test_main_module_js_symbol(self):
  # JS-library symbols must be resolvable from a MAIN_MODULE=2 build.
  self.set_setting('MAIN_MODULE', 2)
  self.emcc_args += ['--js-library', test_file('core/test_main_module_js_symbol.js')]
  self.do_runf(test_file('core/test_main_module_js_symbol.c'))
def test_REVERSE_DEPS(self):
  # Checks the three REVERSE_DEPS modes. Referencing `connect` pulls in
  # `ntohs` via a reverse dependency, so 'auto' and 'all' must link while
  # 'none' must fail on the unresolved symbol.
  create_file('connect.c', '#include <sys/socket.h>\nint main() { return (int)(long)&connect; }')
  self.run_process([EMCC, 'connect.c'])
  base_size = os.path.getsize('a.out.wasm')
  # 'auto' should work (it's the default)
  self.run_process([EMCC, 'connect.c', '-sREVERSE_DEPS=auto'])
  # 'all' should work too although it should produce a larger binary
  self.run_process([EMCC, 'connect.c', '-sREVERSE_DEPS=all'])
  self.assertGreater(os.path.getsize('a.out.wasm'), base_size)
  # 'none' should fail to link because the dependency on ntohs was not added.
  err = self.expect_fail([EMCC, 'connect.c', '-sREVERSE_DEPS=none'])
  self.assertContained('undefined symbol: ntohs', err)
def test_emscripten_async_call(self):
  # EXIT_RUNTIME so the program can exit after its async callbacks fire.
  self.set_setting('EXIT_RUNTIME')
  self.do_run_in_out_file_test(test_file('core/test_emscripten_async_call.c'))
@no_asan('asyncify stack operations confuse asan')
@parameterized({
  '': ([],),
  'no_dynamic_execution': (['-s', 'DYNAMIC_EXECUTION=0'],)
})
def test_embind_lib_with_asyncify(self, args):
  # Embind + ASYNCIFY: an embind-exported function that suspends
  # (sleep_and_return) must work, with and without dynamic execution.
  self.uses_es6 = True
  self.emcc_args += [
    '--bind',
    '-s', 'ASYNCIFY',
    '-s', 'ASYNCIFY_IMPORTS=["sleep_and_return"]',
    '--post-js', test_file('core/embind_lib_with_asyncify.test.js'),
  ]
  self.emcc_args += args
  self.do_core_test('embind_lib_with_asyncify.cpp')
@no_asan('asyncify stack operations confuse asan')
def test_em_async_js(self):
  # EM_ASYNC_JS requires ASYNCIFY and generates ES6 (async/await) code.
  self.uses_es6 = True
  self.set_setting('ASYNCIFY')
  self.maybe_closure()
  self.do_core_test('test_em_async_js.c')
# Generate tests for everything
def make_run(name, emcc_args, settings=None, env=None):
  """Create a concrete test-runner class for one build configuration.

  Returns a new subclass of TestCoreBase called `name` whose setUp applies
  the given compiler flags, -s settings and environment variables, and
  whose tearDown removes those environment variables again.
  """
  if env is None:
    env = {}
  if settings is None:
    settings = {}

  if settings:
    # Until we create a way to specify link-time settings separately from compile-time settings
    # we need to pass this flag here to avoid warnings from compile-only commands.
    emcc_args.append('-Wno-unused-command-line-argument')

  # Build the subclass dynamically; run_name identifies the mode at runtime.
  TT = type(name, (TestCoreBase,), dict(run_name=name, env=env, __module__=__name__))  # noqa

  def tearDown(self):
    try:
      super(TT, self).tearDown()
    finally:
      # Undo the environment modifications made in setUp, even on failure.
      for k, v in self.env.items():
        del os.environ[k]

  TT.tearDown = tearDown

  def setUp(self):
    super(TT, self).setUp()
    for k, v in self.env.items():
      # Refuse to clobber a pre-existing variable; tearDown deletes keys.
      assert k not in os.environ, k + ' should not be in environment'
      os.environ[k] = v

    os.chdir(self.get_dir())  # Ensure the directory exists and go there

    for k, v in settings.items():
      self.set_setting(k, v)

    self.emcc_args += emcc_args

  TT.setUp = setUp

  return TT
# Main wasm test modes
wasm0 = make_run('wasm0', emcc_args=['-O0'])
wasm0g = make_run('wasm0g', emcc_args=['-O0', '-g'])
wasm1 = make_run('wasm1', emcc_args=['-O1'])
wasm2 = make_run('wasm2', emcc_args=['-O2'])
wasm2g = make_run('wasm2g', emcc_args=['-O2', '-g'])
wasm3 = make_run('wasm3', emcc_args=['-O3'])
wasms = make_run('wasms', emcc_args=['-Os'])
wasmz = make_run('wasmz', emcc_args=['-Oz'])

# LTO variants at each optimization level.
wasmlto0 = make_run('wasmlto0', emcc_args=['-flto', '-O0'])
wasmlto1 = make_run('wasmlto1', emcc_args=['-flto', '-O1'])
wasmlto2 = make_run('wasmlto2', emcc_args=['-flto', '-O2'])
wasmlto3 = make_run('wasmlto3', emcc_args=['-flto', '-O3'])
wasmltos = make_run('wasmltos', emcc_args=['-flto', '-Os'])
wasmltoz = make_run('wasmltoz', emcc_args=['-flto', '-Oz'])

# wasm2js variants (WASM=0) at each optimization level.
wasm2js0 = make_run('wasm2js0', emcc_args=['-O0'], settings={'WASM': 0})
wasm2js1 = make_run('wasm2js1', emcc_args=['-O1'], settings={'WASM': 0})
wasm2js2 = make_run('wasm2js2', emcc_args=['-O2'], settings={'WASM': 0})
wasm2js3 = make_run('wasm2js3', emcc_args=['-O3'], settings={'WASM': 0})
wasm2jss = make_run('wasm2jss', emcc_args=['-Os'], settings={'WASM': 0})
wasm2jsz = make_run('wasm2jsz', emcc_args=['-Oz'], settings={'WASM': 0})

# Secondary test modes - run directly when there is a specific need

# features
simd2 = make_run('simd2', emcc_args=['-O2', '-msimd128'])
bulkmem2 = make_run('bulkmem2', emcc_args=['-O2', '-mbulk-memory'])

# wasm
wasm2s = make_run('wasm2s', emcc_args=['-O2'], settings={'SAFE_HEAP': 1})
wasm2ss = make_run('wasm2ss', emcc_args=['-O2'], settings={'STACK_OVERFLOW_CHECK': 2})
# Add DEFAULT_TO_CXX=0
strict = make_run('strict', emcc_args=[], settings={'STRICT': 1})

# Sanitizer modes.
lsan = make_run('lsan', emcc_args=['-fsanitize=leak', '--profiling'], settings={'ALLOW_MEMORY_GROWTH': 1})
asan = make_run('asan', emcc_args=['-fsanitize=address', '--profiling'], settings={'ALLOW_MEMORY_GROWTH': 1})
asani = make_run('asani', emcc_args=['-fsanitize=address', '--profiling', '--pre-js', os.path.join(os.path.dirname(__file__), 'asan-no-leak.js')],
                 settings={'ALLOW_MEMORY_GROWTH': 1})

# Experimental modes (not tested by CI)
lld = make_run('lld', emcc_args=[], settings={'LLD_REPORT_UNDEFINED': 1})
minimal0 = make_run('minimal0', emcc_args=['-g'], settings={'MINIMAL_RUNTIME': 1})

# TestCoreBase is just a shape for the specific subclasses, we don't test it itself
del TestCoreBase  # noqa
| 33.856882 | 545 | 0.637918 |
import glob
import hashlib
import json
import logging
import os
import random
import re
import shutil
import sys
import time
import unittest
from pathlib import Path
from functools import wraps
if __name__ == '__main__':
raise Exception('do not run this file directly; do something like: tests/runner')
from tools.shared import try_delete, PIPE
from tools.shared import PYTHON, EMCC, EMAR
from tools.utils import WINDOWS, MACOS
from tools import shared, building, config, webassembly
from common import RunnerCore, path_from_root, requires_native_clang, test_file, create_file
from common import skip_if, needs_dylink, no_windows, no_mac, is_slow_test, parameterized
from common import env_modify, with_env_modify, disabled, node_pthreads
from common import read_file, read_binary, require_node, require_v8
from common import NON_ZERO, WEBIDL_BINDER, EMBUILDER
import clang_native
logger = logging.getLogger("test_core")
def wasm_simd(f):
  """Decorator: run the test with fixed-width WebAssembly SIMD enabled.

  Requires v8 with the experimental SIMD flag; skipped under wasm2js and
  for -O3 builds (too slow with the new LLVM pass manager).
  """
  def decorated(self):
    self.require_v8()
    if not self.is_wasm():
      self.skipTest('wasm2js only supports MVP for now')
    if '-O3' in self.emcc_args:
      self.skipTest('SIMD tests are too slow with -O3 in the new LLVM pass manager, https://github.com/emscripten-core/emscripten/issues/13427')
    self.emcc_args.append('-msimd128')
    # Forbid implicit vector conversions so tests exercise strict SIMD types.
    self.emcc_args.append('-fno-lax-vector-conversions')
    self.v8_args.append('--experimental-wasm-simd')
    f(self)
  return decorated
def wasm_relaxed_simd(f):
  """Decorator: run the test with relaxed SIMD codegen (wasm backend only)."""
  def decorated(self):
    # Relaxed SIMD is a wasm-only feature; wasm2js has no support for it.
    if not self.is_wasm():
      self.skipTest('wasm2js only supports MVP for now')
    self.emcc_args += ['-mrelaxed-simd']
    f(self)
  return decorated
def needs_non_trapping_float_to_int(f):
  """Decorator: skip under wasm2js, which lacks post-MVP float->int ops."""
  def decorated(self):
    if not self.is_wasm():
      self.skipTest('wasm2js only supports MVP for now')
    f(self)
  return decorated
def also_with_wasm_bigint(f):
  """Decorator: run the test once without WASM_BIGINT and, for wasm builds,
  a second time with it enabled (requires node's bigint flag)."""
  def decorated(self):
    self.set_setting('WASM_BIGINT', 0)
    f(self)
    if self.is_wasm():
      self.set_setting('WASM_BIGINT')
      self.require_node()
      self.node_args.append('--experimental-wasm-bigint')
      f(self)
  return decorated
def all_engines(f):
  """Decorator: run the test on all available JS engines, not just one."""
  def decorated(self):
    old = self.use_all_engines
    self.use_all_engines = True
    self.set_setting('ENVIRONMENT', 'web,node,shell')
    try:
      f(self)
    finally:
      # Restore the previous engine selection even if the test fails.
      self.use_all_engines = old
  return decorated
def with_both_exception_handling(f):
  """Parameterize a test to run with both emscripten (JS-based) exception
  handling and native wasm exception handling."""
  assert callable(f)

  def metafunc(self, native_exceptions):
    if native_exceptions:
      # Native wasm EH needs the wasm backend and v8, and is incompatible
      # with asan for now.
      if not self.is_wasm():
        self.skipTest('wasm2js does not support wasm exceptions')
      self.require_v8()
      if '-fsanitize=address' in self.emcc_args:
        self.skipTest('Wasm EH does not work with asan yet')
      self.emcc_args.append('-fwasm-exceptions')
      self.v8_args.append('--experimental-wasm-eh')
      f(self)
    else:
      self.set_setting('DISABLE_EXCEPTION_CATCHING', 0)
      f(self)

  # Consumed by the @parameterized machinery to generate two test variants.
  metafunc._parameterize = {'': (False,),
                            'wasm_eh': (True,)}
  return metafunc
def no_wasm2js(note=''):
  """Skip the decorated test when running in wasm2js mode."""
  # Guard against using @no_wasm2js without parentheses.
  assert not callable(note)
  return lambda f: skip_if(f, 'is_wasm2js', note)
def also_with_noderawfs(func):
  """Decorator: run the test normally, then again under NODERAWFS (direct
  host-filesystem access; node-only)."""
  def decorated(self):
    # Save the flags so the second run starts from a clean state.
    orig_args = self.emcc_args.copy()
    func(self)
    print('noderawfs')
    self.emcc_args = orig_args + ['-DNODERAWFS']
    self.set_setting('NODERAWFS')
    self.js_engines = [config.NODE_JS]
    func(self)
  return decorated
def can_do_standalone(self):
  """Return True when the current configuration supports STANDALONE_WASM."""
  # Standalone wasm needs actual wasm output and is incompatible with
  # several runtime/debugging features.
  if not self.is_wasm():
    return False
  if self.get_setting('STACK_OVERFLOW_CHECK', 0) >= 2:
    return False
  if self.get_setting('MINIMAL_RUNTIME'):
    return False
  if self.get_setting('SAFE_HEAP'):
    return False
  return '-fsanitize=address' not in self.emcc_args
def also_with_wasmfs(func):
  """Decorator: run the test normally, then again with the WASMFS
  filesystem implementation enabled."""
  def decorated(self):
    func(self)
    print('wasmfs')
    if self.get_setting('STANDALONE_WASM'):
      self.skipTest("test currently cannot run both with WASMFS and STANDALONE_WASM")
    self.set_setting('WASMFS')
    func(self)
  return decorated
def also_with_standalone_wasm(wasm2c=False, impure=False):
  """Decorator factory: parameterize a test to also run in STANDALONE_WASM.

  wasm2c: additionally run the standalone build through WASM2C.
  impure: the module is not pure WASI, so only run it under node (the pure
  wasm engines are dropped).
  """
  def decorated(func):
    def metafunc(self, standalone):
      if not standalone:
        func(self)
      else:
        if can_do_standalone(self):
          self.set_setting('STANDALONE_WASM')
          # we will not legalize the JS ffi interface, so we must use BigInt
          # support in order to use i64s.
          self.set_setting('WASM_BIGINT')
          self.emcc_args.append('-Wno-unused-command-line-argument')
          if impure:
            self.wasm_engines = []
            self.js_engines = [config.NODE_JS]
            self.node_args.append('--experimental-wasm-bigint')
          func(self)
          if wasm2c:
            print('wasm2c')
            self.set_setting('WASM2C')
            self.wasm_engines = []
            func(self)

    # Consumed by the @parameterized machinery to generate two test variants.
    metafunc._parameterize = {'': (False,),
                              'standalone': (True,)}
    return metafunc

  return decorated
def no_optimize(note=''):
  """Skip the decorated test in optimized (-O1 and above) builds.

  `note` explains why the test cannot run optimized; asserting it is not
  callable guards against using @no_optimize without parentheses.
  """
  assert not callable(note)

  def decorator(func):
    assert callable(func)

    # Use @wraps so the test keeps its name/docstring, consistent with the
    # no_asan/no_lsan decorators in this file.
    @wraps(func)
    def decorated(self):
      if self.is_optimizing():
        self.skipTest(note)
      func(self)
    return decorated
  return decorator
def needs_make(note=''):
  """Skip the test on Windows bots, where make-style tools are unavailable."""
  # Guard against using @needs_make without parentheses.
  assert not callable(note)
  if not WINDOWS:
    # Non-Windows: decorate with the identity function.
    return lambda f: f
  return unittest.skip('Tool not available on Windows bots (%s)' % note)
def no_asan(note):
  """Skip the decorated test when building with AddressSanitizer."""
  assert not callable(note)

  def wrap(func):
    assert callable(func)

    @wraps(func)
    def inner(self, *args, **kwargs):
      # skipTest raises, so the test body below is not reached when skipping.
      if '-fsanitize=address' in self.emcc_args:
        self.skipTest(note)
      func(self, *args, **kwargs)
    return inner
  return wrap
def no_lsan(note):
  """Skip the decorated test when building with LeakSanitizer."""
  assert not callable(note)

  def wrap(func):
    assert callable(func)

    @wraps(func)
    def inner(self, *args, **kwargs):
      # skipTest raises, so the test body below is not reached when skipping.
      if '-fsanitize=leak' in self.emcc_args:
        self.skipTest(note)
      func(self, *args, **kwargs)
    return inner
  return wrap
def make_no_decorator_for_setting(name):
  """Build a `no_<setting>(note)` decorator factory.

  The produced decorator skips a test whenever the named setting is active,
  either via `-s NAME=1` on the command line or via set_setting().
  """
  def outer_decorator(note):
    assert not callable(note)

    def decorator(f):
      assert callable(f)

      @wraps(f)
      def decorated(self, *args, **kwargs):
        enabled = (name + '=1') in self.emcc_args or self.get_setting(name)
        if enabled:
          self.skipTest(note)
        f(self, *args, **kwargs)
      return decorated
    return decorator
  return outer_decorator
# Skip-decorators for individual settings, built from the generic factory.
no_minimal_runtime = make_no_decorator_for_setting('MINIMAL_RUNTIME')
no_safe_heap = make_no_decorator_for_setting('SAFE_HEAP')
def is_sanitizing(args):
  """True if any -fsanitize= flag appears anywhere in the argument list."""
  # Stringify the whole list so flags embedded in combined arguments match too.
  return str(args).find('-fsanitize=') != -1
class TestCoreBase(RunnerCore):
def is_wasm2js(self):
  # WASM=0 means we compile the wasm down to JS (wasm2js mode).
  return self.get_setting('WASM') == 0
def is_optimizing(self):
  # True for any -O level other than -O0 (including -Os/-Oz).
  return '-O' in str(self.emcc_args) and '-O0' not in self.emcc_args
def can_use_closure(self):
  # Closure is only usable in optimized builds without debug info/profiling.
  return '-g' not in self.emcc_args and '--profiling' not in self.emcc_args and ('-O2' in self.emcc_args or '-Os' in self.emcc_args)
def maybe_closure(self):
  """Enable the closure compiler when the current flags allow it.

  Returns True only when the flag was newly added by this call.
  """
  if '--closure=1' in self.emcc_args or not self.can_use_closure():
    return False
  self.emcc_args.append('--closure=1')
  logger.debug('using closure compiler..')
  return True
def assertStartswith(self, output, prefix):
  # Compare only the prefix-sized head so failure diffs stay readable.
  self.assertEqual(prefix, output[:len(prefix)])
def verify_in_strict_mode(self, filename):
  """Re-run the given JS file with a `"use strict"` prologue prepended."""
  contents = read_file(filename)
  strict_file = filename + '.strict.js'
  with open(strict_file, 'w') as outfile:
    outfile.write('"use strict";\n' + contents)
  self.run_js(strict_file)
def do_core_test(self, testname, **kwargs):
  """Run a test from tests/core/, comparing against its expected output."""
  self.do_run_in_out_file_test(Path('core', testname), **kwargs)
def get_bullet_library(self, use_cmake):
  """Build (or fetch from cache) the third-party Bullet physics libraries.

  use_cmake selects between the CMake and autoconf build systems, which
  emit the static libraries at different paths.
  """
  if use_cmake:
    configure_commands = ['cmake', '.']
    configure_args = ['-DBUILD_DEMOS=OFF', '-DBUILD_EXTRAS=OFF', '-DUSE_GLUT=OFF']
    # Depending on whether 'configure' or 'cmake' is used to build, Bullet
    # places output files in different directory structures.
    generated_libs = [Path('src/BulletDynamics/libBulletDynamics.a'),
                      Path('src/BulletCollision/libBulletCollision.a'),
                      Path('src/LinearMath/libLinearMath.a')]
  else:
    configure_commands = ['sh', './configure']
    # Force a nondefault --host= so that the configure script will interpret
    # that we are doing cross-compilation
    configure_args = ['--disable-shared', '--host=i686-pc-linux-gnu',
                      '--disable-demos', '--disable-dependency-tracking']
    generated_libs = [Path('src/.libs/libBulletDynamics.a'),
                      Path('src/.libs/libBulletCollision.a'),
                      Path('src/.libs/libLinearMath.a')]

  return self.get_library('third_party/bullet', generated_libs,
                          configure=configure_commands,
                          configure_args=configure_args,
                          cache_name_extra=configure_commands[0])
@also_with_standalone_wasm()
@also_with_wasmfs
def test_hello_world(self):
  self.do_core_test('test_hello_world.c')
  # must not emit the internal itemization of generated functions
  self.assertNotContained('EMSCRIPTEN_GENERATED_FUNCTIONS', read_file('test_hello_world.js'))
def test_wasm_synchronous_compilation(self):
self.set_setting('STRICT_JS')
self.set_setting('WASM_ASYNC_COMPILATION', 0)
self.do_core_test('test_hello_world.c')
@also_with_standalone_wasm()
def test_hello_argc(self):
self.do_core_test('test_hello_argc.c')
def test_intvars(self):
self.do_core_test('test_intvars.cpp')
def test_sintvars(self):
self.do_core_test('test_sintvars.c')
def test_int53(self):
self.emcc_args += ['-s', 'DEFAULT_LIBRARY_FUNCS_TO_INCLUDE=[$convertI32PairToI53,$convertU32PairToI53,$readI53FromU64,$readI53FromI64,$writeI53ToI64,$writeI53ToI64Clamped,$writeI53ToU64Clamped,$writeI53ToI64Signaling,$writeI53ToU64Signaling]']
self.do_core_test('test_int53.c', interleaved_output=False)
def test_i64(self):
self.do_core_test('test_i64.c')
def test_i64_2(self):
self.do_core_test('test_i64_2.cpp')
def test_i64_3(self):
self.do_core_test('test_i64_3.cpp')
def test_i64_4(self):
self.do_core_test('test_i64_4.c')
def test_i64_b(self):
self.do_core_test('test_i64_b.cpp')
def test_i64_cmp(self):
self.do_core_test('test_i64_cmp.cpp')
def test_i64_cmp2(self):
self.do_core_test('test_i64_cmp2.c')
def test_i64_double(self):
self.do_core_test('test_i64_double.cpp')
def test_i64_umul(self):
self.do_core_test('test_i64_umul.c')
@also_with_standalone_wasm()
def test_i64_precise(self):
self.do_core_test('test_i64_precise.c')
def test_i64_precise_needed(self):
self.do_core_test('test_i64_precise_needed.c')
def test_i64_llabs(self):
self.do_core_test('test_i64_llabs.c')
def test_i64_zextneg(self):
self.do_core_test('test_i64_zextneg.c')
def test_i64_7z(self):
self.set_setting('EXIT_RUNTIME')
self.do_core_test('test_i64_7z.c', args=['hallo'])
def test_i64_i16(self):
self.do_core_test('test_i64_i16.c')
def test_i64_qdouble(self):
self.do_core_test('test_i64_qdouble.c')
def test_i64_varargs(self):
self.do_core_test('test_i64_varargs.c', args='waka fleefl asdfasdfasdfasdf'.split())
@no_wasm2js('wasm_bigint')
def test_i64_invoke_bigint(self):
self.set_setting('WASM_BIGINT')
self.emcc_args += ['-fexceptions']
self.node_args += ['--experimental-wasm-bigint']
self.do_core_test('test_i64_invoke_bigint.cpp', js_engines=[config.NODE_JS])
def test_vararg_copy(self):
self.do_run_in_out_file_test('va_arg/test_va_copy.c')
def test_llvm_fabs(self):
self.do_core_test('test_llvm_fabs.c')
def test_double_varargs(self):
self.do_core_test('test_double_varargs.c')
def test_trivial_struct_varargs(self):
self.do_core_test('test_trivial_struct_varargs.c')
def test_struct_varargs(self):
self.do_core_test('test_struct_varargs.c')
def test_zero_struct_varargs(self):
self.do_core_test('test_zero_struct_varargs.c')
def zzztest_nested_struct_varargs(self):
self.do_core_test('test_nested_struct_varargs.c')
def test_i32_mul_precise(self):
self.do_core_test('test_i32_mul_precise.c')
def test_i16_emcc_intrinsic(self):
self.set_setting('EXIT_RUNTIME')
self.do_core_test('test_i16_emcc_intrinsic.c')
def test_double_i64_conversion(self):
self.do_core_test('test_double_i64_conversion.c')
def test_float32_precise(self):
self.do_core_test('test_float32_precise.c')
def test_negative_zero(self):
self.do_core_test('test_negative_zero.c')
def test_literal_negative_zero(self):
self.do_core_test('test_literal_negative_zero.c')
@also_with_standalone_wasm()
def test_bswap64(self):
self.do_core_test('test_bswap64.cpp')
def test_sha1(self):
self.do_runf(test_file('sha1.c'), 'SHA1=15dd99a1991e0b3826fede3deffc1feba42278e6')
def test_wasm32_unknown_emscripten(self):
self.do_runf(test_file('wasm32-unknown-emscripten.c'), '')
def test_cube2md5(self):
self.emcc_args += ['--embed-file', 'cube2md5.txt']
shutil.copyfile(test_file('cube2md5.txt'), 'cube2md5.txt')
self.do_run_from_file(test_file('cube2md5.cpp'), test_file('cube2md5.ok'), assert_returncode=NON_ZERO)
@also_with_standalone_wasm(wasm2c=True)
@needs_make('make')
def test_cube2hash(self):
self.do_run('// empty file', 'Usage: hashstring <seed>',
libraries=self.get_library('third_party/cube2hash', ['libcube2hash.a'], configure=None),
includes=[test_file('third_party/cube2hash')], assert_returncode=NON_ZERO)
for text, output in [('fleefl', '892BDB6FD3F62E863D63DA55851700FDE3ACF30204798CE9'),
('fleefl2', 'AA2CC5F96FC9D540CA24FDAF1F71E2942753DB83E8A81B61'),
('64bitisslow', '64D8470573635EC354FEE7B7F87C566FCAF1EFB491041670')]:
self.do_run('src.js', 'hash value: ' + output, args=[text], no_build=True)
def test_unaligned(self):
self.skipTest('LLVM marks the reads of s as fully aligned, making this test invalid')
src = r'''
#include <stdio.h>
struct S {
double x;
int y;
};
int main() {
// the 64-bit value here will not be 8-byte aligned
S s0[3] = { {0x12a751f430142, 22}, {0x17a5c85bad144, 98}, {1, 1}};
char buffer[10*sizeof(S)];
int b = int(buffer);
S *s = (S*)(b + 4-b%8);
s[0] = s0[0];
s[1] = s0[1];
s[2] = s0[2];
printf("*%d : %d : %d\n", sizeof(S), ((unsigned int)&s[0]) % 8 != ((unsigned int)&s[1]) % 8,
((unsigned int)&s[1]) - ((unsigned int)&s[0]));
s[0].x++;
s[0].y++;
s[1].x++;
s[1].y++;
printf("%.1f,%d,%.1f,%d\n", s[0].x, s[0].y, s[1].x, s[1].y);
return 0;
}
'''
self.do_run(src, '*12 : 1 : 12\n328157500735811.0,23,416012775903557.0,99\n')
return
src = r'''
#include <stdio.h>
int main()
{
int x[10];
char *p = (char*)&x[0];
p++;
short *q = (short*)p;
*q = 300;
printf("*%d:%ld*\n", *q, ((long)q)%2);
int *r = (int*)p;
*r = 515559;
printf("*%d*\n", *r);
long long *t = (long long*)p;
*t = 42949672960;
printf("*%lld*\n", *t);
return 0;
}
'''
try:
self.do_run(src, '*300:1*\n*515559*\n*42949672960*\n')
except Exception as e:
assert 'must be aligned' in str(e), e
def test_align64(self):
src = r'''
#include <stdio.h>
// inspired by poppler
enum Type {
A = 10,
B = 20
};
struct Object {
Type type;
union {
int intg;
double real;
char *name;
};
};
struct Principal {
double x;
Object a;
double y;
};
int main(int argc, char **argv)
{
int base = argc-1;
Object *o = NULL;
printf("%zu,%zu\n", sizeof(Object), sizeof(Principal));
printf("%ld,%ld,%ld,%ld\n", (long)&o[base].type, (long)&o[base].intg, (long)&o[base].real, (long)&o[base].name);
printf("%ld,%ld,%ld,%ld\n", (long)&o[base+1].type, (long)&o[base+1].intg, (long)&o[base+1].real, (long)&o[base+1].name);
Principal p, q;
p.x = p.y = q.x = q.y = 0;
p.a.type = A;
p.a.real = 123.456;
*(&q.a) = p.a;
printf("%.2f,%d,%.2f,%.2f : %.2f,%d,%.2f,%.2f\n", p.x, p.a.type, p.a.real, p.y, q.x, q.a.type, q.a.real, q.y);
return 0;
}
'''
self.do_run(src, '''16,32
0,8,8,8
16,24,24,24
0.00,10,123.46,0.00 : 0.00,10,123.46,0.00
''')
@no_asan('asan errors on corner cases we check')
def test_aligned_alloc(self):
self.do_runf(test_file('test_aligned_alloc.c'), '',
emcc_args=['-Wno-non-power-of-two-alignment'])
def test_unsigned(self):
src = '''
#include <stdio.h>
const signed char cvals[2] = { -1, -2 }; // compiler can store this is a string, so -1 becomes \\FF, and needs re-signing
int main()
{
{
unsigned char x = 200;
printf("*%d*\\n", x);
unsigned char y = -22;
printf("*%d*\\n", y);
}
int varey = 100;
unsigned int MAXEY = -1, MAXEY2 = -77;
printf("*%u,%d,%u*\\n", MAXEY, varey >= MAXEY, MAXEY2); // 100 >= -1? not in unsigned!
int y = cvals[0];
printf("*%d,%d,%d,%d*\\n", cvals[0], cvals[0] < 0, y, y < 0);
y = cvals[1];
printf("*%d,%d,%d,%d*\\n", cvals[1], cvals[1] < 0, y, y < 0);
// zext issue - see mathop in jsifier
unsigned char x8 = -10;
unsigned long hold = 0;
hold += x8;
int y32 = hold+50;
printf("*%lu,%d*\\n", hold, y32);
// Comparisons
x8 = 0;
for (int i = 0; i < 254; i++) x8++; // make it an actual 254 in JS - not a -2
printf("*%d,%d*\\n", x8+1 == 0xff, x8+1 != 0xff); // 0xff may be '-1' in the bitcode
return 0;
}
'''
self.do_run(src, '*4294967295,0,4294967219*\n*-1,1,-1,1*\n*-2,1,-2,1*\n*246,296*\n*1,0*')
self.emcc_args.append('-Wno-constant-conversion')
src = '''
#include <stdio.h>
int main()
{
{
unsigned char x;
unsigned char *y = &x;
*y = -1;
printf("*%d*\\n", x);
}
{
unsigned short x;
unsigned short *y = &x;
*y = -1;
printf("*%d*\\n", x);
}
/*{ // This case is not checked. The hint for unsignedness is just the %u in printf, and we do not analyze that
unsigned int x;
unsigned int *y = &x;
*y = -1;
printf("*%u*\\n", x);
}*/
{
char x;
char *y = &x;
*y = 255;
printf("*%d*\\n", x);
}
{
char x;
char *y = &x;
*y = 65535;
printf("*%d*\\n", x);
}
{
char x;
char *y = &x;
*y = 0xffffffff;
printf("*%d*\\n", x);
}
return 0;
}
'''
self.do_run(src, '*255*\n*65535*\n*-1*\n*-1*\n*-1*')
def test_bitfields(self):
self.do_core_test('test_bitfields.c')
def test_floatvars(self):
self.do_core_test('test_floatvars.cpp')
def test_closebitcasts(self):
self.do_core_test('closebitcasts.c')
def test_fast_math(self):
self.emcc_args += ['-ffast-math']
self.do_core_test('test_fast_math.c', args=['5', '6', '8'])
def test_zerodiv(self):
self.do_core_test('test_zerodiv.c')
def test_zero_multiplication(self):
self.set_setting('EXIT_RUNTIME')
self.do_core_test('test_zero_multiplication.c')
def test_isnan(self):
self.do_core_test('test_isnan.c')
def test_globaldoubles(self):
self.do_core_test('test_globaldoubles.c')
def test_math(self):
self.do_core_test('test_math.c')
def test_erf(self):
self.do_core_test('test_erf.c')
def test_math_hyperbolic(self):
self.do_core_test('test_math_hyperbolic.c')
def test_math_lgamma(self):
self.do_run_in_out_file_test('math/lgamma.c', assert_returncode=NON_ZERO)
def test_math_fmodf(self):
self.do_run_in_out_file_test('math/fmodf.c')
def test_frexp(self):
self.do_core_test('test_frexp.c')
def test_rounding(self):
self.set_setting('EXIT_RUNTIME')
self.do_core_test('test_rounding.c')
def test_fcvt(self):
self.do_core_test('test_fcvt.cpp')
def test_llrint(self):
self.do_core_test('test_llrint.c')
def test_getgep(self):
self.do_core_test('test_getgep.c')
def test_multiply_defined_symbols(self):
  # Two archives each define f(); the linker must pick the definition from
  # the archive listed first (liba.a), so the program prints "result: 1".
  create_file('a1.c', 'int f() { return 1; }')
  create_file('a2.c', 'void x() {}')
  create_file('b1.c', 'int f() { return 2; }')
  create_file('b2.c', 'void y() {}')
  create_file('main.c', r'''
    #include <stdio.h>
    int f();
    int main() {
      printf("result: %d\n", f());
      return 0;
    }
  ''')

  self.emcc('a1.c', ['-c'])
  self.emcc('a2.c', ['-c'])
  self.emcc('b1.c', ['-c'])
  self.emcc('b2.c', ['-c'])
  self.emcc('main.c', ['-c'])

  building.emar('cr', 'liba.a', ['a1.c.o', 'a2.c.o'])
  building.emar('cr', 'libb.a', ['b1.c.o', 'b2.c.o'])

  # Link everything into a single object, then into JS.
  building.link_to_object(['main.c.o', 'liba.a', 'libb.a'], 'all.o')

  self.emcc('all.o', self.get_emcc_args(), 'all.js')
  self.do_run('all.js', 'result: 1', no_build=True)
def test_if(self):
self.do_core_test('test_if.c')
def test_if_else(self):
self.do_core_test('test_if_else.c')
def test_loop(self):
self.do_core_test('test_loop.c')
def test_stack(self):
self.set_setting('INLINING_LIMIT')
self.set_setting('STACK_OVERFLOW_CHECK', 2)
self.do_core_test('test_stack.c')
def test_stack_align(self):
src = test_file('core/test_stack_align.cpp')
def test():
self.do_runf(src, ['''align 4: 0
align 8: 0
align 16: 0
align 32: 0
base align: 0, 0, 0, 0'''])
test()
@no_asan('stack size is too low for asan to work properly')
def test_stack_placement(self):
self.set_setting('TOTAL_STACK', 1024)
self.do_core_test('test_stack_placement.c')
self.set_setting('GLOBAL_BASE', 102400)
self.do_core_test('test_stack_placement.c')
@no_asan('asan does not support main modules')
@no_wasm2js('MAIN_MODULE support')
def test_stack_placement_pic(self):
self.set_setting('TOTAL_STACK', 1024)
self.set_setting('MAIN_MODULE')
self.do_core_test('test_stack_placement.c')
self.set_setting('GLOBAL_BASE', 102400)
self.do_core_test('test_stack_placement.c')
def test_strings(self):
self.do_core_test('test_strings.c', args=['wowie', 'too', '74'])
def test_strcmp_uni(self):
self.do_core_test('test_strcmp_uni.c')
def test_strndup(self):
self.do_core_test('test_strndup.c')
def test_errar(self):
self.do_core_test('test_errar.c')
def test_mainenv(self):
self.do_core_test('test_mainenv.c')
def test_funcs(self):
self.do_core_test('test_funcs.c')
def test_structs(self):
self.do_core_test('test_structs.c')
gen_struct_src = '''
#include <stdio.h>
#include <stdlib.h>
#include "emscripten.h"
struct S
{
int x, y;
};
int main()
{
S* a = {{gen_struct}};
a->x = 51; a->y = 62;
printf("*%d,%d*\\n", a->x, a->y);
{{del_struct}}(a);
return 0;
}
'''
def test_mallocstruct(self):
self.do_run(self.gen_struct_src.replace('{{gen_struct}}', '(S*)malloc(sizeof(S))').replace('{{del_struct}}', 'free'), '*51,62*')
@no_asan('ASan does not support custom memory allocators')
@no_lsan('LSan does not support custom memory allocators')
@parameterized({
'normal': [],
'memvalidate': ['-DEMMALLOC_MEMVALIDATE'],
'memvalidate_verbose': ['-DEMMALLOC_MEMVALIDATE', '-DEMMALLOC_VERBOSE', '-DRANDOM_ITERS=130'],
})
def test_emmalloc(self, *args):
self.set_setting('MALLOC', 'none')
self.emcc_args += ['-fno-builtin'] + list(args)
self.do_run(read_file(path_from_root('system/lib/emmalloc.c')) +
read_file(path_from_root('system/lib/sbrk.c')) +
read_file(test_file('core/test_emmalloc.c')),
read_file(test_file('core/test_emmalloc.out')), force_c=True)
@no_asan('ASan does not support custom memory allocators')
@no_lsan('LSan does not support custom memory allocators')
def test_emmalloc_usable_size(self, *args):
self.set_setting('MALLOC', 'emmalloc')
self.emcc_args += list(args)
self.do_core_test('test_malloc_usable_size.c')
@no_optimize('output is sensitive to optimization flags, so only test unoptimized builds')
@no_asan('ASan does not support custom memory allocators')
@no_lsan('LSan does not support custom memory allocators')
def test_emmalloc_memory_statistics(self, *args):
self.set_setting('MALLOC', 'emmalloc')
self.emcc_args += ['-s', 'INITIAL_MEMORY=128MB', '-g'] + list(args)
self.do_core_test('test_emmalloc_memory_statistics.cpp')
@no_optimize('output is sensitive to optimization flags, so only test unoptimized builds')
@no_asan('ASan does not support custom memory allocators')
@no_lsan('LSan does not support custom memory allocators')
def test_emmalloc_trim(self, *args):
self.set_setting('MALLOC', 'emmalloc')
self.emcc_args += ['-s', 'INITIAL_MEMORY=128MB', '-s', 'ALLOW_MEMORY_GROWTH', '-s', 'MAXIMUM_MEMORY=2147418112'] + list(args)
self.do_core_test('test_emmalloc_trim.cpp')
def test_emmalloc_memalign_corruption(self, *args):
self.set_setting('MALLOC', 'emmalloc')
self.do_core_test('emmalloc_memalign_corruption.cpp')
def test_newstruct(self):
self.do_run(self.gen_struct_src.replace('{{gen_struct}}', 'new S').replace('{{del_struct}}', 'delete'), '*51,62*')
def test_addr_of_stacked(self):
self.do_core_test('test_addr_of_stacked.c')
def test_globals(self):
self.do_core_test('test_globals.c')
def test_linked_list(self):
self.do_core_test('test_linked_list.c')
def test_sup(self):
self.do_run_in_out_file_test(test_file('core/test_sup.cpp'))
@also_with_standalone_wasm()
def test_assert(self):
  # A failing assert() must terminate with a non-zero exit code.
  self.do_core_test('test_assert.cpp', assert_returncode=NON_ZERO)

def test_wcslen(self):
  self.do_core_test('test_wcslen.c')

def test_regex(self):
  self.do_core_test('test_regex.c')

# longjmp family: each test below targets a different control-flow shape.
@also_with_standalone_wasm(wasm2c=True, impure=True)
def test_longjmp(self):
  self.do_core_test('test_longjmp.c')

def test_longjmp2(self):
  self.do_core_test('test_longjmp2.c')

@needs_dylink
def test_longjmp2_main_module(self):
  # Same as test_longjmp2 but linked as a MAIN_MODULE (dynamic linking).
  self.set_setting('MAIN_MODULE')
  self.do_core_test('test_longjmp2.c')

def test_longjmp3(self):
  self.do_core_test('test_longjmp3.c')

def test_longjmp4(self):
  self.do_core_test('test_longjmp4.c')

def test_longjmp_funcptr(self):
  self.do_core_test('test_longjmp_funcptr.c')

def test_longjmp_repeat(self):
  self.do_core_test('test_longjmp_repeat.c')

def test_longjmp_stacked(self):
  self.do_core_test('test_longjmp_stacked.c', assert_returncode=NON_ZERO)

def test_longjmp_exc(self):
  self.do_core_test('test_longjmp_exc.c', assert_returncode=NON_ZERO)

def test_longjmp_throw(self):
  # longjmp interacting with C++ throw, with exception catching on and off.
  for disable_throw in [0, 1]:
    print(disable_throw)
    self.set_setting('DISABLE_EXCEPTION_CATCHING', disable_throw)
    self.do_core_test('test_longjmp_throw.cpp')

def test_longjmp_unwind(self):
  self.do_core_test('test_longjmp_unwind.c', assert_returncode=NON_ZERO)

def test_longjmp_i64(self):
  self.emcc_args += ['-g']
  self.do_core_test('test_longjmp_i64.c', assert_returncode=NON_ZERO)

def test_siglongjmp(self):
  self.do_core_test('test_siglongjmp.c')
def test_setjmp_many(self):
  # setjmp called repeatedly on the same jmp_buf: every call must return 0
  # here, since longjmp is never actually taken (argc is never 1131).
  src = r'''
#include <stdio.h>
#include <setjmp.h>
int main(int argc, char** argv) {
jmp_buf buf;
for (int i = 0; i < NUM; i++) printf("%d\n", setjmp(buf));
if (argc-- == 1131) longjmp(buf, 11);
return 0;
}
'''
  # Scale the number of setjmp calls from 1 up to 1000.
  for num in [1, 5, 20, 1000]:
    print('NUM=%d' % num)
    self.do_run(src.replace('NUM', str(num)), '0\n' * num)
def test_setjmp_many_2(self):
  # setjmp/longjmp driven repeatedly from a helper function, Lua-style.
  src = r'''
#include <setjmp.h>
#include <stdio.h>
jmp_buf env;
void luaWork(int d){
int x;
printf("d is at %d\n", d);
longjmp(env, 1);
}
int main()
{
const int ITERATIONS=25;
for(int i = 0; i < ITERATIONS; i++){
if(!setjmp(env)){
luaWork(i);
}
}
return 0;
}
'''
  self.do_run(src, r'''d is at 24''')

def test_setjmp_noleak(self):
  # setjmp state must not leak memory (the C test itself prints 'ok.' on success).
  self.do_runf(test_file('core/test_setjmp_noleak.c'), 'ok.')
@with_both_exception_handling
def test_exceptions(self):
  self.set_setting('EXCEPTION_DEBUG')
  self.maybe_closure()
  # Run with longjmp support both off and on, since the longjmp lowering
  # shares machinery with exception handling.
  for support_longjmp in [0, 1]:
    self.set_setting('SUPPORT_LONGJMP', support_longjmp)
    self.do_run_from_file(test_file('core/test_exceptions.cpp'), test_file('core/test_exceptions_caught.out'))
def test_exceptions_off(self):
  # With exception catching disabled, an uncaught throw must abort the
  # program (non-zero return code, 'uncaught' expected output).
  for support_longjmp in [0, 1]:
    # Bug fix: the loop variable was previously unused, so both iterations
    # ran the identical configuration. Mirror test_exceptions above and
    # exercise both longjmp-support modes.
    self.set_setting('SUPPORT_LONGJMP', support_longjmp)
    self.set_setting('DISABLE_EXCEPTION_CATCHING')
    self.do_run_from_file(test_file('core/test_exceptions.cpp'), test_file('core/test_exceptions_uncaught.out'), assert_returncode=NON_ZERO)
@no_asan('TODO: ASan support in minimal runtime')
def test_exceptions_minimal_runtime(self):
  # Exceptions under MINIMAL_RUNTIME, both caught and uncaught.
  self.set_setting('EXCEPTION_DEBUG')
  self.set_setting('EXIT_RUNTIME')
  self.maybe_closure()
  self.set_setting('MINIMAL_RUNTIME')
  self.emcc_args += ['--pre-js', test_file('minimal_runtime_exit_handling.js')]
  for support_longjmp in [0, 1]:
    self.set_setting('SUPPORT_LONGJMP', support_longjmp)
    # With catching enabled the throw is handled...
    self.set_setting('DISABLE_EXCEPTION_CATCHING', 0)
    self.do_run_from_file(test_file('core/test_exceptions.cpp'), test_file('core/test_exceptions_caught.out'))
    # ...and with catching disabled the program aborts.
    self.set_setting('DISABLE_EXCEPTION_CATCHING')
    self.do_run_from_file(test_file('core/test_exceptions.cpp'), test_file('core/test_exceptions_uncaught.out'), assert_returncode=NON_ZERO)
@with_both_exception_handling
def test_exceptions_custom(self):
  # Constructors, copies and destructors of a user-defined exception type
  # must run in the correct order when thrown, caught by reference, and
  # caught by value (the by-value catch adds one Copy and one Destruct).
  self.set_setting('EXCEPTION_DEBUG')
  self.set_setting('EXIT_RUNTIME')
  self.maybe_closure()
  src = '''
#include <iostream>
class MyException
{
public:
MyException(){ std::cout << "Construct..."; }
MyException( const MyException & ) { std::cout << "Copy..."; }
~MyException(){ std::cout << "Destruct..."; }
};
int function()
{
std::cout << "Throw...";
throw MyException();
}
int function2()
{
return function();
}
int main()
{
try
{
function2();
}
catch (MyException & e)
{
std::cout << "Caught...";
}
try
{
function2();
}
catch (MyException e)
{
std::cout << "Caught...";
}
return 0;
}
'''
  self.do_run(src, 'Throw...Construct...Caught...Destruct...Throw...Construct...Copy...Caught...Destruct...Destruct...')
@with_both_exception_handling
def test_exceptions_2(self):
  # Run once with SAFE_HEAP off and once with it on.
  for safe_heap in (0, 1):
    print(safe_heap)
    # SAFE_HEAP cannot be combined with ASan, so skip that configuration.
    incompatible = safe_heap and '-fsanitize=address' in self.emcc_args
    if incompatible:
      continue
    self.set_setting('SAFE_HEAP', safe_heap)
    self.do_core_test('test_exceptions_2.cpp')
@with_both_exception_handling
def test_exceptions_3(self):
  # Dispatch on argv to throw three different types and verify each lands
  # in the matching catch clause.
  src = r'''
#include <iostream>
#include <stdexcept>
int main(int argc, char **argv)
{
if (argc != 2) {
std::cout << "need an arg" << std::endl;
return 1;
}
int arg = argv[1][0] - '0';
try {
if (arg == 0) throw "a c string";
if (arg == 1) throw std::exception();
if (arg == 2) throw std::runtime_error("Hello");
} catch(const char * ex) {
std::cout << "Caught C string: " << ex << std::endl;
} catch(const std::exception &ex) {
std::cout << "Caught exception: " << ex.what() << std::endl;
} catch(...) {
std::cout << "Caught something else" << std::endl;
}
std::cout << "Done.\n";
}
'''
  # Build once, then rerun the same src.js with different args (no_build=True).
  print('0')
  self.do_run(src, 'Caught C string: a c string\nDone.', args=['0'])
  print('1')
  self.do_run('src.js', 'Caught exception: std::exception\nDone.', args=['1'], no_build=True)
  print('2')
  self.do_run('src.js', 'Caught exception: Hello\nDone.', args=['2'], no_build=True)
def test_exceptions_allowed(self):
  # EXCEPTION_CATCHING_ALLOWED limits catching to the listed (mangled)
  # functions. Builds four variants and compares output sizes to verify the
  # allow list actually affects code generation.
  self.set_setting('EXCEPTION_CATCHING_ALLOWED', ["_Z12somefunctionv"])
  # otherwise it is inlined and not identified
  self.set_setting('INLINING_LIMIT')
  self.do_core_test('test_exceptions_allowed.cpp')
  size = os.path.getsize('test_exceptions_allowed.js')
  if self.is_wasm():
    size += os.path.getsize('test_exceptions_allowed.wasm')
  shutil.copyfile('test_exceptions_allowed.js', 'orig.js')
  # check that an empty allow list works properly (as in, same as exceptions disabled)
  src = test_file('core/test_exceptions_allowed.cpp')
  empty_output = test_file('core/test_exceptions_allowed_empty.out')
  self.set_setting('EXCEPTION_CATCHING_ALLOWED', [])
  self.do_run_from_file(src, empty_output, assert_returncode=NON_ZERO)
  empty_size = os.path.getsize('test_exceptions_allowed.js')
  if self.is_wasm():
    empty_size += os.path.getsize('test_exceptions_allowed.wasm')
  shutil.copyfile('test_exceptions_allowed.js', 'empty.js')
  # an allow list naming a non-existent function should behave like an empty one
  self.set_setting('EXCEPTION_CATCHING_ALLOWED', ['fake'])
  self.do_run_from_file(src, empty_output, assert_returncode=NON_ZERO)
  fake_size = os.path.getsize('test_exceptions_allowed.js')
  if self.is_wasm():
    fake_size += os.path.getsize('test_exceptions_allowed.wasm')
  shutil.copyfile('test_exceptions_allowed.js', 'fake.js')
  # finally, fully disabled catching (no allow list at all)
  self.clear_setting('EXCEPTION_CATCHING_ALLOWED')
  self.do_run_from_file(src, empty_output, assert_returncode=NON_ZERO)
  disabled_size = os.path.getsize('test_exceptions_allowed.js')
  if self.is_wasm():
    disabled_size += os.path.getsize('test_exceptions_allowed.wasm')
  shutil.copyfile('test_exceptions_allowed.js', 'disabled.js')
  print('size: %d' % size)
  print('empty_size: %d' % empty_size)
  print('fake_size: %d' % fake_size)
  print('disabled_size: %d' % disabled_size)
  # empty list acts the same as fully disabled
  self.assertEqual(empty_size, disabled_size)
  # big change when we disable exception catching of the function
  self.assertGreater(size - empty_size, 0.01 * size)
  # full disable can remove a little bit more
  self.assertLess(disabled_size, fake_size)
def test_exceptions_allowed_2(self):
  # The allow list may name 'main' directly.
  self.set_setting('EXCEPTION_CATCHING_ALLOWED', ["main"])
  # otherwise it is inlined and not identified
  self.set_setting('INLINING_LIMIT')
  self.do_core_test('test_exceptions_allowed_2.cpp')
  # When 'main' function does not have a signature, its contents will be
  # outlined to '__original_main'. Check if we can handle that case.
  self.emcc_args += ['-DMAIN_NO_SIGNATURE']
  self.do_core_test('test_exceptions_allowed_2.cpp')

def test_exceptions_allowed_uncaught(self):
  # An exception escaping an allowed function must still be reported as uncaught.
  self.emcc_args += ['-std=c++11']
  self.set_setting('EXCEPTION_CATCHING_ALLOWED', ["_Z4testv"])
  # otherwise it is inlined and not identified
  self.set_setting('INLINING_LIMIT')
  self.do_core_test('test_exceptions_allowed_uncaught.cpp')
def test_exceptions_allowed_misuse(self):
  # Combining EXCEPTION_CATCHING_ALLOWED with any value of the legacy
  # DISABLE_EXCEPTION_CATCHING flag must be rejected at compile time.
  self.set_setting('EXCEPTION_CATCHING_ALLOWED', ['foo'])
  # Test old =2 setting for DISABLE_EXCEPTION_CATCHING
  self.set_setting('DISABLE_EXCEPTION_CATCHING', 2)
  err = self.expect_fail([EMCC, test_file('hello_world.c')] + self.get_emcc_args())
  self.assertContained('error: DISABLE_EXCEPTION_CATCHING=X is no longer needed when specifying EXCEPTION_CATCHING_ALLOWED [-Wdeprecated] [-Werror]', err)
  # =0 should also be a warning
  self.set_setting('DISABLE_EXCEPTION_CATCHING', 0)
  err = self.expect_fail([EMCC, test_file('hello_world.c')] + self.get_emcc_args())
  self.assertContained('error: DISABLE_EXCEPTION_CATCHING=X is no longer needed when specifying EXCEPTION_CATCHING_ALLOWED [-Wdeprecated] [-Werror]', err)
  # =1 should be a hard error
  self.set_setting('DISABLE_EXCEPTION_CATCHING', 1)
  err = self.expect_fail([EMCC, test_file('hello_world.c')] + self.get_emcc_args())
  self.assertContained('error: DISABLE_EXCEPTION_CATCHING and EXCEPTION_CATCHING_ALLOWED are mutually exclusive', err)
  # even setting an empty list should trigger the error;
  self.set_setting('EXCEPTION_CATCHING_ALLOWED', [])
  err = self.expect_fail([EMCC, test_file('hello_world.c')] + self.get_emcc_args())
  self.assertContained('error: DISABLE_EXCEPTION_CATCHING and EXCEPTION_CATCHING_ALLOWED are mutually exclusive', err)
@with_both_exception_handling
def test_exceptions_uncaught(self):
  # std::uncaught_exception() must track the in-flight exception correctly,
  # including from a destructor running during unwinding.
  # needs to flush stdio streams
  self.set_setting('EXIT_RUNTIME')
  src = r'''
#include <stdio.h>
#include <exception>
struct X {
~X() {
printf("exception? %s\n", std::uncaught_exception() ? "yes" : "no");
}
};
int main() {
printf("exception? %s\n", std::uncaught_exception() ? "yes" : "no");
try {
X x;
throw 1;
} catch(...) {
printf("exception? %s\n", std::uncaught_exception() ? "yes" : "no");
}
printf("exception? %s\n", std::uncaught_exception() ? "yes" : "no");
return 0;
}
'''
  self.do_run(src, 'exception? no\nexception? yes\nexception? no\nexception? no\n')
  # libc++'s ostream sentry destructor also queries uncaught_exception;
  # make sure that path links and runs.
  src = r'''
#include <fstream>
#include <iostream>
int main() {
std::ofstream os("test");
os << std::unitbuf << "foo"; // trigger a call to std::uncaught_exception from
// std::basic_ostream::sentry::~sentry
std::cout << "success";
}
'''
  self.do_run(src, 'success')
@with_both_exception_handling
def test_exceptions_uncaught_2(self):
  # A rethrow that is itself caught must leave uncaught_exception() false.
  # needs to flush stdio streams
  self.set_setting('EXIT_RUNTIME')
  src = r'''
#include <iostream>
#include <exception>
int main() {
try {
throw std::exception();
} catch(std::exception) {
try {
throw;
} catch(std::exception) {}
}
if (std::uncaught_exception())
std::cout << "ERROR: uncaught_exception still set.";
else
std::cout << "OK";
}
'''
  self.do_run(src, 'OK\n')
# The tests below each exercise one aspect of the C++ exception runtime,
# with both the JS-based and the wasm exception-handling implementations.
@with_both_exception_handling
def test_exceptions_typed(self):
  # needs to flush stdio streams
  self.set_setting('EXIT_RUNTIME')
  self.clear_setting('SAFE_HEAP') # Throwing null will cause an ignorable null pointer access.
  self.do_core_test('test_exceptions_typed.cpp')

@with_both_exception_handling
def test_exceptions_virtual_inheritance(self):
  self.do_core_test('test_exceptions_virtual_inheritance.cpp')

@with_both_exception_handling
def test_exceptions_convert(self):
  self.do_core_test('test_exceptions_convert.cpp')

# TODO Make setjmp-longjmp also use Wasm exception handling
@with_both_exception_handling
def test_exceptions_multi(self):
  self.do_core_test('test_exceptions_multi.cpp')

@with_both_exception_handling
def test_exceptions_std(self):
  self.clear_setting('SAFE_HEAP')
  self.do_core_test('test_exceptions_std.cpp')

@with_both_exception_handling
def test_exceptions_alias(self):
  self.do_core_test('test_exceptions_alias.cpp')

@with_both_exception_handling
def test_exceptions_rethrow(self):
  self.do_core_test('test_exceptions_rethrow.cpp')

@with_both_exception_handling
def test_exceptions_uncaught_count(self):
  self.do_core_test('test_exceptions_uncaught_count.cpp')

@with_both_exception_handling
def test_exceptions_resume(self):
  self.set_setting('EXCEPTION_DEBUG')
  self.do_core_test('test_exceptions_resume.cpp')

@with_both_exception_handling
def test_exceptions_destroy_virtual(self):
  self.do_core_test('test_exceptions_destroy_virtual.cpp')

@with_both_exception_handling
def test_exceptions_refcount(self):
  self.do_core_test('test_exceptions_refcount.cpp')

@with_both_exception_handling
def test_exceptions_primary(self):
  self.do_core_test('test_exceptions_primary.cpp')

@with_both_exception_handling
def test_exceptions_simplify_cfg(self):
  self.do_core_test('test_exceptions_simplify_cfg.cpp')

@with_both_exception_handling
def test_exceptions_libcxx(self):
  self.do_core_test('test_exceptions_libcxx.cpp')

@with_both_exception_handling
def test_exceptions_multiple_inherit(self):
  self.do_core_test('test_exceptions_multiple_inherit.cpp')

@with_both_exception_handling
def test_exceptions_multiple_inherit_rethrow(self):
  self.do_core_test('test_exceptions_multiple_inherit_rethrow.cpp')

@with_both_exception_handling
def test_exceptions_rethrow_missing(self):
  # A bare 'throw;' with no active exception must abort.
  create_file('main.cpp', 'int main() { throw; }')
  self.do_runf('main.cpp', None, assert_returncode=NON_ZERO)
@with_both_exception_handling
def test_bad_typeid(self):
  # typeid on a dereferenced null polymorphic pointer throws std::bad_typeid.
  self.do_run(r'''
// exception example
#include <iostream>       // std::cerr
#include <typeinfo>       // operator typeid
#include <exception>      // std::exception
class Polymorphic {virtual void member(){}};
int main () {
try
{
Polymorphic * pb = 0;
const std::type_info& ti = typeid(*pb);  // throws a bad_typeid exception
}
catch (std::exception& e)
{
std::cerr << "exception caught: " << e.what() << '\n';
}
return 0;
}
''', 'exception caught: std::bad_typeid')
def test_iostream_ctors(self):
  # iostream stuff must be globally constructed before user global
  # constructors, so iostream works in global constructors
  self.do_run(r'''
#include <iostream>
struct A {
A() { std::cout << "bug"; }
};
A a;
int main() {
std::cout << "free code" << std::endl;
return 0;
}
''', 'bugfree code')
# Mixing C++ exceptions and setjmp/longjmp in the same program; three
# variants share the same SUPPORT_LONGJMP + catching-enabled configuration.
def test_exceptions_longjmp1(self):
  self.set_setting('SUPPORT_LONGJMP')
  self.set_setting('DISABLE_EXCEPTION_CATCHING', 0)
  self.do_core_test('test_exceptions_longjmp1.cpp')

def test_exceptions_longjmp2(self):
  self.set_setting('SUPPORT_LONGJMP')
  self.set_setting('DISABLE_EXCEPTION_CATCHING', 0)
  self.do_core_test('test_exceptions_longjmp2.cpp')

def test_exceptions_longjmp3(self):
  self.set_setting('SUPPORT_LONGJMP')
  self.set_setting('DISABLE_EXCEPTION_CATCHING', 0)
  self.do_core_test('test_exceptions_longjmp3.cpp')
# Marked as impure since the WASI reactor modules (modules without main)
# are not yet suppored by the wasm engines we test against.
@also_with_standalone_wasm(impure=True)
def test_ctors_no_main(self):
  # Global constructors must still run when there is no main() entry point.
  self.emcc_args.append('--no-entry')
  self.do_core_test('test_ctors_no_main.cpp')
def test_class(self):
  self.do_core_test('test_class.cpp')

def test_inherit(self):
  self.do_core_test('test_inherit.cpp')

def test_isdigit_l(self):
  # needs to flush stdio streams
  self.set_setting('EXIT_RUNTIME')
  self.do_core_test('test_isdigit_l.cpp')

def test_iswdigit(self):
  # needs to flush stdio streams
  self.set_setting('EXIT_RUNTIME')
  self.do_core_test('test_iswdigit.cpp')

def test_polymorph(self):
  self.do_core_test('test_polymorph.cpp')

def test_complex(self):
  self.do_core_test('test_complex.c')

def test_float_builtins(self):
  # tests wasm_libc_rt
  self.do_core_test('test_float_builtins.c')
@no_asan('SAFE_HEAP cannot be used with ASan')
def test_segfault(self):
  # SAFE_HEAP must catch a virtual call through a null object pointer, but
  # allow the same call on a valid object. The bad pointer expression is
  # substituted into the source via %s.
  self.set_setting('SAFE_HEAP')
  for addr in ['get_null()', 'new D2()']:
    print(addr)
    src = r'''
#include <stdio.h>
#include <emscripten.h>
struct Classey {
virtual void doIt() = 0;
};
struct D1 : Classey {
virtual void doIt() { printf("fleefl\n"); }
};
struct D2 : Classey {
virtual void doIt() { printf("marfoosh\n"); }
};
EM_JS(Classey*, get_null, (), {
return 0;
});
int main(int argc, char **argv)
{
Classey *p = argc == 100 ? new D1() : (Classey*)%s;
p->doIt();
return 0;
}
''' % addr
    if 'get_null' in addr:
      self.do_run(src, 'segmentation fault', assert_returncode=NON_ZERO)
    else:
      self.do_run(src, 'marfoosh')
def test_dynamic_cast(self):
  self.do_core_test('test_dynamic_cast.cpp')

def test_dynamic_cast_b(self):
  self.do_core_test('test_dynamic_cast_b.cpp')

def test_dynamic_cast_2(self):
  self.do_core_test('test_dynamic_cast_2.cpp')

def test_funcptr(self):
  self.do_core_test('test_funcptr.c')

def test_mathfuncptr(self):
  self.do_core_test('test_mathfuncptr.c')

def test_funcptrfunc(self):
  self.do_core_test('test_funcptrfunc.c')

def test_funcptr_namecollide(self):
  self.do_core_test('test_funcptr_namecollide.c')

def test_emptyclass(self):
  self.do_core_test('test_emptyclass.cpp')

def test_alloca(self):
  self.do_core_test('test_alloca.c')

def test_rename(self):
  self.do_run_in_out_file_test('stdio/test_rename.c')

def test_remove(self):
  # needs to flush stdio streams
  self.set_setting('EXIT_RUNTIME')
  self.do_run_in_out_file_test('cstdio/test_remove.cpp')

def test_alloca_stack(self):
  self.do_core_test('test_alloca_stack.c')

def test_stack_byval(self):
  self.do_core_test('test_stack_byval.cpp')
def test_stack_varargs(self):
  # in node.js we allocate argv[0] on the stack, which means the length
  # of the program directory influences how much stack we need, and so
  # long random temp dir names can lead to random failures. The stack
  # size was increased here to avoid that.
  self.set_setting('INLINING_LIMIT')
  self.set_setting('TOTAL_STACK', 8 * 1024)
  self.do_core_test('test_stack_varargs.c')
def test_stack_varargs2(self):
  # Heavy varargs printf calls in a loop must not exhaust the stack: plain
  # calls, calls whose return value is used, and calls mixed with va_copy.
  # in node.js we allocate argv[0] on the stack, which means the length
  # of the program directory influences how much stack we need, and so
  # long random temp dir names can lead to random failures. The stack
  # size was increased here to avoid that.
  self.set_setting('TOTAL_STACK', 8 * 1024)
  src = r'''
#include <stdio.h>
#include <stdlib.h>
void func(int i) {
}
int main() {
for (int i = 0; i < 7000; i++) {
printf("%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d\n",
i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i);
}
printf("ok!\n");
return 0;
}
'''
  self.do_run(src, 'ok!')
  print('with return')
  src = r'''
#include <stdio.h>
#include <stdlib.h>
int main() {
for (int i = 0; i < 7000; i++) {
int j = printf("%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d",
i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i);
printf(" (%d)\n", j);
}
printf("ok!\n");
return 0;
}
'''
  self.do_run(src, 'ok!')
  print('with definitely no return')
  src = r'''
#include <stdio.h>
#include <stdlib.h>
#include <stdarg.h>
void vary(const char *s, ...)
{
va_list v;
va_start(v, s);
char d[20];
vsnprintf(d, 20, s, v);
puts(d);
// Try it with copying
va_list tempva;
va_copy(tempva, v);
vsnprintf(d, 20, s, tempva);
puts(d);
va_end(v);
}
int main() {
for (int i = 0; i < 7000; i++) {
int j = printf("%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d",
i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i);
printf(" (%d)\n", j);
vary("*cheez: %d+%d*", 99, 24);
vary("*albeit*");
}
printf("ok!\n");
return 0;
}
'''
  self.do_run(src, 'ok!')
def test_stack_void(self):
  self.emcc_args.append('-Wno-format-extra-args')
  self.set_setting('INLINING_LIMIT')
  self.do_core_test('test_stack_void.c')

def test_life(self):
  # Conway's game of life, run for 2 generations.
  self.emcc_args += ['-std=c99']
  self.do_run_in_out_file_test('life.c', args=['2'])

def test_array2(self):
  self.do_core_test('test_array2.c')

def test_array2b(self):
  self.do_core_test('test_array2b.c')

def test_constglobalstructs(self):
  self.do_core_test('test_constglobalstructs.c')

def test_conststructs(self):
  self.do_core_test('test_conststructs.c')

def test_bigarray(self):
  self.do_core_test('test_bigarray.c')

def test_mod_globalstruct(self):
  self.do_core_test('test_mod_globalstruct.c')

def test_sizeof(self):
  self.do_core_test('test_sizeof.cpp')

def test_llvm_used(self):
  self.do_core_test('test_llvm_used.c')

@no_asan('SAFE_HEAP cannot be used with ASan')
def test_set_align(self):
  self.set_setting('SAFE_HEAP')
  self.do_core_test('test_set_align.c')
def test_emscripten_api(self):
  self.set_setting('EXPORTED_FUNCTIONS', ['_main', '_save_me_aimee'])
  self.do_core_test('test_emscripten_api.cpp')
  if '-fsanitize=address' not in self.emcc_args:
    # test EXPORT_ALL (this is not compatible with asan, which doesn't
    # support it, hence the guard above)
    self.set_setting('EXPORTED_FUNCTIONS', [])
    self.set_setting('EXPORT_ALL')
    self.set_setting('LINKABLE')
    self.do_core_test('test_emscripten_api.cpp')
def test_emscripten_run_script_string_int(self):
  # emscripten_run_script_string stringifies a numeric JS result.
  src = r'''
#include <stdio.h>
#include <emscripten.h>
int main() {
const char *str = emscripten_run_script_string("1+1");
printf("got string: %s\n", str);
return 0;
}
'''
  self.do_run(src, '''got string: 2''')

def test_emscripten_run_script_string_utf8(self):
  # Non-ASCII JS strings must come back as valid UTF-8 (each snowman is 3 bytes).
  src = r'''
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <emscripten.h>
int main() {
const char *str = emscripten_run_script_string("'\\u2603 \\u2603 \\u2603 Hello!'");
printf("length of returned string: %zu. Position of substring 'Hello': %zu\n", strlen(str), strstr(str, "Hello")-str);
return 0;
}
'''
  self.do_run(src, '''length of returned string: 18. Position of substring 'Hello': 12''')

def test_emscripten_run_script_string_null(self):
  # A JS expression evaluating to undefined yields a NULL pointer.
  src = r'''
#include <stdio.h>
#include <emscripten.h>
int main() {
const char *str = emscripten_run_script_string("void(0)");
if (str) {
printf("got string: %s\n", str);
} else {
puts("got null");
}
return 0;
}
'''
  self.do_run(src, 'got null')
def test_emscripten_get_now(self):
  # NOTE(review): V8 is banned here, presumably due to its timer resolution — confirm.
  self.banned_js_engines = [config.V8_ENGINE]
  self.set_setting('EXIT_RUNTIME')
  self.maybe_closure()
  self.do_runf(test_file('emscripten_get_now.cpp'), 'Timer resolution is good')

def test_emscripten_get_compiler_setting(self):
  # emscripten_get_compiler_setting requires RETAIN_COMPILER_SETTINGS;
  # without it the call must fail with a helpful message.
  src = test_file('core/emscripten_get_compiler_setting.c')
  output = shared.replace_suffix(src, '.out')
  self.set_setting('ASSERTIONS')
  self.do_runf(src, 'You must build with -s RETAIN_COMPILER_SETTINGS=1', assert_returncode=NON_ZERO)
  self.clear_setting('ASSERTIONS')
  self.set_setting('RETAIN_COMPILER_SETTINGS')
  self.do_runf(src, read_file(output).replace('waka', shared.EMSCRIPTEN_VERSION))
def test_emscripten_has_asyncify(self):
  # emscripten_has_asyncify() reports whether ASYNCIFY was enabled at link time.
  probe = r'''
#include <stdio.h>
#include <emscripten.h>
int main() {
printf("%d\n", emscripten_has_asyncify());
return 0;
}
'''
  # Disabled: the probe prints 0.
  self.set_setting('ASYNCIFY', 0)
  self.do_run(probe, '0')
  # Enabled: the probe prints 1.
  self.set_setting('ASYNCIFY')
  self.do_run(probe, '1')
def test_inlinejs(self):
  # Inline JS (fastcomp-only): check that the JS comments survive into the
  # generated output. The test is currently skipped unconditionally.
  self.skipTest('non-fastcomp is deprecated and fails in 3.5')
  self.do_core_test('test_inlinejs.c')
  # Idiomatic emptiness check instead of '== []'.
  if not self.emcc_args:
    out = read_file('src.js')
    for i in range(1, 5):
      # assertContained reports the missing needle on failure, unlike a bare
      # assert; matches the convention used elsewhere in this suite.
      self.assertContained('comment%d' % i, out)
def test_inlinejs2(self):
  self.skipTest('non-fastcomp is deprecated and fails in 3.5')
  self.do_core_test('test_inlinejs2.c')

def test_inlinejs3(self):
  # Runs the test normally, then again with the emscripten_debugger() call
  # stripped to verify the output still validates.
  if self.is_wasm():
    self.skipTest('wasm requires a proper asm module')
  src = test_file('core/test_inlinejs3.c')
  output = shared.unsuffixed(src) + '.out'
  self.do_core_test('test_inlinejs3.c')
  print('no debugger, check validation')
  src = read_file(src).replace('emscripten_debugger();', '')
  self.do_run(src, read_file(output))
def test_inlinejs4(self):
  # An EM_ASM 'throw' from a C assert macro must abort with a non-zero code.
  self.do_run(r'''
#include <emscripten.h>
#define TO_STRING_INNER(x) #x
#define TO_STRING(x) TO_STRING_INNER(x)
#define assert_msg(msg, file, line) EM_ASM( throw 'Assert (' + msg + ') failed in ' + file + ':' + line + '!'; )
#define assert(expr) { \
if (!(expr)) { \
assert_msg(#expr, TO_STRING(__FILE__), TO_STRING(__LINE__)); \
} \
}
int main(int argc, char **argv) {
assert(argc != 17);
assert(false);
return 0;
}
''', 'false', assert_returncode=NON_ZERO)
# EM_ASM family: each test runs as C++ and again as C (force_c), since the
# macros expand differently under the two languages.
def test_em_asm(self):
  self.do_core_test('test_em_asm.cpp')
  self.emcc_args.append('-std=gnu89')
  self.do_core_test('test_em_asm.cpp', force_c=True)

def test_em_asm_2(self):
  self.do_core_test('test_em_asm_2.cpp')
  self.emcc_args.append('-std=gnu89')
  self.do_core_test('test_em_asm_2.cpp', force_c=True)

@no_asan('Cannot use ASan: test depends exactly on heap size')
def test_main_thread_em_asm(self):
  # Reuse test_em_asm_2 with every EM_ASM swapped for MAIN_THREAD_EM_ASM,
  # in both source and expected output.
  src = read_file(test_file('core/test_em_asm_2.cpp'))
  create_file('src.cpp', src.replace('EM_ASM', 'MAIN_THREAD_EM_ASM'))
  expected_result = read_file(test_file('core/test_em_asm_2.out'))
  create_file('result.out', expected_result.replace('EM_ASM', 'MAIN_THREAD_EM_ASM'))
  self.do_run_from_file('src.cpp', 'result.out')
  self.do_run_from_file('src.cpp', 'result.out', force_c=True)

def test_main_thread_async_em_asm(self):
  self.do_core_test('test_main_thread_async_em_asm.cpp')
  self.do_core_test('test_main_thread_async_em_asm.cpp', force_c=True)

def test_main_thread_em_asm_signatures(self):
  self.do_core_test('test_em_asm_signatures.cpp', assert_returncode=NON_ZERO)

def test_em_asm_unicode(self):
  self.do_core_test('test_em_asm_unicode.cpp')
  self.do_core_test('test_em_asm_unicode.cpp', force_c=True)

def test_em_asm_types(self):
  self.do_core_test('test_em_asm_types.cpp')
  self.do_core_test('test_em_asm_types.cpp', force_c=True)

def test_em_asm_unused_arguments(self):
  self.do_core_test('test_em_asm_unused_arguments.cpp')

def test_em_asm_parameter_pack(self):
  self.do_core_test('test_em_asm_parameter_pack.cpp')

def test_em_asm_arguments_side_effects(self):
  self.do_core_test('test_em_asm_arguments_side_effects.cpp')
  self.do_core_test('test_em_asm_arguments_side_effects.cpp', force_c=True)

def test_em_asm_direct(self):
  self.do_core_test('test_em_asm_direct.c')
# EM_JS in four configurations: {C++, C} x {normal link, MAIN_MODULE}.
@parameterized({
  '': ([], False),
  'c': ([], True),
  'linked': (['-s', 'MAIN_MODULE'], False),
  'linked_c': (['-s', 'MAIN_MODULE'], True),
})
def test_em_js(self, args, force_c):
  if 'MAIN_MODULE' in args and not self.is_wasm():
    self.skipTest('main module support for non-wasm')
  if '-fsanitize=address' in self.emcc_args:
    self.skipTest('no dynamic library support in asan yet')
  self.emcc_args += args + ['-s', 'EXPORTED_FUNCTIONS=_main,_malloc']
  self.do_core_test('test_em_js.cpp', force_c=force_c)
  # The EM_JS body must appear verbatim in the generated JS.
  self.assertContained("no args returning int", read_file('test_em_js.js'))
def test_runtime_stacksave(self):
  self.do_runf(test_file('core/test_runtime_stacksave.c'), 'success')

def test_minimal_runtime_memorygrowth(self):
  # Under MINIMAL_RUNTIME: without growth the allocation OOMs; with
  # ALLOW_MEMORY_GROWTH it succeeds.
  if self.has_changed_setting('ALLOW_MEMORY_GROWTH'):
    self.skipTest('test needs to modify memory growth')
  self.set_setting('MINIMAL_RUNTIME')
  src = test_file('core/test_memorygrowth.c')
  self.do_runf(src, 'OOM', assert_returncode=NON_ZERO)
  self.set_setting('ALLOW_MEMORY_GROWTH')
  self.do_runf(src, '*pre: hello,4.955*\n*hello,4.955*\n*hello,4.955*')
def test_memorygrowth(self):
  # Without ALLOW_MEMORY_GROWTH the program OOMs; with it, it succeeds.
  # Also checks that the non-growth build produces smaller optimized code.
  if self.has_changed_setting('ALLOW_MEMORY_GROWTH'):
    self.skipTest('test needs to modify memory growth')
  if self.maybe_closure():
    # closure plus growth means no dynamic JS code generation
    self.set_setting('DYNAMIC_EXECUTION', 0)
  src = test_file('core/test_memorygrowth.c')
  # Fail without memory growth
  self.do_runf(src, 'OOM', assert_returncode=NON_ZERO)
  fail = read_file('test_memorygrowth.js')
  # Win with it
  self.set_setting('ALLOW_MEMORY_GROWTH')
  self.do_runf(src, '*pre: hello,4.955*\n*hello,4.955*\n*hello,4.955*')
  win = read_file('test_memorygrowth.js')
  if '-O2' in self.emcc_args and not self.is_wasm():
    # Compare only the generated code sections of the two builds.
    possible_starts = ['// EMSCRIPTEN_START_FUNCS', 'var TOTAL_STACK']
    code_start = None
    for s in possible_starts:
      if fail.find(s) >= 0:
        code_start = s
        break
    assert code_start is not None, 'Generated code must contain one of ' + str(possible_starts)
    fail = fail[fail.find(code_start):]
    win = win[win.find(code_start):]
    assert len(fail) < len(win), 'failing code - without memory growth on - is more optimized, and smaller' + str([len(fail), len(win)])
  # Tracing of memory growths should work (SAFE_HEAP would instrument the same hooks)
  if not self.get_setting('SAFE_HEAP'):
    self.emcc_args += ['--tracing']
    self.do_runf(src, '*pre: hello,4.955*\n*hello,4.955*\n*hello,4.955*')
def test_memorygrowth_2(self):
  # Variant of test_memorygrowth with a second allocation pattern.
  if self.has_changed_setting('ALLOW_MEMORY_GROWTH'):
    self.skipTest('test needs to modify memory growth')
  src = test_file('core/test_memorygrowth_2.c')
  # Fail without memory growth
  self.do_runf(src, 'OOM', assert_returncode=NON_ZERO)
  fail = read_file('test_memorygrowth_2.js')
  # Win with it
  self.set_setting('ALLOW_MEMORY_GROWTH')
  self.do_runf(src, '*pre: hello,4.955*\n*hello,4.955*\n*hello,4.955*')
  win = read_file('test_memorygrowth_2.js')
  if '-O2' in self.emcc_args and not self.is_wasm():
    assert len(fail) < len(win), 'failing code - without memory growth on - is more optimized, and smaller' + str([len(fail), len(win)])
def test_memorygrowth_3(self):
  # Growth with non-aborting malloc and SAFE_HEAP instrumentation.
  if self.has_changed_setting('ALLOW_MEMORY_GROWTH'):
    self.skipTest('test needs to modify memory growth')
  self.set_setting('ABORTING_MALLOC', 0)
  self.set_setting('SAFE_HEAP')
  self.do_core_test('test_memorygrowth_3.c')

@also_with_standalone_wasm(impure=True)
def test_memorygrowth_MAXIMUM_MEMORY(self):
  # Growth must stop at the configured MAXIMUM_MEMORY cap.
  if self.has_changed_setting('ALLOW_MEMORY_GROWTH'):
    self.skipTest('test needs to modify memory growth')
  if not self.is_wasm():
    self.skipTest('wasm memory specific test')
  self.emcc_args += ['-s', 'ALLOW_MEMORY_GROWTH', '-s', 'INITIAL_MEMORY=64Mb', '-s', 'MAXIMUM_MEMORY=100Mb']
  self.do_core_test('test_memorygrowth_wasm_mem_max.c')

def test_memorygrowth_linear_step(self):
  # Growth in fixed MEMORY_GROWTH_LINEAR_STEP increments.
  if self.has_changed_setting('ALLOW_MEMORY_GROWTH'):
    self.skipTest('test needs to modify memory growth')
  if not self.is_wasm():
    self.skipTest('wasm memory specific test')
  self.emcc_args += ['-s', 'ALLOW_MEMORY_GROWTH', '-s', 'TOTAL_STACK=1Mb', '-s', 'INITIAL_MEMORY=64Mb', '-s', 'MAXIMUM_MEMORY=130Mb', '-s', 'MEMORY_GROWTH_LINEAR_STEP=1Mb']
  self.do_core_test('test_memorygrowth_memory_growth_step.c')

def test_memorygrowth_geometric_step(self):
  # Geometric growth policy with a cap on the geometric phase.
  if self.has_changed_setting('ALLOW_MEMORY_GROWTH'):
    self.skipTest('test needs to modify memory growth')
  if not self.is_wasm():
    self.skipTest('wasm memory specific test')
  self.emcc_args += ['-s', 'ALLOW_MEMORY_GROWTH', '-s', 'MEMORY_GROWTH_GEOMETRIC_STEP=8.5', '-s', 'MEMORY_GROWTH_GEOMETRIC_CAP=32MB']
  self.do_core_test('test_memorygrowth_geometric_step.c')

def test_memorygrowth_3_force_fail_reallocBuffer(self):
  # Behavior when the growth attempt itself is forced to fail.
  if self.has_changed_setting('ALLOW_MEMORY_GROWTH'):
    self.skipTest('test needs to modify memory growth')
  self.set_setting('ALLOW_MEMORY_GROWTH')
  self.set_setting('TEST_MEMORY_GROWTH_FAILS')
  self.do_core_test('test_memorygrowth_3.c')
# operator new failure behavior, with and without growth (growth capped at 18MB).
@parameterized({
  'nogrow': ([],),
  'grow': (['-sALLOW_MEMORY_GROWTH', '-sMAXIMUM_MEMORY=18MB'],)
})
@no_asan('requires more memory when growing')
def test_aborting_new(self, args):
  self.emcc_args += args
  self.do_core_test('test_aborting_new.cpp')
@no_wasm2js('no WebAssembly.Memory()')
@no_asan('ASan alters the memory size')
def test_module_wasm_memory(self):
  # A user-supplied WebAssembly.Memory (IMPORTED_MEMORY) via pre-js must work.
  self.emcc_args += ['--pre-js', test_file('core/test_module_wasm_memory.js')]
  self.set_setting('IMPORTED_MEMORY')
  self.do_runf(test_file('core/test_module_wasm_memory.c'), 'success')
def test_ssr(self): # struct self-referencing
  # Static struct arrays containing pointers into themselves must be laid
  # out and relocated correctly (pattern from openjpeg's MQ-coder tables).
  src = '''
#include <stdio.h>
// see related things in openjpeg
typedef struct opj_mqc_state {
unsigned int qeval;
int mps;
struct opj_mqc_state *nmps;
struct opj_mqc_state *nlps;
} opj_mqc_state_t;
static opj_mqc_state_t mqc_states[4] = {
{0x5600, 0, &mqc_states[2], &mqc_states[3]},
{0x5602, 1, &mqc_states[3], &mqc_states[2]},
};
int main() {
printf("*%ld*\\n", (long)(mqc_states+1)-(long)mqc_states);
for (int i = 0; i < 2; i++)
printf("%d:%d,%d,%ld,%ld\\n", i, mqc_states[i].qeval, mqc_states[i].mps,
(long)mqc_states[i].nmps-(long)mqc_states, (long)mqc_states[i].nlps-(long)mqc_states);
return 0;
}
'''
  self.do_run(src, '''*16*\n0:22016,0,32,48\n1:22018,1,48,32\n''')
def test_tinyfuncstr(self):
  self.do_core_test('test_tinyfuncstr.cpp')

def test_llvmswitch(self):
  self.do_core_test('test_llvmswitch.c')

def test_cxx_version(self):
  self.do_core_test('test_cxx_version.cpp')
@no_wasm2js('massive switches can break js engines')
def test_bigswitch(self):
  # A very large switch statement, probed at hit and miss values
  # (0xbf4 == 3060 is the final miss case).
  self.do_runf(test_file('bigswitch.cpp'), '''34962: GL_ARRAY_BUFFER (0x8892)
26214: what?
35040: GL_STREAM_DRAW (0x88E0)
3060: what?
''', args=['34962', '26214', '35040', str(0xbf4)])
@no_wasm2js('massive switches can break js engines')
@is_slow_test
def test_biggerswitch(self):
  # A generated 20000-case switch; slow and only viable on optimized builds.
  if not self.is_optimizing():
    self.skipTest('nodejs takes >6GB to compile this if the wasm is not optimized, which OOMs, see https://github.com/emscripten-core/emscripten/issues/7928#issuecomment-458308453')
  if '-Os' in self.emcc_args:
    self.skipTest('hangs in recent upstream clang, see https://bugs.llvm.org/show_bug.cgi?id=43468')
  num_cases = 20000
  switch_case = self.run_process([PYTHON, test_file('gen_large_switchcase.py'), str(num_cases)], stdout=PIPE, stderr=PIPE).stdout
  self.do_run(switch_case, '''58996: 589965899658996
59297: 592975929759297
59598: default
59899: 598995989959899
Success!''')
def test_indirectbr(self):
  # Drop -g: debug info interferes with this test's codegen expectations
  # (the original build flags are filtered rather than replaced).
  self.emcc_args = [x for x in self.emcc_args if x != '-g']
  self.do_core_test('test_indirectbr.c')

@no_asan('local count too large for VMs')
@no_wasm2js('extremely deep nesting, hits stack limit on some VMs')
def test_indirectbr_many(self):
  self.do_core_test('test_indirectbr_many.c')
def test_pack(self):
  """Check struct size/offsets with and without #pragma pack(1)."""
  src = '''
    #include <stdio.h>
    #include <string.h>

    #pragma pack(push,1)
    typedef struct header
    {
      unsigned char id;
      unsigned short colour;
      unsigned char desc;
    } header;
    #pragma pack(pop)

    typedef struct fatheader
    {
      unsigned char id;
      unsigned short colour;
      unsigned char desc;
    } fatheader;

    int main( int argc, const char *argv[] ) {
      header h, *ph = 0;
      fatheader fh, *pfh = 0;
      printf("*%zu,%ld,%ld*\\n", sizeof(header), (long)((long)&h.desc - (long)&h.id), (long)(&ph[1])-(long)(&ph[0]));
      printf("*%zu,%ld,%ld*\\n", sizeof(fatheader), (long)((long)&fh.desc - (long)&fh.id), (long)(&pfh[1])-(long)(&pfh[0]));
      return 0;
    }
  '''
  # packed: size 4, desc at offset 3; unpacked: size 6, desc at offset 4
  self.do_run(src, '*4,3,4*\n*6,4,6*')
def test_varargs(self):
  """Run the varargs core test and compare against its expected output."""
  self.do_core_test('test_varargs.c')

def test_varargs_multi(self):
  """Run the multi-varargs core test and compare against its expected output."""
  self.do_core_test('test_varargs_multi.c')
@unittest.skip('clang cannot compile this code with that target yet')
def test_varargs_byval(self):
  """Pass structs by value through varargs and verify their contents survive (currently skipped)."""
  src = r'''
    #include <stdio.h>
    #include <stdarg.h>

    typedef struct type_a {
      union {
        double f;
        void *p;
        int i;
        short sym;
      } value;
    } type_a;

    enum mrb_vtype {
      MRB_TT_FALSE = 0,   /* 0 */
      MRB_TT_CLASS = 9    /* 9 */
    };

    typedef struct type_b {
      enum mrb_vtype tt:8;
    } type_b;

    void print_type_a(int argc, ...);
    void print_type_b(int argc, ...);

    int main(int argc, char *argv[])
    {
      type_a a;
      type_b b;
      a.value.p = (void*) 0x12345678;
      b.tt = MRB_TT_CLASS;

      printf("The original address of a is: %p\n", a.value.p);
      printf("The original type of b is: %d\n", b.tt);

      print_type_a(1, a);
      print_type_b(1, b);

      return 0;
    }

    void print_type_a(int argc, ...) {
      va_list ap;
      type_a a;

      va_start(ap, argc);
      a = va_arg(ap, type_a);
      va_end(ap);

      printf("The current address of a is: %p\n", a.value.p);
    }

    void print_type_b(int argc, ...) {
      va_list ap;
      type_b b;

      va_start(ap, argc);
      b = va_arg(ap, type_b);
      va_end(ap);

      printf("The current type of b is: %d\n", b.tt);
    }
  '''
  self.do_run(src, '''The original address of a is: 0x12345678
The original type of b is: 9
The current address of a is: 0x12345678
The current type of b is: 9
''')
def test_functionpointer_libfunc_varargs(self):
  """Run the core test calling a varargs library function through a function pointer."""
  self.do_core_test('test_functionpointer_libfunc_varargs.c')
def test_structbyval(self):
  """Verify pass-by-value struct semantics: callees must not mutate the caller's copy."""
  self.set_setting('INLINING_LIMIT')
  src = r'''
    #include <stdio.h>

    struct point
    {
      int x, y;
    };

    void dump(struct point p) {
      p.x++; // should not modify
      p.y++; // anything in the caller!
      printf("dump: %d,%d\n", p.x, p.y);
    }

    void dumpmod(struct point *p) {
      p->x++; // should not modify
      p->y++; // anything in the caller!
      printf("dump: %d,%d\n", p->x, p->y);
    }

    int main( int argc, const char *argv[] ) {
      point p = { 54, 2 };
      printf("pre:  %d,%d\n", p.x, p.y);
      dump(p);
      void (*dp)(point p) = dump; // And, as a function pointer
      dp(p);
      printf("post: %d,%d\n", p.x, p.y);
      dumpmod(&p);
      dumpmod(&p);
      printf("last: %d,%d\n", p.x, p.y);
      return 0;
    }
  '''
  self.do_run(src, 'pre: 54,2\ndump: 55,3\ndump: 55,3\npost: 54,2\ndump: 55,3\ndump: 56,4\nlast: 56,4')
def test_stdlibs(self):
  """Run the general C standard-library core test (SAFE_HEAP disabled, runtime exits)."""
  self.set_setting('SAFE_HEAP', 0)
  self.set_setting('EXIT_RUNTIME')
  self.do_core_test('test_stdlibs.c')
def test_stdbool(self):
  """Check that <stdbool.h> bool/true/false behave as expected."""
  create_file('test_stdbool.c', r'''
    #include <stdio.h>
    #include <stdbool.h>

    int main() {
      bool x = true;
      bool y = false;
      printf("*%d*\n", x != y);
      return 0;
    }
  ''')
  self.do_runf('test_stdbool.c', '*1*')
def test_strtoll_hex(self):
  """strtoll() with hexadecimal input."""
  self.do_core_test('test_strtoll_hex.c')

def test_strtoll_dec(self):
  """strtoll() with decimal input."""
  self.do_core_test('test_strtoll_dec.c')

def test_strtoll_bin(self):
  """strtoll() with binary input."""
  self.do_core_test('test_strtoll_bin.c')

def test_strtoll_oct(self):
  """strtoll() with octal input."""
  self.do_core_test('test_strtoll_oct.c')

def test_strtol_hex(self):
  """strtol() with hexadecimal input."""
  self.do_core_test('test_strtol_hex.c')

def test_strtol_dec(self):
  """strtol() with decimal input."""
  self.do_core_test('test_strtol_dec.c')

def test_strtol_bin(self):
  """strtol() with binary input."""
  self.do_core_test('test_strtol_bin.c')

def test_strtol_oct(self):
  """strtol() with octal input."""
  self.do_core_test('test_strtol_oct.c')
@also_with_standalone_wasm()
def test_atexit(self):
  """Run the atexit core test; EXIT_RUNTIME is needed unless building standalone wasm."""
  if not self.get_setting('STANDALONE_WASM'):
    self.set_setting('EXIT_RUNTIME')
  self.do_core_test('test_atexit.c')

def test_atexit_threads(self):
  """Run the atexit-with-threads core test (runtime must exit for handlers to fire)."""
  self.set_setting('EXIT_RUNTIME')
  self.do_core_test('test_atexit_threads.c')
@no_asan('test relies on null pointer reads')
def test_pthread_specific(self):
  """pthread_setspecific/getspecific behavior."""
  self.do_run_in_out_file_test('pthread/specific.c')

def test_pthread_equal(self):
  """pthread_equal behavior."""
  self.do_run_in_out_file_test('pthread/test_pthread_equal.cpp')

@node_pthreads
def test_pthread_dispatch_after_exit(self):
  """Dispatching work to a pthread after exit; output ordering is not enforced."""
  self.do_run_in_out_file_test('pthread/test_pthread_dispatch_after_exit.c', interleaved_output=False)

@node_pthreads
def test_pthread_atexit(self):
  """atexit handlers with a pthread pool of one worker."""
  self.set_setting('EXIT_RUNTIME')
  self.set_setting('PTHREAD_POOL_SIZE', 1)
  self.do_run_in_out_file_test('pthread/test_pthread_atexit.c')
@node_pthreads
def test_pthread_nested_work_queue(self):
  """Nested work queued on a single pooled pthread."""
  self.set_setting('EXIT_RUNTIME')
  self.set_setting('PTHREAD_POOL_SIZE', 1)
  self.do_run_in_out_file_test('pthread/test_pthread_nested_work_queue.c')

@node_pthreads
def test_pthread_thread_local_storage(self):
  """Thread-local storage under PROXY_TO_PTHREAD; needs a large initial memory."""
  self.set_setting('PROXY_TO_PTHREAD')
  self.set_setting('EXIT_RUNTIME')
  self.set_setting('INITIAL_MEMORY', '300mb')
  self.do_run_in_out_file_test('pthread/test_pthread_thread_local_storage.cpp')

@node_pthreads
def test_pthread_cleanup(self):
  """pthread_cleanup_push/pop with a pool of four workers."""
  self.set_setting('EXIT_RUNTIME')
  self.set_setting('PTHREAD_POOL_SIZE', 4)
  self.do_run_in_out_file_test('pthread/test_pthread_cleanup.cpp')
@node_pthreads
def test_pthread_setspecific_mainthread(self):
  """pthread_setspecific on the main thread, exercised via return, exit() and pthread_exit paths."""
  self.set_setting('EXIT_RUNTIME')
  print('.. return')
  self.do_runf(test_file('pthread/test_pthread_setspecific_mainthread.c'), 'done!', emcc_args=['-DRETURN'])
  print('.. exit')
  self.do_runf(test_file('pthread/test_pthread_setspecific_mainthread.c'), 'done!', emcc_args=['-DEXIT'])
  print('.. pthread_exit')
  self.do_run_in_out_file_test('pthread/test_pthread_setspecific_mainthread.c')
@node_pthreads
@no_mac('https://github.com/emscripten-core/emscripten/issues/15014')
def test_pthread_abort(self):
  """abort() from a pthread must invoke Module.onAbort and exit non-zero."""
  self.set_setting('PROXY_TO_PTHREAD')
  # Add the onAbort hook in js before running
  self.add_pre_run("Module.onAbort = function() { console.log('onAbort called'); }")
  self.do_run_in_out_file_test('pthread/test_pthread_abort.c', assert_returncode=NON_ZERO)
@no_asan('ASan does not support custom memory allocators')
@no_lsan('LSan does not support custom memory allocators')
@node_pthreads
def test_pthread_emmalloc(self):
  """Run the emmalloc allocator test under pthreads (PROXY_TO_PTHREAD)."""
  self.emcc_args += ['-fno-builtin']
  self.set_setting('PROXY_TO_PTHREAD')
  self.set_setting('EXIT_RUNTIME')
  # Fix: pass the setting name and its value as separate arguments.  The
  # previous fused form set_setting('ASSERTIONS=2') would register a bogus
  # setting literally named "ASSERTIONS=2" instead of ASSERTIONS=2 (compare
  # test_stack_overflow below, which uses set_setting('ASSERTIONS', 2)).
  self.set_setting('ASSERTIONS', 2)
  self.set_setting('MALLOC', 'emmalloc')
  self.do_core_test('test_emmalloc.c')
def test_tcgetattr(self):
  """termios tcgetattr() smoke test."""
  self.do_runf(test_file('termios/test_tcgetattr.c'), 'success')

def test_time(self):
  """Run the time core test, then repeat it under a couple of explicit TZ settings."""
  self.do_core_test('test_time.cpp')
  for tz in ['EST+05EDT', 'UTC+0']:
    print('extra tz test:', tz)
    with env_modify({'TZ': tz}):
      # The TZ variable may not be respected everywhere (e.g. by
      # Node.js on Windows), but it does no harm either.
      self.do_core_test('test_time.cpp')
def test_timeb(self):
  # Confirms they are called in reverse order
  self.do_core_test('test_timeb.c')

def test_time_c(self):
  """time() from C."""
  self.do_core_test('test_time_c.c')

def test_gmtime(self):
  """gmtime() behavior."""
  self.do_core_test('test_gmtime.c')

def test_strptime_tm(self):
  """strptime() filling a struct tm."""
  self.do_core_test('test_strptime_tm.c')

def test_strptime_days(self):
  """strptime() day-of-week/day-of-year handling."""
  self.do_core_test('test_strptime_days.c')

def test_strptime_reentrant(self):
  # needs to flush stdio streams
  self.set_setting('EXIT_RUNTIME')
  self.do_core_test('test_strptime_reentrant.c')

def test_strftime(self):
  """strftime() formatting."""
  self.do_core_test('test_strftime.cpp')
def test_trickystring(self):
  """String literals with tricky contents."""
  self.do_core_test('test_trickystring.c')

def test_statics(self):
  """Static variable initialization and lifetime."""
  self.do_core_test('test_statics.cpp')

def test_copyop(self):
  # clang generated code is vulnerable to this, as it uses
  # memcpy for assignments, with hardcoded numbers of bytes
  # (llvm-gcc copies items one by one).
  self.do_core_test('test_copyop.cpp')
def test_memcpy_memcmp(self):
  """Run the memcpy/memcmp core test, comparing a SHA-1 digest of the output."""
  self.banned_js_engines = [config.V8_ENGINE] # Currently broken under V8_ENGINE but not node

  def digest(output):
    # Strip the extra blank-ish line node emits before hashing.
    cleaned = output.replace('\n \n', '\n')
    return hashlib.sha1(cleaned.encode('utf-8')).hexdigest()

  self.do_core_test('test_memcpy_memcmp.c', output_nicerizer=digest)
def test_memcpy2(self):
  """memcpy variant 2."""
  self.do_core_test('test_memcpy2.c')

def test_memcpy3(self):
  """memcpy variant 3."""
  self.do_core_test('test_memcpy3.c')

@also_with_standalone_wasm()
def test_memcpy_alignment(self):
  """memcpy with various alignments."""
  self.do_runf(test_file('test_memcpy_alignment.cpp'), 'OK.')

def test_memset_alignment(self):
  """memset with various alignments."""
  self.do_runf(test_file('test_memset_alignment.cpp'), 'OK.')

def test_memset(self):
  """memset basic behavior."""
  self.do_core_test('test_memset.c')

def test_getopt(self):
  """getopt() with short options."""
  self.do_core_test('test_getopt.c', args=['-t', '12', '-n', 'foobar'])

def test_getopt_long(self):
  """getopt_long() with long options."""
  self.do_core_test('test_getopt_long.c', args=['--file', 'foobar', '-b'])
def test_memmove(self):
  """memmove basic behavior."""
  self.do_core_test('test_memmove.c')

def test_memmove2(self):
  """memmove variant 2."""
  self.do_core_test('test_memmove2.c')

def test_memmove3(self):
  """memmove variant 3."""
  self.do_core_test('test_memmove3.c')

def test_flexarray_struct(self):
  """Struct with a flexible array member."""
  self.do_core_test('test_flexarray_struct.c')

def test_bsearch(self):
  """bsearch() behavior."""
  self.do_core_test('test_bsearch.c')
def test_stack_overflow(self):
  """With ASSERTIONS=2 a stack overflow must be detected and abort with a non-zero code."""
  self.set_setting('ASSERTIONS', 2)
  self.do_runf(test_file('core/stack_overflow.cpp'), 'stack overflow', assert_returncode=NON_ZERO)

def test_stackAlloc(self):
  """Run the stackAlloc core test."""
  self.do_core_test('stackAlloc.cpp')
def test_nestedstructs(self):
  """Check sizes and member offsets of nested structs, unions, packed char arrays and bitfields."""
  src = '''
    #include <stdio.h>
    #include "emscripten.h"

    struct base {
      int x;
      float y;
      union {
        int a;
        float b;
      };
      char c;
    };

    struct hashtableentry {
      int key;
      base data;
    };

    struct hashset {
      typedef hashtableentry entry;
      struct chain { entry elem; chain *next; };
    //  struct chainchunk { chain chains[100]; chainchunk *next; };
    };

    struct hashtable : hashset {
      hashtable() {
        base *b = NULL;
        entry *e = NULL;
        chain *c = NULL;
        printf("*%zu,%ld,%ld,%ld,%ld,%ld|%zu,%ld,%ld,%ld,%ld,%ld,%ld,%ld|%zu,%ld,%ld,%ld,%ld,%ld,%ld,%ld,%ld,%ld*\\n",
          sizeof(base),
          long(&(b->x)), long(&(b->y)), long(&(b->a)), long(&(b->b)), long(&(b->c)),
          sizeof(hashtableentry),
          long(&(e->key)), long(&(e->data)), long(&(e->data.x)), long(&(e->data.y)), long(&(e->data.a)), long(&(e->data.b)), long(&(e->data.c)),
          sizeof(hashset::chain),
          long(&(c->elem)), long(&(c->next)), long(&(c->elem.key)), long(&(c->elem.data)), long(&(c->elem.data.x)), long(&(c->elem.data.y)), long(&(c->elem.data.a)), long(&(c->elem.data.b)), long(&(c->elem.data.c))
        );
      }
    };

    struct B { char buffer[62]; int last; char laster; char laster2; };

    struct Bits {
      unsigned short A : 1;
      unsigned short B : 1;
      unsigned short C : 1;
      unsigned short D : 1;
      unsigned short x1 : 1;
      unsigned short x2 : 1;
      unsigned short x3 : 1;
      unsigned short x4 : 1;
    };

    int main() {
      hashtable t;

      // Part 2 - the char[] should be compressed, BUT have a padding space at the end so the next
      // one is aligned properly. Also handle char; char; etc. properly.
      B *b = NULL;
      printf("*%ld,%ld,%ld,%ld,%ld,%ld,%ld,%ld,%zu*\\n", long(b), long(&(b->buffer)), long(&(b->buffer[0])), long(&(b->buffer[1])), long(&(b->buffer[2])),
                                                          long(&(b->last)), long(&(b->laster)), long(&(b->laster2)), sizeof(B));

      // Part 3 - bitfields, and small structures
      Bits *b2 = NULL;
      printf("*%zu*\\n", sizeof(Bits));

      return 0;
    }
  '''
  # Bloated memory; same layout as C/C++
  self.do_run(src, '*16,0,4,8,8,12|20,0,4,4,8,12,12,16|24,0,20,0,4,4,8,12,12,16*\n*0,0,0,1,2,64,68,69,72*\n*2*')
def prep_dlfcn_main(self):
  """Switch settings from side-module mode to main-module mode for the dlfcn tests."""
  self.set_setting('MAIN_MODULE')
  # Use NODERAWFS so the dlopen()ed .so files are visible to the program.
  self.set_setting('NODERAWFS')
  self.clear_setting('SIDE_MODULE')
def build_dlfcn_lib(self, filename):
  """Build `filename` as a SIDE_MODULE and rename the output to liblib.so."""
  self.clear_setting('MAIN_MODULE')
  self.set_setting('SIDE_MODULE')
  outfile = self.build(filename, js_outfile=not self.is_wasm())
  shutil.move(outfile, 'liblib.so')
@needs_dylink
def test_dlfcn_missing(self):
  """dlopen() of a nonexistent library must fail and set a useful dlerror() message."""
  self.set_setting('MAIN_MODULE')
  self.set_setting('ASSERTIONS')
  src = r'''
    #include <dlfcn.h>
    #include <stdio.h>
    #include <assert.h>

    int main() {
      void* lib_handle = dlopen("libfoo.so", RTLD_NOW);
      assert(!lib_handle);
      printf("error: %s\n", dlerror());
      return 0;
    }
  '''
  self.do_run(src, "error: Could not load dynamic lib: libfoo.so\nError: ENOENT: no such file or directory, open 'libfoo.so'")
@needs_dylink
def test_dlfcn_basic(self):
  """Global constructors run in the main module first, then in the dlopen()ed library."""
  create_file('liblib.cpp', '''
    #include <cstdio>

    class Foo {
    public:
      Foo() {
        puts("Constructing lib object.");
      }
    };

    Foo global;
  ''')
  self.build_dlfcn_lib('liblib.cpp')

  self.prep_dlfcn_main()
  src = '''
    #include <cstdio>
    #include <dlfcn.h>

    class Bar {
    public:
      Bar() {
        puts("Constructing main object.");
      }
    };

    Bar global;

    int main() {
      dlopen("liblib.so", RTLD_NOW);
      return 0;
    }
  '''
  self.do_run(src, 'Constructing main object.\nConstructing lib object.\n')
@needs_dylink
def test_dlfcn_i64(self):
  """Call an int64-returning function obtained with dlsym() from a side module."""
  create_file('liblib.c', '''
    #include <inttypes.h>

    int64_t foo(int x) {
      return (long long)x / (long long)1234;
    }
  ''')
  self.build_dlfcn_lib('liblib.c')

  self.prep_dlfcn_main()
  src = r'''
    #include <inttypes.h>
    #include <stdio.h>
    #include <stdlib.h>
    #include <dlfcn.h>

    typedef int64_t (*int64func)(int);

    int main() {
      void *lib_handle = dlopen("liblib.so", RTLD_NOW);
      if (!lib_handle) {
        puts(dlerror());
        abort();
      }
      printf("dll handle: %p\n", lib_handle);
      int64func x = (int64func)dlsym(lib_handle, "foo");
      printf("foo func handle: %p\n", x);
      if (!x) {
        printf("dlsym failed: %s\n", dlerror());
        return 1;
      }
      printf("|%lld|\n", x(81234567));
      return 0;
    }
  '''
  self.do_run(src, '|65830|')
@needs_dylink
@disabled('EM_ASM in not yet supported in SIDE_MODULE')
def test_dlfcn_em_asm(self):
  """EM_ASM in a dlopen()ed side module (currently disabled)."""
  create_file('liblib.cpp', '''
    #include <emscripten.h>
    class Foo {
    public:
      Foo() {
        EM_ASM( out("Constructing lib object.") );
      }
    };
    Foo global;
  ''')
  self.build_dlfcn_lib('liblib.cpp')

  self.prep_dlfcn_main()
  src = '''
    #include <emscripten.h>
    #include <dlfcn.h>
    class Bar {
    public:
      Bar() {
        EM_ASM( out("Constructing main object.") );
      }
    };
    Bar global;
    int main() {
      dlopen("liblib.so", RTLD_NOW);
      EM_ASM( out("All done.") );
      return 0;
    }
  '''
  self.do_run(src, 'Constructing main object.\nConstructing lib object.\nAll done.\n')
@needs_dylink
def test_dlfcn_qsort(self):
  """Pass a comparison function obtained from a side module into qsort() in the main module."""
  self.set_setting('EXPORTED_FUNCTIONS', ['_get_cmp'])
  create_file('liblib.cpp', '''
    int lib_cmp(const void* left, const void* right) {
      const int* a = (const int*) left;
      const int* b = (const int*) right;
      if(*a > *b) return 1;
      else if(*a == *b) return 0;
      else return -1;
    }

    typedef int (*CMP_TYPE)(const void*, const void*);

    extern "C" CMP_TYPE get_cmp() {
      return lib_cmp;
    }
  ''')
  self.build_dlfcn_lib('liblib.cpp')

  self.prep_dlfcn_main()
  self.set_setting('EXPORTED_FUNCTIONS', ['_main', '_malloc'])
  src = '''
    #include <stdio.h>
    #include <stdlib.h>
    #include <dlfcn.h>

    typedef int (*CMP_TYPE)(const void*, const void*);

    int main_cmp(const void* left, const void* right) {
      const int* a = (const int*) left;
      const int* b = (const int*) right;
      if(*a < *b) return 1;
      else if(*a == *b) return 0;
      else return -1;
    }

    int main() {
      void* lib_handle;
      CMP_TYPE (*getter_ptr)();
      CMP_TYPE lib_cmp_ptr;
      int arr[5] = {4, 2, 5, 1, 3};

      qsort((void*)arr, 5, sizeof(int), main_cmp);
      printf("Sort with main comparison: ");
      for (int i = 0; i < 5; i++) {
        printf("%d ", arr[i]);
      }
      printf("\\n");

      lib_handle = dlopen("liblib.so", RTLD_NOW);
      if (lib_handle == NULL) {
        printf("Could not load lib.\\n");
        return 1;
      }
      getter_ptr = (CMP_TYPE (*)()) dlsym(lib_handle, "get_cmp");
      if (getter_ptr == NULL) {
        printf("Could not find func.\\n");
        return 1;
      }
      lib_cmp_ptr = getter_ptr();
      qsort((void*)arr, 5, sizeof(int), lib_cmp_ptr);
      printf("Sort with lib comparison: ");
      for (int i = 0; i < 5; i++) {
        printf("%d ", arr[i]);
      }
      printf("\\n");

      return 0;
    }
  '''
  self.do_run(src, 'Sort with main comparison: 5 4 3 2 1 *Sort with lib comparison: 1 2 3 4 5 *',
              output_nicerizer=lambda x: x.replace('\n', '*'))
@needs_dylink
def test_dlfcn_data_and_fptr(self):
  """Share data and function pointers in both directions between main module and dlopen()ed library."""
  # Failing under v8 since: https://chromium-review.googlesource.com/712595
  if self.is_wasm():
    self.banned_js_engines = [config.V8_ENGINE]

  create_file('liblib.cpp', r'''
    #include <stdio.h>

    int theglobal = 42;

    extern void parent_func(); // a function that is defined in the parent

    int* lib_get_global_addr() {
      return &theglobal;
    }

    void lib_fptr() {
      printf("Second calling lib_fptr from main.\n");
      parent_func();
      // call it also through a pointer, to check indexizing
      void (*p_f)();
      p_f = parent_func;
      p_f();
    }

    extern "C" void (*func(int x, void(*fptr)()))() {
      printf("In func: %d\n", x);
      fptr();
      return lib_fptr;
    }
  ''')
  self.set_setting('EXPORTED_FUNCTIONS', ['_func'])
  self.build_dlfcn_lib('liblib.cpp')

  self.prep_dlfcn_main()
  src = r'''
    #include <stdio.h>
    #include <dlfcn.h>
    #include <emscripten.h>

    typedef void (*FUNCTYPE(int, void(*)()))();

    FUNCTYPE func;

    void EMSCRIPTEN_KEEPALIVE parent_func() {
      printf("parent_func called from child\n");
    }

    void main_fptr() {
      printf("First calling main_fptr from lib.\n");
    }

    int main() {
      void* lib_handle;
      FUNCTYPE* func_fptr;

      // Test basic lib loading.
      lib_handle = dlopen("liblib.so", RTLD_NOW);
      if (lib_handle == NULL) {
        printf("Could not load lib.\n");
        return 1;
      }

      // Test looked up function.
      func_fptr = (FUNCTYPE*) dlsym(lib_handle, "func");
      // Load twice to test cache.
      func_fptr = (FUNCTYPE*) dlsym(lib_handle, "func");
      if (func_fptr == NULL) {
        printf("Could not find func.\n");
        return 1;
      }

      // Test passing function pointers across module bounds.
      void (*fptr)() = func_fptr(13, main_fptr);
      fptr();

      // Test global data.
      int* globaladdr = (int*) dlsym(lib_handle, "theglobal");
      if (globaladdr == NULL) {
        printf("Could not find global.\n");
        return 1;
      }

      printf("Var: %d\n", *globaladdr);

      return 0;
    }
  '''
  self.set_setting('EXPORTED_FUNCTIONS', ['_main'])
  self.do_run(src, '''\
In func: 13
First calling main_fptr from lib.
Second calling lib_fptr from main.
parent_func called from child
parent_func called from child
Var: 42
''')
@needs_dylink
def test_dlfcn_varargs(self):
  """Side module calls a varargs function defined in the parent."""
  # this test is not actually valid - it fails natively. the child should fail
  # to be loaded, not load and successfully see the parent print_ints func
  create_file('liblib.cpp', r'''
    void print_ints(int n, ...);
    extern "C" void func() {
      print_ints(2, 13, 42);
    }
  ''')
  self.set_setting('EXPORTED_FUNCTIONS', ['_func'])
  self.build_dlfcn_lib('liblib.cpp')

  self.prep_dlfcn_main()
  src = r'''
    #include <stdarg.h>
    #include <stdio.h>
    #include <dlfcn.h>
    #include <assert.h>

    void print_ints(int n, ...) {
      va_list args;
      va_start(args, n);
      for (int i = 0; i < n; i++) {
        printf("%d\n", va_arg(args, int));
      }
      va_end(args);
    }

    int main() {
      void* lib_handle;
      void (*fptr)();

      print_ints(2, 100, 200);

      lib_handle = dlopen("liblib.so", RTLD_NOW);
      assert(lib_handle);
      fptr = (void (*)())dlsym(lib_handle, "func");
      fptr();

      return 0;
    }
  '''
  self.set_setting('EXPORTED_FUNCTIONS', ['_main'])
  self.do_run(src, '100\n200\n13\n42\n')
@needs_dylink
def test_dlfcn_alignment_and_zeroing(self):
  """dlopen()ed modules must get correctly aligned and zero-initialized data, even in dirty memory."""
  self.set_setting('INITIAL_MEMORY', '16mb')
  create_file('liblib.c', r'''
    int prezero = 0;
    __attribute__((aligned(1024))) int superAligned = 12345;
    int postzero = 0;
  ''')
  self.build_dlfcn_lib('liblib.c')
  for i in range(10):
    curr = '%d.so' % i
    shutil.copyfile('liblib.so', curr)

  self.prep_dlfcn_main()
  self.set_setting('INITIAL_MEMORY', '128mb')
  create_file('src.c', r'''
    #include <stdio.h>
    #include <stdlib.h>
    #include <string.h>
    #include <dlfcn.h>
    #include <assert.h>
    #include <emscripten.h>

    int main() {
      printf("'prepare' memory with non-zero inited stuff\n");
      int num = 120 * 1024 * 1024; // total is 128; we'll use 5*5 = 25 at least, so allocate pretty much all of it
      void* mem = malloc(num);
      assert(mem);
      printf("setting this range to non-zero: %ld - %ld\n", (long)mem, ((long)mem) + num);
      memset(mem, 1, num);
      EM_ASM({
        var value = HEAP8[64*1024*1024];
        out('verify middle of memory is non-zero: ' + value);
        assert(value === 1);
      });
      free(mem);
      for (int i = 0; i < 10; i++) {
        char curr[] = "?.so";
        curr[0] = '0' + i;
        printf("loading %s\n", curr);
        void* lib_handle = dlopen(curr, RTLD_NOW);
        if (!lib_handle) {
          puts(dlerror());
          assert(0);
        }
        printf("getting superAligned\n");
        int* superAligned = (int*)dlsym(lib_handle, "superAligned");
        assert(superAligned);
        assert(((long)superAligned) % 1024 == 0); // alignment
        printf("checking value of superAligned, at %p\n", superAligned);
        assert(*superAligned == 12345); // value
        printf("getting prezero\n");
        int* prezero = (int*)dlsym(lib_handle, "prezero");
        assert(prezero);
        printf("checking value of prezero, at %p\n", prezero);
        assert(*prezero == 0);
        *prezero = 1;
        assert(*prezero != 0);
        printf("getting postzero\n");
        int* postzero = (int*)dlsym(lib_handle, "postzero");
        printf("checking value of postzero, at %p\n", postzero);
        assert(postzero);
        printf("checking value of postzero\n");
        assert(*postzero == 0);
        *postzero = 1;
        assert(*postzero != 0);
      }
      printf("success.\n");
      return 0;
    }
  ''')
  self.do_runf('src.c', 'success.\n')
@needs_dylink
def test_dlfcn_self(self):
  """With EXPORT_ALL, check the number of data (global) exports stays within a sane range."""
  self.set_setting('MAIN_MODULE')
  self.set_setting('EXPORT_ALL')

  def get_data_export_count(wasm):
    # Count '(export ...)' lines in the disassembly that export a global.
    wat = self.get_wasm_text(wasm)
    lines = wat.splitlines()
    exports = [l for l in lines if l.strip().startswith('(export ')]
    data_exports = [l for l in exports if '(global ' in l]
    return len(data_exports)

  self.do_core_test('test_dlfcn_self.c')
  export_count = get_data_export_count('test_dlfcn_self.wasm')
  self.assertGreater(export_count, 20)
  self.assertLess(export_count, 56)
@needs_dylink
def test_dlfcn_unique_sig(self):
  """dlsym() a function with an unusually long (13-int) signature and call it."""
  create_file('liblib.c', r'''
    #include <stdio.h>

    int myfunc(int a, int b, int c, int d, int e, int f, int g, int h, int i, int j, int k, int l, int m) {
      return 13;
    }
  ''')
  self.set_setting('EXPORTED_FUNCTIONS', ['_myfunc'])
  self.build_dlfcn_lib('liblib.c')

  self.prep_dlfcn_main()
  create_file('main.c', r'''
    #include <assert.h>
    #include <stdio.h>
    #include <dlfcn.h>

    typedef int (*FUNCTYPE)(int, int, int, int, int, int, int, int, int, int, int, int, int);

    int main() {
      void *lib_handle;
      FUNCTYPE func_ptr;

      lib_handle = dlopen("liblib.so", RTLD_NOW);
      assert(lib_handle != NULL);

      func_ptr = (FUNCTYPE)dlsym(lib_handle, "myfunc");
      assert(func_ptr != NULL);
      assert(func_ptr(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0) == 13);

      puts("success");

      return 0;
    }
  ''')
  self.set_setting('EXPORTED_FUNCTIONS', ['_main', '_malloc'])
  self.do_runf('main.c', 'success')
@needs_dylink
def test_dlfcn_info(self):
  """dladdr() must fill a Dl_info without corrupting the looked-up function pointer."""
  create_file('liblib.c', r'''
    #include <stdio.h>

    int myfunc(int a, int b, int c, int d, int e, int f, int g, int h, int i, int j, int k, int l, int m) {
      return 13;
    }
  ''')
  self.set_setting('EXPORTED_FUNCTIONS', ['_myfunc'])
  self.build_dlfcn_lib('liblib.c')

  self.prep_dlfcn_main()
  create_file('main.c', '''
    #include <assert.h>
    #include <stdio.h>
    #include <string.h>
    #include <dlfcn.h>

    typedef int (*FUNCTYPE)(int, int, int, int, int, int, int, int, int, int, int, int, int);

    int main() {
      void *lib_handle;
      FUNCTYPE func_ptr;

      lib_handle = dlopen("liblib.so", RTLD_NOW);
      assert(lib_handle != NULL);

      func_ptr = (FUNCTYPE)dlsym(lib_handle, "myfunc");
      assert(func_ptr != NULL);
      assert(func_ptr(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0) == 13);

      /* Verify that we don't corrupt func_ptr when calling dladdr.  */
      Dl_info info;
      memset(&info, 0, sizeof(info));
      dladdr(func_ptr, &info);

      assert(func_ptr != NULL);
      assert(func_ptr(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0) == 13);

      /* Verify something useful lives in info.  */
      assert(info.dli_fname != NULL);
      assert(info.dli_fbase == NULL);
      assert(info.dli_sname == NULL);
      assert(info.dli_saddr == NULL);

      puts("success");

      return 0;
    }
  ''')
  self.set_setting('EXPORTED_FUNCTIONS', ['_main', '_malloc'])
  self.do_runf('main.c', 'success')
@needs_dylink
def test_dlfcn_stacks(self):
  """A side-module function with a large stack frame must not trample the caller's stack."""
  create_file('liblib.c', r'''
    #include <assert.h>
    #include <stdio.h>
    #include <string.h>

    int myfunc(const char *input) {
      char bigstack[1024] = { 0 };

      // make sure we didn't just trample the stack!
      assert(!strcmp(input, "foobar"));

      snprintf(bigstack, sizeof(bigstack), "%s", input);
      return strlen(bigstack);
    }
  ''')
  self.set_setting('EXPORTED_FUNCTIONS', ['_myfunc'])
  self.build_dlfcn_lib('liblib.c')

  self.prep_dlfcn_main()
  create_file('main.c', '''
    #include <assert.h>
    #include <stdio.h>
    #include <dlfcn.h>
    #include <string.h>

    typedef int (*FUNCTYPE)(const char *);

    int main() {
      void *lib_handle;
      FUNCTYPE func_ptr;
      char str[128];

      snprintf(str, sizeof(str), "foobar");

      // HACK: Use strcmp in the main executable so that it doesn't get optimized out and the dynamic library
      //       is able to use it.
      assert(!strcmp(str, "foobar"));

      lib_handle = dlopen("liblib.so", RTLD_NOW);
      assert(lib_handle != NULL);

      func_ptr = (FUNCTYPE)dlsym(lib_handle, "myfunc");
      assert(func_ptr != NULL);
      assert(func_ptr(str) == 6);

      puts("success");

      return 0;
    }
  ''')
  self.set_setting('EXPORTED_FUNCTIONS', ['_main', '_malloc', '_strcmp'])
  self.do_runf('main.c', 'success')
@needs_dylink
def test_dlfcn_funcs(self):
  """Exchange void- and int-taking function pointers in both directions across the dlopen boundary."""
  create_file('liblib.c', r'''
    #include <assert.h>
    #include <stdio.h>
    #include <string.h>

    typedef void (*voidfunc)();
    typedef void (*intfunc)(int);

    void callvoid(voidfunc f) { f(); }
    void callint(voidfunc f, int x) { f(x); }

    void void_0() { printf("void 0\n"); }
    void void_1() { printf("void 1\n"); }
    voidfunc getvoid(int i) {
      switch(i) {
        case 0: return void_0;
        case 1: return void_1;
        default: return NULL;
      }
    }

    void int_0(int x) { printf("int 0 %d\n", x); }
    void int_1(int x) { printf("int 1 %d\n", x); }
    intfunc getint(int i) {
      switch(i) {
        case 0: return int_0;
        case 1: return int_1;
        default: return NULL;
      }
    }
  ''')
  self.set_setting('EXPORTED_FUNCTIONS', ['_callvoid', '_callint', '_getvoid', '_getint'])
  self.build_dlfcn_lib('liblib.c')

  self.prep_dlfcn_main()
  create_file('main.c', r'''
    #include <assert.h>
    #include <stdio.h>
    #include <dlfcn.h>

    typedef void (*voidfunc)();
    typedef void (*intfunc)(int);

    typedef void (*voidcaller)(voidfunc);
    typedef void (*intcaller)(intfunc, int);

    typedef voidfunc (*voidgetter)(int);
    typedef intfunc (*intgetter)(int);

    void void_main() { printf("void_main.\n"); }
    void int_main(int x) { printf("int_main %d\n", x); }

    int main() {
      printf("go\n");
      void *lib_handle;
      lib_handle = dlopen("liblib.so", RTLD_NOW);
      assert(lib_handle != NULL);

      voidcaller callvoid = (voidcaller)dlsym(lib_handle, "callvoid");
      assert(callvoid != NULL);
      callvoid(void_main);

      intcaller callint = (intcaller)dlsym(lib_handle, "callint");
      assert(callint != NULL);
      callint(int_main, 201);

      voidgetter getvoid = (voidgetter)dlsym(lib_handle, "getvoid");
      assert(getvoid != NULL);
      callvoid(getvoid(0));
      callvoid(getvoid(1));

      intgetter getint = (intgetter)dlsym(lib_handle, "getint");
      assert(getint != NULL);
      callint(getint(0), 54);
      callint(getint(1), 9000);

      assert(getint(1000) == NULL);

      puts("ok");
      return 0;
    }
  ''')
  self.set_setting('EXPORTED_FUNCTIONS', ['_main', '_malloc'])
  self.do_runf('main.c', '''go
void_main.
int_main 201
void 0
void 1
int 0 54
int 1 9000
ok
''')
@needs_dylink
def test_dlfcn_mallocs(self):
  """malloc/free proxied through a side module (exercised by dlmalloc_proxy.c)."""
  # will be exhausted without functional malloc/free
  self.set_setting('INITIAL_MEMORY', '64mb')

  create_file('liblib.c', r'''
    #include <assert.h>
    #include <stdio.h>
    #include <string.h>
    #include <stdlib.h>

    void *mallocproxy(int n) { return malloc(n); }
    void freeproxy(void *p) { free(p); }
  ''')
  self.set_setting('EXPORTED_FUNCTIONS', ['_mallocproxy', '_freeproxy'])
  self.build_dlfcn_lib('liblib.c')

  self.prep_dlfcn_main()
  self.set_setting('EXPORTED_FUNCTIONS', ['_main', '_malloc', '_free'])
  self.do_runf(test_file('dlmalloc_proxy.c'), '*294,153*')
@needs_dylink
def test_dlfcn_longjmp(self):
  """longjmp() out of a function that lives in a dlopen()ed side module."""
  create_file('liblib.c', r'''
    #include <setjmp.h>
    #include <stdio.h>

    void jumpy(jmp_buf buf) {
      static int i = 0;
      i++;
      if (i == 10) longjmp(buf, i);
      printf("pre %d\n", i);
    }
  ''')
  self.build_dlfcn_lib('liblib.c')

  self.prep_dlfcn_main()
  create_file('main.c', r'''
    #include <assert.h>
    #include <stdio.h>
    #include <dlfcn.h>
    #include <setjmp.h>

    typedef void (*jumpfunc)(jmp_buf);

    int main() {
      printf("go!\n");

      void *lib_handle;
      lib_handle = dlopen("liblib.so", RTLD_NOW);
      assert(lib_handle != NULL);

      jumpfunc jumpy = (jumpfunc)dlsym(lib_handle, "jumpy");
      assert(jumpy);

      jmp_buf buf;
      int jmpval = setjmp(buf);
      if (jmpval == 0) {
        while (1) jumpy(buf);
      } else {
        printf("out!\n");
      }

      return 0;
    }
  ''')
  self.set_setting('EXPORTED_FUNCTIONS', ['_main', '_malloc', '_free'])
  self.do_runf('main.c', '''go!
pre 1
pre 2
pre 3
pre 4
pre 5
pre 6
pre 7
pre 8
pre 9
out!
''', force_c=True)
# TODO: make this work. need to forward tempRet0 across modules
# TODO Enable @with_both_exception_handling (the test is not working now)
@needs_dylink
def zzztest_dlfcn_exceptions(self):
  """C++ exceptions thrown across the dlopen boundary (disabled via the zzz prefix)."""
  self.set_setting('DISABLE_EXCEPTION_CATCHING', 0)

  create_file('liblib.cpp', r'''
    extern "C" {
      int ok() {
        return 65;
      }
      int fail() {
        throw 123;
      }
    }
  ''')
  self.build_dlfcn_lib('liblib.cpp')

  self.prep_dlfcn_main()
  src = r'''
    #include <assert.h>
    #include <stdio.h>
    #include <dlfcn.h>

    typedef int (*intfunc)();

    int main() {
      printf("go!\n");

      void *lib_handle;
      lib_handle = dlopen("liblib.so", RTLD_NOW);
      assert(lib_handle != NULL);

      intfunc okk = (intfunc)dlsym(lib_handle, "ok");
      intfunc faill = (intfunc)dlsym(lib_handle, "fail");
      assert(okk && faill);

      try {
        printf("ok: %d\n", okk());
      } catch(...) {
        printf("wha\n");
      }

      try {
        printf("fail: %d\n", faill());
      } catch(int x) {
        printf("int %d\n", x);
      }

      try {
        printf("fail: %d\n", faill());
      } catch(double x) {
        printf("caught %f\n", x);
      }

      return 0;
    }
  '''
  self.set_setting('EXPORTED_FUNCTIONS', ['_main', '_malloc', '_free'])
  self.do_run(src, '''go!
ok: 65
int 123
ok
''')
@needs_dylink
def test_dlfcn_handle_alloc(self):
  """dlopen() must reuse the handle for an already-open library and not recycle a closed one."""
  # verify that dlopen does not allocate already used handles
  dirname = self.get_dir()

  def indir(name):
    # Absolute path inside the test's working directory.
    return os.path.join(dirname, name)

  create_file('a.cpp', r'''
    #include <stdio.h>

    static struct a {
      a() {
        puts("a: loaded");
      }
    } _;
  ''')

  create_file('b.cpp', r'''
    #include <stdio.h>

    static struct b {
      b() {
        puts("b: loaded");
      }
    } _;
  ''')

  self.build_dlfcn_lib('a.cpp')
  shutil.move(indir('liblib.so'), indir('liba.so'))
  self.build_dlfcn_lib('b.cpp')
  shutil.move(indir('liblib.so'), indir('libb.so'))

  self.set_setting('MAIN_MODULE')
  self.set_setting('NODERAWFS')
  self.clear_setting('SIDE_MODULE')

  create_file('main.c', r'''
    #include <dlfcn.h>
    #include <assert.h>
    #include <stddef.h>

    int main() {
      void *liba, *libb, *liba2, *libb2;
      int err;

      liba = dlopen("liba.so", RTLD_NOW);
      assert(liba != NULL);
      libb = dlopen("libb.so", RTLD_NOW);
      assert(libb != NULL);

      // Test that opening libb a second times gives the same handle
      libb2 = dlopen("libb.so", RTLD_NOW);
      assert(libb == libb2);

      err = dlclose(liba);
      assert(!err);

      liba2 = dlopen("liba.so", RTLD_NOW);
      assert(liba2 != libb);

      return 0;
    }
  ''')
  self.do_runf('main.c', 'a: loaded\nb: loaded\n')
@needs_dylink
@needs_non_trapping_float_to_int
def test_dlfcn_feature_in_lib(self):
  """A side module built with -mnontrapping-fptoint must load and run its saturating conversion."""
  self.emcc_args.append('-mnontrapping-fptoint')

  create_file('liblib.cpp', r'''
    extern "C" int magic(float x) {
      return __builtin_wasm_trunc_saturate_s_i32_f32(x);
    }
  ''')
  self.build_dlfcn_lib('liblib.cpp')

  self.prep_dlfcn_main()
  src = r'''
    #include <dlfcn.h>
    #include <stdio.h>
    #include <stdlib.h>

    typedef int (*fi)(float);

    int main() {
      void *lib_handle = dlopen("liblib.so", RTLD_NOW);
      if (!lib_handle) {
        puts(dlerror());
        abort();
      }
      fi x = (fi)dlsym(lib_handle, "magic");
      if (!x) {
        puts(dlerror());
        abort();
      }
      printf("float: %d.\n", x(42.99));
      return 0;
    }
  '''
  self.do_run(src, 'float: 42.\n')
def dylink_test(self, main, side, expected=None, header=None, force_c=False,
                main_module=2, **kwargs):
  """Build and run a main+side dynamic-linking pair given as source strings.

  `main` and `side` may each be a source string (written to a temp file here)
  or a list (passed through as-is).  An optional shared `header` is written
  to header.h.  All remaining work is delegated to dylink_testf.
  """
  # Same as dylink_testf but take source code in string form
  if not isinstance(side, list):
    side_file = 'liblib.cpp' if not force_c else 'liblib.c'
    create_file(side_file, side)
    side = side_file
  if not isinstance(main, list):
    main_file = 'main.cpp' if not force_c else 'main.c'
    create_file(main_file, main)
    main = main_file
  if header:
    create_file('header.h', header)

  return self.dylink_testf(main, side, expected, force_c, main_module=main_module, **kwargs)
def dylink_testf(self, main, side=None, expected=None, force_c=False, main_emcc_args=[],
main_module=2,
so_name='liblib.so',
need_reverse=True, **kwargs):
self.maybe_closure()
# Same as dylink_test but takes source code as filenames on disc.
old_args = self.emcc_args.copy()
if not expected:
outfile = shared.replace_suffix(main, '.out')
expected = read_file(outfile)
if not side:
side, ext = os.path.splitext(main)
side += '_side' + ext
# side settings
self.clear_setting('MAIN_MODULE')
self.set_setting('SIDE_MODULE')
side_suffix = 'wasm' if self.is_wasm() else 'js'
if isinstance(side, list):
out_file = 'liblib.' + side_suffix
# side is just a library
self.run_process([EMCC] + side + self.get_emcc_args() + ['-o', out_file])
else:
out_file = self.build(side, js_outfile=(side_suffix == 'js'))
shutil.move(out_file, so_name)
# main settings
self.set_setting('MAIN_MODULE', main_module)
self.clear_setting('SIDE_MODULE')
self.emcc_args += main_emcc_args
self.emcc_args.append(so_name)
if force_c:
self.emcc_args.append('-nostdlib++')
if isinstance(main, list):
# main is just a library
try_delete('main.js')
self.run_process([EMCC] + main + self.get_emcc_args() + ['-o', 'main.js'])
self.do_run('main.js', expected, no_build=True, **kwargs)
else:
self.do_runf(main, expected, force_c=force_c, **kwargs)
self.emcc_args = old_args
if need_reverse:
print('flip')
# Test the reverse as well. There we flip the role of the side module and main module.
# - We add --no-entry since the side module doesn't have a `main`
self.dylink_testf(side, main, expected, force_c, main_emcc_args + ['--no-entry'],
need_reverse=False, **kwargs)
  def do_basic_dylink_test(self, **kwargs):
    """Smoke test: a main module calling one function in a side module."""
    self.dylink_test(r'''
      #include <stdio.h>
      #include "header.h"

      int main() {
        printf("other says %d.\n", sidey());
        return 0;
      }
    ''', '''
      #include "header.h"

      int sidey() {
        return 11;
      }
    ''', 'other says 11.', 'int sidey();', force_c=True, **kwargs)

  @needs_dylink
  def test_dylink_basics(self):
    """Basic dylink, forward direction only; also checks strict-mode JS."""
    self.do_basic_dylink_test(need_reverse=False)
    self.verify_in_strict_mode('main.js')

  @needs_dylink
  def test_dylink_basics_no_modify(self):
    """Basic dylink with ERROR_ON_WASM_CHANGES_AFTER_LINK (no binaryen pass)."""
    if self.is_optimizing():
      self.skipTest('no modify mode only works with non-optimizing builds')
    self.set_setting('WASM_BIGINT')
    self.set_setting('ERROR_ON_WASM_CHANGES_AFTER_LINK')
    self.do_basic_dylink_test()

  @needs_dylink
  def test_dylink_basics_lld_report_undefined(self):
    """Basic dylink with LLD_REPORT_UNDEFINED enabled."""
    self.set_setting('LLD_REPORT_UNDEFINED')
    self.do_basic_dylink_test()

  @needs_dylink
  def test_dylink_no_export(self):
    """Basic dylink with NO_DECLARE_ASM_MODULE_EXPORTS."""
    self.set_setting('NO_DECLARE_ASM_MODULE_EXPORTS')
    self.do_basic_dylink_test()

  @needs_dylink
  def test_dylink_memory_growth(self):
    """Basic dylink with a growable memory (wasm only)."""
    if not self.is_wasm():
      self.skipTest('wasm only')
    self.set_setting('ALLOW_MEMORY_GROWTH')
    self.do_basic_dylink_test()

  @needs_dylink
  def test_dylink_safe_heap(self):
    """Basic dylink with SAFE_HEAP instrumentation enabled."""
    self.set_setting('SAFE_HEAP')
    self.do_basic_dylink_test()
  @needs_dylink
  def test_dylink_function_pointer_equality(self):
    """The address of a shared function (puts) must compare equal when
    taken in the main module and in the side module."""
    self.dylink_test(r'''
      #include <stdio.h>
      #include "header.h"

      int main() {
        void* puts_side = get_address();
        printf("main module address %p.\n", &puts);
        printf("side module address address %p.\n", puts_side);
        if (&puts == puts_side)
          printf("success\n");
        else
          printf("failure\n");
        return 0;
      }
    ''', '''
      #include <stdio.h>
      #include "header.h"

      void* get_address() {
        return (void*)&puts;
      }
    ''', 'success', header='void* get_address();', force_c=True)

  @needs_dylink
  def test_dylink_floats(self):
    """Pass a float return value across the module boundary."""
    self.dylink_test(r'''
      #include <stdio.h>
      extern float sidey();
      int main() {
        printf("other says %.2f.\n", sidey()+1);
        return 0;
      }
    ''', '''
      float sidey() { return 11.5; }
    ''', 'other says 12.50', force_c=True)

  @needs_dylink
  def test_dylink_printf(self):
    """Both modules can use printf (shared libc state)."""
    self.dylink_test(r'''
      #include <stdio.h>
      void sidey();
      int main() {
        printf("hello from main\n");
        sidey();
        return 0;
      }
    ''', r'''
      #include <stdio.h>
      void sidey() {
        printf("hello from side\n");
      }
    ''', 'hello from main\nhello from side\n', force_c=True)
  @needs_dylink
  def test_dylink_funcpointer(self):
    """A function pointer passed to the side module and returned back must
    keep its identity and stay callable from both sides."""
    self.dylink_test(
      main=r'''
      #include <stdio.h>
      #include <assert.h>
      #include "header.h"
      intfunc sidey(intfunc f);
      void a(int arg) { printf("hello from funcptr: %d\n", arg); }
      int main() {
        intfunc b = sidey(a);
        assert(a == b);
        b(0);
        return 0;
      }
      ''',
      side='''
      #include "header.h"
      intfunc sidey(intfunc f) { f(1); return f; }
      ''',
      expected='hello from funcptr: 1\nhello from funcptr: 0\n',
      header='typedef void (*intfunc)(int );', force_c=True)

  @needs_dylink
  def test_dylink_static_funcpointers(self):
    """Function pointers stored in a static array in the main module remain
    callable when handed to the side module."""
    self.dylink_test(
      main=r'''
      #include <stdio.h>
      #include "header.h"
      void areturn0() { printf("hello 0\n"); }
      void areturn1() { printf("hello 1\n"); }
      void areturn2() { printf("hello 2\n"); }
      voidfunc func_ptrs[3] = { areturn0, areturn1, areturn2 };
      int main(int argc, char **argv) {
        sidey(func_ptrs[0]);
        sidey(func_ptrs[1]);
        sidey(func_ptrs[2]);
        return 0;
      }
      ''',
      side='''
      #include "header.h"
      void sidey(voidfunc f) { f(); }
      ''',
      expected='hello 0\nhello 1\nhello 2\n',
      header='typedef void (*voidfunc)(); void sidey(voidfunc f);', force_c=True)

  @needs_dylink
  def test_dylink_funcpointers_wrapper(self):
    """Pointers to a JS-implemented function (emscripten_run_script) work
    when taken in either module."""
    self.dylink_test(
      main=r'''\
      #include <stdio.h>
      #include "header.h"
      int main(int argc, char **argv) {
        charfunc f1 = emscripten_run_script;
        f1("console.log('one')");
        charfunc f2 = get();
        f2("console.log('two')");
        return 0;
      }
      ''',
      side='''\
      #include "header.h"
      charfunc get() {
        return emscripten_run_script;
      }
      ''',
      expected='one\ntwo\n',
      header='''\
      #include <emscripten.h>
      typedef void (*charfunc)(const char*);
      extern charfunc get();
      ''', force_c=True)

  @needs_dylink
  def test_dylink_static_funcpointer_float(self):
    """A statically-initialized pointer to a float-signature function can be
    passed to and invoked from the side module."""
    self.dylink_test(
      main=r'''\
      #include <stdio.h>
      #include "header.h"
      int sidey(floatfunc f);
      float func1(float f) { printf("hello 1: %f\n", f); return 0; }
      floatfunc f1 = &func1;
      int main(int argc, char **argv) {
        printf("got: %d\n", sidey(f1));
        f1(12.34);
        return 0;
      }
      ''',
      side='''\
      #include "header.h"
      int sidey(floatfunc f) { f(56.78); return 1; }
      ''',
      expected='hello 1: 56.779999\ngot: 1\nhello 1: 12.340000\n',
      header='typedef float (*floatfunc)(float);', force_c=True)
  @needs_dylink
  def test_missing_signatures(self):
    """Taking addresses of JS library functions in a MAIN_MODULE build must
    link even with memory growth / 4GB max configured."""
    create_file('test_sig.c', r'''#include <emscripten.h>
                                  int main() {
                                    return 0 == ( (long)&emscripten_run_script_string +
                                                  (long)&emscripten_run_script );
                                  }''')
    self.set_setting('MAIN_MODULE', 1)
    self.set_setting('ALLOW_MEMORY_GROWTH', '1')
    self.set_setting('MAXIMUM_MEMORY', '4GB')
    self.do_runf('test_sig.c', '')

  @needs_dylink
  def test_dylink_global_init(self):
    """A C++ static constructor in the main module runs under dylink."""
    self.dylink_test(r'''
      #include <stdio.h>
      struct Class {
        Class() { printf("a new Class\n"); }
      };
      static Class c;
      int main() {
        return 0;
      }
    ''', r'''
      void nothing() {}
    ''', 'a new Class\n')

  @needs_dylink
  def test_dylink_global_inits(self):
    """Static constructors in BOTH modules run (order unspecified); also
    re-run with ASSERTIONS=2 to exercise the warning paths."""
    def test():
      self.dylink_test(header=r'''
        #include <stdio.h>
        struct Class {
          Class(const char *name) { printf("new %s\n", name); }
        };
      ''', main=r'''
        #include "header.h"
        static Class c("main");
        int main() {
          return 0;
        }
      ''', side=r'''
        #include "header.h"
        static Class c("side");
      ''', expected=['new main\nnew side\n', 'new side\nnew main\n'])
    test()

    print('check warnings')
    self.set_setting('ASSERTIONS', 2)
    test()

  @needs_dylink
  def test_dylink_i64(self):
    """Return an int64 value across the module boundary."""
    self.dylink_test(r'''
      #include <stdio.h>
      #include <stdint.h>
      extern int64_t sidey();
      int main() {
        printf("other says %lld.\n", sidey());
        return 0;
      }
    ''', '''
      #include <stdint.h>
      int64_t sidey() {
        return 42;
      }
    ''', 'other says 42.', force_c=True)
@all_engines
@needs_dylink
def test_dylink_i64_b(self):
self.dylink_test(r'''
#include <stdio.h>
#include <stdint.h>
extern int64_t sidey();
int64_t testAdd(int64_t a) {
return a + 1;
}
int64_t testAddB(int a) {
return a + 1;
}
typedef int64_t (*testAddHandler)(int64_t);
testAddHandler h = &testAdd;
typedef int64_t (*testAddBHandler)(int);
testAddBHandler hb = &testAddB;
int main() {
printf("other says %lld.\n", sidey());
int64_t r = h(42);
printf("my fp says: %lld.\n", r);
int64_t rb = hb(42);
printf("my second fp says: %lld.\n", r);
}
''', '''
#include <stdint.h>
int64_t sidey() {
volatile int64_t x = 0x12345678abcdef12LL;
x += x % 17;
x = 18 - x;
return x;
}
''', 'other says -1311768467750121224.\nmy fp says: 43.\nmy second fp says: 43.', force_c=True)
  @needs_dylink
  @also_with_wasm_bigint
  def test_dylink_i64_c(self):
    """i32- and i64-returning functions called via pointers, with the
    targets defined both in the main module and in the side module."""
    self.dylink_test(r'''
      #include <stdio.h>
      #include <inttypes.h>
      #include "header.h"
      typedef int32_t (*fp_type_32)(int32_t, int32_t, int32_t);
      typedef int64_t (*fp_type_64)(int32_t, int32_t, int32_t);

      int32_t internal_function_ret_32(int32_t i, int32_t j, int32_t k) {
          return 32;
      }
      int64_t internal_function_ret_64(int32_t i, int32_t j, int32_t k) {
          return 64;
      }

      int main() {
          fp_type_32 fp32_internal = &internal_function_ret_32;
          fp_type_32 fp32_external = &function_ret_32;
          fp_type_64 fp64_external = &function_ret_64;
          fp_type_64 fp64_internal = &internal_function_ret_64;
          int32_t ires32 = fp32_internal(0,0,0);
          printf("res32 - internal %d\n",ires32);
          int32_t eres32 = fp32_external(0,0,0);
          printf("res32 - external %d\n",eres32);

          int64_t ires64 = fp64_internal(0,0,0);
          printf("res64 - internal %" PRId64 "\n",ires64);
          int64_t eres64 = fp64_external(0,0,0);
          printf("res64 - external %" PRId64 "\n",eres64);
          return 0;
      }
    ''', '''
      #include "header.h"
      int32_t function_ret_32(int32_t i, int32_t j, int32_t k) {
          return 32;
      }
      int64_t function_ret_64(int32_t i, int32_t j, int32_t k) {
          return 64;
      }
    ''', '''res32 - internal 32
res32 - external 32
res64 - internal 64
res64 - external 64\n''', header='''
      #include <emscripten.h>
      #include <stdint.h>
      EMSCRIPTEN_KEEPALIVE int32_t function_ret_32(int32_t i, int32_t j, int32_t k);
      EMSCRIPTEN_KEEPALIVE int64_t function_ret_64(int32_t i, int32_t j, int32_t k);
    ''', force_c=True)

  @needs_dylink
  @also_with_wasm_bigint
  def test_dylink_i64_invoke(self):
    """i64 arguments/returns through an invoke (exception-catching) wrapper
    in the side module."""
    self.set_setting('DISABLE_EXCEPTION_CATCHING', 0)
    self.dylink_test(r'''\
    #include <stdio.h>
    #include <stdint.h>

    extern "C" int64_t sidey(int64_t arg);

    int main(int argc, char *argv[]) {
      int64_t temp = 42;
      printf("got %lld\n", sidey(temp));
      return 0;
    }''', r'''\
    #include <stdint.h>
    #include <stdio.h>
    #include <emscripten.h>

    extern "C" {

    EMSCRIPTEN_KEEPALIVE int64_t do_call(int64_t arg) {
      if (arg == 0) {
        throw;
      }
      return 2 * arg;
    }
    int64_t sidey(int64_t arg) {
      try {
        return do_call(arg);
      } catch(...) {
        return 0;
      }
    }
    }''', 'got 84', need_reverse=False)
  @needs_dylink
  def test_dylink_class(self):
    """A class declared in a shared header and defined in the side module
    can be constructed from the main module."""
    self.dylink_test(header=r'''
      #include <stdio.h>
      struct Class {
        Class(const char *name);
      };
    ''', main=r'''
      #include "header.h"
      int main() {
        Class c("main");
        return 0;
      }
    ''', side=r'''
      #include "header.h"
      Class::Class(const char *name) { printf("new %s\n", name); }
    ''', expected=['new main\n'])

  @needs_dylink
  def test_dylink_global_var(self):
    """A global variable defined in the side module is visible to main."""
    self.dylink_test(main=r'''
      #include <stdio.h>
      extern int x;
      int main() {
        printf("extern is %d.\n", x);
        return 0;
      }
    ''', side=r'''
      int x = 123;
    ''', expected=['extern is 123.\n'], force_c=True)

  @needs_dylink
  def test_dylink_global_var_modded(self):
    """Like test_dylink_global_var, but the side module's static constructor
    mutates the global before main observes it."""
    self.dylink_test(main=r'''
      #include <stdio.h>
      extern int x;
      int main() {
        printf("extern is %d.\n", x);
        return 0;
      }
    ''', side=r'''
      int x = 123;
      struct Initter {
        Initter() { x = 456; }
      };
      Initter initter;
    ''', expected=['extern is 456.\n'])

  @needs_dylink
  def test_dylink_stdlib(self):
    """libc/libm usage (malloc, strcpy, pow) from the side module."""
    self.dylink_test(header=r'''
      #include <math.h>
      #include <stdlib.h>
      #include <string.h>
      char *side(const char *data);
      double pow_two(double x);
    ''', main=r'''
      #include <stdio.h>
      #include "header.h"
      int main() {
        char *temp = side("hello through side\n");
        char *ret = (char*)malloc(strlen(temp)+1);
        strcpy(ret, temp);
        temp[1] = 'x';
        puts(ret);
        printf("pow_two: %d.\n", (int)pow_two(5.9));
        return 0;
      }
    ''', side=r'''
      #include "header.h"
      char *side(const char *data) {
        char *ret = (char*)malloc(strlen(data)+1);
        strcpy(ret, data);
        return ret;
      }
      double pow_two(double x) {
        return pow(2, x);
      }
    ''', expected=['hello through side\n\npow_two: 59.'], force_c=True)
  @needs_dylink
  def test_dylink_jslib(self):
    """A --js-library function is callable from both main and side modules."""
    create_file('lib.js', r'''
      mergeInto(LibraryManager.library, {
        test_lib_func: function(x) {
          return x + 17.2;
        }
      });
    ''')
    self.dylink_test(header=r'''
      extern double test_lib_func(int input);
    ''', main=r'''
      #include <stdio.h>
      #include "header.h"
      extern double sidey();
      int main2() { return 11; }
      int main() {
        int input = sidey();
        double temp = test_lib_func(input);
        printf("other says %.2f\n", temp);
        printf("more: %.5f, %d\n", temp, input);
        return 0;
      }
    ''', side=r'''
      #include <stdio.h>
      #include "header.h"
      extern int main2();
      double sidey() {
        int temp = main2();
        printf("main2 sed: %d\n", temp);
        printf("main2 sed: %u, %c\n", temp, temp/2);
        return test_lib_func(temp);
      }
    ''', expected='other says 45.2', main_emcc_args=['--js-library', 'lib.js'], force_c=True)

  @needs_dylink
  def test_dylink_many_postsets(self):
    """Stress a large (1234-entry) static function-pointer table, which
    generates many relocation postsets."""
    NUM = 1234
    self.dylink_test(header=r'''
      #include <stdio.h>
      typedef void (*voidfunc)();
      static void simple() {
        printf("simple.\n");
      }
      static volatile voidfunc funcs[''' + str(NUM) + '] = { ' + ','.join(['simple'] * NUM) + r''' };
      static void test() {
        volatile int i = ''' + str(NUM - 1) + r''';
        funcs[i]();
        i = 0;
        funcs[i]();
      }
      extern void more();
    ''', main=r'''
      #include "header.h"
      int main() {
        test();
        more();
        return 0;
      }
    ''', side=r'''
      #include "header.h"
      void more() {
        test();
      }
    ''', expected=['simple.\nsimple.\nsimple.\nsimple.\n'], force_c=True)

  @needs_dylink
  def test_dylink_postsets_chunking(self):
    """Many globals with local initializers plus one cross-module
    initializer, exercising postset chunking."""
    self.dylink_test(header=r'''
      extern int global_var;
    ''', main=r'''
      #include <stdio.h>
      #include "header.h"

      // prepare 99 global variable with local initializer
      static int p = 1;
      #define P(x) __attribute__((used)) int *padding##x = &p;
      P(01) P(02) P(03) P(04) P(05) P(06) P(07) P(08) P(09) P(10)
      P(11) P(12) P(13) P(14) P(15) P(16) P(17) P(18) P(19) P(20)
      P(21) P(22) P(23) P(24) P(25) P(26) P(27) P(28) P(29) P(30)
      P(31) P(32) P(33) P(34) P(35) P(36) P(37) P(38) P(39) P(40)
      P(41) P(42) P(43) P(44) P(45) P(46) P(47) P(48) P(49) P(50)
      P(51) P(52) P(53) P(54) P(55) P(56) P(57) P(58) P(59) P(60)
      P(61) P(62) P(63) P(64) P(65) P(66) P(67) P(68) P(69) P(70)
      P(71) P(72) P(73) P(74) P(75) P(76) P(77) P(78) P(79) P(80)
      P(81) P(82) P(83) P(84) P(85) P(86) P(87) P(88) P(89) P(90)
      P(91) P(92) P(93) P(94) P(95) P(96) P(97) P(98) P(99)

      // prepare global variable with global initializer
      int *ptr = &global_var;

      int main(int argc, char *argv[]) {
        printf("%d\n", *ptr);
      }
    ''', side=r'''
      #include "header.h"
      int global_var = 12345;
    ''', expected=['12345\n'], force_c=True)
  @needs_dylink
  @parameterized({
    'libcxx': ('libc,libc++,libmalloc,libc++abi',),
    'all': ('1',),
    'missing': ('libc,libmalloc', False, False, False),
    'missing_assertions': ('libc,libmalloc', False, False, True),
  })
  def test_dylink_syslibs(self, syslibs, expect_pass=True, need_reverse=True, assertions=True):
    """Force various system-library sets (via EMCC_FORCE_STDLIBS) for a
    dylink build and check that builds missing libc++ fail with (or without,
    depending on ASSERTIONS) the explanatory message.

    syslibs: value for EMCC_FORCE_STDLIBS.
    expect_pass: whether the build/run is expected to succeed.
    """
    # One each of: all libs, forced-list-with-libc++, and two failing
    # configurations missing libc++ (with/without assertions).
    self.emcc_args.append('-Wno-deprecated')
    self.set_setting('WARN_ON_UNDEFINED_SYMBOLS', 0)
    if assertions is not None:
      self.set_setting('ASSERTIONS', int(assertions))

    passed = True
    try:
      with env_modify({'EMCC_FORCE_STDLIBS': syslibs, 'EMCC_ONLY_FORCED_STDLIBS': '1'}):
        self.dylink_test(main=r'''
          void side();
          int main() {
            side();
            return 0;
          }
        ''', side=r'''
          #include <iostream>
          void side() { std::cout << "cout hello from side\n"; }
        ''', expected=['cout hello from side\n'], need_reverse=need_reverse, main_module=1)
    except Exception as e:
      if expect_pass:
        raise
      print('(seeing expected fail)')
      passed = False
      assertion = 'build the MAIN_MODULE with EMCC_FORCE_STDLIBS=1 in the environment'
      # With assertions the failure message should point at the fix.
      if self.get_setting('ASSERTIONS'):
        self.assertContained(assertion, str(e))
      else:
        self.assertNotContained(assertion, str(e))
    assert passed == expect_pass, ['saw', passed, 'but expected', expect_pass]
  @needs_dylink
  @with_env_modify({'EMCC_FORCE_STDLIBS': 'libc++'})
  def test_dylink_iostream(self):
    """std::string and std::cout shared between main and side modules."""
    self.dylink_test(header=r'''
      #include <iostream>
      #include <string>
      std::string side();
    ''', main=r'''
      #include "header.h"
      int main() {
        std::cout << "hello from main " << side() << std::endl;
        return 0;
      }
    ''', side=r'''
      #include "header.h"
      std::string side() { return "and hello from side"; }
    ''', expected=['hello from main and hello from side\n'])

  @needs_dylink
  def test_dylink_dynamic_cast(self): # issue 3465
    """dynamic_cast<> on an object whose vtable/typeinfo come from the
    side module (regression test for issue 3465)."""
    self.dylink_test(header=r'''
      class Base {
      public:
          virtual void printName();
      };

      class Derived : public Base {
      public:
          void printName();
      };
    ''', main=r'''
      #include "header.h"
      #include <iostream>

      using namespace std;

      int main() {
        cout << "starting main" << endl;

        Base *base = new Base();
        Base *derived = new Derived();
        base->printName();
        derived->printName();

        if (dynamic_cast<Derived*>(derived)) {
          cout << "OK" << endl;
        } else {
          cout << "KO" << endl;
        }

        return 0;
      }
    ''', side=r'''
      #include "header.h"
      #include <iostream>

      using namespace std;

      void Base::printName() {
          cout << "Base" << endl;
      }

      void Derived::printName() {
          cout << "Derived" << endl;
      }
    ''', expected=['starting main\nBase\nDerived\nOK'])

  @with_both_exception_handling
  @needs_dylink
  def test_dylink_raii_exceptions(self):
    """An indirect call with a signature that exists only in the side module,
    made through an invoke (because a destructor must run on unwind)."""
    self.dylink_test(main=r'''
      #include <stdio.h>
      extern int side();
      int main() {
        printf("from side: %d.\n", side());
      }
    ''', side=r'''
      #include <stdio.h>
      typedef int (*ifdi)(float, double, int);
      int func_with_special_sig(float a, double b, int c) {
        printf("special %f %f %d\n", a, b, c);
        return 1337;
      }
      struct DestructorCaller {
        ~DestructorCaller() { printf("destroy\n"); }
      };
      int side() {
        // d has a destructor that must be called on function
        // exit, which means an invoke will be used for the
        // indirect call here - and the signature of that call
        // is special and not present in the main module, so
        // it must be generated for the side module.
        DestructorCaller d;
        volatile ifdi p = func_with_special_sig;
        return p(2.18281, 3.14159, 42);
      }
    ''', expected=['special 2.182810 3.141590 42\ndestroy\nfrom side: 1337.\n'])
  @needs_dylink
  @disabled('https://github.com/emscripten-core/emscripten/issues/12815')
  def test_dylink_hyper_dupe(self):
    """Load a third module at runtime via loadDynamicLibrary and check that
    duplicate symbols fail to override (with a warning)."""
    self.set_setting('INITIAL_MEMORY', '64mb')
    self.set_setting('ASSERTIONS', 2)

    # test hyper-dynamic linking, and test duplicate warnings
    create_file('third.cpp', r'''
      #include <stdio.h>
      int sidef() { return 36; }
      int sideg = 49;
      int bsidef() { return 536; }
      extern void only_in_second_1(int x);
      extern int second_to_third;
      int third_to_second = 1337;
      void only_in_third_0() {
        // note we access our own globals directly, so
        // it doesn't matter that overriding failed
        printf("only_in_third_0: %d, %d, %d\n", sidef(), sideg, second_to_third);
        only_in_second_1(2112);
      }
      void only_in_third_1(int x) {
        printf("only_in_third_1: %d, %d, %d, %d\n", sidef(), sideg, second_to_third, x);
      }
    ''')
    if self.is_wasm():
      libname = 'third.wasm'
    else:
      libname = 'third.js'
    self.run_process([EMCC, 'third.cpp', '-o', libname, '-s', 'SIDE_MODULE'] + self.get_emcc_args())

    self.dylink_test(main=r'''
      #include <stdio.h>
      #include <emscripten.h>
      extern int sidef();
      extern int sideg;
      extern int bsidef();
      extern int bsideg;
      extern void only_in_second_0();
      extern void only_in_third_0();
      int main() {
        EM_ASM({
          loadDynamicLibrary('%s'); // hyper-dynamic! works at least for functions (and consts not used in same block)
        });
        printf("sidef: %%d, sideg: %%d.\n", sidef(), sideg);
        printf("bsidef: %%d.\n", bsidef());
        only_in_second_0();
        only_in_third_0();
      }
    ''' % libname,
                     side=r'''
      #include <stdio.h>
      int sidef() { return 10; } // third will try to override these, but fail!
      int sideg = 20;
      extern void only_in_third_1(int x);
      int second_to_third = 500;
      extern int third_to_second;
      void only_in_second_0() {
        printf("only_in_second_0: %d, %d, %d\n", sidef(), sideg, third_to_second);
        only_in_third_1(1221);
      }
      void only_in_second_1(int x) {
        printf("only_in_second_1: %d, %d, %d, %d\n", sidef(), sideg, third_to_second, x);
      }
    ''',
                     expected=['sidef: 10, sideg: 20.\nbsidef: 536.\nonly_in_second_0: 10, 20, 1337\nonly_in_third_1: 36, 49, 500, 1221\nonly_in_third_0: 36, 49, 500\nonly_in_second_1: 10, 20, 1337, 2112\n'],
                     need_reverse=not self.is_wasm())

    print('check warnings')
    full = self.run_js('src.js')
    self.assertContained("warning: symbol '_sideg' from '%s' already exists" % libname, full)
  @needs_dylink
  def test_dylink_load_compiled_side_module(self):
    """Load a side module from a pre-compiled WebAssembly.Module via
    loadWebAssemblyModule + mergeLibSymbols (NO_AUTOLOAD_DYLIBS path)."""
    self.set_setting('FORCE_FILESYSTEM')
    self.emcc_args.append('-lnodefs.js')
    self.set_setting('INITIAL_MEMORY', '64mb')
    # This test loads the module at runtime with loadWebAssemblyModule
    # so we need to disable the automatic loading.
    self.set_setting('NO_AUTOLOAD_DYLIBS')

    self.dylink_test(main=r'''
      #include <stdio.h>
      #include <emscripten.h>
      extern int sidef();
      int main() {
        EM_ASM({
          FS.mkdir('/working');
          FS.mount(NODEFS,{ root: '.' }, '/working');
          var libData = FS.readFile('/working/liblib.so', {encoding: 'binary'});
          if (!(libData instanceof Uint8Array)) {
            libData = new Uint8Array(libData);
          }
          var compiledModule = new WebAssembly.Module(libData);
          var sideExports = loadWebAssemblyModule(compiledModule, {loadAsync: false, nodelete: true});
          mergeLibSymbols(sideExports, 'liblib.so');
        });
        printf("sidef: %d.\n", sidef());
      }
    ''',
                     side=r'''
      #include <stdio.h>
      int sidef() { return 10; }
    ''',
                     expected=['sidef: 10'],
                     need_reverse=not self.is_wasm())

  @needs_dylink
  def test_dylink_dso_needed(self):
    """Delegate to the shared _test_dylink_dso_needed driver, supplying a
    runner that appends a trivial main() calling test_main()."""
    def do_run(src, expected_output, emcc_args=[]):
      create_file('main.c', src + 'int main() { return test_main(); }')
      self.do_runf('main.c', expected_output, emcc_args=emcc_args)
    self._test_dylink_dso_needed(do_run)

  @needs_dylink
  def test_dylink_dot_a(self):
    """Side module built from a mix of a static archive (.a) and an object
    file (.o)."""
    # side module is an ar archive, we can't flip this test.
    create_file('third.c', 'int sidef() { return 36; }')
    create_file('fourth.c', 'int sideg() { return 17; }')

    self.run_process([EMCC, '-fPIC', '-c', 'third.c', '-o', 'third.o'] + self.get_emcc_args())
    self.run_process([EMCC, '-fPIC', '-c', 'fourth.c', '-o', 'fourth.o'] + self.get_emcc_args())
    self.run_process([EMAR, 'rc', 'libfourth.a', 'fourth.o'])

    self.dylink_test(main=r'''
      #include <stdio.h>
      #include <emscripten.h>
      int sidef();
      int sideg();
      int main() {
        printf("sidef: %d, sideg: %d.\n", sidef(), sideg());
      }
    ''',
                     side=['libfourth.a', 'third.o'],
                     expected=['sidef: 36, sideg: 17.\n'], force_c=True)
  @needs_dylink
  def test_dylink_spaghetti(self):
    """Mutually-referencing globals and static constructors across modules;
    either init order is accepted."""
    self.dylink_test(main=r'''
      #include <stdio.h>
      int main_x = 72;
      extern int side_x;
      int adjust = side_x + 10;
      int *ptr = &side_x;
      struct Class {
        Class() {
          printf("main init sees %d, %d, %d.\n", adjust, *ptr, main_x);
        }
      };
      Class cm;
      int main() {
        printf("main main sees %d, %d, %d.\n", adjust, *ptr, main_x);
        return 0;
      }
    ''', side=r'''
      #include <stdio.h>
      extern int main_x;
      int side_x = -534;
      int adjust2 = main_x + 10;
      int *ptr2 = &main_x;
      struct SideClass {
        SideClass() {
          printf("side init sees %d, %d, %d.\n", adjust2, *ptr2, side_x);
        }
      };
      SideClass cs;
    ''', expected=['''\
side init sees 82, 72, -534.
main init sees -524, -534, 72.
main main sees -524, -534, 72.
''', '''\
main init sees -524, -534, 72.
side init sees 82, 72, -534.
main main sees -524, -534, 72.
'''])

  @needs_make('mingw32-make')
  @needs_dylink
  def test_dylink_zlib(self):
    """Link the real zlib as a side module and run its example program."""
    self.emcc_args += ['-Wno-shift-negative-value', '-I' + test_file('third_party/zlib')]
    self.set_setting('RELOCATABLE')
    zlib_archive = self.get_zlib_library()
    self.dylink_test(main=read_file(test_file('third_party/zlib/example.c')),
                     side=zlib_archive,
                     expected=read_file(test_file('core/test_zlib.out')),
                     force_c=True)

  # @needs_dylink
  # def test_dylink_bullet(self):
  #   self.emcc_args += ['-I' + test_file('bullet/src')]
  #   side = self.get_bullet_library(self, True)
  #   self.dylink_test(main=read_file(test_file('bullet/Demos/HelloWorld/HelloWorld.cpp')),
  #                    side=side,
  #                    expected=[read_file(test_file('bullet/output.txt')), # different roundings
  #                              read_file(test_file('bullet/output2.txt')),
  #                              read_file(test_file('bullet/output3.txt'))])
  @needs_dylink
  def test_dylink_rtti(self):
    """RTTI/typeinfo identity across modules.

    Verify that objects created in one module can be dynamic_cast<> correctly
    in the other module.  Each module will define its own copy of certain
    COMDAT symbols such as each class's typeinfo, but at runtime they should
    both use the same one.
    """
    self.set_setting('LLD_REPORT_UNDEFINED')
    header = '''
    #include <cstddef>

    class Foo {
    public:
      virtual ~Foo() {}
    };

    class Bar : public Foo {
    public:
      virtual ~Bar() {}
    };

    bool is_bar(Foo* foo);
    '''

    main = '''
    #include <stdio.h>
    #include "header.h"

    int main() {
      Bar bar;
      if (!is_bar(&bar)) {
        puts("failure");
        return 1;
      }
      puts("success");
      return 0;
    }
    '''

    side = '''
    #include "header.h"

    bool is_bar(Foo* foo) {
      return dynamic_cast<Bar*>(foo) != nullptr;
    }
    '''

    self.dylink_test(main=main,
                     side=side,
                     header=header,
                     expected='success')

  @needs_dylink
  def test_dylink_argv_argc(self):
    """Module.arguments are forwarded to main() in a dylink build."""
    # Verify that argc and argv can be sent to main when main is in a side module
    self.emcc_args += ['--extern-pre-js', 'pre.js']

    create_file('pre.js', '''
      var Module = { arguments: ['hello', 'world!'] }
    ''')

    self.dylink_test(
      '', # main module is empty.
      r'''
      #include <stdio.h>
      int main(int argc, char const *argv[]) {
        printf("%d ", argc);
        for (int i=1; i<argc; i++) printf("%s ", argv[i]);
        printf("\n");
        return 0;
      }
      ''',
      expected='3 hello world!',
      need_reverse=False)
  @needs_dylink
  def test_dylink_weak(self):
    """Weak symbol resolution between main and side modules (file-based)."""
    self.dylink_testf(test_file('core/test_dylink_weak.c'), need_reverse=False)

  @node_pthreads
  @needs_dylink
  def test_dylink_tls(self):
    """Thread-local storage in a side module (pthreads build)."""
    self.emcc_args.append('-Wno-experimental')
    self.dylink_testf(test_file('core/test_dylink_tls.c'),
                      need_reverse=False)

  @node_pthreads
  @needs_dylink
  def test_dylink_tls_export(self):
    """Exporting TLS variables from a side module (pthreads build)."""
    self.emcc_args.append('-Wno-experimental')
    self.dylink_testf(test_file('core/test_dylink_tls_export.c'),
                      need_reverse=False)
  def test_random(self):
    """random()/srandom() produce the expected deterministic sequence."""
    src = r'''#include <stdlib.h>
              #include <stdio.h>

              int main()
              {
                  srandom(0xdeadbeef);
                  printf("%ld\n", random());
              }
              '''
    self.do_run(src, '956867869')

  def test_rand(self):
    """rand()/srand()/rand_r() determinism plus a bit-distribution sanity
    check on the low 30 bits."""
    src = r'''#include <stdlib.h>
              #include <stdio.h>
              #include <assert.h>
              int main()
              {
                  // we need RAND_MAX to be a bitmask (power of 2 minus 1). this assertions guarantees
                  // if RAND_MAX changes the test failure will focus attention on that issue here.
                  assert(RAND_MAX == 0x7fffffff);

                  srand(0xdeadbeef);
                  for(int i = 0; i < 10; ++i)
                      printf("%d\n", rand());

                  unsigned int seed = 0xdeadbeef;
                  for(int i = 0; i < 10; ++i)
                      printf("%d\n", rand_r(&seed));

                  bool haveEvenAndOdd = true;
                  for(int i = 1; i <= 30; ++i)
                  {
                      int mask = 1 << i;
                      if (mask > RAND_MAX) break;
                      bool haveEven = false;
                      bool haveOdd = false;
                      for(int j = 0; j < 1000 && (!haveEven || !haveOdd); ++j)
                      {
                          if ((rand() & mask) == 0)
                              haveEven = true;
                          else
                              haveOdd = true;
                      }
                      haveEvenAndOdd = haveEvenAndOdd && haveEven && haveOdd;
                  }
                  if (haveEvenAndOdd)
                      printf("Have even and odd!\n");

                  return 0;
              }
              '''
    expected = '''490242850
2074599277
1480056542
1912638067
931112055
2110392489
2053422194
1614832492
216117595
174823244
760368382
602359081
1121118963
1291018924
1608306807
352705809
958258461
1182561381
114276303
1481323674
Have even and odd!
'''
    self.do_run(src, expected)
  def test_strtod(self):
    """strtod() conversion (file-based core test)."""
    self.do_core_test('test_strtod.c')

  def test_strtold(self):
    """strtold() conversion (file-based core test)."""
    self.do_core_test('test_strtold.c')

  def test_strtok(self):
    """strtok() tokenizing (file-based core test)."""
    self.do_core_test('test_strtok.c')

  def test_strtol(self):
    """strtol() conversion (file-based core test)."""
    self.do_core_test('test_strtol.c')

  def test_transtrcase(self):
    """toupper()/tolower() string case transforms (file-based core test)."""
    self.do_core_test('test_transtrcase.c')

  @no_wasm2js('very slow to compile')
  @is_slow_test
  def test_printf(self):
    """Exhaustive printf format coverage (large, slow test)."""
    # needs to flush stdio streams
    self.emcc_args.append('-Wno-format')
    self.set_setting('EXIT_RUNTIME')
    self.do_run_in_out_file_test('printf/test.c')

  def test_printf_2(self):
    """Additional printf cases (file-based core test)."""
    self.do_core_test('test_printf_2.c')

  def test_printf_float(self):
    """printf float formatting (file-based test)."""
    self.do_run_in_out_file_test('printf/test_float.c')

  def test_printf_octal(self):
    """printf octal formatting (file-based test)."""
    self.do_run_in_out_file_test('printf/test_octal.c')

  def test_printf_macros(self):
    """inttypes.h PRI* format macros (file-based core test)."""
    self.do_core_test('test_printf_macros.c')

  def test_vprintf(self):
    """vprintf() (file-based core test)."""
    self.do_core_test('test_vprintf.c')

  def test_vsnprintf(self):
    """vsnprintf() (file-based core test)."""
    self.do_core_test('test_vsnprintf.c')

  def test_printf_more(self):
    """More printf cases (file-based core test)."""
    self.do_core_test('test_printf_more.c')

  def test_perrar(self):
    """perror() output (file-based core test)."""
    self.do_core_test('test_perrar.c')

  def test_atoX(self):
    """atoi()/atol()/atof() family (file-based core test)."""
    self.do_core_test('test_atoX.c')

  def test_strstr(self):
    """strstr() (file-based core test)."""
    self.do_core_test('test_strstr.c')

  def test_fnmatch(self):
    """fnmatch() pattern matching (file-based core test)."""
    self.do_core_test('test_fnmatch.cpp')

  def test_sscanf(self):
    """Basic sscanf() (file-based core test)."""
    self.do_core_test('test_sscanf.c')
def test_sscanf_2(self):
for ftype in ['float', 'double']:
src = r'''
#include <stdio.h>
int main(){
char strval1[] = "1.2345678901";
char strval2[] = "1.23456789e5";
char strval3[] = "1.23456789E5";
char strval4[] = "1.2345678e-5";
char strval5[] = "1.2345678E-5";
double dblval = 1.2345678901;
double tstval;
sscanf(strval1, "%lf", &tstval);
if(dblval != tstval) printf("FAIL: Values are not equal: %lf %lf\n", dblval, tstval);
else printf("Pass: %lf %lf\n", tstval, dblval);
sscanf(strval2, "%lf", &tstval);
dblval = 123456.789;
if(dblval != tstval) printf("FAIL: Values are not equal: %lf %lf\n", dblval, tstval);
else printf("Pass: %lf %lf\n", tstval, dblval);
sscanf(strval3, "%lf", &tstval);
dblval = 123456.789;
if(dblval != tstval) printf("FAIL: Values are not equal: %lf %lf\n", dblval, tstval);
else printf("Pass: %lf %lf\n", tstval, dblval);
sscanf(strval4, "%lf", &tstval);
dblval = 0.000012345678;
if(dblval != tstval) printf("FAIL: Values are not equal: %lf %lf\n", dblval, tstval);
else printf("Pass: %lf %lf\n", tstval, dblval);
sscanf(strval5, "%lf", &tstval);
dblval = 0.000012345678;
if(dblval != tstval) printf("FAIL: Values are not equal: %lf %lf\n", dblval, tstval);
else printf("Pass: %lf %lf\n", tstval, dblval);
return 0;
}
'''
if ftype == 'float':
self.do_run(src.replace('%lf', '%f').replace('double', 'float'), '''Pass: 1.234568 1.234568
Pass: 123456.789062 123456.789062
Pass: 123456.789062 123456.789062
Pass: 0.000012 0.000012
Pass: 0.000012 0.000012''')
else:
self.do_run(src, '''Pass: 1.234568 1.234568
Pass: 123456.789000 123456.789000
Pass: 123456.789000 123456.789000
Pass: 0.000012 0.000012
Pass: 0.000012 0.000012''')
  def test_sscanf_n(self):
    """sscanf %n specifier (file-based core test)."""
    self.do_core_test('test_sscanf_n.c')

  def test_sscanf_whitespace(self):
    """sscanf whitespace handling (file-based core test)."""
    # needs to flush stdio streams
    self.set_setting('EXIT_RUNTIME')
    self.do_core_test('test_sscanf_whitespace.c')

  def test_sscanf_other_whitespace(self):
    """sscanf with tabs/newlines as whitespace (file-based core test)."""
    # use i16s in printf
    self.set_setting('SAFE_HEAP', 0)
    # needs to flush stdio streams
    self.set_setting('EXIT_RUNTIME')
    self.do_core_test('test_sscanf_other_whitespace.c')

  def test_sscanf_3(self):
    """sscanf variant 3 (file-based core test)."""
    self.do_core_test('test_sscanf_3.c')

  def test_sscanf_4(self):
    """sscanf variant 4 (file-based core test)."""
    self.do_core_test('test_sscanf_4.c')

  def test_sscanf_5(self):
    """sscanf variant 5 (file-based core test)."""
    self.do_core_test('test_sscanf_5.c')

  def test_sscanf_6(self):
    """sscanf variant 6 (file-based core test)."""
    self.do_core_test('test_sscanf_6.c')

  def test_sscanf_skip(self):
    """sscanf %* assignment suppression (file-based core test)."""
    self.do_core_test('test_sscanf_skip.c')

  def test_sscanf_caps(self):
    """sscanf uppercase specifiers (file-based core test)."""
    self.do_core_test('test_sscanf_caps.c')

  def test_sscanf_hex(self):
    """sscanf hex input (file-based core test)."""
    self.do_core_test('test_sscanf_hex.cpp')

  def test_sscanf_float(self):
    """sscanf float input (file-based core test)."""
    self.do_core_test('test_sscanf_float.c')

  def test_langinfo(self):
    """nl_langinfo() (file-based core test)."""
    self.do_core_test('test_langinfo.c')
  def test_files(self):
    """Filesystem smoke test: lazy files, FS_createDataFile, custom stdin.

    Exercises FS setup from a --pre-js, plus (when applicable) the memory
    init file.
    """
    # NOTE(review): spidermonkey is banned here; the original rationale is
    # not visible in this chunk.
    self.banned_js_engines = [config.SPIDERMONKEY_ENGINE]
    if self.maybe_closure():
      self.emcc_args = [x for x in self.emcc_args if x != '-g'] # ensure we test --closure 1 --memory-init-file 1 (-g would disable closure)
    elif '-O3' in self.emcc_args and not self.is_wasm():
      print('closure 2')
      self.emcc_args += ['--closure', '2'] # Use closure 2 here for some additional coverage
      return self.skipTest('TODO: currently skipped because CI runs out of memory running Closure in this test!')

    self.emcc_args += ['--pre-js', 'pre.js']
    self.set_setting('FORCE_FILESYSTEM')

    print('base', self.emcc_args)

    create_file('pre.js', '''
/** @suppress{checkTypes}*/
Module = {
  'noFSInit': true,
  'preRun': function() {
    FS.createLazyFile('/', 'test.file', 'test.file', true, false);
    // Test FS_* exporting
    Module['FS_createDataFile']('/', 'somefile.binary', [100, 200, 50, 25, 10, 77, 123], true, false, false);  // 200 becomes -56, since signed chars are used in memory
    var test_files_input = 'hi there!';
    var test_files_input_index = 0;
    FS.init(function() {
      return test_files_input.charCodeAt(test_files_input_index++) || null;
    });
  }
};
''')

    create_file('test.file', 'some data')

    mem_file = 'files.js.mem'
    try_delete(mem_file)

    def clean(out):
      # Strip engine/toolchain noise lines before comparing output.
      return '\n'.join([line for line in out.split('\n') if 'binaryen' not in line and 'wasm' not in line and 'so not running' not in line])

    self.do_runf(test_file('files.cpp'), ('size: 7\ndata: 100,-56,50,25,10,77,123\nloop: 100 -56 50 25 10 77 123 \ninput:hi there!\ntexto\n$\n5 : 10,30,20,11,88\nother=some data.\nseeked=me da.\nseeked=ata.\nseeked=ta.\nfscanfed: 10 - hello\n5 bytes to dev/null: 5\nok.\ntexte\n', 'size: 7\ndata: 100,-56,50,25,10,77,123\nloop: 100 -56 50 25 10 77 123 \ninput:hi there!\ntexto\ntexte\n$\n5 : 10,30,20,11,88\nother=some data.\nseeked=me da.\nseeked=ata.\nseeked=ta.\nfscanfed: 10 - hello\n5 bytes to dev/null: 5\nok.\n'),
                 output_nicerizer=clean)
    if self.uses_memory_init_file():
      self.assertExists(mem_file)
def test_files_m(self):
# Test for Module.stdin etc.
# needs to flush stdio streams
self.set_setting('EXIT_RUNTIME')
create_file('pre.js', '''
Module = {
data: [10, 20, 40, 30],
stdin: function() { return Module.data.pop() || null },
stdout: function(x) { out('got: ' + x) }
};
''')
self.emcc_args += ['--pre-js', 'pre.js']
src = r'''
#include <stdio.h>
#include <unistd.h>
int main () {
char c;
fprintf(stderr, "isatty? %d,%d,%d\n", isatty(fileno(stdin)), isatty(fileno(stdout)), isatty(fileno(stderr)));
while ((c = fgetc(stdin)) != EOF) {
putc(c+5, stdout);
}
return 0;
}
'''
# Strip warnings/toolchain noise; multiple accepted outputs cover differing
# stdout/stderr interleaving across engines.
def clean(out):
return '\n'.join(l for l in out.splitlines() if 'warning' not in l and 'binaryen' not in l)
self.do_run(src, ('got: 35\ngot: 45\ngot: 25\ngot: 15\nisatty? 0,0,1\n', 'got: 35\ngot: 45\ngot: 25\ngot: 15\nisatty? 0,0,1', 'isatty? 0,0,1\ngot: 35\ngot: 45\ngot: 25\ngot: 15'), output_nicerizer=clean)
# Filesystem mount / directory-listing / stream-peek coverage.
def test_mount(self):
self.set_setting('FORCE_FILESYSTEM')
self.do_runf(test_file('fs/test_mount.c'), 'success')
def test_getdents64(self):
self.do_runf(test_file('fs/test_getdents64.cpp'), '..')
def test_getdents64_special_cases(self):
# https://bugs.chromium.org/p/v8/issues/detail?id=6881
self.banned_js_engines = [config.V8_ENGINE]
self.do_run_in_out_file_test('fs/test_getdents64_special_cases.cpp')
def test_getcwd_with_non_ascii_name(self):
# https://bugs.chromium.org/p/v8/issues/detail?id=6881
self.banned_js_engines = [config.V8_ENGINE]
self.do_run_in_out_file_test('fs/test_getcwd_with_non_ascii_name.cpp')
def test_proc_self_fd(self):
self.do_run_in_out_file_test('fs/test_proc_self_fd.c')
def test_fwrite_0(self):
self.do_core_test('test_fwrite_0.c')
def test_fgetc_ungetc(self):
# Runs the same test against both the in-memory FS and NODEFS.
print('TODO: update this test once the musl ungetc-on-EOF-stream bug is fixed upstream and reaches us')
orig_compiler_opts = self.emcc_args.copy()
for fs in ['MEMFS', 'NODEFS']:
print(fs)
self.emcc_args = orig_compiler_opts + ['-D' + fs]
if fs == 'NODEFS':
self.emcc_args += ['-lnodefs.js']
self.do_runf(test_file('stdio/test_fgetc_ungetc.c'), 'success', js_engines=[config.NODE_JS])
def test_fgetc_unsigned(self):
# Byte 0xea (234) must not be sign-extended by fgetc.
src = r'''
#include <stdio.h>
int main() {
FILE *file = fopen("file_with_byte_234.txt", "rb");
int c = fgetc(file);
printf("*%d\n", c);
}
'''
create_file('file_with_byte_234.txt', b'\xea', binary=True)
self.emcc_args += ['--embed-file', 'file_with_byte_234.txt']
self.do_run(src, '*234\n')
def test_fgets_eol(self):
  # fgets() on a file containing only a newline must return the "\n" line,
  # never an empty string (buf[0] == '\0' would indicate the bug).
  src = r'''
#include <stdio.h>
char buf[32];
int main()
{
const char *r = "SUCCESS";
FILE *f = fopen("eol.txt", "r");
while (fgets(buf, 32, f) != NULL) {
if (buf[0] == '\0') {
r = "FAIL";
break;
}
}
printf("%s\n", r);
fclose(f);
return 0;
}
'''
  # Use the create_file helper (as the sibling tests do, e.g.
  # test_fgetc_unsigned) instead of a bare open().write(), which left the
  # file handle unclosed.
  create_file('eol.txt', b'\n', binary=True)
  self.emcc_args += ['--embed-file', 'eol.txt']
  self.do_run(src, 'SUCCESS\n')
def test_fscanf(self):
# fscanf of three floats, including the no-leading-zero form "-.1".
create_file('three_numbers.txt', '-1 0.1 -.1')
src = r'''
#include <stdio.h>
#include <assert.h>
#include <float.h>
int main()
{
float x = FLT_MAX, y = FLT_MAX, z = FLT_MAX;
FILE* fp = fopen("three_numbers.txt", "r");
if (fp) {
int match = fscanf(fp, " %f %f %f ", &x, &y, &z);
printf("match = %d\n", match);
printf("x = %0.1f, y = %0.1f, z = %0.1f\n", x, y, z);
} else {
printf("failed to open three_numbers.txt\n");
}
return 0;
}
'''
self.emcc_args += ['--embed-file', 'three_numbers.txt']
self.do_run(src, 'match = 3\nx = -1.0, y = 0.1, z = -0.1\n')
def test_fscanf_2(self):
# fscanf with a compound "%d/%d/%d ..." format, 12 conversions total.
create_file('a.txt', '''1/2/3 4/5/6 7/8/9
''')
self.emcc_args += ['--embed-file', 'a.txt']
self.do_run(r'''#include <cstdio>
#include <iostream>
using namespace std;
int
main( int argv, char ** argc ) {
cout << "fscanf test" << endl;
FILE * file;
file = fopen("a.txt", "rb");
int vertexIndex[4];
int normalIndex[4];
int uvIndex[4];
int matches = fscanf(file, "%d/%d/%d %d/%d/%d %d/%d/%d %d/%d/%d\n", &vertexIndex[0], &uvIndex[0], &normalIndex[0], &vertexIndex [1], &uvIndex[1], &normalIndex[1], &vertexIndex[2], &uvIndex[2], &normalIndex[2], &vertexIndex[3], &uvIndex[3], &normalIndex[3]);
cout << matches << endl;
return 0;
}
''', 'fscanf test\n9\n')
def test_fileno(self):
# First fd after stdin/stdout/stderr should be 3.
create_file('empty.txt', '')
src = r'''
#include <stdio.h>
#include <unistd.h>
int main()
{
FILE* fp = fopen("empty.txt", "r");
if (fp) {
printf("%d\n", fileno(fp));
} else {
printf("failed to open empty.txt\n");
}
return 0;
}
'''
self.emcc_args += ['--embed-file', 'empty.txt']
self.do_run(src, '3\n')
def test_readdir(self):
self.do_run_in_out_file_test('dirent/test_readdir.c')
def test_readdir_empty(self):
self.do_run_in_out_file_test('dirent/test_readdir_empty.c')
def test_stat(self):
self.do_runf(test_file('stat/test_stat.c'), 'success')
# Also check the generated JS passes strict-mode verification.
self.verify_in_strict_mode('test_stat.js')
def test_fstatat(self):
self.do_runf(test_file('stat/test_fstatat.c'), 'success')
def test_stat_chmod(self):
self.do_runf(test_file('stat/test_chmod.c'), 'success')
def test_stat_mknod(self):
self.do_runf(test_file('stat/test_mknod.c'), 'success')
def test_fcntl(self):
self.add_pre_run("FS.createDataFile('/', 'test', 'abcdef', true, true, false);")
self.do_run_in_out_file_test('fcntl/test_fcntl.c')
def test_fcntl_open(self):
self.do_run_in_out_file_test('fcntl/test_fcntl_open.c')
@also_with_wasm_bigint
def test_fcntl_misc(self):
self.add_pre_run("FS.createDataFile('/', 'test', 'abcdef', true, true, false);")
self.do_run_in_out_file_test('fcntl/test_fcntl_misc.c')
def test_poll(self):
# Register a dummy device plus a data file so poll() has fds to inspect.
self.add_pre_run('''
var dummy_device = FS.makedev(64, 0);
FS.registerDevice(dummy_device, {});
FS.createDataFile('/', 'file', 'abcdef', true, true, false);
FS.mkdev('/device', dummy_device);
''')
self.do_core_test('test_poll.c')
def test_statvfs(self):
self.do_core_test('test_statvfs.c')
def test_libgen(self):
self.do_core_test('test_libgen.c')
def test_utime(self):
# utime() support; the C test prints 'success' on pass.
self.do_runf(test_file('utime/test_utime.c'), 'success')
def test_futimens(self):
  # futimens() support; the C test prints 'success' on pass.
  # Use the single-path form of test_file() for consistency with
  # test_utime above ('utime/test_utime.c').
  self.do_runf(test_file('utime/test_futimens.c'), 'success')
@no_minimal_runtime('MINIMAL_RUNTIME does not have getValue() and setValue() (TODO add it to a JS library function to get it in)')
def test_utf(self):
self.banned_js_engines = [config.SPIDERMONKEY_ENGINE] # only node handles utf well
self.set_setting('EXPORTED_FUNCTIONS', ['_main', '_malloc', '_free'])
self.set_setting('EXPORTED_RUNTIME_METHODS', ['getValue', 'setValue', 'UTF8ToString', 'stringToUTF8'])
self.do_core_test('test_utf.c')
def test_utf32(self):
# MINIMAL_RUNTIME pulls the helpers in via DEFAULT_LIBRARY_FUNCS_TO_INCLUDE
# instead of EXPORTED_RUNTIME_METHODS.
if self.get_setting('MINIMAL_RUNTIME'):
self.set_setting('DEFAULT_LIBRARY_FUNCS_TO_INCLUDE', ['$UTF32ToString', '$stringToUTF32', '$lengthBytesUTF32'])
else:
self.set_setting('EXPORTED_RUNTIME_METHODS', ['UTF32ToString', 'stringToUTF32', 'lengthBytesUTF32'])
self.do_runf(test_file('utf32.cpp'), 'OK.')
# Also run with 2-byte wchar_t.
self.do_runf(test_file('utf32.cpp'), 'OK.', args=['-fshort-wchar'])
def test_utf16(self):
self.do_runf(test_file('core/test_utf16.cpp'), 'OK.')
def test_utf8(self):
if self.get_setting('MINIMAL_RUNTIME'):
self.set_setting('DEFAULT_LIBRARY_FUNCS_TO_INCLUDE', ['$AsciiToString', '$stringToAscii', '$writeAsciiToMemory'])
else:
self.set_setting('EXPORTED_RUNTIME_METHODS',
['UTF8ToString', 'stringToUTF8', 'AsciiToString', 'stringToAscii'])
self.do_runf(test_file('utf8.cpp'), 'OK.')
@also_with_wasm_bigint
def test_utf8_textdecoder(self):
self.set_setting('EXPORTED_RUNTIME_METHODS', ['UTF8ToString', 'stringToUTF8'])
self.emcc_args += ['--embed-file', test_file('utf8_corpus.txt') + '@/utf8_corpus.txt']
self.do_runf(test_file('benchmark_utf8.cpp'), 'OK.')
# Test that invalid character in UTF8 does not cause decoding to crash.
def test_utf8_invalid(self):
self.set_setting('EXPORTED_RUNTIME_METHODS', ['UTF8ToString', 'stringToUTF8'])
# Run once with the JS decoder and once with TextDecoder enabled.
for decoder_mode in [[], ['-s', 'TEXTDECODER']]:
self.emcc_args += decoder_mode
print(str(decoder_mode))
self.do_runf(test_file('utf8_invalid.cpp'), 'OK.')
# Test that invalid character in UTF8 does not cause decoding to crash.
@no_asan('TODO: ASan support in minimal runtime')
def test_minimal_runtime_utf8_invalid(self):
self.set_setting('EXPORTED_RUNTIME_METHODS', ['UTF8ToString', 'stringToUTF8'])
self.set_setting('MINIMAL_RUNTIME')
for decoder_mode in [False, True]:
self.set_setting('TEXTDECODER', decoder_mode)
print(str(decoder_mode))
self.do_runf(test_file('utf8_invalid.cpp'), 'OK.')
def test_utf16_textdecoder(self):
self.set_setting('EXPORTED_RUNTIME_METHODS', ['UTF16ToString', 'stringToUTF16', 'lengthBytesUTF16'])
self.emcc_args += ['--embed-file', test_file('utf16_corpus.txt') + '@/utf16_corpus.txt']
self.do_runf(test_file('benchmark_utf16.cpp'), 'OK.')
def test_wprintf(self):
self.do_core_test('test_wprintf.cpp')
def test_write_stdout_fileno(self):
# Run both with and without the filesystem compiled in.
self.do_core_test('test_write_stdout_fileno.c')
self.do_core_test('test_write_stdout_fileno.c', args=['-s', 'FILESYSTEM=0'])
def test_direct_string_constant_usage(self):
# needs to flush stdio streams
self.set_setting('EXIT_RUNTIME')
self.do_core_test('test_direct_string_constant_usage.cpp')
def test_std_cout_new(self):
self.do_core_test('test_std_cout_new.cpp')
def test_std_function_incomplete_return(self):
self.do_core_test('test_std_function_incomplete_return.cpp')
def test_istream(self):
# needs to flush stdio streams
self.set_setting('EXIT_RUNTIME')
for linkable in [0]: # , 1]:
print(linkable)
# regression check for issue #273
self.set_setting('LINKABLE', linkable)
self.do_core_test('test_istream.cpp')
def test_fs_base(self):
self.set_setting('DEFAULT_LIBRARY_FUNCS_TO_INCLUDE', ['$FS'])
self.uses_es6 = True
# The JS pre-run script drives the FS; the C program itself is a no-op.
self.add_pre_run(read_file(test_file('filesystem/src.js')))
src = 'int main() {return 0;}\n'
expected = read_file(test_file('filesystem/output.txt'))
self.do_run(src, expected)
@also_with_noderawfs
@is_slow_test
def test_fs_nodefs_rw(self):
# TODO(sbc): This test exposes in issue in the way we run closure compiler and
# causes it to generate non-ES5 output.
# Remove this line once we fix: https://github.com/emscripten-core/emscripten/issues/12628
self.uses_es6 = True
self.emcc_args += ['-lnodefs.js']
self.set_setting('SYSCALL_DEBUG')
self.do_runf(test_file('fs/test_nodefs_rw.c'), 'success')
# Re-run under closure when available.
if self.maybe_closure():
self.do_runf(test_file('fs/test_nodefs_rw.c'), 'success')
@also_with_noderawfs
def test_fs_nodefs_cloexec(self):
self.emcc_args += ['-lnodefs.js']
self.do_runf(test_file('fs/test_nodefs_cloexec.c'), 'success')
def test_fs_nodefs_home(self):
self.set_setting('FORCE_FILESYSTEM')
self.emcc_args += ['-lnodefs.js']
self.do_runf(test_file('fs/test_nodefs_home.c'), 'success', js_engines=[config.NODE_JS])
def test_fs_nodefs_nofollow(self):
self.emcc_args += ['-lnodefs.js']
self.do_runf(test_file('fs/test_nodefs_nofollow.c'), 'success', js_engines=[config.NODE_JS])
def test_fs_trackingdelegate(self):
self.set_setting('FS_DEBUG')
self.do_run_in_out_file_test('fs/test_trackingdelegate.c')
@also_with_noderawfs
def test_fs_writeFile(self):
self.set_setting('DISABLE_EXCEPTION_CATCHING') # see issue 2334
self.do_run_in_out_file_test('fs/test_writeFile.cpp')
def test_fs_write(self):
self.do_run_in_out_file_test('fs/test_write.cpp')
@also_with_noderawfs
def test_fs_emptyPath(self):
self.do_run_in_out_file_test('fs/test_emptyPath.c')
@also_with_noderawfs
def test_fs_append(self):
self.do_runf(test_file('fs/test_append.c'), 'success')
def test_fs_mmap(self):
self.uses_es6 = True
# Run the mmap test against each filesystem backend in turn.
orig_compiler_opts = self.emcc_args.copy()
for fs in ['MEMFS', 'NODEFS', 'NODERAWFS']:
self.emcc_args = orig_compiler_opts + ['-D' + fs]
if fs == 'NODEFS':
self.emcc_args += ['-lnodefs.js']
if fs == 'NODERAWFS':
self.emcc_args += ['-lnodefs.js', '-lnoderawfs.js']
self.do_run_in_out_file_test('fs/test_mmap.c')
@parameterized({
'': [],
'minimal_runtime': ['-s', 'MINIMAL_RUNTIME=1']
})
def test_fs_no_main(self, *args):
# library_fs.js uses hooks to enable ignoring of permissions up until ATMAINs are run. This
# test verifies that they work correctly, even in programs without a main function.
create_file('pre.js', '''
Module['preRun'] = function() {
assert(FS.ignorePermissions, "ignorePermissions not set during preRun");
}
Module['onRuntimeInitialized'] = function() {
assert(!FS.ignorePermissions, "ignorePermissions not unset during onRuntimeInitialized");
assert(_foo() == 42);
}
''')
self.set_setting('EXPORTED_FUNCTIONS', '_foo')
self.set_setting('FORCE_FILESYSTEM')
self.emcc_args += ['--pre-js', 'pre.js'] + list(args)
self.do_run('int foo() { return 42; }', '', force_c=True)
@also_with_noderawfs
def test_fs_errorstack(self):
# Enables strict mode, which may catch some strict-mode-only errors
# so that users can safely work with strict JavaScript if enabled.
create_file('pre.js', '"use strict";')
self.emcc_args += ['--pre-js', 'pre.js']
self.set_setting('FORCE_FILESYSTEM')
self.set_setting('ASSERTIONS')
self.do_run(r'''
#include <emscripten.h>
#include <iostream>
int main(void) {
std::cout << "hello world\n"; // should work with strict mode
EM_ASM(
try {
FS.readFile('/dummy.txt');
} catch (err) {
err.stack = err.stack; // should be writable
throw err;
}
);
return 0;
}
''', 'at Object.readFile', assert_returncode=NON_ZERO) # engines has different error stack format
@also_with_noderawfs
def test_fs_llseek(self):
self.set_setting('FORCE_FILESYSTEM')
self.do_runf(test_file('fs/test_llseek.c'), 'success')
def test_fs_64bit(self):
self.do_runf(test_file('fs/test_64bit.c'), 'success')
def test_sigalrm(self):
# Run twice: once with the default runtime and once with EXIT_RUNTIME.
self.do_runf(test_file('test_sigalrm.c'), 'Received alarm!')
self.set_setting('EXIT_RUNTIME')
self.do_runf(test_file('test_sigalrm.c'), 'Received alarm!')
def test_signals(self):
# NOTE(review): unlike sibling tests, this passes a full test_file() path
# to do_core_test rather than a bare filename — confirm this is intended.
self.do_core_test(test_file('test_signals.c'))
@no_windows('https://github.com/emscripten-core/emscripten/issues/8882')
def test_unistd_access(self):
self.uses_es6 = True
# Run against MEMFS and NODEFS, then (non-Windows) against NODERAWFS.
orig_compiler_opts = self.emcc_args.copy()
for fs in ['MEMFS', 'NODEFS']:
self.emcc_args = orig_compiler_opts + ['-D' + fs]
if fs == 'NODEFS':
self.emcc_args += ['-lnodefs.js']
self.do_run_in_out_file_test('unistd/access.c', js_engines=[config.NODE_JS])
# Node.js fs.chmod is nearly no-op on Windows
if not WINDOWS:
self.emcc_args = orig_compiler_opts
self.set_setting('NODERAWFS')
self.do_run_in_out_file_test('unistd/access.c', js_engines=[config.NODE_JS])
# unistd API coverage: each test runs one unistd/* test file.
def test_unistd_curdir(self):
self.uses_es6 = True
self.do_run_in_out_file_test('unistd/curdir.c')
@also_with_noderawfs
def test_unistd_close(self):
self.do_run_in_out_file_test('unistd/close.c')
def test_unistd_confstr(self):
self.do_run_in_out_file_test('unistd/confstr.c')
def test_unistd_ttyname(self):
self.do_runf(test_file('unistd/ttyname.c'), 'success')
@also_with_noderawfs
def test_unistd_pipe(self):
self.do_runf(test_file('unistd/pipe.c'), 'success')
@also_with_noderawfs
def test_unistd_dup(self):
self.do_run_in_out_file_test('unistd/dup.c')
def test_unistd_pathconf(self):
self.do_run_in_out_file_test('unistd/pathconf.c')
def test_unistd_truncate(self):
self.uses_es6 = True
orig_compiler_opts = self.emcc_args.copy()
for fs in ['MEMFS', 'NODEFS']:
self.emcc_args = orig_compiler_opts + ['-D' + fs]
if fs == 'NODEFS':
self.emcc_args += ['-lnodefs.js']
self.do_run_in_out_file_test('unistd/truncate.c', js_engines=[config.NODE_JS])
@no_windows("Windows throws EPERM rather than EACCES or EINVAL")
@unittest.skipIf(WINDOWS or os.geteuid() == 0, "Root access invalidates this test by being able to write on readonly files")
def test_unistd_truncate_noderawfs(self):
self.uses_es6 = True
self.set_setting('NODERAWFS')
self.maybe_closure()
self.do_run_in_out_file_test('unistd/truncate.c', js_engines=[config.NODE_JS])
def test_unistd_swab(self):
self.do_run_in_out_file_test('unistd/swab.c')
def test_unistd_isatty(self):
self.do_runf(test_file('unistd/isatty.c'), 'success')
@also_with_standalone_wasm()
def test_unistd_sysconf(self):
self.do_run_in_out_file_test('unistd/sysconf.c')
@no_asan('ASan alters memory layout')
def test_unistd_sysconf_phys_pages(self):
filename = test_file('unistd/sysconf_phys_pages.c')
# Expected page count depends on whether memory growth is enabled.
if self.get_setting('ALLOW_MEMORY_GROWTH'):
expected = (2 * 1024 * 1024 * 1024) // webassembly.WASM_PAGE_SIZE
else:
expected = 16 * 1024 * 1024 // webassembly.WASM_PAGE_SIZE
self.do_runf(filename, str(expected) + ', errno: 0')
def test_unistd_login(self):
self.do_run_in_out_file_test('unistd/login.c')
@no_windows('https://github.com/emscripten-core/emscripten/issues/8882')
def test_unistd_unlink(self):
self.clear()
orig_compiler_opts = self.emcc_args.copy()
for fs in ['MEMFS', 'NODEFS']:
self.emcc_args = orig_compiler_opts + ['-D' + fs]
# symlinks on node.js on non-linux behave differently (e.g. on Windows they require administrative privileges)
# so skip testing those bits on that combination.
if fs == 'NODEFS':
self.emcc_args += ['-lnodefs.js']
if WINDOWS:
self.emcc_args += ['-DNO_SYMLINK=1']
if MACOS:
continue
self.do_runf(test_file('unistd/unlink.c'), 'success', js_engines=[config.NODE_JS])
# Several differences/bugs on non-linux including https://github.com/nodejs/node/issues/18014
if not WINDOWS and not MACOS:
self.emcc_args = orig_compiler_opts + ['-DNODERAWFS']
# 0 if root user
if os.geteuid() == 0:
self.emcc_args += ['-DSKIP_ACCESS_TESTS']
self.set_setting('NODERAWFS')
self.do_runf(test_file('unistd/unlink.c'), 'success', js_engines=[config.NODE_JS])
def test_unistd_links(self):
self.clear()
orig_compiler_opts = self.emcc_args.copy()
for fs in ['MEMFS', 'NODEFS']:
if WINDOWS and fs == 'NODEFS':
print('Skipping NODEFS part of this test for test_unistd_links on Windows, since it would require administrative privileges.', file=sys.stderr)
# Also, other detected discrepancies if you do end up running this test on NODEFS:
# test expects /, but Windows gives \ as path slashes.
# Calling readlink() on a non-link gives error 22 EINVAL on Unix, but simply error 0 OK on Windows.
continue
self.emcc_args = orig_compiler_opts + ['-D' + fs]
if fs == 'NODEFS':
self.emcc_args += ['-lnodefs.js']
self.do_run_in_out_file_test('unistd/links.c', js_engines=[config.NODE_JS])
@no_windows('Skipping NODEFS test, since it would require administrative privileges.')
def test_unistd_symlink_on_nodefs(self):
# Also, other detected discrepancies if you do end up running this test on NODEFS:
# test expects /, but Windows gives \ as path slashes.
# Calling readlink() on a non-link gives error 22 EINVAL on Unix, but simply error 0 OK on Windows.
self.emcc_args += ['-lnodefs.js']
self.do_run_in_out_file_test('unistd/symlink_on_nodefs.c', js_engines=[config.NODE_JS])
def test_unistd_sleep(self):
self.do_run_in_out_file_test('unistd/sleep.c')
@also_with_wasm_bigint
def test_unistd_io(self):
self.set_setting('DEFAULT_LIBRARY_FUNCS_TO_INCLUDE', ['$ERRNO_CODES'])
# Run against both MEMFS and NODEFS, clearing state between runs.
orig_compiler_opts = self.emcc_args.copy()
for fs in ['MEMFS', 'NODEFS']:
self.clear()
self.emcc_args = orig_compiler_opts + ['-D' + fs]
if fs == 'NODEFS':
self.emcc_args += ['-lnodefs.js']
self.do_run_in_out_file_test('unistd/io.c')
@no_windows('https://github.com/emscripten-core/emscripten/issues/8882')
def test_unistd_misc(self):
orig_compiler_opts = self.emcc_args.copy()
for fs in ['MEMFS', 'NODEFS']:
self.emcc_args = orig_compiler_opts + ['-D' + fs]
if fs == 'NODEFS':
self.emcc_args += ['-lnodefs.js']
self.do_run_in_out_file_test('unistd/misc.c', js_engines=[config.NODE_JS], interleaved_output=False)
def test_unistd_fstatfs(self):
self.do_run_in_out_file_test('unistd/fstatfs.c')
# i64s in the API, which we'd need to legalize for JS, so in standalone mode
@also_with_standalone_wasm(wasm2c=True)
def test_posixtime(self):
self.banned_js_engines = [config.V8_ENGINE]
self.do_core_test('test_posixtime.c')
def test_uname(self):
self.do_core_test('test_uname.c')
def test_unary_literal(self):
self.do_core_test('test_unary_literal.cpp')
def test_env(self):
# Expected output is templated on the program path; accept either the
# real script path or the './this.program' fallback.
expected = read_file(test_file('env/output.txt'))
self.do_runf(test_file('env/src.c'), [
expected.replace('{{{ THIS_PROGRAM }}}', self.in_dir('src.js')).replace('\\', '/'),
expected.replace('{{{ THIS_PROGRAM }}}', './this.program')
])
def test_environ(self):
expected = read_file(test_file('env/output-mini.txt'))
self.do_runf(test_file('env/src-mini.c'), [
expected.replace('{{{ THIS_PROGRAM }}}', self.in_dir('src-mini.js')).replace('\\', '/'),
expected.replace('{{{ THIS_PROGRAM }}}', './this.program')
])
def test_systypes(self):
self.do_core_test('test_systypes.c')
def test_stddef(self):
# Run as both C++ and C.
self.do_core_test('test_stddef.cpp')
self.do_core_test('test_stddef.cpp', force_c=True)
def test_getloadavg(self):
self.do_core_test('test_getloadavg.c')
def test_nl_types(self):
self.do_core_test('test_nl_types.c')
def test_799(self):
src = test_file('799.cpp')
self.do_runf(src, '''Set PORT family: 0, port: 3979
Get PORT family: 0
PORT: 3979
''')
def test_ctype(self):
self.do_core_test('test_ctype.c')
def test_strcasecmp(self):
self.do_core_test('test_strcasecmp.c')
def test_atomic(self):
self.do_core_test('test_atomic.c')
def test_atomic_cxx(self):
self.emcc_args += ['-DIS_64BIT_LOCK_FREE=1']
self.do_core_test('test_atomic_cxx.cpp')
def test_phiundef(self):
self.do_core_test('test_phiundef.c')
def test_netinet_in(self):
self.do_run_in_out_file_test('netinet/in.cpp')
@needs_dylink
def test_main_module_static_align(self):
# Shared modules are incompatible with memory growth.
if self.get_setting('ALLOW_MEMORY_GROWTH'):
self.skipTest('no shared modules with memory growth')
self.set_setting('MAIN_MODULE')
self.do_core_test('test_main_module_static_align.cpp')
def test_iostream_and_determinism(self):
# Build the same program several times (with randomized delays between
# builds) and check the emitted JS/wasm is byte-identical every time.
create_file('src.cpp', '''
#include <iostream>
int main()
{
std::cout << "hello world" << std::endl << 77 << "." << std::endl;
return 0;
}
''')
num = 5
for i in range(num):
print('(iteration %d)' % i)
time.sleep(random.random() / (10 * num))
self.do_runf('src.cpp', 'hello world\n77.\n')
if os.path.exists('src.js.previous'):
self.assertBinaryEqual('src.js', 'src.js.previous')
shutil.copy2('src.js', 'src.js.previous')
# Same determinism check for the wasm binary, when one is produced.
if self.is_wasm() and not self.get_setting('WASM2JS'):
if os.path.exists('src.wasm.previous'):
self.assertBinaryEqual('src.wasm', 'src.wasm.previous')
shutil.copy2('src.wasm', 'src.wasm.previous')
def test_stdvec(self):
self.do_core_test('test_stdvec.cpp')
def test_random_device(self):
self.maybe_closure()
self.do_core_test('test_random_device.cpp')
def test_reinterpreted_ptrs(self):
self.do_core_test('test_reinterpreted_ptrs.cpp')
def test_js_libraries(self):
# C externs resolved from two separate --js-library files.
create_file('main.cpp', '''
#include <stdio.h>
extern "C" {
extern void printey();
extern int calcey(int x, int y);
}
int main() {
printey();
printf("*%d*\\n", calcey(10, 22));
return 0;
}
''')
create_file('mylib1.js', '''
mergeInto(LibraryManager.library, {
printey: function() {
out('hello from lib!');
}
});
''')
create_file('mylib2.js', '''
mergeInto(LibraryManager.library, {
calcey: function(x, y) {
return x + y;
}
});
''')
self.emcc_args += ['--js-library', 'mylib1.js', '--js-library', 'mylib2.js']
self.do_runf('main.cpp', 'hello from lib!\n*32*\n')
def test_unicode_js_library(self):
create_file('main.cpp', '''
#include <stdio.h>
extern "C" {
extern void printey();
}
int main() {
printey();
return 0;
}
''')
self.emcc_args += ['--js-library', test_file('unicode_library.js')]
self.do_runf('main.cpp', u'Unicode snowman \u2603 says hello!')
def test_funcptr_import_type(self):
self.emcc_args += ['--js-library', test_file('core/test_funcptr_import_type.js')]
self.do_core_test('test_funcptr_import_type.cpp')
@no_asan('ASan does not work with EXPORT_ALL')
def test_constglobalunion(self):
self.set_setting('EXPORT_ALL')
# A const union initialized with another global's address must survive
# constant folding; the program prints 1 if the address is non-null.
self.do_run(r'''
#include <stdio.h>
struct one_const {
long a;
};
struct two_consts {
long a;
long b;
};
union some_consts {
struct one_const one;
struct two_consts two;
};
union some_consts my_consts = {{
1
}};
struct one_const addr_of_my_consts = {
(long)(&my_consts)
};
int main(void) {
printf("%li\n", (long)!!addr_of_my_consts.a);
return 0;
}
''', '1')
def test_fannkuch(self):
  # NOTE(review): the `def` header for this method was missing in the source
  # as received; restored here to match the orphaned body below.
  # (n, expected Pfannkuchen(n)) pairs for n = 1..8.
  results = [(1, 0), (2, 1), (3, 2), (4, 4), (5, 7), (6, 10), (7, 16), (8, 22)]
  self.build(test_file('fannkuch.cpp'))
  for i, j in results:
    print(i, j)
    # Build once, then rerun the same JS for each input size.
    self.do_run('fannkuch.js', 'Pfannkuchen(%d) = %d.' % (i, j), args=[str(i)], no_build=True)
def test_raytrace(self):
self.skipTest('Relies on double value rounding, extremely sensitive')
src = read_file(test_file('raytrace.cpp')).replace('double', 'float')
output = read_file(test_file('raytrace.ppm'))
self.do_run(src, output, args=['3', '16'])
def test_fasta(self):
# (input size, expected output with newlines shown as '*') pairs.
results = [(1, '''GG*ctt**tgagc*'''),
(20, '''GGCCGGGCGCGGTGGCTCACGCCTGTAATCCCAGCACTTT*cttBtatcatatgctaKggNcataaaSatgtaaaDcDRtBggDtctttataattcBgtcg**tacgtgtagcctagtgtttgtgttgcgttatagtctatttgtggacacagtatggtcaaa**tgacgtcttttgatctgacggcgttaacaaagatactctg*'''),
(50, '''GGCCGGGCGCGGTGGCTCACGCCTGTAATCCCAGCACTTTGGGAGGCCGAGGCGGGCGGA*TCACCTGAGGTCAGGAGTTCGAGACCAGCCTGGCCAACAT*cttBtatcatatgctaKggNcataaaSatgtaaaDcDRtBggDtctttataattcBgtcg**tactDtDagcctatttSVHtHttKtgtHMaSattgWaHKHttttagacatWatgtRgaaa**NtactMcSMtYtcMgRtacttctWBacgaa**agatactctgggcaacacacatacttctctcatgttgtttcttcggacctttcataacct**ttcctggcacatggttagctgcacatcacaggattgtaagggtctagtggttcagtgagc**ggaatatcattcgtcggtggtgttaatctatctcggtgtagcttataaatgcatccgtaa**gaatattatgtttatttgtcggtacgttcatggtagtggtgtcgccgatttagacgtaaa**ggcatgtatg*''')]
old = self.emcc_args
orig_src = read_file(test_file('fasta.cpp'))
# Run the benchmark in both float and double variants.
def test(extra_args):
self.emcc_args = old + extra_args
for t in ['float', 'double']:
print(t)
src = orig_src.replace('double', t)
with open('fasta.cpp', 'w') as f:
f.write(src)
self.build('fasta.cpp')
for arg, output in results:
self.do_run('fasta.js', output, args=[str(arg)], output_nicerizer=lambda x: x.replace('\n', '*'), no_build=True)
shutil.copyfile('fasta.js', '%s.js' % t)
test([])
@needs_non_trapping_float_to_int
def test_fasta_nontrapping(self):
# Same as test_fasta but with nontrapping float-to-int conversions.
self.emcc_args += ['-mnontrapping-fptoint']
self.test_fasta()
def test_whets(self):
self.do_runf(test_file('whets.cpp'), 'Single Precision C Whetstone Benchmark')
@require_v8
@no_asan('depends on the specifics of memory size, which for asan we are forced to increase')
def test_dlmalloc_inline(self):
# needed with typed arrays
self.set_setting('INITIAL_MEMORY', '128mb')
# Compile dlmalloc.c together with the test driver in one unit.
src = read_file(path_from_root('system/lib/dlmalloc.c')) + '\n\n\n' + read_file(test_file('dlmalloc_test.c'))
self.do_run(src, '*1,0*', args=['200', '1'], force_c=True)
self.do_run('src.js', '*400,0*', args=['400', '400'], force_c=True, no_build=True)
@require_v8
@no_asan('depends on the specifics of memory size, which for asan we are forced to increase')
def test_dlmalloc(self):
self.set_setting('INITIAL_MEMORY', '128mb')
self.do_runf(test_file('dlmalloc_test.c'), '*1,0*', args=['200', '1'])
self.do_run('dlmalloc_test.js', '*400,0*', args=['400', '400'], no_build=True)
# Linked version: build via emcc directly when no extra args are in play.
if self.emcc_args == []:
try_delete('src.js')
self.run_process([EMCC, test_file('dlmalloc_test.c'), '-s', 'INITIAL_MEMORY=128MB', '-o', 'src.js'], stdout=PIPE, stderr=self.stderr_redirect)
self.do_run(None, '*1,0*', ['200', '1'], no_build=True)
self.do_run(None, '*400,0*', ['400', '400'], no_build=True)
# Also test each new/delete pairing.
src = read_file(test_file('new.cpp'))
for new, delete in [
('malloc(100)', 'free'),
('new char[100]', 'delete[]'),
('new Structy', 'delete'),
('new int', 'delete'),
('new Structy[10]', 'delete[]'),
]:
self.do_run(src.replace('{{{ NEW }}}', new).replace('{{{ DELETE }}}', delete), '*1,0*')
@no_asan('the memory size limit here is too small for asan')
def test_dlmalloc_large(self):
self.emcc_args += ['-s', 'ABORTING_MALLOC=0', '-s', 'ALLOW_MEMORY_GROWTH=1', '-s', 'MAXIMUM_MEMORY=128MB']
self.do_runf(test_file('dlmalloc_test_large.c'), '0 0 0 1')
@no_asan('asan also changes malloc, and that ends up linking in new twice')
def test_dlmalloc_partial(self):
# Overrides global operator new while using dlmalloc underneath.
src = read_file(test_file('new.cpp')).replace('{{{ NEW }}}', 'new int').replace('{{{ DELETE }}}', 'delete') + '''
#include <new>
void* operator new(size_t size) {
printf("new %zu!\\n", size);
return malloc(size);
}
'''
self.do_run(src, 'new 4!\n*1,0*')
@no_asan('asan also changes malloc, and that ends up linking in new twice')
def test_dlmalloc_partial_2(self):
if 'SAFE_HEAP' in str(self.emcc_args):
self.skipTest('we do unsafe stuff here')
self.do_core_test('test_dlmalloc_partial_2.c', assert_returncode=NON_ZERO)
def test_libcxx(self):
self.do_runf(test_file('hashtest.cpp'),
'june -> 30\nPrevious (in alphabetical order) is july\nNext (in alphabetical order) is march')
self.do_run('''
#include <set>
#include <stdio.h>
int main() {
std::set<int> fetchOriginatorNums;
fetchOriginatorNums.insert(171);
printf("hello world\\n");
return 0;
}
''', 'hello world')
def test_typeid(self):
self.do_core_test('test_typeid.cpp')
def test_static_variable(self):
# needs atexit
self.set_setting('EXIT_RUNTIME')
self.do_core_test('test_static_variable.cpp')
def test_fakestat(self):
self.do_core_test('test_fakestat.c')
def test_mmap(self):
# ASan manages INITIAL_MEMORY itself, so only bump it otherwise.
if '-fsanitize=address' not in self.emcc_args:
self.set_setting('INITIAL_MEMORY', '128mb')
self.set_setting('EXIT_RUNTIME')
self.do_core_test('test_mmap.c')
def test_mmap_file(self):
for extra_args in [[]]:
self.emcc_args += ['--embed-file', 'data.dat'] + extra_args
# Build a file of exactly 9000 bytes to mmap across a page boundary.
x = 'data from the file........'
s = ''
while len(s) < 9000:
if len(s) + len(x) < 9000:
s += x
continue
s += '.'
assert len(s) == 9000
create_file('data.dat', s)
self.do_runf(test_file('mmap_file.c'), '*\n' + s[0:20] + '\n' + s[4096:4096 + 20] + '\n*\n')
@no_lsan('Test code contains memory leaks')
def test_cubescript(self):
# Legacy C++03 third-party code; silence its known warnings.
self.emcc_args += ['-std=c++03', '-Wno-dynamic-class-memaccess']
self.maybe_closure()
self.emcc_args += ['-I', test_file('third_party/cubescript')]
if '-fsanitize=address' in self.emcc_args:
self.emcc_args += ['--pre-js', test_file('asan-no-leak.js')]
def test():
src = test_file('third_party/cubescript/command.cpp')
self.do_runf(src, '*\nTemp is 33\n9\n5\nhello, everyone\n*')
test()
# Run again with ASYNCIFY enabled.
print('asyncify')
self.set_setting('ASYNCIFY')
test()
@needs_dylink
def test_relocatable_void_function(self):
self.set_setting('RELOCATABLE')
self.do_core_test('test_relocatable_void_function.c')
@wasm_simd
def test_wasm_intrinsics_simd(self):
def run():
self.do_runf(test_file('test_wasm_intrinsics_simd.c'), 'Success!')
self.emcc_args.append('-Wno-c++11-narrowing')
self.emcc_args.extend(['-Wpedantic', '-Werror', '-Wall', '-xc++'])
run()
self.emcc_args.append('-funsigned-char')
run()
@wasm_simd
def test_neon_wasm_simd(self):
self.emcc_args.append('-Wno-c++11-narrowing')
self.emcc_args.append('-mfpu=neon')
self.emcc_args.append('-msimd128')
self.do_runf(test_file('neon/test_neon_wasm_simd.cpp'), 'Success!')
@wasm_simd
@requires_native_clang
@no_safe_heap('has unaligned 64-bit operations in wasm')
def test_sse1(self):
src = test_file('sse/test_sse1.cpp')
self.run_process([shared.CLANG_CXX, src, '-msse', '-o', 'test_sse1', '-D_CRT_SECURE_NO_WARNINGS=1'] + clang_native.get_clang_native_args(), stdout=PIPE)
native_result = self.run_process('./test_sse1', stdout=PIPE).stdout
self.emcc_args += ['-I' + test_file('sse'), '-msse']
self.maybe_closure()
self.do_runf(src, native_result)
@wasm_simd
@requires_native_clang
@no_safe_heap('has unaligned 64-bit operations in wasm')
@is_slow_test
def test_sse2(self):
src = test_file('sse/test_sse2.cpp')
self.run_process([shared.CLANG_CXX, src, '-msse2', '-Wno-argument-outside-range', '-o', 'test_sse2', '-D_CRT_SECURE_NO_WARNINGS=1'] + clang_native.get_clang_native_args(), stdout=PIPE)
native_result = self.run_process('./test_sse2', stdout=PIPE).stdout
self.emcc_args += ['-I' + test_file('sse'), '-msse2', '-Wno-argument-outside-range']
self.maybe_closure()
self.do_runf(src, native_result)
  @wasm_simd
  @requires_native_clang
  def test_sse3(self):
    """Build test_sse3.cpp natively with clang and under emcc with -msse3, and compare outputs."""
    src = test_file('sse/test_sse3.cpp')
    # Native reference build and run.
    self.run_process([shared.CLANG_CXX, src, '-msse3', '-Wno-argument-outside-range', '-o', 'test_sse3', '-D_CRT_SECURE_NO_WARNINGS=1'] + clang_native.get_clang_native_args(), stdout=PIPE)
    native_result = self.run_process('./test_sse3', stdout=PIPE).stdout
    # Emscripten build must reproduce the native output.
    self.emcc_args += ['-I' + test_file('sse'), '-msse3', '-Wno-argument-outside-range']
    self.maybe_closure()
    self.do_runf(src, native_result)
  @wasm_simd
  @requires_native_clang
  def test_ssse3(self):
    """Build test_ssse3.cpp natively with clang and under emcc with -mssse3, and compare outputs."""
    src = test_file('sse/test_ssse3.cpp')
    # Native reference build and run.
    self.run_process([shared.CLANG_CXX, src, '-mssse3', '-Wno-argument-outside-range', '-o', 'test_ssse3', '-D_CRT_SECURE_NO_WARNINGS=1'] + clang_native.get_clang_native_args(), stdout=PIPE)
    native_result = self.run_process('./test_ssse3', stdout=PIPE).stdout
    # Emscripten build must reproduce the native output.
    self.emcc_args += ['-I' + test_file('sse'), '-mssse3', '-Wno-argument-outside-range']
    self.maybe_closure()
    self.do_runf(src, native_result)
@wasm_simd
@requires_native_clang
@is_slow_test
def test_sse4_1(self):
src = test_file('sse/test_sse4_1.cpp')
if not self.is_optimizing() and '-fsanitize=address' in self.emcc_args:
ocess([shared.CLANG_CXX, src, '-msse4.1', '-Wno-argument-outside-range', '-o', 'test_sse4_1', '-D_CRT_SECURE_NO_WARNINGS=1'] + clang_native.get_clang_native_args(), stdout=PIPE)
native_result = self.run_process('./test_sse4_1', stdout=PIPE).stdout
self.emcc_args += ['-I' + test_file('sse'), '-msse4.1', '-Wno-argument-outside-range']
self.maybe_closure()
self.do_runf(src, native_result)
  @wasm_simd
  @requires_native_clang
  def test_sse4_2(self):
    """Build test_sse4_2.cpp natively with clang and under emcc with -msse4.2, and compare outputs."""
    src = test_file('sse/test_sse4_2.cpp')
    # Native reference build and run.
    self.run_process([shared.CLANG_CXX, src, '-msse4.2', '-Wno-argument-outside-range', '-o', 'test_sse4_2', '-D_CRT_SECURE_NO_WARNINGS=1'] + clang_native.get_clang_native_args(), stdout=PIPE)
    native_result = self.run_process('./test_sse4_2', stdout=PIPE).stdout
    # Emscripten build must reproduce the native output.
    self.emcc_args += ['-I' + test_file('sse'), '-msse4.2', '-Wno-argument-outside-range']
    self.maybe_closure()
    self.do_runf(src, native_result)
  @wasm_simd
  @requires_native_clang
  def test_avx(self):
    """Build test_avx.cpp natively with clang and under emcc with -mavx, and compare outputs."""
    src = test_file('sse/test_avx.cpp')
    # Native reference build and run.
    self.run_process([shared.CLANG_CXX, src, '-mavx', '-Wno-argument-outside-range', '-o', 'test_avx', '-D_CRT_SECURE_NO_WARNINGS=1'] + clang_native.get_clang_native_args(), stdout=PIPE)
    native_result = self.run_process('./test_avx', stdout=PIPE).stdout
    # Emscripten build must reproduce the native output.
    self.emcc_args += ['-I' + test_file('sse'), '-mavx', '-Wno-argument-outside-range']
    self.maybe_closure()
    self.do_runf(src, native_result)
  @wasm_simd
  def test_sse_diagnostics(self):
    """Check that compiling SSE code with -DWASM_SIMD_COMPAT_SLOW emits the slow-path diagnostic."""
    # The diagnostic is a warning; -Werror would turn it into a failure.
    self.emcc_args.remove('-Werror')
    src = test_file('sse/test_sse_diagnostic.cpp')
    p = self.run_process(
      [shared.EMXX, src, '-msse', '-DWASM_SIMD_COMPAT_SLOW'] + self.get_emcc_args(),
      stderr=PIPE)
    self.assertContained('Instruction emulated via slow path.', p.stderr)
  @requires_native_clang
  @wasm_relaxed_simd
  def test_relaxed_simd_implies_simd128(self):
    """Check that an SSE source builds under relaxed-simd without an explicit -msimd128."""
    src = test_file('sse/test_sse1.cpp')
    self.build(src, emcc_args=['-msse'])
  @no_asan('call stack exceeded on some versions of node')
  def test_gcc_unmangler(self):
    """Run libiberty's cp-demangle on a mangled C++ name and check the demangled output."""
    self.emcc_args += ['-I' + test_file('third_party/libiberty')]
    self.do_runf(test_file('third_party/libiberty/cp-demangle.c'), '*d_demangle(char const*, int, unsigned int*)*', args=['_ZL10d_demanglePKciPj'])
  @needs_make('make')
  def test_lua(self):
    """Build the bundled Lua interpreter and run an inline script through it."""
    self.emcc_args.remove('-Werror')
    libs = self.get_library('third_party/lua', [Path('src/lua.o'), Path('src/liblua.a')], make=['make', 'generic'], configure=None)
    self.do_run('',
                'hello lua world!\n17\n1\n2\n3\n4\n7',
                args=['-e', '''print("hello lua world!");print(17);for x = 1,4 do print(x) end;print(10-3)'''],
                libraries=libs,
                includes=[test_file('lua')],
                # collapse doubled blank lines so output matches across configs
                output_nicerizer=lambda output: output.replace('\n\n', '\n').replace('\n\n', '\n'))
  @no_asan('issues with freetype itself')
  @needs_make('configure script')
  @is_slow_test
  def test_freetype(self):
    """Render text with a freetype build and compare against reference outputs (incl. issue 324 cases)."""
    # Embed the TTF into the virtual FS before main() runs.
    self.add_pre_run("FS.createDataFile('/', 'font.ttf', %s, true, false, false);" % str(
      list(bytearray(read_binary(test_file('freetype/LiberationSansBold.ttf'))))
    ))
    shutil.copyfile(test_file('freetype/LiberationSansBold.ttf'), 'font.ttf')
    self.do_run_from_file(test_file('freetype/main.c'),
                          test_file('freetype/ref.txt'),
                          args=['font.ttf', 'test!', '150', '120', '25'],
                          libraries=self.get_freetype_library(),
                          includes=[test_file('third_party/freetype/include')])
    print('[issue 324]')
    self.do_run_from_file(test_file('freetype/main_2.c'),
                          test_file('freetype/ref_2.txt'),
                          args=['font.ttf', 'w', '32', '32', '25'],
                          libraries=self.get_freetype_library(),
                          includes=[test_file('third_party/freetype/include')])
    print('[issue 324 case 2]')
    self.do_run_from_file(test_file('freetype/main_3.c'),
                          test_file('freetype/ref_3.txt'),
                          args=['font.ttf', 'W', '32', '32', '0'],
                          libraries=self.get_freetype_library(),
                          includes=[test_file('third_party/freetype/include')])
    print('[issue 324 case 3]')
    # Re-run the already-built main_3.js with different arguments.
    self.do_run('main_3.js',
                read_file(test_file('freetype/ref_4.txt')),
                args=['font.ttf', 'ea', '40', '32', '0'],
                no_build=True)
@no_asan('local count too large for VMs')
@is_slow_test
def test_sqlite(self):
self.set_setting('DISABLE_EXCEPTION_CATCHING')
self.set_setting('EXPORTED_FUNCTIONS', ['_main', '_sqlite3_open', '_sqlite3_close', '_sqlite3_exec', '_sqlite3_free'])
if '-g' in self.emcc_args:
print("disabling inlining")
self.set_setting('INLINING_LIMIT')
elf.emcc_args += ['-Wno-implicit-int-float-conversion']
# array initialization; did you mean to separate the elements with a comma?"
self.emcc_args += ['-Wno-string-concatenation']
self.emcc_args += ['-Wno-unknown-warning-option']
self.emcc_args += ['-Wno-pointer-bool-conversion']
self.emcc_args += ['-I' + test_file('third_party/sqlite')]
src = '''
#define SQLITE_DISABLE_LFS
#define LONGDOUBLE_TYPE double
#define SQLITE_INT64_TYPE long long int
#define SQLITE_THREADSAFE 0
'''
src += read_file(test_file('third_party/sqlite/sqlite3.c'))
src += read_file(test_file('sqlite/benchmark.c'))
self.do_run(src,
read_file(test_file('sqlite/benchmark.txt')),
includes=[test_file('sqlite')],
force_c=True)
  @needs_make('mingw32-make')
  @is_slow_test
  @parameterized({
    'cmake': (True,),
    'configure': (False,)
  })
  def test_zlib(self, use_cmake):
    """Build zlib (via cmake or ./configure) and run its example against the reference output."""
    if WINDOWS and not use_cmake:
      self.skipTest("Windows cannot run configure sh scripts")
    self.maybe_closure()
    self.emcc_args.append('-Wno-shift-negative-value')
    if '-g' in self.emcc_args:
      self.emcc_args.append('-gsource-map')
    # Pick the build system under test.
    if use_cmake:
      make_args = []
      configure = ['cmake', '.']
    else:
      make_args = ['libz.a']
      configure = ['sh', './configure']
    self.do_run_from_file(
        test_file('third_party/zlib/example.c'),
        test_file('core/test_zlib.out'),
        libraries=self.get_library('third_party/zlib', 'libz.a', make_args=make_args, configure=configure),
        includes=[test_file('third_party/zlib'), 'building', 'zlib'])
  @needs_make('make')
  @is_slow_test
  @parameterized({
    'cmake': (True,),
    'autoconf': (False,)
  })
  def test_bullet(self, use_cmake):
    """Build the Bullet physics HelloWorld demo (cmake or autoconf) and match one of the reference outputs."""
    if WINDOWS and not use_cmake:
      self.skipTest("Windows cannot run configure sh scripts")
    # Silence warnings in the third-party code.
    self.emcc_args += [
      '-Wno-c++11-narrowing',
      '-Wno-deprecated-register',
      '-Wno-writable-strings',
      '-Wno-shift-negative-value',
      '-Wno-format'
    ]
    if use_cmake:
      self.set_setting('ASSERTIONS', 2)
      self.emcc_args.append('-Wno-unused-command-line-argument')
    # Any of the four reference outputs is accepted.
    self.do_runf(test_file('third_party/bullet/Demos/HelloWorld/HelloWorld.cpp'),
                 [read_file(test_file('bullet/output.txt')),
                  read_file(test_file('bullet/output2.txt')),
                  read_file(test_file('bullet/output3.txt')),
                  read_file(test_file('bullet/output4.txt'))],
                 libraries=self.get_bullet_library(use_cmake),
                 includes=[test_file('third_party/bullet/src')])
  @unittest.skip('LLVM changes have caused this C++ to no longer compile, https://github.com/emscripten-core/emscripten/issues/14614')
  @no_asan('issues with freetype itself')
  @needs_make('depends on freetype')
  @is_slow_test
  def test_poppler(self):
    """Render a PDF page with poppler and compare the generated PPM against the reference (currently skipped)."""
    pdf_data = read_binary(test_file('poppler/paper.pdf'))
    create_file('paper.pdf.js', str(list(bytearray(pdf_data))))
    create_file('pre.js', '''
    Module.preRun = function() {
      FS.createDataFile('/', 'paper.pdf', eval(read_('paper.pdf.js')), true, false, false);
    };
    Module.postRun = function() {
      var FileData = Array.from(MEMFS.getFileDataAsTypedArray(FS.root.contents['filename-1.ppm']));
      out("Data: " + JSON.stringify(FileData.map(function(x) { return unSign(x, 8) })));
    };
    ''')
    self.emcc_args += ['--pre-js', 'pre.js', '-s', 'DEFAULT_LIBRARY_FUNCS_TO_INCLUDE=$unSign']
    ppm_data = str(list(bytearray(read_binary(test_file('poppler/ref.ppm')))))
    self.do_run('', ppm_data.replace(' ', ''),
                libraries=self.get_poppler_library(),
                args=['-scale-to', '512', 'paper.pdf', 'filename'])
  @needs_make('make')
  @is_slow_test
  def test_openjpeg(self):
    """Decode a J2K image with openjpeg and verify the decoded pixels against a raw reference by mean pixel difference."""
    def do_test_openjpeg():
      # Wrap a long JS array literal so no single line becomes enormous.
      def line_splitter(data):
        out = ''
        counter = 0
        for ch in data:
          out += ch
          if ch == ' ' and counter > 60:
            out += '\n'
            counter = 0
          else:
            counter += 1
        return out
      # Strip -g: debug info is not needed and bloats the embedded data build.
      self.emcc_args = [x for x in self.emcc_args if x != '-g']
      original_j2k = test_file('openjpeg/syntensity_lobby_s.j2k')
      image_bytes = list(bytearray(read_binary(original_j2k)))
      create_file('pre.js', """
      Module.preRun = function() { FS.createDataFile('/', 'image.j2k', %s, true, false, false); };
      Module.postRun = function() {
        out('Data: ' + JSON.stringify(Array.from(MEMFS.getFileDataAsTypedArray(FS.analyzePath('image.raw').object))));
      };
      """ % line_splitter(str(image_bytes)))
      # If we don't do this then we don't know what the state of the cache will be
      # and this test would different non-deterministic results based on, for example,
      # what other tests had previously run.
      self.run_process([EMBUILDER, 'build', 'libpng'])
      lib = self.get_library('third_party/openjpeg',
                             [Path('codec/CMakeFiles/j2k_to_image.dir/index.c.o'),
                              Path('codec/CMakeFiles/j2k_to_image.dir/convert.c.o'),
                              Path('codec/CMakeFiles/j2k_to_image.dir/__/common/color.c.o'),
                              Path('bin/libopenjpeg.a')],
                             configure=['cmake', '.'],
                             # configure_args=['--enable-tiff=no', '--enable-jp3d=no', '--enable-png=no'],
                             make_args=[]) # no -j 2, since parallel builds can fail
      # We use doubles in JS, so we get slightly different values than native code. So we
      # check our output by comparing the average pixel difference
      def image_compare(output):
        # Get the image generated by JS, from the JSON.stringify'd array
        m = re.search(r'\[[\d, -]*\]', output)
        self.assertIsNotNone(m, 'Failed to find proper image output in: ' + output)
        js_data = eval(m.group(0))
        # Map negative byte values back into 0..255.
        js_data = [x if x >= 0 else 256 + x for x in js_data]
        true_data = bytearray(read_binary(test_file('openjpeg/syntensity_lobby_s.raw')))
        assert(len(js_data) == len(true_data))
        num = len(js_data)
        diff_total = js_total = true_total = 0
        for i in range(num):
          js_total += js_data[i]
          true_total += true_data[i]
          diff_total += abs(js_data[i] - true_data[i])
        js_mean = js_total / float(num)
        true_mean = true_total / float(num)
        diff_mean = diff_total / float(num)
        # Expected mean pixel value of the reference image.
        image_mean = 83.265
        assert abs(js_mean - image_mean) < 0.01, [js_mean, image_mean]
        assert abs(true_mean - image_mean) < 0.01, [true_mean, image_mean]
        assert diff_mean < 0.01, diff_mean
        return output
      self.set_setting('EXIT_RUNTIME', 0)
      self.emcc_args += ['--minify=0']
      self.emcc_args += ['--pre-js', 'pre.js']
      def do_test():
        self.do_runf(test_file('third_party/openjpeg/codec/j2k_to_image.c'),
                     'Successfully generated',
                     args='-i image.j2k -o image.raw'.split(),
                     emcc_args=['-sUSE_LIBPNG'],
                     libraries=lib,
                     includes=[test_file('third_party/openjpeg/libopenjpeg'),
                               test_file('third_party/openjpeg/codec'),
                               test_file('third_party/openjpeg/common'),
                               Path(self.get_build_dir(), 'third_party/openjpeg')],
                     output_nicerizer=image_compare)
      do_test()
      # Re-run with memory growth disabled when the config had it on.
      if self.get_setting('ALLOW_MEMORY_GROWTH') == 1:
        print('no memory growth', file=sys.stderr)
        self.set_setting('ALLOW_MEMORY_GROWTH', 0)
        do_test()
    if '-fsanitize=address' in self.emcc_args:
      # ASan needs more memory for this test.
      with env_modify({'EMMAKEN_CFLAGS': '-sINITIAL_MEMORY=300MB'}):
        do_test_openjpeg()
    else:
      do_test_openjpeg()
  @no_asan('call stack exceeded on some versions of node')
  @is_slow_test
  def test_fuzz(self):
    """Run every fuzz corpus file against its expected-output .txt, normally and then with -flto."""
    self.emcc_args += ['-I' + test_file('fuzz/include'), '-w']
    def run_all(x):
      print(x)
      for name in sorted(glob.glob(test_file('fuzz/*.c')) + glob.glob(test_file('fuzz/*.cpp'))):
        # 'newfail' cases are known failures; temp files are build leftovers.
        if 'newfail' in name:
          continue
        if os.path.basename(name).startswith('temp_fuzzcode'):
          continue
        print(name)
        # C++ corpus files are built as C++03; flag is removed again after.
        if name.endswith('.cpp'):
          self.emcc_args.append('-std=c++03')
        self.do_runf(test_file('fuzz', name),
                     read_file(test_file('fuzz', name + '.txt')))
        if name.endswith('.cpp'):
          self.emcc_args.remove('-std=c++03')
    run_all('normal')
    self.emcc_args += ['-flto']
    run_all('lto')
  @also_with_standalone_wasm(wasm2c=True, impure=True)
  @no_asan('autodebug logging interferes with asan')
  @with_env_modify({'EMCC_AUTODEBUG': '1'})
  def test_autodebug_wasm(self):
    """Build with EMCC_AUTODEBUG and check the instrumentation markers appear in the output."""
    # Verify that logging appears in the output.
    def check(out):
      for msg in ['log_execution', 'get_i32', 'set_i32', 'load_ptr', 'load_val', 'store_ptr', 'store_val']:
        self.assertIn(msg, out)
      return out
    self.do_runf(test_file('core/test_autodebug.c'),
                 'success', output_nicerizer=check)
  @parameterized({
    'full': ('full',),
    'mask': ('mask',),
    'none': ('none',),
  })
  def test_wasm2c_sandboxing(self, mode):
    """Build hello world in standalone WASM2C mode with the given sandboxing level."""
    if not can_do_standalone(self):
      return self.skipTest('standalone mode not supported')
    self.set_setting('STANDALONE_WASM')
    self.set_setting('WASM2C')
    self.set_setting('WASM2C_SANDBOXING', mode)
    # wasm2c output is native code, not runnable by the wasm engines.
    self.wasm_engines = []
    self.do_core_test('test_hello_world.c')
_args.append('-Wno-return-stack-address')
self.set_setting('EXPORTED_RUNTIME_METHODS', ['ccall', 'cwrap'])
self.set_setting('WASM_ASYNC_COMPILATION', 0)
create_file('post.js', '''
out('*');
var ret;
ret = Module['ccall']('get_int', 'number'); out([typeof ret, ret].join(','));
ret = ccall('get_float', 'number'); out([typeof ret, ret.toFixed(2)].join(','));
ret = ccall('get_bool', 'boolean'); out([typeof ret, ret].join(','));
ret = ccall('get_string', 'string'); out([typeof ret, ret].join(','));
ret = ccall('print_int', null, ['number'], [12]); out(typeof ret);
ret = ccall('print_float', null, ['number'], [14.56]); out(typeof ret);
ret = ccall('print_bool', null, ['boolean'], [true]); out(typeof ret);
ret = ccall('print_string', null, ['string'], ["cheez"]); out(typeof ret);
ret = ccall('print_string', null, ['array'], [[97, 114, 114, 45, 97, 121, 0]]); out(typeof ret); // JS array
ret = ccall('print_string', null, ['array'], [new Uint8Array([97, 114, 114, 45, 97, 121, 0])]); out(typeof ret); // typed array
ret = ccall('multi', 'number', ['number', 'number', 'number', 'string'], [2, 1.4, 3, 'more']); out([typeof ret, ret].join(','));
var p = ccall('malloc', 'pointer', ['number'], [4]);
setValue(p, 650, 'i32');
ret = ccall('pointer', 'pointer', ['pointer'], [p]); out([typeof ret, getValue(ret, 'i32')].join(','));
out('*');
// part 2: cwrap
var noThirdParam = Module['cwrap']('get_int', 'number');
out(noThirdParam());
var multi = Module['cwrap']('multi', 'number', ['number', 'number', 'number', 'string']);
out(multi(2, 1.4, 3, 'atr'));
out(multi(8, 5.4, 4, 'bret'));
out('*');
// part 3: avoid stack explosion and check it's restored correctly
for (var i = 0; i < TOTAL_STACK/60; i++) {
ccall('multi', 'number', ['number', 'number', 'number', 'string'], [0, 0, 0, '123456789012345678901234567890123456789012345678901234567890']);
}
out('stack is ok.');
ccall('call_ccall_again', null);
''')
self.emcc_args += ['--post-js', 'post.js']
self.set_setting('EXPORTED_FUNCTIONS', ['_get_int', '_get_float', '_get_bool', '_get_string', '_print_int', '_print_float', '_print_bool', '_print_string', '_multi', '_pointer', '_call_ccall_again', '_malloc'])
self.do_core_test('test_ccall.cpp')
if self.maybe_closure():
self.do_core_test('test_ccall.cpp')
  def test_EXPORTED_RUNTIME_METHODS(self):
    """Check EXPORTED_RUNTIME_METHODS both unused and with runtime helpers explicitly exported."""
    self.set_setting('DEFAULT_LIBRARY_FUNCS_TO_INCLUDE', ['$dynCall'])
    self.do_core_test('EXPORTED_RUNTIME_METHODS.c')
    # test dyncall (and other runtime methods in support.js) can be exported
    self.emcc_args += ['-DEXPORTED']
    self.set_setting('EXPORTED_RUNTIME_METHODS', ['dynCall', 'addFunction', 'lengthBytesUTF8', 'getTempRet0', 'setTempRet0'])
    self.do_core_test('EXPORTED_RUNTIME_METHODS.c')
  @parameterized({
    '': [],
    'minimal_runtime': ['-s', 'MINIMAL_RUNTIME=1']
  })
  def test_dyncall_specific(self, *args):
    """Run dyncall_specific.c under each supported way of reaching dynCall (direct, DYNCALLS, exported)."""
    cases = [
      ('DIRECT', []),
      ('DYNAMIC_SIG', ['-s', 'DYNCALLS=1', '-s', 'DEFAULT_LIBRARY_FUNCS_TO_INCLUDE=$dynCall']),
    ]
    if 'MINIMAL_RUNTIME=1' in args:
      self.emcc_args += ['--pre-js', test_file('minimal_runtime_exit_handling.js')]
    else:
      # These variants are not available under MINIMAL_RUNTIME.
      cases += [
        ('EXPORTED', []),
        ('EXPORTED_DYNAMIC_SIG', ['-s', 'DYNCALLS=1', '-s', 'DEFAULT_LIBRARY_FUNCS_TO_INCLUDE=$dynCall', '-s', 'EXPORTED_RUNTIME_METHODS=dynCall']),
        ('FROM_OUTSIDE', ['-s', 'EXPORTED_RUNTIME_METHODS=dynCall_iiji'])
      ]
    for which, extra_args in cases:
      print(str(args) + ' ' + which)
      self.do_core_test('dyncall_specific.c', emcc_args=['-D' + which] + list(args) + extra_args)
  def test_getValue_setValue(self):
    """Check getValue/setValue: direct use, the assertion error when not exported, and explicit export."""
    # these used to be exported, but no longer are by default
    def test(output_prefix='', args=[], assert_returncode=0):
      src = test_file('core/getValue_setValue.cpp')
      expected = test_file('core/getValue_setValue' + output_prefix + '.out')
      self.do_run_from_file(src, expected, assert_returncode=assert_returncode, emcc_args=args)
    # see that direct usage (not on module) works. we don't export, but the use
    test(args=['-DDIRECT'])
    self.set_setting('EXPORTED_RUNTIME_METHODS', [])
    self.set_setting('ASSERTIONS')
    # unexported use on Module must fail with a helpful assertion message
    test('_assert', assert_returncode=NON_ZERO)
    self.set_setting('ASSERTIONS', 0)
    self.set_setting('EXPORTED_RUNTIME_METHODS', ['getValue', 'setValue'])
    test()
  @parameterized({
    '': ([],),
    '_files': (['-DUSE_FILES'],)
  })
  def test_FS_exports(self, extra_args):
    """Check FS runtime methods: direct use, the assertion error when not exported, and explicit export."""
    def test(output_prefix='', args=[], assert_returncode=0):
      args += extra_args
      print(args)
      self.do_runf(test_file('core/FS_exports.cpp'),
                   (read_file(test_file('core/FS_exports' + output_prefix + '.out')),
                    read_file(test_file('core/FS_exports' + output_prefix + '_2.out'))),
                   assert_returncode=assert_returncode, emcc_args=args)
    # keeps it alive through JSDCE
    test(args=['-DDIRECT', '-s', 'FORCE_FILESYSTEM'])
    # see that with assertions, we get a nice error message
    self.set_setting('EXPORTED_RUNTIME_METHODS', [])
    self.set_setting('ASSERTIONS')
    test('_assert', assert_returncode=NON_ZERO)
    self.set_setting('ASSERTIONS', 0)
    # see that when we export them, things work on the module
    self.set_setting('EXPORTED_RUNTIME_METHODS', ['FS_createDataFile'])
    test(args=['-s', 'FORCE_FILESYSTEM'])
  def test_legacy_exported_runtime_numbers(self):
    """Check legacy runtime constants (e.g. ALLOC_STACK): direct use, assertion when unexported, explicit export."""
    # these used to be exported, but no longer are by default
    def test(output_prefix='', args=[], assert_returncode=0):
      # Restore emcc_args after each sub-run so cases stay independent.
      old = self.emcc_args.copy()
      self.emcc_args += args
      src = test_file('core/legacy_exported_runtime_numbers.cpp')
      expected = test_file('core/legacy_exported_runtime_numbers%s.out' % output_prefix)
      self.do_run_from_file(src, expected, assert_returncode=assert_returncode)
      self.emcc_args = old
    # see that direct usage (not on module) works. we don't export, but the use
    test(args=['-DDIRECT'])
    self.set_setting('EXPORTED_RUNTIME_METHODS', [])
    self.set_setting('ASSERTIONS')
    test('_assert', assert_returncode=NON_ZERO)
    self.set_setting('ASSERTIONS', 0)
    self.set_setting('EXPORTED_RUNTIME_METHODS', ['ALLOC_STACK'])
    test()
  def test_response_file(self):
    """Check that emcc accepts @response-file arguments and errors on a missing one."""
    response_data = '-o %s/response_file.js %s' % (self.get_dir(), test_file('hello_world.cpp'))
    # Escape backslashes so Windows paths survive response-file parsing.
    create_file('rsp_file', response_data.replace('\\', '\\\\'))
    self.run_process([EMCC, "@rsp_file"] + self.get_emcc_args())
    self.do_run('response_file.js', 'hello, world', no_build=True)
    self.assertContained('response file not found: foo.txt', self.expect_fail([EMCC, '@foo.txt']))
  def test_linker_response_file(self):
    """Check that linker flags can be passed via -Wl,@response-file."""
    objfile = 'response_file.o'
    self.run_process([EMCC, '-c', test_file('hello_world.cpp'), '-o', objfile] + self.get_emcc_args())
    # This should expand into -Wl,--start-group <objfile> -Wl,--end-group
    response_data = '--start-group ' + objfile + ' --end-group'
    # Escape backslashes so Windows paths survive response-file parsing.
    create_file('rsp_file', response_data.replace('\\', '\\\\'))
    self.run_process([EMCC, "-Wl,@rsp_file", '-o', 'response_file.o.js'] + self.get_emcc_args())
    self.do_run('response_file.o.js', 'hello, world', no_build=True)
def test_exported_response(self):
src = r'''
#include <stdio.h>
#include <stdlib.h>
#include <emscripten.h>
extern "C" {
int other_function() { return 5; }
}
int main() {
int x = EM_ASM_INT({ return Module._other_function() });
emscripten_run_script_string(""); // Add a reference to a symbol that exists in src/deps_info.json to uncover issue #2836 in the test suite.
printf("waka %d!\n", x);
return 0;
}
'''
create_file('exps', '["_main","_other_function"]')
self.set_setting('EXPORTED_FUNCTIONS', '@exps')
self.do_run(src, '''waka 5!''')
assert 'other_function' in read_file('src.js')
  def test_large_exported_response(self):
    """Check that a JSON @response-file with 5000 exported functions links and runs."""
    src = r'''
      #include <stdio.h>
      #include <stdlib.h>
      #include <emscripten.h>
      extern "C" {
      '''
    js_funcs = []
    num_exports = 5000
    count = 0
    # Generate num_exports trivial C functions and their export names.
    while count < num_exports:
      src += 'int exported_func_from_response_file_%d () { return %d;}\n' % (count, count)
      js_funcs.append('_exported_func_from_response_file_%d' % count)
      count += 1
    src += r'''
      }
      int main() {
        int x = EM_ASM_INT({ return Module._exported_func_from_response_file_4999() });
        emscripten_run_script_string(""); // Add a reference to a symbol that exists in src/deps_info.json to uncover issue #2836 in the test suite.
        printf("waka %d!\n", x);
        return 0;
      }
    '''
    js_funcs.append('_main')
    create_file('large_exported_response.json', json.dumps(js_funcs))
    self.set_setting('EXPORTED_FUNCTIONS', '@large_exported_response.json')
    self.do_run(src, 'waka 4999!')
    self.assertContained('_exported_func_from_response_file_1', read_file('src.js'))
  def test_add_function(self):
    """Check addFunction: basic use, failure without reserved slots, and success with table growth."""
    self.set_setting('INVOKE_RUN', 0)
    self.set_setting('WASM_ASYNC_COMPILATION', 0)
    self.set_setting('RESERVED_FUNCTION_POINTERS')
    self.set_setting('EXPORTED_RUNTIME_METHODS', ['callMain'])
    src = test_file('interop/test_add_function.cpp')
    post_js = test_file('interop/test_add_function_post.js')
    self.emcc_args += ['--post-js', post_js]
    print('basics')
    self.do_run_in_out_file_test('interop/test_add_function.cpp')
    print('with RESERVED_FUNCTION_POINTERS=0')
    self.set_setting('RESERVED_FUNCTION_POINTERS', 0)
    expected = 'Unable to grow wasm table'
    if self.is_wasm2js():
      # emulation code. when ASSERTIONS are enabled we show a clear message, but
      # in optimized builds we don't waste code size on that, and the JS engine
      expected = 'wasmTable.grow is not a function'
    self.do_runf(src, expected, assert_returncode=NON_ZERO)
    print('- with table growth')
    self.set_setting('ALLOW_TABLE_GROWTH')
    self.emcc_args += ['-DGROWTH']
    # enable assertions to check formal correctness of the table growth path
    self.set_setting('ASSERTIONS', 2)
    self.do_run_in_out_file_test('interop/test_add_function.cpp', interleaved_output=False)
  def test_getFuncWrapper_sig_alias(self):
    """Call two functions through getFuncWrapper with different signatures ('vi' and 'vii')."""
    self.set_setting('DEFAULT_LIBRARY_FUNCS_TO_INCLUDE', ['$getFuncWrapper'])
    src = r'''
    #include <stdio.h>
    #include <emscripten.h>
    void func1(int a) {
      printf("func1\n");
    }
    void func2(int a, int b) {
      printf("func2\n");
    }
    int main() {
      EM_ASM({
        getFuncWrapper($0, 'vi')(0);
        getFuncWrapper($1, 'vii')(0, 0);
      }, func1, func2);
      return 0;
    }
    '''
    self.do_run(src, 'func1\nfunc2\n')
def test_emulate_function_pointer_casts(self):
self.set_setting('EXIT_RUNTIME', 0)
self.set_setting('EMULATE_FUNCTION_POINTER_CASTS')
self.do_core_test('test_emulate_function_pointer_casts.cpp')
  @no_wasm2js('TODO: nicely printed names in wasm2js')
  @parameterized({
    'normal': ([],),
    'noexcept': (['-fno-exceptions'],)
  })
  def test_demangle_stacks(self, extra_args):
    """Check that aborting stack traces show demangled names (with and without assertions)."""
    self.emcc_args += extra_args
    self.set_setting('DEMANGLE_SUPPORT')
    self.set_setting('ASSERTIONS')
    self.set_setting('BINARYEN_EXTRA_PASSES', '--one-caller-inline-max-function-size=1')
    self.emcc_args += ['--profiling-funcs']
    self.do_core_test('test_demangle_stacks.cpp', assert_returncode=NON_ZERO)
    print('without assertions, the stack is not printed, but a message suggesting assertions is')
    self.set_setting('ASSERTIONS', 0)
    self.do_core_test('test_demangle_stacks_noassert.cpp', assert_returncode=NON_ZERO)
def test_demangle_stacks_symbol_map(self):
self.set_setting('BINARYEN_EXTRA_PASSES', '--one-caller-inline-max-function-size=1')
self.set_setting('DEMANGLE_SUPPORT')
if '-O' not in str(self.emcc_args) or '-O0' in self.emcc_args or '-O1' in self.emcc_args or '-g' in self.emcc_args:
self.skipTest("without opts, we don't emit a symbol map")
self.emcc_args += ['--emit-symbol-map']
self.do_runf(test_file('core/test_demangle_stacks.cpp'), 'Aborted', assert_returncode=NON_ZERO)
# make sure the shortened name is the right one
full_aborter = None
short_aborter = None
for line in open('test_demangle_stacks.js.symbols').readlines():
if ':' not in line:
continue
# split by the first ':' (wasm backend demangling may include more :'s later on)
short, full = line.split(':', 1)
if 'Aborter' in full:
short_aborter = short
full_aborter = full
self.assertIsNotNone(full_aborter)
self.assertIsNotNone(short_aborter)
print('full:', full_aborter, 'short:', short_aborter)
if config.SPIDERMONKEY_ENGINE and os.path.exists(config.SPIDERMONKEY_ENGINE[0]):
output = self.run_js('test_demangle_stacks.js', engine=config.SPIDERMONKEY_ENGINE, assert_returncode=NON_ZERO)
if ' ' + short_aborter + ' ' not in output and ' ' + full_aborter + ' ' not in output:
if '\n' + short_aborter + ' ' not in output and '\n' + full_aborter + ' ' not in output and 'wasm-function[' + short_aborter + ']' not in output:
if '\n' + short_aborter + '@' not in output and '\n' + full_aborter + '@' not in output:
self.assertContained(' ' + short_aborter + ' ' + '\n' + ' ' + full_aborter + ' ', output)
  @no_safe_heap('tracing from sbrk into JS leads to an infinite loop')
  def test_tracing(self):
    """Build and run test_tracing.c with the --tracing option."""
    self.emcc_args += ['--tracing']
    self.do_core_test('test_tracing.c')
  @disabled('https://github.com/emscripten-core/emscripten/issues/9527')
  def test_eval_ctors(self):
    """Check EVAL_CTORS: evaluating static constructors at compile time should shrink code and grow static data (currently disabled)."""
    if '-O2' not in str(self.emcc_args) or '-O1' in str(self.emcc_args):
      self.skipTest('need js optimizations')
    if not self.is_wasm():
      self.skipTest('this test uses wasm binaries')
    print('leave printf in ctor')
    self.set_setting('EVAL_CTORS')
    # A ctor with a side effect (printf) must NOT be evaluated away.
    self.do_run(r'''
      #include <stdio.h>
      struct C {
        C() { printf("constructing!\n"); } // don't remove this!
      };
      C c;
      int main() {}
    ''', "constructing!\n")
    def get_code_size():
      if self.is_wasm():
        # Use number of functions as a for code size
        return self.count_wasm_contents('hello_libcxx.wasm', 'funcs')
      else:
        return os.path.getsize('hello_libcxx.js')
    def get_mem_size():
      if self.is_wasm():
        # Use number of functions as a for code size
        return self.count_wasm_contents('hello_libcxx.wasm', 'memory-data')
      if self.uses_memory_init_file():
        return os.path.getsize('hello_libcxx.js.mem')
      # otherwise we ignore memory size
      return 0
    # Build twice (with and without EVAL_CTORS) and require that eval'ing
    # ctors shrinks code while increasing pre-initialized static data.
    def do_test(test):
      self.set_setting('EVAL_CTORS')
      test()
      ec_code_size = get_code_size()
      ec_mem_size = get_mem_size()
      self.clear_setting('EVAL_CTORS')
      test()
      code_size = get_code_size()
      mem_size = get_mem_size()
      if mem_size:
        print('mem: ', mem_size, '=>', ec_mem_size)
        self.assertGreater(ec_mem_size, mem_size)
      print('code:', code_size, '=>', ec_code_size)
      self.assertLess(ec_code_size, code_size)
    print('remove ctor of just assigns to memory')
    def test1():
      self.do_run(r'''
        #include <stdio.h>
        struct C {
          int x;
          C() {
            volatile int y = 10;
            y++;
            x = y;
          }
        };
        C c;
        int main() {
          printf("x: %d\n", c.x);
        }
      ''', "x: 11\n")
    do_test(test1)
    # The wasm backend currently exports a single initalizer so the ctor
    # evaluation is all or nothing. As well as that it doesn't currently
    print('libcxx - remove 2 ctors from iostream code')
    output = 'hello, world!'
    def test2():
      self.do_runf(test_file('hello_libcxx.cpp'), output)
    do_test(test2)
    print('assertions too')
    self.set_setting('ASSERTIONS')
    self.do_runf(test_file('hello_libcxx.cpp'), output)
    self.set_setting('ASSERTIONS', 0)
    print('remove just some, leave others')
    def test3():
      self.do_run(r'''
        #include <iostream>
        #include <string>
        class std_string {
        public:
          std_string(): ptr(nullptr) { std::cout << "std_string()\n"; }
          std_string(const char* s): ptr(s) { std::cout << "std_string(const char* s)" << std::endl; }
          std_string(const std_string& s): ptr(s.ptr) { std::cout << "std_string(const std_string& s) " << std::endl; }
          const char* data() const { return ptr; }
        private:
          const char* ptr;
        };
        const std_string txtTestString("212121\0");
        const std::string s2text("someweirdtext");
        int main() {
          std::cout << s2text << std::endl;
          std::cout << txtTestString.data() << std::endl;
          std::cout << txtTestString.data() << std::endl;
          return 0;
        }
      ''', '''std_string(const char* s)
someweirdtext
212121
212121
''') # noqa
    do_test(test3)
  def test_embind(self):
    """Call a global JS object's methods (Math.abs) from C++ via emscripten::val."""
    self.emcc_args += ['--bind']
    create_file('test_embind.cpp', r'''
      #include <stdio.h>
      #include <emscripten/val.h>
      using namespace emscripten;
      int main() {
        val Math = val::global("Math");
        // two ways to call Math.abs
        printf("abs(-10): %d\n", Math.call<int>("abs", -10));
        printf("abs(-11): %d\n", Math["abs"](-11).as<int>());
        return 0;
      }
    ''')
    self.do_runf('test_embind.cpp', 'abs(-10): 10\nabs(-11): 11')
  def test_embind_2(self):
    """Call a C++ function bound with EMSCRIPTEN_BINDINGS from JS (Module.lerp)."""
    self.emcc_args += ['--bind', '--post-js', 'post.js']
    create_file('post.js', '''
      function printLerp() {
        out('lerp ' + Module.lerp(100, 200, 66) + '.');
      }
    ''')
    create_file('test_embind_2.cpp', r'''
      #include <stdio.h>
      #include <emscripten.h>
      #include <emscripten/bind.h>
      using namespace emscripten;
      int lerp(int a, int b, int t) {
        return (100 - t) * a + t * b;
      }
      EMSCRIPTEN_BINDINGS(my_module) {
        function("lerp", &lerp);
      }
      int main(int argc, char **argv) {
        EM_ASM(printLerp());
        return 0;
      }
    ''')
    self.do_runf('test_embind_2.cpp', 'lerp 166')
  def test_embind_3(self):
    """Check that calling a bound function with an unbound pointer type raises UnboundTypeError."""
    self.emcc_args += ['--bind', '--post-js', 'post.js']
    create_file('post.js', '''
      function ready() {
        try {
          Module.compute(new Uint8Array([1,2,3]));
        } catch(e) {
          out(e);
        }
      }
    ''')
    create_file('test_embind_3.cpp', r'''
      #include <emscripten.h>
      #include <emscripten/bind.h>
      using namespace emscripten;
      int compute(int array[]) {
        return 0;
      }
      EMSCRIPTEN_BINDINGS(my_module) {
        function("compute", &compute, allow_raw_pointers());
      }
      int main(int argc, char **argv) {
        EM_ASM(ready());
        return 0;
      }
    ''')
    self.do_runf('test_embind_3.cpp', 'UnboundTypeError: Cannot call compute due to unbound types: Pi')
  def test_embind_4(self):
    """Expose a C++ buffer to JS via typed_memory_view and read it from JS."""
    self.emcc_args += ['--bind', '--post-js', 'post.js']
    create_file('post.js', '''
      function printFirstElement() {
        out(Module.getBufferView()[0]);
      }
    ''')
    create_file('test_embind_4.cpp', r'''
      #include <emscripten.h>
      #include <emscripten/bind.h>
      #include <emscripten/val.h>
      #include <stdio.h>
      using namespace emscripten;
      const size_t kBufferSize = 1024;
      double buffer[kBufferSize];
      val getBufferView(void) {
        val v = val(typed_memory_view(kBufferSize, buffer));
        return v;
      }
      EMSCRIPTEN_BINDINGS(my_module) {
        function("getBufferView", &getBufferView);
      }
      int main(int argc, char **argv) {
        buffer[0] = 107;
        EM_ASM(printFirstElement());
        return 0;
      }
    ''')
    self.do_runf('test_embind_4.cpp', '107')
  def test_embind_5(self):
    """Build and run test_embind_5.cpp with embind and EXIT_RUNTIME enabled."""
    self.emcc_args += ['--bind']
    self.set_setting('EXIT_RUNTIME')
    self.do_core_test('test_embind_5.cpp')
  def test_embind_custom_marshal(self):
    """Run the embind custom-marshalling test with its JS pre-script; output must match exactly."""
    self.emcc_args += ['--bind', '--pre-js', test_file('embind/test_custom_marshal.js')]
    self.do_run_in_out_file_test('embind/test_custom_marshal.cpp', assert_identical=True)
  def test_embind_float_constants(self):
    """Run the embind float-constants test."""
    self.emcc_args += ['--bind']
    self.do_run_in_out_file_test('embind/test_float_constants.cpp')
  def test_embind_negative_constants(self):
    """Run the embind negative-constants test."""
    self.emcc_args += ['--bind']
    self.do_run_in_out_file_test('embind/test_negative_constants.cpp')
  @also_with_wasm_bigint
  def test_embind_unsigned(self):
    """Run the embind unsigned-integer marshalling test."""
    self.emcc_args += ['--bind']
    self.do_run_in_out_file_test('embind/test_unsigned.cpp')
  def test_embind_val(self):
    """Run the emscripten::val API test."""
    self.emcc_args += ['--bind']
    self.do_run_in_out_file_test('embind/test_val.cpp')
  @no_wasm2js('wasm_bigint')
  def test_embind_i64_val(self):
    """Run the emscripten::val 64-bit integer test under WASM_BIGINT (needs node BigInt support)."""
    self.set_setting('WASM_BIGINT')
    self.emcc_args += ['--bind']
    self.node_args += ['--experimental-wasm-bigint']
    self.do_run_in_out_file_test('embind/test_i64_val.cpp', assert_identical=True)
  @no_wasm2js('wasm_bigint')
  def test_embind_i64_binding(self):
    """Run the embind 64-bit integer binding test under WASM_BIGINT (needs node BigInt support)."""
    self.set_setting('WASM_BIGINT')
    self.emcc_args += ['--bind']
    self.node_args += ['--experimental-wasm-bigint']
    self.do_run_in_out_file_test('embind/test_i64_binding.cpp', assert_identical=True)
  def test_embind_no_rtti(self):
    """Check that embind bindings work with -fno-rtti when unbound type names are disabled."""
    create_file('main.cpp', r'''
      #include <emscripten.h>
      #include <emscripten/bind.h>
      #include <emscripten/val.h>
      #include <stdio.h>
      EM_JS(void, calltest, (), {
        console.log("dotest returned: " + Module.dotest());
      });
      int main(int argc, char** argv){
        printf("418\n");
        calltest();
        return 0;
      }
      int test() {
        return 42;
      }
      EMSCRIPTEN_BINDINGS(my_module) {
        emscripten::function("dotest", &test);
      }
    ''')
    self.emcc_args += ['--bind', '-fno-rtti', '-DEMSCRIPTEN_HAS_UNBOUND_TYPE_NAMES=0']
    self.do_runf('main.cpp', '418\ndotest returned: 42\n')
def test_embind_polymorphic_class_no_rtti(self):
  """Bind a polymorphic class while RTTI is disabled."""
  self.emcc_args.extend(['--bind', '-fno-rtti', '-DEMSCRIPTEN_HAS_UNBOUND_TYPE_NAMES=0'])
  self.do_core_test('test_embind_polymorphic_class_no_rtti.cpp')
def test_embind_no_rtti_followed_by_rtti(self):
  """-fno-rtti followed by -frtti on the command line: the later flag wins."""
  src = r'''
    #include <emscripten.h>
    #include <emscripten/bind.h>
    #include <emscripten/val.h>
    #include <stdio.h>
    EM_JS(void, calltest, (), {
      console.log("dotest returned: " + Module.dotest());
    });
    int main(int argc, char** argv){
      printf("418\n");
      calltest();
      return 0;
    }
    int test() {
      return 42;
    }
    EMSCRIPTEN_BINDINGS(my_module) {
      emscripten::function("dotest", &test);
    }
  '''
  self.emcc_args.extend(['--bind', '-fno-rtti', '-frtti'])
  self.do_run(src, '418\ndotest returned: 42\n')
@parameterized({
  '': (None, False),
  'all': ('ALL', False),
  'fast': ('FAST', False),
  'default': ('DEFAULT', False),
  'all_growth': ('ALL', True),
})
def test_webidl(self, mode, allow_memory_growth):
  """Run the WebIDL binder over webidl/test.idl and execute the bound code.

  `mode` selects the IDL_CHECKS level passed to the binder (None means use
  the binder's default); `allow_memory_growth` additionally enables
  ALLOW_MEMORY_GROWTH and tells post.js about it via a global flag.
  """
  self.uses_es6 = True
  self.set_setting('WASM_ASYNC_COMPILATION', 0)
  if self.maybe_closure():
    # avoid closure minified names competing with our test code in the global name space
    self.set_setting('MODULARIZE')
  else:
    self.set_setting('WASM_ASYNC_COMPILATION', 0)
  # Force IDL checks mode
  with env_modify({'IDL_CHECKS': mode}):
    self.run_process([WEBIDL_BINDER, test_file('webidl/test.idl'), 'glue'])
  self.assertExists('glue.cpp')
  self.assertExists('glue.js')
  # Build the extern-post-js: instantiate the module (modularized or not),
  # expose the growth flag, then append the actual test driver.
  post_js = '\n\n'
  if self.get_setting('MODULARIZE'):
    post_js += 'var TheModule = Module();\n'
  else:
    post_js += 'var TheModule = Module;\n'
  post_js += '\n\n'
  if allow_memory_growth:
    post_js += "var isMemoryGrowthAllowed = true;\n"
  else:
    post_js += "var isMemoryGrowthAllowed = false;\n"
  post_js += read_file(test_file('webidl/post.js'))
  post_js += '\n\n'
  create_file('extern-post.js', post_js)
  # Export things on "TheModule". This matches the typical use pattern of the bound library
  # being used as Box2D.* or Ammo.*, and we cannot rely on "Module" being always present (closure may remove it).
  self.emcc_args += ['-s', 'EXPORTED_FUNCTIONS=_malloc,_free', '--post-js=glue.js', '--extern-post-js=extern-post.js']
  if allow_memory_growth:
    self.set_setting('ALLOW_MEMORY_GROWTH')
  if not mode:
    mode = 'DEFAULT'
  # each IDL_CHECKS mode has its own golden output file
  expected = test_file('webidl/output_%s.txt' % mode)
  self.do_run_from_file(test_file('webidl/test.cpp'), expected)
### Tests for tools
@no_wasm2js('TODO: source maps in wasm2js')
@parameterized({
  '': ([],),
  'minimal_runtime': (['-s', 'MINIMAL_RUNTIME'],),
})
def test_source_map(self, args):
  """Build with -gsource-map and validate the emitted source map.

  Checks that (a) after stripping source-map comments the mapped build is
  comparable to a plain -g build, (b) the map's `file`/`sources` entries are
  sane, and (c) every meaningful source line is present in the mappings.
  """
  if '-g' not in self.emcc_args:
    self.emcc_args.append('-g')
  self.emcc_args += args
  src = '''
    #include <stdio.h>
    #include <assert.h>

    __attribute__((noinline)) int foo() {
      printf("hi"); // line 6
      return 1; // line 7
    }

    int main() {
      printf("%d", foo()); // line 11
      return 0; // line 12
    }
  '''
  create_file('src.cpp', src)
  out_filename = 'a.out.js'
  wasm_filename = 'a.out.wasm'
  no_maps_filename = 'no-maps.out.js'

  assert '-gsource-map' not in self.emcc_args
  self.emcc('src.cpp', self.get_emcc_args(), out_filename)
  # the file name may find its way into the generated code, so make sure we
  # can do an apples-to-apples comparison by compiling with the same file name
  shutil.move(out_filename, no_maps_filename)
  no_maps_file = read_file(no_maps_filename)
  # fix: this strip of //@line and //# sourceMappingURL comments was truncated
  no_maps_file = re.sub(' *//[@#].*\n', '', no_maps_file)
  self.emcc_args.append('-gsource-map')

  self.emcc(os.path.abspath('src.cpp'),
            self.get_emcc_args(),
            out_filename,
            stderr=PIPE)
  map_referent = out_filename if not self.is_wasm() else wasm_filename
  # after removing the @line and @sourceMappingURL comments, the build
  # result should be identical to the non-source-mapped debug version.
  # this is worth checking because the parser AST swaps strings for token
  # objects when generating source maps, so we want to make sure the
  # optimizer can deal with both types.
  map_filename = map_referent + '.map'

  # fix: read the JSON without leaking a file handle, and test dict keys
  # with `in` (hasattr() on a dict is always False, so these branches
  # previously never ran)
  data = json.loads(read_file(map_filename))
  if 'file' in data:
    # the file attribute is optional, but if it is present it needs to refer
    # the output file.
    self.assertPathsIdentical(map_referent, data['file'])
  self.assertGreater(len(data['sources']), 1)
  self.assertPathsIdentical('src.cpp', data['sources'][0])
  if 'sourcesContent' in data:
    # the sourcesContent attribute is optional, but if it is present it
    # needs to contain valid source text.
    self.assertTextDataIdentical(src, data['sourcesContent'][0])
  mappings = json.loads(self.run_js(
    path_from_root('tests/sourcemap2json.js'),
    args=[map_filename]))
  seen_lines = set()
  for m in mappings:
    if m['source'] == 'src.cpp':
      seen_lines.add(m['originalLine'])
  # ensure that all the 'meaningful' lines in the original code get mapped
  # when optimizing, the binaryen optimizer may remove some of them (by inlining, etc.)
  if self.is_optimizing():
    self.assertTrue(seen_lines.issuperset([11, 12]), seen_lines)
  else:
    self.assertTrue(seen_lines.issuperset([6, 7, 11, 12]), seen_lines)
@no_wasm2js('TODO: source maps in wasm2js')
def test_dwarf(self):
  """Build with -g and cross-check the DWARF debug info against the wasm.

  Verifies that the expected DWARF sections exist, that the line table maps
  each of the three out_to_js() call sites, that those addresses are in
  source order, and (in optimized builds) that each DWARF address falls
  within the address range of the corresponding call in the wat dump.
  """
  self.emcc_args.append('-g')

  create_file('src.cpp', '''
    #include <emscripten.h>
    EM_JS(int, out_to_js, (int x), {})
    void foo() {
      out_to_js(0); // line 5
      out_to_js(1); // line 6
      out_to_js(2); // line 7
      // A silly possible recursion to avoid binaryen doing any inlining.
      if (out_to_js(3)) foo();
    }
    int main() {
      foo();
    }
  ''')
  js_filename = 'a.out.js'
  wasm_filename = 'a.out.wasm'

  self.emcc('src.cpp', self.get_emcc_args(), js_filename)

  out = self.run_process([shared.LLVM_DWARFDUMP, wasm_filename, '-all'], stdout=PIPE).stdout

  # parse the sections
  sections = {}
  curr_section_name = ''
  curr_section_body = ''

  def add_section():
    # flush the section currently being accumulated, if any
    if curr_section_name:
      sections[curr_section_name] = curr_section_body

  for line in out.splitlines():
    if ' contents:' in line:
      # a new section, a line like ".debug_str contents:"
      add_section()
      curr_section_name = line.split(' ')[0]
      curr_section_body = ''
    else:
      # possibly a line in a section
      if curr_section_name:
        curr_section_body += line + '\n'
  add_section()

  # make sure the right sections exist
  self.assertIn('.debug_abbrev', sections)
  self.assertIn('.debug_info', sections)
  self.assertIn('.debug_line', sections)
  self.assertIn('.debug_str', sections)
  self.assertIn('.debug_ranges', sections)

  # verify some content in the sections
  self.assertIn('"src.cpp"', sections['.debug_info'])

  # the line section looks like this:
  # Address            Line   Column File   ISA Discriminator Flags
  # ------------------ ------ ------ ------ --- ------------- -------------
  # 0x000000000000000b      5      0      3   0             0  is_stmt
  src_to_addr = {}
  found_src_cpp = False
  for line in sections['.debug_line'].splitlines():
    if 'name: "src.cpp"' in line:
      found_src_cpp = True
    if not found_src_cpp:
      continue
    if 'debug_line' in line:
      break
    if line.startswith('0x'):
      # collapse runs of spaces so the columns split cleanly
      while '  ' in line:
        line = line.replace('  ', ' ')
      addr, line, col = line.split(' ')[:3]
      key = (int(line), int(col))
      src_to_addr.setdefault(key, []).append(addr)

  # each of the calls must remain in the binary, and be mapped
  self.assertIn((5, 9), src_to_addr)
  self.assertIn((6, 9), src_to_addr)
  self.assertIn((7, 9), src_to_addr)

  def get_dwarf_addr(line, col):
    # return the single address mapped for (line, col)
    addrs = src_to_addr[(line, col)]
    # we assume the simple calls have one address
    self.assertEqual(len(addrs), 1)
    return int(addrs[0], 0)

  # the lines must appear in sequence (as calls to JS, the optimizer cannot
  # reorder them)
  self.assertLess(get_dwarf_addr(5, 9), get_dwarf_addr(6, 9))
  self.assertLess(get_dwarf_addr(6, 9), get_dwarf_addr(7, 9))

  # Get the wat, printing with -g which has binary offsets
  wat = self.run_process([Path(building.get_binaryen_bin(), 'wasm-opt'),
                          wasm_filename, '-g', '--print'], stdout=PIPE).stdout

  # We expect to see a pattern like this in optimized builds (there isn't
  # anything else):
  #
  #  ;; code offset: 0x?
  #  (drop
  #   ;; code offset: 0x?
  #   (call $out_to_js
  #    ;; code offset: 0x?
  #    (local.get ?) or (i32.const ?)
  #   )
  #  )
  #
  # In the stacky stream of instructions form, it is
  #
  #  local.get or i32.const
  #  call $out_to_js
  #  drop
  #
  # However, in an unoptimized build the constant may be assigned earlier in
  # some other manner, so stop here.
  if not self.is_optimizing():
    return

  # get_wat_addr gets the address of one of the 3 interesting calls, by its
  # index (0,1,2).
  def get_wat_addr(call_index):
    # find the call_index-th call
    call_loc = -1
    for i in range(call_index + 1):
      call_loc = wat.find('call $out_to_js', call_loc + 1)
      assert call_loc > 0
    # the call begins with the local.get/i32.const printed below it, which is
    # the first instruction in the stream, so it has the lowest address
    start_addr_loc = wat.find('0x', call_loc)
    assert start_addr_loc > 0
    start_addr_loc_end = wat.find('\n', start_addr_loc)
    start_addr = int(wat[start_addr_loc:start_addr_loc_end], 0)
    # the call ends with the drop, which is the last in the stream, at the
    # highest address
    end_addr_loc = wat.rfind('drop', 0, call_loc)
    assert end_addr_loc > 0
    end_addr_loc = wat.rfind('0x', 0, end_addr_loc)
    assert end_addr_loc > 0
    end_addr_loc_end = wat.find('\n', end_addr_loc)
    assert end_addr_loc_end > 0
    end_addr = int(wat[end_addr_loc:end_addr_loc_end], 0)
    return (start_addr, end_addr)

  # match up the DWARF and the wat
  for i in range(3):
    dwarf_addr = get_dwarf_addr(5 + i, 9)
    start_wat_addr, end_wat_addr = get_wat_addr(i)
    # the dwarf may match any of the 3 instructions that form the stream of
    # of instructions implementing the call in the source code, in theory
    self.assertLessEqual(start_wat_addr, dwarf_addr)
    self.assertLessEqual(dwarf_addr, end_wat_addr)
def test_modularize_closure_pre(self):
  """MODULARIZE + closure + pre-js together: closure must not minify the
  Module object in a way that breaks the pre-js's access to it."""
  create_file('post.js', 'var TheModule = Module();\n')
  extra_args = [
    '--pre-js', test_file('core/modularize_closure_pre.js'),
    '--extern-post-js=post.js',
    '--closure=1',
    '-g1',
    '-s',
    'MODULARIZE=1',
  ]
  self.emcc_args.extend(extra_args)
  self.do_core_test('modularize_closure_pre.c')
@no_wasm2js('symbol names look different wasm2js backtraces')
def test_emscripten_log(self):
  """Exercise the emscripten_log API, with and without closure."""
  self.banned_js_engines = [config.V8_ENGINE] # v8 doesn't support console.log
  self.set_setting('DEMANGLE_SUPPORT')
  if '-g' not in self.emcc_args:
    self.emcc_args.append('-g')
  self.emcc_args.append('-DRUN_FROM_JS_SHELL')
  self.do_run_in_out_file_test('emscripten_log/emscripten_log.cpp', interleaved_output=False)
  # repeat with closure (expected output differs slightly)
  if self.maybe_closure():
    self.emcc_args.append('-g1')
    self.do_run_in_out_file_test('emscripten_log/emscripten_log_with_closure.cpp', interleaved_output=False)
def test_float_literals(self):
  """Check floating-point literal parsing/printing."""
  testcase = 'test_float_literals.cpp'
  self.do_run_in_out_file_test(testcase)
def test_exit_status(self):
  """exit()/_exit()/_Exit() all propagate an unusual exit status (118), and
  atexit handlers run only for the normal exit() path."""
  self.set_setting('EXIT_RUNTIME')
  create_file('exit.c', r'''
    #include <stdio.h>
    #include <assert.h>
    #include <stdlib.h>
    #include <unistd.h>
    static void cleanup() {
      #ifndef NORMAL_EXIT
      assert(0 && "cleanup should only be called from normal exit()");
      #endif
      printf("cleanup\n");
    }
    int main() {
      atexit(cleanup); // this atexit should still be called
      printf("hello, world!\n");
      // Unusual exit status to make sure it's working!
      #ifdef CAPITAL_EXIT
      _Exit(118);
      #elif defined(UNDER_EXIT)
      _exit(118);
      #elif defined(NORMAL_EXIT)
      exit(118);
      #endif
    }
  ''')
  create_file('pre.js', '''
    Module.onExit = function() {
      out('I see exit status: ' + EXITSTATUS);
    }
  ''')
  self.emcc_args.extend(['--pre-js', 'pre.js'])
  # only the plain exit() variant runs the atexit handler
  variants = [
    ('.. exit', '-DNORMAL_EXIT', 'hello, world!\ncleanup\nI see exit status: 118'),
    ('.. _exit', '-DUNDER_EXIT', 'hello, world!\nI see exit status: 118'),
    ('.. _Exit', '-DCAPITAL_EXIT', 'hello, world!\nI see exit status: 118'),
  ]
  for label, macro, expected in variants:
    print(label)
    self.do_runf('exit.c', expected, assert_returncode=118, emcc_args=[macro])
def test_noexitruntime(self):
  """Setting noExitRuntime (from a global ctor or from main) must prevent
  global destructors from running."""
  src = r'''
    #include <emscripten.h>
    #include <stdio.h>
    static int testPre = TEST_PRE;
    struct Global {
      Global() {
        printf("in Global()\n");
        if (testPre) { EM_ASM(noExitRuntime = true;); }
      }
      ~Global() { printf("ERROR: in ~Global()\n"); }
    } global;
    int main() {
      if (!testPre) { EM_ASM(noExitRuntime = true;); }
      printf("in main()\n");
    }
  '''
  # run both the "set in ctor" and "set in main" flavors
  for test_pre in ('0', '1'):
    self.do_run(src.replace('TEST_PRE', test_pre), 'in Global()\nin main()')
def test_minmax(self):
  """fmin/fmax NaN handling."""
  expected = 'NAN != NAN\nSuccess!'
  self.do_runf(test_file('test_minmax.c'), expected)
def test_localeconv(self):
  """localeconv() behavior in the default locale."""
  testcase = 'core/test_localeconv.c'
  self.do_run_in_out_file_test(testcase)
def test_newlocale(self):
  """newlocale() behavior."""
  testcase = 'core/test_newlocale.c'
  self.do_run_in_out_file_test(testcase)
def test_setlocale(self):
  """setlocale() behavior."""
  testcase = 'core/test_setlocale.c'
  self.do_run_in_out_file_test(testcase)
def test_vswprintf_utf8(self):
  """vswprintf with UTF-8 data."""
  testcase = 'vswprintf_utf8.c'
  self.do_run_in_out_file_test(testcase)
# needs setTimeout which only node has
@require_node
def test_async_hello(self):
  """Basic asyncify: emscripten_sleep yields so an async callback can run."""
  # needs to flush stdio streams
  self.set_setting('EXIT_RUNTIME')
  self.set_setting('ASYNCIFY')

  src = r'''
    #include <stdio.h>
    #include <emscripten.h>
    void f(void *p) {
      *(int*)p = 99;
      printf("!");
    }
    int main() {
      int i = 0;
      printf("Hello");
      emscripten_async_call(f, &i, 1);
      printf("World");
      emscripten_sleep(100);
      printf("%d\n", i);
    }
  '''
  create_file('main.c', src)
  self.do_runf('main.c', 'HelloWorld!99')
@require_node
def test_async_ccall_bad(self):
  """A synchronous ccall into an asyncify'd main must fail with a clear error."""
  # check bad ccall use
  # needs to flush stdio streams
  self.set_setting('EXIT_RUNTIME')
  self.set_setting('ASYNCIFY')
  self.set_setting('ASSERTIONS')
  self.set_setting('INVOKE_RUN', 0)
  create_file('main.c', r'''
    #include <stdio.h>
    #include <emscripten.h>
    int main() {
      printf("Hello");
      emscripten_sleep(100);
      printf("World\n");
    }
  ''')
  create_file('pre.js', '''
    Module['onRuntimeInitialized'] = function() {
      try {
        ccall('main', 'number', ['number', 'string'], [2, 'waka']);
        var never = true;
      } catch(e) {
        out(e);
        assert(!never);
      }
    };
  ''')
  self.emcc_args.extend(['--pre-js', 'pre.js'])
  self.do_runf('main.c', 'The call to main is running asynchronously.')
@require_node
def test_async_ccall_good(self):
  """ccall with { async: true } into an asyncify'd main works."""
  # check reasonable ccall use
  # needs to flush stdio streams
  self.set_setting('EXIT_RUNTIME')
  self.set_setting('ASYNCIFY')
  self.set_setting('ASSERTIONS')
  self.set_setting('INVOKE_RUN', 0)
  create_file('main.c', r'''
    #include <stdio.h>
    #include <emscripten.h>
    int main() {
      printf("Hello");
      emscripten_sleep(100);
      printf("World\n");
    }
  ''')
  create_file('pre.js', '''
    Module['onRuntimeInitialized'] = function() {
      ccall('main', null, ['number', 'string'], [2, 'waka'], { async: true });
    };
  ''')
  self.emcc_args.extend(['--pre-js', 'pre.js'])
  self.do_runf('main.c', 'HelloWorld')
@parameterized({
  '': (False,),
  'exit_runtime': (True,),
})
def test_async_ccall_promise(self, exit_runtime):
  """Async ccalls return promises that chain correctly, with and without
  EXIT_RUNTIME."""
  self.set_setting('ASYNCIFY')
  self.set_setting('ASSERTIONS')
  self.set_setting('INVOKE_RUN', 0)
  # fix: EXIT_RUNTIME was previously set unconditionally and then immediately
  # overwritten with the parameterized value; set it once from the parameter.
  self.set_setting('EXIT_RUNTIME', exit_runtime)
  self.set_setting('EXPORTED_FUNCTIONS', ['_stringf', '_floatf'])
  create_file('main.c', r'''
    #include <stdio.h>
    #include <emscripten.h>
    const char* stringf(char* param) {
      emscripten_sleep(20);
      printf("stringf: %s", param);
      return "second";
    }
    double floatf() {
      emscripten_sleep(20);
      emscripten_sleep(20);
      return 6.4;
    }
  ''')
  # chain two async ccalls, keeping the runtime alive until both resolve
  create_file('pre.js', r'''
    Module['onRuntimeInitialized'] = function() {
      runtimeKeepalivePush();
      ccall('stringf', 'string', ['string'], ['first\n'], { async: true })
        .then(function(val) {
          console.log(val);
          ccall('floatf', 'number', null, null, { async: true }).then(function(arg) {
            console.log(arg);
            runtimeKeepalivePop();
            maybeExit();
          });
        });
    };
  ''')
  self.emcc_args += ['--pre-js', 'pre.js']
  self.do_runf('main.c', 'stringf: first\nsecond\n6.4')
def test_fibers_asyncify(self):
  """Fibers implemented on top of asyncify."""
  self.set_setting('ASYNCIFY')
  self.maybe_closure()
  expected = '*leaf-0-100-1-101-1-102-2-103-3-104-5-105-8-106-13-107-21-108-34-109-*'
  self.do_runf(test_file('test_fibers.cpp'), expected)
def test_asyncify_unused(self):
  """ASYNCIFY enabled but unused by the program must still work."""
  # test a program not using asyncify, but the pref is set
  self.set_setting('ASYNCIFY')
  self.do_core_test('test_hello_world.c')
@parameterized({
  'normal': ([], True),
  'removelist_a': (['-s', 'ASYNCIFY_REMOVE=["foo(int, double)"]'], False),
  'removelist_b': (['-s', 'ASYNCIFY_REMOVE=["bar()"]'], True),
  'removelist_c': (['-s', 'ASYNCIFY_REMOVE=["baz()"]'], False),
  'onlylist_a': (['-s', 'ASYNCIFY_ONLY=["main","__original_main","foo(int, double)","baz()","c_baz","Structy::funcy()","bar()"]'], True),
  'onlylist_b': (['-s', 'ASYNCIFY_ONLY=["main","__original_main","foo(int, double)","baz()","c_baz","Structy::funcy()"]'], True),
  'onlylist_c': (['-s', 'ASYNCIFY_ONLY=["main","__original_main","foo(int, double)","baz()","c_baz"]'], False),
  'onlylist_d': (['-s', 'ASYNCIFY_ONLY=["foo(int, double)","baz()","c_baz","Structy::funcy()"]'], False, None, True),
  'onlylist_b_response': ([], True, '["main","__original_main","foo(int, double)","baz()","c_baz","Structy::funcy()"]'),
  'onlylist_c_response': ([], False, '["main","__original_main","foo(int, double)","baz()","c_baz"]'),
})
def test_asyncify_lists(self, args, should_pass, response=None, no_san=False):
  """Exercise ASYNCIFY_REMOVE/ASYNCIFY_ONLY lists (inline and via @response
  file); `should_pass` says whether the instrumented program can still
  unwind, otherwise a runtime error is expected."""
  if no_san and is_sanitizing(self.emcc_args):
    self.skipTest('remaining asyncify+sanitizer TODO')
  if response is not None:
    # pass the list through a response file instead of the command line
    create_file('response.file', response)
    self.set_setting('ASYNCIFY_ONLY', '@response.file')
  self.set_setting('ASYNCIFY')
  self.emcc_args += args
  if should_pass:
    self.do_core_test('test_asyncify_lists.cpp', assert_identical=True)
  else:
    self.do_runf(test_file('core/test_asyncify_lists.cpp'), ('RuntimeError', 'Thrown at'), assert_returncode=NON_ZERO)

  # use of ASYNCIFY_* options may require intermediate debug info. that should
  # not end up emitted in the final binary
  # (note that we can't check this if sanitizers run, as they include a lot of
  # debug info of their own)
  if self.is_wasm() and not is_sanitizing(self.emcc_args):
    binary = read_binary('test_asyncify_lists.wasm')
    # there should be no name section in the final binary
    self.assertFalse(b'name' in binary)
    if '-O3' in self.emcc_args:
      self.assertFalse(b'main' in binary)
@parameterized({
  'normal': ([], True),
  'ignoreindirect': (['-s', 'ASYNCIFY_IGNORE_INDIRECT'], False),
  'add': (['-s', 'ASYNCIFY_IGNORE_INDIRECT', '-s', 'ASYNCIFY_ADD=["__original_main","main","virt()"]'], True),
})
def test_asyncify_indirect_lists(self, args, should_pass):
  """ASYNCIFY_IGNORE_INDIRECT breaks indirect-call unwinding unless the
  involved functions are added back via ASYNCIFY_ADD."""
  self.set_setting('ASYNCIFY')
  self.emcc_args += args
  try:
    self.do_core_test('test_asyncify_indirect_lists.cpp', assert_identical=True)
    if not should_pass:
      # the test unexpectedly succeeded; flip the flag so the except
      # clause below re-raises this sentinel exception
      should_pass = True
      raise Exception('should not have passed')
  except Exception:
    # an expected failure is swallowed; anything else propagates
    if should_pass:
      raise
@no_asan('asyncify stack operations confuse asan')
def test_emscripten_scan_registers(self):
  """emscripten_scan_registers() works under asyncify."""
  self.set_setting('ASYNCIFY')
  self.do_core_test('test_emscripten_scan_registers.cpp')
def test_asyncify_assertions(self):
  """Asyncify assertion diagnostics fire when an import suspends improperly."""
  self.set_setting('ASYNCIFY')
  self.set_setting('ASYNCIFY_IMPORTS', ['suspend'])
  self.set_setting('ASSERTIONS')
  self.do_core_test('test_asyncify_assertions.c', assert_returncode=NON_ZERO)
@no_lsan('leaks asyncify stack during exit')
@no_asan('leaks asyncify stack during exit')
def test_asyncify_during_exit(self):
  """Suspending during runtime exit is an error; the -DNO_ASYNC variant exits
  cleanly."""
  self.set_setting('ASYNCIFY')
  self.set_setting('ASSERTIONS')
  self.set_setting('EXIT_RUNTIME', 1)
  self.do_core_test('test_asyncify_during_exit.cpp', assert_returncode=NON_ZERO)
  print('NO_ASYNC')
  self.do_core_test('test_asyncify_during_exit.cpp', emcc_args=['-DNO_ASYNC'], out_suffix='_no_async')
@no_asan('asyncify stack operations confuse asan')
@no_wasm2js('dynamic linking support in wasm2js')
def test_asyncify_main_module(self):
  """Asyncify composed with dynamic linking (MAIN_MODULE=2)."""
  self.set_setting('ASYNCIFY', 1)
  self.set_setting('MAIN_MODULE', 2)
  self.do_core_test('test_hello_world.c')
@no_asan('asyncify stack operations confuse asan')
@no_wasm2js('TODO: lazy loading in wasm2js')
@parameterized({
'conditional': (True,),
'unconditional': (False,),
})
def test_emscripten_lazy_load_code(self, conditional):
self.set_setting('ASYNCIFY_LAZY_LOAD_CODE')
self.set_setting('ASYNCIFY_IGNORE_INDIRECT')
self.set_setting('MALLOC', 'emmalloc')
self.emcc_args += ['--profiling-funcs']
if conditional:
self.emcc_args += ['-DCONDITIONAL']
self.do_core_test('emscripten_lazy_load_code.cpp', args=['0'])
first_size = os.path.getsize('emscripten_lazy_load_code.wasm')
second_size = os.path.getsize('emscripten_lazy_load_code.wasm.lazy.wasm')
print('first wasm size', first_size)
print('second wasm size', second_size)
if not conditional and self.is_optimizing() and '-g' not in self.emcc_args:
self.assertLess(first_size, 0.6 * second_size)
wasm1 = read_binary('emscripten_lazy_load_code.wasm')
wasm2 = read_binary('emscripten_lazy_load_code.wasm.lazy.wasm')
self.assertNotEqual(wasm1, wasm2)
def break_wasm(name):
wat = self.run_process([Path(building.get_binaryen_bin(), 'wasm-dis'), name], stdout=PIPE).stdout
lines = wat.splitlines()
wat = None
for i in range(len(lines)):
if '(func $foo_end ' in lines[i]:
j = i + 1
while '(local ' in lines[j]:
j += 1
lines[j] = '(unreachable)' + lines[j]
wat = '\n'.join(lines)
break
if wat is None:
shutil.copyfile(name, name + '.orig')
return False
with open('wat.wat', 'w') as f:
f.write(wat)
shutil.move(name, name + '.orig')
self.run_process([Path(building.get_binaryen_bin(), 'wasm-as'), 'wat.wat', '-o', name, '-g'])
return True
def verify_working(args=['0']):
self.assertContained('foo_end\n', self.run_js('emscripten_lazy_load_code.js', args=args))
def verify_broken(args=['0']):
self.assertNotContained('foo_end\n', self.run_js('emscripten_lazy_load_code.js', args=args, assert_returncode=NON_ZERO))
found_foo_end = break_wasm('emscripten_lazy_load_code.wasm')
if not conditional and self.is_optimizing():
self.assertFalse(found_foo_end, 'should have optimizd out $foo_end')
verify_working()
break_wasm('emscripten_lazy_load_code.wasm.lazy.wasm')
verify_broken()
shutil.copyfile('emscripten_lazy_load_code.wasm.orig', 'emscripten_lazy_load_code.wasm')
shutil.copyfile('emscripten_lazy_load_code.wasm.lazy.wasm.orig', 'emscripten_lazy_load_code.wasm.lazy.wasm')
verify_working()
if conditional:
os.remove('emscripten_lazy_load_code.wasm.lazy.wasm')
verify_broken()
verify_working(['42'])
break_wasm('emscripten_lazy_load_code.wasm')
verify_broken()
@no_asan('no wasm2js support yet in asan')
def test_wasm2js(self):
  """Build with WASM=0 (wasm2js) and check the memory init file handling."""
  if not self.is_wasm():
    self.skipTest('redundant to test wasm2js in wasm2js* mode')
  self.set_setting('WASM', 0)
  self.do_core_test('test_hello_world.c')
  expect_memory_init_file = self.uses_memory_init_file()
  if expect_memory_init_file:
    self.assertExists('test_hello_world.js.mem')
    mem = read_binary('test_hello_world.js.mem')
    # fix: indexing bytes yields an int, so the old `mem[-1] != b'\0'`
    # comparison was always True (vacuous); compare against the integer 0.
    # the mem file should be trimmed and not end with a zero byte.
    self.assertNotEqual(mem[-1], 0)
  else:
    self.assertNotExists('test_hello_world.js.mem')
@no_asan('no wasm2js support yet in asan')
def test_maybe_wasm2js(self):
  """MAYBE_WASM2JS: run the maybe_wasm2js tool and verify the JS fallback
  runs after the wasm file is removed."""
  if not self.is_wasm():
    self.skipTest('redundant to test wasm2js in wasm2js* mode')
  self.set_setting('MAYBE_WASM2JS')
  self.do_core_test('test_hello_world.c')
  cmd = [PYTHON, path_from_root('tools/maybe_wasm2js.py'), 'test_hello_world.js', 'test_hello_world.wasm']
  if self.is_optimizing():
    cmd += ['-O2']
  # fix: close the output file deterministically instead of leaking the
  # handle, and drop the dead `.stdout` access on the return value
  with open('do_wasm2js.js', 'w') as f:
    self.run_process(cmd, stdout=f)
  # a second run without the wasm file, to check the JS fallback
  os.remove('test_hello_world.wasm')
  self.assertContained('hello, world!', self.run_js('do_wasm2js.js'))
@no_asan('no wasm2js support yet in asan')
@parameterized({
  '': ([],),
  'minimal_runtime': (['-s', 'MINIMAL_RUNTIME'],),
})
def test_wasm2js_fallback(self, args):
  """WASM=2 emits both wasm and a wasm2js fallback; each must run alone."""
  if not self.is_wasm():
    self.skipTest('redundant to test wasm2js in wasm2js* mode')

  cmd = [EMCC, test_file('small_hello_world.c'), '-s', 'WASM=2'] + args
  self.run_process(cmd)

  # First run with WebAssembly support enabled
  # Move the Wasm2js fallback away to test it is not accidentally getting loaded.
  os.rename('a.out.wasm.js', 'a.out.wasm.js.unused')
  self.assertContained('hello!', self.run_js('a.out.js'))
  os.rename('a.out.wasm.js.unused', 'a.out.wasm.js')

  # Then disable WebAssembly support in VM, and try again.. Should still work with Wasm2JS fallback.
  # fix: write via create_file (consistent with the rest of the file) rather
  # than an unclosed open().write() that leaks the file handle
  create_file('b.out.js', 'WebAssembly = undefined;\n' + read_file('a.out.js'))
  os.remove('a.out.wasm') # make sure the binary is not used by the fallback
  self.assertContained('hello!', self.run_js('b.out.js'))
def test_cxx_self_assign(self):
  """Self-assignment of a std::map must not corrupt it."""
  src = r'''
    #include <map>
    #include <stdio.h>
    int main() {
      std::map<int, int> m;
      m[0] = 1;
      m = m;
      // size should still be one after self assignment
      if (m.size() == 1) {
        printf("ok.\n");
      }
    }
  '''
  self.do_run(src, 'ok.')
def test_memprof_requirements(self):
  """The symbols the memory profiler depends on must all be exported."""
  create_file('main.cpp', '''
    extern "C" {
      void check_memprof_requirements();
    }
    int main() {
      check_memprof_requirements();
      return 0;
    }
  ''')
  create_file('lib.js', '''
    mergeInto(LibraryManager.library, {
      check_memprof_requirements: function() {
        if (typeof _emscripten_stack_get_base === 'function' &&
            typeof _emscripten_stack_get_end === 'function' &&
            typeof _emscripten_stack_get_current === 'function' &&
            typeof Module['___heap_base'] === 'number') {
          out('able to run memprof');
        } else {
          out('missing the required variables to run memprof');
        }
      }
    });
  ''')
  self.emcc_args.extend(['--memoryprofiler', '--js-library', 'lib.js'])
  self.do_runf('main.cpp', 'able to run memprof')
def test_fs_dict(self):
  """With -lidbfs.js/-lnodefs.js all three filesystems are available both
  via FS.filesystems and as globals."""
  self.set_setting('FORCE_FILESYSTEM')
  self.emcc_args.extend(['-lidbfs.js', '-lnodefs.js'])
  create_file('pre.js', '''
    Module = {};
    Module['preRun'] = function() {
      out(typeof FS.filesystems['MEMFS']);
      out(typeof FS.filesystems['IDBFS']);
      out(typeof FS.filesystems['NODEFS']);
      // Globals
      console.log(typeof MEMFS);
      console.log(typeof IDBFS);
      console.log(typeof NODEFS);
    };
  ''')
  self.emcc_args.extend(['--pre-js', 'pre.js'])
  self.do_run('int main() { return 0; }', 'object\nobject\nobject\nobject\nobject\nobject')
def test_fs_dict_none(self):
  """Without -lidbfs.js/-lnodefs.js, IDBFS/NODEFS are absent and mounting
  them produces the "no longer included by default" error message."""
  self.set_setting('FORCE_FILESYSTEM')
  self.set_setting('ASSERTIONS')
  create_file('pre.js', '''
    Module = {};
    Module['preRun'] = function() {
      out(typeof FS.filesystems['MEMFS']);
      out(typeof FS.filesystems['IDBFS']);
      out(typeof FS.filesystems['NODEFS']);
      // Globals
      if (ASSERTIONS) {
        console.log(typeof MEMFS);
        console.log(IDBFS);
        console.log(NODEFS);
        FS.mkdir('/working1');
        try {
          FS.mount(IDBFS, {}, '/working1');
        } catch (e) {
          console.log('|' + e + '|');
        }
      }
    };
  ''')
  self.emcc_args += ['--pre-js', 'pre.js']
  # note: the expected text must match exactly, including the error strings
  expected = '''\
object
undefined
undefined
object
IDBFS is no longer included by default; build with -lidbfs.js
NODEFS is no longer included by default; build with -lnodefs.js
|IDBFS is no longer included by default; build with -lidbfs.js|'''
  self.do_run('int main() { return 0; }', expected)
def test_stack_overflow_check(self):
  """Stack overflow is detected under STACK_OVERFLOW_CHECK=2 and under
  ASSERTIONS=2 alone."""
  self.set_setting('TOTAL_STACK', 1048576)
  self.set_setting('STACK_OVERFLOW_CHECK', 2)
  self.do_runf(test_file('stack_overflow.cpp'), 'stack overflow', assert_returncode=NON_ZERO)
  # also overflow via one huge allocation rather than many small ones
  self.emcc_args.append('-DONE_BIG_STRING')
  self.do_runf(test_file('stack_overflow.cpp'), 'stack overflow', assert_returncode=NON_ZERO)
  # ASSERTIONS=2 implies the check even when the setting itself is cleared
  self.clear_setting('STACK_OVERFLOW_CHECK')
  self.set_setting('ASSERTIONS', 2)
  self.do_runf(test_file('stack_overflow.cpp'), 'stack overflow', assert_returncode=NON_ZERO)
@node_pthreads
def test_binaryen_2170_emscripten_atomic_cas_u8(self):
  """Regression test for binaryen#2170: 8-bit atomic compare-and-swap."""
  self.set_setting('USE_PTHREADS')
  self.do_run_in_out_file_test('binaryen_2170_emscripten_atomic_cas_u8.cpp')
@also_with_standalone_wasm()
def test_sbrk(self):
  """sbrk() grows the heap correctly (also in standalone wasm mode)."""
  testcase = test_file('sbrk_brk.cpp')
  self.do_runf(testcase, 'OK.')
def test_brk(self):
  """brk() variant of the sbrk test (enabled via -DTEST_BRK)."""
  self.emcc_args.append('-DTEST_BRK=1')
  self.do_runf(test_file('sbrk_brk.cpp'), 'OK.')
@no_asan('mallinfo is not part of ASan malloc')
def test_mallinfo(self):
  """mallinfo() reports sensible allocator statistics."""
  testcase = test_file('mallinfo.cpp')
  self.do_runf(testcase, 'OK.')
@no_asan('cannot replace malloc/free with ASan')
def test_wrap_malloc(self):
  """User-provided malloc/free wrappers replace the defaults."""
  testcase = test_file('wrap_malloc.cpp')
  self.do_runf(testcase, 'OK.')
def test_environment(self):
  """The ENVIRONMENT setting gates which engines a build can run under.

  For each available engine: build targeting it (must run), build targeting
  the other environment (must fail with a clear error), then build
  targeting both (must run again).
  """
  self.set_setting('ASSERTIONS')

  def test(assert_returncode=0):
    # build + run, then sanity-check that require() appears iff node targeted
    self.do_core_test('test_hello_world.c', assert_returncode=assert_returncode)
    js = read_file('test_hello_world.js')
    assert ('require(' in js) == ('node' in self.get_setting('ENVIRONMENT')), 'we should have require() calls only if node js specified'

  for engine in config.JS_ENGINES:
    print(engine)
    # set us to test in just this engine
    self.banned_js_engines = [e for e in config.JS_ENGINES if e != engine]
    # tell the compiler to build with just that engine
    if engine == config.NODE_JS:
      right = 'node'
      wrong = 'shell'
    else:
      right = 'shell'
      wrong = 'node'
    # test with the right env
    self.set_setting('ENVIRONMENT', right)
    print('ENVIRONMENT =', self.get_setting('ENVIRONMENT'))
    test()
    # test with the wrong env
    self.set_setting('ENVIRONMENT', wrong)
    print('ENVIRONMENT =', self.get_setting('ENVIRONMENT'))
    try:
      test(assert_returncode=NON_ZERO)
      raise Exception('unexpected success')
    except Exception as e:
      self.assertContained('not compiled for this environment', str(e))
    # test with a combined env
    self.set_setting('ENVIRONMENT', right + ',' + wrong)
    print('ENVIRONMENT =', self.get_setting('ENVIRONMENT'))
    test()
def test_postrun_exception(self):
  """An exception thrown from a postRun hook surfaces after main's output."""
  # verify that an exception thrown in postRun() will not trigger the
  # compilation failed handler, and will be printed to stderr.
  self.set_setting('EXIT_RUNTIME', 0)
  self.add_post_run('ThisFunctionDoesNotExist()')
  self.build(test_file('core/test_hello_world.c'))
  out = self.run_js('test_hello_world.js', assert_returncode=NON_ZERO)
  self.assertStartswith(out, 'hello, world!')
  self.assertContained('ThisFunctionDoesNotExist is not defined', out)
def test_no_declare_asm_module_exports(self):
  """With DECLARE_ASM_MODULE_EXPORTS=0 the export name appears only the
  minimal number of times in optimized output."""
  self.set_setting('DECLARE_ASM_MODULE_EXPORTS', 0)
  self.set_setting('WASM_ASYNC_COMPILATION', 0)
  self.maybe_closure()
  self.do_runf(test_file('declare_asm_module_exports.cpp'), 'jsFunction: 1')
  occurrences = read_file('declare_asm_module_exports.js').count('cFunction')
  if not self.is_optimizing() or '-g' in self.emcc_args:
    # unoptimized/debug builds make no guarantee; just report the count
    print(occurrences)
  elif self.is_wasm():
    self.assertEqual(occurrences, 1)
  else:
    self.assertEqual(occurrences, 2)
@no_asan('TODO: ASan support in minimal runtime')
def test_minimal_runtime_no_declare_asm_module_exports(self):
  """DECLARE_ASM_MODULE_EXPORTS=0 combined with MINIMAL_RUNTIME."""
  self.set_setting('MINIMAL_RUNTIME')
  self.set_setting('DECLARE_ASM_MODULE_EXPORTS', 0)
  self.set_setting('WASM_ASYNC_COMPILATION', 0)
  self.maybe_closure()
  self.do_runf(test_file('declare_asm_module_exports.cpp'), 'jsFunction: 1')
@parameterized({
  'default': ([],),
  'streaming': (['-s', 'MINIMAL_RUNTIME_STREAMING_WASM_COMPILATION'],),
  'streaming_inst': (['-s', 'MINIMAL_RUNTIME_STREAMING_WASM_INSTANTIATION'],),
  'no_export': (['-s', 'DECLARE_ASM_MODULE_EXPORTS=0'],)
})
def test_minimal_runtime_hello_world(self, args):
  """MINIMAL_RUNTIME hello world in its various streaming/export flavors."""
  # TODO: Support for non-Node.js shells has not yet been added to MINIMAL_RUNTIME
  self.banned_js_engines = [config.V8_ENGINE, config.SPIDERMONKEY_ENGINE]
  # deliberately REPLACE the arg list for this test, not extend it
  self.emcc_args = args
  self.set_setting('MINIMAL_RUNTIME')
  self.maybe_closure()
  self.do_runf(test_file('small_hello_world.c'), 'hello')
@parameterized({
  'fs': ('FORCE_FILESYSTEM',),
  'nofs': ('NO_FILESYSTEM',),
})
@no_asan('TODO: ASan support in minimal runtime')
def test_minimal_runtime_hello_printf(self, extra_setting):
  """printf under MINIMAL_RUNTIME, with and without the filesystem."""
  self.set_setting('MINIMAL_RUNTIME')
  self.set_setting(extra_setting)
  # leak detection and closure do not play well with FORCE_FILESYSTEM here
  if '-fsanitize=leak' not in self.emcc_args and extra_setting != 'FORCE_FILESYSTEM':
    self.maybe_closure()
  self.do_runf(test_file('hello_world.c'), 'hello, world!')
@no_asan('TODO: ASan support in minimal runtime')
def test_minimal_runtime_safe_heap(self):
  """SAFE_HEAP under MINIMAL_RUNTIME."""
  self.set_setting('MINIMAL_RUNTIME')
  self.set_setting('SAFE_HEAP')
  if '-fsanitize=leak' not in self.emcc_args:
    self.maybe_closure()
  self.do_runf(test_file('small_hello_world.c'), 'hello')
@no_asan('TODO: ASan support in minimal runtime')
def test_minimal_runtime_global_initializer(self):
  """C++ global constructors run before main under MINIMAL_RUNTIME."""
  self.set_setting('MINIMAL_RUNTIME')
  self.maybe_closure()
  self.do_runf(test_file('test_global_initializer.cpp'), 't1 > t0: 1')
@no_wasm2js('wasm2js does not support PROXY_TO_PTHREAD (custom section support)')
def test_return_address(self):
  """__builtin_return_address works with USE_OFFSET_CONVERTER."""
  self.set_setting('USE_OFFSET_CONVERTER')
  self.do_runf(test_file('core/test_return_address.c'), 'passed')
@no_wasm2js('TODO: sanitizers in wasm2js')
@no_asan('-fsanitize-minimal-runtime cannot be used with ASan')
def test_ubsan_minimal_too_many_errors(self):
  """The minimal UBSan runtime caps its report count at 20 errors."""
  self.emcc_args.extend(['-fsanitize=undefined', '-fsanitize-minimal-runtime'])
  if not self.is_wasm():
    if self.is_optimizing():
      self.skipTest('test can only be run without optimizations on asm.js')
    # Need to use `-g` to get proper line numbers in asm.js
    self.emcc_args.append('-g')
  expected = 'ubsan: add-overflow\n' * 20 + 'ubsan: too many errors\n'
  self.do_runf(test_file('core/test_ubsan_minimal_too_many_errors.c'),
               expected_output=expected)
@no_wasm2js('TODO: sanitizers in wasm2js')
@no_asan('-fsanitize-minimal-runtime cannot be used with ASan')
def test_ubsan_minimal_errors_same_place(self):
  """The minimal UBSan runtime reports a repeated error site each time."""
  self.emcc_args.extend(['-fsanitize=undefined', '-fsanitize-minimal-runtime'])
  if not self.is_wasm():
    if self.is_optimizing():
      self.skipTest('test can only be run without optimizations on asm.js')
    # Need to use `-g` to get proper line numbers in asm.js
    self.emcc_args.append('-g')
  expected = 'ubsan: add-overflow\n' * 5
  self.do_runf(test_file('core/test_ubsan_minimal_errors_same_place.c'),
               expected_output=expected)
@parameterized({
'fsanitize_undefined': (['-fsanitize=undefined'],),
'fsanitize_integer': (['-fsanitize=integer'],),
'fsanitize_overflow': (['-fsanitize=signed-integer-overflow'],),
})
@no_wasm2js('TODO: sanitizers in wasm2js')
def test_ubsan_full_overflow(self, args):
self.emcc_args += args
self.do_runf(test_file('core/test_ubsan_full_overflow.c'),
assert_all=True, expected_output=[
".c:3:5: runtime error: signed integer overflow: 2147483647 + 1 cannot be represented in type 'int'",
".c:7:7: runtime error: signed integer overflow: 2147483647 + 1 cannot be represented in type 'int'",
])
@parameterized({
'fsanitize_undefined': (['-fsanitize=undefined'],),
'fsanitize_return': (['-fsanitize=return'],),
})
@no_wasm2js('TODO: sanitizers in wasm2js')
def test_ubsan_full_no_return(self, args):
self.emcc_args += ['-Wno-return-type'] + args
self.do_runf(test_file('core/test_ubsan_full_no_return.cpp'),
expected_output='.cpp:1:5: runtime error: execution reached the end of a value-returning function without returning a value', assert_returncode=NON_ZERO)
@parameterized({
'fsanitize_undefined': (['-fsanitize=undefined'],),
'fsanitize_integer': (['-fsanitize=integer'],),
'fsanitize_shift': (['-fsanitize=shift'],),
})
@no_wasm2js('TODO: sanitizers in wasm2js')
def test_ubsan_full_left_shift(self, args):
self.emcc_args += args
self.do_runf(test_file('core/test_ubsan_full_left_shift.c'),
assert_all=True, expected_output=[
'.c:3:5: runtime error: left shift of negative value -1',
".c:7:5: runtime error: left shift of 16 by 29 places cannot be represented in type 'int'"
])
@parameterized({
'fsanitize_undefined': (['-fsanitize=undefined'],),
'fsanitize_null': (['-fsanitize=null'],),
})
@no_wasm2js('TODO: sanitizers in wasm2js')
def test_ubsan_full_null_ref(self, args):
self.emcc_args += args
self.do_runf(test_file('core/test_ubsan_full_null_ref.cpp'),
assert_all=True, expected_output=[
".cpp:3:12: runtime error: reference binding to null pointer of type 'int'",
".cpp:4:13: runtime error: reference binding to null pointer of type 'int'",
".cpp:5:14: runtime error: reference binding to null pointer of type 'int'",
])
@parameterized({
'fsanitize_undefined': (['-fsanitize=undefined'],),
'fsanitize_vptr': (['-fsanitize=vptr'],),
})
@no_wasm2js('TODO: sanitizers in wasm2js')
def test_ubsan_full_static_cast(self, args):
self.emcc_args += args
self.do_runf(test_file('core/test_ubsan_full_static_cast.cpp'),
assert_all=True, expected_output=[
".cpp:18:10: runtime error: downcast of address",
"which does not point to an object of type 'R'",
])
@parameterized({
'g': ('-g', [
".cpp:3:12: runtime error: reference binding to null pointer of type 'int'",
'in main',
]),
'g4': ('-gsource-map', [
".cpp:3:12: runtime error: reference binding to null pointer of type 'int'",
'in main ',
'.cpp:3:8'
]),
})
@no_wasm2js('TODO: sanitizers in wasm2js')
def test_ubsan_full_stack_trace(self, g_flag, expected_output):
if g_flag == '-gsource-map':
if not self.is_wasm():
self.skipTest('wasm2js has no source map support')
elif '-Oz' in self.emcc_args:
self.skipTest('-Oz breaks stack traces')
create_file('pre.js', 'Module = {UBSAN_OPTIONS: "print_stacktrace=1"};')
self.emcc_args += ['-fsanitize=null', g_flag, '--pre-js=pre.js']
self.set_setting('ALLOW_MEMORY_GROWTH')
self.do_runf(test_file('core/test_ubsan_full_null_ref.cpp'),
assert_all=True, expected_output=expected_output)
@no_wasm2js('TODO: sanitizers in wasm2js')
def test_ubsan_typeinfo_eq(self):
src = r'''
#include <typeinfo>
#include <stdio.h>
int main() {
int mismatch = typeid(int) != typeid(int);
printf("ok\n");
return mismatch;
}
'''
self.emcc_args.append('-fsanitize=undefined')
self.do_run(src, 'ok\n')
def test_template_class_deduction(self):
self.emcc_args += ['-std=c++17']
self.do_core_test('test_template_class_deduction.cpp')
@no_wasm2js('TODO: ASAN in wasm2js')
@no_safe_heap('asan does not work with SAFE_HEAP')
@parameterized({
'c': ['test_asan_no_error.c'],
'cpp': ['test_asan_no_error.cpp'],
})
def test_asan_no_error(self, name):
self.emcc_args.append('-fsanitize=address')
self.set_setting('ALLOW_MEMORY_GROWTH')
self.set_setting('INITIAL_MEMORY', '300mb')
self.do_runf(test_file('core', name), '', assert_returncode=NON_ZERO)
@no_safe_heap('asan does not work with SAFE_HEAP')
@parameterized({
'use_after_free_c': ('test_asan_use_after_free.c', [
'AddressSanitizer: heap-use-after-free on address',
]),
'use_after_free_cpp': ('test_asan_use_after_free.cpp', [
'AddressSanitizer: heap-use-after-free on address',
]),
'use_after_return': ('test_asan_use_after_return.c', [
'AddressSanitizer: stack-use-after-return on address',
], ['-Wno-return-stack-address']),
'static_buffer_overflow': ('test_asan_static_buffer_overflow.c', [
'AddressSanitizer: global-buffer-overflow on address',
], ['-fno-builtin-memset']),
'heap_buffer_overflow_c': ('test_asan_heap_buffer_overflow.c', [
'AddressSanitizer: heap-buffer-overflow on address',
], ['-fno-builtin-memset']),
'heap_buffer_overflow_cpp': ('test_asan_heap_buffer_overflow.cpp', [
'AddressSanitizer: heap-buffer-overflow on address',
], ['-fno-builtin-memset']),
'stack_buffer_overflow': ('test_asan_stack_buffer_overflow.c', [
'AddressSanitizer: stack-buffer-overflow'
], ['-fno-builtin-memset']),
'stack_buffer_overflow_js': ('test_asan_stack_buffer_overflow_js.c', [
'AddressSanitizer: stack-buffer-overflow'
], ['-fno-builtin-memset']),
'bitfield_unround_size': ('test_asan_bitfield_unround_size.c', [
'AddressSanitizer: stack-buffer-overflow'
], ['-fno-builtin-memset']),
'bitfield_unround_offset': ('test_asan_bitfield_unround_offset.c', [
'AddressSanitizer: stack-buffer-overflow'
], ['-fno-builtin-memset']),
'bitfield_round': ('test_asan_bitfield_round.c', [
'AddressSanitizer: stack-buffer-overflow'
], ['-fno-builtin-memset']),
'memset_null': ('test_asan_memset_null.c', [
'AddressSanitizer: null-pointer-dereference on address 0x00000001'
], ['-fno-builtin-memset']),
'memset_freed': ('test_asan_memset_freed.c', [
'AddressSanitizer: heap-use-after-free on address'
], ['-fno-builtin-memset']),
'strcpy': ('test_asan_strcpy.c', [
'AddressSanitizer: heap-buffer-overflow on address'
], ['-fno-builtin-strcpy']),
'memcpy': ('test_asan_memcpy.c', [
'AddressSanitizer: heap-buffer-overflow on address'
], ['-fno-builtin-memcpy']),
'memchr': ('test_asan_memchr.c', [
'AddressSanitizer: global-buffer-overflow on address'
], ['-fno-builtin-memchr']),
'vector': ('test_asan_vector.cpp', [
'AddressSanitizer: container-overflow on address'
]),
})
def test_asan(self, name, expected_output, cflags=None):
if '-Oz' in self.emcc_args:
self.skipTest('-Oz breaks source maps')
if not self.is_wasm():
self.skipTest('wasm2js has no ASan support')
self.emcc_args.append('-fsanitize=address')
self.set_setting('ALLOW_MEMORY_GROWTH')
self.set_setting('INITIAL_MEMORY', '300mb')
if cflags:
self.emcc_args += cflags
self.do_runf(test_file('core', name),
expected_output=expected_output, assert_all=True,
check_for_error=False, assert_returncode=NON_ZERO)
@no_safe_heap('asan does not work with SAFE_HEAP')
@no_wasm2js('TODO: ASAN in wasm2js')
def test_asan_js_stack_op(self):
self.emcc_args.append('-fsanitize=address')
self.set_setting('ALLOW_MEMORY_GROWTH')
self.set_setting('INITIAL_MEMORY', '300mb')
self.do_runf(test_file('core/test_asan_js_stack_op.c'),
expected_output='Hello, World!')
@no_safe_heap('asan does not work with SAFE_HEAP')
@no_wasm2js('TODO: ASAN in wasm2js')
def test_asan_api(self):
self.emcc_args.append('-fsanitize=address')
self.set_setting('INITIAL_MEMORY', '300mb')
self.do_core_test('test_asan_api.c')
@no_safe_heap('asan does not work with SAFE_HEAP')
@no_wasm2js('TODO: ASAN in wasm2js')
def test_asan_modularized_with_closure(self):
create_file('post.js', 'if (!(createModule() instanceof Promise)) throw "Promise was not returned :(";\n')
self.emcc_args += ['-fsanitize=address', '--extern-post-js=post.js']
self.set_setting('MODULARIZE')
self.set_setting('EXPORT_NAME', 'createModule')
self.set_setting('USE_CLOSURE_COMPILER')
self.set_setting('ALLOW_MEMORY_GROWTH')
self.set_setting('INITIAL_MEMORY', '300mb')
self.do_runf(test_file('hello_world.c'), expected_output='hello, world!')
@no_asan('SAFE_HEAP cannot be used with ASan')
def test_safe_heap_user_js(self):
self.set_setting('SAFE_HEAP')
self.do_runf(test_file('core/test_safe_heap_user_js.c'),
expected_output=['Aborted(segmentation fault storing 1 bytes to address 0)'], assert_returncode=NON_ZERO)
def test_safe_stack(self):
self.set_setting('STACK_OVERFLOW_CHECK', 2)
self.set_setting('TOTAL_STACK', 65536)
if self.is_optimizing():
expected = ['Aborted(stack overflow)']
else:
expected = ['Aborted(stack overflow)', '__handle_stack_overflow']
self.do_runf(test_file('core/test_safe_stack.c'),
expected_output=expected,
assert_returncode=NON_ZERO, assert_all=True)
@node_pthreads
def test_safe_stack_pthread(self):
self.set_setting('STACK_OVERFLOW_CHECK', 2)
self.set_setting('TOTAL_STACK', 65536)
self.set_setting('PROXY_TO_PTHREAD')
self.set_setting('USE_PTHREADS')
if self.is_optimizing():
expected = ['Aborted(stack overflow)']
else:
expected = ['Aborted(stack overflow)', '__handle_stack_overflow']
self.do_runf(test_file('core/test_safe_stack.c'),
expected_output=expected,
assert_returncode=NON_ZERO, assert_all=True)
def test_safe_stack_alloca(self):
self.set_setting('STACK_OVERFLOW_CHECK', 2)
self.set_setting('TOTAL_STACK', 65536)
if self.is_optimizing():
expected = ['Aborted(stack overflow)']
else:
expected = ['Aborted(stack overflow)', '__handle_stack_overflow']
self.do_runf(test_file('core/test_safe_stack_alloca.c'),
expected_output=expected,
assert_returncode=NON_ZERO, assert_all=True)
@needs_dylink
def test_safe_stack_dylink(self):
self.set_setting('STACK_OVERFLOW_CHECK', 2)
self.set_setting('TOTAL_STACK', 65536)
self.dylink_test(r'''
#include <stdio.h>
extern void sidey();
int main() {
sidey();
}
''', '''
#include <string.h>
static long accumulator = 0;
int f(int *b) {
// Infinite recursion while recording stack pointer locations
// so that compiler can't eliminate the stack allocs.
accumulator += (long)b;
int a[1024];
return f(a);
}
void sidey() {
f(NULL);
}
''', ['Aborted(stack overflow)', '__handle_stack_overflow'], assert_returncode=NON_ZERO, force_c=True)
def test_fpic_static(self):
self.emcc_args.append('-fPIC')
self.do_core_test('test_hello_world.c')
@node_pthreads
def test_pthread_create(self):
self.set_setting('EXIT_RUNTIME')
# test that the node environment can be specified by itself, and that still
# works with pthreads (even though we did not specify 'node,worker')
self.set_setting('ENVIRONMENT', 'node')
self.do_run_in_out_file_test('core/pthread/create.cpp')
@node_pthreads
def test_pthread_c11_threads(self):
self.set_setting('PROXY_TO_PTHREAD')
self.set_setting('EXIT_RUNTIME')
self.set_setting('PTHREADS_DEBUG')
if not self.has_changed_setting('INITIAL_MEMORY'):
self.set_setting('INITIAL_MEMORY', '64mb')
# test that the node and worker environments can be specified
self.set_setting('ENVIRONMENT', 'node,worker')
self.do_run_in_out_file_test('pthread/test_pthread_c11_threads.c')
@node_pthreads
def test_pthread_cxx_threads(self):
self.set_setting('PROXY_TO_PTHREAD')
self.clear_setting('ALLOW_MEMORY_GROWTH')
self.set_setting('INITIAL_MEMORY', '64Mb')
self.set_setting('EXIT_RUNTIME')
self.do_run_in_out_file_test('pthread/test_pthread_cxx_threads.cpp')
@node_pthreads
def test_pthread_create_pool(self):
# with a pool, we can synchronously depend on workers being available
self.set_setting('PTHREAD_POOL_SIZE', 2)
self.set_setting('EXIT_RUNTIME')
self.emcc_args += ['-DALLOW_SYNC']
self.do_run_in_out_file_test('core/pthread/create.cpp')
@node_pthreads
def test_pthread_create_proxy(self):
# with PROXY_TO_PTHREAD, we can synchronously depend on workers being available
self.set_setting('PROXY_TO_PTHREAD')
self.set_setting('EXIT_RUNTIME')
self.emcc_args += ['-DALLOW_SYNC']
self.do_run_in_out_file_test('core/pthread/create.cpp')
@node_pthreads
def test_pthread_create_embind_stack_check(self):
# embind should work with stack overflow checks (see #12356)
self.set_setting('STACK_OVERFLOW_CHECK', 2)
self.set_setting('EXIT_RUNTIME')
self.emcc_args += ['--bind']
self.do_run_in_out_file_test('core/pthread/create.cpp')
@node_pthreads
def test_pthread_exceptions(self):
self.set_setting('PTHREAD_POOL_SIZE', 2)
self.set_setting('EXIT_RUNTIME')
self.emcc_args += ['-fexceptions']
self.do_run_in_out_file_test('core/pthread/exceptions.cpp')
@node_pthreads
def test_pthread_exit_process(self):
self.set_setting('PROXY_TO_PTHREAD')
self.set_setting('EXIT_RUNTIME')
self.emcc_args += ['-DEXIT_RUNTIME', '--pre-js', test_file('core/pthread/test_pthread_exit_runtime.pre.js')]
self.do_run_in_out_file_test('core/pthread/test_pthread_exit_runtime.c', assert_returncode=42)
@node_pthreads
@no_wasm2js('wasm2js does not support PROXY_TO_PTHREAD (custom section support)')
def test_pthread_offset_converter(self):
self.set_setting('PROXY_TO_PTHREAD')
self.set_setting('EXIT_RUNTIME')
self.set_setting('USE_OFFSET_CONVERTER')
if '-g' in self.emcc_args:
self.emcc_args += ['-DDEBUG']
self.do_runf(test_file('core/test_return_address.c'), 'passed')
@node_pthreads
@no_wasm2js('wasm2js does not support PROXY_TO_PTHREAD (custom section support)')
def test_pthread_offset_converter_modularize(self):
self.set_setting('PROXY_TO_PTHREAD')
self.set_setting('EXIT_RUNTIME')
self.set_setting('USE_OFFSET_CONVERTER')
self.set_setting('MODULARIZE')
create_file('post.js', 'var m = require("./test_return_address.js"); m();')
self.emcc_args += ['--extern-post-js', 'post.js', '-s', 'EXPORT_NAME=foo']
if '-g' in self.emcc_args:
self.emcc_args += ['-DDEBUG']
self.do_runf(test_file('core/test_return_address.c'), 'passed')
def test_emscripten_atomics_stub(self):
self.do_run_in_out_file_test('core/pthread/emscripten_atomics.c')
@no_asan('incompatibility with atomics')
@node_pthreads
def test_emscripten_atomics(self):
self.set_setting('USE_PTHREADS')
self.do_run_in_out_file_test('core/pthread/emscripten_atomics.c')
@no_asan('incompatibility with atomics')
@node_pthreads
def test_emscripten_futexes(self):
self.set_setting('USE_PTHREADS')
self.do_run_in_out_file_test('core/pthread/emscripten_futexes.c')
@node_pthreads
def test_stdio_locking(self):
self.set_setting('PTHREAD_POOL_SIZE', '2')
self.set_setting('EXIT_RUNTIME')
self.do_run_in_out_file_test('core', 'test_stdio_locking.c')
@needs_dylink
@node_pthreads
def test_pthread_dylink_basics(self):
self.emcc_args.append('-Wno-experimental')
self.set_setting('PROXY_TO_PTHREAD')
self.set_setting('EXIT_RUNTIME')
self.do_basic_dylink_test()
@needs_dylink
@node_pthreads
def test_pthread_dylink(self):
self.emcc_args.append('-Wno-experimental')
self.set_setting('EXIT_RUNTIME')
self.set_setting('USE_PTHREADS')
self.set_setting('LLD_REPORT_UNDEFINED')
self.set_setting('PTHREAD_POOL_SIZE', 2)
main = test_file('core/pthread/test_pthread_dylink.c')
# test with a long .so name, as a regression test for
# https://github.com/emscripten-core/emscripten/issues/14833
# where we had a bug with long names + TextDecoder + pthreads + dylink
very_long_name = 'very_very_very_very_very_very_very_very_very_long.so'
self.dylink_testf(main, so_name=very_long_name,
need_reverse=False)
@needs_dylink
@node_pthreads
def test_pthread_dylink_tls(self):
self.emcc_args.append('-Wno-experimental')
self.set_setting('EXIT_RUNTIME')
self.set_setting('USE_PTHREADS')
self.set_setting('PTHREAD_POOL_SIZE=1')
main = test_file('core/pthread/test_pthread_dylink_tls.c')
self.dylink_testf(main, need_reverse=False)
@needs_dylink
@node_pthreads
def test_Module_dynamicLibraries_pthreads(self):
# test that Module.dynamicLibraries works with pthreads
self.emcc_args += ['-pthread', '-Wno-experimental']
self.emcc_args += ['--extern-pre-js', 'pre.js']
self.set_setting('PROXY_TO_PTHREAD')
self.set_setting('EXIT_RUNTIME')
# This test is for setting dynamicLibraries at runtime so we don't
self.set_setting('NO_AUTOLOAD_DYLIBS')
create_file('pre.js', '''
if (!global.Module) {
// This is the initial load (not a worker)
// Define the initial state of Module as we would
// in the html shell file.
// Use var to escape the scope of the if statement
var Module = {
dynamicLibraries: ['liblib.so']
};
}
''')
self.dylink_test(
r'''
#include <stdio.h>
int side();
int main() {
printf("result is %d", side());
return 0;
}
''',
r'''
int side() { return 42; }
''',
'result is 42')
def test_emscripten_get_exported_function(self):
self.set_setting('RESERVED_FUNCTION_POINTERS', 2)
self.emcc_args += ['-lexports.js']
self.do_core_test('test_get_exported_function.cpp')
@no_asan('TODO: ASan support in minimal runtime')
def test_minimal_runtime_emscripten_get_exported_function(self):
self.set_setting('RESERVED_FUNCTION_POINTERS', 2)
self.set_setting('MINIMAL_RUNTIME')
self.emcc_args += ['-lexports.js']
self.do_core_test('test_get_exported_function.cpp')
@also_with_standalone_wasm(impure=True)
def test_undefined_main(self):
if self.get_setting('STANDALONE_WASM'):
# opt out (see below).
err = self.expect_fail([EMCC, test_file('core/test_ctors_no_main.cpp')] + self.get_emcc_args())
self.assertContained('error: undefined symbol: main (referenced by top-level compiled C/C++ code)', err)
self.assertContained('warning: To build in STANDALONE_WASM mode without a main(), use emcc --no-entry', err)
elif not self.get_setting('LLD_REPORT_UNDEFINED') and not self.get_setting('STRICT'):
# Traditionally in emscripten we allow main to be implicitly undefined. This allows programs
# with a main and libraries without a main to be compiled identically.
# However we are trying to move away from that model to a more explicit opt-out model. See:
# https://github.com/emscripten-core/emscripten/issues/9640
self.do_core_test('test_ctors_no_main.cpp')
# Disabling IGNORE_MISSING_MAIN should cause link to fail due to missing main
self.set_setting('IGNORE_MISSING_MAIN', 0)
err = self.expect_fail([EMCC, test_file('core/test_ctors_no_main.cpp')] + self.get_emcc_args())
self.assertContained('error: entry symbol not defined (pass --no-entry to suppress): main', err)
# In non-standalone mode exporting an empty list of functions signal that we don't
self.set_setting('EXPORTED_FUNCTIONS', [])
self.do_core_test('test_ctors_no_main.cpp')
self.clear_setting('EXPORTED_FUNCTIONS')
def test_undefined_main_explict(self):
self.emcc_args.append('--no-entry')
self.do_core_test('test_ctors_no_main.cpp')
def test_undefined_main_wasm_output(self):
if not can_do_standalone(self):
self.skipTest('standalone mode only')
err = self.expect_fail([EMCC, '-o', 'out.wasm', test_file('core/test_ctors_no_main.cpp')] + self.get_emcc_args())
self.assertContained('undefined symbol: main', err)
def test_export_start(self):
if not can_do_standalone(self):
self.skipTest('standalone mode only')
self.set_setting('STANDALONE_WASM')
self.set_setting('EXPORTED_FUNCTIONS', ['__start'])
self.do_core_test('test_hello_world.c')
@unittest.skip("memory64 functionality only partially working")
def test_memory64_hello_world(self):
self.set_setting('MEMORY64', 2)
self.do_core_test('test_hello_world.c')
self):
self.do_core_test('test_emscripten_math.c')
def test_custom_js_options(self):
self.emcc_args += ['--js-library', test_file('core/test_custom_js_settings.js'), '-jsDCUSTOM_JS_OPTION=1']
self.do_core_test('test_custom_js_settings.c')
self.assertContained('cannot change built-in settings values with a -jsD directive', self.expect_fail([EMCC, '-jsDWASM=0']))
@no_asan('stack allocation sizes are no longer predictable')
def test_emscripten_stack(self):
self.set_setting('TOTAL_STACK', 4 * 1024 * 1024)
self.do_core_test('test_stack_get_free.c')
def test_abort_on_exceptions(self):
self.set_setting('EXIT_RUNTIME', 0)
self.set_setting('ABORT_ON_WASM_EXCEPTIONS')
self.set_setting('EXPORTED_RUNTIME_METHODS', ['ccall', 'cwrap'])
self.emcc_args += ['--bind', '--post-js', test_file('core/test_abort_on_exception_post.js')]
self.do_core_test('test_abort_on_exception.cpp', interleaved_output=False)
@needs_dylink
def test_gl_main_module(self):
self.set_setting('MAIN_MODULE')
self.do_runf(test_file('core/test_gl_get_proc_address.c'))
@needs_dylink
def test_main_module_js_symbol(self):
self.set_setting('MAIN_MODULE', 2)
self.emcc_args += ['--js-library', test_file('core/test_main_module_js_symbol.js')]
self.do_runf(test_file('core/test_main_module_js_symbol.c'))
def test_REVERSE_DEPS(self):
create_file('connect.c', '#include <sys/socket.h>\nint main() { return (int)(long)&connect; }')
self.run_process([EMCC, 'connect.c'])
base_size = os.path.getsize('a.out.wasm')
self.run_process([EMCC, 'connect.c', '-sREVERSE_DEPS=auto'])
self.run_process([EMCC, 'connect.c', '-sREVERSE_DEPS=all'])
self.assertGreater(os.path.getsize('a.out.wasm'), base_size)
err = self.expect_fail([EMCC, 'connect.c', '-sREVERSE_DEPS=none'])
self.assertContained('undefined symbol: ntohs', err)
def test_emscripten_async_call(self):
self.set_setting('EXIT_RUNTIME')
self.do_run_in_out_file_test(test_file('core/test_emscripten_async_call.c'))
@no_asan('asyncify stack operations confuse asan')
@parameterized({
'': ([],),
'no_dynamic_execution': (['-s', 'DYNAMIC_EXECUTION=0'],)
})
def test_embind_lib_with_asyncify(self, args):
self.uses_es6 = True
self.emcc_args += [
'--bind',
'-s', 'ASYNCIFY',
'-s', 'ASYNCIFY_IMPORTS=["sleep_and_return"]',
'--post-js', test_file('core/embind_lib_with_asyncify.test.js'),
]
self.emcc_args += args
self.do_core_test('embind_lib_with_asyncify.cpp')
@no_asan('asyncify stack operations confuse asan')
def test_em_async_js(self):
self.uses_es6 = True
self.set_setting('ASYNCIFY')
self.maybe_closure()
self.do_core_test('test_em_async_js.c')
def make_run(name, emcc_args, settings=None, env=None):
  """Create and return a TestCoreBase subclass named *name* that runs every
  core test with the given emcc flags, settings and extra environment
  variables.

  The caller binds the returned class at module level so the unittest
  loader discovers it by name (e.g. `wasm2` runs the suite at -O2).
  """
  if env is None:
    env = {}
  if settings is None:
    settings = {}
  if settings:
    emcc_args.append('-Wno-unused-command-line-argument')
  # Build the subclass dynamically; run_name/env become class attributes and
  # __module__ makes the class appear as if it were defined in this module.
  TT = type(name, (TestCoreBase,), dict(run_name=name, env=env, __module__=__name__))
  def tearDown(self):
    # NOTE: the explicit super(TT, self) form is required -- these functions
    # are not defined inside a class body, so zero-argument super() has no
    # implicit __class__ cell to rely on.
    try:
      super(TT, self).tearDown()
    finally:
      # Always undo the os.environ changes made in setUp, even if the base
      # tearDown raises.
      for k, v in self.env.items():
        del os.environ[k]
  TT.tearDown = tearDown
  def setUp(self):
    super(TT, self).setUp()
    # Inject the per-run environment; refuse to clobber pre-existing keys.
    for k, v in self.env.items():
      assert k not in os.environ, k + ' should not be in environment'
      os.environ[k] = v
    os.chdir(self.get_dir())
    # Apply the per-run -s settings and compiler flags on top of the base
    # configuration established by TestCoreBase.setUp().
    for k, v in settings.items():
      self.set_setting(k, v)
    self.emcc_args += emcc_args
  TT.setUp = setUp
  return TT
# Generated test-suite variants: each call binds a TestCoreBase subclass at
# module level so the unittest loader discovers it by name.

# Core wasm modes at each optimization level.
wasm0 = make_run('wasm0', emcc_args=['-O0'])
wasm0g = make_run('wasm0g', emcc_args=['-O0', '-g'])
wasm1 = make_run('wasm1', emcc_args=['-O1'])
wasm2 = make_run('wasm2', emcc_args=['-O2'])
wasm2g = make_run('wasm2g', emcc_args=['-O2', '-g'])
wasm3 = make_run('wasm3', emcc_args=['-O3'])
wasms = make_run('wasms', emcc_args=['-Os'])
wasmz = make_run('wasmz', emcc_args=['-Oz'])
# LTO builds.
wasmlto0 = make_run('wasmlto0', emcc_args=['-flto', '-O0'])
wasmlto1 = make_run('wasmlto1', emcc_args=['-flto', '-O1'])
wasmlto2 = make_run('wasmlto2', emcc_args=['-flto', '-O2'])
wasmlto3 = make_run('wasmlto3', emcc_args=['-flto', '-O3'])
wasmltos = make_run('wasmltos', emcc_args=['-flto', '-Os'])
wasmltoz = make_run('wasmltoz', emcc_args=['-flto', '-Oz'])
# wasm2js: compile to JavaScript instead of WebAssembly.
wasm2js0 = make_run('wasm2js0', emcc_args=['-O0'], settings={'WASM': 0})
wasm2js1 = make_run('wasm2js1', emcc_args=['-O1'], settings={'WASM': 0})
wasm2js2 = make_run('wasm2js2', emcc_args=['-O2'], settings={'WASM': 0})
wasm2js3 = make_run('wasm2js3', emcc_args=['-O3'], settings={'WASM': 0})
wasm2jss = make_run('wasm2jss', emcc_args=['-Os'], settings={'WASM': 0})
wasm2jsz = make_run('wasm2jsz', emcc_args=['-Oz'], settings={'WASM': 0})
# Wasm feature flags.
simd2 = make_run('simd2', emcc_args=['-O2', '-msimd128'])
bulkmem2 = make_run('bulkmem2', emcc_args=['-O2', '-mbulk-memory'])
# Safety-check, strict-mode and sanitizer configurations.
wasm2s = make_run('wasm2s', emcc_args=['-O2'], settings={'SAFE_HEAP': 1})
wasm2ss = make_run('wasm2ss', emcc_args=['-O2'], settings={'STACK_OVERFLOW_CHECK': 2})
strict = make_run('strict', emcc_args=[], settings={'STRICT': 1})
lsan = make_run('lsan', emcc_args=['-fsanitize=leak', '--profiling'], settings={'ALLOW_MEMORY_GROWTH': 1})
asan = make_run('asan', emcc_args=['-fsanitize=address', '--profiling'], settings={'ALLOW_MEMORY_GROWTH': 1})
asani = make_run('asani', emcc_args=['-fsanitize=address', '--profiling', '--pre-js', os.path.join(os.path.dirname(__file__), 'asan-no-leak.js')],
                 settings={'ALLOW_MEMORY_GROWTH': 1})
lld = make_run('lld', emcc_args=[], settings={'LLD_REPORT_UNDEFINED': 1})
minimal0 = make_run('minimal0', emcc_args=['-g'], settings={'MINIMAL_RUNTIME': 1})
# The abstract base class itself must not be picked up by the test runner.
del TestCoreBase # noqa
| true | true |
f71c3617afc853a7653c8b2fa5b96fb74a081eed | 464 | py | Python | python/string-manipulation/alternating_characters.py | anishLearnsToCode/hackerrabk-interview-preparation-kit | 9d31eefe336e6dbef104ae78f06dd46686c28f84 | [
"MIT"
] | 2 | 2020-06-04T09:48:30.000Z | 2021-11-28T15:43:00.000Z | python/string-manipulation/alternating_characters.py | anishLearnsToCode/hackerrabk-interview-preparation-kit | 9d31eefe336e6dbef104ae78f06dd46686c28f84 | [
"MIT"
] | null | null | null | python/string-manipulation/alternating_characters.py | anishLearnsToCode/hackerrabk-interview-preparation-kit | 9d31eefe336e6dbef104ae78f06dd46686c28f84 | [
"MIT"
] | 1 | 2020-11-01T01:03:08.000Z | 2020-11-01T01:03:08.000Z | def minimum_deletions(string: str) -> int:
current_character = string[0]
count = 0
deletions = 0
for character in string:
if character == current_character:
count += 1
else:
current_character = character
deletions += count - 1
count = 1
return deletions + count - 1
# HackerRank driver: the first stdin line gives the number of test cases;
# each subsequent line is one string to evaluate.
test_cases = int(input())
for _ in range(test_cases):
    string = input()
    print(minimum_deletions(string))
| 24.421053 | 42 | 0.594828 | def minimum_deletions(string: str) -> int:
current_character = string[0]
count = 0
deletions = 0
for character in string:
if character == current_character:
count += 1
else:
current_character = character
deletions += count - 1
count = 1
return deletions + count - 1
test_cases = int(input())
for _ in range(test_cases):
string = input()
print(minimum_deletions(string))
| true | true |
f71c36d4dffacd0a0f9415f50fd7e32ca1fa2928 | 6,067 | py | Python | oops_fhir/r4/code_system/v3_substance_admin_substitution.py | Mikuana/oops_fhir | 77963315d123756b7d21ae881f433778096a1d25 | [
"MIT"
] | null | null | null | oops_fhir/r4/code_system/v3_substance_admin_substitution.py | Mikuana/oops_fhir | 77963315d123756b7d21ae881f433778096a1d25 | [
"MIT"
] | null | null | null | oops_fhir/r4/code_system/v3_substance_admin_substitution.py | Mikuana/oops_fhir | 77963315d123756b7d21ae881f433778096a1d25 | [
"MIT"
] | null | null | null | from pathlib import Path
from fhir.resources.codesystem import CodeSystem
from oops_fhir.utils import CodeSystemConcept
__all__ = ["v3substanceAdminSubstitution"]
_resource = CodeSystem.parse_file(Path(__file__).with_suffix(".json"))
class v3substanceAdminSubstitution:
"""
v3 Code System substanceAdminSubstitution
Identifies what sort of change is permitted or has occurred between the
therapy that was ordered and the therapy that was/will be provided.
Status: active - Version: 2018-08-12
Copyright None
http://terminology.hl7.org/CodeSystem/v3-substanceAdminSubstitution
"""
underscore_act_substance_admin_substitution_code = CodeSystemConcept(
{
"code": "_ActSubstanceAdminSubstitutionCode",
"concept": [
{
"code": "E",
"concept": [
{
"code": "EC",
"concept": [
{
"code": "BC",
"definition": "Description: \n \r\n\n Substitution occurred or is permitted between equivalent Brands but not Generics\r\n\n \n Examples: \n \r\n\n \n Zestril for Prinivil\n Coumadin for Jantoven",
"display": "brand composition",
},
{
"code": "G",
"definition": "Description: Substitution occurred or is permitted between equivalent Generics but not Brands\r\n\n \n Examples: \n \r\n\n \n Lisnopril (Lupin Corp) for Lisnopril (Wockhardt Corp)",
"display": "generic composition",
},
],
"definition": "Description: \n \r\n\n Substitution occurred or is permitted with another product that is a:\r\n\n \n pharmaceutical alternative containing the same active ingredient but is formulated with different salt, ester\n pharmaceutical equivalent that has the same active ingredient, strength, dosage form and route of administration\n \n \n Examples: \n \r\n\n \n \n Pharmaceutical alternative: Erythromycin Ethylsuccinate for Erythromycin Stearate\n \n Pharmaceutical equivalent: Lisonpril for Zestril",
"display": "equivalent composition",
},
{
"code": "TE",
"concept": [
{
"code": "TB",
"definition": "Description: Substitution occurred or is permitted between therapeutically equivalent Brands but not Generics\r\n>\n Examples: \n \r\n\n \n Zantac for Tagamet",
"display": "therapeutic brand",
},
{
"code": "TG",
"definition": "Description: Substitution occurred or is permitted between therapeutically equivalent Generics but not Brands\r\n>\n Examples: \n \r\n\n \n Ranitidine for cimetidine",
"display": "therapeutic generic",
},
],
"definition": "Description: Substitution occurred or is permitted with another product having the same therapeutic objective and safety profile.\r\n\n \n Examples: \n \r\n\n \n ranitidine for Tagamet",
"display": "therapeutic alternative",
},
],
"definition": "Description: Substitution occurred or is permitted with another bioequivalent and therapeutically equivalent product.",
"display": "equivalent",
},
{
"code": "F",
"definition": "Description: This substitution was performed or is permitted based on formulary guidelines.",
"display": "formulary",
},
{
"code": "N",
"definition": "No substitution occurred or is permitted.",
"display": "none",
},
],
"definition": "Description: Substitution occurred or is permitted with another product that may potentially have different ingredients, but having the same biological and therapeutic effects.",
"display": "ActSubstanceAdminSubstitutionCode",
"property": [{"code": "notSelectable", "valueBoolean": True}],
}
)
"""
ActSubstanceAdminSubstitutionCode
Description: Substitution occurred or is permitted with another product that may potentially have different ingredients, but having the same biological and therapeutic effects.
"""
class Meta:
resource = _resource
| 63.197917 | 890 | 0.447338 | from pathlib import Path
from fhir.resources.codesystem import CodeSystem
from oops_fhir.utils import CodeSystemConcept
__all__ = ["v3substanceAdminSubstitution"]
_resource = CodeSystem.parse_file(Path(__file__).with_suffix(".json"))
class v3substanceAdminSubstitution:
underscore_act_substance_admin_substitution_code = CodeSystemConcept(
{
"code": "_ActSubstanceAdminSubstitutionCode",
"concept": [
{
"code": "E",
"concept": [
{
"code": "EC",
"concept": [
{
"code": "BC",
"definition": "Description: \n \r\n\n Substitution occurred or is permitted between equivalent Brands but not Generics\r\n\n \n Examples: \n \r\n\n \n Zestril for Prinivil\n Coumadin for Jantoven",
"display": "brand composition",
},
{
"code": "G",
"definition": "Description: Substitution occurred or is permitted between equivalent Generics but not Brands\r\n\n \n Examples: \n \r\n\n \n Lisnopril (Lupin Corp) for Lisnopril (Wockhardt Corp)",
"display": "generic composition",
},
],
"definition": "Description: \n \r\n\n Substitution occurred or is permitted with another product that is a:\r\n\n \n pharmaceutical alternative containing the same active ingredient but is formulated with different salt, ester\n pharmaceutical equivalent that has the same active ingredient, strength, dosage form and route of administration\n \n \n Examples: \n \r\n\n \n \n Pharmaceutical alternative: Erythromycin Ethylsuccinate for Erythromycin Stearate\n \n Pharmaceutical equivalent: Lisonpril for Zestril",
"display": "equivalent composition",
},
{
"code": "TE",
"concept": [
{
"code": "TB",
"definition": "Description: Substitution occurred or is permitted between therapeutically equivalent Brands but not Generics\r\n>\n Examples: \n \r\n\n \n Zantac for Tagamet",
"display": "therapeutic brand",
},
{
"code": "TG",
"definition": "Description: Substitution occurred or is permitted between therapeutically equivalent Generics but not Brands\r\n>\n Examples: \n \r\n\n \n Ranitidine for cimetidine",
"display": "therapeutic generic",
},
],
"definition": "Description: Substitution occurred or is permitted with another product having the same therapeutic objective and safety profile.\r\n\n \n Examples: \n \r\n\n \n ranitidine for Tagamet",
"display": "therapeutic alternative",
},
],
"definition": "Description: Substitution occurred or is permitted with another bioequivalent and therapeutically equivalent product.",
"display": "equivalent",
},
{
"code": "F",
"definition": "Description: This substitution was performed or is permitted based on formulary guidelines.",
"display": "formulary",
},
{
"code": "N",
"definition": "No substitution occurred or is permitted.",
"display": "none",
},
],
"definition": "Description: Substitution occurred or is permitted with another product that may potentially have different ingredients, but having the same biological and therapeutic effects.",
"display": "ActSubstanceAdminSubstitutionCode",
"property": [{"code": "notSelectable", "valueBoolean": True}],
}
)
class Meta:
resource = _resource
| true | true |
f71c37bb391e2b9895264076e05236334fa0075c | 570 | py | Python | tests/test_poetcli.py | jkerola/poetcli | e307513fa073beaddd7247d944fcef7092ee95dd | [
"MIT"
] | null | null | null | tests/test_poetcli.py | jkerola/poetcli | e307513fa073beaddd7247d944fcef7092ee95dd | [
"MIT"
] | null | null | null | tests/test_poetcli.py | jkerola/poetcli | e307513fa073beaddd7247d944fcef7092ee95dd | [
"MIT"
] | null | null | null | from pytest import raises
from poetcli.main import PoetCLITest
def test_poetcli():
    """Base invocation (no subcommands or arguments) should exit cleanly."""
    with PoetCLITest() as app:
        app.run()
        exit_code = app.exit_code
        assert exit_code == 0
def test_poetcli_debug():
    """The --debug flag should switch the app into debug mode."""
    with PoetCLITest(argv=['--debug']) as app:
        app.run()
        assert app.debug is True
def test_create_poem():
    """With an empty argument vector nothing should have been rendered."""
    with PoetCLITest(argv=[]) as app:
        app.run()
        assert app.last_rendered is None
| 21.923077 | 55 | 0.64386 | from pytest import raises
from poetcli.main import PoetCLITest
def test_poetcli():
    # Base invocation without any subcommands or arguments must exit cleanly.
    with PoetCLITest() as app:
        app.run()
        assert app.exit_code == 0
def test_poetcli_debug():
    # The --debug flag should put the app into debug mode.
    argv = ['--debug']
    with PoetCLITest(argv=argv) as app:
        app.run()
        assert app.debug is True
def test_create_poem():
    # With an empty argument vector nothing should have been rendered.
    argv = []
    with PoetCLITest(argv=argv) as app:
        app.run()
        output = app.last_rendered
        assert output is None
| true | true |
f71c38457ce1146b10dc11311b97086931643887 | 2,269 | py | Python | cengal/hardware_info/cpu.py | FI-Mihej/Cengal | 516b9780da6ccc9168f8f89d7ba13dc29e24bc0b | [
"Apache-2.0"
] | 3 | 2018-07-23T18:48:58.000Z | 2021-07-18T14:17:20.000Z | cengal/hardware_info/cpu.py | FI-Mihej/Cengal | 516b9780da6ccc9168f8f89d7ba13dc29e24bc0b | [
"Apache-2.0"
] | null | null | null | cengal/hardware_info/cpu.py | FI-Mihej/Cengal | 516b9780da6ccc9168f8f89d7ba13dc29e24bc0b | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# coding=utf-8
# Copyright © 2017 ButenkoMS. All rights reserved. Contacts: <gtalk@butenkoms.space>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import cpuinfo
"""
Module Docstring
Docstrings: http://www.python.org/dev/peps/pep-0257/
"""
__author__ = "ButenkoMS <gtalk@butenkoms.space>"
__copyright__ = "Copyright © 2017 ButenkoMS. All rights reserved. Contacts: <gtalk@butenkoms.space>"
__credits__ = ["ButenkoMS <gtalk@butenkoms.space>", ]
__license__ = "Apache License, Version 2.0"
__version__ = "0.0.1"
__maintainer__ = "ButenkoMS <gtalk@butenkoms.space>"
__email__ = "gtalk@butenkoms.space"
__status__ = "Prototype"
# __status__ = "Development"
# __status__ = "Production"
def get_cpu_info()->dict:
    """Return the raw CPU information dict as reported by the `cpuinfo` package."""
    return cpuinfo.get_cpu_info()
def get_l2_cache_size()->int:
    """Return the CPU's L2 cache size in bytes, or 0 if it cannot be parsed.

    `cpuinfo` reports the size as a text such as "256 KB" (number plus unit)
    or, occasionally, as a single bare number; this parses the number and
    scales it by the unit suffix.
    """
    size_text = cpuinfo.get_cpu_info()['l2_cache_size']
    size_text_list = size_text.split()
    size_text_list_size = len(size_text_list)
    size_text_number = None
    size_text_dimension = None
    if 0 == size_text_list_size:
        return 0
    elif 1 == size_text_list_size:
        # A bare number is taken to already be in bytes.
        # Bug fix: int() must be applied to the single token, not to the list
        # itself (int(list) raises TypeError).
        return int(size_text_list[0])
    elif 2 == size_text_list_size:
        size_text_number, size_text_dimension = size_text_list
    else:
        return 0
    size_text_number = int(size_text_number)
    size_text_dimension = size_text_dimension.lower()
    # Scale by the unit suffix; unknown units fall through with factor 1.
    factor = 1
    if 'kb' == size_text_dimension:
        factor = 1024
    elif 'mb' == size_text_dimension:
        factor = 1024**2
    elif 'gb' == size_text_dimension:
        factor = 1024**3  # :)
    return size_text_number * factor
def l2_cache_per_core()->int:
    """Return the share of L2 cache (in bytes) per logical core, or 0 if unknown."""
    core_count = cpuinfo.get_cpu_info()['count']
    if not core_count:
        return 0
    return int(get_l2_cache_size() / core_count)
| 31.082192 | 100 | 0.712649 |
import cpuinfo
__author__ = "ButenkoMS <gtalk@butenkoms.space>"
__copyright__ = "Copyright © 2017 ButenkoMS. All rights reserved. Contacts: <gtalk@butenkoms.space>"
__credits__ = ["ButenkoMS <gtalk@butenkoms.space>", ]
__license__ = "Apache License, Version 2.0"
__version__ = "0.0.1"
__maintainer__ = "ButenkoMS <gtalk@butenkoms.space>"
__email__ = "gtalk@butenkoms.space"
__status__ = "Prototype"
def get_cpu_info()->dict:
    """Return the raw CPU information dict as reported by the `cpuinfo` package."""
    return cpuinfo.get_cpu_info()
def get_l2_cache_size()->int:
    """Return the CPU's L2 cache size in bytes, or 0 if it cannot be parsed.

    `cpuinfo` reports the size as a text such as "256 KB" (number plus unit)
    or, occasionally, as a single bare number; this parses the number and
    scales it by the unit suffix.
    """
    size_text = cpuinfo.get_cpu_info()['l2_cache_size']
    size_text_list = size_text.split()
    size_text_list_size = len(size_text_list)
    size_text_number = None
    size_text_dimension = None
    if 0 == size_text_list_size:
        return 0
    elif 1 == size_text_list_size:
        # A bare number is taken to already be in bytes.
        # Bug fix: int() must be applied to the single token, not to the list
        # itself (int(list) raises TypeError).
        return int(size_text_list[0])
    elif 2 == size_text_list_size:
        size_text_number, size_text_dimension = size_text_list
    else:
        return 0
    size_text_number = int(size_text_number)
    size_text_dimension = size_text_dimension.lower()
    # Scale by the unit suffix; unknown units fall through with factor 1.
    factor = 1
    if 'kb' == size_text_dimension:
        factor = 1024
    elif 'mb' == size_text_dimension:
        factor = 1024**2
    elif 'gb' == size_text_dimension:
        factor = 1024**3
    return size_text_number * factor
def l2_cache_per_core()->int:
    """Return the share of L2 cache (in bytes) per logical core, or 0 if unknown."""
    core_count = cpuinfo.get_cpu_info()['count']
    if not core_count:
        return 0
    return int(get_l2_cache_size() / core_count)
| true | true |
f71c38629e95ab04b4b2bedf6a45506097a47901 | 6,731 | py | Python | glfw/library.py | brpollock/pyGLFW | 1f78383e0b466ad4af83270ef9631d306665e714 | [
"MIT"
] | null | null | null | glfw/library.py | brpollock/pyGLFW | 1f78383e0b466ad4af83270ef9631d306665e714 | [
"MIT"
] | null | null | null | glfw/library.py | brpollock/pyGLFW | 1f78383e0b466ad4af83270ef9631d306665e714 | [
"MIT"
] | null | null | null | """
Python bindings for GLFW.
"""
from __future__ import print_function
from __future__ import division
from __future__ import unicode_literals
import ctypes
import os
import glob
import sys
import subprocess
import textwrap
def _find_library_candidates(library_names,
library_file_extensions,
library_search_paths):
"""
Finds and returns filenames which might be the library you are looking for.
"""
candidates = set()
for library_name in library_names:
for search_path in library_search_paths:
glob_query = os.path.join(search_path, '*'+library_name+'*')
for filename in glob.iglob(glob_query):
filename = os.path.realpath(filename)
if filename in candidates:
continue
basename = os.path.basename(filename)
if basename.startswith('lib'+library_name):
basename_end = basename[len('lib'+library_name):]
elif basename.startswith(library_name):
basename_end = basename[len(library_name):]
else:
continue
for file_extension in library_file_extensions:
if basename_end.startswith(file_extension):
if basename_end[len(file_extension):][:1] in ('', '.'):
candidates.add(filename)
if basename_end.endswith(file_extension):
basename_middle = basename_end[:-len(file_extension)]
if all(c in '0123456789.' for c in basename_middle):
candidates.add(filename)
return candidates
def _load_library(library_names, library_file_extensions,
                  library_search_paths, version_check_callback):
    """
    Locate every candidate file, keep those whose version is at least 3.0.0
    and load the newest one with ctypes. Returns None when nothing qualifies.
    """
    versioned = []
    for candidate in _find_library_candidates(library_names,
                                              library_file_extensions,
                                              library_search_paths):
        version = version_check_callback(candidate)
        if version is not None and version >= (3, 0, 0):
            versioned.append((version, candidate))
    if not versioned:
        return None
    # The highest (version, filename) tuple wins.
    best_version, best_filename = max(versioned)
    return ctypes.CDLL(best_filename)
def _glfw_get_version(filename):
"""
Queries and returns the library version tuple or None by using a
subprocess.
"""
version_checker_source = '''
import sys
import ctypes
def get_version(library_handle):
"""
Queries and returns the library version tuple or None.
"""
major_value = ctypes.c_int(0)
major = ctypes.pointer(major_value)
minor_value = ctypes.c_int(0)
minor = ctypes.pointer(minor_value)
rev_value = ctypes.c_int(0)
rev = ctypes.pointer(rev_value)
if hasattr(library_handle, 'glfwGetVersion'):
library_handle.glfwGetVersion(major, minor, rev)
version = (major_value.value,
minor_value.value,
rev_value.value)
return version
else:
return None
try:
input_func = raw_input
except NameError:
input_func = input
filename = input_func().strip()
try:
library_handle = ctypes.CDLL(filename)
except OSError:
pass
else:
version = get_version(library_handle)
print(version)
'''
args = [sys.executable, '-c', textwrap.dedent(version_checker_source)]
process = subprocess.Popen(args, universal_newlines=True,
stdin=subprocess.PIPE, stdout=subprocess.PIPE)
out = process.communicate(filename)[0]
out = out.strip()
if out:
return eval(out)
else:
return None
def _get_library_search_paths():
"""
Returns a list of library search paths, considering of the current working
directory, default paths and paths from environment variables.
"""
package_path = os.path.abspath(os.path.dirname(__file__))
search_paths = [
'',
package_path,
sys.prefix + '/lib',
'/usr/lib64',
'/usr/local/lib64',
'/usr/lib', '/usr/local/lib',
'/run/current-system/sw/lib',
'/usr/lib/x86_64-linux-gnu/',
'/usr/lib/aarch64-linux-gnu/',
]
if sys.platform != 'darwin':
# manylinux2014 wheels contain libraries built for X11 and Wayland
if os.environ.get('XDG_SESSION_TYPE') == 'wayland':
search_paths.insert(1, os.path.join(package_path, 'wayland'))
else:
# X11 is the default, even if XDG_SESSION_TYPE is not set
search_paths.insert(1, os.path.join(package_path, 'x11'))
if sys.platform == 'darwin':
path_environment_variable = 'DYLD_LIBRARY_PATH'
else:
path_environment_variable = 'LD_LIBRARY_PATH'
if path_environment_variable in os.environ:
search_paths.extend(os.environ[path_environment_variable].split(':'))
return search_paths
# Module-level loading: resolve the GLFW shared library exactly once at
# import time. `glfw` ends up as a ctypes.CDLL handle, or None on failure.
if os.environ.get('PYGLFW_LIBRARY', ''):
    # An explicit override via the PYGLFW_LIBRARY environment variable wins.
    try:
        glfw = ctypes.CDLL(os.environ['PYGLFW_LIBRARY'])
    except OSError:
        glfw = None
elif sys.platform == 'win32':
    glfw = None # Will become `not None` on success.
    # try Windows default search path
    try:
        glfw = ctypes.CDLL('glfw3.dll')
    except OSError:
        pass
    # try package directory
    if glfw is None:
        try:
            if sys.maxsize > 2**32:
                # load Microsoft Visual C++ 2012 runtime on 64-bit systems
                msvcr = ctypes.CDLL(os.path.join(os.path.abspath(os.path.dirname(__file__)), 'msvcr110.dll'))
            else:
                # load Microsoft Visual C++ 2010 runtime on 32-bit systems
                msvcr = ctypes.CDLL(os.path.join(os.path.abspath(os.path.dirname(__file__)), 'msvcr100.dll'))
            glfw = ctypes.CDLL(os.path.join(os.path.abspath(os.path.dirname(__file__)), 'glfw3.dll'))
        except OSError:
            pass
    # try conda's default location on Windows
    if glfw is None:
        try:
            glfw = ctypes.CDLL(os.path.join(sys.prefix, 'Library', 'bin', 'glfw3.dll'))
        except OSError:
            pass
else:
    # POSIX/macOS: scan the search paths and load the newest candidate
    # whose reported version is at least 3.0.0.
    glfw = _load_library(['glfw', 'glfw3'], ['.so', '.dylib'],
                         _get_library_search_paths(), _glfw_get_version)
| 34.695876 | 109 | 0.588323 |
from __future__ import print_function
from __future__ import division
from __future__ import unicode_literals
import ctypes
import os
import glob
import sys
import subprocess
import textwrap
def _find_library_candidates(library_names,
                             library_file_extensions,
                             library_search_paths):
    """Collect real paths of files that might be one of the requested
    libraries, judged by their name prefix and file extension."""
    candidates = set()
    for library_name in library_names:
        for search_path in library_search_paths:
            glob_query = os.path.join(search_path, '*'+library_name+'*')
            for filename in glob.iglob(glob_query):
                filename = os.path.realpath(filename)
                if filename in candidates:
                    continue
                basename = os.path.basename(filename)
                if basename.startswith('lib'+library_name):
                    basename_end = basename[len('lib'+library_name):]
                elif basename.startswith(library_name):
                    basename_end = basename[len(library_name):]
                else:
                    continue
                for file_extension in library_file_extensions:
                    if basename_end.startswith(file_extension):
                        # e.g. "libglfw.so" or "libglfw.so.3"
                        if basename_end[len(file_extension):][:1] in ('', '.'):
                            candidates.add(filename)
                    if basename_end.endswith(file_extension):
                        # e.g. "libglfw.3.dylib": only version digits/dots may
                        # sit between the library name and the extension.
                        basename_middle = basename_end[:-len(file_extension)]
                        if all(c in '0123456789.' for c in basename_middle):
                            candidates.add(filename)
    return candidates
def _load_library(library_names, library_file_extensions,
                  library_search_paths, version_check_callback):
    """Find, version-check and load the most recent matching library.

    Returns a ctypes.CDLL handle for the candidate with the highest version
    >= (3, 0, 0), or None when no candidate qualifies.
    """
    candidates = _find_library_candidates(library_names,
                                          library_file_extensions,
                                          library_search_paths)
    library_versions = []
    for filename in candidates:
        version = version_check_callback(filename)
        if version is not None and version >= (3, 0, 0):
            library_versions.append((version, filename))
    if not library_versions:
        return None
    # Sort by (version, filename); the last entry has the highest version.
    library_versions.sort()
    return ctypes.CDLL(library_versions[-1][1])
def _glfw_get_version(filename):
version_checker_source = '''
import sys
import ctypes
def get_version(library_handle):
"""
Queries and returns the library version tuple or None.
"""
major_value = ctypes.c_int(0)
major = ctypes.pointer(major_value)
minor_value = ctypes.c_int(0)
minor = ctypes.pointer(minor_value)
rev_value = ctypes.c_int(0)
rev = ctypes.pointer(rev_value)
if hasattr(library_handle, 'glfwGetVersion'):
library_handle.glfwGetVersion(major, minor, rev)
version = (major_value.value,
minor_value.value,
rev_value.value)
return version
else:
return None
try:
input_func = raw_input
except NameError:
input_func = input
filename = input_func().strip()
try:
library_handle = ctypes.CDLL(filename)
except OSError:
pass
else:
version = get_version(library_handle)
print(version)
'''
args = [sys.executable, '-c', textwrap.dedent(version_checker_source)]
process = subprocess.Popen(args, universal_newlines=True,
stdin=subprocess.PIPE, stdout=subprocess.PIPE)
out = process.communicate(filename)[0]
out = out.strip()
if out:
return eval(out)
else:
return None
def _get_library_search_paths():
    """Return the ordered list of directories to scan for the GLFW library.

    Includes the current working directory (''), the package directory,
    common system locations and the platform's dynamic-linker search path
    taken from the environment.
    """
    package_path = os.path.abspath(os.path.dirname(__file__))
    search_paths = [
        '',
        package_path,
        sys.prefix + '/lib',
        '/usr/lib64',
        '/usr/local/lib64',
        '/usr/lib', '/usr/local/lib',
        '/run/current-system/sw/lib',
        '/usr/lib/x86_64-linux-gnu/',
        '/usr/lib/aarch64-linux-gnu/',
    ]
    if sys.platform != 'darwin':
        # Wheels ship separate X11 and Wayland builds; pick the subdirectory
        # matching the current session type (X11 is the default).
        if os.environ.get('XDG_SESSION_TYPE') == 'wayland':
            search_paths.insert(1, os.path.join(package_path, 'wayland'))
        else:
            search_paths.insert(1, os.path.join(package_path, 'x11'))
    if sys.platform == 'darwin':
        path_environment_variable = 'DYLD_LIBRARY_PATH'
    else:
        path_environment_variable = 'LD_LIBRARY_PATH'
    if path_environment_variable in os.environ:
        search_paths.extend(os.environ[path_environment_variable].split(':'))
    return search_paths
# Module-level loading: resolve the GLFW shared library exactly once at
# import time. `glfw` ends up as a ctypes.CDLL handle, or None on failure.
if os.environ.get('PYGLFW_LIBRARY', ''):
    # An explicit override via the PYGLFW_LIBRARY environment variable wins.
    try:
        glfw = ctypes.CDLL(os.environ['PYGLFW_LIBRARY'])
    except OSError:
        glfw = None
elif sys.platform == 'win32':
    # Windows: try the default DLL search path first ...
    glfw = None
    try:
        glfw = ctypes.CDLL('glfw3.dll')
    except OSError:
        pass
    # ... then the package directory (loading the matching MSVC runtime first) ...
    if glfw is None:
        try:
            if sys.maxsize > 2**32:
                msvcr = ctypes.CDLL(os.path.join(os.path.abspath(os.path.dirname(__file__)), 'msvcr110.dll'))
            else:
                msvcr = ctypes.CDLL(os.path.join(os.path.abspath(os.path.dirname(__file__)), 'msvcr100.dll'))
            glfw = ctypes.CDLL(os.path.join(os.path.abspath(os.path.dirname(__file__)), 'glfw3.dll'))
        except OSError:
            pass
    # ... and finally conda's default DLL location.
    if glfw is None:
        try:
            glfw = ctypes.CDLL(os.path.join(sys.prefix, 'Library', 'bin', 'glfw3.dll'))
        except OSError:
            pass
else:
    # POSIX/macOS: scan the search paths and load the newest candidate
    # whose reported version is at least 3.0.0.
    glfw = _load_library(['glfw', 'glfw3'], ['.so', '.dylib'],
                         _get_library_search_paths(), _glfw_get_version)
| true | true |
f71c38e7407523eed8a014449cd499676148e894 | 1,456 | py | Python | azure-servicefabric/azure/servicefabric/models/scaling_mechanism_description_py3.py | JonathanGailliez/azure-sdk-for-python | f0f051bfd27f8ea512aea6fc0c3212ee9ee0029b | [
"MIT"
] | 1 | 2021-09-07T18:36:04.000Z | 2021-09-07T18:36:04.000Z | azure-servicefabric/azure/servicefabric/models/scaling_mechanism_description_py3.py | JonathanGailliez/azure-sdk-for-python | f0f051bfd27f8ea512aea6fc0c3212ee9ee0029b | [
"MIT"
] | 2 | 2019-10-02T23:37:38.000Z | 2020-10-02T01:17:31.000Z | azure-servicefabric/azure/servicefabric/models/scaling_mechanism_description_py3.py | JonathanGailliez/azure-sdk-for-python | f0f051bfd27f8ea512aea6fc0c3212ee9ee0029b | [
"MIT"
] | 1 | 2018-10-16T13:08:23.000Z | 2018-10-16T13:08:23.000Z | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class ScalingMechanismDescription(Model):
    """Describes the mechanism for performing a scaling operation.
    You probably want to use the sub-classes and not this class directly. Known
    sub-classes are: PartitionInstanceCountScaleMechanism,
    AddRemoveIncrementalNamedPartitionScalingMechanism
    All required parameters must be populated in order to send to Azure.
    :param kind: Required. Constant filled by server.
    :type kind: str
    """
    # msrest validation rules: 'kind' is mandatory when serializing.
    _validation = {
        'kind': {'required': True},
    }
    # Maps the Python attribute to its wire-format key and type.
    _attribute_map = {
        'kind': {'key': 'Kind', 'type': 'str'},
    }
    # Polymorphic dispatch: the serialized 'kind' value selects which
    # concrete subclass msrest instantiates on deserialization.
    _subtype_map = {
        'kind': {'PartitionInstanceCount': 'PartitionInstanceCountScaleMechanism', 'AddRemoveIncrementalNamedPartition': 'AddRemoveIncrementalNamedPartitionScalingMechanism'}
    }
    def __init__(self, **kwargs) -> None:
        super(ScalingMechanismDescription, self).__init__(**kwargs)
        # 'kind' is the discriminator constant; concrete subclasses set it.
        self.kind = None
| 33.860465 | 174 | 0.644231 |
from msrest.serialization import Model
class ScalingMechanismDescription(Model):
    """Describes the mechanism for performing a scaling operation.

    This is a polymorphic base: use the concrete sub-classes named in
    `_subtype_map` rather than this class directly.

    :param kind: Required. Constant filled by server.
    :type kind: str
    """
    # msrest validation rules: 'kind' is mandatory when serializing.
    _validation = {
        'kind': {'required': True},
    }
    # Maps the Python attribute to its wire-format key and type.
    _attribute_map = {
        'kind': {'key': 'Kind', 'type': 'str'},
    }
    # Polymorphic dispatch: the serialized 'kind' value selects which
    # concrete subclass msrest instantiates on deserialization.
    _subtype_map = {
        'kind': {'PartitionInstanceCount': 'PartitionInstanceCountScaleMechanism', 'AddRemoveIncrementalNamedPartition': 'AddRemoveIncrementalNamedPartitionScalingMechanism'}
    }
    def __init__(self, **kwargs) -> None:
        super(ScalingMechanismDescription, self).__init__(**kwargs)
        # 'kind' is the discriminator constant; concrete subclasses set it.
        self.kind = None
| true | true |
f71c39ebffaad67624f3ef9efc20843ab2437935 | 4,779 | py | Python | selfdrive/controls/lib/long_mpc.py | DS1SQM/HKG082_SCC_2 | c5b1c7fb593a69b1bc585eaa9947c0a76f381bbc | [
"MIT"
] | null | null | null | selfdrive/controls/lib/long_mpc.py | DS1SQM/HKG082_SCC_2 | c5b1c7fb593a69b1bc585eaa9947c0a76f381bbc | [
"MIT"
] | null | null | null | selfdrive/controls/lib/long_mpc.py | DS1SQM/HKG082_SCC_2 | c5b1c7fb593a69b1bc585eaa9947c0a76f381bbc | [
"MIT"
] | 1 | 2020-10-20T12:26:33.000Z | 2020-10-20T12:26:33.000Z | import os
import math
import cereal.messaging as messaging
from common.numpy_fast import clip, interp
from selfdrive.swaglog import cloudlog
from common.realtime import sec_since_boot
from selfdrive.controls.lib.radar_helpers import _LEAD_ACCEL_TAU
from selfdrive.controls.lib.longitudinal_mpc import libmpc_py
from selfdrive.controls.lib.drive_helpers import MPC_COST_LONG
LOG_MPC = os.environ.get('LOG_MPC', False)
class LongitudinalMpc():
  """One longitudinal MPC solver instance, identified by mpc_id.

  Wraps the C solver from libmpc_py: setup_mpc() allocates the ffi
  state/solution structs, update() runs one solve against the current car
  state and lead, and publish() emits the raw solution for debug logging.
  """
  def __init__(self, mpc_id):
    self.mpc_id = mpc_id  # forwarded to libmpc_py.get_libmpc and debug messages
    self.setup_mpc()
    # Latest solution samples (filled in by update()).
    self.v_mpc = 0.0
    self.v_mpc_future = 0.0
    self.a_mpc = 0.0
    self.v_cruise = 0.0
    # Lead tracking state used to detect new or jumped leads.
    self.prev_lead_status = False
    self.prev_lead_x = 0.0
    self.new_lead = False
    self.last_cloudlog_t = 0.0  # rate limit for reset warnings
    self.n_its = 0    # solver iterations of the last solve
    self.duration = 0  # wall time of the last solve, in ns
    # scc smoother
    self.cruise_gap = 0
  def publish(self, pm):
    """Send the latest solver state on 'liveLongitudinalMpc' (only when LOG_MPC is set)."""
    if LOG_MPC:
      qp_iterations = max(0, self.n_its)
      dat = messaging.new_message('liveLongitudinalMpc')
      dat.liveLongitudinalMpc.xEgo = list(self.mpc_solution[0].x_ego)
      dat.liveLongitudinalMpc.vEgo = list(self.mpc_solution[0].v_ego)
      dat.liveLongitudinalMpc.aEgo = list(self.mpc_solution[0].a_ego)
      dat.liveLongitudinalMpc.xLead = list(self.mpc_solution[0].x_l)
      dat.liveLongitudinalMpc.vLead = list(self.mpc_solution[0].v_l)
      dat.liveLongitudinalMpc.cost = self.mpc_solution[0].cost
      dat.liveLongitudinalMpc.aLeadTau = self.a_lead_tau
      dat.liveLongitudinalMpc.qpIterations = qp_iterations
      dat.liveLongitudinalMpc.mpcId = self.mpc_id
      dat.liveLongitudinalMpc.calculationTime = self.duration
      pm.send('liveLongitudinalMpc', dat)
  def setup_mpc(self):
    """Create the solver handle and allocate the ffi state/solution structs."""
    ffi, self.libmpc = libmpc_py.get_libmpc(self.mpc_id)
    self.libmpc.init(MPC_COST_LONG.TTC, MPC_COST_LONG.DISTANCE,
                     MPC_COST_LONG.ACCELERATION, MPC_COST_LONG.JERK)
    self.mpc_solution = ffi.new("log_t *")
    self.cur_state = ffi.new("state_t *")
    self.cur_state[0].v_ego = 0
    self.cur_state[0].a_ego = 0
    self.a_lead_tau = _LEAD_ACCEL_TAU
  def set_cur_state(self, v, a):
    """Seed the solver's ego velocity/acceleration for the next solve."""
    self.cur_state[0].v_ego = v
    self.cur_state[0].a_ego = a
  def update(self, CS, lead):
    """Run one MPC solve for car state `CS` and (possibly absent) `lead`."""
    v_ego = CS.vEgo
    # Setup current mpc state
    self.cur_state[0].x_ego = 0.0
    if lead is not None and lead.status:
      x_lead = max(0, lead.dRel - 0.5)
      v_lead = max(0.0, lead.vLead)
      a_lead = lead.aLeadK
      if (v_lead < 0.1 or -a_lead / 2.0 > v_lead):
        # A very slow or strongly decelerating lead is treated as stopped.
        v_lead = 0.0
        a_lead = 0.0
      self.a_lead_tau = max(lead.aLeadTau, (a_lead ** 2 * math.pi) / (2 * (v_lead + 0.01) ** 2))
      self.new_lead = False
      if not self.prev_lead_status or abs(x_lead - self.prev_lead_x) > 2.5:
        # First sighting, or the lead position jumped: re-seed the solver.
        self.libmpc.init_with_simulation(self.v_mpc, x_lead, v_lead, a_lead, self.a_lead_tau)
        self.new_lead = True
      self.prev_lead_status = True
      self.prev_lead_x = x_lead
      self.cur_state[0].x_l = x_lead
      self.cur_state[0].v_l = v_lead
    else:
      self.prev_lead_status = False
      # Fake a fast lead car, so mpc keeps running
      self.cur_state[0].x_l = 50.0
      self.cur_state[0].v_l = v_ego + 10.0
      a_lead = 0.0
      self.a_lead_tau = _LEAD_ACCEL_TAU
    # Calculate mpc
    t = sec_since_boot()
    # scc smoother
    cruise_gap = int(clip(CS.cruiseGap, 1., 4.))
    # TR = interp(float(cruise_gap), [1., 2., 3., 4.], [1.0, 1.3, 1.6, 2.0])
    TR = interp(v_ego, [3., 30.], [1., 2.5])
    if self.cruise_gap != cruise_gap:
      # Remember the latest driver-selected gap (not otherwise read here).
      self.cruise_gap = cruise_gap
    self.n_its = self.libmpc.run_mpc(self.cur_state, self.mpc_solution, self.a_lead_tau, a_lead, TR)
    self.duration = int((sec_since_boot() - t) * 1e9)
    # Get solution. MPC timestep is 0.2 s, so interpolation to 0.05 s is needed
    self.v_mpc = self.mpc_solution[0].v_ego[1]
    self.a_mpc = self.mpc_solution[0].a_ego[1]
    self.v_mpc_future = self.mpc_solution[0].v_ego[10]
    # Reset if NaN or goes through lead car
    crashing = any(lead - ego < -50 for (lead, ego) in zip(self.mpc_solution[0].x_l, self.mpc_solution[0].x_ego))
    nans = any(math.isnan(x) for x in self.mpc_solution[0].v_ego)
    backwards = min(self.mpc_solution[0].v_ego) < -0.01
    if ((backwards or crashing) and self.prev_lead_status) or nans:
      if t > self.last_cloudlog_t + 5.0:
        self.last_cloudlog_t = t
        cloudlog.warning("Longitudinal mpc %d reset - backwards: %s crashing: %s nan: %s" % (
          self.mpc_id, backwards, crashing, nans))
      self.libmpc.init(MPC_COST_LONG.TTC, MPC_COST_LONG.DISTANCE,
                       MPC_COST_LONG.ACCELERATION, MPC_COST_LONG.JERK)
      self.cur_state[0].v_ego = v_ego
      self.cur_state[0].a_ego = 0.0
      self.v_mpc = v_ego
      self.a_mpc = CS.aEgo
      self.prev_lead_status = False
| 35.139706 | 113 | 0.669178 | import os
import math
import cereal.messaging as messaging
from common.numpy_fast import clip, interp
from selfdrive.swaglog import cloudlog
from common.realtime import sec_since_boot
from selfdrive.controls.lib.radar_helpers import _LEAD_ACCEL_TAU
from selfdrive.controls.lib.longitudinal_mpc import libmpc_py
from selfdrive.controls.lib.drive_helpers import MPC_COST_LONG
LOG_MPC = os.environ.get('LOG_MPC', False)
class LongitudinalMpc():
  """One longitudinal MPC solver instance, identified by mpc_id.

  Wraps the C solver from libmpc_py: setup_mpc() allocates the ffi
  state/solution structs, update() runs one solve against the current car
  state and lead, and publish() emits the raw solution for debug logging.
  """
  def __init__(self, mpc_id):
    self.mpc_id = mpc_id  # forwarded to libmpc_py.get_libmpc and debug messages
    self.setup_mpc()
    # Latest solution samples (filled in by update()).
    self.v_mpc = 0.0
    self.v_mpc_future = 0.0
    self.a_mpc = 0.0
    self.v_cruise = 0.0
    # Lead tracking state used to detect new or jumped leads.
    self.prev_lead_status = False
    self.prev_lead_x = 0.0
    self.new_lead = False
    self.last_cloudlog_t = 0.0  # rate limit for reset warnings
    self.n_its = 0    # solver iterations of the last solve
    self.duration = 0  # wall time of the last solve, in ns
    self.cruise_gap = 0  # last driver-selected cruise gap (scc smoother)
  def publish(self, pm):
    """Send the latest solver state on 'liveLongitudinalMpc' (only when LOG_MPC is set)."""
    if LOG_MPC:
      qp_iterations = max(0, self.n_its)
      dat = messaging.new_message('liveLongitudinalMpc')
      dat.liveLongitudinalMpc.xEgo = list(self.mpc_solution[0].x_ego)
      dat.liveLongitudinalMpc.vEgo = list(self.mpc_solution[0].v_ego)
      dat.liveLongitudinalMpc.aEgo = list(self.mpc_solution[0].a_ego)
      dat.liveLongitudinalMpc.xLead = list(self.mpc_solution[0].x_l)
      dat.liveLongitudinalMpc.vLead = list(self.mpc_solution[0].v_l)
      dat.liveLongitudinalMpc.cost = self.mpc_solution[0].cost
      dat.liveLongitudinalMpc.aLeadTau = self.a_lead_tau
      dat.liveLongitudinalMpc.qpIterations = qp_iterations
      dat.liveLongitudinalMpc.mpcId = self.mpc_id
      dat.liveLongitudinalMpc.calculationTime = self.duration
      pm.send('liveLongitudinalMpc', dat)
  def setup_mpc(self):
    """Create the solver handle and allocate the ffi state/solution structs."""
    ffi, self.libmpc = libmpc_py.get_libmpc(self.mpc_id)
    self.libmpc.init(MPC_COST_LONG.TTC, MPC_COST_LONG.DISTANCE,
                     MPC_COST_LONG.ACCELERATION, MPC_COST_LONG.JERK)
    self.mpc_solution = ffi.new("log_t *")
    self.cur_state = ffi.new("state_t *")
    self.cur_state[0].v_ego = 0
    self.cur_state[0].a_ego = 0
    self.a_lead_tau = _LEAD_ACCEL_TAU
  def set_cur_state(self, v, a):
    """Seed the solver's ego velocity/acceleration for the next solve."""
    self.cur_state[0].v_ego = v
    self.cur_state[0].a_ego = a
  def update(self, CS, lead):
    """Run one MPC solve for car state `CS` and (possibly absent) `lead`."""
    v_ego = CS.vEgo
    # Seed the current mpc state; ego position is always the origin.
    self.cur_state[0].x_ego = 0.0
    if lead is not None and lead.status:
      x_lead = max(0, lead.dRel - 0.5)
      v_lead = max(0.0, lead.vLead)
      a_lead = lead.aLeadK
      if (v_lead < 0.1 or -a_lead / 2.0 > v_lead):
        # A very slow or strongly decelerating lead is treated as stopped.
        v_lead = 0.0
        a_lead = 0.0
      self.a_lead_tau = max(lead.aLeadTau, (a_lead ** 2 * math.pi) / (2 * (v_lead + 0.01) ** 2))
      self.new_lead = False
      if not self.prev_lead_status or abs(x_lead - self.prev_lead_x) > 2.5:
        # First sighting, or the lead position jumped: re-seed the solver.
        self.libmpc.init_with_simulation(self.v_mpc, x_lead, v_lead, a_lead, self.a_lead_tau)
        self.new_lead = True
      self.prev_lead_status = True
      self.prev_lead_x = x_lead
      self.cur_state[0].x_l = x_lead
      self.cur_state[0].v_l = v_lead
    else:
      self.prev_lead_status = False
      # No lead: fake a fast, distant one so the solver keeps running.
      self.cur_state[0].x_l = 50.0
      self.cur_state[0].v_l = v_ego + 10.0
      a_lead = 0.0
      self.a_lead_tau = _LEAD_ACCEL_TAU
    t = sec_since_boot()
    cruise_gap = int(clip(CS.cruiseGap, 1., 4.))
    # Desired time gap TR grows with ego speed.
    TR = interp(v_ego, [3., 30.], [1., 2.5])
    if self.cruise_gap != cruise_gap:
      # Remember the latest driver-selected gap (not otherwise read here).
      self.cruise_gap = cruise_gap
    self.n_its = self.libmpc.run_mpc(self.cur_state, self.mpc_solution, self.a_lead_tau, a_lead, TR)
    self.duration = int((sec_since_boot() - t) * 1e9)
    # Extract the relevant samples from the fresh solution.
    self.v_mpc = self.mpc_solution[0].v_ego[1]
    self.a_mpc = self.mpc_solution[0].a_ego[1]
    self.v_mpc_future = self.mpc_solution[0].v_ego[10]
    # Reset the solver if the solution is NaN, drives backwards, or passes
    # through the lead car.
    crashing = any(lead - ego < -50 for (lead, ego) in zip(self.mpc_solution[0].x_l, self.mpc_solution[0].x_ego))
    nans = any(math.isnan(x) for x in self.mpc_solution[0].v_ego)
    backwards = min(self.mpc_solution[0].v_ego) < -0.01
    if ((backwards or crashing) and self.prev_lead_status) or nans:
      if t > self.last_cloudlog_t + 5.0:
        self.last_cloudlog_t = t
        cloudlog.warning("Longitudinal mpc %d reset - backwards: %s crashing: %s nan: %s" % (
          self.mpc_id, backwards, crashing, nans))
      self.libmpc.init(MPC_COST_LONG.TTC, MPC_COST_LONG.DISTANCE,
                       MPC_COST_LONG.ACCELERATION, MPC_COST_LONG.JERK)
      self.cur_state[0].v_ego = v_ego
      self.cur_state[0].a_ego = 0.0
      self.v_mpc = v_ego
      self.a_mpc = CS.aEgo
      self.prev_lead_status = False
| true | true |
f71c39f0319329a2f4f59ab30550497ff4c60f40 | 6,860 | py | Python | script/bfe.py | tiancity-NJU/REID | 125a520a9c0b94440a7757e6f3c3c8bf976906ec | [
"MIT"
] | 1 | 2020-06-15T07:50:05.000Z | 2020-06-15T07:50:05.000Z | script/bfe.py | tiancity-NJU/REID | 125a520a9c0b94440a7757e6f3c3c8bf976906ec | [
"MIT"
] | null | null | null | script/bfe.py | tiancity-NJU/REID | 125a520a9c0b94440a7757e6f3c3c8bf976906ec | [
"MIT"
] | 4 | 2019-04-09T13:10:58.000Z | 2020-03-06T15:22:38.000Z | # encoding: utf-8
import os
import sys
from os import path as osp
from pprint import pprint
import numpy as np
import torch
from tensorboardX import SummaryWriter
from torch import nn
from torch.backends import cudnn
from torch.utils.data import DataLoader
sys.path.insert(0,os.path.abspath(os.path.dirname(__file__)+os.sep+'..'))
from config import opt
from datasets import data_manager
from datasets.data_loader import ImageData
from datasets.samplers import RandomIdentitySampler
from models.networks import ResNetBuilder, IDE, Resnet, BFE
#from models.BFE import BFE
from trainers.evaluator import ResNetEvaluator
from trainers.trainer import cls_tripletTrainer
from utils.loss import CrossEntropyLabelSmooth, TripletLoss, Margin
from utils.LiftedStructure import LiftedStructureLoss
from utils.DistWeightDevianceLoss import DistWeightBinDevianceLoss
from utils.serialization import Logger, save_checkpoint
from utils.transforms import TestTransform, TrainTransform
def train(**kwargs):
    """Train (or, with opt.evaluate, only evaluate) the BFE re-id model.

    `kwargs` override fields of the global config `opt` (via opt._parse).
    Side effects: redirects sys.stdout to a Logger under opt.save_dir and
    writes tensorboard logs and checkpoints there.
    """
    opt._parse(kwargs)
    opt.model_name = 'bfe'
    # set random seed and cudnn benchmark
    torch.manual_seed(opt.seed)
    os.makedirs(opt.save_dir, exist_ok=True)
    use_gpu = torch.cuda.is_available()
    sys.stdout = Logger(osp.join(opt.save_dir, 'log_train.txt'))
    print('=========user config==========')
    pprint(opt._state_dict())
    print('============end===============')
    if use_gpu:
        print('currently using GPU')
        cudnn.benchmark = True
        torch.cuda.manual_seed_all(opt.seed)
    else:
        print('currently using cpu')
    print('initializing dataset {}'.format(opt.dataset))
    dataset = data_manager.init_dataset(name=opt.dataset, mode=opt.mode)
    pin_memory = True if use_gpu else False
    summary_writer = SummaryWriter(osp.join(opt.save_dir, 'tensorboard_log'))
    # Training loader groups images per identity for the triplet-style losses.
    trainloader = DataLoader(
        ImageData(dataset.train, TrainTransform(opt.datatype)),
        sampler=RandomIdentitySampler(dataset.train, opt.num_instances),
        batch_size=opt.train_batch, num_workers=opt.workers,
        pin_memory=pin_memory, drop_last=True
    )
    queryloader = DataLoader(
        ImageData(dataset.query, TestTransform(opt.datatype)),
        batch_size=opt.test_batch, num_workers=opt.workers,
        pin_memory=pin_memory
    )
    galleryloader = DataLoader(
        ImageData(dataset.gallery, TestTransform(opt.datatype)),
        batch_size=opt.test_batch, num_workers=opt.workers,
        pin_memory=pin_memory
    )
    # Extra query/gallery loaders for the evaluator; TestTransform's second
    # argument presumably enables horizontal flipping — confirm in utils.transforms.
    queryFliploader = DataLoader(
        ImageData(dataset.query, TestTransform(opt.datatype, True)),
        batch_size=opt.test_batch, num_workers=opt.workers,
        pin_memory=pin_memory
    )
    galleryFliploader = DataLoader(
        ImageData(dataset.gallery, TestTransform(opt.datatype, True)),
        batch_size=opt.test_batch, num_workers=opt.workers,
        pin_memory=pin_memory
    )
    print('initializing model ...')
    model = BFE(dataset.num_train_pids, 1.0, 0.33)
    optim_policy = model.get_optim_policy()
    if opt.pretrained_model:
        # Non-strict load (second arg False) tolerates missing/extra keys.
        state_dict = torch.load(opt.pretrained_model)['state_dict']
        # state_dict = {k: v for k, v in state_dict.items() \
        #        if not ('reduction' in k or 'softmax' in k)}
        model.load_state_dict(state_dict, False)
        print('load pretrained model ' + opt.pretrained_model)
    print('model size: {:.5f}M'.format(sum(p.numel() for p in model.parameters()) / 1e6))
    if use_gpu:
        model = nn.DataParallel(model).cuda()
    reid_evaluator = ResNetEvaluator(model)
    if opt.evaluate:
        # Evaluation-only mode: no training loop runs.
        reid_evaluator.evaluate(queryloader, galleryloader,
            queryFliploader, galleryFliploader, re_ranking=opt.re_ranking, savefig=opt.savefig)
        return
    # xent_criterion = nn.CrossEntropyLoss()
    xent_criterion = CrossEntropyLabelSmooth(dataset.num_train_pids)
    if opt.loss == 'triplet':
        embedding_criterion = TripletLoss(opt.margin)
    elif opt.loss == 'lifted':
        embedding_criterion = LiftedStructureLoss(hard_mining=True)
    elif opt.loss == 'weight':
        embedding_criterion = Margin()
    def criterion(triplet_y, softmax_y, labels):
        # Sum the embedding loss over the triplet heads and the (label
        # smoothed) cross entropy over the softmax heads.
        losses = [embedding_criterion(output, labels)[0] for output in triplet_y] + \
                 [xent_criterion(output, labels) for output in softmax_y]
        loss = sum(losses)
        return loss
    # get optimizer
    if opt.optim == "sgd":
        optimizer = torch.optim.SGD(optim_policy, lr=opt.lr, momentum=0.9, weight_decay=opt.weight_decay)
    else:
        optimizer = torch.optim.Adam(optim_policy, lr=opt.lr, weight_decay=opt.weight_decay)
    start_epoch = opt.start_epoch
    # get trainer and evaluator
    reid_trainer = cls_tripletTrainer(opt, model, optimizer, criterion, summary_writer)
    def adjust_lr(optimizer, ep):
        # Piecewise schedule: stepwise warmup (every 5 epochs) up to 1e-3
        # by epoch 50, then decay at epochs 200 and 300.
        if ep < 50:
            lr = 1e-4 * (ep // 5 + 1)
        elif ep < 200:
            lr = 1e-3
        elif ep < 300:
            lr = 1e-4
        else:
            lr = 1e-5
        for p in optimizer.param_groups:
            p['lr'] = lr
    # start training
    best_rank1 = opt.best_rank
    best_epoch = 0
    for epoch in range(start_epoch, opt.max_epoch):
        if opt.adjust_lr:
            adjust_lr(optimizer, epoch + 1)
        reid_trainer.train(epoch, trainloader)
        # skip if not save model
        if opt.eval_step > 0 and (epoch + 1) % opt.eval_step == 0 or (epoch + 1) == opt.max_epoch:
            if opt.mode == 'class':
                rank1 = test(model, queryloader)
            else:
                rank1 = reid_evaluator.evaluate(queryloader, galleryloader, queryFliploader, galleryFliploader)
            is_best = rank1 > best_rank1
            if is_best:
                best_rank1 = rank1
                best_epoch = epoch + 1
            if use_gpu:
                state_dict = model.module.state_dict()
            else:
                state_dict = model.state_dict()
            save_checkpoint({'state_dict': state_dict, 'epoch': epoch + 1},
                            is_best=is_best, save_dir=opt.save_dir,
                            filename='checkpoint_ep' + str(epoch + 1) + '.pth.tar')
    print('Best rank-1 {:.1%}, achived at epoch {}'.format(best_rank1, best_epoch))
def test(model, queryloader):
model.eval()
correct = 0
with torch.no_grad():
for data, target, _ in queryloader:
output = model(data).cpu()
# get the index of the max log-probability
pred = output.max(1, keepdim=True)[1]
correct += pred.eq(target.view_as(pred)).sum().item()
rank1 = 100. * correct / len(queryloader.dataset)
print('\nTest set: Accuracy: {}/{} ({:.2f}%)\n'.format(correct, len(queryloader.dataset), rank1))
return rank1
if __name__ == '__main__':
    # CLI entry point: python-fire exposes the module-level functions
    # (e.g. `python <script>.py train --dataset=...`) as subcommands.
    import fire
    fire.Fire()
| 34.3 | 115 | 0.655248 |
import os
import sys
from os import path as osp
from pprint import pprint
import numpy as np
import torch
from tensorboardX import SummaryWriter
from torch import nn
from torch.backends import cudnn
from torch.utils.data import DataLoader
sys.path.insert(0,os.path.abspath(os.path.dirname(__file__)+os.sep+'..'))
from config import opt
from datasets import data_manager
from datasets.data_loader import ImageData
from datasets.samplers import RandomIdentitySampler
from models.networks import ResNetBuilder, IDE, Resnet, BFE
from trainers.evaluator import ResNetEvaluator
from trainers.trainer import cls_tripletTrainer
from utils.loss import CrossEntropyLabelSmooth, TripletLoss, Margin
from utils.LiftedStructure import LiftedStructureLoss
from utils.DistWeightDevianceLoss import DistWeightBinDevianceLoss
from utils.serialization import Logger, save_checkpoint
from utils.transforms import TestTransform, TrainTransform
def train(**kwargs):
    """Train the BFE re-identification model.

    Keyword arguments override the global ``opt`` config (see ``config``
    module). Side effects: creates ``opt.save_dir``, redirects stdout to a
    log file, writes tensorboard logs and checkpoints.
    """
    # merge CLI overrides into the global config object
    opt._parse(kwargs)
    opt.model_name = 'bfe'
    torch.manual_seed(opt.seed)
    os.makedirs(opt.save_dir, exist_ok=True)
    use_gpu = torch.cuda.is_available()
    # NOTE: stdout is redirected for the rest of the process, not restored
    sys.stdout = Logger(osp.join(opt.save_dir, 'log_train.txt'))
    print('=========user config==========')
    pprint(opt._state_dict())
    print('============end===============')
    if use_gpu:
        print('currently using GPU')
        cudnn.benchmark = True
        torch.cuda.manual_seed_all(opt.seed)
    else:
        print('currently using cpu')
    print('initializing dataset {}'.format(opt.dataset))
    dataset = data_manager.init_dataset(name=opt.dataset, mode=opt.mode)
    pin_memory = True if use_gpu else False
    summary_writer = SummaryWriter(osp.join(opt.save_dir, 'tensorboard_log'))
    # train loader samples identities for the triplet-style losses
    trainloader = DataLoader(
        ImageData(dataset.train, TrainTransform(opt.datatype)),
        sampler=RandomIdentitySampler(dataset.train, opt.num_instances),
        batch_size=opt.train_batch, num_workers=opt.workers,
        pin_memory=pin_memory, drop_last=True
    )
    queryloader = DataLoader(
        ImageData(dataset.query, TestTransform(opt.datatype)),
        batch_size=opt.test_batch, num_workers=opt.workers,
        pin_memory=pin_memory
    )
    galleryloader = DataLoader(
        ImageData(dataset.gallery, TestTransform(opt.datatype)),
        batch_size=opt.test_batch, num_workers=opt.workers,
        pin_memory=pin_memory
    )
    # horizontally flipped copies for flip-augmented evaluation
    queryFliploader = DataLoader(
        ImageData(dataset.query, TestTransform(opt.datatype, True)),
        batch_size=opt.test_batch, num_workers=opt.workers,
        pin_memory=pin_memory
    )
    galleryFliploader = DataLoader(
        ImageData(dataset.gallery, TestTransform(opt.datatype, True)),
        batch_size=opt.test_batch, num_workers=opt.workers,
        pin_memory=pin_memory
    )
    print('initializing model ...')
    # positional args are the BFE width/erasing ratios — TODO confirm against
    # models.networks.BFE signature
    model = BFE(dataset.num_train_pids, 1.0, 0.33)
    optim_policy = model.get_optim_policy()
    if opt.pretrained_model:
        state_dict = torch.load(opt.pretrained_model)['state_dict']
        # strict=False: allow missing/unexpected keys when warm-starting
        model.load_state_dict(state_dict, False)
        print('load pretrained model ' + opt.pretrained_model)
    print('model size: {:.5f}M'.format(sum(p.numel() for p in model.parameters()) / 1e6))
    if use_gpu:
        model = nn.DataParallel(model).cuda()
    reid_evaluator = ResNetEvaluator(model)
    # evaluation-only mode: no training loop
    if opt.evaluate:
        reid_evaluator.evaluate(queryloader, galleryloader,
                                queryFliploader, galleryFliploader, re_ranking=opt.re_ranking, savefig=opt.savefig)
        return
    xent_criterion = CrossEntropyLabelSmooth(dataset.num_train_pids)
    # embedding loss selected by config; no 'else' — an unknown opt.loss
    # leaves embedding_criterion undefined and fails later in criterion()
    if opt.loss == 'triplet':
        embedding_criterion = TripletLoss(opt.margin)
    elif opt.loss == 'lifted':
        embedding_criterion = LiftedStructureLoss(hard_mining=True)
    elif opt.loss == 'weight':
        embedding_criterion = Margin()
    def criterion(triplet_y, softmax_y, labels):
        # sum of embedding losses over triplet heads plus label-smoothed
        # cross-entropy over softmax heads
        losses = [embedding_criterion(output, labels)[0] for output in triplet_y] + \
                 [xent_criterion(output, labels) for output in softmax_y]
        loss = sum(losses)
        return loss
    if opt.optim == "sgd":
        optimizer = torch.optim.SGD(optim_policy, lr=opt.lr, momentum=0.9, weight_decay=opt.weight_decay)
    else:
        optimizer = torch.optim.Adam(optim_policy, lr=opt.lr, weight_decay=opt.weight_decay)
    start_epoch = opt.start_epoch
    reid_trainer = cls_tripletTrainer(opt, model, optimizer, criterion, summary_writer)
    def adjust_lr(optimizer, ep):
        # staircase schedule: warm-up over the first 50 epochs, then decay
        if ep < 50:
            lr = 1e-4 * (ep // 5 + 1)
        elif ep < 200:
            lr = 1e-3
        elif ep < 300:
            lr = 1e-4
        else:
            lr = 1e-5
        for p in optimizer.param_groups:
            p['lr'] = lr
    best_rank1 = opt.best_rank
    best_epoch = 0
    for epoch in range(start_epoch, opt.max_epoch):
        if opt.adjust_lr:
            adjust_lr(optimizer, epoch + 1)
        reid_trainer.train(epoch, trainloader)
        # evaluate (and checkpoint) every eval_step epochs and at the end
        if opt.eval_step > 0 and (epoch + 1) % opt.eval_step == 0 or (epoch + 1) == opt.max_epoch:
            if opt.mode == 'class':
                rank1 = test(model, queryloader)
            else:
                rank1 = reid_evaluator.evaluate(queryloader, galleryloader, queryFliploader, galleryFliploader)
            is_best = rank1 > best_rank1
            if is_best:
                best_rank1 = rank1
                best_epoch = epoch + 1
            # unwrap DataParallel before saving so keys have no 'module.' prefix
            if use_gpu:
                state_dict = model.module.state_dict()
            else:
                state_dict = model.state_dict()
            save_checkpoint({'state_dict': state_dict, 'epoch': epoch + 1},
                            is_best=is_best, save_dir=opt.save_dir,
                            filename='checkpoint_ep' + str(epoch + 1) + '.pth.tar')
    print('Best rank-1 {:.1%}, achived at epoch {}'.format(best_rank1, best_epoch))
def test(model, queryloader):
model.eval()
correct = 0
with torch.no_grad():
for data, target, _ in queryloader:
output = model(data).cpu()
pred = output.max(1, keepdim=True)[1]
correct += pred.eq(target.view_as(pred)).sum().item()
rank1 = 100. * correct / len(queryloader.dataset)
print('\nTest set: Accuracy: {}/{} ({:.2f}%)\n'.format(correct, len(queryloader.dataset), rank1))
return rank1
if __name__ == '__main__':
    # CLI entry point: python-fire exposes the module-level functions
    # (e.g. `python <script>.py train --dataset=...`) as subcommands.
    import fire
    fire.Fire()
| true | true |
f71c3aaf6ae1025d594d51ec2a103091d3f8a9c3 | 21,405 | py | Python | pandapower/build_gen.py | mathildebadoual/pandapower | 9ba4bcb78e84b644d2ba6df0c08e285c54af8ddc | [
"BSD-3-Clause"
] | 1 | 2020-10-19T06:39:15.000Z | 2020-10-19T06:39:15.000Z | pandapower/build_gen.py | mathildebadoual/pandapower | 9ba4bcb78e84b644d2ba6df0c08e285c54af8ddc | [
"BSD-3-Clause"
] | null | null | null | pandapower/build_gen.py | mathildebadoual/pandapower | 9ba4bcb78e84b644d2ba6df0c08e285c54af8ddc | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
# Copyright (c) 2016-2018 by University of Kassel and Fraunhofer Institute for Energy Economics
# and Energy System Technology (IEE), Kassel. All rights reserved.
import numpy as np
import numpy.core.numeric as ncn
from numpy import array, zeros, isnan
from pandas import DataFrame
from pandapower.idx_bus import PV, REF, VA, VM, BUS_TYPE, NONE, VMAX, VMIN, PQ
from pandapower.idx_gen import QMIN, QMAX, PMIN, PMAX, GEN_STATUS, GEN_BUS, PG, VG, QG
def _build_gen_ppc(net, ppc):
    '''
    Takes the empty ppc network and fills it with the gen values. The gen
    datatype will be float afterwards.

    For mode "pf"/"sc" the gen table holds ext_grid, gen and xward rows;
    for mode "opf" it additionally holds controllable sgen, load and storage
    rows (in that order), with P/Q limits copied from the element tables.
    Note: pandapower uses the load reference system while PYPOWER uses the
    generator reference system, so powers and limits change sign (and
    max <-> min swap) on the way in.

    **INPUT**:
        **net** -The pandapower format network

        **ppc** - The PYPOWER format network to fill in values
    '''
    mode = net["_options"]["mode"]

    # if mode == power flow or short circuit...
    if mode == "pf" or mode == "sc":

        # get in service elements
        _is_elements = net["_is_elements"]
        eg_is_mask = _is_elements['ext_grid']
        gen_is_mask = _is_elements['gen']

        # row layout of ppc["gen"]: [0, eg_end) ext grids,
        # [eg_end, gen_end) gens, [gen_end, xw_end) xwards
        eg_end = np.sum(eg_is_mask)
        gen_end = eg_end + np.sum(gen_is_mask)
        xw_end = gen_end + len(net["xward"])

        # define default q limits
        q_lim_default = 1e9  # which is 1000 TW - should be enough for distribution grids.
        p_lim_default = 1e9

        _init_ppc_gen(ppc, xw_end, 0)
        if mode == "sc":
            # short circuit only needs the allocated matrix
            return
        # add generator / pv data
        if gen_end > eg_end:
            _build_pp_gen(net, ppc, gen_is_mask, eg_end, gen_end, q_lim_default, p_lim_default)

        _build_pp_ext_grid(net, ppc, eg_is_mask, eg_end)

        # add extended ward pv node data
        if xw_end > gen_end:
            _build_pp_xward(net, ppc, gen_end, xw_end, q_lim_default)

    # if mode == optimal power flow...
    if mode == "opf":

        bus_lookup = net["_pd2ppc_lookups"]["bus"]
        calculate_voltage_angles = net["_options"]["calculate_voltage_angles"]

        if len(net.dcline) > 0:
            ppc["dcline"] = net.dcline[["loss_kw", "loss_percent"]].values

        # get in service elements
        _is_elements = net["_is_elements"]
        eg_is = net["ext_grid"][_is_elements['ext_grid']]
        gen_is = net["gen"][_is_elements['gen']]
        # controllable elements only take part when the column exists
        sg_is = net.sgen[(net.sgen.in_service & net.sgen.controllable) == True] \
            if "controllable" in net.sgen.columns else DataFrame()
        l_is = net.load[(net.load.in_service & net.load.controllable) == True] \
            if "controllable" in net.load.columns else DataFrame()
        stor_is = net.storage[(net.storage.in_service & net.storage.controllable) == True] \
            if "controllable" in net.storage.columns else DataFrame()

        _is_elements["sgen_controllable"] = sg_is
        _is_elements["load_controllable"] = l_is
        _is_elements["storage_controllable"] = stor_is

        # row layout: ext grids | gens | sgens | loads | storages
        eg_end = len(eg_is)
        gen_end = eg_end + len(gen_is)
        sg_end = gen_end + len(sg_is)
        l_end = sg_end + len(l_is)
        stor_end = l_end + len(stor_is)

        q_lim_default = 1e9  # which is 1000 TW - should be enough for distribution grids.
        p_lim_default = 1e9  # changes must be considered in check_opf_data
        delta = net["_options"]["delta"]

        # initialize generator matrix
        ppc["gen"] = zeros(shape=(stor_end, 21), dtype=float)
        ppc["gen"][:] = array([0, 0, 0, q_lim_default, -q_lim_default, 1., 1., 1, p_lim_default,
                               -p_lim_default, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0])

        # add sgens first so pv bus types won't be overwritten
        if sg_end > gen_end:
            gen_buses = bus_lookup[sg_is["bus"].values]

            ppc["gen"][gen_end:sg_end, GEN_BUS] = gen_buses
            ppc["gen"][gen_end:sg_end, PG] = - sg_is["p_kw"].values * 1e-3 * sg_is["scaling"].values
            ppc["gen"][gen_end:sg_end, QG] = sg_is["q_kvar"].values * 1e-3 * sg_is["scaling"].values

            # set bus values for generator buses
            ppc["bus"][gen_buses, BUS_TYPE] = PQ

            # set constraints for controllable sgens
            if "min_q_kvar" in sg_is.columns:
                ppc["gen"][gen_end:sg_end, QMAX] = - (sg_is["min_q_kvar"].values * 1e-3 - delta)
                max_q_kvar = ppc["gen"][gen_end:sg_end, [QMIN]]
                ncn.copyto(max_q_kvar, -q_lim_default, where=isnan(max_q_kvar))
                ppc["gen"][gen_end:sg_end, [QMIN]] = max_q_kvar

            if "max_q_kvar" in sg_is.columns:
                ppc["gen"][gen_end:sg_end, QMIN] = - (sg_is["max_q_kvar"].values * 1e-3 + delta)
                min_q_kvar = ppc["gen"][gen_end:sg_end, [QMAX]]
                ncn.copyto(min_q_kvar, q_lim_default, where=isnan(min_q_kvar))
                ppc["gen"][gen_end:sg_end, [QMAX]] = min_q_kvar - 1e-10  # TODO Why this? (M.Scharf, 2018-02)

            if "max_p_kw" in sg_is.columns:
                ppc["gen"][gen_end:sg_end, PMIN] = - (sg_is["max_p_kw"].values * 1e-3 + delta)
                max_p_kw = ppc["gen"][gen_end:sg_end, [PMIN]]
                ncn.copyto(max_p_kw, -p_lim_default, where=isnan(max_p_kw))
                ppc["gen"][gen_end:sg_end, [PMIN]] = max_p_kw

            if "min_p_kw" in sg_is.columns:
                ppc["gen"][gen_end:sg_end, PMAX] = - (sg_is["min_p_kw"].values * 1e-3 - delta)
                min_p_kw = ppc["gen"][gen_end:sg_end, [PMAX]]
                ncn.copyto(min_p_kw, p_lim_default, where=isnan(min_p_kw))
                ppc["gen"][gen_end:sg_end, [PMAX]] = min_p_kw

        # add controllable loads
        if l_end > sg_end:
            load_buses = bus_lookup[l_is["bus"].values]

            ppc["gen"][sg_end:l_end, GEN_BUS] = load_buses
            ppc["gen"][sg_end:l_end, PG] = - l_is["p_kw"].values * 1e-3 * l_is["scaling"].values
            ppc["gen"][sg_end:l_end, QG] = l_is["q_kvar"].values * 1e-3 * l_is["scaling"].values

            # set bus values for controllable loads
            ppc["bus"][load_buses, BUS_TYPE] = PQ

            # set constraints for controllable loads
            if "min_q_kvar" in l_is.columns:
                ppc["gen"][sg_end:l_end, QMAX] = - (l_is["min_q_kvar"].values * 1e-3 - delta)
                max_q_kvar = ppc["gen"][sg_end:l_end, [QMIN]]
                ncn.copyto(max_q_kvar, -q_lim_default, where=isnan(max_q_kvar))
                ppc["gen"][sg_end:l_end, [QMIN]] = max_q_kvar

            if "max_q_kvar" in l_is.columns:
                ppc["gen"][sg_end:l_end, QMIN] = - (l_is["max_q_kvar"].values * 1e-3 + delta)
                min_q_kvar = ppc["gen"][sg_end:l_end, [QMAX]]
                ncn.copyto(min_q_kvar, q_lim_default, where=isnan(min_q_kvar))
                ppc["gen"][sg_end:l_end, [QMAX]] = min_q_kvar

            # FIX: the presence check must test the same column that is read.
            # Previously "min_p_kw" was tested while l_is["max_p_kw"] was read
            # (and vice versa), which skipped limits or raised a KeyError when
            # only one of the two columns existed. Now consistent with the
            # sgen/storage sections above/below.
            if "max_p_kw" in l_is.columns:
                ppc["gen"][sg_end:l_end, PMIN] = - (l_is["max_p_kw"].values * 1e-3 + delta)
                max_p_kw = ppc["gen"][sg_end:l_end, [PMIN]]
                ncn.copyto(max_p_kw, -p_lim_default, where=isnan(max_p_kw))
                ppc["gen"][sg_end:l_end, [PMIN]] = max_p_kw

            if "min_p_kw" in l_is.columns:
                ppc["gen"][sg_end:l_end, PMAX] = - (l_is["min_p_kw"].values * 1e-3 - delta)
                min_p_kw = ppc["gen"][sg_end:l_end, [PMAX]]
                ncn.copyto(min_p_kw, p_lim_default, where=isnan(min_p_kw))
                ppc["gen"][sg_end:l_end, [PMAX]] = min_p_kw

        # add controllable storages
        if stor_end > l_end:
            stor_buses = bus_lookup[stor_is["bus"].values]

            ppc["gen"][l_end:stor_end, GEN_BUS] = stor_buses
            ppc["gen"][l_end:stor_end, PG] = - stor_is["p_kw"].values * 1e-3 * stor_is["scaling"].values
            ppc["gen"][l_end:stor_end, QG] = stor_is["q_kvar"].values * 1e-3 * stor_is["scaling"].values

            # set bus values for generator buses
            ppc["bus"][stor_buses, BUS_TYPE] = PQ

            # set constraints for controllable sgens
            if "min_q_kvar" in stor_is.columns:
                ppc["gen"][l_end:stor_end, QMAX] = - (stor_is["min_q_kvar"].values * 1e-3 - delta)
                max_q_kvar = ppc["gen"][l_end:stor_end, [QMIN]]
                ncn.copyto(max_q_kvar, -q_lim_default, where=isnan(max_q_kvar))
                ppc["gen"][l_end:stor_end, [QMIN]] = max_q_kvar

            if "max_q_kvar" in stor_is.columns:
                ppc["gen"][l_end:stor_end, QMIN] = - (stor_is["max_q_kvar"].values * 1e-3 + delta)
                min_q_kvar = ppc["gen"][l_end:stor_end, [QMAX]]
                ncn.copyto(min_q_kvar, q_lim_default, where=isnan(min_q_kvar))
                ppc["gen"][l_end:stor_end, [QMAX]] = min_q_kvar

            if "max_p_kw" in stor_is.columns:
                ppc["gen"][l_end:stor_end, PMIN] = - (stor_is["max_p_kw"].values * 1e-3 + delta)
                max_p_kw = ppc["gen"][l_end:stor_end, [PMIN]]
                ncn.copyto(max_p_kw, -p_lim_default, where=isnan(max_p_kw))
                ppc["gen"][l_end:stor_end, [PMIN]] = max_p_kw

            if "min_p_kw" in stor_is.columns:
                ppc["gen"][l_end:stor_end, PMAX] = - (stor_is["min_p_kw"].values * 1e-3 - delta)
                min_p_kw = ppc["gen"][l_end:stor_end, [PMAX]]
                ncn.copyto(min_p_kw, p_lim_default, where=isnan(min_p_kw))
                ppc["gen"][l_end:stor_end, [PMAX]] = min_p_kw

        # add ext grid / slack data
        ppc["gen"][:eg_end, GEN_BUS] = bus_lookup[eg_is["bus"].values]
        ppc["gen"][:eg_end, VG] = eg_is["vm_pu"].values
        ppc["gen"][:eg_end, GEN_STATUS] = eg_is["in_service"].values

        # NOTE(review): the delta signs in this ext-grid P-limit section are
        # opposite to the sgen/load/storage sections (limits are tightened
        # instead of relaxed) — verify whether this is intended.
        if "max_p_kw" in eg_is.columns:
            ppc["gen"][:eg_end, PMIN] = - (eg_is["max_p_kw"].values * 1e-3 - delta)
            max_p_kw = ppc["gen"][:eg_end, [PMIN]]
            ncn.copyto(max_p_kw, -p_lim_default, where=isnan(max_p_kw))
            ppc["gen"][:eg_end, [PMIN]] = max_p_kw

        if "min_p_kw" in eg_is.columns:
            ppc["gen"][:eg_end, PMAX] = - (eg_is["min_p_kw"].values * 1e-3 + delta)
            min_p_kw = ppc["gen"][:eg_end, [PMAX]]
            ncn.copyto(min_p_kw, p_lim_default, where=isnan(min_p_kw))
            ppc["gen"][:eg_end, [PMAX]] = min_p_kw

        if "min_q_kvar" in eg_is.columns:
            ppc["gen"][:eg_end, QMAX] = - (eg_is["min_q_kvar"].values * 1e-3 - delta)
            max_q_kvar = ppc["gen"][:eg_end, [QMIN]]
            ncn.copyto(max_q_kvar, -q_lim_default, where=isnan(max_q_kvar))
            ppc["gen"][:eg_end, [QMIN]] = max_q_kvar

        if "max_q_kvar" in eg_is.columns:
            ppc["gen"][:eg_end, QMIN] = - (eg_is["max_q_kvar"].values * 1e-3 + delta)
            min_q_kvar = ppc["gen"][:eg_end, [QMAX]]
            ncn.copyto(min_q_kvar, q_lim_default, where=isnan(min_q_kvar))
            ppc["gen"][:eg_end, [QMAX]] = min_q_kvar - 1e-10

        # set bus values for external grid buses
        eg_buses = bus_lookup[eg_is["bus"].values]
        if calculate_voltage_angles:
            ppc["bus"][eg_buses, VA] = eg_is["va_degree"].values
        ppc["bus"][eg_buses, BUS_TYPE] = REF
        ppc["bus"][eg_buses, VM] = eg_is["vm_pu"].values

        # REF busses don't have flexible voltages by definition:
        ppc["bus"][eg_buses, VMAX] = ppc["bus"][ppc["bus"][:, BUS_TYPE] == REF, VM]
        ppc["bus"][eg_buses, VMIN] = ppc["bus"][ppc["bus"][:, BUS_TYPE] == REF, VM]

        # add generator / pv data
        if gen_end > eg_end:
            ppc["gen"][eg_end:gen_end, GEN_BUS] = bus_lookup[gen_is["bus"].values]
            ppc["gen"][eg_end:gen_end, PG] = - gen_is["p_kw"].values * 1e-3 * gen_is["scaling"].values
            ppc["gen"][eg_end:gen_end, VG] = gen_is["vm_pu"].values

            # set bus values for generator buses
            gen_buses = bus_lookup[gen_is["bus"].values]
            ppc["bus"][gen_buses, BUS_TYPE] = PV
            ppc["bus"][gen_buses, VM] = gen_is["vm_pu"].values

            # set constraints for PV generators
            _copy_q_limits_to_ppc(net, ppc, eg_end, gen_end, _is_elements['gen'])
            _copy_p_limits_to_ppc(net, ppc, eg_end, gen_end, _is_elements['gen'])

            _replace_nans_with_default_q_limits_in_ppc(ppc, eg_end, gen_end, q_lim_default)
            _replace_nans_with_default_p_limits_in_ppc(ppc, eg_end, gen_end, p_lim_default)
def _init_ppc_gen(ppc, xw_end, q_lim_default):
# initialize generator matrix
ppc["gen"] = np.zeros(shape=(xw_end, 21), dtype=float)
ppc["gen"][:] = np.array([0, 0, 0, q_lim_default, -q_lim_default, 1.,
1., 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0])
def _build_pp_ext_grid(net, ppc, eg_is_mask, eg_end):
    """Write the in-service ext_grid elements into ppc["gen"] rows [0, eg_end).

    Sets the connecting bus, voltage setpoint and status of each slack unit
    and marks the corresponding ppc buses as REF (with the angle setpoint if
    voltage angles are calculated).
    """
    bus_lookup = net["_pd2ppc_lookups"]["bus"]
    ext_grid = net["ext_grid"]
    slack_buses = bus_lookup[ext_grid["bus"].values[eg_is_mask]]

    ppc["gen"][:eg_end, GEN_BUS] = slack_buses
    ppc["gen"][:eg_end, VG] = ext_grid["vm_pu"].values[eg_is_mask]
    ppc["gen"][:eg_end, GEN_STATUS] = True

    # mark slack buses; angle setpoint only matters when angles are solved
    if net["_options"]["calculate_voltage_angles"]:
        ppc["bus"][slack_buses, VA] = ext_grid["va_degree"].values[eg_is_mask]
    ppc["bus"][slack_buses, BUS_TYPE] = REF
def _build_pp_gen(net, ppc, gen_is_mask, eg_end, gen_end, q_lim_default, p_lim_default):
    """Write the in-service gen elements into ppc["gen"] rows [eg_end, gen_end).

    Fills bus index, active power injection (sign flipped to the generator
    reference system, scaled by the element scaling) and voltage setpoint,
    marks the buses as PV, and copies Q (and optionally P) limits with NaNs
    replaced by the given defaults.
    """
    bus_lookup = net["_pd2ppc_lookups"]["bus"]
    gen_tbl = net["gen"]

    pv_buses = bus_lookup[gen_tbl["bus"].values[gen_is_mask]]
    vm_setpoints = gen_tbl["vm_pu"].values[gen_is_mask]

    ppc["gen"][eg_end:gen_end, GEN_BUS] = pv_buses
    # pandapower load reference -> PYPOWER generator reference: flip sign
    ppc["gen"][eg_end:gen_end, PG] = - (gen_tbl["p_kw"].values[gen_is_mask] * 1e-3 *
                                        gen_tbl["scaling"].values[gen_is_mask])
    ppc["gen"][eg_end:gen_end, VG] = vm_setpoints

    # generator buses become PV buses with the generator's setpoint
    ppc["bus"][pv_buses, BUS_TYPE] = PV
    ppc["bus"][pv_buses, VM] = vm_setpoints

    _copy_q_limits_to_ppc(net, ppc, eg_end, gen_end, gen_is_mask)
    _replace_nans_with_default_q_limits_in_ppc(ppc, eg_end, gen_end, q_lim_default)

    if net["_options"]["copy_constraints_to_ppc"]:
        _copy_p_limits_to_ppc(net, ppc, eg_end, gen_end, gen_is_mask)
        _replace_nans_with_default_p_limits_in_ppc(ppc, eg_end, gen_end, p_lim_default)
def _build_pp_xward(net, ppc, gen_end, xw_end, q_lim_default, update_lookup=True):
    """Write the extended-ward PV nodes into ppc["gen"] rows [gen_end, xw_end).

    Each xward contributes a generator at its auxiliary bus with the xward
    voltage setpoint and wide-open Q limits. Auxiliary buses of in-service
    xwards become PV buses, out-of-service ones are set to NONE.
    """
    bus_lookup = net["_pd2ppc_lookups"]["bus"]
    xward = net["xward"]
    xward_in_service = net["_is_elements"]['xward']
    rows = slice(gen_end, xw_end)

    if update_lookup:
        ppc["gen"][rows, GEN_BUS] = bus_lookup[xward["ad_bus"].values]
    ppc["gen"][rows, VG] = xward["vm_pu"].values
    ppc["gen"][rows, GEN_STATUS] = xward_in_service
    ppc["gen"][rows, QMIN] = -q_lim_default
    ppc["gen"][rows, QMAX] = q_lim_default

    aux_buses = bus_lookup[xward["ad_bus"].values]
    ppc["bus"][aux_buses[xward_in_service], BUS_TYPE] = PV
    ppc["bus"][aux_buses[~xward_in_service], BUS_TYPE] = NONE
    ppc["bus"][aux_buses, VM] = xward["vm_pu"].values
def _update_gen_ppc(net, ppc):
    '''
    Takes the ppc network and updates the gen values from the values in net.

    Used on repeated runs (recycle): only setpoints/status of ext_grids and
    gens are refreshed in an already-built ppc; the row layout is assumed
    unchanged. Raises NotImplementedError when xwards are present.

    **INPUT**:
        **net** -The pandapower format network
        **ppc** - The PYPOWER format network to fill in values
    '''
    # get options from net
    calculate_voltage_angles = net["_options"]["calculate_voltage_angles"]
    bus_lookup = net["_pd2ppc_lookups"]["bus"]
    # get in service elements
    _is_elements = net["_is_elements"]
    gen_is_mask = _is_elements['gen']
    # TODO maybe speed up things here, too
    eg_is = net["ext_grid"][_is_elements['ext_grid']]
    gen_is = net["gen"][_is_elements['gen']]
    # row layout: [0, eg_end) ext grids, [eg_end, gen_end) gens, then xwards
    eg_end = len(eg_is)
    gen_end = eg_end + len(gen_is)
    xw_end = gen_end + len(net["xward"])
    q_lim_default = 1e9  # which is 1000 TW - should be enough for distribution grids.
    # add ext grid / slack data
    ext_grid_lookup = net["_pd2ppc_lookups"]["ext_grid"]
    ext_grid_idx_ppc = ext_grid_lookup[eg_is.index]
    ppc["gen"][ext_grid_idx_ppc, VG] = eg_is["vm_pu"].values
    ppc["gen"][ext_grid_idx_ppc, GEN_STATUS] = eg_is["in_service"].values
    # set bus values for external grid buses
    if calculate_voltage_angles:
        # eg_buses = bus_lookup[eg_is["bus"].values]
        # NOTE(review): ext_grid_idx_ppc are gen-table row indices, but here
        # they index ppc["bus"]; the commented-out eg_buses line suggests the
        # bus indices were intended — verify.
        ppc["bus"][ext_grid_idx_ppc, VA] = eg_is["va_degree"].values
    # add generator / pv data
    if gen_end > eg_end:
        gen_lookup = net["_pd2ppc_lookups"]["gen"]
        gen_idx_ppc = gen_lookup[gen_is.index]
        # load reference -> generator reference: flip sign, kW -> MW
        ppc["gen"][gen_idx_ppc, PG] = - gen_is["p_kw"].values * 1e-3 * gen_is["scaling"].values
        ppc["gen"][gen_idx_ppc, VG] = gen_is["vm_pu"].values
        # set bus values for generator buses
        gen_buses = bus_lookup[gen_is["bus"].values]
        ppc["bus"][gen_buses, VM] = gen_is["vm_pu"].values
        _copy_q_limits_to_ppc(net, ppc, eg_end, gen_end, gen_is_mask)
        _replace_nans_with_default_q_limits_in_ppc(ppc, eg_end, gen_end, q_lim_default)
    # add extended ward pv node data
    if xw_end > gen_end:
        # ToDo: this must be tested in combination with recycle. Maybe the placement of the updated value in ppc["gen"]
        # ToDo: is wrong. -> I'll better raise en error
        raise NotImplementedError("xwards in combination with recycle is not properly implemented")
        # _build_pp_xward(net, ppc, gen_end, xw_end, q_lim_default,
        #                 update_lookup=False)
def _copy_q_limits_to_ppc(net, ppc, eg_end, gen_end, gen_is_mask):
    """Copy gen reactive power limits into ppc rows [eg_end, gen_end).

    Pypower uses the generator reference system while pandapower uses the
    load reference system, so max <-> min swap and signs flip; ``delta``
    slightly relaxes both bounds.
    """
    delta = net["_options"]["delta"]
    gen_tbl = net["gen"]
    rows = slice(eg_end, gen_end)

    if "max_q_kvar" in gen_tbl.columns:
        ppc["gen"][rows, QMIN] = -gen_tbl["max_q_kvar"].values[gen_is_mask] * 1e-3 - delta
    if "min_q_kvar" in gen_tbl.columns:
        ppc["gen"][rows, QMAX] = -gen_tbl["min_q_kvar"].values[gen_is_mask] * 1e-3 + delta
def _copy_p_limits_to_ppc(net, ppc, eg_end, gen_end, gen_is_mask):
    """Copy gen active power limits into ppc rows [eg_end, gen_end).

    Load reference -> generator reference: max_p_kw becomes PMIN and
    min_p_kw becomes PMAX, with ``delta`` relaxing both bounds.
    """
    delta = net["_options"]["delta"]
    gen_tbl = net["gen"]
    rows = slice(eg_end, gen_end)

    if "max_p_kw" in gen_tbl.columns:
        ppc["gen"][rows, PMIN] = -gen_tbl["max_p_kw"].values[gen_is_mask] * 1e-3 + delta
    if "min_p_kw" in gen_tbl.columns:
        ppc["gen"][rows, PMAX] = -gen_tbl["min_p_kw"].values[gen_is_mask] * 1e-3 - delta
def _replace_nans_with_default_q_limits_in_ppc(ppc, eg_end, gen_end, q_lim_default):
    """Fill NaN reactive limits in ppc rows [eg_end, gen_end) with defaults.

    QMIN gets -q_lim_default, QMAX gets +q_lim_default. (Pypower generator
    reference vs. pandapower load reference: max <-> min.)
    """
    for col, fill in ((QMIN, -q_lim_default), (QMAX, q_lim_default)):
        limits = ppc["gen"][eg_end:gen_end, [col]]
        ncn.copyto(limits, fill, where=np.isnan(limits))
        ppc["gen"][eg_end:gen_end, [col]] = limits
def _replace_nans_with_default_p_limits_in_ppc(ppc, eg_end, gen_end, p_lim_default):
    """Fill NaN active power limits in ppc rows [eg_end, gen_end) with defaults.

    PMIN gets -p_lim_default, PMAX gets +p_lim_default. (Pypower generator
    reference vs. pandapower load reference: max <-> min.)
    """
    for col, fill in ((PMIN, -p_lim_default), (PMAX, p_lim_default)):
        limits = ppc["gen"][eg_end:gen_end, [col]]
        ncn.copyto(limits, fill, where=isnan(limits))
        ppc["gen"][eg_end:gen_end, [col]] = limits
def _check_voltage_setpoints_at_same_bus(ppc):
    """Raise UserWarning if two generators on one bus have different vm setpoints."""
    buses = ppc['gen'][:, GEN_BUS].astype(int)
    setpoints = ppc['gen'][:, VG]
    if _different_values_at_one_bus(buses, setpoints):
        raise UserWarning("Generators with different voltage setpoints connected to the same bus")
def _check_voltage_angles_at_same_bus(net, ppc):
    """Raise UserWarning if ext_grids on one bus have different angle setpoints."""
    eg_in_service = net._is_elements["ext_grid"]
    angles = net.ext_grid.va_degree[eg_in_service].values
    ppc_rows = net._pd2ppc_lookups["ext_grid"][net.ext_grid.index[eg_in_service]]
    buses = ppc["gen"][ppc_rows, GEN_BUS].astype(int)
    if _different_values_at_one_bus(buses, angles):
        raise UserWarning("Ext grids with different voltage angle setpoints connected to the same bus")
def _different_values_at_one_bus(buses, values):
"""
checks if there are different values in any of the
"""
# buses with one or more generators and their index
unique_bus, index_first_bus = np.unique(buses, return_index=True)
# voltage setpoint lookup with the voltage of the first occurence of that bus
first_values = -np.ones(buses.max() + 1)
first_values[unique_bus] = values[index_first_bus]
# generate voltage setpoints where all generators at the same bus
# have the voltage of the first generator at that bus
values_equal = first_values[buses]
return not np.array_equal(values, values_equal)
| 46.131466 | 120 | 0.608783 |
import numpy as np
import numpy.core.numeric as ncn
from numpy import array, zeros, isnan
from pandas import DataFrame
from pandapower.idx_bus import PV, REF, VA, VM, BUS_TYPE, NONE, VMAX, VMIN, PQ
from pandapower.idx_gen import QMIN, QMAX, PMIN, PMAX, GEN_STATUS, GEN_BUS, PG, VG, QG
def _build_gen_ppc(net, ppc):
    '''
    Takes the empty ppc network and fills it with the gen values. The gen
    datatype will be float afterwards.

    For mode "pf"/"sc" the gen table holds ext_grid, gen and xward rows;
    for mode "opf" it additionally holds controllable sgen, load and storage
    rows (in that order), with P/Q limits copied from the element tables.
    pandapower uses the load reference system while PYPOWER uses the
    generator reference system, so powers and limits change sign (and
    max <-> min swap) on the way in.

    **INPUT**:
        **net** -The pandapower format network

        **ppc** - The PYPOWER format network to fill in values
    '''
    mode = net["_options"]["mode"]

    # power flow or short circuit
    if mode == "pf" or mode == "sc":

        # in-service masks
        _is_elements = net["_is_elements"]
        eg_is_mask = _is_elements['ext_grid']
        gen_is_mask = _is_elements['gen']

        # row layout: [0, eg_end) ext grids, [eg_end, gen_end) gens,
        # [gen_end, xw_end) xwards
        eg_end = np.sum(eg_is_mask)
        gen_end = eg_end + np.sum(gen_is_mask)
        xw_end = gen_end + len(net["xward"])

        # default limits: 1e9 MW/MVar should be enough for distribution grids
        q_lim_default = 1e9
        p_lim_default = 1e9

        _init_ppc_gen(ppc, xw_end, 0)
        if mode == "sc":
            # short circuit only needs the allocated matrix
            return
        if gen_end > eg_end:
            _build_pp_gen(net, ppc, gen_is_mask, eg_end, gen_end, q_lim_default, p_lim_default)
        _build_pp_ext_grid(net, ppc, eg_is_mask, eg_end)
        # extended ward pv node data
        if xw_end > gen_end:
            _build_pp_xward(net, ppc, gen_end, xw_end, q_lim_default)

    # optimal power flow
    if mode == "opf":

        bus_lookup = net["_pd2ppc_lookups"]["bus"]
        calculate_voltage_angles = net["_options"]["calculate_voltage_angles"]

        if len(net.dcline) > 0:
            ppc["dcline"] = net.dcline[["loss_kw", "loss_percent"]].values

        # in-service elements; controllable ones only take part when the
        # "controllable" column exists
        _is_elements = net["_is_elements"]
        eg_is = net["ext_grid"][_is_elements['ext_grid']]
        gen_is = net["gen"][_is_elements['gen']]
        sg_is = net.sgen[(net.sgen.in_service & net.sgen.controllable) == True] \
            if "controllable" in net.sgen.columns else DataFrame()
        l_is = net.load[(net.load.in_service & net.load.controllable) == True] \
            if "controllable" in net.load.columns else DataFrame()
        stor_is = net.storage[(net.storage.in_service & net.storage.controllable) == True] \
            if "controllable" in net.storage.columns else DataFrame()

        _is_elements["sgen_controllable"] = sg_is
        _is_elements["load_controllable"] = l_is
        _is_elements["storage_controllable"] = stor_is

        # row layout: ext grids | gens | sgens | loads | storages
        eg_end = len(eg_is)
        gen_end = eg_end + len(gen_is)
        sg_end = gen_end + len(sg_is)
        l_end = sg_end + len(l_is)
        stor_end = l_end + len(stor_is)

        q_lim_default = 1e9  # should be enough for distribution grids
        p_lim_default = 1e9  # changes must be considered in check_opf_data
        delta = net["_options"]["delta"]

        # initialize generator matrix
        ppc["gen"] = zeros(shape=(stor_end, 21), dtype=float)
        ppc["gen"][:] = array([0, 0, 0, q_lim_default, -q_lim_default, 1., 1., 1, p_lim_default,
                               -p_lim_default, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0])

        # add sgens first so pv bus types won't be overwritten
        if sg_end > gen_end:
            gen_buses = bus_lookup[sg_is["bus"].values]

            ppc["gen"][gen_end:sg_end, GEN_BUS] = gen_buses
            ppc["gen"][gen_end:sg_end, PG] = - sg_is["p_kw"].values * 1e-3 * sg_is["scaling"].values
            ppc["gen"][gen_end:sg_end, QG] = sg_is["q_kvar"].values * 1e-3 * sg_is["scaling"].values

            # set bus values for generator buses
            ppc["bus"][gen_buses, BUS_TYPE] = PQ

            # set constraints for controllable sgens
            if "min_q_kvar" in sg_is.columns:
                ppc["gen"][gen_end:sg_end, QMAX] = - (sg_is["min_q_kvar"].values * 1e-3 - delta)
                max_q_kvar = ppc["gen"][gen_end:sg_end, [QMIN]]
                ncn.copyto(max_q_kvar, -q_lim_default, where=isnan(max_q_kvar))
                ppc["gen"][gen_end:sg_end, [QMIN]] = max_q_kvar

            if "max_q_kvar" in sg_is.columns:
                ppc["gen"][gen_end:sg_end, QMIN] = - (sg_is["max_q_kvar"].values * 1e-3 + delta)
                min_q_kvar = ppc["gen"][gen_end:sg_end, [QMAX]]
                ncn.copyto(min_q_kvar, q_lim_default, where=isnan(min_q_kvar))
                ppc["gen"][gen_end:sg_end, [QMAX]] = min_q_kvar - 1e-10  # TODO Why this? (M.Scharf, 2018-02)

            if "max_p_kw" in sg_is.columns:
                ppc["gen"][gen_end:sg_end, PMIN] = - (sg_is["max_p_kw"].values * 1e-3 + delta)
                max_p_kw = ppc["gen"][gen_end:sg_end, [PMIN]]
                ncn.copyto(max_p_kw, -p_lim_default, where=isnan(max_p_kw))
                ppc["gen"][gen_end:sg_end, [PMIN]] = max_p_kw

            if "min_p_kw" in sg_is.columns:
                ppc["gen"][gen_end:sg_end, PMAX] = - (sg_is["min_p_kw"].values * 1e-3 - delta)
                min_p_kw = ppc["gen"][gen_end:sg_end, [PMAX]]
                ncn.copyto(min_p_kw, p_lim_default, where=isnan(min_p_kw))
                ppc["gen"][gen_end:sg_end, [PMAX]] = min_p_kw

        # add controllable loads
        if l_end > sg_end:
            load_buses = bus_lookup[l_is["bus"].values]

            ppc["gen"][sg_end:l_end, GEN_BUS] = load_buses
            ppc["gen"][sg_end:l_end, PG] = - l_is["p_kw"].values * 1e-3 * l_is["scaling"].values
            ppc["gen"][sg_end:l_end, QG] = l_is["q_kvar"].values * 1e-3 * l_is["scaling"].values

            # set bus values for controllable loads
            ppc["bus"][load_buses, BUS_TYPE] = PQ

            # set constraints for controllable loads
            if "min_q_kvar" in l_is.columns:
                ppc["gen"][sg_end:l_end, QMAX] = - (l_is["min_q_kvar"].values * 1e-3 - delta)
                max_q_kvar = ppc["gen"][sg_end:l_end, [QMIN]]
                ncn.copyto(max_q_kvar, -q_lim_default, where=isnan(max_q_kvar))
                ppc["gen"][sg_end:l_end, [QMIN]] = max_q_kvar

            if "max_q_kvar" in l_is.columns:
                ppc["gen"][sg_end:l_end, QMIN] = - (l_is["max_q_kvar"].values * 1e-3 + delta)
                min_q_kvar = ppc["gen"][sg_end:l_end, [QMAX]]
                ncn.copyto(min_q_kvar, q_lim_default, where=isnan(min_q_kvar))
                ppc["gen"][sg_end:l_end, [QMAX]] = min_q_kvar

            # FIX: the presence check must test the same column that is read.
            # Previously "min_p_kw" was tested while l_is["max_p_kw"] was read
            # (and vice versa), which skipped limits or raised a KeyError when
            # only one of the two columns existed. Now consistent with the
            # sgen/storage sections.
            if "max_p_kw" in l_is.columns:
                ppc["gen"][sg_end:l_end, PMIN] = - (l_is["max_p_kw"].values * 1e-3 + delta)
                max_p_kw = ppc["gen"][sg_end:l_end, [PMIN]]
                ncn.copyto(max_p_kw, -p_lim_default, where=isnan(max_p_kw))
                ppc["gen"][sg_end:l_end, [PMIN]] = max_p_kw

            if "min_p_kw" in l_is.columns:
                ppc["gen"][sg_end:l_end, PMAX] = - (l_is["min_p_kw"].values * 1e-3 - delta)
                min_p_kw = ppc["gen"][sg_end:l_end, [PMAX]]
                ncn.copyto(min_p_kw, p_lim_default, where=isnan(min_p_kw))
                ppc["gen"][sg_end:l_end, [PMAX]] = min_p_kw

        # add controllable storages
        if stor_end > l_end:
            stor_buses = bus_lookup[stor_is["bus"].values]

            ppc["gen"][l_end:stor_end, GEN_BUS] = stor_buses
            ppc["gen"][l_end:stor_end, PG] = - stor_is["p_kw"].values * 1e-3 * stor_is["scaling"].values
            ppc["gen"][l_end:stor_end, QG] = stor_is["q_kvar"].values * 1e-3 * stor_is["scaling"].values

            # set bus values for generator buses
            ppc["bus"][stor_buses, BUS_TYPE] = PQ

            # set constraints for controllable storages
            if "min_q_kvar" in stor_is.columns:
                ppc["gen"][l_end:stor_end, QMAX] = - (stor_is["min_q_kvar"].values * 1e-3 - delta)
                max_q_kvar = ppc["gen"][l_end:stor_end, [QMIN]]
                ncn.copyto(max_q_kvar, -q_lim_default, where=isnan(max_q_kvar))
                ppc["gen"][l_end:stor_end, [QMIN]] = max_q_kvar

            if "max_q_kvar" in stor_is.columns:
                ppc["gen"][l_end:stor_end, QMIN] = - (stor_is["max_q_kvar"].values * 1e-3 + delta)
                min_q_kvar = ppc["gen"][l_end:stor_end, [QMAX]]
                ncn.copyto(min_q_kvar, q_lim_default, where=isnan(min_q_kvar))
                ppc["gen"][l_end:stor_end, [QMAX]] = min_q_kvar

            if "max_p_kw" in stor_is.columns:
                ppc["gen"][l_end:stor_end, PMIN] = - (stor_is["max_p_kw"].values * 1e-3 + delta)
                max_p_kw = ppc["gen"][l_end:stor_end, [PMIN]]
                ncn.copyto(max_p_kw, -p_lim_default, where=isnan(max_p_kw))
                ppc["gen"][l_end:stor_end, [PMIN]] = max_p_kw

            if "min_p_kw" in stor_is.columns:
                ppc["gen"][l_end:stor_end, PMAX] = - (stor_is["min_p_kw"].values * 1e-3 - delta)
                min_p_kw = ppc["gen"][l_end:stor_end, [PMAX]]
                ncn.copyto(min_p_kw, p_lim_default, where=isnan(min_p_kw))
                ppc["gen"][l_end:stor_end, [PMAX]] = min_p_kw

        # add ext grid / slack data
        ppc["gen"][:eg_end, GEN_BUS] = bus_lookup[eg_is["bus"].values]
        ppc["gen"][:eg_end, VG] = eg_is["vm_pu"].values
        ppc["gen"][:eg_end, GEN_STATUS] = eg_is["in_service"].values

        # NOTE(review): the delta signs in this ext-grid P-limit section are
        # opposite to the sgen/load/storage sections (limits are tightened
        # instead of relaxed) — verify whether this is intended.
        if "max_p_kw" in eg_is.columns:
            ppc["gen"][:eg_end, PMIN] = - (eg_is["max_p_kw"].values * 1e-3 - delta)
            max_p_kw = ppc["gen"][:eg_end, [PMIN]]
            ncn.copyto(max_p_kw, -p_lim_default, where=isnan(max_p_kw))
            ppc["gen"][:eg_end, [PMIN]] = max_p_kw

        if "min_p_kw" in eg_is.columns:
            ppc["gen"][:eg_end, PMAX] = - (eg_is["min_p_kw"].values * 1e-3 + delta)
            min_p_kw = ppc["gen"][:eg_end, [PMAX]]
            ncn.copyto(min_p_kw, p_lim_default, where=isnan(min_p_kw))
            ppc["gen"][:eg_end, [PMAX]] = min_p_kw

        if "min_q_kvar" in eg_is.columns:
            ppc["gen"][:eg_end, QMAX] = - (eg_is["min_q_kvar"].values * 1e-3 - delta)
            max_q_kvar = ppc["gen"][:eg_end, [QMIN]]
            ncn.copyto(max_q_kvar, -q_lim_default, where=isnan(max_q_kvar))
            ppc["gen"][:eg_end, [QMIN]] = max_q_kvar

        if "max_q_kvar" in eg_is.columns:
            ppc["gen"][:eg_end, QMIN] = - (eg_is["max_q_kvar"].values * 1e-3 + delta)
            min_q_kvar = ppc["gen"][:eg_end, [QMAX]]
            ncn.copyto(min_q_kvar, q_lim_default, where=isnan(min_q_kvar))
            ppc["gen"][:eg_end, [QMAX]] = min_q_kvar - 1e-10

        # set bus values for external grid buses
        eg_buses = bus_lookup[eg_is["bus"].values]
        if calculate_voltage_angles:
            ppc["bus"][eg_buses, VA] = eg_is["va_degree"].values
        ppc["bus"][eg_buses, BUS_TYPE] = REF
        ppc["bus"][eg_buses, VM] = eg_is["vm_pu"].values

        # REF busses don't have flexible voltages by definition:
        ppc["bus"][eg_buses, VMAX] = ppc["bus"][ppc["bus"][:, BUS_TYPE] == REF, VM]
        ppc["bus"][eg_buses, VMIN] = ppc["bus"][ppc["bus"][:, BUS_TYPE] == REF, VM]

        # add generator / pv data
        if gen_end > eg_end:
            ppc["gen"][eg_end:gen_end, GEN_BUS] = bus_lookup[gen_is["bus"].values]
            ppc["gen"][eg_end:gen_end, PG] = - gen_is["p_kw"].values * 1e-3 * gen_is["scaling"].values
            ppc["gen"][eg_end:gen_end, VG] = gen_is["vm_pu"].values

            # set bus values for generator buses
            gen_buses = bus_lookup[gen_is["bus"].values]
            ppc["bus"][gen_buses, BUS_TYPE] = PV
            ppc["bus"][gen_buses, VM] = gen_is["vm_pu"].values

            # set constraints for PV generators
            _copy_q_limits_to_ppc(net, ppc, eg_end, gen_end, _is_elements['gen'])
            _copy_p_limits_to_ppc(net, ppc, eg_end, gen_end, _is_elements['gen'])

            _replace_nans_with_default_q_limits_in_ppc(ppc, eg_end, gen_end, q_lim_default)
            _replace_nans_with_default_p_limits_in_ppc(ppc, eg_end, gen_end, p_lim_default)
def _init_ppc_gen(ppc, xw_end, q_lim_default):
ppc["gen"] = np.zeros(shape=(xw_end, 21), dtype=float)
ppc["gen"][:] = np.array([0, 0, 0, q_lim_default, -q_lim_default, 1.,
1., 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0])
def _build_pp_ext_grid(net, ppc, eg_is_mask, eg_end):
    """Write the in-service external grids into rows [0, eg_end) of
    ppc["gen"] and mark their connection buses as reference (REF) buses.

    ``eg_is_mask`` selects the in-service rows of net["ext_grid"].
    """
    use_voltage_angles = net["_options"]["calculate_voltage_angles"]
    lookup = net["_pd2ppc_lookups"]["bus"]
    ext_grid = net["ext_grid"]
    slack_buses = lookup[ext_grid["bus"].values[eg_is_mask]]
    # rows [0, eg_end) of the gen table belong to the external grids
    slack_rows = ppc["gen"][:eg_end]
    slack_rows[:, GEN_BUS] = slack_buses
    slack_rows[:, VG] = ext_grid["vm_pu"].values[eg_is_mask]
    slack_rows[:, GEN_STATUS] = True
    if use_voltage_angles:
        ppc["bus"][slack_buses, VA] = ext_grid["va_degree"].values[eg_is_mask]
    ppc["bus"][slack_buses, BUS_TYPE] = REF
def _build_pp_gen(net, ppc, gen_is_mask, eg_end, gen_end, q_lim_default, p_lim_default):
    """Write the in-service generators into rows [eg_end, gen_end) of
    ppc["gen"] and mark their buses as PV buses.

    Note the sign flip on PG: pandapower stores power in the load reference
    system (kW, consumption positive) while pypower expects the generator
    reference system (MW, injection positive).
    """
    lookup = net["_pd2ppc_lookups"]["bus"]
    copy_p_constraints = net["_options"]["copy_constraints_to_ppc"]
    gen_tab = net["gen"]
    buses = lookup[gen_tab["bus"].values[gen_is_mask]]
    vm_setpoints = gen_tab["vm_pu"].values[gen_is_mask]
    # kW -> MW, apply the per-generator scaling factor
    p_mw = gen_tab["p_kw"].values[gen_is_mask] * 1e-3 * gen_tab["scaling"].values[gen_is_mask]

    gen_rows = ppc["gen"][eg_end:gen_end]
    gen_rows[:, GEN_BUS] = buses
    gen_rows[:, PG] = -p_mw
    gen_rows[:, VG] = vm_setpoints

    ppc["bus"][buses, BUS_TYPE] = PV
    ppc["bus"][buses, VM] = vm_setpoints

    # reactive limits are always copied; active limits only on request
    _copy_q_limits_to_ppc(net, ppc, eg_end, gen_end, gen_is_mask)
    _replace_nans_with_default_q_limits_in_ppc(ppc, eg_end, gen_end, q_lim_default)
    if copy_p_constraints:
        _copy_p_limits_to_ppc(net, ppc, eg_end, gen_end, gen_is_mask)
        _replace_nans_with_default_p_limits_in_ppc(ppc, eg_end, gen_end, p_lim_default)
def _build_pp_xward(net, ppc, gen_end, xw_end, q_lim_default, update_lookup=True):
    """Write extended ward equivalents (xwards) into rows [gen_end, xw_end)
    of ppc["gen"], one PV generator per xward at its auxiliary bus ("ad_bus").

    When ``update_lookup`` is False the GEN_BUS column is assumed to be
    already filled and only setpoints, status and Q limits are (re)written.
    """
    bus_lookup = net["_pd2ppc_lookups"]["bus"]
    xw = net["xward"]
    # NOTE(review): xw_is appears to be a boolean in-service mask over the
    # xward table -- it is written to GEN_STATUS and negated with ~ below.
    # Confirm it is boolean, not an index array.
    xw_is = net["_is_elements"]['xward']
    if update_lookup:
        ppc["gen"][gen_end:xw_end, GEN_BUS] = bus_lookup[xw["ad_bus"].values]
    ppc["gen"][gen_end:xw_end, VG] = xw["vm_pu"].values
    ppc["gen"][gen_end:xw_end, GEN_STATUS] = xw_is
    # xwards get wide-open reactive limits
    ppc["gen"][gen_end:xw_end, QMIN] = -q_lim_default
    ppc["gen"][gen_end:xw_end, QMAX] = q_lim_default
    xward_buses = bus_lookup[net["xward"]["ad_bus"].values]
    # in-service xwards become PV buses; out-of-service ones are disabled
    ppc["bus"][xward_buses[xw_is], BUS_TYPE] = PV
    ppc["bus"][xward_buses[~xw_is], BUS_TYPE] = NONE
    ppc["bus"][xward_buses, VM] = net["xward"]["vm_pu"].values
def _update_gen_ppc(net, ppc):
    """Update an existing ppc "gen" table in place (recycle mode) instead of
    rebuilding it: refresh ext grid VM/VA/status, generator PG/VG and the
    generator reactive limits. xwards are not supported in recycle mode.
    """
    calculate_voltage_angles = net["_options"]["calculate_voltage_angles"]
    bus_lookup = net["_pd2ppc_lookups"]["bus"]
    # in-service masks / tables
    _is_elements = net["_is_elements"]
    gen_is_mask = _is_elements['gen']
    eg_is = net["ext_grid"][_is_elements['ext_grid']]
    gen_is = net["gen"][_is_elements['gen']]
    # row layout of ppc["gen"]: [ext grids | gens | xwards]
    eg_end = len(eg_is)
    gen_end = eg_end + len(gen_is)
    xw_end = gen_end + len(net["xward"])
    q_lim_default = 1e9
    # map ext grid indices to their rows in ppc["gen"]
    ext_grid_lookup = net["_pd2ppc_lookups"]["ext_grid"]
    ext_grid_idx_ppc = ext_grid_lookup[eg_is.index]
    ppc["gen"][ext_grid_idx_ppc, VG] = eg_is["vm_pu"].values
    ppc["gen"][ext_grid_idx_ppc, GEN_STATUS] = eg_is["in_service"].values
    if calculate_voltage_angles:
        # NOTE(review): ext_grid_idx_ppc indexes rows of ppc["gen"], yet here
        # it is used to index ppc["bus"]. _build_pp_ext_grid uses
        # bus_lookup[eg_is["bus"].values] for the bus rows instead -- verify
        # this is intentional and not a bug.
        ppc["bus"][ext_grid_idx_ppc, VA] = eg_is["va_degree"].values
    if gen_end > eg_end:
        # map generator indices to their rows in ppc["gen"]
        gen_lookup = net["_pd2ppc_lookups"]["gen"]
        gen_idx_ppc = gen_lookup[gen_is.index]
        # kW (load reference) -> MW (generator reference), with scaling
        ppc["gen"][gen_idx_ppc, PG] = - gen_is["p_kw"].values * 1e-3 * gen_is["scaling"].values
        ppc["gen"][gen_idx_ppc, VG] = gen_is["vm_pu"].values
        gen_buses = bus_lookup[gen_is["bus"].values]
        ppc["bus"][gen_buses, VM] = gen_is["vm_pu"].values
        _copy_q_limits_to_ppc(net, ppc, eg_end, gen_end, gen_is_mask)
        _replace_nans_with_default_q_limits_in_ppc(ppc, eg_end, gen_end, q_lim_default)
    if xw_end > gen_end:
        raise NotImplementedError("xwards in combination with recycle is not properly implemented")
        # _build_pp_xward(net, ppc, gen_end, xw_end, q_lim_default,
        #                 update_lookup=False)
def _copy_q_limits_to_ppc(net, ppc, eg_end, gen_end, gen_is_mask):
    """Copy generator reactive power limits into ppc rows [eg_end, gen_end).

    Pypower uses the generator reference system while pandapower uses the
    load reference system, so max and min swap roles and the sign flips
    (kvar -> Mvar via 1e-3). ``delta`` offsets each bound slightly.
    """
    gen_tab = net["gen"]
    delta = net["_options"]["delta"]
    target = ppc["gen"][eg_end:gen_end]
    if "max_q_kvar" in gen_tab.columns:
        target[:, QMIN] = -gen_tab["max_q_kvar"].values[gen_is_mask] * 1e-3 - delta
    if "min_q_kvar" in gen_tab.columns:
        target[:, QMAX] = -gen_tab["min_q_kvar"].values[gen_is_mask] * 1e-3 + delta
def _copy_p_limits_to_ppc(net, ppc, eg_end, gen_end, gen_is_mask):
    """Copy generator active power limits into ppc rows [eg_end, gen_end).

    As with the reactive limits, max/min swap and the sign flips between
    pandapower's load reference system (kW) and pypower's generator
    reference system (MW); ``delta`` offsets each bound slightly.
    """
    gen_tab = net["gen"]
    delta = net["_options"]["delta"]
    target = ppc["gen"][eg_end:gen_end]
    if "max_p_kw" in gen_tab.columns:
        target[:, PMIN] = -gen_tab["max_p_kw"].values[gen_is_mask] * 1e-3 + delta
    if "min_p_kw" in gen_tab.columns:
        target[:, PMAX] = -gen_tab["min_p_kw"].values[gen_is_mask] * 1e-3 - delta
def _replace_nans_with_default_q_limits_in_ppc(ppc, eg_end, gen_end, q_lim_default):
    """Fill NaN reactive limits in rows [eg_end, gen_end) with defaults.

    Because of the generator vs. load reference system mirroring, QMIN
    falls back to -q_lim_default and QMAX to +q_lim_default.
    """
    for column, fallback in ((QMIN, -q_lim_default), (QMAX, q_lim_default)):
        limits = ppc["gen"][eg_end:gen_end, [column]]
        ncn.copyto(limits, fallback, where=np.isnan(limits))
        ppc["gen"][eg_end:gen_end, [column]] = limits
def _replace_nans_with_default_p_limits_in_ppc(ppc, eg_end, gen_end, p_lim_default):
    """Fill NaN active power limits in rows [eg_end, gen_end) with defaults.

    Note: pypower has a generator reference system, pandapower uses the load
    reference system (max <-> min), so PMIN falls back to -p_lim_default and
    PMAX to +p_lim_default.
    """
    # np.isnan instead of bare isnan, for consistency with
    # _replace_nans_with_default_q_limits_in_ppc
    max_p_kw = ppc["gen"][eg_end:gen_end, [PMIN]]
    ncn.copyto(max_p_kw, -p_lim_default, where=np.isnan(max_p_kw))
    ppc["gen"][eg_end:gen_end, [PMIN]] = max_p_kw
    min_p_kw = ppc["gen"][eg_end:gen_end, [PMAX]]
    ncn.copyto(min_p_kw, p_lim_default, where=np.isnan(min_p_kw))
    ppc["gen"][eg_end:gen_end, [PMAX]] = min_p_kw
def _check_voltage_setpoints_at_same_bus(ppc):
    """Raise UserWarning when two generators connected to one bus carry
    different voltage magnitude setpoints."""
    buses = ppc['gen'][:, GEN_BUS].astype(int)
    setpoints = ppc['gen'][:, VG]
    if _different_values_at_one_bus(buses, setpoints):
        raise UserWarning("Generators with different voltage setpoints connected to the same bus")
def _check_voltage_angles_at_same_bus(net, ppc):
    """Raise UserWarning when two ext grids connected to one bus carry
    different voltage angle setpoints."""
    in_service = net._is_elements["ext_grid"]
    angles = net.ext_grid.va_degree[in_service].values
    ppc_rows = net._pd2ppc_lookups["ext_grid"][net.ext_grid.index[in_service]]
    buses = ppc["gen"][ppc_rows, GEN_BUS].astype(int)
    if _different_values_at_one_bus(buses, angles):
        raise UserWarning("Ext grids with different voltage angle setpoints connected to the same bus")
def _different_values_at_one_bus(buses, values):
# buses with one or more generators and their index
unique_bus, index_first_bus = np.unique(buses, return_index=True)
# voltage setpoint lookup with the voltage of the first occurence of that bus
first_values = -np.ones(buses.max() + 1)
first_values[unique_bus] = values[index_first_bus]
# generate voltage setpoints where all generators at the same bus
# have the voltage of the first generator at that bus
values_equal = first_values[buses]
return not np.array_equal(values, values_equal)
| true | true |
f71c3ac798ea0aa03af1f53d6228417fc90cca0c | 861 | py | Python | abtools/__init__.py | menis/abtools | bfc7c6c508b174bb3b74d8f152156242ddd2ee77 | [
"MIT"
] | 9 | 2016-06-13T20:00:04.000Z | 2022-03-19T19:07:23.000Z | abtools/__init__.py | menis/abtools | bfc7c6c508b174bb3b74d8f152156242ddd2ee77 | [
"MIT"
] | null | null | null | abtools/__init__.py | menis/abtools | bfc7c6c508b174bb3b74d8f152156242ddd2ee77 | [
"MIT"
] | 4 | 2018-04-10T09:05:21.000Z | 2022-01-27T21:23:06.000Z | import os
# Skip importing the analysis submodules during Read the Docs builds,
# where their runtime dependencies may not be installed.
if not os.environ.get('READTHEDOCS', None):
    from ._compare import run as compare
    from ._correct import run as correct
    from ._finder import run as finder
    from ._phylogeny import run as phylogeny
    # import _stats as stats
# Derive __version__ from the installed distribution's metadata.
from pkg_resources import get_distribution, DistributionNotFound
import os.path
try:
    _dist = get_distribution('abtools')
    # Normalize case for Windows systems
    dist_loc = os.path.normcase(_dist.location)
    here = os.path.normcase(__file__)
    if not here.startswith(os.path.join(dist_loc, 'abtools')):
        # this file is not inside the installed distribution's location:
        # not installed, but there is another version that *is*
        raise DistributionNotFound
except DistributionNotFound:
    __version__ = 'Please install AbTools before checking the version'
else:
    __version__ = _dist.version
| 33.115385 | 74 | 0.688734 | import os
# Skip importing the analysis submodules during Read the Docs builds,
# where their runtime dependencies may not be installed.
if not os.environ.get('READTHEDOCS', None):
    from ._compare import run as compare
    from ._correct import run as correct
    from ._finder import run as finder
    from ._phylogeny import run as phylogeny
# Derive __version__ from the installed distribution's metadata.
from pkg_resources import get_distribution, DistributionNotFound
import os.path
try:
    _dist = get_distribution('abtools')
    # normalize case for Windows systems
    dist_loc = os.path.normcase(_dist.location)
    here = os.path.normcase(__file__)
    if not here.startswith(os.path.join(dist_loc, 'abtools')):
        # this file is not inside the installed distribution: not installed,
        # but another version *is*
        raise DistributionNotFound
except DistributionNotFound:
    __version__ = 'Please install AbTools before checking the version'
else:
    __version__ = _dist.version
| true | true |
f71c3b9ce33e658445e5575868064ec2d77a48fe | 2,312 | py | Python | src/borg/shellpattern.py | russelldavis/borg | 20abc9d68742a8cf5da8503cef96b2e9a5c83213 | [
"BSD-3-Clause"
] | 2 | 2021-08-19T16:25:15.000Z | 2021-11-17T10:54:16.000Z | src/borg/shellpattern.py | tschwinge/borg | 3e2d5b2b220aee2db68f81bbc0108332abc9cda9 | [
"BSD-3-Clause"
] | null | null | null | src/borg/shellpattern.py | tschwinge/borg | 3e2d5b2b220aee2db68f81bbc0108332abc9cda9 | [
"BSD-3-Clause"
] | null | null | null | import os
import re
def translate(pat, match_end=r"\Z"):
    """Translate a shell-style pattern to a regular expression.

    The pattern may include ``**<sep>`` (<sep> stands for the platform-specific path separator; "/" on POSIX systems) for
    matching zero or more directory levels and "*" for matching zero or more arbitrary characters with the exception of
    any path separator. Wrap meta-characters in brackets for a literal match (i.e. "[?]" to match the literal character
    "?").

    Using match_end=regex one can give a regular expression that is used to match after the regex that is generated from
    the pattern. The default is to match the end of the string.

    This function is derived from the "fnmatch" module distributed with the Python standard library.

    Copyright (C) 2001-2016 Python Software Foundation. All rights reserved.

    TODO: support {alt1,alt2} shell-style alternatives
    """
    sep = os.path.sep
    n = len(pat)
    i = 0
    res = ""

    while i < n:
        c = pat[i]
        i += 1

        if c == "*":
            if i + 1 < n and pat[i] == "*" and pat[i + 1] == sep:
                # **/ == wildcard for 0+ full (relative) directory names with trailing slashes; the forward slash stands
                # for the platform-specific path separator
                res += r"(?:[^\%s]*\%s)*" % (sep, sep)
                i += 2
            else:
                # * == wildcard for name parts (does not cross path separator)
                res += r"[^\%s]*" % sep
        elif c == "?":
            # ? == any single character excluding path separator
            res += r"[^\%s]" % sep
        elif c == "[":
            j = i
            if j < n and pat[j] == "!":
                j += 1
            if j < n and pat[j] == "]":
                j += 1
            while j < n and pat[j] != "]":
                j += 1
            if j >= n:
                # unterminated bracket set: match a literal "["
                res += "\\["
            else:
                stuff = pat[i:j].replace("\\", "\\\\")
                i = j + 1
                if stuff[0] == "!":
                    stuff = "^" + stuff[1:]
                elif stuff[0] == "^":
                    stuff = "\\" + stuff
                res += "[%s]" % stuff
        else:
            res += re.escape(c)

    # Fix: global inline flags must appear at the start of the expression.
    # Appending "(?ms)" at the end is deprecated since Python 3.6 and raises
    # re.error ("global flags not at the start ...") since Python 3.11.
    return "(?ms)" + res + match_end
| 35.030303 | 121 | 0.493945 | import os
import re
def translate(pat, match_end=r"\Z"):
    """Translate a shell-style pattern to a regular expression string.

    ``**<sep>`` matches zero or more full directory levels, ``*`` matches
    zero or more characters excluding the path separator, ``?`` matches a
    single non-separator character and ``[...]``/``[!...]`` are character
    sets. ``match_end`` is a regex appended after the translated pattern
    (default: end of string).
    """
    sep = os.path.sep
    n = len(pat)
    i = 0
    res = ""
    while i < n:
        c = pat[i]
        i += 1
        if c == "*":
            if i + 1 < n and pat[i] == "*" and pat[i + 1] == sep:
                # **/ -> zero or more full directory names with trailing separators
                res += r"(?:[^\%s]*\%s)*" % (sep, sep)
                i += 2
            else:
                # * -> any run of characters that does not cross a separator
                res += r"[^\%s]*" % sep
        elif c == "?":
            # ? -> any single character except the separator
            res += r"[^\%s]" % sep
        elif c == "[":
            j = i
            if j < n and pat[j] == "!":
                j += 1
            if j < n and pat[j] == "]":
                j += 1
            while j < n and pat[j] != "]":
                j += 1
            if j >= n:
                # unterminated bracket set: match a literal "["
                res += "\\["
            else:
                stuff = pat[i:j].replace("\\", "\\\\")
                i = j + 1
                if stuff[0] == "!":
                    stuff = "^" + stuff[1:]
                elif stuff[0] == "^":
                    stuff = "\\" + stuff
                res += "[%s]" % stuff
        else:
            res += re.escape(c)
    # Fix: global inline flags must appear at the start of the expression.
    # Appending "(?ms)" at the end is deprecated since Python 3.6 and raises
    # re.error ("global flags not at the start ...") since Python 3.11.
    return "(?ms)" + res + match_end
| true | true |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.