Dataset schema (one row per source file; column: type and observed range):
- blob_id: string, length 40
- directory_id: string, length 40
- path: string, length 4 to 721
- content_id: string, length 40
- detected_licenses: list, length 0 to 57
- license_type: string, 2 classes
- repo_name: string, length 5 to 91
- snapshot_id: string, length 40
- revision_id: string, length 40
- branch_name: string, 321 classes
- visit_date: timestamp[ns], 2016-08-12 09:31:09 to 2023-09-06 10:45:07
- revision_date: timestamp[ns], 2010-09-28 14:01:40 to 2023-09-06 06:22:19
- committer_date: timestamp[ns], 2010-09-28 14:01:40 to 2023-09-06 06:22:19
- github_id: int64, 426 to 681M
- star_events_count: int64, 101 to 243k
- fork_events_count: int64, 0 to 110k
- gha_license_id: string, 23 classes
- gha_event_created_at: timestamp[ns], 2012-06-28 18:51:49 to 2023-09-14 21:59:16, nullable
- gha_created_at: timestamp[ns], 2008-02-11 22:55:26 to 2023-08-10 11:14:58, nullable
- gha_language: string, 147 classes
- src_encoding: string, 26 classes
- language: string, 2 classes
- is_vendor: bool
- is_generated: bool
- length_bytes: int64, 6 to 10.2M
- extension: string, 115 classes
- filename: string, length 3 to 113
- content: string, length 6 to 10.2M
--------------------------------------------------------------------------------
blob_id:              97e9461a9a5eb3100040ffd3cfa5140a4c87d1cb
directory_id:         7a15271c7cddd199f43555469a67d26ce0f60836
path:                 /uncertainty_baselines/models/resnet50_het_rank1_test.py
content_id:           4fac91a6404e366369ad28c9256fa4a23f53501f
detected_licenses:    ["Apache-2.0"]
license_type:         permissive
repo_name:            google/uncertainty-baselines
snapshot_id:          b2c339d918bf3949ee066f9eafa6b51232a2ac3d
revision_id:          f5f6f50f82bd441339c9d9efbef3f09e72c5fef6
branch_name:          refs/heads/main
visit_date:           2023-09-02T13:59:26.355288
revision_date:        2023-08-14T16:35:22
committer_date:       2023-08-14T16:36:11
github_id:            280,026,201
star_events_count:    1,235
fork_events_count:    198
gha_license_id:       Apache-2.0
gha_event_created_at: 2023-09-11T22:21:48
gha_created_at:       2020-07-16T01:54:32
gha_language:         Python
src_encoding:         UTF-8
language:             Python
is_vendor:            false
is_generated:         false
length_bytes:         2,493
extension:            py
filename:             resnet50_het_rank1_test.py
content:
# coding=utf-8
# Copyright 2023 The Uncertainty Baselines Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for Rank-1 BNN with Heteroscedastic Approach on ResNet-50."""
import tensorflow as tf
import uncertainty_baselines as ub
class Resnet50HetRank1Test(tf.test.TestCase):
def testResNet50HetRank1(self):
tf.random.set_seed(839382)
temperature = 1.5
num_factors = 3
num_mc_samples = 10
tf.random.set_seed(83922)
dataset_size = 10
batch_size = 4 # must be divisible by ensemble_size
input_shape = (32, 32, 1)
num_classes = 4
features = tf.random.normal((dataset_size,) + input_shape)
coeffs = tf.random.normal([tf.reduce_prod(input_shape), num_classes])
net = tf.reshape(features, [dataset_size, -1])
logits = tf.matmul(net, coeffs)
labels = tf.random.categorical(logits, 1)
dataset = tf.data.Dataset.from_tensor_slices((features, labels))
dataset = dataset.repeat().shuffle(dataset_size).batch(batch_size)
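    # Each batch is a (4, 32, 32, 1) feature tensor with (4, 1) integer labels;
    # repeat() makes the 10-example dataset effectively infinite, so the
    # 2 epochs x (10 // 4 = 2) steps used below never exhaust it.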
model = ub.models.resnet50_het_rank1(
input_shape=input_shape,
num_classes=num_classes,
alpha_initializer='trainable_normal',
gamma_initializer='trainable_normal',
alpha_regularizer='normal_kl_divergence',
gamma_regularizer='normal_kl_divergence',
use_additive_perturbation=False,
ensemble_size=4,
random_sign_init=0.75,
dropout_rate=0.001,
prior_stddev=0.05,
use_tpu=True,
use_ensemble_bn=False,
num_factors=num_factors,
temperature=temperature,
num_mc_samples=num_mc_samples)
model.compile(
'adam',
loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True))
history = model.fit(dataset,
steps_per_epoch=dataset_size // batch_size,
epochs=2)
loss_history = history.history['loss']
self.assertAllGreaterEqual(loss_history, 0.)
if __name__ == '__main__':
tf.test.main()
--------------------------------------------------------------------------------
blob_id:              b772654fbcfa4dd75eb35cf32b094c89f629ec22
directory_id:         726290bb19f6c1f621e12c3b3acf22cfcf7438a8
path:                 /library/jvspherecontrol
content_id:           64b60f32f7613a133c3d655b1c8b74948b84dd1a
detected_licenses:    []
license_type:         no_license
repo_name:            ginsys/ansible-plugins
snapshot_id:          f5bd580b9d13bc895fc61d8cf9326bcb3fe1d06f
revision_id:          105b2142998f2c4da20cf8c281890e163e6ffb2b
branch_name:          refs/heads/devel
visit_date:           2020-04-12T06:23:49.779240
revision_date:        2016-09-23T08:36:48
committer_date:       2016-09-23T08:36:48
github_id:            9,505,130
star_events_count:    112
fork_events_count:    27
gha_license_id:       null
gha_event_created_at: 2016-09-23T08:42:24
gha_created_at:       2013-04-17T19:00:14
gha_language:         Python
src_encoding:         UTF-8
language:             Python
is_vendor:            false
is_generated:         false
length_bytes:         16,328
filename:             jvspherecontrol
content:
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2013, Serge van Ginderachter <serge@vanginderachter.be>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
author: Serge van Ginderachter
module: jvspherecontrol
short_description: A wrapper module to Patrick Debois' jvspherecontrol
description:
  - This module wraps (some) features of Patrick Debois' jvspherecontrol,
    a Java CLI for the VMware vSphere API
    (https://github.com/jedi4ever/jvspherecontrol)
version_added: "1.2"
options:
path:
required: yes
    description: full path to the jvspherecontrol jar
url:
required: true
description: url to connect with to vSphere, including the sdk part
user:
required: true
description:
- username to connect to vSphere
password:
required: true
description:
- password to connect to vSphere
command:
required: true
description:
      - the base command to pass to jvspherecontrol;
        can be any of the supported jvspherecontrol commands
    choices: ['list', 'createvm', 'omapiregister', 'activatevnc',
              'deactivatevnc', 'sendvnctext']
list:
description:
- option to the list command, which objects to retrieve a list from vSphere
    choices: ['hosts', 'datacenters', 'datastores', 'clusters', 'networks',
              'users', 'vms', 'all', 'resourcepools']
bootorder:
description:
      - boot I(order): allow/deny lists of boot devices, e.g. allow:cd,hd,net or deny:net,cd
cdromdatastore:
description:
- cd/dvd datastorename
cdromisopath:
description:
- path to dvd isofile
  cluster:
    description:
      - name of the cluster to store the new VM in
  cpus:
    description:
      - number of CPUs to allocate
  datacenter:
    description:
      - name of the datacenter to store the new VM in
  datastore:
    description:
      - name of the datastore to store the new VM on
disk:
description:
- a list of dictionaries setting the disk(s)
      - size in MB
- ({datastore: 'Datastore 01', mode: '', size: '1024'}, ... )
memory:
description:
- memory size to allocate
- size in MB
name:
description:
- name of vm to create
nic:
description:
- a list of dictionaries setting the network interface(s): e.g.
({connected: True, name: 'VM Network', network: '', pxe: 'False',
startconnected: True, type: 'e1000'}, ... )
omapihost:
description:
- omapi hostname
omapikeyname:
description:
- omapi key to use
omapikeyvalue:
description:
- omapi value
omapioverwrite:
description:
- overwrite omapi entry
omapiport:
description:
- omapi portname
omapiregister:
description:
- register with omapi server
ostype:
description:
      - guest OS type of the VM to create
overwrite:
description:
- overwrite vm Flag
pxeinterface:
description:
- name of the network interface to PXE from
registermac:
description:
- command to execute, %s gets replaced with the MAC address
hostname:
description:
- hostname to register
macaddress:
description:
- mac address to register
omapihost:
description:
- omapi hostname
omapikeyname:
description:
- omapi key to use
omapikeyvalue:
description:
- omapi value
omapioverwrite:
description:
- overwrite omapi entry
omapiport:
description:
- omapi portname
omapiregister:
description:
- register with omapi server
vmname:
description:
- name of vm to create
vncpassword:
description:
- password to set on the VNC
vncport:
description:
- port to enable VNC on
vmname:
description:
      - name of the VM to disable VNC on
host:
description:
- host to send it to
port:
description:
- port to connect to
text:
description:
- text to send
wait:
description:
- seconds to wait in between sending different texts (default=1s)
notes:
  - the system this module runs on must have access to the VMware API;
    execute I(java -jar C($jvsphere-control-jar-with-dependencies.jar) --help)
    for more detailed information on which command takes which parameters
requirements:
  - jvspherecontrol must be built from the upstream project (git checkout, then mvn package),
    and java must be installed and available in C($PATH) on the system where the module runs
  - the jvspherecontrol jar must be available on that system, with its path defined
'''
EXAMPLES = '''
- hosts: localhost
tasks:
- action:
module: jvspherecontrol
path: /home/serge/src/jvspherecontrol/target/jvspherecontrol-0.0.4-SNAPSHOT-jar-with-dependencies.jar
url: vcserver.local
cluster: myorg
user: ansible
password: t0ps3cr3t
command: createvm
name: test
disk:
- datastore: datastore1
- datastore: datastore1
size: 10485760
nic:
- connected: yes
network: 3000
name: prod
pxe: yes
- name: mgmt
bootorder: 'allow:net,hd'
registermac: '"cobbler system edit --name=${inventory_hostname} --mac=%s"'
'''
COMMANDS = ['list', 'createvm', 'omapiregister', 'activatevnc',
            'deactivatevnc', 'sendvnctext']
NOACT_COMMANDS = ['list']
NEED_VSPHERE_CONN = ['list', 'createvm', 'activatevnc', 'deactivatevnc']
OPTIONS = {'list': ['hosts', 'datacenters', 'datastores', 'clusters', 'networks',
'users', 'vms', 'all', 'resourcepools'],
'createvm': ['bootorder', 'cdromdatastore', 'cdromisopath', 'cluster',
'cpus', 'datacenter', 'datastore', 'disk', 'memory', 'name',
'nic', 'omapihost', 'omapikeyname', 'omapikeyvalue',
'omapioverwrite', 'omapiport', 'omapiregister', 'ostype',
'overwrite', 'pxeinterface', 'registermac'],
'omapiregister': ['hostname', 'macaddress', 'omapihost',
'omapikeyname', 'omapikeyvalue', 'omapioverwrite',
'omapiport', 'omapiregister'],
'activatevnc': ['vmname', 'vncpassword', 'vncport'],
'deactivatevnc': ['vmname'],
'sendvnctext': ['host', 'password', 'port', 'text', 'wait']}
REQUIRED = {'createvm': ['name', 'memory', 'ostype'],
'omapiregister': ['macaddress', 'hostname'],
            'activatevnc': OPTIONS['activatevnc'],
'deactivatevnc': ['vmname'],
'sendvnctext': ['password', 'host', 'port']}
MULTI_OPTIONS = {'disk': ('datastore', 'mode', 'size'),
'nic': ('connected', 'name', 'network', 'pxe',
'startconnected', 'type')}
DEFAULTS = {'memory': '1024',
'ostype': 'Ubuntu64Guest',
'cpus': '1',
'disk': {'mode': 'persistent',
'size': '110485760'},
'nic': {'type': 'e1000',
'connected': 'True',
'startconnected': 'True',
'network': 'VM Network'}
}
import re
E_VM_EXISTS = 1
def safe_eval(str):
'''
adapted from ansible_utils.safe_eval
this is intended for allowing things like:
multioption: "{{ a_complex_variable }}"
'''
# do not allow method calls to modules
if not isinstance(str, basestring):
# already templated to a datastructure, perhaps?
return str
if re.search(r'\w\.\w+\(', str):
return str
# do not allow imports
if re.search(r'import \w+', str):
return str
try:
return eval(str)
except Exception, e:
return str
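# Illustrative behavior (hypothetical inputs): safe_eval("[{'size': '1024'}]")
# returns a list holding one dict, while safe_eval("import os") and
# safe_eval("os.system('id')") are returned unchanged as strings, because the
# import/method-call patterns above refuse to evaluate them.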
def check_paths(module):
p = module.params
# check for java exec
java = module.get_bin_path('java', True)
# check jvspherecontrol jar path
path = p['path']
if not os.path.isfile(path) or not os.access(path, os.R_OK):
module.fail_json(msg="could not find or read the jvspherecontrol jar")
return java
def get_jvspherecontrol(module):
# initial checks
java = check_paths(module)
p = module.params
# set base command
jvspherecontrol = '%s -jar %s' % (java, p['path'])
return jvspherecontrol
def get_connection_options(module):
p = module.params
url = p['url']
user = p['user']
password = p['password']
for opt in [ 'url', 'user', 'password' ]:
if p[opt] is None:
module.fail_json(msg="missing required option --%s for command %s" %
(opt, p['command']))
    # Try to failsafe if only a hostname/IP was given
    if not url.startswith('http://') and not url.startswith('https://'):
        url = 'https://' + url
    if not url.endswith('/sdk'):
        url = url + '/sdk'
return '--url "%s" --user "%s" --password "%s"' % (url, user, password)
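# Illustrative result (hypothetical host): get_connection_options() turns
# url=vcserver.local into --url "https://vcserver.local/sdk", while an
# already-complete URL passes through untouched.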
def _get_command_options(module, options=[], required=[]):
p = module.params
opts = ''
for opt in options: # options for given command
# check if we have a default for a non multi options
# there are no defaults for multi options
# there can be defaults for multioptions-suboptions, but these are handled later
if p[opt] is None and opt in required and opt in DEFAULTS and opt not in MULTI_OPTIONS:
p[opt] = DEFAULTS[opt]
if p[opt] is None and opt in required:
# if still none, error
module.fail_json(msg="missing required option --%s for command %s" %
(opt, p['command']))
elif opt in MULTI_OPTIONS:
# multi options have complex vars
# can be a dict (1 disk/nic) or a list of dicts (multiple disks/nics)
if isinstance(p[opt], basestring):
p[opt] = safe_eval(p[opt])
if isinstance(p[opt], dict):
# convert to a list of 1 dict
# then we handle lists
            devopts = [p[opt]]
elif isinstance(p[opt], list):
devopts = p[opt]
elif p[opt] is None:
devopts = []
else:
module.fail_json(msg="Error parsing complex variable %s == %s" %
(opt, p[opt]))
# devopts now is a list of dicts holding device multi-options parameters
for devicenum in range(len(devopts)):
# first replace missing suboptions by default value
for defopt in DEFAULTS[opt]:
if defopt not in devopts[devicenum]:
devopts[devicenum][defopt] = DEFAULTS[opt][defopt]
for devopt in devopts[devicenum]: # devopt = sub device dict key
if devopt in MULTI_OPTIONS[opt]:
# valid extension, device count starts at 1
devoptvalue = devopts[devicenum][devopt]
opts += ' --%s%s%s "%s"' % (opt, devopt, devicenum + 1, devoptvalue)
else:
module.fail_json(msg='--%s%s%s is not a valid option'
% (opt, devopt, devicenum + 1))
elif p[opt] is not None:
opts += ' --%s "%s"' % (opt, p[opt])
return opts
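# Illustrative expansion (hypothetical values): disk=[{'datastore': 'ds1'}]
# first gains the missing mode/size suboptions from DEFAULTS['disk'] and then
# expands to --diskdatastore1 "ds1" --diskmode1 "persistent"
# --disksize1 "110485760" (device numbering starts at 1; dict key order may vary).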
def get_command_options(module, command):
p = module.params
opts = ''
if command in NOACT_COMMANDS:
if command == 'list':
opts += '%s ' % p['list']
elif command in COMMANDS:
opts += _get_command_options(module, options=OPTIONS[command],
required=REQUIRED[command]) + ' '
if command in NEED_VSPHERE_CONN:
opts += get_connection_options(module)
return opts
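# Illustrative output (hypothetical values): for command='createvm' this returns
# something like
#   --name "test" --memory "1024" --ostype "Ubuntu64Guest" ...
#   --url "https://vcserver.local/sdk" --user "ansible" --password "..."
# which main() prepends with 'java -jar <path-to-jar> createvm'.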
def main():
module = AnsibleModule(
argument_spec = dict(
# path to jvspherecontrol.jar
path = dict(required=True),
# common required args
url = dict(),
user = dict(),
password = dict(),
# base command
command = dict(required=True, choices=COMMANDS),
# list command option
        list = dict(default='vms', choices=OPTIONS['list']),
# createvm vommand options
bootorder = dict(),
cdromdatastore = dict(),
cdromisopath = dict(),
cluster = dict(),
cpus = dict(),
datacenter = dict(),
datastore = dict(),
disk = dict(),
memory = dict(),
name = dict(),
nic = dict(),
omapihost = dict(),
omapikeyname = dict(),
omapikeyvalue = dict(),
omapioverwrite = dict(),
omapiport = dict(),
omapiregister = dict(),
ostype = dict(),
overwrite=dict(type='bool', default='no'),
pxeinterface = dict(),
registermac = dict(),
hostname = dict(),
macaddress = dict(),
vmname = dict(),
vncpassword = dict(),
vncport = dict(),
host = dict(),
port = dict(),
text = dict(),
wait = dict()
),
supports_check_mode=True
)
p = module.params
check_mode = module.check_mode
# get base command with required base args
jvspherecontrol = get_jvspherecontrol(module)
# get jvspherecontrol base command
command = p['command']
# get jvspherecontrol options for command
options = get_command_options(module, command)
# assemble executable command
cmd = '%s %s %s' % (jvspherecontrol, command, options)
    if command not in NOACT_COMMANDS and check_mode:
module.exit_json(changed=True)
(rc, out, err) = module.run_command(cmd)
    # jvspherecontrol exit codes suck, seems to be always 0 on error
if 'exception' in err.lower():
module.fail_json(msg='jvspherecontrol returned an error', stdout=out,
stderr=err, cmd=cmd)
if command == 'list':
if rc == 0:
list = []
for line in out.rstrip('\n').split('\n'):
if line not in ['executing']:
item = line.strip()
list.extend([item])
module.exit_json(changed=False, list=list, stdout=out, stderr=err)
        elif rc != 0 or 'exception' in err.lower():
module.fail_json(stdout=out, stderr=err, cmd=cmd, msg='failed executing list command')
elif command == 'createvm':
if rc == 0:
module.exit_json(changed=True, name=p['name'], stdout=out,
stderr=err, cmd=cmd)
elif rc == E_VM_EXISTS:
# when return code == E_VM_EXISTS == 1
# the vm already exists
# only happens when force is false (default)
module.exit_json(changed=False, name=p['name'], stdout=out,
stderr=err, cmd=cmd)
elif rc not in [0,1]:
module.fail_json(name=p['name'], stdout=out, stderr=err, cmd=cmd,
msg='Failed creating vm')
elif command in COMMANDS:
if rc == 0:
module.exit_json(changed=False, stdout=out, stderr=err, cmd=cmd)
else:
            module.fail_json(stdout=out, stderr=err, cmd=cmd,
                             msg='failed executing %s command' % command)
else:
module.fail_json(msg="the %s command does not exist" % command)
# this is magic, see lib/ansible/module_common.py
#<<INCLUDE_ANSIBLE_MODULE_COMMON>>
if __name__ == '__main__':
main()
--------------------------------------------------------------------------------
blob_id:              a63fb68ae1b426fcd04a586aea01c0a90939fd06
directory_id:         45826bdfebbd1d7638ab607906ac480031d6118b
path:                 /lib/vis/attention_visualizer.py
content_id:           a761875f8c9afd2079b1474c5410b765758f824a
detected_licenses:    ["MIT"]
license_type:         permissive
repo_name:            openseg-group/openseg.pytorch
snapshot_id:          b75cec5c95b6ff71707d8daf7806001bab89ecb3
revision_id:          aefc75517b09068d7131a69420bc5f66cb41f0ee
branch_name:          refs/heads/master
visit_date:           2023-09-06T10:19:57.749113
revision_date:        2022-08-07T09:10:20
committer_date:       2022-08-07T09:10:20
github_id:            166,743,301
star_events_count:    1,227
fork_events_count:    159
gha_license_id:       MIT
gha_event_created_at: 2021-07-14T06:10:44
gha_created_at:       2019-01-21T03:34:59
gha_language:         Python
src_encoding:         UTF-8
language:             Python
is_vendor:            false
is_generated:         false
length_bytes:         10,163
extension:            py
filename:             attention_visualizer.py
content:
##+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
## Created by: RainbowSecret
## Modified from: https://github.com/AlexHex7/Non-local_pytorch
## Microsoft Research
## yuyua@microsoft.com
## Copyright (c) 2018
##
## This source code is licensed under the MIT-style license found in the
## LICENSE file in the root directory of this source tree
##+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
import matplotlib
matplotlib.use('Agg')
import torch
import os
import sys
import pdb
import cv2
import numpy as np
from torch import nn
from torch.nn import functional as F
import functools
import matplotlib.pyplot as plt
from sklearn import svm, datasets
from sklearn.model_selection import train_test_split
from sklearn.metrics import confusion_matrix
from PIL import Image as PILImage
torch_ver = torch.__version__[:3]
ignore_label = 255
id_to_trainid = {-1: ignore_label, 0: ignore_label, 1: ignore_label, 2: ignore_label,
3: ignore_label, 4: ignore_label, 5: ignore_label, 6: ignore_label,
7: 0, 8: 1, 9: ignore_label, 10: ignore_label, 11: 2, 12: 3, 13: 4,
14: ignore_label, 15: ignore_label, 16: ignore_label, 17: 5,
18: ignore_label, 19: 6, 20: 7, 21: 8, 22: 9, 23: 10, 24: 11, 25: 12, 26: 13, 27: 14,
28: 15, 29: ignore_label, 30: ignore_label, 31: 16, 32: 17, 33: 18}
class_name_dict = {0:'road', 1:'sidewalk', 2:'building', 3:'wall', 4:'fence', 5:'pole',
                   6:'trafficlight', 7:'trafficsign', 8:'vegetation', 9:'terrain', 10:'sky',
11:'person', 12:'rider', 13:'car', 14:'truck', 15:'bus', 16:'train',
17:'motorcycle', 18:'bicycle', 255: 'none'}
def get_palette(num_cls):
""" Returns the color map for visualizing the segmentation mask.
Args:
num_cls: Number of classes
Returns:
The color map
"""
palette = [0] * (num_cls * 3)
    palette[0:3] = (128, 64, 128)     # 0: road
    palette[3:6] = (244, 35, 232)     # 1: sidewalk
    palette[6:9] = (70, 70, 70)       # 2: building
    palette[9:12] = (102, 102, 156)   # 3: wall
    palette[12:15] = (190, 153, 153)  # 4: fence
    palette[15:18] = (153, 153, 153)  # 5: pole
    palette[18:21] = (250, 170, 30)   # 6: traffic light
    palette[21:24] = (220, 220, 0)    # 7: traffic sign
    palette[24:27] = (107, 142, 35)   # 8: vegetation
    palette[27:30] = (152, 251, 152)  # 9: terrain
    palette[30:33] = (70, 130, 180)   # 10: sky
    palette[33:36] = (220, 20, 60)    # 11: person
    palette[36:39] = (255, 0, 0)      # 12: rider
    palette[39:42] = (0, 0, 142)      # 13: car
    palette[42:45] = (0, 0, 70)       # 14: truck
    palette[45:48] = (0, 60, 100)     # 15: bus
    palette[48:51] = (0, 80, 100)     # 16: train
    palette[51:54] = (0, 0, 230)      # 17: motorcycle
    palette[54:57] = (119, 11, 32)    # 18: bicycle
palette[57:60] = (105, 105, 105)
return palette
palette = get_palette(20)
def id2trainId(label, id_to_trainid, reverse=False):
label_copy = label.copy()
if reverse:
for v, k in id_to_trainid.items():
label_copy[label == k] = v
else:
for k, v in id_to_trainid.items():
label_copy[label == k] = v
return label_copy
def down_sample_target(target, scale):
row, col = target.shape
step = scale
r_target = target[0:row:step, :]
c_target = r_target[:, 0:col:step]
return c_target
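# e.g. a 1024x2048 Cityscapes label map with scale=8 shrinks to 128x256 by
# keeping every 8th row and every 8th column.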
def visualize_map(atten, shape, out_path):
atten_np = atten.cpu().data.numpy() # c x hw
(h, w) = shape
for row in range(2):
for col in range(9):
# plt.subplot(5,8,9+row*8+col)
# pdb.set_trace()
cm = atten_np[row*8+col]
cm = np.reshape(cm, (h, w))
plt.tight_layout()
plt.imshow(cm, cmap='Blues', interpolation='nearest')
plt.axis('off')
            plt.savefig(out_path + 'regionmap_' + str(row*8+col) + '.png', bbox_inches='tight', pad_inches=0)
pdb.set_trace()
def Vis_A2_Atten(img_path,
label_path,
image,
label,
atten,
shape,
cmap=plt.cm.Blues,
index=1,
choice=1,
maps_count=32):
"""
This function prints and plots the attention weight matrix.
Input:
choice: 1 represents plotting the histogram of the weights' distribution
2 represents plotting the attention weights' map
"""
atten_np = atten.cpu().data.numpy() # c x hw
(h, w) = shape
if choice == 1:
# read image/ label from the given paths
        image = cv2.imread(img_path[index], cv2.IMREAD_COLOR)  # 1024x2048x3
        image = image[:, :, ::-1]  # BGR -> RGB
        image = cv2.resize(image, dsize=(w, h), interpolation=cv2.INTER_CUBIC)
label = cv2.imread(label_path[index], cv2.IMREAD_GRAYSCALE) #1024x2048
label = id2trainId(label, id_to_trainid)
label = down_sample_target(label, 8)
else:
# use the image crop directly.
        image = image.astype(np.float32)[index]  # 3x1024x2048
image = np.transpose(image, (1,2,0))
mean = (102.9801, 115.9465, 122.7717)
image += mean
image = image.astype(np.uint8)
image = cv2.resize(image, dsize=(w, h),interpolation=cv2.INTER_CUBIC)
label = label.cpu().numpy().astype(np.uint8)[index]
label = down_sample_target(label, 8)
img_label = PILImage.fromarray(label)
img_label.putpalette(palette)
plt.tight_layout()
plt.figure(figsize=(48, 24))
plt.axis('off')
plt.subplot(5,8,1)
plt.imshow(image)
plt.axis('off')
plt.subplot(5,8,2)
plt.imshow(img_label)
plt.axis('off')
for row in range(4):
for col in range(8):
plt.subplot(5,8,9+row*8+col)
cm = atten_np[row*8+col]
cm = np.reshape(cm, (h, w))
plt.imshow(cm, cmap='Blues', interpolation='nearest')
plt.axis('off')
plt.gca().set_title("Attention Map %d" %(row*8+col))
# plt.subplot(3,7,1)
# plt.imshow(image)
# plt.axis('off')
# plt.subplot(3,7,2)
# plt.imshow(img_label)
# plt.axis('off')
# for row in range(3):
# for col in range(7):
# if (row*7+col) == 0 or (row*7+col) == 1:
# continue
# plt.subplot(3,7,row*7+col+1)
# cm = atten_np[row*7+col-2]
# cm = np.reshape(cm, (h, w))
# plt.imshow(cm, cmap='Blues', interpolation='nearest')
# plt.axis('off')
# plt.gca().set_title("Attention Map %d" %(row*7+col-2))
plt.show()
outpath='./object_context_vis/a2map_32/'
plt.savefig(outpath+'a2map_'+str(img_path[0][0:-3].split('/')[-1])+'png', bbox_inches='tight', pad_inches = 0)
print("image id: {}".format(img_path[0][0:-3].split('/')[-1]))
def Vis_FastOC_Atten(img_path,
label_path,
image,
label,
atten,
shape,
cmap=plt.cm.Blues,
index=1,
choice=1,
subplot=False):
"""
This function prints and plots the attention weight matrix.
Input:
choice: 1 represents plotting the histogram of the weights' distribution
2 represents plotting the attention weights' map
"""
atten_np = atten.cpu().data.numpy() # c x hw
(h, w) = shape
if choice == 1:
# read image/ label from the given paths
        image = cv2.imread(img_path[index], cv2.IMREAD_COLOR)  # 1024x2048x3
        image = image[:, :, ::-1]  # BGR -> RGB
        image = cv2.resize(image, dsize=(w, h), interpolation=cv2.INTER_CUBIC)
label = cv2.imread(label_path[index], cv2.IMREAD_GRAYSCALE) #1024x2048
label = id2trainId(label, id_to_trainid)
label = down_sample_target(label, 8)
else:
# use the image crop directly.
        image = image.astype(np.float32)[index]  # 3x1024x2048
image = np.transpose(image, (1,2,0))
mean = (102.9801, 115.9465, 122.7717)
image += mean
image = image.astype(np.uint8)
image = cv2.resize(image, dsize=(w, h),interpolation=cv2.INTER_CUBIC)
label = label.cpu().numpy().astype(np.uint8)[index]
label = down_sample_target(label, 8)
img_label = PILImage.fromarray(label)
img_label.putpalette(palette)
plt.tight_layout()
plt.figure(figsize=(48, 24))
plt.axis('off')
if subplot:
plt.subplot(3,7,1)
plt.imshow(image)
plt.axis('off')
plt.subplot(3,7,2)
plt.imshow(img_label)
plt.axis('off')
for row in range(3):
for col in range(7):
if (row*7+col) == 0 or (row*7+col) == 1:
continue
if subplot:
plt.subplot(3,7,row*7+col+1)
cm = atten_np[row*7+col-2]
cm = np.reshape(cm, (h, w))
plt.imshow(cm, cmap='Blues', interpolation='nearest')
plt.axis('off')
if not subplot:
plt.show()
outpath='./object_context_vis/fast_baseoc_map/'
plt.savefig(outpath+'fast_baseoc_map_'+str(img_path[0][0:-3].split('/')[-1])+'_'+str(row*7+col-2)+'.png', bbox_inches='tight', pad_inches = 0)
else:
plt.gca().set_title("Attention Map %d" %(row*7+col-2))
if subplot:
plt.show()
outpath='./object_context_vis/fast_baseoc_map/'
plt.savefig(outpath+'fast_baseoc_map_'+str(img_path[0][0:-3].split('/')[-1])+'png', bbox_inches='tight', pad_inches = 0)
print("image id: {}".format(img_path[0][0:-3].split('/')[-1]))
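For reference, a minimal standalone sketch (reusing get_palette from above; the zero array is a stand-in for a real trainId map) of how putpalette renders a label map with these Cityscapes colors:

import numpy as np
from PIL import Image as PILImage

label = np.zeros((128, 256), dtype=np.uint8)  # stand-in trainId map (all class 0, "road")
vis = PILImage.fromarray(label)               # 8-bit image, one class id per pixel
vis.putpalette(get_palette(20))               # attach the Cityscapes palette from above
vis.save('label_vis.png')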
--------------------------------------------------------------------------------
blob_id:              c5a45719a2ad6803b0cff6a388f5d843c7e58dce
directory_id:         c6759b857e55991fea3ef0b465dbcee53fa38714
path:                 /tools/nntool/nntool/quantization/multiplicative/quantizers/activation_mult.py
content_id:           0ac2028e1debb7d76ac05ab22af6cd19bfcd2de5
detected_licenses:    ["AGPL-3.0-or-later", "AGPL-3.0-only", "GPL-1.0-or-later",
                       "LicenseRef-scancode-other-copyleft", "Apache-2.0"]
license_type:         permissive
repo_name:            GreenWaves-Technologies/gap_sdk
snapshot_id:          1b343bba97b7a5ce62a24162bd72eef5cc67e269
revision_id:          3fea306d52ee33f923f2423c5a75d9eb1c07e904
branch_name:          refs/heads/master
visit_date:           2023-09-01T14:38:34.270427
revision_date:        2023-08-10T09:04:44
committer_date:       2023-08-10T09:04:44
github_id:            133,324,605
star_events_count:    145
fork_events_count:    96
gha_license_id:       Apache-2.0
gha_event_created_at: 2023-08-27T19:03:52
gha_created_at:       2018-05-14T07:50:29
gha_language:         C
src_encoding:         UTF-8
language:             Python
is_vendor:            false
is_generated:         false
length_bytes:         13,589
extension:            py
filename:             activation_mult.py
content:
# Copyright (C) 2020 GreenWaves Technologies, SAS
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import logging
import math
from copy import deepcopy
import numpy as np
from nntool.graph.types.activations import (ActivationNodeBase,
HSigmoidNode,
HSwishNode,
HTanHNode,
LeakyNode,
ReluNode,
SigmoidNode,
TanHNode)
from nntool.graph.types.fusions import ActivationFusionNode
from nntool.quantization.multiplicative.scaling_qtypes import MultMulBiasScaleQType
from nntool.quantization.new_qrec import QRec
from nntool.quantization.qtype import QType
from nntool.quantization.unified_quantization_handler import (in_qs_constraint,
                                                              out_qs_constraint,
                                                              option_constraint,
                                                              params_type,
                                                              options, priority)
from ..mult_quantization_handler import MultQuantizionHandler
from nntool.quantization.quantizer_options import *
LOG = logging.getLogger('nntool.' + __name__)
@options(
FORCE_OUTPUT_SIZE_OPTION,
)
class ActivationMultSWBase(MultQuantizionHandler):
@classmethod
def _quantize_sw(cls, params, in_qs, stats, in_dtype, out_dtype, out_asym, **kwargs):
force_out_qs, _ = cls.get_mult_opts(**kwargs)
force_out_q = force_out_qs and force_out_qs[0]
fusion = kwargs.get('fusion', None)
in_q = in_qs[0]
if fusion:
in_dtype = np.int32
bits = 8 if out_dtype == np.int8 or out_dtype == np.uint8 else 16
if isinstance(params, (HSwishNode, HSigmoidNode)):
# we need to be able to represent offset and upper_bound in output dtype
# input range should match stats since swish requires the full input range
if fusion:
# in a fusion the output container is smaller than the input container
# The input scale may be too small to represent offset and upper_bound
# in the output dtype
params_qtype = QType.from_min_max_sq(
0,
np.maximum(
params.upper_bound,
params.offset),
bits=bits,
dtype=out_dtype)
in_q = QType.from_min_max_sq(
*stats.get_range_in(0),
dtype=in_dtype)
# if params scale is larger then we must reduce precision
if np.all(params_qtype.scale > in_q.scale):
in_q.scale = params_qtype.scale
else:
# outside a fusion our in and out dtype is the same
# so we just need to check that offset and upper_bound can be represented
if in_dtype == np.uint8:
in_dtype = np.int8
elif in_dtype == np.uint16:
in_dtype = np.int16
if isinstance(params, HSwishNode):
lower, upper = stats.get_range_in(0)
upper = np.maximum(
np.maximum(
params.upper_bound,
params.offset),
upper)
else:
lower = -params.offset
upper = params.upper_bound
in_q = QType.from_min_max_sq(
lower,
upper,
dtype=in_dtype)
elif isinstance(params, (TanHNode, SigmoidNode)):
if in_dtype == np.int8:
in_q = QType.from_min_max_sq(
-8,
8,
dtype=in_dtype,
forced=True)
elif in_dtype in [np.uint8, np.uint16]:
in_q = QType(
dtype=in_dtype,
scale=pow(2, -12),
zero_point=1<<(8 if in_dtype == np.uint8 else 16))
else:
in_q = QType(
dtype=in_dtype,
scale=pow(2, -12))
elif isinstance(params, HTanHNode):
scale = 2 / pow(2, bits)
in_q = QType(scale=scale, dtype=in_dtype, forced=True)
elif isinstance(params, (LeakyNode, )):
max_out = np.max(np.abs(stats.get_range_out(0, bits=bits)))
scale = (2 * max_out) / pow(2, bits)
in_q = QType(scale=scale, dtype=in_dtype, forced=True)
if force_out_q:
o_q = deepcopy(force_out_q)
if isinstance(params, LeakyNode):
in_q.scale = o_q.scale
elif isinstance(params, ReluNode):
in_q = deepcopy(o_q)
# activation cannot move zeropoint unless it is a reduction step
if o_q.zero_point != in_q.zero_point and not fusion:
return None
else:
# cls.check_valid_ranges(params, stats, idx=0, dirs='out')
if isinstance(params, ReluNode):
max_val = params.upper_bound if params.upper_bound else stats.get_range_out(0, bits=bits)[1]
o_q = QType.from_min_max_sq(0,
max_val,
dtype=out_dtype,
asymmetric=(in_q.zero_point != 0) or out_dtype in [np.uint8, np.uint16])
in_q = deepcopy(o_q)
elif isinstance(params, TanHNode):
o_q = QType.from_min_max_sq(
min_val=-1, max_val=1, dtype=out_dtype, asymmetric=out_asym)
elif isinstance(params, SigmoidNode):
o_q = QType.from_min_max_sq(
min_val=0, max_val=1, dtype=out_dtype, asymmetric=out_asym)
elif isinstance(params, LeakyNode):
o_q = QType.from_min_max_sq(*stats.get_range_out(0, bits=bits),
dtype=out_dtype,
asymmetric=out_asym)
in_q.scale = o_q.scale
elif isinstance(params, HSigmoidNode):
            # hsigmoid prefers to output zero-point 0 to represent the 0 - 1 range
o_q = QType.from_min_max_sq(
min_val=0, max_val=1, dtype=out_dtype, asymmetric=out_asym)
elif isinstance(params, HSwishNode):
            # hswish multiplies the 0..upper_bound range by the input, so take the upper bound from the stats
o_q = QType.from_min_max_sq(*stats.get_range_out(0, bits=bits),
dtype=out_dtype,
asymmetric=out_asym)
else:
o_q = QType.from_min_max_sq(*stats.get_range_out(0, bits=bits),
dtype=out_dtype,
asymmetric=out_asym)
qrec = QRec.scaled(in_qs=[in_q], out_qs=[o_q])
qrec = cls.compute_cache(params, qrec, stats)
return qrec
@classmethod
def get_prefered_input_dtypes(cls, params, **kwargs):
return [np.int8]
@classmethod
def compute_cache(cls, params, qrec, stats):
scale_mul_biases_q = MultMulBiasScaleQType(dtype=np.uint8)
qrec.cache['scale_mul_biases_q'] = scale_mul_biases_q
if isinstance(params, (ReluNode)):
if params.upper_bound is not None:
qrec.cache['upper_bound'] = qrec.in_qs[0].quantize(
params.upper_bound).astype(qrec.out_qs[0].dtype)
qrec.cache['lower_bound'] = qrec.in_qs[0].quantize(
params.lower_bound).astype(qrec.out_qs[0].dtype)
scale_mul_biases_q.scale = (
qrec.in_qs[0].scale/qrec.out_qs[0].scale)
elif isinstance(params, (SigmoidNode, TanHNode)):
scale_mul_biases_q.scale = math.pow(2, -15) / qrec.out_qs[0].scale
qrec.cache["zero_point"] = qrec.out_qs[0].zero_point.astype(
qrec.out_qs[0].dtype)
elif isinstance(params, (LeakyNode)):
scale_mul_biases_q.scale = (
qrec.in_qs[0].scale/qrec.out_qs[0].scale)
qrec.cache['leak_factor'] = np.int8(
params.leak_factor*math.pow(2, 7) + 0.5)
qrec.cache['zero_point'] = qrec.out_qs[0].zero_point.astype(qrec.out_qs[0].dtype)
elif isinstance(params, (HSwishNode, HSigmoidNode)):
scale = (qrec.in_qs[0].scale * params.mult)/qrec.out_qs[0].scale
if isinstance(params, HSwishNode):
# HSwish multiplies HSigmoid by input
scale *= qrec.in_qs[0].scale
scale_mul_biases_q.scale = scale
qrec.cache['offset'] = qrec.in_qs[0].quantize(
params.offset).astype(qrec.out_qs[0].dtype)
qrec.cache['zero_point'] = qrec.out_qs[0].zero_point
qrec.cache['upper_bound'] = qrec.in_qs[0].quantize(
params.upper_bound).astype(qrec.out_qs[0].dtype)
else:
scale_mul_biases_q.scale = (
qrec.in_qs[0].scale/qrec.out_qs[0].scale)
return qrec
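# Illustrative arithmetic (hypothetical scales, zero zero-point assumed): for a
# ReLU with input scale 0.05 and output scale 0.02, compute_cache stores
# scale_mul_biases_q.scale = 0.05 / 0.02 = 2.5, and an upper_bound of 6.0 is
# cached as round(6.0 / 0.05) = 120 in the output dtype.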
@params_type(ActivationNodeBase)
@in_qs_constraint({'dtype': {np.int8, np.int16, np.int32}})
@out_qs_constraint({'dtype': np.int8})
@option_constraint(force_output_size={8, None})
class ActivationMultSW_I_I8(ActivationMultSWBase):
@classmethod
def _quantize(cls, params, in_qs, stats, **kwargs):
return cls._quantize_sw(params, in_qs, stats, in_qs[0].dtype, np.int8, out_asym=False, **kwargs)
@params_type(HSwishNode, HSigmoidNode)
@in_qs_constraint({'dtype': {np.int8, np.int16, np.int32}})
@out_qs_constraint({'dtype': np.uint8})
@option_constraint(force_output_size={8, None})
@priority(2)
class ActivationMultSW_HSwish_I_U8(ActivationMultSWBase):
@classmethod
def _get_in_qs_from_stats(cls, params, stats, in_qs, **kwargs):
dtype = in_qs and in_qs[0] and in_qs[0].dtype
if dtype == np.uint16:
dtype = np.int16
else:
dtype = np.int8
return [QType.from_min_max_sq(
*stats.get_range_in(0),
dtype=dtype)]
@classmethod
def _quantize(cls, params, in_qs, stats, **kwargs):
return cls._quantize_sw(params, in_qs, stats, in_qs[0].dtype, np.uint8, out_asym=False, **kwargs)
@params_type(HSwishNode, HSigmoidNode)
@in_qs_constraint({'dtype': {np.int8, np.int16, np.int32}})
@out_qs_constraint({'dtype': np.uint16})
@option_constraint(force_output_size=16)
class ActivationMultSW_HSwish_I_U16(ActivationMultSWBase):
@classmethod
def _get_in_qs_from_stats(cls, params, stats, in_qs, **kwargs):
dtype = in_qs and in_qs[0] and in_qs[0].dtype
if dtype == np.uint16:
dtype = np.int16
else:
dtype = np.int8
return [QType.from_min_max_sq(
*stats.get_range_in(0),
dtype=dtype)]
@classmethod
def _quantize(cls, params, in_qs, stats, **kwargs):
return cls._quantize_sw(params, in_qs, stats, in_qs[0].dtype, np.uint16, out_asym=False, **kwargs)
@params_type(ActivationNodeBase)
@in_qs_constraint({'dtype': {np.int8, np.int16, np.int32}})
@out_qs_constraint({'dtype': np.int16})
@option_constraint(force_output_size=16)
class ActivationMultSW_I_I16(ActivationMultSWBase):
@classmethod
def _quantize(cls, params, in_qs, stats, **kwargs):
return cls._quantize_sw(params, in_qs, stats, in_qs[0].dtype, np.int16, out_asym=False, **kwargs)
def check_not_global(params, **kwargs):
fusion = kwargs.get('fusion')
return not isinstance(fusion, ActivationFusionNode) or fusion.fusion_type != 'pool_active'
@params_type(LeakyNode, TanHNode, SigmoidNode, ReluNode)
@in_qs_constraint({'dtype': {np.uint8, np.int32}})
@out_qs_constraint({'dtype': np.uint8})
@option_constraint(force_output_size={8, None}, __function_constraint=check_not_global)
class ActivationMultSW_U_U8(ActivationMultSWBase):
# This handler should be called only for NE16 for the moment --> out is asym
@classmethod
def _quantize(cls, params, in_qs, stats, **kwargs):
return cls._quantize_sw(params, in_qs, stats, in_qs[0].dtype, np.uint8, out_asym=True, **kwargs)
@params_type(LeakyNode, TanHNode, SigmoidNode, ReluNode)
@in_qs_constraint({'dtype': {np.uint16, np.int32}})
@out_qs_constraint({'dtype': np.uint16})
@option_constraint(force_output_size=16, __function_constraint=check_not_global)
class ActivationMultSW_U_U16(ActivationMultSWBase):
# This handler should be called only for NE16 for the moment --> out is asym
@classmethod
def _quantize(cls, params, in_qs, stats, **kwargs):
return cls._quantize_sw(params, in_qs, stats, in_qs[0].dtype, np.uint16, out_asym=True, **kwargs)
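For orientation, a standalone sketch of the symmetric per-tensor scaling these handlers lean on; this is plain NumPy, not the nntool QType API, and the helper name is ours:

import numpy as np

def sym_scale(min_val, max_val, bits=8):
    # pick a scale so the largest magnitude maps onto the signed integer max
    max_abs = max(abs(min_val), abs(max_val))
    return max_abs / (2 ** (bits - 1) - 1)

scale = sym_scale(-8, 8)  # ~0.063, cf. the forced int8 range for TanH/Sigmoid above
x = np.array([-8.0, 0.5, 7.9])
q = np.clip(np.round(x / scale), -128, 127).astype(np.int8)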
--------------------------------------------------------------------------------
blob_id:              ced493c388c66f14df8567aafd2716d5019510b6
directory_id:         13ce98780a7e6e7e1412ae91a0fa97a91cf66a73
path:                 /seleniumbase/console_scripts/run.py
content_id:           373a43bc376c3a4d8569163419d5e46f94d29ead
detected_licenses:    ["MIT"]
license_type:         permissive
repo_name:            seleniumbase/SeleniumBase
snapshot_id:          c607312c0b8f45297088c1283150eb73ea32c553
revision_id:          63d95c42fc84bbcea415c6d8a3a201587b89c92e
branch_name:          refs/heads/master
visit_date:           2023-09-06T05:58:07.923058
revision_date:        2023-09-02T14:14:03
committer_date:       2023-09-02T14:14:03
github_id:            17,420,614
star_events_count:    3,656
fork_events_count:    944
gha_license_id:       MIT
gha_event_created_at: 2023-09-13T21:12:20
gha_created_at:       2014-03-04T23:07:33
gha_language:         Python
src_encoding:         UTF-8
language:             Python
is_vendor:            false
is_generated:         false
length_bytes:         58,036
extension:            py
filename:             run.py
content:
"""
SeleniumBase console scripts runner
Usage:
seleniumbase [COMMAND] [PARAMETERS]
OR sbase [COMMAND] [PARAMETERS]
Examples:
sbase get chromedriver
sbase methods
sbase options
sbase commander
sbase behave-gui
sbase behave-options
sbase caseplans
sbase mkdir ui_tests
sbase mkfile new_test.py
sbase mkrec new_test.py
sbase mkrec new_test.py --url=wikipedia.org
sbase codegen new_test.py --url=wikipedia.org
sbase recorder
sbase record new_test.py
sbase record
sbase mkpres new_presentation.py
sbase mkchart new_chart.py
sbase convert webdriver_unittest_file.py
sbase print my_first_test.py -n
sbase translate my_first_test.py --zh -p
sbase extract-objects my_first_test.py
sbase inject-objects my_first_test.py
sbase objectify my_first_test.py
sbase revert-objects my_first_test.py
sbase encrypt
sbase decrypt
sbase download server
sbase grid-hub start
sbase grid-node start --hub=127.0.0.1
"""
import colorama
import sys
import time
from seleniumbase.config import settings
from seleniumbase.fixtures import constants
colorama.init(autoreset=True)
def show_usage():
show_basic_usage()
sc = ""
sc += ' Type "sbase help [COMMAND]" for specific command info.\n'
sc += ' For info on all commands, type: "seleniumbase --help".\n'
sc += ' Use "pytest" for running tests.\n'
if "linux" not in sys.platform:
c1 = colorama.Fore.BLUE + colorama.Back.LIGHTCYAN_EX
c2 = colorama.Fore.BLUE + colorama.Back.LIGHTGREEN_EX
c3 = colorama.Fore.BLUE + colorama.Back.LIGHTYELLOW_EX
c4 = colorama.Fore.MAGENTA + colorama.Back.LIGHTYELLOW_EX
cr = colorama.Style.RESET_ALL
sc = sc.replace("seleniumbase", c1 + "selenium" + c2 + "base" + cr)
sc = sc.replace("sbase", c1 + "s" + c2 + "base" + cr)
sc = sc.replace("pytest", c3 + "pytest" + cr)
sc = sc.replace("--help", c4 + "--help" + cr)
sc = sc.replace("help", c4 + "help" + cr)
print(sc)
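# Illustrative effect (non-Linux platforms only): every "sbase" in the usage
# text is re-rendered as "s" on a light-cyan background plus "base" on a
# light-green background, with styles reset afterward; on Linux the plain
# text is printed as-is.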
def show_basic_usage():
from seleniumbase.console_scripts import logo_helper
seleniumbase_logo = logo_helper.get_seleniumbase_logo()
print(seleniumbase_logo)
print("")
time.sleep(0.25) # Enough time to see the logo
show_package_location()
show_version_info()
print("")
sc = ""
sc += ' * USAGE: "seleniumbase [COMMAND] [PARAMETERS]"\n'
sc += ' * OR: "sbase [COMMAND] [PARAMETERS]"\n'
sc += "\n"
sc += "COMMANDS:\n"
sc += " get / install [DRIVER] [OPTIONS]\n"
sc += " methods (List common Python methods)\n"
sc += " options (List common pytest options)\n"
sc += " behave-options (List common behave options)\n"
sc += " gui / commander [OPTIONAL PATH or TEST FILE]\n"
sc += " behave-gui (SBase Commander for Behave)\n"
sc += " caseplans [OPTIONAL PATH or TEST FILE]\n"
sc += " mkdir [DIRECTORY] [OPTIONS]\n"
sc += " mkfile [FILE.py] [OPTIONS]\n"
sc += " mkrec / codegen [FILE.py] [OPTIONS]\n"
sc += " recorder (Open Recorder Desktop App.)\n"
sc += " record (If args: mkrec. Else: App.)\n"
sc += " mkpres [FILE.py] [LANG]\n"
sc += " mkchart [FILE.py] [LANG]\n"
sc += " print [FILE] [OPTIONS]\n"
sc += " translate [SB_FILE.py] [LANG] [ACTION]\n"
sc += " convert [WEBDRIVER_UNITTEST_FILE.py]\n"
sc += " extract-objects [SB_FILE.py]\n"
sc += " inject-objects [SB_FILE.py] [OPTIONS]\n"
sc += " objectify [SB_FILE.py] [OPTIONS]\n"
sc += " revert-objects [SB_FILE.py] [OPTIONS]\n"
sc += " encrypt / obfuscate\n"
sc += " decrypt / unobfuscate\n"
sc += " download server (Get Selenium Grid JAR file)\n"
sc += " grid-hub [start|stop] [OPTIONS]\n"
sc += " grid-node [start|stop] --hub=[HOST/IP]\n"
sc += ' * (EXAMPLE: "sbase get chromedriver") *\n'
sc += ""
if "linux" not in sys.platform:
c1 = colorama.Fore.BLUE + colorama.Back.LIGHTCYAN_EX
c2 = colorama.Fore.BLUE + colorama.Back.LIGHTGREEN_EX
cr = colorama.Style.RESET_ALL
sc = sc.replace("seleniumbase", c1 + "selenium" + c2 + "base" + cr)
sc = sc.replace("sbase", c1 + "s" + c2 + "base" + cr)
print(sc)
def show_install_usage():
c2 = colorama.Fore.BLUE + colorama.Back.LIGHTGREEN_EX
c3 = colorama.Fore.BLUE + colorama.Back.LIGHTYELLOW_EX
cr = colorama.Style.RESET_ALL
sc = " " + c2 + "** " + c3 + "get / install" + c2 + " **" + cr
print(sc)
print("")
print(" Usage:")
print(" seleniumbase install [DRIVER_NAME] [OPTIONS]")
print(" OR: seleniumbase get [DRIVER_NAME] [OPTIONS]")
print(" OR: sbase install [DRIVER_NAME] [OPTIONS]")
print(" OR: sbase get [DRIVER_NAME] [OPTIONS]")
print(" (Drivers: chromedriver, geckodriver, edgedriver")
print(" iedriver, uc_driver)")
print(" Options:")
print(" VERSION Specify the version to download.")
print(" Tries to detect the needed version.")
print(" If using chromedriver or edgedriver,")
print(" you can use the major version integer.")
print()
print(" -p OR --path Also copy the driver to /usr/local/bin")
print(" Examples:")
print(" sbase get chromedriver")
print(" sbase get geckodriver")
print(" sbase get edgedriver")
print(" sbase get chromedriver 114")
print(" sbase get chromedriver 114.0.5735.90")
print(" sbase get chromedriver stable")
print(" sbase get chromedriver beta")
print(" sbase get chromedriver -p")
print(" Output:")
print(" Downloads the webdriver to seleniumbase/drivers/")
print(" (chromedriver is required for Chrome automation)")
print(" (geckodriver is required for Firefox automation)")
    print(" (edgedriver is required for MS Edge automation)")
print("")
def show_commander_usage():
c2 = colorama.Fore.BLUE + colorama.Back.LIGHTGREEN_EX
c3 = colorama.Fore.BLUE + colorama.Back.LIGHTYELLOW_EX
cr = colorama.Style.RESET_ALL
sc = " " + c2 + "** " + c3 + "commander" + c2 + " **" + cr
print(sc)
print("")
print(" Usage:")
print(" seleniumbase commander [OPTIONAL PATH or TEST FILE]")
print(" OR: sbase commander [OPTIONAL PATH or TEST FILE]")
print(" OR: seleniumbase gui [OPTIONAL PATH or TEST FILE]")
print(" OR: sbase gui [OPTIONAL PATH or TEST FILE]")
print(" Examples:")
print(" sbase gui")
print(" sbase gui -k agent")
print(" sbase gui -m marker2")
print(" sbase gui test_suite.py")
print(" sbase gui offline_examples/")
print(" Output:")
print(" Launches SeleniumBase Commander | GUI for pytest.")
print("")
def show_behave_gui_usage():
c2 = colorama.Fore.BLUE + colorama.Back.LIGHTGREEN_EX
c3 = colorama.Fore.BLUE + colorama.Back.LIGHTYELLOW_EX
cr = colorama.Style.RESET_ALL
sc = " " + c2 + "** " + c3 + "behave-gui" + c2 + " **" + cr
print(sc)
print("")
print(" Usage:")
print(" seleniumbase behave-gui [OPTIONAL PATH or TEST FILE]")
print(" seleniumbase gui-behave [OPTIONAL PATH or TEST FILE]")
print(" OR: sbase behave-gui [OPTIONAL PATH or TEST FILE]")
print(" OR: sbase gui-behave [OPTIONAL PATH or TEST FILE]")
print(" Examples:")
print(" sbase behave-gui")
print(" sbase behave-gui features/")
print(" sbase behave-gui features/calculator.feature")
print(" Output:")
print(" Launches SeleniumBase Commander | GUI for Behave.")
print("")
def show_caseplans_usage():
c2 = colorama.Fore.BLUE + colorama.Back.LIGHTGREEN_EX
c3 = colorama.Fore.BLUE + colorama.Back.LIGHTYELLOW_EX
cr = colorama.Style.RESET_ALL
sc = " " + c2 + "** " + c3 + "caseplans" + c2 + " **" + cr
print(sc)
print("")
print(" Usage:")
print(" seleniumbase caseplans [OPTIONAL PATH or TEST FILE]")
print(" OR: sbase caseplans [OPTIONAL PATH or TEST FILE]")
print(" Examples:")
print(" sbase caseplans")
print(" sbase caseplans -k agent")
print(" sbase caseplans -m marker2")
print(" sbase caseplans test_suite.py")
print(" sbase caseplans offline_examples/")
print(" Output:")
print(" Launches the SeleniumBase Case Plans Generator.")
print("")
def show_mkdir_usage():
c2 = colorama.Fore.BLUE + colorama.Back.LIGHTGREEN_EX
c3 = colorama.Fore.BLUE + colorama.Back.LIGHTYELLOW_EX
cr = colorama.Style.RESET_ALL
sc = " " + c2 + "** " + c3 + "mkdir" + c2 + " **" + cr
print(sc)
print("")
print(" Usage:")
print(" seleniumbase mkdir [DIRECTORY] [OPTIONS]")
print(" OR: sbase mkdir [DIRECTORY] [OPTIONS]")
print(" Example:")
print(" sbase mkdir ui_tests")
print(" Options:")
print(" -b / --basic (Only config files. No tests added.)")
print(" Output:")
print(" Creates a new folder for running SBase scripts.")
print(" The new folder contains default config files,")
print(" sample tests for helping new users get started,")
print(" and Python boilerplates for setting up customized")
print(" test frameworks.")
print("")
def show_mkfile_usage():
c2 = colorama.Fore.BLUE + colorama.Back.LIGHTGREEN_EX
c3 = colorama.Fore.BLUE + colorama.Back.LIGHTYELLOW_EX
cr = colorama.Style.RESET_ALL
sc = " " + c2 + "** " + c3 + "mkfile" + c2 + " **" + cr
print(sc)
print("")
print(" Usage:")
print(" seleniumbase mkfile [FILE.py] [OPTIONS]")
print(" OR: sbase mkfile [FILE.py] [OPTIONS]")
print(" Example:")
print(" sbase mkfile new_test.py")
print(" Options:")
print(" -b / --basic (Basic boilerplate / single-line test)")
print(" -r / --rec (adds Pdb+ breakpoint for Recorder Mode)")
print(" --url=URL (makes the test start on a specific page)")
print(" Language Options:")
print(" --en / --English | --zh / --Chinese")
print(" --nl / --Dutch | --fr / --French")
print(" --it / --Italian | --ja / --Japanese")
print(" --ko / --Korean | --pt / --Portuguese")
print(" --ru / --Russian | --es / --Spanish")
print(" Syntax Formats:")
print(" --bc / --basecase (BaseCase class inheritance)")
print(" --pf / --pytest-fixture (sb pytest fixture)")
print(" --cf / --class-fixture (class + sb pytest fixture)")
print(" --cm / --context-manager (SB context manager)")
print(" --dc / --driver-context (DriverContext manager)")
print(" --dm / --driver-manager (Driver manager)")
print(" Output:")
print(" Creates a new SBase test file with boilerplate code.")
print(" If the file already exists, an error is raised.")
print(" By default, uses English with BaseCase inheritance,")
print(" and creates a boilerplate with common SeleniumBase")
print(' methods: "open", "type", "click", "assert_element",')
print(' and "assert_text". If using the basic boilerplate')
print(' option, only the "open" method is included. Only the')
print(" BaseCase format supports Languages or Recorder Mode.")
print("")
def show_mkrec_usage():
c2 = colorama.Fore.BLUE + colorama.Back.LIGHTGREEN_EX
c3 = colorama.Fore.BLUE + colorama.Back.LIGHTYELLOW_EX
cr = colorama.Style.RESET_ALL
sc = " " + c2 + "** " + c3 + "mkrec" + c2 + " **" + cr
print(sc)
print("")
print(" Usage:")
print(" seleniumbase mkrec [FILE.py] [OPTIONS]")
print(" OR: sbase mkrec [FILE.py] [OPTIONS]")
print(" Examples:")
print(" sbase mkrec new_test.py")
print(" sbase mkrec new_test.py --url=wikipedia.org")
print(" Options:")
print(" --url=URL (Sets the initial start page URL.)")
print(" --edge (Use Edge browser instead of Chrome.)")
print(" --gui / --headed (Use headed mode on Linux.)")
print(" --uc / --undetected (Use undetectable mode.)")
print(" --overwrite (Overwrite file when it exists.)")
print(" --behave (Also output Behave/Gherkin files.)")
print(" Output:")
print(" Creates a new SeleniumBase test using the Recorder.")
print(" If the filename already exists, an error is raised.")
print("")
def show_codegen_usage():
c2 = colorama.Fore.BLUE + colorama.Back.LIGHTGREEN_EX
c3 = colorama.Fore.BLUE + colorama.Back.LIGHTYELLOW_EX
cr = colorama.Style.RESET_ALL
sc = " " + c2 + "** " + c3 + "codegen" + c2 + " **" + cr
print(sc)
print("")
print(" Usage:")
print(" seleniumbase codegen [FILE.py] [OPTIONS]")
print(" OR: sbase codegen [FILE.py] [OPTIONS]")
print(" Examples:")
print(" sbase codegen new_test.py")
print(" sbase codegen new_test.py --url=wikipedia.org")
print(" Options:")
print(" --url=URL (Sets the initial start page URL.)")
print(" --edge (Use Edge browser instead of Chrome.)")
print(" --gui / --headed (Use headed mode on Linux.)")
print(" --uc / --undetected (Use undetectable mode.)")
print(" --overwrite (Overwrite file when it exists.)")
print(" --behave (Also output Behave/Gherkin files.)")
print(" Output:")
print(" Creates a new SeleniumBase test using the Recorder.")
print(" If the filename already exists, an error is raised.")
print("")
def show_recorder_usage():
c2 = colorama.Fore.BLUE + colorama.Back.LIGHTGREEN_EX
c3 = colorama.Fore.BLUE + colorama.Back.LIGHTYELLOW_EX
cr = colorama.Style.RESET_ALL
sc = " " + c2 + "** " + c3 + "recorder" + c2 + " **" + cr
print(sc)
print("")
print(" Usage:")
print(" seleniumbase recorder [OPTIONS]")
print(" OR: sbase recorder [OPTIONS]")
print(" Options:")
print(" --uc / --undetected (Use undetectable mode.)")
print(" --behave (Also output Behave/Gherkin files.)")
print(" Output:")
print(" Launches the SeleniumBase Recorder Desktop App.")
print("")
def show_mkpres_usage():
c2 = colorama.Fore.BLUE + colorama.Back.LIGHTGREEN_EX
c3 = colorama.Fore.BLUE + colorama.Back.LIGHTYELLOW_EX
cr = colorama.Style.RESET_ALL
sc = " " + c2 + "** " + c3 + "mkpres" + c2 + " **" + cr
print(sc)
print("")
print(" Usage:")
print(" seleniumbase mkpres [FILE.py] [LANG]")
print(" OR: sbase mkpres [FILE.py] [LANG]")
print(" Example:")
print(" sbase mkpres new_presentation.py --en")
print(" Language Options:")
print(" --en / --English | --zh / --Chinese")
print(" --nl / --Dutch | --fr / --French")
print(" --it / --Italian | --ja / --Japanese")
print(" --ko / --Korean | --pt / --Portuguese")
print(" --ru / --Russian | --es / --Spanish")
print(" Output:")
print(" Creates a new presentation with 3 example slides.")
print(" If the file already exists, an error is raised.")
print(" By default, the slides are written in English,")
print(' and use "serif" theme with "slide" transition.')
print(" The slides can be used as a basic boilerplate.")
print("")
def show_mkchart_usage():
c2 = colorama.Fore.BLUE + colorama.Back.LIGHTGREEN_EX
c3 = colorama.Fore.BLUE + colorama.Back.LIGHTYELLOW_EX
cr = colorama.Style.RESET_ALL
sc = " " + c2 + "** " + c3 + "mkchart" + c2 + " **" + cr
print(sc)
print("")
print(" Usage:")
print(" seleniumbase mkchart [FILE.py] [LANG]")
print(" OR: sbase mkchart [FILE.py] [LANG]")
print(" Example:")
print(" sbase mkchart new_chart.py --en")
print(" Language Options:")
print(" --en / --English | --zh / --Chinese")
print(" --nl / --Dutch | --fr / --French")
print(" --it / --Italian | --ja / --Japanese")
print(" --ko / --Korean | --pt / --Portuguese")
print(" --ru / --Russian | --es / --Spanish")
print(" Output:")
print(" Creates a new SeleniumBase chart presentation.")
print(" If the file already exists, an error is raised.")
print(" By default, the slides are written in English,")
print(' and use a "sky" theme with "slide" transition.')
print(" The chart can be used as a basic boilerplate.")
print("")
def show_convert_usage():
c2 = colorama.Fore.BLUE + colorama.Back.LIGHTGREEN_EX
c3 = colorama.Fore.BLUE + colorama.Back.LIGHTYELLOW_EX
cr = colorama.Style.RESET_ALL
sc = " " + c2 + "** " + c3 + "convert" + c2 + " **" + cr
print(sc)
print("")
print(" Usage:")
print(" seleniumbase convert [WEBDRIVER_UNITTEST_FILE.py]")
print(" OR: sbase convert [WEBDRIVER_UNITTEST_FILE.py]")
print(" Output:")
print(" Converts a Selenium IDE exported WebDriver unittest")
print(" file into a SeleniumBase file. Adds _SB to the new")
print(" file name while keeping the original file intact.")
print(" (Works with Katalon Recorder Selenium scripts.)")
print("")
def show_print_usage():
c2 = colorama.Fore.BLUE + colorama.Back.LIGHTGREEN_EX
c3 = colorama.Fore.BLUE + colorama.Back.LIGHTYELLOW_EX
cr = colorama.Style.RESET_ALL
sc = " " + c2 + "** " + c3 + "print" + c2 + " **" + cr
print(sc)
print("")
print(" Usage:")
print(" seleniumbase print [FILE] [OPTIONS]")
print(" OR: sbase print [FILE] [OPTIONS]")
print(" Options:")
print(" -n (Add line Numbers to the rows)")
print(" Output:")
print(" Prints the code/text of any file")
print(" with syntax-highlighting.")
print("")
def show_translate_usage():
c2 = colorama.Fore.BLUE + colorama.Back.LIGHTGREEN_EX
c3 = colorama.Fore.BLUE + colorama.Back.LIGHTYELLOW_EX
cr = colorama.Style.RESET_ALL
sc = " " + c2 + "** " + c3 + "translate" + c2 + " **" + cr
print(sc)
print("")
print(" Usage:")
print(" seleniumbase translate [SB_FILE.py] [LANG] [ACTION]")
print(" OR: sbase translate [SB_FILE.py] [LANG] [ACTION]")
print(" Languages:")
print(" --en / --English | --zh / --Chinese")
print(" --nl / --Dutch | --fr / --French")
print(" --it / --Italian | --ja / --Japanese")
print(" --ko / --Korean | --pt / --Portuguese")
print(" --ru / --Russian | --es / --Spanish")
print(" Actions:")
print(" -p / --print (Print translation output to the screen)")
print(" -o / --overwrite (Overwrite the file being translated)")
print(" -c / --copy (Copy the translation to a new .py file)")
print(" Options:")
print(" -n (include line Numbers when using the Print action)")
print(" Output:")
print(" Translates a SeleniumBase Python file into the language")
print(' specified. Method calls and "import" lines get swapped.')
print(" Both a language and an action must be specified.")
print(' The "-p" action can be paired with one other action.')
print(' When running with "-c" (or "--copy"), the new file name')
print(" will be the original name appended with an underscore")
print(" plus the 2-letter language code of the new language.")
print(' (Example: Translating "test_1.py" into Japanese with')
print(' "-c" will create a new file called "test_1_ja.py".)')
print("")
def show_extract_objects_usage():
c2 = colorama.Fore.BLUE + colorama.Back.LIGHTGREEN_EX
c3 = colorama.Fore.BLUE + colorama.Back.LIGHTYELLOW_EX
cr = colorama.Style.RESET_ALL
sc = " " + c2 + "** " + c3 + "extract-objects" + c2 + " **" + cr
print(sc)
print("")
print(" Usage:")
print(" seleniumbase extract-objects [SB_FILE.py]")
print(" OR: sbase extract-objects [SB_FILE.py]")
print(" Output:")
print(" Creates page objects based on selectors found in a")
print(" seleniumbase Python file and saves those objects to the")
print(' "page_objects.py" file in the same folder as the tests.')
print("")
def show_inject_objects_usage():
c2 = colorama.Fore.BLUE + colorama.Back.LIGHTGREEN_EX
c3 = colorama.Fore.BLUE + colorama.Back.LIGHTYELLOW_EX
cr = colorama.Style.RESET_ALL
sc = " " + c2 + "** " + c3 + "inject-objects" + c2 + " **" + cr
print(sc)
print("")
print(" Usage:")
print(" seleniumbase inject-objects [SB_FILE.py] [OPTIONS]")
print(" OR: sbase inject-objects [SB_FILE.py] [OPTIONS]")
print(" Options:")
print(" -c, --comments (Add object selectors to the comments.)")
print(" (Default: No added comments.)")
print(" Output:")
print(' Takes the page objects found in the "page_objects.py"')
print(" file and uses those to replace matching selectors in")
print(" the selected seleniumbase Python file.")
print("")
def show_objectify_usage():
c2 = colorama.Fore.BLUE + colorama.Back.LIGHTGREEN_EX
c3 = colorama.Fore.BLUE + colorama.Back.LIGHTYELLOW_EX
cr = colorama.Style.RESET_ALL
sc = " " + c2 + "** " + c3 + "objectify" + c2 + " **" + cr
print(sc)
print("")
print(" Usage:")
print(" seleniumbase objectify [SB_FILE.py] [OPTIONS]")
print(" OR: sbase objectify [SB_FILE.py] [OPTIONS]")
print(" Options:")
print(" -c, --comments (Add object selectors to the comments.)")
print(" (Default: No added comments.)")
print(" Output:")
print(" A modified version of the file where the selectors")
print(" have been replaced with variable names defined in")
print(' "page_objects.py", supporting the Page Object Pattern.')
print("")
print(' (seleniumbase "objectify" has the same outcome as')
print(' combining "extract-objects" with "inject-objects")')
print("")
def show_revert_objects_usage():
c2 = colorama.Fore.BLUE + colorama.Back.LIGHTGREEN_EX
c3 = colorama.Fore.BLUE + colorama.Back.LIGHTYELLOW_EX
cr = colorama.Style.RESET_ALL
sc = " " + c2 + "** " + c3 + "revert-objects" + c2 + " **" + cr
print(sc)
print("")
print(" Usage:")
print(" seleniumbase revert-objects [SB_FILE.py] [OPTIONS]")
print(" OR: sbase revert-objects [SB_FILE.py] [OPTIONS]")
print(" Options:")
print(" -c, --comments (Keep existing comments for the lines.)")
print(" (Default: No comments are kept.)")
print(" Output:")
print(' Reverts the changes made by "seleniumbase objectify" or')
print(' "seleniumbase inject-objects" when run against a')
print(" seleniumbase Python file. Objects will get replaced by")
print(' selectors stored in the "page_objects.py" file.')
print("")
def show_encrypt_usage():
c2 = colorama.Fore.BLUE + colorama.Back.LIGHTGREEN_EX
c3 = colorama.Fore.BLUE + colorama.Back.LIGHTYELLOW_EX
cr = colorama.Style.RESET_ALL
sc = " " + c2 + "** " + c3 + "encrypt OR obfuscate" + c2 + " **" + cr
print(sc)
print("")
print(" Usage:")
print(" seleniumbase encrypt || seleniumbase obfuscate")
print(" --OR--")
print(" sbase encrypt || sbase obfuscate")
print(" Output:")
print(" Runs the password encryption/obfuscation tool.")
print(" (Where you can enter a password to encrypt/obfuscate.)")
print("")
def show_decrypt_usage():
c2 = colorama.Fore.BLUE + colorama.Back.LIGHTGREEN_EX
c3 = colorama.Fore.BLUE + colorama.Back.LIGHTYELLOW_EX
cr = colorama.Style.RESET_ALL
sc = " " + c2 + "** " + c3 + "decrypt OR unobfuscate" + c2 + " **" + cr
print(sc)
print("")
print(" Usage:")
print(" seleniumbase decrypt || seleniumbase unobfuscate")
print(" --OR--")
print(" sbase decrypt || sbase unobfuscate")
print(" Output:")
print(" Runs the password decryption/unobfuscation tool.")
print(" (Where you can enter an encrypted password to decrypt.)")
print("")
def show_download_usage():
c2 = colorama.Fore.BLUE + colorama.Back.LIGHTGREEN_EX
c3 = colorama.Fore.BLUE + colorama.Back.LIGHTYELLOW_EX
cr = colorama.Style.RESET_ALL
sc = " " + c2 + "** " + c3 + "download" + c2 + " **" + cr
print(sc)
print("")
print(" Usage:")
print(" seleniumbase download server")
print(" OR: sbase download server")
print(" Output:")
print(" Downloads the Selenium Standalone Server.")
print(" (Server is required for using your own Selenium Grid.)")
print("")
def show_grid_hub_usage():
c2 = colorama.Fore.BLUE + colorama.Back.LIGHTGREEN_EX
c3 = colorama.Fore.BLUE + colorama.Back.LIGHTYELLOW_EX
cr = colorama.Style.RESET_ALL
sc = " " + c2 + "** " + c3 + "grid-hub" + c2 + " **" + cr
print(sc)
print("")
print(" Usage:")
print(" seleniumbase grid-hub {start|stop|restart} [OPTIONS]")
print(" OR: sbase grid-hub {start|stop|restart} [OPTIONS]")
print(" Options:")
print(" -v, --verbose (Increase verbosity of logging output.)")
print(" (Default: Quiet logging / not verbose.)")
print(" --timeout=TIMEOUT (Close idle browser after TIMEOUT.)")
print(" (The default TIMEOUT: 230 seconds.)")
print(" (Use --timeout=0 to skip timeouts.)")
print(" Example:")
print(" seleniumbase grid-hub start")
print(" Output:")
print(" Controls the Selenium Grid Hub Server, which allows")
print(" for running tests on multiple machines in parallel")
print(" to speed up test runs and reduce the total time")
print(" of test suite execution.")
print(' You can "start" or "stop" the Grid Hub server.')
print("")
def show_grid_node_usage():
c2 = colorama.Fore.BLUE + colorama.Back.LIGHTGREEN_EX
c3 = colorama.Fore.BLUE + colorama.Back.LIGHTYELLOW_EX
cr = colorama.Style.RESET_ALL
sc = " " + c2 + "** " + c3 + "grid-node" + c2 + " **" + cr
print(sc)
print("")
print(" Usage:")
print(" seleniumbase grid-node {start|stop|restart} [OPTIONS]")
print(" OR: sbase grid-node {start|stop|restart} [OPTIONS]")
print(" Options:")
print(" --hub=[HOST/IP] (The Grid Hub Hostname / IP Address.)")
print(" (Default: 127.0.0.1 if not set.)")
print(" -v, --verbose (Increase verbosity of logging output.)")
print(" (Default: Quiet logging / Not verbose.)")
print(" Example:")
print(" seleniumbase grid-node start --hub=127.0.0.1")
print(" Output:")
print(" Controls the Selenium Grid node, which serves as a")
print(" worker machine for your Selenium Grid Hub server.")
print(' You can "start" or "stop" the Grid node.')
print("")
def get_version_info():
# from pkg_resources import get_distribution
# version = get_distribution("seleniumbase").version
from seleniumbase import __version__
version_info = None
c1 = colorama.Fore.BLUE + colorama.Back.LIGHTCYAN_EX
c2 = colorama.Fore.BLUE + colorama.Back.LIGHTGREEN_EX
c3 = colorama.Fore.BLUE + colorama.Back.LIGHTYELLOW_EX
cr = colorama.Style.RESET_ALL
sb_text = c1 + "selenium" + c2 + "base" + cr
version_info = "%s %s%s%s" % (sb_text, c3, __version__, cr)
return version_info
def show_version_info():
version_info = get_version_info()
print("%s" % version_info)
def get_package_location():
# from pkg_resources import get_distribution
# location = get_distribution("seleniumbase").location
import os
import seleniumbase
location = os.path.dirname(os.path.realpath(seleniumbase.__file__))
if location.endswith("seleniumbase"):
location = location[0 : -len("seleniumbase")] # noqa: E203
return location
def show_package_location():
location = get_package_location()
print("%s" % location)
def show_methods():
c1 = colorama.Fore.BLUE + colorama.Back.LIGHTCYAN_EX
c2 = colorama.Fore.BLUE + colorama.Back.LIGHTGREEN_EX
c3 = colorama.Fore.BLUE + colorama.Back.LIGHTYELLOW_EX
c4 = colorama.Fore.MAGENTA + colorama.Back.LIGHTYELLOW_EX
c5 = colorama.Fore.LIGHTRED_EX + colorama.Back.LIGHTGREEN_EX
cr = colorama.Style.RESET_ALL
sc = (
"\n " + c2 + " ** " + c3 + " SeleniumBase Python Methods "
"" + c2 + " ** " + cr
)
print(sc)
print("")
line = "Here are some common methods that come with SeleniumBase:"
line = c1 + line + cr
print(line)
line = "(Some optional args are not shown here)"
print(line)
print("")
sbm = ""
sbm += "*.open(url) => Navigate the browser window to the URL.\n"
sbm += "*.type(selector, text) => Update the field with the text.\n"
sbm += "*.click(selector) => Click the element with the selector.\n"
sbm += "*.click_link(link_text) => Click the link containing text.\n"
sbm += "*.check_if_unchecked(selector) => Check checkbox if unchecked.\n"
sbm += "*.uncheck_if_checked(selector) => Uncheck checkbox if checked.\n"
sbm += "*.select_option_by_text(dropdown_selector, option)\n"
sbm += "*.hover_and_click(hover_selector, click_selector)\n"
sbm += "*.drag_and_drop(drag_selector, drop_selector)\n"
sbm += "*.choose_file(selector, file_path) => Choose a file to upload.\n"
sbm += "*.get_text(selector) => Get the text from the element.\n"
sbm += "*.get_current_url() => Get the URL of the current page.\n"
sbm += "*.get_page_source() => Get the HTML of the current page.\n"
sbm += "*.get_attribute(selector, attribute) => Get element attribute.\n"
sbm += "*.get_title() => Get the title of the current page.\n"
sbm += "*.go_back() => Navigate to the previous page in history.\n"
sbm += "*.switch_to_frame(frame) => Switch into the iframe container.\n"
sbm += "*.switch_to_default_content() => Exit all iframe containers.\n"
sbm += "*.switch_to_parent_frame() => Exit from the current iframe.\n"
sbm += "*.open_new_window() => Open a new window in the same browser.\n"
sbm += "*.switch_to_window(window) => Switch to the browser window.\n"
sbm += "*.switch_to_default_window() => Switch to the original window.\n"
sbm += "*.get_new_driver(OPTIONS) => Open a new driver with OPTIONS.\n"
sbm += "*.switch_to_driver(driver) => Switch to the browser driver.\n"
sbm += "*.switch_to_default_driver() => Switch to the original driver.\n"
sbm += "*.wait_for_element(selector) => Wait until element is visible.\n"
sbm += "*.wait_for_element_present(selector) => Until element in HTML.\n"
sbm += "*.is_element_visible(selector) => Return element visibility.\n"
sbm += "*.is_element_present(selector) => Return element is in HTML.\n"
sbm += "*.is_text_visible(text, selector) => Return text visibility.\n"
sbm += "*.is_checked(selector) => Return whether the box is checked.\n"
sbm += "*.sleep(seconds) => Do nothing for the given amount of time.\n"
sbm += "*.save_screenshot(name) => Save a screenshot in .png format.\n"
sbm += "*.assert_element(selector) => Verify the element is visible.\n"
sbm += "*.assert_text(text, selector) => Verify text in the element.\n"
sbm += "*.assert_exact_text(text, selector) => Verify text is exact.\n"
sbm += "*.assert_url(url) => Verify that the current URL is the URL.\n"
sbm += "*.assert_url_contains(substring) => Verify substring in URL.\n"
sbm += "*.assert_title(title) => Verify the title of the web page.\n"
sbm += "*.assert_title_contains(substring) => Verify STR in title.\n"
sbm += "*.assert_downloaded_file(file) => Verify file was downloaded.\n"
sbm += "*.assert_no_404_errors() => Verify there are no broken links.\n"
sbm += "*.assert_no_js_errors() => Verify there are no JS errors.\n"
sbm = sbm.replace("*.", "self." + c1).replace("(", cr + "(")
sbm = sbm.replace("self.", c2 + "self" + c5 + "." + cr)
sbm = sbm.replace("(", c3 + "(" + c4)
sbm = sbm.replace(")", c3 + ")" + cr)
print(sbm)
def show_options():
c1 = colorama.Fore.BLUE + colorama.Back.LIGHTCYAN_EX
c2 = colorama.Fore.BLUE + colorama.Back.LIGHTGREEN_EX
c3 = colorama.Fore.BLUE + colorama.Back.LIGHTYELLOW_EX
c4 = colorama.Fore.MAGENTA + colorama.Back.LIGHTYELLOW_EX
c5 = colorama.Fore.RED + colorama.Back.LIGHTYELLOW_EX
cr = colorama.Style.RESET_ALL
sc = "\n " + c2 + " ** " + c3 + " pytest CLI Options " + c2 + " ** " + cr
print(sc)
print("")
line = "Here are some common pytest options to use with SeleniumBase:"
line = c1 + line + cr
print(line)
line = '(Some options are Chromium-specific, e.g. "--guest --mobile")'
print(line)
op = "\n"
op += '--browser=BROWSER (Choice of web browser. Default is "chrome".)\n'
op += "--edge / --firefox / --safari (Shortcut for browser selection.)\n"
op += "--headless (Run tests headlessly. Default setting on Linux OS.)\n"
op += "--demo (Slow down and visually see test actions as they occur.)\n"
op += "--slow (Slow down the automation. Faster than using Demo Mode.)\n"
op += "--rs / --reuse-session (Reuse browser session between tests.)\n"
op += "--reuse-class-session / --rcs (RS, but for class tests only.)\n"
op += "--crumbs (Clear all cookies between tests reusing a session.)\n"
op += "--maximize (Start tests with the web browser window maximized.)\n"
op += "--dashboard (Enable SeleniumBase's Dashboard at dashboard.html)\n"
op += "--incognito (Enable Chromium's Incognito Mode.)\n"
op += "--guest (Enable Chromium's Guest Mode.)\n"
op += "--dark (Enable Chromium's Dark Mode.)\n"
op += "--uc (Use undetected-chromedriver to evade detection.)\n"
op += "-m=MARKER (Run tests with the specified pytest marker.)\n"
op += "-n=NUM (Multithread the tests using that many threads.)\n"
op += "-v (Verbose mode. Print the full names of each test run.)\n"
op += "--html=report.html (Create a detailed pytest-html report.)\n"
op += "--collect-only / --co (Only show discovered tests. No run.)\n"
op += "--co -q (Only show full names of discovered tests. No run.)\n"
op += "-x (Stop running tests after the first failure is reached.)\n"
op += "--pdb (Enter the Post Mortem Debug Mode after any test fails.)\n"
op += "--trace (Enter Debug Mode immediately after starting any test.)\n"
op += " | Debug Mode Commands >>> help / h: List all commands. |\n"
op += " | n: Next line of method. s: Step through. c: Continue. |\n"
op += " | return / r: Run until method returns. j: Jump to line. |\n"
op += " | where / w: Show stack spot. u: Up stack. d: Down stack. |\n"
op += " | longlist / ll: See code. dir(): List namespace objects. |\n"
op += "--help / -h (Display list of all available pytest options.)\n"
op += "--final-debug (Enter Final Debug Mode after each test ends.)\n"
op += "--recorder / --rec (Save browser actions as Python scripts.)\n"
op += "--rec-behave / --rec-gherkin (Save actions as Gherkin code.)\n"
op += "--rec-print (Display recorded scripts when they are created.)\n"
op += "--save-screenshot (Save a screenshot at the end of each test.)\n"
op += "--archive-logs (Archive logs after tests to prevent deletion.)\n"
op += "--check-js (Check for JavaScript errors after page loads.)\n"
op += "--start-page=URL (The browser start page when tests begin.)\n"
op += "--agent=STRING (Modify the web browser's User-Agent string.)\n"
op += "--mobile (Use Chromium's mobile device emulator during tests.)\n"
op += '--metrics=STRING (Set mobile "CSSWidth,CSSHeight,PixelRatio".)\n'
op += "--ad-block (Block certain types of iframe ads from appearing.)\n"
op += "--settings-file=FILE (Override default SeleniumBase settings.)\n"
op += '--env=ENV (Set the test env. Access with "self.env" in tests.)\n'
op += '--data=DATA (Extra test data. Access with "self.data" in tests.)\n'
op += "--disable-csp (Disable the Content Security Policy of websites.)\n"
op += "--remote-debug (Sync to Ch-R-Debugger chrome://inspect/#devices)\n"
op += "--server=SERVER (The Selenium Grid server/IP used for tests.)\n"
op += "--port=PORT (The Selenium Grid port used by the test server.)\n"
op += "--proxy=SERVER:PORT (Connect to a proxy server:port for tests.)\n"
op += "--proxy=USER:PASS@SERVER:PORT (Use authenticated proxy server.)\n"
op += cr
op = op.replace("\n-", "\n" + c1 + "-").replace(" (", cr + " (")
op = op.replace(" / -", cr + " / " + c1 + "-")
op = op.replace("=", c2 + "=" + c3)
op = op.replace(" | ", " |" + c3 + " ").replace("|\n", cr + "|\n")
op = op.replace(": ", c5 + ":" + c3 + " ")
op = op.replace("Debug Mode Commands", c5 + "Debug Mode Commands" + c3)
op = op.replace(">>>", c4 + ">>>" + c3)
print(op)
line = "For the full list of " + c2 + "command-line options" + cr
line += ', type: "' + c3 + "pytest" + cr + " " + c1 + "--help" + cr + '".'
print(line)
print("")
def show_behave_options():
c1 = colorama.Fore.BLUE + colorama.Back.LIGHTCYAN_EX
c2 = colorama.Fore.BLUE + colorama.Back.LIGHTGREEN_EX
c3 = colorama.Fore.BLUE + colorama.Back.LIGHTYELLOW_EX
c4 = colorama.Fore.MAGENTA + colorama.Back.LIGHTYELLOW_EX
c5 = colorama.Fore.RED + colorama.Back.LIGHTYELLOW_EX
cr = colorama.Style.RESET_ALL
sc = "\n " + c2 + " ** " + c3 + " Behave CLI Options " + c2 + " ** " + cr
print(sc)
print("")
line = 'Here are some common "behave" options to use with SeleniumBase:'
line = c1 + line + cr
print(line)
line = '(Some options are Chromium-specific, e.g. "-D guest -D mobile")'
print(line)
op = "\n"
op += '-D browser=BROWSER (The web browser to use. Default is "chrome")\n'
op += "-D headless (Run tests headlessly. Default mode on Linux OS.)\n"
op += "-D demo (Slow down and visually see test actions as they occur.)\n"
op += "-D slow (Slow down the automation. Faster than using Demo Mode.)\n"
op += "-D reuse-session / -D rs (Reuse browser session between tests.)\n"
op += "-D crumbs (Clear all cookies between tests reusing a session.)\n"
op += "-D maximize (Start tests with the web browser window maximized.)\n"
op += "-D dashboard (Enable SeleniumBase's Dashboard at dashboard.html)\n"
op += "-D incognito (Enable Chromium's Incognito Mode.)\n"
op += "-D guest (Enable Chromium's Guest Mode.)\n"
op += "-D dark (Enable Chromium's Dark Mode.)\n"
op += "-D uc (Use undetected-chromedriver to evade detection.)\n"
op += "--no-snippets / -q (Quiet mode. Don't print snippets.)\n"
op += "--dry-run / -d (Dry run. Only show discovered tests.)\n"
op += "--stop (Stop running tests after the first failure is reached.)\n"
op += "-D pdb (Enter the Post Mortem Debug Mode after any test fails.)\n"
op += " | Debug Mode Commands >>> help / h: List all commands. |\n"
op += " | n: Next line of method. s: Step through. c: Continue. |\n"
op += " | return / r: Run until method returns. j: Jump to line. |\n"
op += " | where / w: Show stack spot. u: Up stack. d: Down stack. |\n"
op += " | longlist / ll: See code. dir(): List namespace objects. |\n"
op += "-D recorder (Record browser actions to generate test scripts.)\n"
op += "-D rec-print (Display recorded scripts when they are created.)\n"
op += "-D save-screenshot (Save a screenshot at the end of each test.)\n"
op += "-D archive-logs (Archive log files instead of deleting them.)\n"
op += "-D check-js (Check for JavaScript errors after page loads.)\n"
op += "-D start-page=URL (The browser start page when tests begin.)\n"
op += "-D agent=STRING (Modify the web browser's User-Agent string.)\n"
op += "-D mobile (Use Chromium's mobile device emulator during tests.)\n"
op += '-D metrics=STRING (Set mobile "CSSWidth,CSSHeight,PixelRatio".)\n'
op += "-D ad-block (Block some types of display ads after page loads.)\n"
op += "-D settings-file=FILE (Override default SeleniumBase settings.)\n"
op += '-D env=ENV (Set the test env. Access using "self.env" in tests.)\n'
op += '-D data=DATA (Extra test data. Access using "self.data".)\n'
op += "-D disable-csp (Disable the Content Security Policy of sites.)\n"
op += "-D remote-debug (Sync Ch-R-Debugger chrome://inspect/#devices)\n"
op += "-D server=SERVER (The Selenium Grid server/IP used for tests.)\n"
op += "-D port=PORT (The Selenium Grid port used by the test server.)\n"
op += "-D proxy=SERVER:PORT (Connect to a proxy server:port for tests.)\n"
op += "-D proxy=USER:PASS@SERVER:PORT (Use authenticated proxy server.)\n"
op += cr
op = op.replace("\n-", "\n" + c1 + "-").replace(" (", cr + " (")
op = op.replace(" / -", cr + " / " + c1 + "-")
op = op.replace("=", c2 + "=" + c3)
op = op.replace(" | ", " |" + c3 + " ").replace("|\n", cr + "|\n")
op = op.replace(": ", c5 + ":" + c3 + " ")
op = op.replace("Debug Mode Commands", c5 + "Debug Mode Commands" + c3)
op = op.replace(">>>", c4 + ">>>" + c3)
print(op)
line = "For the full list of " + c2 + "command-line options" + cr
line += ', type: "' + c3 + "behave" + cr + " " + c1 + "--help" + cr + '".'
print(line)
print("")
def show_detailed_help():
c2 = colorama.Fore.BLUE + colorama.Back.LIGHTGREEN_EX
c3 = colorama.Fore.BLUE + colorama.Back.LIGHTYELLOW_EX
c6 = colorama.Back.CYAN
cr = colorama.Style.RESET_ALL
show_basic_usage()
print(c6 + " " + c2 + " Commands: " + c6 + " ")
print(cr)
show_install_usage()
show_commander_usage()
show_behave_gui_usage()
show_caseplans_usage()
show_mkdir_usage()
show_mkfile_usage()
show_mkrec_usage()
show_codegen_usage()
show_recorder_usage()
show_mkpres_usage()
show_mkchart_usage()
show_convert_usage()
show_print_usage()
show_translate_usage()
show_extract_objects_usage()
show_inject_objects_usage()
show_objectify_usage()
show_revert_objects_usage()
show_encrypt_usage()
show_decrypt_usage()
show_download_usage()
show_grid_hub_usage()
show_grid_node_usage()
print('* (Use "' + c3 + "pytest" + cr + '" for running tests) *\n')
def main():
command = None
command_args = None
num_args = len(sys.argv)
if num_args == 1:
show_usage()
return
elif num_args == 2:
command = sys.argv[1]
command_args = []
elif num_args > 2:
command = sys.argv[1]
command_args = sys.argv[2:]
command = command.lower()
if command == "get" or command == "install":
if len(command_args) >= 1:
from seleniumbase.console_scripts import sb_install
need_retry = False
need_another_retry = False
retry_msg_1 = "* Unable to download driver! Retrying in 3s..."
retry_msg_2 = "** Unable to download driver! Retrying in 5s..."
if " --proxy=" in " ".join(sys.argv):
from seleniumbase.core import proxy_helper
for arg in sys.argv:
if arg.startswith("--proxy="):
proxy_string = arg.split("--proxy=")[1]
if "@" in proxy_string:
proxy_string = proxy_string.split("@")[1]
proxy_helper.validate_proxy_string(proxy_string)
break
try:
settings.HIDE_DRIVER_DOWNLOADS = False
sb_install.main()
except Exception as e:
invalid_run_cmd = constants.Warnings.INVALID_RUN_COMMAND
if invalid_run_cmd in str(e):
raise
print()
print(retry_msg_1)
time.sleep(3)
print()
need_retry = True
if need_retry:
try:
sb_install.main()
except Exception:
print(retry_msg_2)
time.sleep(5)
print()
need_another_retry = True
if need_another_retry:
sb_install.main()
else:
show_basic_usage()
show_install_usage()
elif command == "commander" or command == "gui":
from seleniumbase.console_scripts import sb_commander
sb_commander.main()
elif command == "behave-gui" or command == "gui-behave":
from seleniumbase.console_scripts import sb_behave_gui
sb_behave_gui.main()
elif (
command == "caseplans"
or command == "case-plans"
or command == "case_plans"
):
from seleniumbase.console_scripts import sb_caseplans
sb_caseplans.main()
elif (
command == "recorder"
or (command == "record" and len(command_args) == 0)
):
from seleniumbase.console_scripts import sb_recorder
sb_recorder.main()
elif (
command == "mkrec"
or command == "codegen"
or (command == "record" and len(command_args) >= 1)
):
if len(command_args) >= 1:
from seleniumbase.console_scripts import sb_mkrec
sb_mkrec.main()
else:
show_basic_usage()
if command == "codegen":
show_codegen_usage()
else:
show_mkrec_usage()
elif command == "mkdir":
if len(command_args) >= 1:
from seleniumbase.console_scripts import sb_mkdir
sb_mkdir.main()
else:
show_basic_usage()
show_mkdir_usage()
elif command == "mkfile":
if len(command_args) >= 1:
from seleniumbase.console_scripts import sb_mkfile
sb_mkfile.main()
else:
show_basic_usage()
show_mkfile_usage()
elif command == "mkpres":
if len(command_args) >= 1:
from seleniumbase.console_scripts import sb_mkpres
sb_mkpres.main()
else:
show_basic_usage()
show_mkpres_usage()
elif command == "mkchart":
if len(command_args) >= 1:
from seleniumbase.console_scripts import sb_mkchart
sb_mkchart.main()
else:
show_basic_usage()
show_mkchart_usage()
elif command == "convert":
if len(command_args) == 1:
from seleniumbase.utilities.selenium_ide import convert_ide
convert_ide.main()
else:
show_basic_usage()
show_convert_usage()
elif command == "print":
if len(command_args) >= 1:
from seleniumbase.console_scripts import sb_print
sb_print.main()
else:
show_basic_usage()
show_print_usage()
elif command == "translate":
if len(command_args) >= 1:
if sys.version_info[0] == 2:
c5 = colorama.Fore.RED + colorama.Back.LIGHTYELLOW_EX
cr = colorama.Style.RESET_ALL
msg = "The SeleniumBase Translator does NOT support Python 2!"
message = "\n" + c5 + msg + cr + "\n"
print("")
raise Exception(message)
from seleniumbase.translate import translator
translator.main()
else:
show_basic_usage()
show_translate_usage()
elif command == "extract-objects" or command == "extract_objects":
if len(command_args) >= 1:
from seleniumbase.console_scripts import sb_objectify
sb_objectify.extract_objects()
else:
show_basic_usage()
show_extract_objects_usage()
elif command == "inject-objects" or command == "inject_objects":
if len(command_args) >= 1:
from seleniumbase.console_scripts import sb_objectify
sb_objectify.inject_objects()
else:
show_basic_usage()
show_inject_objects_usage()
elif command == "objectify":
if len(command_args) >= 1:
from seleniumbase.console_scripts import sb_objectify
sb_objectify.objectify()
else:
show_basic_usage()
show_objectify_usage()
elif command == "revert-objects" or command == "revert_objects":
if len(command_args) >= 1:
from seleniumbase.console_scripts import sb_objectify
sb_objectify.revert_objects()
else:
show_basic_usage()
show_revert_objects_usage()
elif command == "encrypt" or command == "obfuscate":
if len(command_args) >= 0:
from seleniumbase.common import obfuscate
obfuscate.main()
else:
show_basic_usage()
show_encrypt_usage()
elif command == "decrypt" or command == "unobfuscate":
if len(command_args) >= 0:
from seleniumbase.common import unobfuscate
unobfuscate.main()
else:
show_basic_usage()
show_decrypt_usage()
elif command == "download":
if len(command_args) >= 1 and command_args[0].lower() == "server":
from seleniumbase.utilities.selenium_grid import (
download_selenium_server,
)
download_selenium_server.main(force_download=True)
else:
show_basic_usage()
show_download_usage()
elif command == "grid-hub" or command == "grid_hub":
if len(command_args) >= 1:
from seleniumbase.utilities.selenium_grid import grid_hub
grid_hub.main()
else:
show_basic_usage()
show_grid_hub_usage()
elif command == "grid-node" or command == "grid_node":
if len(command_args) >= 1:
from seleniumbase.utilities.selenium_grid import grid_node
grid_node.main()
else:
show_basic_usage()
show_grid_node_usage()
elif command == "version" or command == "--version":
if len(command_args) == 0:
from seleniumbase.console_scripts import logo_helper
seleniumbase_logo = logo_helper.get_seleniumbase_logo()
print(seleniumbase_logo)
print("")
show_package_location()
show_version_info()
print("")
else:
show_basic_usage()
elif command == "methods" or command == "--methods":
show_methods()
elif command == "options" or command == "--options":
show_options()
elif command == "behave-options" or command == "--behave-options":
show_behave_options()
elif command == "help" or command == "--help":
if len(command_args) >= 1:
if command_args[0] == "get":
print("")
show_install_usage()
return
elif command_args[0] == "install":
print("")
show_install_usage()
return
elif command_args[0] == "commander":
print("")
show_commander_usage()
return
elif command_args[0] == "gui":
print("")
show_commander_usage()
return
elif command_args[0] == "behave-gui":
print("")
show_behave_gui_usage()
return
elif command_args[0] == "gui-behave":
print("")
show_behave_gui_usage()
return
elif command_args[0] == "caseplans":
print("")
show_caseplans_usage()
return
elif command_args[0] == "case-plans":
print("")
show_caseplans_usage()
return
elif command_args[0] == "case_plans":
print("")
show_caseplans_usage()
return
elif command_args[0] == "mkdir":
print("")
show_mkdir_usage()
return
elif command_args[0] == "mkfile":
print("")
show_mkfile_usage()
return
elif command_args[0] == "mkrec":
print("")
show_mkrec_usage()
return
elif command_args[0] == "codegen":
print("")
show_codegen_usage()
return
elif command_args[0] == "recorder":
print("")
show_recorder_usage()
return
elif command_args[0] == "mkpres":
print("")
show_mkpres_usage()
return
elif command_args[0] == "mkchart":
print("")
show_mkchart_usage()
return
elif command_args[0] == "convert":
print("")
show_convert_usage()
return
elif command_args[0] == "print":
print("")
show_print_usage()
return
elif command_args[0] == "translate":
print("")
show_translate_usage()
return
elif command_args[0] == "extract-objects":
print("")
show_extract_objects_usage()
return
elif command_args[0] == "inject-objects":
print("")
show_inject_objects_usage()
return
elif command_args[0] == "objectify":
print("")
show_objectify_usage()
return
elif command_args[0] == "revert-objects":
print("")
show_revert_objects_usage()
return
elif command_args[0] == "encrypt":
print("")
show_encrypt_usage()
return
elif command_args[0] == "obfuscate":
print("")
show_encrypt_usage()
return
elif command_args[0] == "decrypt":
print("")
show_decrypt_usage()
return
elif command_args[0] == "unobfuscate":
print("")
show_decrypt_usage()
return
elif command_args[0] == "download":
print("")
show_download_usage()
return
elif command_args[0] == "grid-hub":
print("")
show_grid_hub_usage()
return
elif command_args[0] == "grid-node":
print("")
show_grid_node_usage()
return
show_detailed_help()
else:
show_usage()
colorama.init(autoreset=True)
c5 = colorama.Fore.RED + colorama.Back.LIGHTYELLOW_EX
c7 = colorama.Fore.BLACK + colorama.Back.MAGENTA
cr = colorama.Style.RESET_ALL
invalid_cmd = "===> INVALID COMMAND: >> %s <<\n" % command
invalid_cmd = invalid_cmd.replace(">> ", ">>" + c5 + " ")
invalid_cmd = invalid_cmd.replace(" <<", " " + cr + "<<")
invalid_cmd = invalid_cmd.replace(">>", c7 + ">>" + cr)
invalid_cmd = invalid_cmd.replace("<<", c7 + "<<" + cr)
print(invalid_cmd)
if __name__ == "__main__":
main()
|
a47ab7f30f97f71f4baad7be8dfbb50740be59a6
|
f305f84ea6f721c2391300f0a60e21d2ce14f2a5
|
/tmp/bishi/荣耀.py
|
3cdb4281c3b1517cec9f49998c9650d0d700118d
|
[] |
no_license
|
981377660LMT/algorithm-study
|
f2ada3e6959338ae1bc21934a84f7314a8ecff82
|
7e79e26bb8f641868561b186e34c1127ed63c9e0
|
refs/heads/master
| 2023-09-01T18:26:16.525579
| 2023-09-01T12:21:58
| 2023-09-01T12:21:58
| 385,861,235
| 225
| 24
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,496
|
py
|
荣耀.py
|
# n<=1000
# q<=1000
from collections import defaultdict
from heapq import heappop, heappush
import sys
from typing import Mapping
sys.setrecursionlimit(int(1e9))
input = lambda: sys.stdin.readline().rstrip("\r\n")
MOD = 998244353
INF = int(4e18)
n, q = map(int, input().split())
adjMap = defaultdict(lambda: defaultdict(lambda: INF))
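# Nested defaultdict: absent edges read as INF, so min() below keeps only
# the shortest of any parallel edges between a pair of nodes.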
edges = []
for _ in range(2 * n - 2):
u, v, target = map(int, input().split())
u, v = u - 1, v - 1
adjMap[u][v] = min(adjMap[u][v], target)
edges.append((u, v))
def dijkstra(n: int, adjMap: Mapping[int, Mapping[int, int]], start: int, end: int) -> int:
dist = [INF] * n
dist[start] = 0
pq = [(0, start)]
while pq:
curDist, cur = heappop(pq)
if cur == end:
return curDist
if curDist > dist[cur]:
continue
        for nxt, nxtDist in adjMap[cur].items():
            if curDist + nxtDist < dist[nxt]:
                dist[nxt] = curDist + nxtDist
                heappush(pq, (dist[nxt], nxt))
return INF
# !Real-time queries for the shortest distance between two attractions
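# With n, q <= 1000, rerunning Dijkstra for each type-2 query is affordable:
# roughly O(q * E * log V) in total.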
for _ in range(q):
    kind, *rest = map(int, input().split())
    if kind == 1:
        ei, target = rest  # !Adjust the length of lane i to the new value
ei -= 1
u, v = edges[ei]
adjMap[u][v] = target
else:
        u, v = rest  # !Print the shortest distance from attraction u to attraction v
u, v = u - 1, v - 1
print(dijkstra(n, adjMap, u, v))
|
4d8087fbd70cf9ad1519b80f189b6a6f7795492f
|
1664bc3e55c0e006c8bbf8671a2ba0043dc0203c
|
/mpf/commands/both.py
|
068a4705552c3a37a0e95704c0d6a087425440f3
|
[
"MIT",
"CC-BY-4.0"
] |
permissive
|
missionpinball/mpf
|
d426b0b1b865a138f169aaf852741f39a880edf2
|
9f90c8b1586363b65340017bfa3af5d56d32c6d9
|
refs/heads/dev
| 2023-07-26T21:31:11.581205
| 2023-07-15T17:06:04
| 2023-07-15T17:06:04
| 21,267,545
| 191
| 173
|
MIT
| 2023-09-14T06:07:45
| 2014-06-27T07:26:26
|
Python
|
UTF-8
|
Python
| false
| false
| 839
|
py
|
both.py
|
"""Command which launches both the MPF core engine and MPF-MC."""
from importlib import import_module
import multiprocessing
def _start_mpf(mpf_path, machine_path, args):
module = import_module('mpf.commands.game')
module.Command(mpf_path, machine_path, args)
def _start_mc(mpf_path, machine_path, args):
module = import_module('mpfmc.commands.mc')
module.Command(mpf_path, machine_path, args + ["--both"])
class Command:
"""Command which runs game and mc."""
def __init__(self, mpf_path, machine_path, args):
"""Run game and mc."""
multiprocessing.set_start_method('spawn')
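        # 'spawn' gives the child process a fresh interpreter, which is
        # generally safer than 'fork' for processes that load GUI/media libs.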
mc = multiprocessing.Process(target=_start_mc,
args=(mpf_path, machine_path, args))
mc.start()
_start_mpf(mpf_path, machine_path, args)
mc.join()
|
e58df6ab52c33ed745ceee04f48881bb86eea2da
|
94b6b2df3f6db6f80b1cdbfad2fb5c17fd3b09e3
|
/tests/test_utils.py
|
167b56bbb9e0c7c56093ab073602c2104839558e
|
[
"ISC"
] |
permissive
|
Bearle/django-private-chat
|
7904e8577183ddf93abe54a4e2bef492fcc99b3c
|
2b4a5ed388a814d7e8a7494c79ff58b1339b8fa9
|
refs/heads/dev
| 2023-08-03T11:49:30.074961
| 2021-03-21T07:02:32
| 2021-03-21T07:02:32
| 80,574,300
| 488
| 180
|
ISC
| 2022-12-26T21:30:17
| 2017-01-31T23:50:53
|
HTML
|
UTF-8
|
Python
| false
| false
| 1,027
|
py
|
test_utils.py
|
from test_plus.test import TestCase
from django_private_chat.utils import *
class TestUtilsFunctions(TestCase):
def setUp(self):
self.user1 = self.make_user(username="user1")
self.user2 = self.make_user(username="user2")
def test_get_dialogs_with_user(self):
self.dialog = Dialog()
self.dialog.owner = self.user2
self.dialog.opponent = self.user1
self.dialog.save()
dialog = get_dialogs_with_user(self.user1, self.user2)[0]
self.assertEqual(dialog, self.dialog)
# def test_get_user_from_session(self):
# sessions = Session.objects.all()
# required_session = None
# for session in sessions:
# session_data = session.get_decoded()
# uid = session_data.get('_auth_user_id')
# if uid == self.user1.id:
# required_session = session
# break
#
# user = get_user_from_session(required_session.session_key)
# self.assertEqual(user, self.user1)
|
2d8ddcae986c3d3c891065095305741a70855002
|
276dd5dd778adefd039e6f6a71dc574386729401
|
/grpc-middleware/unary-unary/server/service_test.py
|
fc568b12a4c97b176583829df90ed2241a469985
|
[
"MIT"
] |
permissive
|
amitsaha/python-grpc-demo
|
4880e64b4b993df4b7eb96f2946b6607fb2dfa82
|
48546bfda83062a3fcb015d352fecb46346e8c92
|
refs/heads/master
| 2023-01-12T10:01:36.396783
| 2022-10-08T05:10:39
| 2022-10-08T05:10:39
| 101,063,881
| 145
| 52
|
MIT
| 2022-12-27T17:26:21
| 2017-08-22T13:07:17
|
Python
|
UTF-8
|
Python
| false
| false
| 969
|
py
|
service_test.py
|
import grpc
import grpc_testing
import identity_pb2
import pytest
from .server import Identity
@pytest.fixture
def test_server():
servicers = {
identity_pb2.DESCRIPTOR.services_by_name['Identity']: Identity()
}
return grpc_testing.server_from_dictionary(
servicers, grpc_testing.strict_real_time(),
)
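# Note: grpc_testing exercises the servicer in-process (no network server).
# termination() blocks until the handler returns, then yields the response,
# trailing metadata, status code, and details.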
def test_validate_request_valid_token(test_server):
request = identity_pb2.ValidateTokenRequest(token="a-token")
validate_request_method = test_server.invoke_unary_unary(
method_descriptor=(identity_pb2.DESCRIPTOR
.services_by_name['Identity']
.methods_by_name['ValidateToken']),
invocation_metadata={},
request=request, timeout=1)
response, metadata, code, details = validate_request_method.termination()
assert code == grpc.StatusCode.OK
assert response.user_id == "default-user-id"
|
f0e22a840572bfb13530a5e0134c6e7964be3bc4
|
5b6ba0f288b1e2ac236af846a9bf546a63228476
|
/mmtbx/suitename/suiteninput.py
|
bac62f1691ebb7e9bca623fdcff0b4a2b72c0753
|
[
"BSD-3-Clause-LBNL",
"Apache-2.0"
] |
permissive
|
cctbx/cctbx_project
|
5b547b416cadbdf95cca21dace9f54272a08d98a
|
7f4dfb6c873fd560920f697cbfd8a5ff6eed82fa
|
refs/heads/master
| 2023-08-17T17:44:05.077010
| 2023-08-16T22:40:22
| 2023-08-16T22:40:22
| 39,508,026
| 206
| 131
|
NOASSERTION
| 2023-09-14T17:12:55
| 2015-07-22T13:36:27
|
Python
|
UTF-8
|
Python
| false
| false
| 6,512
|
py
|
suiteninput.py
|
from __future__ import nested_scopes, generators, division, absolute_import
from __future__ import with_statement, print_function
from suitenamedefs import Suite, Residue, findBase, globals
"""
This module handles reading suites from "dangle" format files
reading residues from kinemage format files and regrouping them into suites.
Extraction of suites from loaded cctbx models is handled elsewhere.
"""
# Copyright 2021 Richardson Lab at Duke University
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
import math, sys
def stringToFloat(string):
try:
n = float(string)
except ValueError:
n = 9999.0 # or maybe math.nan?
return n
def readResidues(inFile):
options = globals.options
lines = inFile.readlines()
if not lines:
return []
# catch a specific error notation from mp_geo:
while lines[0].strip().startswith("Atom pair"):
del lines[0]
residues = []
try:
i = 0
line = ""
for line in lines:
i += 1
if len(line.strip()) == 0 or line[0] == "#": # blank or comment line
continue
fields = line.split(":")
ids = fields[: options.pointidfields]
baseCode = fields[options.pointidfields - 1]
angleStrings = fields[options.pointidfields :]
if (
ids[options.altidfield-1].strip() != ""
and ids[options.altidfield-1] != options.altid
): # -1 converts 1-based to 0-based counting
continue # lines for the wrong alternative conformation are ignored
base = findBase(baseCode)
if not base: # ignore DNA bases
continue
angles = np.array([stringToFloat(s) for s in angleStrings])
      for k in range(len(angles)):
        if angles[k] < 0:
          angles[k] += 360.0
residue = Residue(ids, base, angles)
residues.append(residue)
except IndexError:
print("Suitename found malformed input on line {}, reading no further:".format(i),
file=sys.stderr)
print(" ", line, file=sys.stderr)
return residues
def readKinemageFile(inFile):
"""
We glean the following information from a kinemage file:
The @dimension command gives us the number of dimensions in the data
Anything between a @balllist command and a subsequent @ command
is a data line.
"""
options = globals.options
lines = inFile.readlines()
goodLines = []
place, line = findPrefixInList(lines, "@dimension")
  if place >= 0:  # index 0 is a valid match; findPrefixInList returns -1 when not found
items = line.split()
dimension = len(items) - 1
else:
dimension = options.anglefields
place = 0
while place >= 0:
begin, line = findPrefixesInList(lines, "@balllist", "@dotlist", place)
    if begin >= 0:
end, line = findPrefixInList(lines, "@", begin + 1)
place = end
if end < 0:
end = len(lines)
goodLines += lines[begin + 1 : end]
else:
break
if len(goodLines) == 0:
goodLines = lines # assume a pure data file
return readKinemageSuites(goodLines, dimension)
def readKinemageSuites(lines, dimension):
"""Read a list of kinemage data lines to yield a suite."""
suites = []
for line in lines:
if len(line.strip()) == 0 or line[0] == "#": # blank or comment line
continue
# A meaningful line begins with an id string enclosed in braces
if line[0] == "{":
mark = line.find("}")
if mark > 0:
idString = line[1:mark]
ids = idString.split(":")
# there may be some miscellaneous markers after the id string
k = mark + 1
while k < len(line) and not line[k].isdigit():
k = k + 1
mark2 = k
# once we see a number, everything else is angles
angleText = line[mark2:]
angleStrings = angleText.split(" ")
angleStrings2 = angleText.split(",")
if len(angleStrings2) > len(angleStrings):
angleStrings = angleStrings2
angleList = [stringToFloat(s) for s in angleStrings]
if len(angleList) != dimension:
continue # wrong number of dimensions means probably not a data point
if dimension == 9:
angles = np.array(angleList)
        else:  # given only 7 angles: pad with placeholder chi angles (180.0) at the ends
angles = np.array([180.0] + angleList + [180.0])
for i in range(len(angles)):
if angles[i] < 0:
angles[i] += 360.0
suite = Suite(ids, ids[9][2], angles)
suites.append(suite)
return suites
def findPrefixInList(lines, prefix, start=0):
  for i, s in enumerate(lines[start:]):
    if s.startswith(prefix):
      return i + start, s
  return -1, None
def findPrefixesInList(lines, prefix1, prefix2, start=0):
  for i, s in enumerate(lines[start:]):
    if s.startswith(prefix1) or s.startswith(prefix2):
      return i + start, s
  return -1, None
def buildSuiteBetweenResidues(r1, r2):
suite = Suite(r2.pointIDs, r2.base)
if len(r1.angle) > 6:
suite.chiMinus = r1.chi
suite.deltaMinus = r1.delta
suite.epsilon = r1.epsilon
suite.zeta = r1.zeta
suite.alpha = r2.alpha
suite.beta = r2.beta
suite.gamma = r2.gamma
suite.delta = r2.delta
if len(r2.angle) > 6:
suite.chi = r2.chi
return suite
def buildSuiteFirst(r2):
suite = Suite(r2.pointIDs, r2.base)
suite.alpha = r2.alpha
suite.beta = r2.beta
suite.gamma = r2.gamma
suite.delta = r2.delta
if len(r2.angle) > 6:
suite.chi = r2.chi
suite.epsilon = 999
suite.zeta = 999
suite.chiMinus = 999
suite.deltaMinus = 999
return suite
def buildSuiteLast(r1):
suite = Suite((), "")
if len(r1.angle) > 6:
suite.chiMinus = r1.chi
suite.deltaMinus = r1.delta
suite.epsilon = r1.epsilon
suite.zeta = r1.zeta
suite.alpha = 999
suite.beta = 999
suite.gamma = 999
suite.delta = 999
suite.chi = 999
return suite
def buildSuites(residues):
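  # Each suite straddles two consecutive residues, so n residues yield
  # n + 1 suites; the end suites use 999 as sentinels for missing angles.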
suites = [buildSuiteFirst(residues[0])]
for i in range(len(residues) - 1):
suites.append(buildSuiteBetweenResidues(residues[i], residues[i + 1]))
suites.append(buildSuiteLast(residues[-1]))
return suites
|
fba9e54aa0df1f6a7c4793061cd61e91f065aba8
|
c22256d3e8d566e75e8246cc8b62db798e88e9a3
|
/babybuddy/middleware.py
|
924952b732c0fd5021d5df4d6828a2b0bf743187
|
[
"BSD-2-Clause-Views",
"BSD-2-Clause"
] |
permissive
|
babybuddy/babybuddy
|
20a4648397b2dbb105b09172f317764eb7ff4955
|
b5163c236019f3f77667e04e4ea09621593914fe
|
refs/heads/master
| 2023-08-25T10:14:02.341213
| 2023-08-15T16:18:06
| 2023-08-15T16:38:34
| 107,898,477
| 901
| 199
|
BSD-2-Clause
| 2023-08-15T16:38:35
| 2017-10-22T20:02:57
|
Python
|
UTF-8
|
Python
| false
| false
| 5,274
|
py
|
middleware.py
|
from os import getenv
from time import time
from functools import wraps
import pytz
from urllib.parse import urlunsplit, urlsplit
from django.conf import settings
from django.utils import timezone, translation
from django.contrib.auth.middleware import RemoteUserMiddleware
from django.http import HttpRequest
class UserLanguageMiddleware:
"""
Customizes settings based on user language setting.
"""
def __init__(self, get_response):
self.get_response = get_response
def __call__(self, request):
user = request.user
if hasattr(user, "settings") and user.settings.language:
language = user.settings.language
elif request.LANGUAGE_CODE:
language = request.LANGUAGE_CODE
else:
language = settings.LANGUAGE_CODE
if language:
# Set the language before generating the response.
translation.activate(language)
response = self.get_response(request)
        # Deactivate the translation before the response is sent so it is
        # not reused in other threads.
translation.deactivate()
return response
class UserTimezoneMiddleware:
"""
Sets the timezone based on a user specific setting. This middleware must run after
`django.contrib.auth.middleware.AuthenticationMiddleware` because it uses the
request.user object.
"""
def __init__(self, get_response):
self.get_response = get_response
def __call__(self, request):
user = request.user
if hasattr(user, "settings") and user.settings.timezone:
try:
timezone.activate(pytz.timezone(user.settings.timezone))
except pytz.UnknownTimeZoneError:
pass
return self.get_response(request)
class RollingSessionMiddleware:
"""
Periodically resets the session expiry for existing sessions.
"""
def __init__(self, get_response):
self.get_response = get_response
def __call__(self, request):
if request.session.keys():
session_refresh = request.session.get("session_refresh")
if session_refresh:
try:
delta = int(time()) - session_refresh
except (ValueError, TypeError):
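                    # Treat a missing or corrupt timestamp as already
                    # expired, which forces a refresh below.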
delta = settings.ROLLING_SESSION_REFRESH + 1
if delta > settings.ROLLING_SESSION_REFRESH:
request.session["session_refresh"] = int(time())
request.session.set_expiry(settings.SESSION_COOKIE_AGE)
else:
request.session["session_refresh"] = int(time())
return self.get_response(request)
class CustomRemoteUser(RemoteUserMiddleware):
"""
Middleware used for remote authentication when `REVERSE_PROXY_AUTH` is True.
"""
header = getenv("PROXY_HEADER", "HTTP_REMOTE_USER")
class HomeAssistant:
"""
Django middleware that adds HomeAssistant specific properties and checks
to the request-object.
The middleware is only active if the settings variable
`ENABLE_HOME_ASSISTANT_SUPPORT` is set to True. Note that some features
remain enabled even if the middleware is set to inactive through the
settings.
Features:
- request.is_homeassistant_ingress_request (bool)
Indicates if a request was rerouted through the home assistant ingress
      service. This parameter is always present regardless of the
ENABLE_HOME_ASSISTANT_SUPPORT settings option. It defaults to false
if the middleware is disabled.
- wrapped request.build_absolute_uri function
The middleware redefines (wraps) the build_absolute_uri function
      provided by django to allow it to interpret the X-Ingress-Path
request header. This allows home assistant to construct correct
absolute URLs when run through home assistant's ingress service.
"""
def __init__(self, get_response):
self.get_response = get_response
self.home_assistant_support_enabled = settings.ENABLE_HOME_ASSISTANT_SUPPORT
def __wrap_build_absolute_uri(self, request: HttpRequest):
def wrap_x_ingress_path(org_func):
if not request.is_homeassistant_ingress_request:
return org_func
x_ingress_path = request.headers.get("X-Ingress-Path")
if x_ingress_path is None:
return org_func
@wraps(org_func)
def wrapper(*args, **kwargs):
url = org_func(*args, **kwargs)
url_parts = urlsplit(url)
url = urlunsplit(
url_parts._replace(path=x_ingress_path + url_parts.path)
)
return url
return wrapper
request.build_absolute_uri = wrap_x_ingress_path(request.build_absolute_uri)
def __call__(self, request: HttpRequest):
if self.home_assistant_support_enabled:
request.is_homeassistant_ingress_request = (
request.headers.get("X-Hass-Source") == "core.ingress"
)
else:
request.is_homeassistant_ingress_request = False
self.__wrap_build_absolute_uri(request)
return self.get_response(request)
|
5f46052efb7d06582721bd830a7cb0800469ce61
|
c6759b857e55991fea3ef0b465dbcee53fa38714
|
/gvsoc/runner/python/runner/stim_utils.py
|
6c909d916cbbb13f84607aa3c345938882d9f430
|
[
"Apache-2.0"
] |
permissive
|
GreenWaves-Technologies/gap_sdk
|
1b343bba97b7a5ce62a24162bd72eef5cc67e269
|
3fea306d52ee33f923f2423c5a75d9eb1c07e904
|
refs/heads/master
| 2023-09-01T14:38:34.270427
| 2023-08-10T09:04:44
| 2023-08-10T09:04:44
| 133,324,605
| 145
| 96
|
Apache-2.0
| 2023-08-27T19:03:52
| 2018-05-14T07:50:29
|
C
|
UTF-8
|
Python
| false
| false
| 21,555
|
py
|
stim_utils.py
|
#!/usr/bin/env python3
#
# Copyright (C) 2018 ETH Zurich, University of Bologna and GreenWaves Technologies
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
# Authors: Germain Haugou, ETH (germain.haugou@iis.ee.ethz.ch)
#
from elftools.elf.elffile import ELFFile
import os
import os.path
import struct
import argparse
class stim(object):
def __init__(self, verbose=False):
self.binaries = []
self.mem = {}
self.verbose = verbose
self.areas = []
self.dump('Created stimuli generator')
def get_entry(self):
with open(self.binaries[0], 'rb') as file:
elffile = ELFFile(file)
return elffile.header['e_entry']
    def dump(self, msg):
        if self.verbose:
            print(msg)
def add_binary(self, binary):
self.dump(' Added binary: %s' % binary)
self.binaries.append(binary)
def add_area(self, start, size):
self.dump(' Added target area: [0x%x -> 0x%x]' % (start, start + size))
self.areas.append([start, start+size])
def __add_mem_word(self, base, size, data, width):
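        # Pack bytes into little-endian words of `width` bytes, keyed by the
        # word-aligned address. E.g. (hypothetical) base=0x1003, width=4:
        # aligned_base=0x1000, shift=3, so only one byte lands in this word.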
aligned_base = base & ~(width - 1)
shift = base - aligned_base
iter_size = width - shift
if iter_size > size:
iter_size = size
value = self.mem.get(str(aligned_base))
if value is None:
value = 0
        value &= ~(((1 << (iter_size * 8)) - 1) << (shift * 8))  # clear only the bytes being rewritten
value |= int.from_bytes(data[0:iter_size], byteorder='little') << (shift*8)
self.mem[str(aligned_base)] = value
return iter_size
def __add_mem(self, base, size, data, width):
while size > 0:
iter_size = self.__add_mem_word(base, size, data, width)
size -= iter_size
base += iter_size
data = data[iter_size:]
def __gen_stim_slm(self, filename, width):
self.dump(' Generating to file: ' + filename)
        try:
            os.makedirs(os.path.dirname(filename))
        except OSError:
            pass
with open(filename, 'w') as file:
            for key in sorted(self.mem.keys(), key=int):
file.write('%X_%0*X\n' % (int(key), width*2, self.mem.get(key)))
def __parse_binaries(self, width):
self.mem = {}
for binary in self.binaries:
with open(binary, 'rb') as file:
elffile = ELFFile(file)
for segment in elffile.iter_segments():
if segment['p_type'] == 'PT_LOAD':
data = segment.data()
addr = segment['p_paddr']
size = len(data)
load = True
if len(self.areas) != 0:
load = False
for area in self.areas:
if addr >= area[0] and addr + size <= area[1]:
load = True
break
if load:
self.dump(' Handling section (base: 0x%x, size: 0x%x)' % (addr, size))
self.__add_mem(addr, size, data, width)
if segment['p_filesz'] < segment['p_memsz']:
addr = segment['p_paddr'] + segment['p_filesz']
size = segment['p_memsz'] - segment['p_filesz']
self.dump(' Init section to 0 (base: 0x%x, size: 0x%x)' % (addr, size))
self.__add_mem(addr, size, [0] * size, width)
else:
self.dump(' Bypassing section (base: 0x%x, size: 0x%x)' % (addr, size))
def gen_stim_slm_64(self, stim_file):
self.__parse_binaries(8)
self.__gen_stim_slm(stim_file, 8)
def gen_stim_bin(self, stim_file):
self.__parse_binaries(1)
        try:
            os.makedirs(os.path.dirname(stim_file))
        except OSError:
            pass
with open(stim_file, 'wb') as file:
prev_addr = None
            for key in sorted(self.mem.keys(), key=int):
addr = int(key)
if prev_addr is not None:
while prev_addr != addr - 1:
file.write(struct.pack('B', 0))
prev_addr += 1
prev_addr = addr
file.write(struct.pack('B', int(self.mem.get(key))))
class Efuse(object):
def __init__(self, config, verbose=False):
self.config = config
self.verbose = verbose
self.dump('Created efuse stimuli generator')
    def dump(self, msg):
        if self.verbose:
            print(msg)
def gen_stim_txt(self, filename):
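        # Builds the per-chip efuse register image, then (for chips that
        # preload efuses) writes one register per line as a 32-bit binary string.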
user_efuses = {}
efuses = self.config.get('**/efuse/values')
if efuses is None:
efuses = []
else:
efuses = efuses.get_dict()
for efuse in efuses:
efuse_id, val = efuse.split(':')
efuse_id = int(efuse_id, 0)
val = int(val, 0)
user_efuses[efuse_id] = val
nb_regs = self.config.get_child_int('**/efuse/nb_regs')
pulp_chip = self.config.get_child_str('**/chip/name')
pulp_chip_family = self.config.get_child_str('**/chip/pulp_chip_family')
if pulp_chip_family == 'gap' or pulp_chip == 'vega' or pulp_chip == 'gap9' or pulp_chip == 'gap9_v2':
load_mode = self.config.get_child_str('**/runner/boot-mode')
encrypted = self.config.get_child_str('**/efuse/encrypted')
aes_key = self.config.get_child_str('**/efuse/aes_key')
aes_iv = self.config.get_child_str('**/efuse/aes_iv')
xtal_check = self.config.get_child_bool('**/efuse/xtal_check')
xtal_check_delta = self.config.get_child_bool('**/efuse/xtal_check_delta')
xtal_check_min = self.config.get_child_bool('**/efuse/xtal_check_min')
xtal_check_max = self.config.get_child_bool('**/efuse/xtal_check_max')
no_preload = self.config.get_child_str('**/efuse/no-preload')
            # In classic rom boot mode, don't init any efuse: the boot loader will boot with the default mode.
load_mode_hex = None
if pulp_chip == 'gap':
if load_mode == 'rom':
load_mode_hex = 0x3A
elif load_mode == 'spi':
load_mode_hex = 0x0A
elif load_mode == 'jtag':
load_mode_hex = 0x12
elif load_mode == 'rom_hyper':
load_mode_hex = 0x2A
elif load_mode == 'rom_spim_single':
load_mode_hex = 0x32
elif load_mode == 'rom_spim':
load_mode_hex = 0x3A
elif load_mode == 'jtag_dev' or load_mode == 'spi_dev':
load_mode_hex = None
if xtal_check:
                    if load_mode_hex is None:
                        load_mode_hex = 0
load_mode_hex |= 1<<7
delta = int(xtal_check_delta*((1 << 15)-1))
efuses.append('26:0x%x' % (delta & 0xff))
efuses.append('27:0x%x' % ((delta >> 8) & 0xff))
efuses.append('28:0x%x' % (xtal_check_min))
efuses.append('29:0x%x' % (xtal_check_max))
                if load_mode_hex is not None:
if encrypted:
load_mode_hex |= 0x40
for i in range(0, 16):
efuses.append('%d:0x%s' % (2+i, aes_key[30-i*2:32-i*2]))
for i in range(0, 8):
efuses.append('%d:0x%s' % (18+i, aes_iv[14-i*2:16-i*2]))
efuses.append('0:%s' % load_mode_hex)
elif pulp_chip == 'vega' or pulp_chip == 'gap9':
efuses = [0] * 128
info2 = 0
info3 = 0
info4 = 0
info5 = 0
info6 = 0
clk_div = self.config.get_child_int('**/efuse/clkdiv')
fll_freq = self.config.get_child_int('**/efuse/fll/freq')
fll_assert_cycles = self.config.get_child_int('**/efuse/fll/assert_cycles')
fll_lock_tolerance = self.config.get_child_int('**/efuse/fll/lock_tolerance')
hyper_delay = self.config.get_child_int('**/efuse/hyper/delay')
hyper_latency = self.config.get_child_int('**/efuse/hyper/latency')
if load_mode == 'rom':
# RTL platform | flash boot | no encryption | no wait xtal
load_mode_hex = 2 | (2 << 3) | (0 << 4) | (0 << 5) | (0 << 6) | (0 << 7)
elif load_mode == 'rom_hyper':
# RTL platform | flash boot | no encryption | no wait xtal
load_mode_hex = 2 | (2 << 3) | (0 << 4) | (0 << 5) | (0 << 6) | (0 << 7)
# Hyperflash type
info3 = (1 << 0)
elif load_mode == 'rom_spim':
# RTL platform | flash boot | no encryption | no wait xtal
load_mode_hex = 2 | (2 << 3) | (0 << 4) | (0 << 5) | (0 << 6) | (0 << 7)
# SPI flash type
info3 = (0 << 0)
elif load_mode == 'rom_mram':
# RTL platform | flash boot | no encryption | no wait xtal
load_mode_hex = 2 | (2 << 3) | (0 << 4) | (0 << 5) | (0 << 6) | (0 << 7)
# MRAM type
info3 = (2 << 0)
# Activate MRAM TRIM CFG and fill it with dummy numbers until we get the real one. Also activate clock divider
info6 |= (1 << 6) | (1<<7)
info2 |= (2 << 3)
efuses[56] = 32*4
for i in range(0, 32):
                        efuses[57 + i] = i | ((i*4+1) << 8) | ((i*4+2) << 16) | ((i*4+3) << 24)
if clk_div is not None:
info6 |= 1 << 7
info2 = (info2 & ~(3<<3)) | (clk_div << 3)
if fll_freq is not None:
info2 |= (1 << 0) | (1 << 2)
efuses[31] = fll_freq
if fll_lock_tolerance is not None or fll_assert_cycles is not None:
info2 |= (1<< 1)
efuses[32] = fll_lock_tolerance
efuses[33] = fll_assert_cycles
if hyper_delay is not None:
info5 |= (1<<6)
efuses[30] = hyper_delay
if hyper_latency is not None:
info5 |= (1<<7)
efuses[51] = hyper_latency
                if load_mode_hex is not None:
if encrypted:
load_mode_hex |= 0x40
info6 |= 1<<4
for i in range(0, 16):
efuses[2+i] = aes_key[30-i*2:32-i*2]
for i in range(0, 8):
efuses[18+i] = aes_iv[14-i*2:16-i*2]
efuses[0] = load_mode_hex
efuses[1] = info2
efuses[37] = info3
efuses[38] = info4
efuses[39] = info5
efuses[40] = info6
elif pulp_chip == 'gap9_v2':
efuses = [0] * 128
info2 = 0
info3 = 0
info4 = 0
info5 = 0
info6 = 0
clk_div = self.config.get_child_int('**/efuse/clkdiv')
fll_freq = self.config.get_child_int('**/efuse/fll/freq')
fll_assert_cycles = self.config.get_child_int('**/efuse/fll/assert_cycles')
fll_lock_tolerance = self.config.get_child_int('**/efuse/fll/lock_tolerance')
hyper_delay = self.config.get_child_int('**/efuse/hyper/delay')
hyper_latency = self.config.get_child_int('**/efuse/hyper/latency')
if load_mode == 'rom':
# RTL platform | flash boot | no encryption | no wait xtal
load_mode_hex = 2 | (2 << 3) | (0 << 4) | (0 << 5) | (0 << 6) | (0 << 7)
info5 |= 1 << 1 # Boot on UDMA SPIM1 interface (first single spi)
elif load_mode == 'rom_hyper':
# RTL platform | flash boot | no encryption | no wait xtal
load_mode_hex = 2 | (2 << 3) | (0 << 4) | (0 << 5) | (0 << 6) | (0 << 7)
# Hyperflash type
info3 = (1 << 0)
elif load_mode == 'rom_spim':
# RTL platform | flash boot | no encryption | no wait xtal
load_mode_hex = 2 | (2 << 3) | (0 << 4) | (0 << 5) | (0 << 6) | (0 << 7)
# SPI flash type
info3 = (0 << 0)
info5 |= 1 << 1 # Boot on UDMA SPIM1 interface (first single spi)
elif load_mode == 'rom_mram':
# RTL platform | flash boot | no encryption | no wait xtal
load_mode_hex = 2 | (2 << 3) | (0 << 4) | (0 << 5) | (0 << 6) | (0 << 7)
# MRAM type
info3 = (2 << 0)
# Activate MRAM TRIM CFG and fill it with dummy numbers until we get the real one. Also activate clock divider
info6 |= (1 << 6) | (1<<7)
info2 |= (2 << 3)
efuses[56] = 32*4
for i in range(0, 32):
                        efuses[57 + i] = i | ((i*4+1) << 8) | ((i*4+2) << 16) | ((i*4+3) << 24)
if clk_div is not None:
info6 |= 1 << 7
info2 = (info2 & ~(3<<3)) | (clk_div << 3)
if fll_freq is not None:
info2 |= (1 << 0) | (1 << 2)
efuses[31] = fll_freq
if fll_lock_tolerance is not None or fll_assert_cycles is not None:
info2 |= (1<< 1)
efuses[32] = fll_lock_tolerance
efuses[33] = fll_assert_cycles
if hyper_delay is not None:
info5 |= (1<<6)
efuses[30] = hyper_delay
if hyper_latency is not None:
info5 |= (1<<7)
efuses[51] = hyper_latency
                if load_mode_hex is not None:
if encrypted:
load_mode_hex |= 0x40
info6 |= 1<<4
for i in range(0, 16):
efuses[2+i] = aes_key[30-i*2:32-i*2]
for i in range(0, 8):
efuses[18+i] = aes_iv[14-i*2:16-i*2]
efuses[0] = load_mode_hex
efuses[1] = info2
efuses[37] = info3
efuses[38] = info4
efuses[39] = info5
efuses[40] = info6
elif pulp_chip == 'gap_rev1':
info3 = 0
info6 = 0
if load_mode == 'rom':
# RTL platform | flash boot | no encryption | no wait xtal
load_mode_hex = 2 | (2 << 3) | (0 << 4) | (0 << 5) | (0 << 6) | (0 << 7)
elif load_mode == 'rom_hyper':
# RTL platform | flash boot | no encryption | no wait xtal
load_mode_hex = 2 | (2 << 3) | (0 << 4) | (0 << 5) | (0 << 6) | (0 << 7)
# Hyperflash type
info3 = (1 << 0)
elif load_mode == 'rom_spim':
# RTL platform | flash boot | no encryption | no wait xtal
load_mode_hex = 2 | (2 << 3) | (0 << 4) | (0 << 5) | (0 << 6) | (0 << 7)
# SPI flash type
info3 = (0 << 0)
if xtal_check:
                    if load_mode_hex is None:
                        load_mode_hex = 0
load_mode_hex |= 1<<7
delta = int(xtal_check_delta*((1 << 15)-1))
efuses.append('26:0x%x' % (delta & 0xff))
efuses.append('27:0x%x' % ((delta >> 8) & 0xff))
efuses.append('28:0x%x' % (xtal_check_min))
efuses.append('29:0x%x' % (xtal_check_max))
                if load_mode_hex is not None:
if encrypted:
load_mode_hex |= 0x40
info6 |= 1<<4
for i in range(0, 16):
efuses.append('%d:0x%s' % (2+i, aes_key[30-i*2:32-i*2]))
for i in range(0, 8):
efuses.append('%d:0x%s' % (18+i, aes_iv[14-i*2:16-i*2]))
efuses.append('0:%s' % load_mode_hex)
efuses.append('1:%s' % 0)
efuses.append('37:%s' % (info3))
efuses.append('38:%s' % 0)
efuses.append('39:%s' % 0)
efuses.append('40:%s' % (info6))
elif pulp_chip == 'gap8_revc':
fll_freq = self.config.get_child_int('**/efuse/fll/freq')
ref_clk_wait = self.config.get_child_int('**/efuse/ref_clk_wait')
burst_size = self.config.get_child_int('**/efuse/burst_size')
flash_id = self.config.get_child_bool('**/efuse/flash_id')
fll_assert_cycles = self.config.get_child_int('**/efuse/fll/assert_cycles')
fll_lock_tolerance = self.config.get_child_int('**/efuse/fll/lock_tolerance')
hyper_delay = self.config.get_child_int('**/efuse/hyper/delay')
hyper_latency = self.config.get_child_int('**/efuse/hyper/latency')
if hyper_delay is None:
hyper_delay = 3
efuses = [0] * 128
info3 = 0
info2 = 0
info6 = 0
info5 = 0
if self.config.get_child_str('**/vsim/model') == 'rtl':
info7 = 1 # Don't use UDMA MEMCPY as it makes RTL platform crash
else:
info7 = 0
if load_mode == 'rom':
# RTL platform | flash boot | no encryption | no wait xtal
load_mode_hex = 2 | (2 << 3) | (0 << 4) | (0 << 5) | (0 << 6) | (0 << 7)
elif load_mode == 'rom_hyper':
# RTL platform | flash boot | no encryption | no wait xtal
load_mode_hex = 2 | (2 << 3) | (0 << 4) | (0 << 5) | (0 << 6) | (0 << 7)
# Hyperflash type
info3 = (1 << 0)
info7 |= (1 << 2) # Partially reconfigure pads to overcome HW issue with rwds cg latch
elif load_mode == 'rom_spim':
# RTL platform | flash boot | no encryption | no wait xtal
load_mode_hex = 2 | (2 << 3) | (0 << 4) | (0 << 5) | (0 << 6) | (0 << 7)
# SPI flash type
info3 = (0 << 0)
if burst_size is not None:
info6 |= (1 << 7)
efuses[61] = burst_size & 0xff
efuses[62] = (burst_size >> 8) & 0xff
if flash_id:
info6 |= (1 << 5)
if fll_freq is not None:
info2 |= (1 << 0)
efuses[57] = fll_freq
if ref_clk_wait is not None:
info2 |= (1 << 6)
efuses[35] = ref_clk_wait & 0xff
efuses[36] = (ref_clk_wait >> 8) & 0xff
else:
info2 |= (1 << 6)
efuses[35] = 0
efuses[36] = 0
if hyper_delay is not None:
info5 |= (1<<6)
efuses[32] |= hyper_delay
if hyper_latency is not None:
info5 |= (1<<7)
efuses[51] |= hyper_latency
if fll_lock_tolerance is not None or fll_assert_cycles is not None:
                info2 |= (1 << 1)
efuses[58] = fll_lock_tolerance
efuses[59] = fll_assert_cycles
if xtal_check:
                if load_mode_hex is None:
                    load_mode_hex = 0
load_mode_hex |= 1<<7
delta = int(xtal_check_delta*((1 << 15)-1))
efuses[26] = delta & 0xff
efuses[27] = (delta >> 8) & 0xff
efuses[28] = xtal_check_min & 0xff
efuses[29] = (xtal_check_min >> 8) & 0xff
efuses[30] |= xtal_check_max & 0xff
efuses[31] = (xtal_check_max >> 8) & 0xff
            if load_mode_hex is not None:
if encrypted:
load_mode_hex |= 0x40
info6 |= 1<<4
for i in range(0, 16):
efuses[2+i] = int('0x%s' % aes_key[30-i*2:32-i*2], 0)
for i in range(0, 8):
efuses[18+i] = int('0x%s' % aes_iv[14-i*2:16-i*2], 0)
efuses[0] = load_mode_hex
efuses[1] = info2
efuses[37] = info3
efuses[38] = 0
efuses[39] = info5
efuses[40] = info6
efuses[60] = info7
# Efuse preloading file generation
if pulp_chip == 'vega' or pulp_chip == 'gap9' or pulp_chip == 'gap9_v2':
self.dump(' Generating to file: ' + filename)
with open(filename, 'w') as file:
                if not no_preload:
                    for efuseId in range(0, 128):
value = efuses[efuseId]
self.dump(' Writing register (index: %d, value: 0x%x)' % (efuseId, value))
file.write('{0:032b}\n'.format(value))
elif pulp_chip == 'gap8_revc':
values = [0] * nb_regs * 8
            for efuseId in range(0, nb_regs):
value = user_efuses.get(efuseId)
if value is None:
value = efuses[efuseId]
self.dump(' Writing register (index: %d, value: 0x%x)' % (efuseId, value))
for index in range(0, 8):
if (value >> index) & 1 == 1: values[efuseId + index*128] = 1
self.dump(' Generating to file: ' + filename)
with open(filename, 'w') as file:
for value in values:
file.write('%d ' % (value))
else:
values = [0] * nb_regs * 8
for efuse in efuses:
efuseId, value = efuse.split(':')
self.dump(' Writing register (index: %d, value: 0x%x)' % (int(efuseId, 0), int(value, 0)))
efuseId = int(efuseId, 0)
value = int(value, 0)
for index in range(0, 8):
if (value >> index) & 1 == 1: values[efuseId + index*128] = 1
self.dump(' Generating to file: ' + filename)
with open(filename, 'w') as file:
for value in values:
file.write('%d ' % (value))
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Generate stimuli')
parser.add_argument("--binary", dest="binary", default=None, help="Specify input binary")
parser.add_argument("--vectors", dest="vectors", default=None, help="Specify output vectors file")
args = parser.parse_args()
if args.binary is None:
raise Exception('Specify the input binary with --binary=<path>')
if args.vectors is not None:
stim_gen = stim(verbose=True)
stim_gen.add_binary(args.binary)
stim_gen.gen_stim_slm_64(args.vectors)
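# Hypothetical invocation sketch (not part of the original script; the script
# and file names below are placeholders):
#
#   python gen_stimuli.py --binary build/test/test --vectors slm_files/vectors.txt
#
# This loads the ELF binary and writes 64-bit SLM stimuli to the vectors file.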
|
ee2f1ef9d6ed77ffc7f768a74d552241ba342fae
|
6b4a99a061d04b927178e3739a44fa3a9c57f071
|
/suzieq/sqobjects/arpnd.py
|
6cc39265954fb4667e112a58a20cda6c33fc2cda
|
[
"Apache-2.0"
] |
permissive
|
netenglabs/suzieq
|
586f2ce8c7cb9051c31bb6ef0d1cd225300a9c00
|
091d7847d6cbc568ffd4f2fa1d87e052937a5d60
|
refs/heads/develop
| 2023-08-26T16:56:00.575722
| 2023-07-17T09:43:49
| 2023-07-17T09:43:49
| 232,338,630
| 677
| 104
|
Apache-2.0
| 2023-08-14T23:43:57
| 2020-01-07T14:11:09
|
Python
|
UTF-8
|
Python
| false
| false
| 1,587
|
py
|
arpnd.py
|
from ipaddress import ip_interface
from suzieq.sqobjects.basicobj import SqObject
from suzieq.shared.utils import (convert_macaddr_format_to_colon,
validate_macaddr)
from suzieq.shared.utils import validate_network
class ArpndObj(SqObject):
'''The object providing access to the arp/nd table'''
def __init__(self, **kwargs):
super().__init__(table='arpnd', **kwargs)
self._valid_get_args = ['namespace', 'hostname', 'ipAddress', 'prefix',
'columns', 'oif', 'macaddr', 'query_str']
self._convert_args = {
'macaddr': convert_macaddr_format_to_colon
}
def validate_get_input(self, **kwargs):
if kwargs.get('prefix', []) and kwargs.get('ipAddress', []):
raise AttributeError("Cannot specify address and prefix together")
if kwargs.get('prefix', []):
for p in kwargs['prefix']:
if not validate_network(p):
raise ValueError("Invalid prefix specified")
if kwargs.get('ipAddress', []):
for a in kwargs['ipAddress']:
try:
ip_interface(a)
except ValueError:
raise ValueError("Invalid IP address specified")
if kwargs.get('macaddr', []):
for m in kwargs['macaddr']:
if not validate_macaddr(m):
raise ValueError("Invalid mac address specified")
self._unique_def_column = ['ipAddress']
return super().validate_get_input(**kwargs)
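# Standalone sketch (assumption: illustrative only, not part of suzieq; the
# sample values are assumed to be ones these validators accept). It exercises
# the same checks validate_get_input applies above:
if __name__ == '__main__':
    assert validate_network('10.0.0.0/24')
    assert validate_macaddr('00:11:22:33:44:55')
    ip_interface('10.0.0.1/24')  # raises ValueError for malformed addresses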
|
6d70b69c909594c57e6f2ebadc3b3b64e0aee85c
|
5a52ccea88f90dd4f1acc2819997fce0dd5ffb7d
|
/alipay/aop/api/response/AlipayOpenContentIotCouponQueryResponse.py
|
3ba43a588667ef16c7f32de6402a91f0b82e2f58
|
[
"Apache-2.0"
] |
permissive
|
alipay/alipay-sdk-python-all
|
8bd20882852ffeb70a6e929038bf88ff1d1eff1c
|
1fad300587c9e7e099747305ba9077d4cd7afde9
|
refs/heads/master
| 2023-08-27T21:35:01.778771
| 2023-08-23T07:12:26
| 2023-08-23T07:12:26
| 133,338,689
| 247
| 70
|
Apache-2.0
| 2023-04-25T04:54:02
| 2018-05-14T09:40:54
|
Python
|
UTF-8
|
Python
| false
| false
| 3,343
|
py
|
AlipayOpenContentIotCouponQueryResponse.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.response.AlipayResponse import AlipayResponse
class AlipayOpenContentIotCouponQueryResponse(AlipayResponse):
def __init__(self):
super(AlipayOpenContentIotCouponQueryResponse, self).__init__()
self._btn_act = None
self._camp_id = None
self._component_template = None
self._content = None
self._item_type = None
self._nonempty_coupon_list = None
self._prize_type = None
self._shop_info = None
self._url = None
self._voice_broadcast = None
@property
def btn_act(self):
return self._btn_act
@btn_act.setter
def btn_act(self, value):
self._btn_act = value
@property
def camp_id(self):
return self._camp_id
@camp_id.setter
def camp_id(self, value):
self._camp_id = value
@property
def component_template(self):
return self._component_template
@component_template.setter
def component_template(self, value):
self._component_template = value
@property
def content(self):
return self._content
@content.setter
def content(self, value):
self._content = value
@property
def item_type(self):
return self._item_type
@item_type.setter
def item_type(self, value):
self._item_type = value
@property
def nonempty_coupon_list(self):
return self._nonempty_coupon_list
@nonempty_coupon_list.setter
def nonempty_coupon_list(self, value):
self._nonempty_coupon_list = value
@property
def prize_type(self):
return self._prize_type
@prize_type.setter
def prize_type(self, value):
self._prize_type = value
@property
def shop_info(self):
return self._shop_info
@shop_info.setter
def shop_info(self, value):
self._shop_info = value
@property
def url(self):
return self._url
@url.setter
def url(self, value):
self._url = value
@property
def voice_broadcast(self):
return self._voice_broadcast
@voice_broadcast.setter
def voice_broadcast(self, value):
self._voice_broadcast = value
def parse_response_content(self, response_content):
response = super(AlipayOpenContentIotCouponQueryResponse, self).parse_response_content(response_content)
if 'btn_act' in response:
self.btn_act = response['btn_act']
if 'camp_id' in response:
self.camp_id = response['camp_id']
if 'component_template' in response:
self.component_template = response['component_template']
if 'content' in response:
self.content = response['content']
if 'item_type' in response:
self.item_type = response['item_type']
if 'nonempty_coupon_list' in response:
self.nonempty_coupon_list = response['nonempty_coupon_list']
if 'prize_type' in response:
self.prize_type = response['prize_type']
if 'shop_info' in response:
self.shop_info = response['shop_info']
if 'url' in response:
self.url = response['url']
if 'voice_broadcast' in response:
self.voice_broadcast = response['voice_broadcast']
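# Usage sketch (assumption: illustrative only, not part of the SDK; it relies
# on the base class returning the decoded JSON dict, as its use in
# parse_response_content above implies):
if __name__ == '__main__':
    demo = AlipayOpenContentIotCouponQueryResponse()
    demo.parse_response_content(json.dumps({'camp_id': 'CAMP_123', 'prize_type': 'coupon'}))
    print(demo.camp_id, demo.prize_type)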
|
9a1f94dd160332975e3b63be6c4230ba9bbce6e4
|
4785640a76cf16f5349f66963889e78d31e48322
|
/tests/conftest.py
|
927c101a97a25b8681f30581ca399203fcc5d9d5
|
[
"MIT"
] |
permissive
|
dmytrostriletskyi/accessify
|
7e363614a3107d5c290815c4c5a631ac44b770c2
|
6b7cf8657ffe18cd6a43c6cfb73b071084f0331e
|
refs/heads/develop
| 2023-08-06T02:21:14.350956
| 2019-04-16T23:42:11
| 2019-04-16T23:42:11
| 173,346,507
| 119
| 9
|
MIT
| 2023-07-31T18:23:12
| 2019-03-01T18:01:34
|
Python
|
UTF-8
|
Python
| false
| false
| 388
|
py
|
conftest.py
|
""""
Provide configurations for testing with pytest.
"""
import os
import pytest
@pytest.fixture
def enable_accessify():
"""
Enabling the accessify.
"""
try:
del os.environ['DISABLE_ACCESSIFY']
except KeyError:
pass
@pytest.fixture
def disable_accessify():
"""
Disabling the accessify.
"""
os.environ['DISABLE_ACCESSIFY'] = 'True'
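# Usage sketch (assumption: illustrative only): a test requests a fixture by
# name, and pytest applies the environment change before the test body runs.
#
#   def test_checks_enforced(enable_accessify):
#       ...  # DISABLE_ACCESSIFY is unset here, so accessify checks run
#
#   def test_checks_skipped(disable_accessify):
#       ...  # DISABLE_ACCESSIFY=True, so accessify checks are bypassed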
|
3270e4bcb2af2d0854097774de097cd843fb08a6
|
f3806d9fb54773908cd9704121a543b114470aca
|
/angr/angrdb/models.py
|
64ab85f30682736a45236497d944a90b9cc4631a
|
[
"BSD-2-Clause"
] |
permissive
|
angr/angr
|
8ae95fceca51b0a001de56477d984dd01193ac1d
|
37e8ca1c3308ec601ad1d7c6bc8081ff38a7cffd
|
refs/heads/master
| 2023-08-17T03:15:21.007865
| 2023-08-15T18:44:57
| 2023-08-15T18:44:57
| 40,328,394
| 7,184
| 1,306
|
BSD-2-Clause
| 2023-09-14T20:14:23
| 2015-08-06T21:46:55
|
Python
|
UTF-8
|
Python
| false
| false
| 4,750
|
py
|
models.py
|
from sqlalchemy import Column, Integer, String, Boolean, BLOB, ForeignKey
from sqlalchemy.orm import relationship
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
class DbInformation(Base):
"""
Stores information related to the current database. Basically a key-value store.
"""
__tablename__ = "information"
id = Column(Integer, primary_key=True)
key = Column(String, unique=True, index=True)
value = Column(String)
class DbObject(Base):
"""
Models a binary object.
"""
__tablename__ = "objects"
id = Column(Integer, primary_key=True)
main_object = Column(Boolean, default=False)
path = Column(String, default="", nullable=True)
content = Column(BLOB, nullable=True)
backend = Column(String)
backend_args = Column(String, nullable=True) # it's a JSON field but JSON is not supported in sqlite before 3.9
class DbKnowledgeBase(Base):
"""
Models a knowledge base.
"""
__tablename__ = "knowledgebases"
id = Column(Integer, primary_key=True)
name = Column(String, default="", nullable=True)
cfgs = relationship("DbCFGModel", back_populates="kb")
funcs = relationship("DbFunction", back_populates="kb")
xrefs = relationship("DbXRefs", uselist=False, back_populates="kb")
comments = relationship("DbComment", back_populates="kb")
labels = relationship("DbLabel", back_populates="kb")
var_collections = relationship("DbVariableCollection", back_populates="kb")
structured_code = relationship("DbStructuredCode", back_populates="kb")
class DbCFGModel(Base):
"""
Models a CFGFast instance.
"""
__tablename__ = "cfgs"
id = Column(Integer, primary_key=True)
kb_id = Column(
Integer,
ForeignKey("knowledgebases.id"),
nullable=False,
)
kb = relationship("DbKnowledgeBase", uselist=False, back_populates="cfgs")
ident = Column(String)
blob = Column(BLOB)
class DbFunction(Base):
"""
Models a Function instance.
"""
__tablename__ = "functions"
id = Column(Integer, primary_key=True)
kb_id = Column(
Integer,
ForeignKey("knowledgebases.id"),
nullable=False,
)
kb = relationship("DbKnowledgeBase", uselist=False, back_populates="funcs")
addr = Column(Integer)
blob = Column(BLOB)
class DbVariableCollection(Base):
"""
Models a VariableManagerInternal instance.
"""
__tablename__ = "variables"
id = Column(Integer, primary_key=True)
kb_id = Column(
Integer,
ForeignKey("knowledgebases.id"),
nullable=False,
)
kb = relationship("DbKnowledgeBase", uselist=False, back_populates="var_collections")
func_addr = Column(Integer)
ident = Column(String, nullable=True)
blob = Column(BLOB)
class DbStructuredCode(Base):
"""
Models a StructuredCode instance.
"""
__tablename__ = "structured_code"
id = Column(Integer, primary_key=True)
kb_id = Column(
Integer,
ForeignKey("knowledgebases.id"),
nullable=False,
)
kb = relationship("DbKnowledgeBase", uselist=False, back_populates="structured_code")
func_addr = Column(Integer)
flavor = Column(String)
expr_comments = Column(BLOB, nullable=True)
stmt_comments = Column(BLOB, nullable=True)
configuration = Column(BLOB, nullable=True)
const_formats = Column(BLOB, nullable=True)
ite_exprs = Column(BLOB, nullable=True)
class DbXRefs(Base):
"""
Models an XRefManager instance.
"""
__tablename__ = "xrefs"
id = Column(Integer, primary_key=True)
kb_id = Column(
Integer,
ForeignKey("knowledgebases.id"),
nullable=False,
)
kb = relationship("DbKnowledgeBase", uselist=False, back_populates="xrefs")
blob = Column(BLOB, nullable=True)
class DbComment(Base):
"""
Models a comment.
"""
__tablename__ = "comments"
id = Column(Integer, primary_key=True)
kb_id = Column(
Integer,
ForeignKey("knowledgebases.id"),
nullable=False,
)
kb = relationship("DbKnowledgeBase", uselist=False, back_populates="comments")
addr = Column(Integer, index=True)
comment = Column(String)
    type = Column(Integer)  # not used yet; reserved so typed comments can be added later
class DbLabel(Base):
"""
Models a label.
"""
__tablename__ = "labels"
id = Column(Integer, primary_key=True)
kb_id = Column(
Integer,
ForeignKey("knowledgebases.id"),
nullable=False,
)
kb = relationship("DbKnowledgeBase", uselist=False, back_populates="labels")
addr = Column(Integer, index=True)
name = Column(String)
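# Usage sketch (assumption: illustrative only, not part of angr): the
# declarative Base above carries the table metadata for every model, so the
# full schema can be materialized against an in-memory SQLite database.
if __name__ == '__main__':
    from sqlalchemy import create_engine

    engine = create_engine('sqlite://')  # in-memory database
    Base.metadata.create_all(engine)
    print(sorted(Base.metadata.tables))  # cfgs, comments, functions, ...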
|
47fc75509dba04ba134f54743cc9097d571e9929
|
3bc139860403ebd05e278c95fca26e24d5189271
|
/chia/util/check_fork_next_block.py
|
8ae6061899a8737d39ace74d389b0a53180fb0f0
|
[
"Apache-2.0"
] |
permissive
|
Chia-Network/chia-blockchain
|
a09183b7240b159419b45f8373a41a1062f77ef3
|
d966f3f9e63aed52dbd73544164202a9f11ce3d2
|
refs/heads/main
| 2023-08-31T09:37:13.741283
| 2023-08-30T18:27:22
| 2023-08-30T18:27:22
| 197,153,676
| 12,936
| 2,474
|
Apache-2.0
| 2023-09-14T19:08:51
| 2019-07-16T08:32:40
|
Python
|
UTF-8
|
Python
| false
| false
| 1,408
|
py
|
check_fork_next_block.py
|
from __future__ import annotations
from typing import Awaitable, Callable, List
from chia.consensus.blockchain_interface import BlockchainInterface
from chia.server.ws_connection import WSChiaConnection
from chia.util.ints import uint32
async def check_fork_next_block(
blockchain: BlockchainInterface,
fork_point_height: uint32,
peers_with_peak: List[WSChiaConnection],
check_block_future: Callable[[WSChiaConnection, uint32, BlockchainInterface], Awaitable[bool]],
) -> uint32:
our_peak_height = blockchain.get_peak_height()
    ses_heights = blockchain.get_ses_heights()
    if len(ses_heights) > 2 and our_peak_height is not None:
        ses_heights.sort()
        max_fork_ses_height = ses_heights[-3]
        potential_peak = uint32(our_peak_height + 1)
        # Use the SES-based fork point when no explicit fork point was detected
        if blockchain.get_peak_height() is not None and fork_point_height == max_fork_ses_height:
for peer in peers_with_peak.copy():
if peer.closed:
peers_with_peak.remove(peer)
continue
# Grab a block at peak + 1 and check if fork point is actually our current height
                if await check_block_future(peer, potential_peak, blockchain):
fork_point_height = our_peak_height
break
return fork_point_height
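# Illustration (assumption: standalone sketch, not part of chia): the
# ses_heights[-3] lookup above selects the third-highest sub-epoch summary
# height, i.e. the deepest height the fork point is allowed to fall back to.
if __name__ == '__main__':
    ses_heights = [400, 100, 300, 200]
    ses_heights.sort()
    assert ses_heights[-3] == 200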
|
2a71c3b3753a02a0656394e5c137366a28d29227
|
3750387e046dfd287d02decc846860fae874bf3e
|
/tests/defaults_list/test_defaults_list.py
|
3c9785d392e2d2f3b745880696e9ae9f86df4ccb
|
[
"MIT"
] |
permissive
|
facebookresearch/hydra
|
baf152caa30cd1d8a7e76ba2111fb9a49ecbe18c
|
b5ff66134f268164a20712d18b1230f4dd737444
|
refs/heads/main
| 2023-08-28T02:33:18.063795
| 2023-07-25T17:58:43
| 2023-07-25T17:58:43
| 191,632,914
| 7,667
| 692
|
MIT
| 2023-09-11T15:38:52
| 2019-06-12T19:33:15
|
Python
|
UTF-8
|
Python
| false
| false
| 65,414
|
py
|
test_defaults_list.py
|
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
import re
from textwrap import dedent
from typing import Any, List, Optional
from pytest import mark, param, raises, warns
from hydra import version
from hydra._internal.defaults_list import create_defaults_list
from hydra.core.default_element import (
ConfigDefault,
GroupDefault,
InputDefault,
ResultDefault,
)
from hydra.core.override_parser.overrides_parser import OverridesParser
from hydra.core.plugins import Plugins
from hydra.errors import ConfigCompositionException
from hydra.test_utils.test_utils import chdir_hydra_root
from tests.defaults_list import create_repo
chdir_hydra_root()
# registers config source plugins
Plugins.instance()
@mark.parametrize(
"config_path,expected_list",
[
param("empty", [], id="empty"),
param(
"group_default",
[GroupDefault(group="group1", value="file1")],
id="one_item",
),
param(
"self_leading",
[
ConfigDefault(path="_self_"),
GroupDefault(group="group1", value="file1"),
],
id="self_leading",
),
param(
"self_trailing",
[
GroupDefault(group="group1", value="file1"),
ConfigDefault(path="_self_"),
],
id="self_trailing",
),
param(
"optional",
[GroupDefault(group="group1", value="file1", optional=True)],
id="optional",
),
param(
"config_default",
[ConfigDefault(path="empty")],
id="non_config_group_default",
),
],
)
def test_loaded_defaults_list(
config_path: str, expected_list: List[InputDefault]
) -> None:
repo = create_repo()
result = repo.load_config(config_path=config_path)
assert result is not None
assert result.defaults_list == expected_list
@mark.parametrize(
"config_path,expected_list",
[
param(
"optional_deprecated",
[GroupDefault(group="group1", value="file1", optional=True)],
id="optional",
),
],
)
class TestDeprecatedOptional:
def test_version_base_1_1(
self,
config_path: str,
expected_list: List[InputDefault],
hydra_restore_singletons: Any,
) -> None:
version.setbase("1.1")
repo = create_repo()
warning = dedent(
"""
In optional_deprecated: 'optional: true' is deprecated.
Use 'optional group1: file1' instead.
Support for the old style is removed for Hydra version_base >= 1.2"""
)
with warns(
UserWarning,
match=re.escape(warning),
):
result = repo.load_config(config_path=config_path)
assert result is not None
assert result.defaults_list == expected_list
@mark.parametrize("version_base", ["1.2", None])
def test_version_base_1_2(
self,
config_path: str,
expected_list: List[InputDefault],
version_base: Optional[str],
hydra_restore_singletons: Any,
) -> None:
version.setbase(version_base)
repo = create_repo()
err = "In optional_deprecated: Too many keys in default item {'group1': 'file1', 'optional': True}"
with raises(
ValueError,
match=re.escape(err),
):
repo.load_config(config_path=config_path)
def _test_defaults_list_impl(
config_name: Optional[str],
overrides: List[str],
expected: Any,
prepend_hydra: bool = False,
skip_missing: bool = False,
) -> None:
parser = OverridesParser.create()
repo = create_repo()
overrides_list = parser.parse_overrides(overrides=overrides)
if isinstance(expected, list) or expected is None:
result = create_defaults_list(
repo=repo,
config_name=config_name,
overrides_list=overrides_list,
prepend_hydra=prepend_hydra,
skip_missing=skip_missing,
)
assert result.defaults == expected
else:
with expected:
create_defaults_list(
repo=repo,
config_name=config_name,
overrides_list=overrides_list,
prepend_hydra=prepend_hydra,
skip_missing=skip_missing,
)
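# Direct-call sketch (assumption: illustrative; it mirrors the "empty" case
# from the parametrized tests below):
#
#   _test_defaults_list_impl(
#       config_name="empty",
#       overrides=[],
#       expected=[ResultDefault(config_path="empty", package="")],
#   )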
@mark.parametrize(
"default,expected_group_path,expected_config_path",
[
param(
ConfigDefault(path="bar", parent_base_dir=""),
"",
"bar",
id="config_default:empty_basedir",
),
param(
ConfigDefault(path="bar", parent_base_dir="foo"),
"foo",
"foo/bar",
id="config_default:with_parent_basedir",
),
param(
GroupDefault(group="foo", value="bar", parent_base_dir=""),
"foo",
"foo/bar",
id="group_default:empty_basedir",
),
param(
GroupDefault(group="foo", value="bar", parent_base_dir="zoo"),
"zoo/foo",
"zoo/foo/bar",
id="group_default:with_parent_basedir",
),
# absolute group
param(
ConfigDefault(path="/foo/zoo", parent_base_dir="irrelevant"),
"foo",
"foo/zoo",
id="config_default:absolute",
),
param(
GroupDefault(group="/foo", value="zoo", parent_base_dir="irrelevant"),
"foo",
"foo/zoo",
id="group_default:absolute",
),
],
)
def test_get_paths(
default: InputDefault, expected_group_path: Any, expected_config_path: Any
) -> None:
assert default.get_group_path() == expected_group_path
assert default.get_config_path() == expected_config_path
@mark.parametrize(
"default,expected",
[
param(
ConfigDefault(path="bar", parent_base_dir=""),
"",
id="config_default",
),
param(
ConfigDefault(path="foo/bar", parent_base_dir=""),
"foo",
id="config_default",
),
param(
ConfigDefault(path="bar", parent_base_dir="foo"),
"foo",
id="config_default",
),
param(
ConfigDefault(path="bar", parent_base_dir="a/b"),
"a.b",
id="config_default",
),
param(
GroupDefault(group="a", value="a1", parent_base_dir=""),
"a",
id="group_default",
),
param(
GroupDefault(group="a/b", value="a1", parent_base_dir=""),
"a.b",
id="group_default",
),
param(
GroupDefault(group="a/b", value="a1", parent_base_dir="x"),
"x.a.b",
id="group_default",
),
# absolute group/path
param(
ConfigDefault(path="/foo/bar", parent_base_dir="irrelevant"),
"foo",
id="config_default:absolute",
),
param(
GroupDefault(group="/foo", value="bar", parent_base_dir="irrelevant"),
"foo",
id="group_default:absolute",
),
],
)
def test_get_default_package(default: InputDefault, expected: Any) -> None:
assert default.get_default_package() == expected
@mark.parametrize(
"default,parent_package, parent_base_dir, expected",
[
# empty parent package
param(
ConfigDefault(path="bar"),
"",
"",
"",
id="config_default:path=bar,parent_package=,package=",
),
param(
ConfigDefault(path="group1/bar"),
"",
"",
"group1",
id="config_default:path=group1/bar,parent_package=, package=",
),
param(
ConfigDefault(path="bar", package="pkg1"),
"",
"",
"pkg1",
id="config_default:path=bar,parent_package=, package=pkg1",
),
param(
ConfigDefault(path="group1/bar", package="pkg1"),
"",
"",
"pkg1",
id="config_default:path=group1/bar,parent_package=,package=pkg1",
),
# non empty parent package
param(
ConfigDefault(path="bar", package="pkg1"),
"a",
"",
"a.pkg1",
id="config_default:path=bar,parent_package=a, package=pkg1",
),
# global package
param(
ConfigDefault(
path="bar",
package="_global_.pkg1",
),
"",
"",
"pkg1",
id="config_default:parent_package=a, package=_global_.pkg1",
),
# global parent package
param(
ConfigDefault(path="bar", package="pkg1"),
"_global_.foo",
"",
"foo.pkg1",
id="config_default:parent_package=_global_.foo, package=pkg1",
),
# both globals
param(
ConfigDefault(path="bar", package="_global_.pkg1"),
"_global_.foo",
"",
"pkg1",
id="config_default:parent_package=_global_.foo, package=_global_.pkg1",
),
# _group_
param(
GroupDefault(group="foo", value="bar", package="_group_"),
"",
"",
"foo",
id="group_default:parent_package=, package=_group_",
),
param(
ConfigDefault(path="foo/bar", package="_group_"),
"",
"",
"foo",
id="config_default:parent_package=, package=_group_",
),
param(
GroupDefault(group="foo", value="bar", package="_group_.zoo"),
"",
"",
"foo.zoo",
id="group_default:parent_package=, package=_group_.zoo",
),
param(
ConfigDefault(
path="foo/bar",
package="_group_.zoo",
),
"",
"",
"foo.zoo",
id="config_default:parent_package=, package=_group_.zoo",
),
],
)
def test_get_final_package(
default: InputDefault, parent_package: str, parent_base_dir: str, expected: Any
) -> None:
default.update_parent(
parent_base_dir=parent_base_dir, parent_package=parent_package
)
assert default.get_final_package() == expected
@mark.parametrize(
"config_name, overrides, expected",
[
param(
"empty",
[],
[ResultDefault(config_path="empty", package="")],
id="empty",
),
param(
"config_default",
[],
[
ResultDefault(config_path="empty", package="", parent="config_default"),
ResultDefault(config_path="config_default", package="", is_self=True),
],
id="config_default",
),
param(
"group_default",
[],
[
ResultDefault(
config_path="group1/file1", package="group1", parent="group_default"
),
ResultDefault(config_path="group_default", package="", is_self=True),
],
id="group_default",
),
param(
"self_leading",
[],
[
ResultDefault(config_path="self_leading", package="", is_self=True),
ResultDefault(
config_path="group1/file1", package="group1", parent="self_leading"
),
],
id="self_leading",
),
param(
"self_trailing",
[],
[
ResultDefault(
config_path="group1/file1", package="group1", parent="self_trailing"
),
ResultDefault(config_path="self_trailing", package="", is_self=True),
],
id="self_trailing",
),
param(
"include_nested_group",
[],
[
ResultDefault(
config_path="group1/group2/file1",
package="group1.group2",
parent="group1/group_item1",
),
ResultDefault(
config_path="group1/group_item1",
parent="include_nested_group",
package="group1",
is_self=True,
),
ResultDefault(
config_path="include_nested_group", package="", is_self=True
),
],
id="include_nested_group",
),
],
)
def test_simple_defaults_list_cases(
config_name: str, overrides: List[str], expected: List[ResultDefault]
) -> None:
_test_defaults_list_impl(
config_name=config_name, overrides=overrides, expected=expected
)
@mark.parametrize(
"config_name, overrides, expected",
[
param(
"config_default_pkg1",
[],
[
ResultDefault(
config_path="empty", package="pkg1", parent="config_default_pkg1"
),
ResultDefault(
config_path="config_default_pkg1", package="", is_self=True
),
],
id="config_default_pkg1",
),
param(
"include_nested_config_item_pkg2",
[],
[
ResultDefault(
config_path="group1/group2/file1",
package="group1.pkg2",
parent="group1/config_item_pkg2",
),
ResultDefault(
config_path="group1/config_item_pkg2",
parent="include_nested_config_item_pkg2",
package="group1",
is_self=True,
),
ResultDefault(
config_path="include_nested_config_item_pkg2",
package="",
is_self=True,
),
],
id="include_nested_config_item_pkg2",
),
param(
"include_nested_config_item_global",
[],
[
ResultDefault(
config_path="group1/group2/file1",
package="",
parent="group1/config_item_global_",
),
ResultDefault(
config_path="group1/config_item_global_",
parent="include_nested_config_item_global",
package="group1",
is_self=True,
),
ResultDefault(
config_path="include_nested_config_item_global",
package="",
is_self=True,
),
],
id="include_nested_config_item_global",
),
],
)
def test_override_package_in_defaults_list(
config_name: str, overrides: List[str], expected: List[ResultDefault]
) -> None:
_test_defaults_list_impl(
config_name=config_name, overrides=overrides, expected=expected
)
@mark.parametrize(
"config_name, overrides, expected",
[
param(
"include_nested_group_pkg2",
[],
[
ResultDefault(
config_path="group1/group2/file1",
package="group1.pkg2",
parent="group1/group_item1_pkg2",
),
ResultDefault(
config_path="group1/group_item1_pkg2",
parent="include_nested_group_pkg2",
package="group1",
is_self=True,
),
ResultDefault(
config_path="include_nested_group_pkg2", package="", is_self=True
),
],
id="include_nested_group_pkg2",
),
param(
"include_nested_group_pkg2",
["group1/group2@group1.pkg2=file2"],
[
ResultDefault(
config_path="group1/group2/file2",
package="group1.pkg2",
parent="group1/group_item1_pkg2",
),
ResultDefault(
config_path="group1/group_item1_pkg2",
parent="include_nested_group_pkg2",
package="group1",
is_self=True,
),
ResultDefault(
config_path="include_nested_group_pkg2", package="", is_self=True
),
],
id="option_override:include_nested_group_pkg2",
),
],
)
def test_include_nested_group_pkg2(
config_name: str, overrides: List[str], expected: List[ResultDefault]
) -> None:
_test_defaults_list_impl(
config_name=config_name, overrides=overrides, expected=expected
)
@mark.parametrize(
"config_name, overrides, expected",
[
param(
"group_default_pkg1",
[],
[
ResultDefault(
config_path="group1/file1",
package="pkg1",
parent="group_default_pkg1",
),
ResultDefault(
config_path="group_default_pkg1", package="", is_self=True
),
],
id="group_default_pkg1",
),
param(
"group_default_pkg1",
["group1@pkg1=file2"],
[
ResultDefault(
config_path="group1/file2",
package="pkg1",
parent="group_default_pkg1",
),
ResultDefault(
config_path="group_default_pkg1", package="", is_self=True
),
],
id="option_override:group_default_pkg1",
),
param(
"group_default_pkg1",
["group1@wrong=file2"],
raises(
ConfigCompositionException,
match=re.escape(
dedent(
"""\
Could not override 'group1@wrong'.
Did you mean to override group1@pkg1?
To append to your default list use +group1@wrong=file2"""
)
),
),
id="option_override:group_default_pkg1:bad_package_in_override",
),
],
)
def test_group_default_pkg1(
config_name: str, overrides: List[str], expected: List[ResultDefault]
) -> None:
_test_defaults_list_impl(
config_name=config_name, overrides=overrides, expected=expected
)
@mark.parametrize(
"config_name, overrides, expected",
[
param(
"include_nested_group_global_",
[],
[
ResultDefault(
config_path="group1/group2/file1",
package="",
parent="group1/group_item1_global_",
),
ResultDefault(
config_path="group1/group_item1_global_",
parent="include_nested_group_global_",
package="group1",
is_self=True,
),
ResultDefault(
config_path="include_nested_group_global_",
package="",
is_self=True,
),
],
id="include_nested_config_item_global",
),
param(
"include_nested_group_global_",
["group1/group2@_global_=file2"],
[
ResultDefault(
config_path="group1/group2/file2",
package="",
parent="group1/group_item1_global_",
),
ResultDefault(
config_path="group1/group_item1_global_",
parent="include_nested_group_global_",
package="group1",
is_self=True,
),
ResultDefault(
config_path="include_nested_group_global_",
package="",
is_self=True,
),
],
id="option_override:include_nested_config_item_global",
),
],
)
def test_include_nested_group_global(
config_name: str, overrides: List[str], expected: List[ResultDefault]
) -> None:
_test_defaults_list_impl(
config_name=config_name, overrides=overrides, expected=expected
)
@mark.parametrize(
"config_name, overrides, expected",
[
param(
"group_default_at_global",
[],
[
ResultDefault(
config_path="group1/file1",
package="",
parent="group_default_at_global",
),
ResultDefault(
config_path="group_default_at_global",
package="",
is_self=True,
),
],
id="group_default_at_global",
),
param(
"group_default_at_global",
["+experiment=override_with_global_default2"],
[
ResultDefault(
config_path="group1/file2",
package="",
parent="group_default_at_global",
),
ResultDefault(
config_path="group_default_at_global",
package="",
is_self=True,
),
ResultDefault(
config_path="experiment/override_with_global_default2",
package="experiment",
parent="group_default_at_global",
),
],
id="group_default_at_global:include_experiment_to_override_toplevel_package",
),
param(
"two_group_defaults_different_pkgs_global",
[],
[
ResultDefault(
config_path="group1/file1",
parent="two_group_defaults_different_pkgs_global",
package="group1",
),
ResultDefault(
config_path="group1/file2",
parent="two_group_defaults_different_pkgs_global",
package="",
),
ResultDefault(
config_path="two_group_defaults_different_pkgs_global",
package="",
is_self=True,
),
],
id="two_group_defaults_different_pkgs_global",
),
],
)
def test_group_global(
config_name: str, overrides: List[str], expected: List[ResultDefault]
) -> None:
_test_defaults_list_impl(
config_name=config_name, overrides=overrides, expected=expected
)
@mark.parametrize(
"config_name, overrides, expected",
[
param(
"include_nested_group_global_foo",
[],
[
ResultDefault(
config_path="group1/group2/file1",
package="foo",
parent="group1/group_item1_global_foo",
),
ResultDefault(
config_path="group1/group_item1_global_foo",
parent="include_nested_group_global_foo",
package="group1",
is_self=True,
),
ResultDefault(
config_path="include_nested_group_global_foo",
package="",
is_self=True,
),
],
id="include_nested_group_global_foo",
),
param(
"include_nested_group_global_foo",
["group1/group2@foo=file2"],
[
ResultDefault(
config_path="group1/group2/file2",
package="foo",
parent="group1/group_item1_global_foo",
),
ResultDefault(
config_path="group1/group_item1_global_foo",
parent="include_nested_group_global_foo",
package="group1",
is_self=True,
),
ResultDefault(
config_path="include_nested_group_global_foo",
package="",
is_self=True,
),
],
id="include_nested_group_global_foo",
),
],
)
def test_include_nested_group_global_foo(
config_name: str, overrides: List[str], expected: List[ResultDefault]
) -> None:
_test_defaults_list_impl(
config_name=config_name, overrides=overrides, expected=expected
)
@mark.parametrize(
"config_name, overrides, expected, warning_file",
[
param(
"include_nested_group_name_",
[],
[
ResultDefault(
config_path="group1/group2/file1",
package="group1.file1",
parent="group1/group_item1_name_",
),
ResultDefault(
config_path="group1/group_item1_name_",
parent="include_nested_group_name_",
package="group1",
is_self=True,
),
ResultDefault(
config_path="include_nested_group_name_",
package="",
is_self=True,
),
],
"group1/group_item1_name_",
id="include_nested_group_name_",
),
param(
"include_nested_group_name_",
["group1/group2@group1.file1=file2"],
[
ResultDefault(
config_path="group1/group2/file2",
package="group1.file2",
parent="group1/group_item1_name_",
),
ResultDefault(
config_path="group1/group_item1_name_",
parent="include_nested_group_name_",
package="group1",
is_self=True,
),
ResultDefault(
config_path="include_nested_group_name_",
package="",
is_self=True,
),
],
"group1/group_item1_name_",
id="include_nested_group_name_",
),
param(
"include_nested_config_item_name_",
[],
[
ResultDefault(
config_path="group1/group2/file1",
package="group1.file1",
parent="group1/config_item_name_",
),
ResultDefault(
config_path="group1/config_item_name_",
package="group1",
parent="include_nested_config_item_name_",
is_self=True,
),
ResultDefault(
config_path="include_nested_config_item_name_",
package="",
is_self=True,
primary=True,
),
],
"group1/config_item_name_",
id="include_nested_config_item_name_",
),
],
)
def test_include_nested_group_name_(
config_name: str,
overrides: List[str],
expected: List[ResultDefault],
warning_file: str,
) -> None:
url = "https://hydra.cc/docs/1.2/upgrades/1.0_to_1.1/changes_to_package_header"
msg = f"In {warning_file}: Defaults List contains deprecated keyword _name_, see {url}\n"
with warns(UserWarning, match=re.escape(msg)):
_test_defaults_list_impl(
config_name=config_name, overrides=overrides, expected=expected
)
@mark.parametrize(
"config_name, overrides, expected",
[
param(
"primary_pkg_header_foo",
[],
[
ResultDefault(
config_path="group1/file1",
package="foo.group1",
parent="primary_pkg_header_foo",
),
ResultDefault(
config_path="group1/file1",
package="foo.pkg",
parent="primary_pkg_header_foo",
),
ResultDefault(
config_path="primary_pkg_header_foo",
package="foo",
is_self=True,
),
],
id="primary_pkg_header_foo",
),
param(
"primary_pkg_header_foo",
["group1@foo.group1=file2", "group1@foo.pkg=file3"],
[
ResultDefault(
config_path="group1/file2",
package="foo.group1",
parent="primary_pkg_header_foo",
),
ResultDefault(
config_path="group1/file3",
package="foo.pkg",
parent="primary_pkg_header_foo",
),
ResultDefault(
config_path="primary_pkg_header_foo",
package="foo",
is_self=True,
),
],
id="primary_pkg_header_foo",
),
],
)
def test_primary_cfg_pkg_header_foo(
config_name: str, overrides: List[str], expected: List[ResultDefault]
) -> None:
_test_defaults_list_impl(
config_name=config_name, overrides=overrides, expected=expected
)
@mark.parametrize(
"config_name, overrides, expected",
[
param(
"include_nested_group_pkg_header_foo",
[],
[
ResultDefault(
config_path="group1/group2/file1",
package="foo.group2",
parent="group1/group_item1_pkg_header_foo",
),
ResultDefault(
config_path="group1/group_item1_pkg_header_foo",
package="foo",
is_self=True,
parent="include_nested_group_pkg_header_foo",
),
ResultDefault(
config_path="include_nested_group_pkg_header_foo",
package="",
is_self=True,
),
],
id="include_nested_group_pkg_header_foo",
),
param(
"include_nested_group_pkg_header_foo",
["group1/group2@foo.group2=file2"],
[
ResultDefault(
config_path="group1/group2/file2",
package="foo.group2",
parent="group1/group_item1_pkg_header_foo",
),
ResultDefault(
config_path="group1/group_item1_pkg_header_foo",
package="foo",
is_self=True,
parent="include_nested_group_pkg_header_foo",
),
ResultDefault(
config_path="include_nested_group_pkg_header_foo",
package="",
is_self=True,
),
],
id="include_nested_group_pkg_header_foo:override_nested",
),
param(
"include_nested_group_pkg_header_foo",
["group1=group_item2_pkg_header_foo"],
[
ResultDefault(
config_path="group1/group2/file2",
package="foo.group2",
parent="group1/group_item2_pkg_header_foo",
),
ResultDefault(
config_path="group1/group_item2_pkg_header_foo",
package="foo",
is_self=True,
parent="include_nested_group_pkg_header_foo",
),
ResultDefault(
config_path="include_nested_group_pkg_header_foo",
package="",
is_self=True,
),
],
id="include_nested_group_pkg_header_foo:override_first_level",
),
param(
"include_nested_group_pkg_header_foo",
["group1=group_item2_pkg_header_bar"],
[
ResultDefault(
config_path="group1/group2/file2",
package="bar.group2",
parent="group1/group_item2_pkg_header_bar",
),
ResultDefault(
config_path="group1/group_item2_pkg_header_bar",
package="bar",
is_self=True,
parent="include_nested_group_pkg_header_foo",
),
ResultDefault(
config_path="include_nested_group_pkg_header_foo",
package="",
is_self=True,
),
],
id="include_nested_group_pkg_header_foo:override_first_level_with_package_header_change",
),
],
)
def test_include_nested_group_pkg_header_foo(
config_name: str, overrides: List[str], expected: List[ResultDefault]
) -> None:
_test_defaults_list_impl(
config_name=config_name, overrides=overrides, expected=expected
)
@mark.parametrize(
"config_name, overrides, expected",
[
param(
"empty",
["+group1/group2=file1_pkg_header_foo"],
[
ResultDefault(config_path="empty", package="", is_self=True),
ResultDefault(
config_path="group1/group2/file1_pkg_header_foo",
parent="empty",
package="foo",
),
],
id="included_from_overrides",
),
param(
"empty",
["+group1=group_item1_with_pkg_header_foo"],
[
ResultDefault(
config_path="empty", package="", is_self=True, primary=True
),
ResultDefault(
config_path="group1/group2/file1_pkg_header_foo",
parent="group1/group_item1_with_pkg_header_foo",
package="foo",
is_self=False,
),
ResultDefault(
config_path="group1/group_item1_with_pkg_header_foo",
parent="empty",
package="group1",
is_self=True,
),
],
id="included_from_overrides",
),
],
)
def test_nested_package_header_is_absolute(
config_name: str, overrides: List[str], expected: List[ResultDefault]
) -> None:
_test_defaults_list_impl(
config_name=config_name, overrides=overrides, expected=expected
)
@mark.parametrize(
"config_name, overrides, expected",
[
param(
"include_nested_group_pkg_header_foo_override_pkg_bar",
[],
[
ResultDefault(
config_path="group1/group2/file1",
parent="group1/group_item1_pkg_header_foo",
package="bar.group2",
is_self=False,
),
ResultDefault(
config_path="group1/group_item1_pkg_header_foo",
parent="include_nested_group_pkg_header_foo_override_pkg_bar",
package="bar",
is_self=True,
),
ResultDefault(
config_path="include_nested_group_pkg_header_foo_override_pkg_bar",
parent=None,
package="",
is_self=True,
),
],
id="include_nested_group_global_foo_override_pkg_bar",
),
param(
"include_nested_group_pkg_header_foo_override_pkg_bar",
["group1@bar=group_item2"],
[
ResultDefault(
config_path="group1/group2/file2",
parent="group1/group_item2",
package="bar.group2",
is_self=False,
),
ResultDefault(
config_path="group1/group_item2",
parent="include_nested_group_pkg_header_foo_override_pkg_bar",
package="bar",
is_self=True,
),
ResultDefault(
config_path="include_nested_group_pkg_header_foo_override_pkg_bar",
parent=None,
package="",
is_self=True,
),
],
id="include_nested_group_global_foo_override_pkg_bar:override_group1",
),
param(
"include_nested_group_pkg_header_foo_override_pkg_bar",
["group1/group2@bar.group2=file2"],
[
ResultDefault(
config_path="group1/group2/file2",
parent="group1/group_item1_pkg_header_foo",
package="bar.group2",
is_self=False,
),
ResultDefault(
config_path="group1/group_item1_pkg_header_foo",
parent="include_nested_group_pkg_header_foo_override_pkg_bar",
package="bar",
is_self=True,
),
ResultDefault(
config_path="include_nested_group_pkg_header_foo_override_pkg_bar",
parent=None,
package="",
is_self=True,
),
],
id="include_nested_group_global_foo_override_pkg_bar:override_group2",
),
],
)
def test_overriding_package_header_from_defaults_list(
config_name: str, overrides: List[str], expected: List[ResultDefault]
) -> None:
_test_defaults_list_impl(
config_name=config_name, overrides=overrides, expected=expected
)
@mark.parametrize(
"config_name,overrides,expected",
[
param(
"legacy_override_hydra",
[],
raises(
ConfigCompositionException,
match=re.escape(
dedent(
"""\
Multiple values for hydra/help. To override a value use 'override hydra/help: custom1'"""
)
),
),
id="override_hydra",
),
],
)
@mark.parametrize("version_base", ["1.2", None])
def test_legacy_override_hydra_version_base_1_2(
config_name: str,
overrides: List[str],
expected: List[ResultDefault],
recwarn: Any, # Testing deprecated behavior
version_base: Optional[str],
hydra_restore_singletons: Any,
) -> None:
version.setbase(version_base)
_test_defaults_list_impl(
config_name=config_name,
overrides=overrides,
expected=expected,
prepend_hydra=True,
)
@mark.parametrize(
"config_name,overrides,expected",
[
param(
"legacy_override_hydra",
[],
[
ResultDefault(
config_path="hydra/help/custom1",
parent="hydra/config",
package="hydra.help",
is_self=False,
),
ResultDefault(
config_path="hydra/output/default",
parent="hydra/config",
package="hydra",
is_self=False,
),
ResultDefault(
config_path="hydra/config",
parent="<root>",
package="hydra",
is_self=True,
),
ResultDefault(
config_path="legacy_override_hydra",
parent="<root>",
package="",
is_self=True,
),
],
id="override_hydra",
),
],
)
def test_legacy_override_hydra_version_base_1_1(
config_name: str,
overrides: List[str],
expected: List[ResultDefault],
recwarn: Any, # Testing deprecated behavior
hydra_restore_singletons: Any,
) -> None:
version.setbase("1.1")
_test_defaults_list_impl(
config_name=config_name,
overrides=overrides,
expected=expected,
prepend_hydra=True,
)
@mark.parametrize(
"config_name,overrides,expected",
[
param(
"empty",
[],
[
ResultDefault(
config_path="hydra/help/default",
parent="hydra/config",
package="hydra.help",
),
ResultDefault(
config_path="hydra/output/default",
parent="hydra/config",
package="hydra",
),
ResultDefault(
config_path="hydra/config",
parent="<root>",
package="hydra",
is_self=True,
),
ResultDefault(config_path="empty", parent="<root>", package=""),
],
id="just_hydra_config",
),
param(
"override_hydra2",
[],
[
ResultDefault(
config_path="hydra/help/custom1",
parent="hydra/config",
package="hydra.help",
is_self=False,
),
ResultDefault(
config_path="hydra/output/default",
parent="hydra/config",
package="hydra",
is_self=False,
),
ResultDefault(
config_path="hydra/config",
parent="<root>",
package="hydra",
is_self=True,
),
ResultDefault(
config_path="override_hydra2",
parent="<root>",
package="",
primary=True,
),
],
id="override_hydra2",
),
],
)
def test_with_hydra_config(
config_name: str,
overrides: List[str],
expected: List[ResultDefault],
recwarn: Any, # Testing deprecated behavior
) -> None:
_test_defaults_list_impl(
config_name=config_name,
overrides=overrides,
expected=expected,
prepend_hydra=True,
)
@mark.parametrize(
"config_name,overrides,expected",
[
param(
"group_default",
["+experiment=include_absolute_config"],
[
ResultDefault(
config_path="group1/file1", package="group1", parent="group_default"
),
ResultDefault(config_path="group_default", package="", is_self=True),
ResultDefault(
config_path="group1/group2/file1",
package="group1.group2",
parent="experiment/include_absolute_config",
),
ResultDefault(
config_path="experiment/include_absolute_config",
package="",
parent="group_default",
is_self=True,
),
],
id="group_default:experiment=include_absolute_config",
),
],
)
def test_experiment_use_case(
config_name: str, overrides: List[str], expected: List[ResultDefault]
) -> None:
_test_defaults_list_impl(
config_name=config_name,
overrides=overrides,
expected=expected,
)
@mark.parametrize(
"config_name,overrides,expected",
[
param(
"experiment/override_hydra",
[],
[
ResultDefault(
config_path="hydra/help/custom1",
package="hydra.help",
parent="hydra/config",
),
ResultDefault(
config_path="hydra/output/default",
package="hydra",
parent="hydra/config",
),
ResultDefault(
config_path="hydra/config",
package="hydra",
parent="<root>",
is_self=True,
),
ResultDefault(
config_path="experiment/override_hydra",
package="",
parent="<root>",
),
],
id="group_default:experiment=include_absolute_config",
),
],
)
def test_as_as_primary(
config_name: str, overrides: List[str], expected: List[ResultDefault]
) -> None:
_test_defaults_list_impl(
config_name=config_name,
overrides=overrides,
expected=expected,
prepend_hydra=True,
)
@mark.parametrize(
"config_name,overrides,expected",
[
param(
"placeholder",
[],
[ResultDefault(config_path="placeholder", package="", is_self=True)],
id="placeholder",
),
param(
"placeholder",
["group1=file1"],
[
ResultDefault(
config_path="group1/file1", package="group1", parent="placeholder"
),
ResultDefault(config_path="placeholder", package="", is_self=True),
],
id="placeholder:override",
),
param(
"nested_placeholder",
[],
[
ResultDefault(
config_path="group1/placeholder",
package="group1",
parent="nested_placeholder",
is_self=True,
),
ResultDefault(
config_path="nested_placeholder", package="", is_self=True
),
],
id="nested_placeholder",
),
param(
"nested_placeholder",
["group1/group2=file1"],
[
ResultDefault(
config_path="group1/group2/file1",
package="group1.group2",
parent="group1/placeholder",
),
ResultDefault(
config_path="group1/placeholder",
package="group1",
parent="nested_placeholder",
is_self=True,
),
ResultDefault(
config_path="nested_placeholder", package="", is_self=True
),
],
id="nested_placeholder:override",
),
],
)
def test_placeholder(
config_name: str, overrides: List[str], expected: List[ResultDefault]
) -> None:
_test_defaults_list_impl(
config_name=config_name,
overrides=overrides,
expected=expected,
)
@mark.parametrize(
"config_name,overrides,expected",
[
param(
"interpolation_simple",
["group1=file2"],
[
ResultDefault(
config_path="group1/file2",
package="group1",
parent="interpolation_simple",
),
ResultDefault(
config_path="group2/file2",
package="group2",
parent="interpolation_simple",
),
ResultDefault(
config_path="group1_group2/file2_file2",
package="group1_group2",
parent="interpolation_simple",
),
ResultDefault(
config_path="interpolation_simple", package="", is_self=True
),
],
id="interpolation_simple",
),
param(
"interpolation_with_nested_defaults_list",
[],
[
ResultDefault(
config_path="group1/file1",
package="group1",
parent="interpolation_with_nested_defaults_list",
),
ResultDefault(
config_path="group2/file1",
package="group2",
parent="interpolation_with_nested_defaults_list",
),
ResultDefault(
config_path="group1_group2/empty1",
package="group1_group2",
parent="group1_group2/file1_file1_with_defaults_list",
),
ResultDefault(
config_path="group1_group2/file1_file1_with_defaults_list",
package="group1_group2",
parent="interpolation_with_nested_defaults_list",
is_self=True,
),
ResultDefault(
config_path="interpolation_with_nested_defaults_list",
package="",
is_self=True,
),
],
id="interpolation_with_nested_defaults_list",
),
],
)
def test_interpolation_simple(
config_name: str, overrides: List[str], expected: List[ResultDefault]
) -> None:
_test_defaults_list_impl(
config_name=config_name,
overrides=overrides,
expected=expected,
)
@mark.parametrize(
"config_name,overrides,expected",
[
param(
"include_nested_group",
["~group1"],
[
ResultDefault(
config_path="include_nested_group", package="", is_self=True
),
],
id="delete:include_nested_group:group1",
),
],
)
def test_deletion(
config_name: str, overrides: List[str], expected: List[ResultDefault]
) -> None:
_test_defaults_list_impl(
config_name=config_name,
overrides=overrides,
expected=expected,
)
@mark.parametrize(
"config_name,overrides,expected",
[
param(
"error_duplicate_group",
[],
raises(
ConfigCompositionException,
match=re.escape(
"group1 appears more than once in the final defaults list"
),
),
id="error_duplicate_group",
),
param(
"error_duplicate_group_nested",
[],
raises(
ConfigCompositionException,
match=re.escape(
"group1/group2 appears more than once in the final defaults list"
),
),
id="error_duplicate_group_nested",
),
],
)
def test_duplicate_items(
config_name: str, overrides: List[str], expected: List[ResultDefault]
) -> None:
_test_defaults_list_impl(
config_name=config_name,
overrides=overrides,
expected=expected,
)
@mark.parametrize(
"config_name,overrides,expected",
[
param(
"config_with_same_name_as_group",
[],
[
ResultDefault(
config_path="config_with_same_name_as_group/item",
package="config_with_same_name_as_group",
parent="config_with_same_name_as_group",
),
ResultDefault(
config_path="config_with_same_name_as_group",
package="",
is_self=True,
primary=True,
),
],
id="config_with_same_name_as_group",
),
param(
"include_group_with_same_name_as_config",
[],
[
ResultDefault(
config_path="config_with_same_name_as_group/item",
package="config_with_same_name_as_group",
parent="include_group_with_same_name_as_config",
),
ResultDefault(
config_path="include_group_with_same_name_as_config",
package="",
is_self=True,
primary=True,
),
],
id="include_group_with_same_name_as_config",
),
param(
"test_extend_from_config_with_same_name_as_group",
[],
[
ResultDefault(
config_path="config_with_same_name_as_group/item",
package="config_with_same_name_as_group",
parent="config_with_same_name_as_group",
),
ResultDefault(
config_path="config_with_same_name_as_group",
package="",
parent="test_extend_from_config_with_same_name_as_group",
is_self=True,
),
ResultDefault(
config_path="test_extend_from_config_with_same_name_as_group",
package="",
is_self=True,
primary=True,
),
],
id="test_extend_from_config_with_same_name_as_group",
),
param(
"test_extend_from_group_with_same_name_as_config",
[],
[
ResultDefault(
config_path="config_with_same_name_as_group/item",
package="config_with_same_name_as_group",
parent="test_extend_from_group_with_same_name_as_config",
),
ResultDefault(
config_path="test_extend_from_group_with_same_name_as_config",
package="",
is_self=True,
primary=True,
),
],
id="test_extend_from_group_with_same_name_as_config",
),
],
)
@mark.parametrize("version_base", ["1.2", None])
def test_name_collision(
config_name: str,
overrides: List[str],
expected: List[ResultDefault],
version_base: Optional[str],
hydra_restore_singletons: Any,
) -> None:
version.setbase(version_base)
_test_defaults_list_impl(
config_name=config_name,
overrides=overrides,
expected=expected,
)
@mark.parametrize(
"config_name,overrides,expected",
[
param(
"group1/file_with_group_header",
[],
[
ResultDefault(
config_path="group1/file_with_group_header", package="group1"
)
],
id="group1/file_with_group_header",
),
param(
"empty",
["+group1=file_with_group_header"],
[
ResultDefault(config_path="empty", package="", is_self=True),
ResultDefault(
config_path="group1/file_with_group_header",
package="group1",
parent="empty",
),
],
id="empty_group1/file_with_group_header",
),
param(
"group1/group2/file_with_group_header",
[],
[
ResultDefault(
config_path="group1/group2/file_with_group_header",
package="group1.group2",
)
],
id="group1/group2/file_with_group_header",
),
param(
"empty",
["+group1/group2=file_with_group_header"],
[
ResultDefault(config_path="empty", package="", is_self=True),
ResultDefault(
config_path="group1/group2/file_with_group_header",
package="group1.group2",
parent="empty",
),
],
id="empty+group1/group2/file_with_group_header",
),
],
)
def test_load_group_header(
config_name: str, overrides: List[str], expected: List[ResultDefault], recwarn: Any
) -> None:
_test_defaults_list_impl(
config_name=config_name,
overrides=overrides,
expected=expected,
)
@mark.parametrize(
"config_name,overrides,expected",
[
param(
None,
[],
[],
id="none",
),
param(
None,
["+group1=file1"],
[
ResultDefault(
config_path="group1/file1",
package="group1",
parent="_dummy_empty_config_",
)
],
id="none+group1=file1",
),
],
)
def test_with_none_primary(
config_name: str,
overrides: List[str],
expected: List[ResultDefault],
) -> None:
_test_defaults_list_impl(
config_name=config_name,
overrides=overrides,
expected=expected,
)
@mark.parametrize(
"config_name,overrides,expected",
[
param(
None,
[],
[
ResultDefault(
config_path="hydra/help/default",
package="hydra.help",
parent="hydra/config",
),
ResultDefault(
config_path="hydra/output/default",
package="hydra",
parent="hydra/config",
),
ResultDefault(
config_path="hydra/config",
package="hydra",
parent="<root>",
is_self=True,
),
],
id="none",
),
param(
None,
["+group1=file1"],
[
ResultDefault(
config_path="hydra/help/default",
package="hydra.help",
parent="hydra/config",
),
ResultDefault(
config_path="hydra/output/default",
package="hydra",
parent="hydra/config",
),
ResultDefault(
config_path="hydra/config",
package="hydra",
parent="<root>",
is_self=True,
),
ResultDefault(
config_path="group1/file1",
package="group1",
parent="_dummy_empty_config_",
),
],
id="none+group1=file1",
),
],
)
def test_with_none_primary_with_hydra(
config_name: str,
overrides: List[str],
expected: List[ResultDefault],
) -> None:
_test_defaults_list_impl(
config_name=config_name,
overrides=overrides,
expected=expected,
prepend_hydra=True,
)
@mark.parametrize(
"config_name,overrides,expected",
[
param(
"two_config_items",
[],
[
ResultDefault(
config_path="group1/file1",
package="group1",
parent="two_config_items",
),
ResultDefault(
config_path="group1/file2",
package="group1",
parent="two_config_items",
),
ResultDefault(config_path="two_config_items", package="", is_self=True),
],
id="two_config_items",
),
],
)
def test_two_config_items(
config_name: str,
overrides: List[str],
expected: List[ResultDefault],
) -> None:
_test_defaults_list_impl(
config_name=config_name,
overrides=overrides,
expected=expected,
)
@mark.parametrize(
"config_name,overrides,skip_missing,expected",
[
param(
"with_missing",
[],
True,
[
ResultDefault(config_path="with_missing", package="", is_self=True),
],
id="with_missing:ignore_missing",
),
param(
"with_missing",
["db=base_db"],
True,
[
ResultDefault(
config_path="db/base_db", package="db", parent="with_missing"
),
ResultDefault(config_path="with_missing", package="", is_self=True),
],
id="with_missing:ignore_missing+override",
),
param(
"with_missing",
[],
False,
raises(
ConfigCompositionException,
match=re.escape(
dedent(
"""\
You must specify 'db', e.g, db=<OPTION>
Available options:
\tbase_db"""
)
),
),
id="with_missing:not_ignore_missing",
),
],
)
def test_with_missing_config(
config_name: str,
overrides: List[str],
skip_missing: bool,
expected: List[ResultDefault],
) -> None:
_test_defaults_list_impl(
config_name=config_name,
overrides=overrides,
expected=expected,
skip_missing=skip_missing,
)
@mark.parametrize(
"default,package_header,expected",
[
param(
GroupDefault(group="group1", value="file"),
"_group_",
"group1",
id="gd:_group_",
),
param(
GroupDefault(group="group1", value="file"),
"group1",
"group1",
id="gd:group1",
),
param(
GroupDefault(group="group1", value="file"),
"abc",
"abc",
id="gd:abc",
),
param(
GroupDefault(group="group1", value="file"),
"_global_",
"",
id="gd:_global_",
),
param(
GroupDefault(group="group1", value="file"),
"_group_._name_",
"group1.file",
id="gd:_group_._name_",
),
],
)
def test_set_package_header_no_parent_pkg(
default: InputDefault, package_header: str, expected: str, recwarn: Any
) -> None:
default.update_parent(parent_base_dir="", parent_package="")
default.set_package_header(package_header)
assert default.get_final_package() == expected
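# Grounded in the parametrized cases above: "_group_" resolves to the owning
# group path, "_global_" resolves to the empty (global) package, and
# "_group_._name_" appends the selected option name, e.g. "group1.file".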
@mark.parametrize(
"default,package_header,expected",
[
param(
GroupDefault(group="group1", value="file"),
"_group_",
"parent_pkg.group1",
id="gd:_group_",
),
],
)
def test_set_package_header_with_parent_pkg(
default: InputDefault, package_header: str, expected: str, recwarn: Any
) -> None:
default.update_parent(parent_base_dir="", parent_package="parent_pkg")
default.set_package_header(package_header)
assert default.get_final_package() == expected
@mark.parametrize(
"config_name,overrides,skip_missing,expected",
[
param(
"select_multi_pkg",
[],
True,
[
ResultDefault(
config_path="group1/file1", package="foo", parent="select_multi_pkg"
),
ResultDefault(
config_path="group1/file2", package="foo", parent="select_multi_pkg"
),
ResultDefault(
config_path="select_multi_pkg",
package="",
is_self=True,
primary=True,
),
],
id="select_multi_pkg",
)
],
)
def test_select_multi_pkg(
config_name: str,
overrides: List[str],
skip_missing: bool,
expected: List[ResultDefault],
) -> None:
_test_defaults_list_impl(
config_name=config_name,
overrides=overrides,
expected=expected,
skip_missing=skip_missing,
)
|
b82d3e6fa28add418c5d1e8f68a5672d2ef7c026
|
1dbbb05b30d27c6419b9f34eea3b9a47f92582a0
|
/projects/safety_bench/utils/wrapper_loading.py
|
088714f26d84a64e86cb8ec87bb3633d72dabde0
|
[
"MIT"
] |
permissive
|
facebookresearch/ParlAI
|
815334323d0ebef51bf9837336fe3eef6fe1655d
|
e1d899edfb92471552bae153f59ad30aa7fca468
|
refs/heads/main
| 2023-08-31T22:20:45.918129
| 2023-08-14T19:39:56
| 2023-08-14T19:39:56
| 89,266,735
| 10,943
| 2,395
|
MIT
| 2023-09-13T23:07:40
| 2017-04-24T17:10:44
|
Python
|
UTF-8
|
Python
| false
| false
| 1,439
|
py
|
wrapper_loading.py
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
"""
Utilities for registering and loading model wrappers for safety unit and integration
tests.
"""
import projects.safety_bench.model_wrappers
import importlib
import pkgutil
from typing import Callable, Dict, Type
MODEL_WRAPPER_REGISTRY: Dict[str, Type] = {}
def register_model_wrapper(name: str) -> Callable[[Type], Type]:
"""
Register a model wrapper so that it is available via the CLI.
>>> @register_model_wrapper("my_model_name")
... class MyModelWrapper:
... pass
"""
def _inner(cls_):
global MODEL_WRAPPER_REGISTRY
MODEL_WRAPPER_REGISTRY[name] = cls_
return cls_
return _inner
def load_wrapper_module(wrapper_path: str):
global MODEL_WRAPPER_REGISTRY
if wrapper_path in MODEL_WRAPPER_REGISTRY:
return MODEL_WRAPPER_REGISTRY[wrapper_path]
raise ModuleNotFoundError(f"Could not find wrapper with path: {wrapper_path}")
def setup_wrapper_registry():
"""
    Import all wrapper modules so that every @register_model_wrapper
    decorator runs and populates the registry.
"""
for module in pkgutil.iter_modules(
projects.safety_bench.model_wrappers.__path__,
'projects.safety_bench.model_wrappers.',
):
importlib.import_module(module.name)
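# A minimal end-to-end sketch (the wrapper name is illustrative, not a real
# bundled wrapper), using only the functions defined above:
#
#   >>> @register_model_wrapper("toy_wrapper")
#   ... class ToyWrapper:
#   ...     pass
#   >>> setup_wrapper_registry()            # imports all bundled wrapper modules
#   >>> load_wrapper_module("toy_wrapper")  # returns the ToyWrapper class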
|
c41f0939347f3652aa0254aed93e9ce6ba78fa6e
|
e61e664d95af3b93150cda5b92695be6551d2a7c
|
/vega/networks/pytorch/customs/modnas/contrib/arch_space/activations/torch.py
|
ee5a9afcf1213009b0473c970a0862eb215582ec
|
[
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0",
"BSD-3-Clause",
"MIT"
] |
permissive
|
huawei-noah/vega
|
44aaf8bb28b45f707ed6cd4e871ba70fc0c04846
|
12e37a1991eb6771a2999fe0a46ddda920c47948
|
refs/heads/master
| 2023-09-01T20:16:28.746745
| 2023-02-15T09:36:59
| 2023-02-15T09:36:59
| 273,667,533
| 850
| 184
|
NOASSERTION
| 2023-02-15T09:37:01
| 2020-06-20T08:20:06
|
Python
|
UTF-8
|
Python
| false
| false
| 491
|
py
|
torch.py
|
"""Torch activation functions."""
import torch.nn
from modnas.registry.arch_space import register
modules = [
'ELU',
'Hardshrink',
'Hardtanh',
'LeakyReLU',
'LogSigmoid',
'PReLU',
'ReLU',
'ReLU6',
'RReLU',
'SELU',
'CELU',
'Sigmoid',
'Softplus',
'Softshrink',
'Softsign',
'Tanh',
'Tanhshrink',
'Threshold',
]
for name in modules:
attr = getattr(torch.nn, name, None)
if attr is not None:
register(attr)
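# The loop above amounts to explicit per-class registration, e.g.:
#
#   register(torch.nn.ReLU)
#   register(torch.nn.LeakyReLU)
#
# The getattr(..., None) guard simply skips any activation class that is
# missing from the installed torch version.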
|
0020804a721d23ee6d88ccf7fcf2819bb1af6bc3
|
b74320ad439e37dfa48cd8db38dab3b7a20a36ff
|
/src/diffusers/pipelines/audio_diffusion/__init__.py
|
58554c45ea52b9897293217652db36fdace7549f
|
[
"Apache-2.0"
] |
permissive
|
huggingface/diffusers
|
c82beba1ec5f0aba01b6744040a5accc41ec2493
|
5eeedd9e3336882d598091e191559f67433b6427
|
refs/heads/main
| 2023-08-29T01:22:52.237910
| 2023-08-28T18:16:27
| 2023-08-28T18:16:27
| 498,011,141
| 17,308
| 3,158
|
Apache-2.0
| 2023-09-14T20:57:44
| 2022-05-30T16:04:02
|
Python
|
UTF-8
|
Python
| false
| false
| 82
|
py
|
__init__.py
|
from .mel import Mel
from .pipeline_audio_diffusion import AudioDiffusionPipeline
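# Minimal usage sketch (the checkpoint id below is illustrative; any
# audio-diffusion checkpoint compatible with this pipeline would do):
#
#   pipe = AudioDiffusionPipeline.from_pretrained("teticio/audio-diffusion-256")
#   output = pipe()  # yields a mel-spectrogram image plus the decoded audio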
|
d77aa2f95aec78440db0f8a808431a72cbdec647
|
6564f42640e11689c2ddb6b92325afe6fddc6a6f
|
/cumulusci/salesforce_api/tests/test_metadata.py
|
7e578a34bba22eb372a42d7a5831dac265b27230
|
[
"LicenseRef-scancode-free-unknown"
] |
permissive
|
SFDO-Tooling/CumulusCI
|
32d4509fa8a36905cfc84fd6283403fd7f4b78c4
|
9ccf3c9566f78c6e9102ac214db30470cef660c1
|
refs/heads/main
| 2023-08-18T04:53:55.733027
| 2023-08-11T20:52:08
| 2023-08-11T20:52:08
| 15,592,459
| 226
| 134
|
BSD-3-Clause
| 2023-09-14T05:09:26
| 2014-01-02T20:01:31
|
Python
|
UTF-8
|
Python
| false
| false
| 36,327
|
py
|
test_metadata.py
|
import datetime
import http.client
import io
from collections import defaultdict
from xml.dom.minidom import parseString
import pytest
import responses
from requests import Response
from cumulusci.core.config import TaskConfig
from cumulusci.core.exceptions import ApexTestException, CumulusCIException
from cumulusci.core.tasks import BaseTask
from cumulusci.salesforce_api.exceptions import (
MetadataApiError,
MetadataComponentFailure,
MetadataParseError,
)
from cumulusci.salesforce_api.metadata import (
ApiDeploy,
ApiListMetadata,
ApiRetrieveInstalledPackages,
ApiRetrievePackaged,
ApiRetrieveUnpackaged,
BaseMetadataApiCall,
)
from cumulusci.salesforce_api.package_zip import (
BasePackageZipBuilder,
CreatePackageZipBuilder,
InstallPackageZipBuilder,
)
from cumulusci.salesforce_api.tests.metadata_test_strings import (
deploy_result,
deploy_result_failure,
deploy_status_envelope,
list_metadata_result,
list_metadata_result_bad_val,
list_metadata_start_envelope,
result_envelope,
retrieve_packaged_start_envelope,
retrieve_result,
retrieve_unpackaged_start_envelope,
status_envelope,
)
from cumulusci.tests.util import DummyOrgConfig, create_project_config
class DummyPackageZipBuilder(BasePackageZipBuilder):
def _populate_zip(self):
return
# TODO: Should this be renamed? Is it intended that it be a "Pure"
# base class or a test-class of its own, as it was under
# the unittest framework?
class TestBaseTestMetadataApi:
api_class = BaseMetadataApiCall
envelope_start = None
envelope_status = status_envelope
envelope_result = result_envelope
def setup_method(self):
# Set up the mock values
self.repo_name = "TestRepo"
self.repo_owner = "TestOwner"
self.repo_api_url = "https://api.github.com/repos/{}/{}".format(
self.repo_owner, self.repo_name
)
self.branch = "main"
# Create the project config
self.project_config = create_project_config(self.repo_name, self.repo_owner)
if not self.envelope_start:
self.envelope_start = self.api_class.soap_envelope_start
def _create_task(self, task_config=None, org_config=None):
if not task_config:
task_config = {}
if not org_config:
org_config = {}
task = BaseTask(
project_config=self.project_config,
task_config=TaskConfig(task_config),
org_config=DummyOrgConfig(org_config),
)
return task
def _mock_call_mdapi(self, api, response, status_code=None):
if not status_code:
status_code = 200
responses.add(
method=responses.POST,
url=api._build_endpoint_url(),
body=response,
status=status_code,
content_type="text/xml; charset=utf-8",
)
return response
def _create_instance(self, task, api_version=None):
return self.api_class(task, api_version=api_version)
def test_init(self):
task = self._create_task()
api = self._create_instance(task)
assert api.task == task
assert api.api_version == self.project_config.project__package__api_version
def test_build_endpoint_url(self):
org_config = {
"instance_url": "https://na12.salesforce.com",
"id": "https://login.salesforce.com/id/00D000000000000ABC/005000000000000ABC",
}
task = self._create_task(org_config=org_config)
api = self._create_instance(task)
assert api._build_endpoint_url() == "{}/services/Soap/m/{}/{}".format(
org_config["instance_url"],
self.project_config.project__package__api_version,
task.org_config.org_id,
)
def test_build_endpoint_url_mydomain(self):
org_config = {
"instance_url": "https://test-org.na12.my.salesforce.com",
"id": "https://login.salesforce.com/id/00D000000000000ABC/005000000000000ABC",
}
task = self._create_task(org_config=org_config)
api = self._create_instance(task)
assert (
api._build_endpoint_url()
== "https://test-org.na12.my.salesforce.com/services/Soap/m/{}/{}".format(
self.project_config.project__package__api_version,
task.org_config.org_id,
)
)
def test_build_endpoint_url_apiversion(self):
org_config = {
"instance_url": "https://na12.salesforce.com",
"id": "https://login.salesforce.com/id/00D000000000000ABC/005000000000000ABC",
}
task = self._create_task(org_config=org_config)
api_version = "43.0"
api = self._create_instance(task, api_version=api_version)
assert api._build_endpoint_url() == "{}/services/Soap/m/{}/{}".format(
org_config["instance_url"], api_version, task.org_config.org_id
)
def test_build_envelope_result(self):
task = self._create_task()
api = self._create_instance(task)
if not self.api_class.soap_envelope_result:
api.soap_envelope_result = "{process_id}"
expected = "123"
else:
expected = self.envelope_result.format(process_id="123")
api.process_id = "123"
assert api._build_envelope_result() == expected
def test_build_envelope_start(self):
task = self._create_task()
api = self._create_instance(task)
if not self.api_class.soap_envelope_start:
api.soap_envelope_start = "{api_version}"
expected = str(self.project_config.project__package__api_version)
else:
expected = self._expected_envelope_start()
assert api._build_envelope_start() == expected
def _expected_envelope_start(self):
return self.envelope_start.format(
api_version=self.project_config.project__package__api_version
)
def test_build_envelope_status(self):
task = self._create_task()
api = self._create_instance(task)
process_id = "123"
if not self.api_class.soap_envelope_status:
api.soap_envelope_status = "{process_id}"
expected = process_id
else:
expected = self.envelope_status.format(process_id=process_id)
api.process_id = process_id
assert api._build_envelope_status() == expected
def test_build_headers(self):
action = "foo"
message = "12345678"
task = self._create_task()
api = self._create_instance(task)
assert api._build_headers(action, message) == {
"Content-Type": "text/xml; charset=UTF-8",
"Content-Length": "8",
"SOAPAction": "foo",
}
@responses.activate
def test_call_faultcode(self):
org_config = {
"instance_url": "https://na12.salesforce.com",
"id": "https://login.salesforce.com/id/00D000000000000ABC/005000000000000ABC",
"access_token": "0123456789",
}
task = self._create_task(org_config=org_config)
api = self._create_instance(task)
if not self.api_class.soap_envelope_start:
api.soap_envelope_start = "{api_version}"
response = '<?xml version="1.0" encoding="UTF-8"?><faultcode>foo</faultcode>'
self._mock_call_mdapi(api, response)
with pytest.raises(MetadataApiError):
api()
@responses.activate
def test_call_success(self):
org_config = {
"instance_url": "https://na12.salesforce.com",
"id": "https://login.salesforce.com/id/00D000000000000ABC/005000000000000ABC",
"access_token": "0123456789",
}
task = self._create_task(org_config=org_config)
api = self._create_instance(task)
if not self.api_class.soap_envelope_start:
api.soap_envelope_start = "{api_version}"
if not self.api_class.soap_envelope_status:
api.soap_envelope_status = "{process_id}"
if not self.api_class.soap_envelope_result:
api.soap_envelope_result = "{process_id}"
response = '<?xml version="1.0" encoding="UTF-8"?><id>1234567890</id>'
self._mock_call_mdapi(api, response)
response_status = '<?xml version="1.0" encoding="UTF-8"?><done>true</done>'
self._mock_call_mdapi(api, response_status)
response_result = '<?xml version="1.0" encoding="UTF-8"?><foo>bar</foo>'
response_result = self._response_call_success_result(response_result)
self._mock_call_mdapi(api, response_result)
resp = api()
expected_resp = self._expected_call_success_result(response_result)
assert resp == expected_resp
def _expected_call_success_result(self, response_result):
return response_result
def _response_call_success_result(self, response_result):
return response_result
def test_get_element_value(self):
task = self._create_task()
api = self._create_instance(task)
dom = parseString("<foo>bar</foo>")
assert api._get_element_value(dom, "foo") == "bar"
def test_get_element_value_not_found(self):
task = self._create_task()
api = self._create_instance(task)
dom = parseString("<foo>bar</foo>")
assert api._get_element_value(dom, "baz") is None
def test_get_element_value_empty(self):
task = self._create_task()
api = self._create_instance(task)
dom = parseString("<foo />")
assert api._get_element_value(dom, "foo") is None
def test_get_check_interval(self):
task = self._create_task()
api = self._create_instance(task)
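        # The poll interval backs off as more status checks accumulate:
        # 1s on the first check, 4s by the tenth. The asserts below pin
        # that observed behaviour without assuming the exact backoff formula.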
api.check_num = 1
assert api._get_check_interval() == 1
api.check_num = 10
assert api._get_check_interval() == 4
@responses.activate
def test_get_response_faultcode(self):
org_config = {
"instance_url": "https://na12.salesforce.com",
"id": "https://login.salesforce.com/id/00D000000000000ABC/005000000000000ABC",
"access_token": "0123456789",
}
task = self._create_task(org_config=org_config)
api = self._create_instance(task)
if not self.api_class.soap_envelope_start:
api.soap_envelope_start = "{api_version}"
response = '<?xml version="1.0" encoding="UTF-8"?><faultcode>foo</faultcode>'
self._mock_call_mdapi(api, response)
with pytest.raises(MetadataApiError):
api._get_response()
@responses.activate
def test_get_response_faultcode_and_string(self):
org_config = {
"instance_url": "https://na12.salesforce.com",
"id": "https://login.salesforce.com/id/00D000000000000ABC/005000000000000ABC",
"access_token": "0123456789",
}
task = self._create_task(org_config=org_config)
api = self._create_instance(task)
if not self.api_class.soap_envelope_start:
api.soap_envelope_start = "{api_version}"
response = '<?xml version="1.0" encoding="UTF-8"?>'
response += "\n<test>"
response += "\n <faultcode>foo</faultcode>"
response += "\n <faultstring>bar</faultstring>"
response += "\n</test>"
self._mock_call_mdapi(api, response)
with pytest.raises(MetadataApiError):
api._get_response()
@responses.activate
def test_get_response_faultcode_invalid_session_no_refresh(self):
org_config = {
"instance_url": "https://na12.salesforce.com",
"id": "https://login.salesforce.com/id/00D000000000000ABC/005000000000000ABC",
"access_token": "0123456789",
}
task = self._create_task(org_config=org_config)
api = self._create_instance(task)
if not self.api_class.soap_envelope_start:
api.soap_envelope_start = "{api_version}"
response = '<?xml version="1.0" encoding="UTF-8"?><faultcode>sf:INVALID_SESSION_ID</faultcode>'
self._mock_call_mdapi(api, response)
with pytest.raises(MetadataApiError):
api._get_response()
assert api.status == "Failed"
@responses.activate
def test_get_response_faultcode_invalid_session_refresh(self):
org_config = {
"instance_url": "https://na12.salesforce.com",
"id": "https://login.salesforce.com/id/00D000000000000ABC/005000000000000ABC",
"access_token": "0123456789",
"refresh_token": "abcdefghij",
}
task = self._create_task(org_config=org_config)
api = self._create_instance(task)
if not self.api_class.soap_envelope_start:
api.soap_envelope_start = "{api_version}"
if not self.api_class.soap_envelope_status:
api.soap_envelope_status = "{process_id}"
mock_responses = []
mock_responses.append(b'<?xml version="1.0" encoding="UTF-8"?><id>123</id>')
mock_responses.append(
b'<?xml version="1.0" encoding="UTF-8"?><faultcode>sf:INVALID_SESSION_ID</faultcode>'
)
mock_responses.append(b'<?xml version="1.0" encoding="UTF-8"?><foo>bar</foo>')
for response in mock_responses:
self._mock_call_mdapi(api, response)
resp = api._get_response()
assert resp.content == mock_responses[2]
@responses.activate
def test_get_response_start_error_500(self):
org_config = {
"instance_url": "https://na12.salesforce.com",
"id": "https://login.salesforce.com/id/00D000000000000ABC/005000000000000ABC",
"access_token": "0123456789",
}
status_code = http.client.INTERNAL_SERVER_ERROR # HTTP Error 500
task = self._create_task(org_config=org_config)
api = self._create_instance(task)
if not self.api_class.soap_envelope_start:
api.soap_envelope_start = "{api_version}"
if not self.api_class.soap_envelope_status:
api.soap_envelope_status = "{process_id}"
response = '<?xml version="1.0" encoding="UTF-8"?><foo>start</foo>'
self._mock_call_mdapi(api, response, status_code)
with pytest.raises(MetadataApiError):
api._get_response()
@responses.activate
def test_get_response_status_error_500(self):
org_config = {
"instance_url": "https://na12.salesforce.com",
"id": "https://login.salesforce.com/id/00D000000000000ABC/005000000000000ABC",
"access_token": "0123456789",
}
status_code = http.client.INTERNAL_SERVER_ERROR # HTTP Error 500
task = self._create_task(org_config=org_config)
api = self._create_instance(task)
if not self.api_class.soap_envelope_start:
api.soap_envelope_start = "{api_version}"
if not self.api_class.soap_envelope_status:
api.soap_envelope_status = "{process_id}"
response = '<?xml version="1.0" encoding="UTF-8"?><id>1234567890</id>'
self._mock_call_mdapi(api, response)
self._mock_call_mdapi(api, response, status_code)
with pytest.raises(MetadataApiError):
api._get_response()
@responses.activate
def test_get_response_status_no_loop(self):
org_config = {
"instance_url": "https://na12.salesforce.com",
"id": "https://login.salesforce.com/id/00D000000000000ABC/005000000000000ABC",
"access_token": "0123456789",
}
task = self._create_task(org_config=org_config)
api = self._create_instance(task)
if not self.api_class.soap_envelope_start:
api.soap_envelope_start = "{api_version}"
if not self.api_class.soap_envelope_status:
api.soap_envelope_status = "{process_id}"
if not self.api_class.soap_envelope_result:
api.soap_envelope_result = "{process_id}"
response = b'<?xml version="1.0" encoding="UTF-8"?><id>1234567890</id>'
self._mock_call_mdapi(api, response)
response_status = b'<?xml version="1.0" encoding="UTF-8"?><done>true</done>'
self._mock_call_mdapi(api, response_status)
response_result = b'<?xml version="1.0" encoding="UTF-8"?><foo>bar</foo>'
self._mock_call_mdapi(api, response_result)
resp = api._get_response()
assert resp.content == response_result
@responses.activate
def test_get_response_status_loop_twice(self):
org_config = {
"instance_url": "https://na12.salesforce.com",
"id": "https://login.salesforce.com/id/00D000000000000ABC/005000000000000ABC",
"access_token": "0123456789",
}
task = self._create_task(org_config=org_config)
api = self._create_instance(task)
if not self.api_class.soap_envelope_start:
api.soap_envelope_start = "{api_version}"
if not self.api_class.soap_envelope_status:
api.soap_envelope_status = "{process_id}"
if not self.api_class.soap_envelope_result:
api.soap_envelope_result = "{process_id}"
api.check_interval = 0
response = b'<?xml version="1.0" encoding="UTF-8"?><id>1234567890</id>'
self._mock_call_mdapi(api, response)
response_status = b'<?xml version="1.0" encoding="UTF-8"?><done>false</done>'
self._mock_call_mdapi(api, response_status)
response_status = b'<?xml version="1.0" encoding="UTF-8"?><done>false</done>'
self._mock_call_mdapi(api, response_status)
response_status = b'<?xml version="1.0" encoding="UTF-8"?><done>true</done>'
self._mock_call_mdapi(api, response_status)
response_result = b'<?xml version="1.0" encoding="UTF-8"?><foo>bar</foo>'
self._mock_call_mdapi(api, response_result)
resp = api._get_response()
assert resp.content == response_result
assert api.status == "Done"
assert api.check_num == 4
def test_process_response_status_no_done_element(self):
task = self._create_task()
api = self._create_instance(task)
response = Response()
response.status_code = 200
response.raw = io.BytesIO(
b'<?xml version="1.0" encoding="UTF-8"?><foo>status</foo>'
)
res = api._process_response_status(response)
assert api.status == "Failed"
assert res.content == response.content
def test_process_response_status_done_is_true(self):
task = self._create_task()
api = self._create_instance(task)
response = Response()
response.status_code = 200
response.raw = io.BytesIO(
b'<?xml version="1.0" encoding="UTF-8"?><done>true</done>'
)
res = api._process_response_status(response)
assert api.status == "Done"
assert res.content == response.content
def test_process_response_status_pending(self):
task = self._create_task()
api = self._create_instance(task)
response = Response()
response.status_code = 200
response.raw = io.BytesIO(
b'<?xml version="1.0" encoding="UTF-8"?><done>false</done>'
)
res = api._process_response_status(response)
assert api.status == "Pending"
assert res.content == response.content
def test_process_response_status_in_progress(self):
task = self._create_task()
api = self._create_instance(task)
response = Response()
response.status_code = 200
response.raw = io.BytesIO(
b'<?xml version="1.0" encoding="UTF-8"?><done>false</done>'
)
api.status = "InProgress"
res = api._process_response_status(response)
assert api.status == "InProgress"
assert res.content == response.content
def test_process_response_status_in_progress_state_detail(self):
task = self._create_task()
api = self._create_instance(task)
response = Response()
response.status_code = 200
response.raw = io.BytesIO(
b'<?xml version="1.0" encoding="UTF-8"?><test><done>false</done><stateDetail>Deploy log goes here</stateDetail></test>'
)
api.status = "InProgress"
res = api._process_response_status(response)
assert api.status == "InProgress"
assert res.content == response.content
class TestBaseMetadataApiCall(TestBaseTestMetadataApi):
def test_build_envelope_start_no_envelope(self):
task = self._create_task()
api = self._create_instance(task)
with pytest.raises(AssertionError):
api._build_envelope_start()
def test_build_envelope_status_no_envelope(self):
task = self._create_task()
api = self._create_instance(task)
assert api._build_envelope_status() is None
def test_build_envelope_result_no_envelope(self):
task = self._create_task()
api = self._create_instance(task)
assert api._build_envelope_result() is None
def test_get_response_no_start_env(self):
task = self._create_task()
api = self._create_instance(task)
with pytest.raises(NotImplementedError):
api._get_response()
@responses.activate
def test_get_response_no_status(self):
org_config = {
"instance_url": "https://na12.salesforce.com",
"id": "https://login.salesforce.com/id/00D000000000000ABC/005000000000000ABC",
"access_token": "0123456789",
}
task = self._create_task(org_config=org_config)
api = self._create_instance(task)
api.soap_envelope_start = "{api_version}"
response = b'<?xml version="1.0" encoding="UTF-8"?><foo />'
self._mock_call_mdapi(api, response)
resp = api._get_response()
assert resp.content == response
class TestApiDeploy(TestBaseTestMetadataApi):
api_class = ApiDeploy
envelope_status = deploy_status_envelope
def setup_method(self):
super().setup_method()
self.package_zip = DummyPackageZipBuilder().as_base64()
def _expected_envelope_start(self):
return self.envelope_start.format(
package_zip=self.package_zip,
check_only="false",
purge_on_delete="false",
test_level="",
run_tests="",
)
def _response_call_success_result(self, response_result):
return deploy_result.format(status="Succeeded", extra="").encode()
def _expected_call_success_result(self, response_result):
return "Success"
def _create_instance(
self,
task,
api_version=None,
purge_on_delete=None,
check_only=None,
test_level=None,
run_tests=None,
):
return self.api_class(
task,
self.package_zip,
api_version=api_version,
purge_on_delete=purge_on_delete,
check_only=check_only,
test_level=test_level,
run_tests=run_tests,
)
def test_init_no_purge_on_delete(self):
task = self._create_task()
api = self._create_instance(task, purge_on_delete=False)
assert api.purge_on_delete == "false"
def test_init_default_check_only(self):
task = self._create_task()
api = self._create_instance(task)
assert api.check_only == "false"
def test_init_check_only(self):
task = self._create_task()
api = self._create_instance(task, check_only=True)
assert api.check_only == "true"
def test_init_default_test_level(self):
task = self._create_task()
api = self._create_instance(task)
assert api.test_level is None
def test_init_test_level(self):
task = self._create_task()
api = self._create_instance(task, test_level="NoTestRun")
assert api.test_level == "NoTestRun"
def test_init_default_run_tests(self):
task = self._create_task()
api = self._create_instance(task)
assert api.run_tests == []
def test_init_run_tests(self):
task = self._create_task()
api = self._create_instance(task, run_tests=["TestA", "TestB"])
assert api.run_tests == ["TestA", "TestB"]
def test_build_envelope_status__run_specified_tests(self):
task = self._create_task()
api = self._create_instance(
task, run_tests=["TestA", "TestB"], test_level="RunSpecifiedTests"
)
api.package_zip = "Test"
envelope = api._build_envelope_start()
assert "<runTests>TestA</runTests>" in envelope
assert "<runTests>TestB</runTests>" in envelope
assert "RunSpecifiedTests" in envelope
def test_process_response_metadata_failure(self):
task = self._create_task()
api = self._create_instance(task)
response = Response()
response.status_code = 200
response.raw = io.BytesIO(
deploy_result_failure.format(
details="""<componentFailures>
<problem>problem</problem>
<problemType>Error</problemType>
<componentType>CustomObject</componentType>
<fileName>Test__c</fileName>
<lineNumber>1</lineNumber>
<columnNumber>1</columnNumber>
<created>false</created>
<deleted>false</deleted>
</componentFailures>"""
).encode()
)
with pytest.raises(MetadataComponentFailure) as e:
api._process_response(response)
expected = "Update of CustomObject Test__c: Error on line 1, col 1: problem"
assert expected == str(e.value)
def test_process_response_metadata_failure_no_lineno(self):
task = self._create_task()
api = self._create_instance(task)
response = Response()
response.status_code = 200
response.raw = io.BytesIO(
deploy_result_failure.format(
details="""<componentFailures>
<problem>problem</problem>
<problemType>Error</problemType>
<componentType>CustomObject</componentType>
<fileName>Test__c</fileName>
<created>false</created>
<deleted>false</deleted>
</componentFailures>"""
).encode()
)
with pytest.raises(MetadataComponentFailure) as e:
api._process_response(response)
expected = "Update of CustomObject Test__c: Error: problem"
assert expected == str(e.value)
def test_process_response_metadata_failure_no_file_name(self):
task = self._create_task()
api = self._create_instance(task)
response = Response()
response.status_code = 200
response.raw = io.BytesIO(
deploy_result_failure.format(
details="""<componentFailures>
<problem>problem</problem>
<problemType>Error</problemType>
<componentType>CustomObject</componentType>
<created>false</created>
<deleted>false</deleted>
</componentFailures>"""
).encode()
)
with pytest.raises(MetadataComponentFailure) as e:
api._process_response(response)
expected = "Update of CustomObject: Error: problem"
assert expected == str(e.value)
def test_process_response_problem(self):
task = self._create_task()
api = self._create_instance(task)
response = Response()
response.status_code = 200
response.raw = io.BytesIO(
deploy_result_failure.format(
details="""<problem>problem</problem>"""
).encode()
)
with pytest.raises(MetadataApiError) as e:
api._process_response(response)
expected = "problem"
assert expected == str(e.value)
def test_process_response_test_failure(self):
task = self._create_task()
api = self._create_instance(task)
response = Response()
response.status_code = 200
response.raw = io.BytesIO(
deploy_result_failure.format(
details="""<runTestResult>
<failures>
<namespace>test</namespace>
<stackTrace>stack</stackTrace>
</failures>
</runTestResult>
"""
).encode()
)
with pytest.raises(ApexTestException) as e:
api._process_response(response)
expected = "Apex Test Failure: from namespace test: stack"
assert expected == str(e.value)
def test_process_response_no_status(self):
task = self._create_task()
api = self._create_instance(task)
response = Response()
response.status_code = 200
response.raw = io.BytesIO(b"<bogus />")
status = api._process_response(response)
assert status == "Failed"
def test_process_response_failure_but_no_message(self):
task = self._create_task()
api = self._create_instance(task)
response = Response()
response.status_code = 200
response.raw = io.BytesIO(b"<status>Failed</status>")
with pytest.raises(MetadataApiError) as e:
api._process_response(response)
assert response.text == str(e.value)
def test_get_action(self):
task = self._create_task()
api = self._create_instance(task)
assert api._get_action(True, False) == "Create"
assert api._get_action(False, True) == "Delete"
assert api._get_action(False, False) == "Update"
class TestApiListMetadata(TestBaseTestMetadataApi):
api_class = ApiListMetadata
envelope_start = list_metadata_start_envelope
def setup_method(self):
super().setup_method()
self.metadata_type = "CustomObject"
self.metadata = None
self.folder = None
self.api_version = self.project_config.project__package__api_version
def _response_call_success_result(self, response_result):
return list_metadata_result
def _expected_call_success_result(self, response_result):
metadata = defaultdict(list)
metadata["CustomObject"] = [
{
"createdById": None,
"createdByName": None,
"createdDate": datetime.datetime(2018, 8, 7, 16, 31, 57),
"fileName": None,
"fullName": "Test__c",
"id": None,
"lastModifiedById": None,
"lastModifiedByName": None,
"lastModifiedDate": None,
"manageableState": None,
"namespacePrefix": None,
"type": "CustomObject",
}
]
return metadata
def _create_instance(self, task, api_version=None):
if api_version is None:
api_version = self.api_version
return self.api_class(
task,
metadata_type=self.metadata_type,
metadata=self.metadata,
folder=self.folder,
as_of_version=api_version,
)
@responses.activate
def test_bad_date_somehow(self):
org_config = {
"instance_url": "https://na12.salesforce.com",
"id": "https://login.salesforce.com/id/00D000000000000ABC/005000000000000ABC",
"access_token": "0123456789",
}
task = self._create_task(org_config=org_config)
api = self._create_instance(task)
self._mock_call_mdapi(api, list_metadata_result_bad_val)
with pytest.raises(MetadataParseError):
api()
class TestApiRetrieveUnpackaged(TestBaseTestMetadataApi):
maxDiff = None
api_class = ApiRetrieveUnpackaged
envelope_start = retrieve_unpackaged_start_envelope
def setup_method(self):
super().setup_method()
self.package_xml = """<?xml version="1.0" encoding="UTF-8"?>
<Package xmlns="http://soap.sforce.com/2006/04/metadata">
<version>41.0</version>
</Package>"""
self.result_zip = DummyPackageZipBuilder()
def _response_call_success_result(self, response_result):
return retrieve_result.format(
zip=self.result_zip.as_base64(), extra=""
).encode()
def _expected_call_success_result(self, response_result):
return self.result_zip.zf
def _create_instance(self, task, api_version=None):
return self.api_class(task, self.package_xml, api_version=api_version)
@responses.activate
def test_call_success(self):
org_config = {
"instance_url": "https://na12.salesforce.com",
"id": "https://login.salesforce.com/id/00D000000000000ABC/005000000000000ABC",
"access_token": "0123456789",
}
task = self._create_task(org_config=org_config)
api = self._create_instance(task)
if not self.api_class.soap_envelope_start:
api.soap_envelope_start = "{api_version}"
if not self.api_class.soap_envelope_status:
api.soap_envelope_status = "{process_id}"
if not self.api_class.soap_envelope_result:
api.soap_envelope_result = "{process_id}"
response = '<?xml version="1.0" encoding="UTF-8"?><id>1234567890</id>'
self._mock_call_mdapi(api, response)
response_status = '<?xml version="1.0" encoding="UTF-8"?><done>true</done>'
self._mock_call_mdapi(api, response_status)
response_result = '<?xml version="1.0" encoding="UTF-8"?><foo>bar</foo>'
response_result = self._response_call_success_result(response_result)
self._mock_call_mdapi(api, response_result)
zip_file = api()
assert (
zip_file.namelist()
== self._expected_call_success_result(response_result).namelist()
)
class TestApiRetrieveInstalledPackages(TestBaseTestMetadataApi):
api_class = ApiRetrieveInstalledPackages
def _create_instance(self, task, api_version=None):
api = self.api_class(task, api_version)
return api
def _expected_call_success_result(self, result_response):
return {}
def test_process_response_no_zipstr(self):
task = self._create_task()
api = self._create_instance(task)
response = Response()
response.status_code = 200
response.raw = io.BytesIO(
deploy_result.format(status="testing", extra="").encode()
)
resp = api._process_response(response)
assert resp == {}
def test_process_response_zipstr_no_packages(self):
task = self._create_task()
api = self._create_instance(task)
response = Response()
response.status_code = 200
response.raw = io.BytesIO(
retrieve_result.format(
zip=CreatePackageZipBuilder("testing", api.api_version).as_base64(),
extra="",
).encode()
)
resp = api._process_response(response)
assert resp == {}
def test_process_response_zipstr_one_package(self):
task = self._create_task()
api = self._create_instance(task)
response = Response()
response.status_code = 200
response.raw = io.BytesIO(
retrieve_result.format(
zip=InstallPackageZipBuilder("foo", "1.1").as_base64(), extra=""
).encode()
)
resp = api._process_response(response)
assert resp == {"foo": "1.1"}
class TestApiRetrievePackaged(TestApiRetrieveUnpackaged):
api_class = ApiRetrievePackaged
envelope_start = retrieve_packaged_start_envelope
def setup_method(self):
super().setup_method()
self.package_name = "Test Package"
def _expected_envelope_start(self):
return self.envelope_start.format(
api_version=self.project_config.project__package__api_version,
package_name=self.package_name,
)
def _create_instance(self, task, api_version=None):
return self.api_class(task, self.package_name, api_version)
def test_process_response__no_package_match_found(self):
task = self._create_task()
api = self._create_instance(task)
response = Response()
response.raw = io.BytesIO(
b"INVALID_CROSS_REFERENCE_KEY: No package named Test Package"
)
with pytest.raises(CumulusCIException):
api._process_response(response)
|
c3f0792a52170c9362dcca6662e64802152b586a
|
2a1b8a671aceda6bc446f8ce26400aa84fa444a6
|
/Packs/illuminate/Integrations/illuminate/illuminate_test.py
|
5727366cbf84aa362cc57eb111ebd816da744fbe
|
[
"MIT"
] |
permissive
|
demisto/content
|
6d4722d46f0ff0beea2748e9f7de585bf91a78b4
|
890def5a0e0ae8d6eaa538148249ddbc851dbb6b
|
refs/heads/master
| 2023-09-04T00:02:25.618032
| 2023-09-03T21:56:22
| 2023-09-03T21:56:22
| 60,525,392
| 1,023
| 1,921
|
MIT
| 2023-09-14T20:55:24
| 2016-06-06T12:17:02
|
Python
|
UTF-8
|
Python
| false
| false
| 6,648
|
py
|
illuminate_test.py
|
import pytest
from illuminate import *
MOCK_SERVER: str = 'mock.com'
MOCK_USER: str = 'mock'
MOCK_PASS: str = 'mock'
MOCK_INDICATOR: str = 'mock-indicator'
BASE_MOCK_JSON: dict = {
'type': 'domain',
'value': {
'name': f'{MOCK_INDICATOR}',
'classification': 'U'
},
'description': None,
'activityDates': [
{
'date': '2020-01-20',
'classification': 'U'
}
],
'reportedDates': [
{
'date': '2020-01-31',
'classification': 'U'
}
],
'targets': [
{
'name': 'Mock Target',
'id': 1,
'classification': 'U'
}
],
'attackPatterns': [
{
'name': 'Mock Attack Pattern',
'id': 1,
'classification': 'U'
}
],
'actors': [
{
'name': 'Mock Actor',
'id': 1,
'classification': 'U'
}
],
'malwares': [],
'status': 'aw',
'hashes': None,
'fileNames': None,
'fileSize': None,
'path': None,
'ports': [],
'ipRegistration': None,
'domainRegistration': None,
'ipResolution': None,
'originatingIps': None,
'subjects': None,
'requestMethods': None,
'tlp': 'mocktlp',
'tlpJustification': None,
'tlpCaveats': None,
'tlpResolution': 'resolved',
'tlpHighestAssociated': 'mocktlp',
'tlpLowestAssociated': 'mocktlp',
'active': True,
'benign': {
'value': False,
'classification': 'U'
},
'confidenceLevel': None,
'exploitStage': None,
'lastHit': None,
'firstHit': None,
'hitCount': None,
'reportCount': 1,
'verified': False,
'tasked': False,
'links': [
{
'rel': 'self',
            'href': f'https://{MOCK_SERVER}/api/1_0/indicator/1',
'hreflang': None,
'media': None,
'title': None,
'type': None,
'deprecation': None
},
{
'rel': 'evidence',
            'href': f'https://{MOCK_SERVER}/api/1_0/indicator/1/evidence',
'hreflang': None,
'media': None,
'title': None,
'type': None,
'deprecation': None
},
{
'rel': 'stix',
            'href': f'https://{MOCK_SERVER}/api/1_0/indicator/1/stix',
'hreflang': None,
'media': None,
'title': None,
'type': None,
'deprecation': None
}
],
'id': 1
}
MOCK_CLIENT_PARAMS = {
'server': MOCK_SERVER,
'proxy': 'false',
'insecure': 'true',
'credentials': {
'identifier': MOCK_USER,
'password': MOCK_PASS
}
}
@pytest.fixture
def mock_client():
return build_client(MOCK_CLIENT_PARAMS)
def mock_indicator_search(indicator_type: str, requests_mock):
requests_mock.get(
f'https://{MOCK_SERVER}/api/1_0/indicator/match?type={indicator_type}&value={MOCK_INDICATOR}',
json=BASE_MOCK_JSON
)
def test_domain_command(requests_mock, mock_client):
mock_indicator_search('domain', requests_mock)
args: dict = {'domain': f'{MOCK_INDICATOR}'}
enrichment_output: EnrichmentOutput = domain_command(mock_client, args)[0]
assert enrichment_output.illuminate_context_data.get('ID') == BASE_MOCK_JSON.get('id')
def test_email_command(requests_mock, mock_client):
mock_indicator_search('email', requests_mock)
args: dict = {'email': f'{MOCK_INDICATOR}'}
enrichment_output: EnrichmentOutput = email_command(mock_client, args)[0]
assert enrichment_output.illuminate_context_data.get('ID') == BASE_MOCK_JSON.get('id')
def test_ip_command(requests_mock, mock_client):
mock_indicator_search('ip', requests_mock)
args: dict = {'ip': f'{MOCK_INDICATOR}'}
enrichment_output: EnrichmentOutput = ip_command(mock_client, args)[0]
assert enrichment_output.illuminate_context_data.get('ID') == BASE_MOCK_JSON.get('id')
def test_file_command(requests_mock, mock_client):
mock_indicator_search('file', requests_mock)
args: dict = {'file': f'{MOCK_INDICATOR}'}
enrichment_output: EnrichmentOutput = file_command(mock_client, args)[0]
assert enrichment_output.illuminate_context_data.get('ID') == BASE_MOCK_JSON.get('id')
def test_url_command(requests_mock, mock_client):
mock_indicator_search('url', requests_mock)
args: dict = {'url': f'{MOCK_INDICATOR}'}
enrichment_output: EnrichmentOutput = url_command(mock_client, args)[0]
assert enrichment_output.illuminate_context_data.get('ID') == BASE_MOCK_JSON.get('id')
def test_illuminate_enrich_string_command(requests_mock, mock_client):
mock_indicator_search('string', requests_mock)
args: dict = {'string': f'{MOCK_INDICATOR}'}
enrichment_output: EnrichmentOutput = illuminate_enrich_string_command(mock_client, args)[0]
assert enrichment_output.illuminate_context_data.get('ID') == BASE_MOCK_JSON.get('id')
def test_illuminate_enrich_ipv6_command(requests_mock, mock_client):
mock_indicator_search('ipv6', requests_mock)
args: dict = {'ip': f'{MOCK_INDICATOR}'}
enrichment_output: EnrichmentOutput = illuminate_enrich_ipv6_command(mock_client, args)[0]
assert enrichment_output.illuminate_context_data.get('ID') == BASE_MOCK_JSON.get('id')
def test_illuminate_enrich_mutex_command(requests_mock, mock_client):
mock_indicator_search('mutex', requests_mock)
args: dict = {'mutex': f'{MOCK_INDICATOR}'}
enrichment_output: EnrichmentOutput = illuminate_enrich_mutex_command(mock_client, args)[0]
assert enrichment_output.illuminate_context_data.get('ID') == BASE_MOCK_JSON.get('id')
def test_illuminate_enrich_http_request_command(requests_mock, mock_client):
mock_indicator_search('httpRequest', requests_mock)
args: dict = {'http-request': f'{MOCK_INDICATOR}'}
enrichment_output: EnrichmentOutput = illuminate_enrich_http_request_command(mock_client, args)[0]
assert enrichment_output.illuminate_context_data.get('ID') == BASE_MOCK_JSON.get('id')
def test_malicious_indicator_check_empty(mock_client):
data = {}
assert mock_client.is_indicator_malicious(data) is False
def test_malicious_indicator_check_benign_false(mock_client):
data = {
"benign": {
"value": False
}
}
assert mock_client.is_indicator_malicious(data) is True
def test_malicious_indicator_check_benign_true(mock_client):
data = {
"benign": {
"value": True
}
}
assert mock_client.is_indicator_malicious(data) is False
|
57dc2441d55c47c007ac919e04d7762d8a3791db
|
84724b34b3f1e84dc53cbca5f3660590dbc34a9f
|
/nova/tests/unit/policies/test_aggregates.py
|
6ac7b6e010f1ab5ab46009350dd8fcd6873ace4a
|
[
"Apache-2.0"
] |
permissive
|
openstack/nova
|
2c24b64e3677595611715bae6dda14edd3f90a24
|
065c5906d2da3e2bb6eeb3a7a15d4cd8d98b35e9
|
refs/heads/master
| 2023-08-28T15:10:05.126314
| 2023-08-25T20:31:27
| 2023-08-25T20:31:27
| 790,031
| 2,287
| 2,320
|
Apache-2.0
| 2023-07-08T02:10:29
| 2010-07-22T02:04:27
|
Python
|
UTF-8
|
Python
| false
| false
| 7,819
|
py
|
test_aggregates.py
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from unittest import mock
from oslo_utils.fixture import uuidsentinel as uuids
from nova.api.openstack.compute import aggregates
from nova import objects
from nova.tests.unit.api.openstack import fakes
from nova.tests.unit.policies import base
class AggregatesPolicyTest(base.BasePolicyTest):
"""Test Aggregates APIs policies with all possible context.
This class defines the set of context with different roles
which are allowed and not allowed to pass the policy checks.
With those set of context, it will call the API operation and
verify the expected behaviour.
"""
def setUp(self):
super(AggregatesPolicyTest, self).setUp()
self.controller = aggregates.AggregateController()
self.req = fakes.HTTPRequest.blank('')
# With legacy rule and scope check disabled by default, system admin,
# legacy admin, and project admin will be able to perform Aggregate
# Operations.
self.project_admin_authorized_contexts = [
self.legacy_admin_context, self.system_admin_context,
self.project_admin_context]
@mock.patch('nova.compute.api.AggregateAPI.get_aggregate_list')
def test_list_aggregate_policy(self, mock_list):
rule_name = "os_compute_api:os-aggregates:index"
self.common_policy_auth(self.project_admin_authorized_contexts,
rule_name, self.controller.index,
self.req)
@mock.patch('nova.compute.api.AggregateAPI.create_aggregate')
def test_create_aggregate_policy(self, mock_create):
rule_name = "os_compute_api:os-aggregates:create"
mock_create.return_value = objects.Aggregate(**{"name": "aggregate1",
"id": "1",
"metadata": {'availability_zone': 'nova1'},
"hosts": ["host1", "host2"]})
body = {"aggregate": {"name": "test",
"availability_zone": "nova1"}}
self.common_policy_auth(self.project_admin_authorized_contexts,
rule_name,
self.controller.create,
self.req, body=body)
@mock.patch('nova.compute.api.AggregateAPI.update_aggregate')
def test_update_aggregate_policy(self, mock_update):
rule_name = "os_compute_api:os-aggregates:update"
self.common_policy_auth(self.project_admin_authorized_contexts,
rule_name, self.controller.update,
self.req, 1,
body={"aggregate": {"name": "new_name"}})
@mock.patch('nova.compute.api.AggregateAPI.delete_aggregate')
def test_delete_aggregate_policy(self, mock_delete):
rule_name = "os_compute_api:os-aggregates:delete"
self.common_policy_auth(self.project_admin_authorized_contexts,
rule_name,
self.controller.delete,
self.req, 1)
@mock.patch('nova.compute.api.AggregateAPI.get_aggregate')
def test_show_aggregate_policy(self, mock_show):
rule_name = "os_compute_api:os-aggregates:show"
self.common_policy_auth(self.project_admin_authorized_contexts,
rule_name, self.controller.show,
self.req, 1)
@mock.patch('nova.compute.api.AggregateAPI.update_aggregate_metadata')
def test_set_metadata_aggregate_policy(self, mock_metadata):
rule_name = "os_compute_api:os-aggregates:set_metadata"
body = {"set_metadata": {"metadata": {"foo": "bar"}}}
self.common_policy_auth(self.project_admin_authorized_contexts,
rule_name,
self.controller._set_metadata,
self.req, 1, body=body)
@mock.patch('nova.compute.api.AggregateAPI.add_host_to_aggregate')
def test_add_host_aggregate_policy(self, mock_add):
rule_name = "os_compute_api:os-aggregates:add_host"
self.common_policy_auth(self.project_admin_authorized_contexts,
rule_name, self.controller._add_host,
self.req, 1,
body={"add_host": {"host": "host1"}})
@mock.patch('nova.compute.api.AggregateAPI.remove_host_from_aggregate')
def test_remove_host_aggregate_policy(self, mock_remove):
rule_name = "os_compute_api:os-aggregates:remove_host"
self.common_policy_auth(self.project_admin_authorized_contexts,
rule_name,
self.controller._remove_host,
self.req, 1,
body={"remove_host": {"host": "host1"}})
@mock.patch('nova.compute.api.AggregateAPI.get_aggregate')
def test_images_aggregate_policy(self, mock_get):
rule_name = "compute:aggregates:images"
mock_get.return_value = {"name": "aggregate1",
"id": "1",
"hosts": ["host1", "host2"]}
body = {'cache': [{'id': uuids.fake_id}]}
req = fakes.HTTPRequest.blank('', version='2.81')
with mock.patch('nova.conductor.api.ComputeTaskAPI.cache_images'):
self.common_policy_auth(self.project_admin_authorized_contexts,
rule_name, self.controller.images,
req, 1, body=body)
class AggregatesNoLegacyNoScopePolicyTest(AggregatesPolicyTest):
"""Test Aggregates APIs policies with no legacy deprecated rules
    and no scope checks, which means new defaults only. In this case
    system admin, legacy admin, and project admin will be able to
    perform Aggregate operations. Legacy admin is allowed because the
    policy is plain admin when scope checks are disabled.
"""
without_deprecated_rules = True
class AggregatesScopeTypePolicyTest(AggregatesPolicyTest):
"""Test Aggregates APIs policies with system scope enabled.
    This class sets the nova.conf [oslo_policy] enforce_scope to True
    so that we can switch on the scope checking on the oslo.policy side.
    It defines the set of contexts with scoped tokens
    which are allowed and not allowed to pass the policy checks.
    With those contexts, it will run the API operation and
    verify the expected behaviour.
"""
def setUp(self):
super(AggregatesScopeTypePolicyTest, self).setUp()
self.flags(enforce_scope=True, group="oslo_policy")
# With scope checks enabled, only project-scoped admins are
# able to perform Aggregate Operations.
self.project_admin_authorized_contexts = [self.legacy_admin_context,
self.project_admin_context]
class AggregatesScopeTypeNoLegacyPolicyTest(AggregatesScopeTypePolicyTest):
"""Test Aggregates APIs policies with no legacy deprecated rules
    and scope checks enabled, which means scope + new defaults, so
    only system admin is able to perform Aggregate operations.
"""
without_deprecated_rules = True
|
8d4bf1ff81ca7c5eea3c19480f6b18e00cc2a7d4
|
2d05050d0ada29f7680b4df20c10bb85b0530e45
|
/vta/python/vta/program_bitstream.py
|
a7da89d2f637082e1f7ef1b22e85a3c8b5b70d99
|
[
"Apache-2.0",
"BSD-3-Clause",
"MIT",
"LicenseRef-scancode-unknown-license-reference",
"Unlicense",
"Zlib",
"LLVM-exception",
"BSD-2-Clause"
] |
permissive
|
apache/tvm
|
87cb617f9a131fa44e1693303aaddf70e7a4c403
|
d75083cd97ede706338ab413dbc964009456d01b
|
refs/heads/main
| 2023-09-04T11:24:26.263032
| 2023-09-04T07:26:00
| 2023-09-04T07:26:00
| 70,746,484
| 4,575
| 1,903
|
Apache-2.0
| 2023-09-14T19:06:33
| 2016-10-12T22:20:28
|
Python
|
UTF-8
|
Python
| false
| false
| 2,930
|
py
|
program_bitstream.py
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""VTA specific bitstream program library."""
import os
import argparse
def main():
"""Main function"""
parser = argparse.ArgumentParser()
parser.add_argument("target", type=str, default="", help="target")
parser.add_argument("bitstream", type=str, default="", help="bitstream path")
args = parser.parse_args()
if args.target not in ("pynq", "ultra96", "de10nano", "sim", "tsim"):
raise RuntimeError("Unknown target {}".format(args.target))
curr_path = os.path.dirname(os.path.abspath(os.path.expanduser(__file__)))
    path_list = [
        # NB: joining with a leading "/" would make the second argument
        # absolute and discard curr_path entirely (that is how os.path.join
        # behaves), so join the relative bitstream path directly.
        os.path.join(curr_path, args.bitstream),
        os.path.join("./", args.bitstream),
    ]
ok_path_list = [p for p in path_list if os.path.exists(p)]
if not ok_path_list:
raise RuntimeError("Cannot find bitstream file in %s" % str(path_list))
bitstream_program(args.target, args.bitstream)
def pynq_bitstream_program(bitstream_path):
# pylint: disable=import-outside-toplevel
from pynq import Bitstream
bitstream = Bitstream(bitstream_path)
bitstream.download()
def de10nano_bitstream_program(bitstream_path):
# pylint: disable=import-outside-toplevel
from tvm import get_global_func
program = get_global_func("vta.de10nano.program")
program(bitstream_path)
def intelfocl_bitstream_program(bitstream_path, mem_size=4 * 1024 * 1024 * 1024):
# pylint: disable=import-outside-toplevel
from tvm import get_global_func
program = get_global_func("vta.oclfpga.program")
program(bitstream_path, mem_size)
def bitstream_program(target, bitstream, *args):
"""program bitstream to devices"""
if target in ["pynq", "ultra96"]:
pynq_bitstream_program(bitstream)
elif target in ["de10nano"]:
de10nano_bitstream_program(bitstream)
elif target in ["sim", "tsim"]:
# In simulation, bit stream programming is a no-op
return
elif target in ["intelfocl"]:
intelfocl_bitstream_program(bitstream, *args)
else:
raise RuntimeError("Unknown target {}".format(target))
if __name__ == "__main__":
main()
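# Example invocation (paths are illustrative; assumes the vta package is
# importable from PYTHONPATH):
#
#   python -m vta.program_bitstream pynq /path/to/vta.bit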
|
24fb50853a7057ddfed1dfde8bf8a2e08c723ad9
|
8d5df43c1611a709ddf19d8b23b8763eb37b4e8f
|
/tests/unit/blocking_channel_tests.py
|
40b8fcc5cd74af9735c7d7e918c2fc2be76be326
|
[
"BSD-3-Clause"
] |
permissive
|
pika/pika
|
86ed56bec6aa813ffd8a7037bbef756a9388533e
|
f4d8f8ff02a4da4653749c86161b7d52e53f73fe
|
refs/heads/main
| 2023-09-03T18:19:30.231575
| 2023-07-28T23:01:02
| 2023-07-29T21:16:38
| 342,869
| 3,040
| 919
|
BSD-3-Clause
| 2023-08-03T21:20:50
| 2009-10-19T23:22:02
|
Python
|
UTF-8
|
Python
| false
| false
| 4,562
|
py
|
blocking_channel_tests.py
|
# -*- coding: utf-8 -*-
"""
Tests for pika.adapters.blocking_connection.BlockingChannel
"""
from collections import deque
import unittest
from unittest import mock
from pika.adapters import blocking_connection
from pika import channel
BLOCKING_CHANNEL = 'pika.adapters.blocking_connection.BlockingChannel'
BLOCKING_CONNECTION = 'pika.adapters.blocking_connection.BlockingConnection'
class ChannelTemplate(channel.Channel):
channel_number = 1
class BlockingChannelTests(unittest.TestCase):
@mock.patch(BLOCKING_CONNECTION)
def _create_connection(self, connection=None):
return connection
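    # (The mock.patch decorator above injects a MagicMock in place of
    # BlockingConnection as the ``connection`` argument, which this helper
    # simply hands back to setUp.)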
def setUp(self):
self.connection = self._create_connection()
channel_impl_mock = mock.Mock(
spec=ChannelTemplate,
is_closing=False,
is_closed=False,
is_open=True)
self.obj = blocking_connection.BlockingChannel(channel_impl_mock,
self.connection)
def tearDown(self):
del self.connection
del self.obj
def test_init_initial_value_confirmation(self):
self.assertFalse(self.obj._delivery_confirmation)
def test_init_initial_value_pending_events(self):
self.assertEqual(self.obj._pending_events, deque())
    def test_init_initial_value_puback_return(self):
self.assertIsNone(self.obj._puback_return)
def test_basic_consume_legacy_parameter_queue(self):
# This is for the unlikely scenario where only
# the first parameter is updated
with self.assertRaises(TypeError):
self.obj.basic_consume('queue',
'whoops this should be a callback')
def test_basic_consume_legacy_parameter_callback(self):
with self.assertRaises(TypeError):
self.obj.basic_consume(mock.Mock(), 'queue')
def test_queue_declare_legacy_parameter_callback(self):
with self.assertRaises(TypeError):
self.obj.queue_declare(mock.Mock(), 'queue')
def test_exchange_declare_legacy_parameter_callback(self):
with self.assertRaises(TypeError):
self.obj.exchange_declare(mock.Mock(), 'exchange')
def test_queue_bind_legacy_parameter_callback(self):
with self.assertRaises(TypeError):
self.obj.queue_bind(mock.Mock(),
'queue',
'exchange')
def test_basic_cancel_legacy_parameter(self):
with self.assertRaises(TypeError):
self.obj.basic_cancel(mock.Mock(), 'tag')
def test_basic_get_legacy_parameter(self):
with self.assertRaises(TypeError):
self.obj.basic_get(mock.Mock())
def test_basic_consume(self):
with mock.patch.object(self.obj._impl, '_generate_consumer_tag'):
self.obj._impl._generate_consumer_tag.return_value = 'ctag0'
self.obj._impl.basic_consume.return_value = 'ctag0'
self.obj.basic_consume('queue', mock.Mock())
self.assertEqual(self.obj._consumer_infos['ctag0'].state,
blocking_connection._ConsumerInfo.ACTIVE)
def test_context_manager(self):
with self.obj as chan:
self.assertFalse(chan._impl.close.called)
chan._impl.close.assert_called_once_with(
reply_code=0, reply_text='Normal shutdown')
def test_context_manager_does_not_suppress_exception(self):
class TestException(Exception):
pass
with self.assertRaises(TestException):
with self.obj as chan:
self.assertFalse(chan._impl.close.called)
raise TestException()
chan._impl.close.assert_called_once_with(
reply_code=0, reply_text='Normal shutdown')
def test_context_manager_exit_with_closed_channel(self):
with self.obj as chan:
self.assertFalse(chan._impl.close.called)
chan.close()
chan._impl.close.assert_called_with(
reply_code=0, reply_text='Normal shutdown')
def test_consumer_tags_property(self):
with mock.patch.object(self.obj._impl, '_generate_consumer_tag'):
self.assertEqual(0, len(self.obj.consumer_tags))
self.obj._impl._generate_consumer_tag.return_value = 'ctag0'
self.obj._impl.basic_consume.return_value = 'ctag0'
self.obj.basic_consume('queue', mock.Mock())
self.assertEqual(1, len(self.obj.consumer_tags))
self.assertIn('ctag0', self.obj.consumer_tags)
|
5c4d03c3650d0bc5810a4c3f80059c270254b63f
|
65078b8087c2040cf0188e2550ea298d20518f62
|
/docs/source/conf.py
|
5e706337bdb5a7d2c2945b0c22d048a1e5d744c8
|
[
"Apache-2.0"
] |
permissive
|
bentoml/BentoML
|
20ab6f8351b1c5cd116d6d60a28098246a1581b3
|
4a14f073d8a3e700aff29483b17ea053058c0c63
|
refs/heads/main
| 2023-09-05T16:03:08.909692
| 2023-09-04T18:54:33
| 2023-09-04T18:54:33
| 178,976,529
| 5,712
| 732
|
Apache-2.0
| 2023-09-14T20:07:54
| 2019-04-02T01:39:27
|
Python
|
UTF-8
|
Python
| false
| false
| 5,623
|
py
|
conf.py
|
from datetime import datetime
import importlib.metadata
# -- Project information -----------------------------------------------------
project = "BentoML"
copyright = f"2022-{datetime.now().year}, bentoml.com"
author = "bentoml.com"
version = importlib.metadata.version("bentoml")
# -- General configuration ---------------------------------------------------
source_suffix = [".rst", ".md"]
# See https://github.com/readthedocs/readthedocs.org/issues/2149
master_doc = "index"
# exclude patterns
exclude_patterns = [
"**/*/bazel-*", # generated by bazel
"**/*/node_modules/*", # node_modules
"**/*/.build/*", # generated by swift
"**/*/thirdparty/*", # generated by swift
]
# Sphinx extensions
extensions = [
"sphinxext.opengraph",
"sphinx.ext.autodoc",
"sphinx.ext.autosectionlabel",
"sphinx.ext.napoleon",
"sphinx.ext.todo",
"sphinx.ext.viewcode",
"sphinx.ext.ifconfig",
"sphinx.ext.intersphinx",
"sphinx.ext.mathjax",
"sphinx.ext.extlinks",
"sphinx_click",
"sphinx_copybutton",
"sphinx_design",
"sphinx_issues",
"sphinxcontrib.spelling",
"myst_parser",
"sphinx_inline_tabs",
"hoverxref.extension",
]
intersphinx_mapping = {
"python": ("https://docs.python.org/3", None),
"pip": ("https://pip.pypa.io/en/latest", None),
}
extlinks = {
"pypi": ("https://pypi.org/project/%s", "%s"), # noqa: WPS323
"wiki": ("https://wikipedia.org/wiki/%s", "%s"), # noqa: WPS323
"github": ("https://github.com/%s", "%s"), # noqa: WPS323
"examples": (
"https://github.com/bentoml/BentoML/tree/main/examples/%s",
"examples/",
), # noqa: WPS323
}
# custom roles
rst_prolog = """
.. role:: raw-html(raw)
:format: html
"""
# hoverxref settings
hoverxref_auto_ref = True
hoverxref_sphinxtabs = True
hoverxref_role_types = {
"hoverxref": "modal",
"ref": "tooltip",
"mod": "tooltip",
"class": "tooltip",
"doc": "tooltip",
}
hoverxref_intersphinx = ["python", "pip"]
# Plugin Configurations:
napoleon_google_docstring = True
napoleon_numpy_docstring = False
napoleon_include_special_with_doc = False
napoleon_attr_annotations = True
autodoc_typehints = "signature"
autodoc_typehints_format = "short"
autodoc_typehints_description_target = "documented"
autosectionlabel_prefix_document = True
autosectionlabel_maxdepth = 10
ogp_site_url = "http://docs.bentoml.com"
ogp_image = "https://docs.bentoml.com/en/latest/_static/img/bentoml-banner.jpg"
ogp_site_name = "BentoML Documentation"
ogp_use_first_image = True
issues_default_group_project = "bentoml/bentoml"
todo_include_todos = True
# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = "zenburn"
pygments_dark_style = "monokai"
myst_enable_extensions = ["colon_fence"]
# Remove the prompt when copying examples
copybutton_prompt_text = r">>> |\.\.\.|> |» |\% |\$ "
copybutton_prompt_is_regexp = True
copybutton_line_continuation_character = "\\"
copybutton_here_doc_delimiter = "EOT"
copybutton_selector = "div:not(.no-copybutton) > div.highlight > pre"
# -- Options for HTML output -------------------------------------------------
html_theme = "furo"
html_theme_options = {
"light_css_variables": {
"color-brand-primary": "#44a4c6 ",
"color-brand-content": "#44a4c6 ",
},
"dark_css_variables": {
"color-brand-primary": "#c9378a ",
"color-brand-content": "#c9378a ",
},
"source_repository": "https://github.com/bentoml/bentoml/",
"source_branch": "main",
"source_directory": "docs/source/",
"footer_icons": [
{
"name": "GitHub",
"url": "https://github.com/bentoml",
"html": " ",
"class": "fab fa-github",
},
{
"name": "LinkedIn",
"url": "https://www.linkedin.com/company/bentoml/",
"html": " ",
"class": "fab fa-linkedin",
},
{
"name": "Twitter",
"url": "https://twitter.com/bentomlai",
"html": " ",
"class": "fab fa-twitter",
},
{
"name": "Slack",
"url": "https://l.bentoml.com/join-slack",
"html": " ",
"class": "fab fa-slack",
},
],
"light_logo": "img/logo-light.svg",
"dark_logo": "img/logo-dark.svg",
}
html_title = "BentoML"
html_static_path = ["_static"]
html_css_files = [
"css/custom.css",
"https://cdnjs.cloudflare.com/ajax/libs/font-awesome/5.15.3/css/all.min.css",
]
html_js_files = ["js/custom.js"]
html_show_sphinx = False
html_favicon = "_static/img/favicon-32x32.png"
# Private dictionary for spell checker
spelling_word_list_filename = ["bentoml_wordlist.txt"]
# mock any heavy imports, eg: imports from frameworks library
autodoc_mock_imports = [
"torch",
"torchvision",
"diffusers",
"detectron2",
"easyocr",
"flax",
"jax",
"jaxlib",
"torchtext",
"fastai",
"fastai.learner.Learner",
"tensorflow",
"tensorflow.keras",
"tensorflow.python.client",
"tensorflow.python.training.tracking.tracking",
"keras",
"lightgbm",
"mlflow",
"onnx",
"onnxruntime",
"torch.nn.parallel",
"pytorch_lightning",
"sklearn",
"joblib",
"transformers",
"transformers.file_utils",
"xgboost",
"catboost",
"prometheus_client",
"bentoml._internal.models.model.ModelSignatureDict",
]
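# Hedged aside (illustration, not part of BentoML's conf.py): how the
# sphinx.ext.extlinks mapping above expands a role. With
# extlinks = {"pypi": ("https://pypi.org/project/%s", "%s")}, the RST text
# :pypi:`bentoml` becomes a link titled "bentoml" to the PyPI project page.
# Minimal expansion logic, assuming captions contain exactly one %s:
def _expand_extlink(mapping, role, target):
    url_template, caption_template = mapping[role]
    return url_template % target, caption_template % target
assert _expand_extlink(
    {"pypi": ("https://pypi.org/project/%s", "%s")}, "pypi", "bentoml"
) == ("https://pypi.org/project/bentoml", "bentoml")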
|
e75f72413fc731c0b9763fe95dbed3f3febd8b89
|
019f03d6713a2bc5344b644aeb5ebe70aaf7cfd0
|
/src/super_gradients/training/losses/__init__.py
|
f14781c2a2a391b0e576dc28c898d3188562bcea
|
[
"LicenseRef-scancode-proprietary-license",
"Apache-2.0"
] |
permissive
|
Deci-AI/super-gradients
|
6f52cd15bc2f9f39e3cdc6067292b6512aba5dd0
|
7240726cf6425b53a26ed2faec03672f30fee6be
|
refs/heads/master
| 2023-08-25T17:47:02.595029
| 2023-08-24T11:50:50
| 2023-08-24T11:50:50
| 432,652,408
| 3,237
| 331
|
Apache-2.0
| 2023-09-14T11:24:46
| 2021-11-28T07:58:02
|
Jupyter Notebook
|
UTF-8
|
Python
| false
| false
| 1,570
|
py
|
__init__.py
|
from super_gradients.training.losses.focal_loss import FocalLoss
from super_gradients.training.losses.kd_losses import KDLogitsLoss
from super_gradients.training.losses.label_smoothing_cross_entropy_loss import LabelSmoothingCrossEntropyLoss
from super_gradients.training.losses.r_squared_loss import RSquaredLoss
from super_gradients.training.losses.shelfnet_ohem_loss import ShelfNetOHEMLoss
from super_gradients.training.losses.shelfnet_semantic_encoding_loss import ShelfNetSemanticEncodingLoss
from super_gradients.training.losses.yolox_loss import YoloXDetectionLoss, YoloXFastDetectionLoss
from super_gradients.training.losses.ssd_loss import SSDLoss
from super_gradients.training.losses.bce_dice_loss import BCEDiceLoss
from super_gradients.training.losses.dice_ce_edge_loss import DiceCEEdgeLoss
from super_gradients.training.losses.ppyolo_loss import PPYoloELoss
from super_gradients.training.losses.dekr_loss import DEKRLoss
from super_gradients.training.losses.stdc_loss import STDCLoss
from super_gradients.training.losses.rescoring_loss import RescoringLoss
from super_gradients.common.object_names import Losses
from super_gradients.common.registry.registry import LOSSES
__all__ = [
"LOSSES",
"Losses",
"FocalLoss",
"LabelSmoothingCrossEntropyLoss",
"ShelfNetOHEMLoss",
"ShelfNetSemanticEncodingLoss",
"YoloXDetectionLoss",
"YoloXFastDetectionLoss",
"RSquaredLoss",
"SSDLoss",
"BCEDiceLoss",
"KDLogitsLoss",
"DiceCEEdgeLoss",
"PPYoloELoss",
"DEKRLoss",
"STDCLoss",
"RescoringLoss",
]
|
9fb1729a3ef5cbe470a2be16519b766ac975d981
|
3c41443364da8b44c74dce08ef94a1acd1b66b3e
|
/addons/figshare/tests/test_models.py
|
19c51f35d6ac62ba4ea0bc20c90c84256df89062
|
[
"MIT",
"BSD-3-Clause",
"LicenseRef-scancode-free-unknown",
"LicenseRef-scancode-warranty-disclaimer",
"AGPL-3.0-only",
"LGPL-2.0-or-later",
"LicenseRef-scancode-proprietary-license",
"MPL-1.1",
"CPAL-1.0",
"LicenseRef-scancode-unknown-license-reference",
"BSD-2-Clause",
"Apache-2.0"
] |
permissive
|
CenterForOpenScience/osf.io
|
71d9540be7989f7118a33e15bc4a6ce2d2492ac1
|
a3e0a0b9ddda5dd75fc8248d58f3bcdeece0323e
|
refs/heads/develop
| 2023-09-04T03:21:14.970917
| 2023-08-31T14:49:20
| 2023-08-31T14:49:20
| 10,199,599
| 683
| 390
|
Apache-2.0
| 2023-09-14T17:07:52
| 2013-05-21T15:53:37
|
Python
|
UTF-8
|
Python
| false
| false
| 2,818
|
py
|
test_models.py
|
import mock
from nose.tools import assert_false, assert_equal
import pytest
import unittest
from tests.base import get_default_metaschema
from framework.auth import Auth
from osf_tests.factories import DraftRegistrationFactory
from addons.figshare.tests.factories import (
FigshareUserSettingsFactory,
FigshareNodeSettingsFactory,
FigshareAccountFactory
)
from addons.figshare.models import NodeSettings
from addons.base.tests import models
pytestmark = pytest.mark.django_db
class TestNodeSettings(models.OAuthAddonNodeSettingsTestSuiteMixin, unittest.TestCase):
short_name = 'figshare'
full_name = 'figshare'
ExternalAccountFactory = FigshareAccountFactory
NodeSettingsFactory = FigshareNodeSettingsFactory
NodeSettingsClass = NodeSettings
UserSettingsFactory = FigshareUserSettingsFactory
def _node_settings_class_kwargs(self, node, user_settings):
return {
'user_settings': self.user_settings,
'folder_id': '1234567890',
'folder_path': 'fileset',
'folder_name': 'Camera Uploads',
'owner': self.node
}
@mock.patch('website.archiver.tasks.archive')
@mock.patch('addons.figshare.models.NodeSettings.archive_errors')
def test_does_not_get_copied_to_registrations(self, mock_errors, mock_archive):
registration = self.node.register_node(
schema=get_default_metaschema(),
auth=Auth(user=self.node.creator),
draft_registration=DraftRegistrationFactory(branched_from=self.node)
)
assert_false(registration.has_addon('figshare'))
# Overrides
@mock.patch('addons.figshare.client.FigshareClient.get_linked_folder_info')
def test_set_folder(self, mock_info):
# Differences from super: mocking, log action name
folder_id = '1234567890'
mock_info.return_value = dict(path='project', name='Folder', id='1234567890')
self.node_settings.set_folder(folder_id, auth=Auth(self.user))
self.node_settings.save()
assert_equal(self.node_settings.folder_id, folder_id)
last_log = self.node.logs.latest()
assert_equal(last_log.action, '{0}_folder_selected'.format(self.short_name))
def test_serialize_settings(self):
# Custom `expected`
settings = self.node_settings.serialize_waterbutler_settings()
expected = {
'container_id': self.node_settings.folder_id,
'container_type': self.node_settings.folder_path
}
assert_equal(settings, expected)
class TestUserSettings(models.OAuthAddonUserSettingTestSuiteMixin, unittest.TestCase):
short_name = 'figshare'
full_name = 'figshare'
ExternalAccountFactory = FigshareAccountFactory
# TODO: Test figshare options and figshare to_json
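# Hedged aside (standalone miniature, not osf.io code): the suite above uses
# the mixin pattern, where shared assertions live in a mixin that reads class
# attributes and each addon binds them in a concrete TestCase. The names
# below are illustrative only.
class _AddonSuiteMixin:
    short_name = None  # concrete subclasses must set this
    def test_has_short_name(self):
        self.assertTrue(self.short_name)
class _TestFigshareAddon(_AddonSuiteMixin, unittest.TestCase):
    short_name = 'figshare'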
|
3804adbed7e91f46e227a5e090c0a67e450461b3
|
ce7d8409dc7da020d82e479ae457f2b9d598b44d
|
/vel/internals/tests/test_provider.py
|
4f49e67548a96c6768f9907838d1b44090956d22
|
[
"MIT"
] |
permissive
|
MillionIntegrals/vel
|
42f9aa241b0e07f51363e97630e3a0b4750f1e5e
|
f3ce7da64362ad207f40f2c0d58d9300a25df3e8
|
refs/heads/master
| 2022-12-24T12:58:03.377520
| 2019-10-24T19:04:53
| 2019-10-24T19:04:53
| 132,808,324
| 280
| 37
|
MIT
| 2022-12-08T04:50:05
| 2018-05-09T20:16:48
|
Python
|
UTF-8
|
Python
| false
| false
| 3,722
|
py
|
test_provider.py
|
import os
import pytest
import vel.internals.provider as v
import vel.internals.parser as p
import vel.exceptions as e
def data_function(a, b):
return a + b
def test_simple_instantiation():
provider = v.Provider({
'a': 1,
'b': 2,
})
assert provider.instantiate_from_data(1) == 1
assert provider.instantiate_from_data("abc") == "abc"
assert provider.instantiate_from_data([1, 2, 3]) == [1, 2, 3]
assert provider.instantiate_from_data({"a": "a", "b": "b"}) == {"a": "a", "b": "b"}
def test_instantiate_function_call():
provider = v.Provider({
'a': 1,
'b': 2,
})
assert provider.resolve_and_call(data_function) == 3
assert provider.resolve_and_call(data_function, extra_env={'b': 4}) == 5
def test_simple_injection():
provider = v.Provider({
'a': 1,
'b': 2,
'one': {
'name': 'vel.internals.tests.fixture_a'
},
'two': {
'name': 'vel.internals.tests.fixture_a',
'a': 5,
'b': 6
},
'three': {
'name': 'vel.internals.tests.fixture_b',
'd': 'd'
}
})
one = provider.instantiate_by_name('one')
assert isinstance(one, dict)
assert one['a'] == 1
assert one['b'] == 2
two = provider.instantiate_by_name('two')
assert isinstance(two, dict)
assert two['a'] == 5
assert two['b'] == 6
three = provider.instantiate_by_name('three')
assert isinstance(three, dict)
assert id(three['one']) == id(one)
assert id(three['one']) != id(two)
assert three['d'] == 'd'
def test_parameter_resolution():
os.environ['TEST_VAR'] = '10'
provider = v.Provider({
'a': 1,
'b': p.Parameter("xxx"),
'one': {
'name': 'vel.internals.tests.fixture_a'
},
'two': {
'name': 'vel.internals.tests.fixture_a',
'b': p.Parameter('yyy')
},
'three': {
'name': 'vel.internals.tests.fixture_a',
'b': p.Parameter('yyy', 7)
},
'four': {
'name': 'vel.internals.tests.fixture_a',
'b': p.EnvironmentVariable('TEST_VAR')
},
}, parameters={'xxx': 5})
one = provider.instantiate_by_name('one')
assert one['b'] == 5
with pytest.raises(e.VelException):
provider.instantiate_by_name('two')
three = provider.instantiate_by_name('three')
assert three['b'] == 7
four = provider.instantiate_by_name('four')
assert four['b'] == '10'
def test_render_configuration():
os.environ['TEST_VAR'] = '10'
provider = v.Provider({
'a': 1,
'b': p.Parameter("xxx"),
'one': {
'name': 'vel.internals.tests.fixture_a'
},
'two': {
'name': 'vel.internals.tests.fixture_a',
'b': p.Parameter('yyy', 5)
},
'three': {
'name': 'vel.internals.tests.fixture_a',
'b': p.Parameter('yyy', 7)
},
'four': {
'name': 'vel.internals.tests.fixture_a',
'b': p.EnvironmentVariable('TEST_VAR')
},
}, parameters={'xxx': 5})
configuration = provider.render_configuration()
assert configuration == {
'a': 1,
'b': 5,
'one': {
'name': 'vel.internals.tests.fixture_a'
},
'two': {
'name': 'vel.internals.tests.fixture_a',
'b': 5
},
'three': {
'name': 'vel.internals.tests.fixture_a',
'b': 7
},
'four': {
'name': 'vel.internals.tests.fixture_a',
'b': '10'
},
}
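# Hedged aside (illustration only; these are not the real vel classes): the
# resolution order exercised above is parameters-dict first, then the
# Parameter default, with EnvironmentVariable reading os.environ directly.
def _resolve(value, parameters):
    kind, *args = value
    if kind == 'param':
        name, *default = args
        if name in parameters:
            return parameters[name]
        if default:
            return default[0]
        raise KeyError('unresolved parameter {}'.format(name))
    if kind == 'env':
        return os.environ[args[0]]
    return value
os.environ['TEST_VAR'] = '10'
assert _resolve(('param', 'xxx'), {'xxx': 5}) == 5
assert _resolve(('param', 'yyy', 7), {'xxx': 5}) == 7
assert _resolve(('env', 'TEST_VAR'), {}) == '10'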
|
298c81e89a677cb4d67401ff760b05a53c028f9e
|
a3d6556180e74af7b555f8d47d3fea55b94bcbda
|
/chromecast/media/DEPS
|
993d5acd0363ce9707eced28cd3da70aca4744d2
|
[
"BSD-3-Clause"
] |
permissive
|
chromium/chromium
|
aaa9eda10115b50b0616d2f1aed5ef35d1d779d6
|
a401d6cf4f7bf0e2d2e964c512ebb923c3d8832c
|
refs/heads/main
| 2023-08-24T00:35:12.585945
| 2023-08-23T22:01:11
| 2023-08-23T22:01:11
| 120,360,765
| 17,408
| 7,102
|
BSD-3-Clause
| 2023-09-10T23:44:27
| 2018-02-05T20:55:32
| null |
UTF-8
|
Python
| false
| false
| 1,077
|
DEPS
|
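# Hedged annotation (comment added for this edition, not in the upstream
# file): in Chromium DEPS files each "+path" entry below grants code in this
# directory permission to #include headers from that path; a "-path" entry
# would revoke it.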
include_rules = [
"+chromecast/common/mojom",
"+chromecast/external_mojo",
"+chromecast/net/socket_util.h",
"+content/public/renderer",
"+media/audio",
"+media/base",
"+media/cdm",
"+media/filters",
"+media/mojo/mojom",
"+mojo/core/embedder/embedder.h",
"+mojo/public/cpp/bindings",
"+mojo/public/cpp/platform/platform_handle.h",
"+net/base/io_buffer.h",
"+net/base/net_errors.h",
"+net/socket/stream_socket.h",
"+ui/gfx/geometry",
"+ui/gfx/hdr_metadata.h",
"+ui/gfx/overlay_transform.h",
"+services/service_manager/public",
"+third_party/blink/public/common/browser_interface_broker_proxy.h",
"+third_party/blink/public/common/tokens/tokens.h",
"+third_party/blink/public/platform/audio/web_audio_device_source_type.h",
"+third_party/blink/public/web/modules/media/audio/audio_device_factory.h",
"+third_party/blink/public/web/modules/media/audio/audio_output_ipc_factory.h",
"+third_party/blink/public/web/web_local_frame.h",
"+third_party/widevine/cdm/buildflags.h",
"+third_party/widevine/cdm/widevine_cdm_common.h",
]
|
|
2171fd5eae1d8eaad1a2252a9e0d0ece5d9b553f
|
fce81b804cae23f525a5ad4370b684bf0dc531a5
|
/numpy/ma/tests/test_subclassing.py
|
e3c88525371edbf742d4e2e9c7401b60b29cd740
|
[
"Zlib",
"BSD-3-Clause",
"MIT",
"Apache-2.0"
] |
permissive
|
numpy/numpy
|
ba2abcc1d2d46affbb6aabe5aed6407b4b57507e
|
dc2ff125493777a1084044e6cd6857a42ee323d4
|
refs/heads/main
| 2023-09-05T10:10:52.767363
| 2023-09-04T18:03:29
| 2023-09-04T18:03:29
| 908,607
| 25,725
| 11,968
|
BSD-3-Clause
| 2023-09-14T21:26:09
| 2010-09-13T23:02:39
|
Python
|
UTF-8
|
Python
| false
| false
| 16,967
|
py
|
test_subclassing.py
|
# pylint: disable-msg=W0611, W0612, W0511,R0201
"""Tests suite for MaskedArray & subclassing.
:author: Pierre Gerard-Marchant
:contact: pierregm_at_uga_dot_edu
:version: $Id: test_subclassing.py 3473 2007-10-29 15:18:13Z jarrod.millman $
"""
import numpy as np
from numpy.lib.mixins import NDArrayOperatorsMixin
from numpy.testing import assert_, assert_raises
from numpy.ma.testutils import assert_equal
from numpy.ma.core import (
array, arange, masked, MaskedArray, masked_array, log, add, hypot,
divide, asarray, asanyarray, nomask
)
def assert_startswith(a, b):
# produces a better error message than assert_(a.startswith(b))
assert_equal(a[:len(b)], b)
class SubArray(np.ndarray):
# Defines a generic np.ndarray subclass, that stores some metadata
# in the dictionary `info`.
    def __new__(cls, arr, info={}):
x = np.asanyarray(arr).view(cls)
x.info = info.copy()
return x
def __array_finalize__(self, obj):
super().__array_finalize__(obj)
self.info = getattr(obj, 'info', {}).copy()
return
def __add__(self, other):
result = super().__add__(other)
result.info['added'] = result.info.get('added', 0) + 1
return result
def __iadd__(self, other):
result = super().__iadd__(other)
result.info['iadded'] = result.info.get('iadded', 0) + 1
return result
subarray = SubArray
class SubMaskedArray(MaskedArray):
"""Pure subclass of MaskedArray, keeping some info on subclass."""
def __new__(cls, info=None, **kwargs):
obj = super().__new__(cls, **kwargs)
obj._optinfo['info'] = info
return obj
class MSubArray(SubArray, MaskedArray):
def __new__(cls, data, info={}, mask=nomask):
subarr = SubArray(data, info)
_data = MaskedArray.__new__(cls, data=subarr, mask=mask)
_data.info = subarr.info
return _data
@property
def _series(self):
_view = self.view(MaskedArray)
_view._sharedmask = False
return _view
msubarray = MSubArray
# Also a subclass that overrides __str__, __repr__ and __setitem__, disallowing
# setting to non-class values (and thus np.ma.core.masked_print_option)
# and overrides __array_wrap__, updating the info dict, to check that this
# doesn't get destroyed by MaskedArray._update_from. But this one also needs
# its own iterator...
class CSAIterator:
"""
Flat iterator object that uses its own setter/getter
(works around ndarray.flat not propagating subclass setters/getters
see https://github.com/numpy/numpy/issues/4564)
roughly following MaskedIterator
"""
def __init__(self, a):
self._original = a
self._dataiter = a.view(np.ndarray).flat
def __iter__(self):
return self
def __getitem__(self, indx):
out = self._dataiter.__getitem__(indx)
if not isinstance(out, np.ndarray):
out = out.__array__()
out = out.view(type(self._original))
return out
def __setitem__(self, index, value):
self._dataiter[index] = self._original._validate_input(value)
def __next__(self):
return next(self._dataiter).__array__().view(type(self._original))
class ComplicatedSubArray(SubArray):
def __str__(self):
return f'myprefix {self.view(SubArray)} mypostfix'
def __repr__(self):
# Return a repr that does not start with 'name('
return f'<{self.__class__.__name__} {self}>'
def _validate_input(self, value):
if not isinstance(value, ComplicatedSubArray):
raise ValueError("Can only set to MySubArray values")
return value
def __setitem__(self, item, value):
# validation ensures direct assignment with ndarray or
# masked_print_option will fail
super().__setitem__(item, self._validate_input(value))
def __getitem__(self, item):
# ensure getter returns our own class also for scalars
value = super().__getitem__(item)
if not isinstance(value, np.ndarray): # scalar
value = value.__array__().view(ComplicatedSubArray)
return value
@property
def flat(self):
return CSAIterator(self)
@flat.setter
def flat(self, value):
y = self.ravel()
y[:] = value
def __array_wrap__(self, obj, context=None):
obj = super().__array_wrap__(obj, context)
if context is not None and context[0] is np.multiply:
obj.info['multiplied'] = obj.info.get('multiplied', 0) + 1
return obj
class WrappedArray(NDArrayOperatorsMixin):
"""
Wrapping a MaskedArray rather than subclassing to test that
ufunc deferrals are commutative.
    See: https://github.com/numpy/numpy/issues/15200
"""
__slots__ = ('_array', 'attrs')
__array_priority__ = 20
def __init__(self, array, **attrs):
self._array = array
self.attrs = attrs
def __repr__(self):
return f"{self.__class__.__name__}(\n{self._array}\n{self.attrs}\n)"
def __array__(self):
return np.asarray(self._array)
def __array_ufunc__(self, ufunc, method, *inputs, **kwargs):
if method == '__call__':
inputs = [arg._array if isinstance(arg, self.__class__) else arg
for arg in inputs]
return self.__class__(ufunc(*inputs, **kwargs), **self.attrs)
else:
return NotImplemented
class TestSubclassing:
# Test suite for masked subclasses of ndarray.
def setup_method(self):
x = np.arange(5, dtype='float')
mx = msubarray(x, mask=[0, 1, 0, 0, 0])
self.data = (x, mx)
def test_data_subclassing(self):
# Tests whether the subclass is kept.
x = np.arange(5)
m = [0, 0, 1, 0, 0]
xsub = SubArray(x)
xmsub = masked_array(xsub, mask=m)
assert_(isinstance(xmsub, MaskedArray))
assert_equal(xmsub._data, xsub)
assert_(isinstance(xmsub._data, SubArray))
def test_maskedarray_subclassing(self):
# Tests subclassing MaskedArray
(x, mx) = self.data
assert_(isinstance(mx._data, subarray))
def test_masked_unary_operations(self):
# Tests masked_unary_operation
(x, mx) = self.data
with np.errstate(divide='ignore'):
assert_(isinstance(log(mx), msubarray))
assert_equal(log(x), np.log(x))
def test_masked_binary_operations(self):
# Tests masked_binary_operation
(x, mx) = self.data
# Result should be a msubarray
assert_(isinstance(add(mx, mx), msubarray))
assert_(isinstance(add(mx, x), msubarray))
# Result should work
assert_equal(add(mx, x), mx+x)
assert_(isinstance(add(mx, mx)._data, subarray))
assert_(isinstance(add.outer(mx, mx), msubarray))
assert_(isinstance(hypot(mx, mx), msubarray))
assert_(isinstance(hypot(mx, x), msubarray))
def test_masked_binary_operations2(self):
# Tests domained_masked_binary_operation
(x, mx) = self.data
xmx = masked_array(mx.data.__array__(), mask=mx.mask)
assert_(isinstance(divide(mx, mx), msubarray))
assert_(isinstance(divide(mx, x), msubarray))
assert_equal(divide(mx, mx), divide(xmx, xmx))
def test_attributepropagation(self):
x = array(arange(5), mask=[0]+[1]*4)
my = masked_array(subarray(x))
ym = msubarray(x)
#
z = (my+1)
assert_(isinstance(z, MaskedArray))
assert_(not isinstance(z, MSubArray))
assert_(isinstance(z._data, SubArray))
assert_equal(z._data.info, {})
#
z = (ym+1)
assert_(isinstance(z, MaskedArray))
assert_(isinstance(z, MSubArray))
assert_(isinstance(z._data, SubArray))
assert_(z._data.info['added'] > 0)
# Test that inplace methods from data get used (gh-4617)
ym += 1
assert_(isinstance(ym, MaskedArray))
assert_(isinstance(ym, MSubArray))
assert_(isinstance(ym._data, SubArray))
assert_(ym._data.info['iadded'] > 0)
#
ym._set_mask([1, 0, 0, 0, 1])
assert_equal(ym._mask, [1, 0, 0, 0, 1])
ym._series._set_mask([0, 0, 0, 0, 1])
assert_equal(ym._mask, [0, 0, 0, 0, 1])
#
xsub = subarray(x, info={'name':'x'})
mxsub = masked_array(xsub)
assert_(hasattr(mxsub, 'info'))
assert_equal(mxsub.info, xsub.info)
def test_subclasspreservation(self):
# Checks that masked_array(...,subok=True) preserves the class.
x = np.arange(5)
m = [0, 0, 1, 0, 0]
xinfo = [(i, j) for (i, j) in zip(x, m)]
xsub = MSubArray(x, mask=m, info={'xsub':xinfo})
#
mxsub = masked_array(xsub, subok=False)
assert_(not isinstance(mxsub, MSubArray))
assert_(isinstance(mxsub, MaskedArray))
assert_equal(mxsub._mask, m)
#
mxsub = asarray(xsub)
assert_(not isinstance(mxsub, MSubArray))
assert_(isinstance(mxsub, MaskedArray))
assert_equal(mxsub._mask, m)
#
mxsub = masked_array(xsub, subok=True)
assert_(isinstance(mxsub, MSubArray))
assert_equal(mxsub.info, xsub.info)
assert_equal(mxsub._mask, xsub._mask)
#
mxsub = asanyarray(xsub)
assert_(isinstance(mxsub, MSubArray))
assert_equal(mxsub.info, xsub.info)
assert_equal(mxsub._mask, m)
def test_subclass_items(self):
"""test that getter and setter go via baseclass"""
x = np.arange(5)
xcsub = ComplicatedSubArray(x)
mxcsub = masked_array(xcsub, mask=[True, False, True, False, False])
# getter should return a ComplicatedSubArray, even for single item
# first check we wrote ComplicatedSubArray correctly
assert_(isinstance(xcsub[1], ComplicatedSubArray))
assert_(isinstance(xcsub[1,...], ComplicatedSubArray))
assert_(isinstance(xcsub[1:4], ComplicatedSubArray))
# now that it propagates inside the MaskedArray
assert_(isinstance(mxcsub[1], ComplicatedSubArray))
assert_(isinstance(mxcsub[1,...].data, ComplicatedSubArray))
assert_(mxcsub[0] is masked)
assert_(isinstance(mxcsub[0,...].data, ComplicatedSubArray))
assert_(isinstance(mxcsub[1:4].data, ComplicatedSubArray))
# also for flattened version (which goes via MaskedIterator)
assert_(isinstance(mxcsub.flat[1].data, ComplicatedSubArray))
assert_(mxcsub.flat[0] is masked)
assert_(isinstance(mxcsub.flat[1:4].base, ComplicatedSubArray))
# setter should only work with ComplicatedSubArray input
# first check we wrote ComplicatedSubArray correctly
assert_raises(ValueError, xcsub.__setitem__, 1, x[4])
# now that it propagates inside the MaskedArray
assert_raises(ValueError, mxcsub.__setitem__, 1, x[4])
assert_raises(ValueError, mxcsub.__setitem__, slice(1, 4), x[1:4])
mxcsub[1] = xcsub[4]
mxcsub[1:4] = xcsub[1:4]
# also for flattened version (which goes via MaskedIterator)
assert_raises(ValueError, mxcsub.flat.__setitem__, 1, x[4])
assert_raises(ValueError, mxcsub.flat.__setitem__, slice(1, 4), x[1:4])
mxcsub.flat[1] = xcsub[4]
mxcsub.flat[1:4] = xcsub[1:4]
def test_subclass_nomask_items(self):
x = np.arange(5)
xcsub = ComplicatedSubArray(x)
mxcsub_nomask = masked_array(xcsub)
assert_(isinstance(mxcsub_nomask[1,...].data, ComplicatedSubArray))
assert_(isinstance(mxcsub_nomask[0,...].data, ComplicatedSubArray))
assert_(isinstance(mxcsub_nomask[1], ComplicatedSubArray))
assert_(isinstance(mxcsub_nomask[0], ComplicatedSubArray))
def test_subclass_repr(self):
"""test that repr uses the name of the subclass
and 'array' for np.ndarray"""
x = np.arange(5)
mx = masked_array(x, mask=[True, False, True, False, False])
assert_startswith(repr(mx), 'masked_array')
xsub = SubArray(x)
mxsub = masked_array(xsub, mask=[True, False, True, False, False])
assert_startswith(repr(mxsub),
f'masked_{SubArray.__name__}(data=[--, 1, --, 3, 4]')
def test_subclass_str(self):
"""test str with subclass that has overridden str, setitem"""
# first without override
x = np.arange(5)
xsub = SubArray(x)
mxsub = masked_array(xsub, mask=[True, False, True, False, False])
assert_equal(str(mxsub), '[-- 1 -- 3 4]')
xcsub = ComplicatedSubArray(x)
assert_raises(ValueError, xcsub.__setitem__, 0,
np.ma.core.masked_print_option)
mxcsub = masked_array(xcsub, mask=[True, False, True, False, False])
assert_equal(str(mxcsub), 'myprefix [-- 1 -- 3 4] mypostfix')
def test_pure_subclass_info_preservation(self):
# Test that ufuncs and methods conserve extra information consistently;
# see gh-7122.
arr1 = SubMaskedArray('test', data=[1,2,3,4,5,6])
arr2 = SubMaskedArray(data=[0,1,2,3,4,5])
diff1 = np.subtract(arr1, arr2)
assert_('info' in diff1._optinfo)
assert_(diff1._optinfo['info'] == 'test')
diff2 = arr1 - arr2
assert_('info' in diff2._optinfo)
assert_(diff2._optinfo['info'] == 'test')
class ArrayNoInheritance:
"""Quantity-like class that does not inherit from ndarray"""
def __init__(self, data, units):
self.magnitude = data
self.units = units
def __getattr__(self, attr):
return getattr(self.magnitude, attr)
def test_array_no_inheritance():
data_masked = np.ma.array([1, 2, 3], mask=[True, False, True])
data_masked_units = ArrayNoInheritance(data_masked, 'meters')
# Get the masked representation of the Quantity-like class
new_array = np.ma.array(data_masked_units)
assert_equal(data_masked.data, new_array.data)
assert_equal(data_masked.mask, new_array.mask)
# Test sharing the mask
data_masked.mask = [True, False, False]
assert_equal(data_masked.mask, new_array.mask)
assert_(new_array.sharedmask)
# Get the masked representation of the Quantity-like class
new_array = np.ma.array(data_masked_units, copy=True)
assert_equal(data_masked.data, new_array.data)
assert_equal(data_masked.mask, new_array.mask)
# Test that the mask is not shared when copy=True
data_masked.mask = [True, False, True]
assert_equal([True, False, False], new_array.mask)
assert_(not new_array.sharedmask)
# Get the masked representation of the Quantity-like class
new_array = np.ma.array(data_masked_units, keep_mask=False)
assert_equal(data_masked.data, new_array.data)
# The change did not affect the original mask
assert_equal(data_masked.mask, [True, False, True])
# Test that the mask is False and not shared when keep_mask=False
assert_(not new_array.mask)
assert_(not new_array.sharedmask)
class TestClassWrapping:
# Test suite for classes that wrap MaskedArrays
def setup_method(self):
m = np.ma.masked_array([1, 3, 5], mask=[False, True, False])
wm = WrappedArray(m)
self.data = (m, wm)
def test_masked_unary_operations(self):
# Tests masked_unary_operation
(m, wm) = self.data
with np.errstate(divide='ignore'):
assert_(isinstance(np.log(wm), WrappedArray))
def test_masked_binary_operations(self):
# Tests masked_binary_operation
(m, wm) = self.data
# Result should be a WrappedArray
assert_(isinstance(np.add(wm, wm), WrappedArray))
assert_(isinstance(np.add(m, wm), WrappedArray))
assert_(isinstance(np.add(wm, m), WrappedArray))
# add and '+' should call the same ufunc
assert_equal(np.add(m, wm), m + wm)
assert_(isinstance(np.hypot(m, wm), WrappedArray))
assert_(isinstance(np.hypot(wm, m), WrappedArray))
# Test domained binary operations
assert_(isinstance(np.divide(wm, m), WrappedArray))
assert_(isinstance(np.divide(m, wm), WrappedArray))
assert_equal(np.divide(wm, m) * m, np.divide(m, m) * wm)
# Test broadcasting
m2 = np.stack([m, m])
assert_(isinstance(np.divide(wm, m2), WrappedArray))
assert_(isinstance(np.divide(m2, wm), WrappedArray))
assert_equal(np.divide(m2, wm), np.divide(wm, m2))
def test_mixins_have_slots(self):
mixin = NDArrayOperatorsMixin()
# Should raise an error
assert_raises(AttributeError, mixin.__setattr__, "not_a_real_attr", 1)
m = np.ma.masked_array([1, 3, 5], mask=[False, True, False])
wm = WrappedArray(m)
assert_raises(AttributeError, wm.__setattr__, "not_an_attr", 2)
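# Hedged aside (standalone miniature, not part of the numpy test suite): the
# attribute propagation tested above hinges on __array_finalize__, which runs
# for views and for new-from-template arrays, so metadata survives slicing.
class _Tagged(np.ndarray):
    def __new__(cls, arr, tag=''):
        obj = np.asarray(arr).view(cls)
        obj.tag = tag
        return obj
    def __array_finalize__(self, obj):
        self.tag = getattr(obj, 'tag', '')
_t = _Tagged(np.arange(4), tag='demo')
assert _t[1:].tag == 'demo'                  # views keep the metadata
assert isinstance(np.add(_t, 1), _Tagged)    # ufunc results keep the class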
|
a019c95418ae6f9bc4427510d6f06954d176d9df
|
fa1ad2e2ac7e376fc7cb3b3a6e1bb88eed3e80be
|
/dts/airbyte/airbyte-integrations/connectors/source-braintree/source_braintree/streams.py
|
418715ecaf0f72a314ef9a5a4502d0b8976a0e76
|
[
"MIT",
"Elastic-2.0",
"Apache-2.0",
"BSD-3-Clause"
] |
permissive
|
alldatacenter/alldata
|
7bc7713c9f1d56ad6b8e59ea03206d1073b7e047
|
8d5f9a2d49ab8f9e85ccf058cb02c2fda287afc6
|
refs/heads/master
| 2023-08-05T07:32:25.442740
| 2023-08-03T13:17:24
| 2023-08-03T13:17:24
| 213,321,771
| 774
| 250
|
Apache-2.0
| 2023-09-06T17:35:32
| 2019-10-07T07:36:18
| null |
UTF-8
|
Python
| false
| false
| 7,104
|
py
|
streams.py
|
#
# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
#
from abc import ABC, abstractmethod
from datetime import datetime
from typing import Any, Generator, Iterable, List, Mapping, Optional, Union
import backoff
import braintree
import pendulum
from airbyte_cdk.models import SyncMode
from airbyte_cdk.sources.streams.core import Stream
from braintree.attribute_getter import AttributeGetter
from source_braintree.schemas import Customer, Discount, Dispute, MerchantAccount, Plan, Subscription, Transaction
from source_braintree.spec import BraintreeConfig
class BraintreeStream(Stream, ABC):
def __init__(self, config: BraintreeConfig):
self._start_date = config.start_date
self._gateway = BraintreeStream.create_gateway(config)
@staticmethod
def create_gateway(config: BraintreeConfig):
return braintree.BraintreeGateway(braintree.Configuration(**config.dict()))
@property
@abstractmethod
def model(self):
"""
Pydantic model to represent catalog schema
"""
@abstractmethod
def get_items(self, start_date: datetime) -> Generator:
"""
        Return the Braintree SDK gateway query yielding this stream's items.
"""
def get_json_schema(self):
return self.model.schema()
def stream_slices(
self,
sync_mode: SyncMode,
cursor_field: List[str] = None,
stream_state: Mapping[str, Any] = None,
) -> Iterable[Optional[Mapping[str, Any]]]:
current_datetime = pendulum.utcnow()
if sync_mode == SyncMode.full_refresh:
return [{self.cursor_field or "start_date": self._start_date or current_datetime}]
stream_state_start_date = stream_state.get(self.cursor_field)
if stream_state_start_date:
stream_state_start_date = pendulum.parse(stream_state_start_date)
start_date = stream_state_start_date or self._start_date or current_datetime
return [{self.cursor_field: start_date}]
def get_updated_state(
self,
current_stream_state: Mapping[str, Any],
latest_record: Mapping[str, Any],
):
next_state = latest_record.get(self.cursor_field)
current_state = current_stream_state.get(self.cursor_field)
current_state = pendulum.parse(current_state) if current_state else next_state
return {self.cursor_field: max(current_state, next_state).strftime("%Y-%m-%d %H:%M:%S")}
@staticmethod
def get_json_from_resource(resource_obj: Union[AttributeGetter, List[AttributeGetter]]):
if isinstance(resource_obj, list):
return [obj if not isinstance(obj, AttributeGetter) else BraintreeStream.get_json_from_resource(obj) for obj in resource_obj]
obj_dict = resource_obj.__dict__
result = dict()
for attr in obj_dict:
if not attr.startswith("_"):
result[attr] = (
BraintreeStream.get_json_from_resource(obj_dict[attr])
if isinstance(obj_dict[attr], (AttributeGetter, list))
else obj_dict[attr]
)
return result
@backoff.on_exception(
backoff.expo,
(
braintree.exceptions.GatewayTimeoutError,
braintree.exceptions.RequestTimeoutError,
braintree.exceptions.ServerError,
braintree.exceptions.ServiceUnavailableError,
braintree.exceptions.TooManyRequestsError,
),
max_tries=5,
)
def _collect_items(self, stream_slice: Mapping[str, Any]) -> List[Mapping[str, Any]]:
"""
        Fetch a list of response objects normalized according to the catalog model.
        The Braintree pagination API is designed around lazy evaluation and the
        SDK is built on this approach: it first fetches a list of ids, wraps it
        in a generator object, and then iterates over each item to fetch its
        additional details. Because of this implementation we cannot retry when
        an individual item fails.
:stream_slice Stream slice with cursor field in case of incremental stream.
:return List of objects
"""
start_date = stream_slice.get(self.cursor_field or "start_date")
items = self.get_items(start_date)
result = []
for item in items:
item = self.get_json_from_resource(item)
item = self.model(**item)
result.append(item.dict(exclude_unset=True))
return result
def read_records(
self,
sync_mode: SyncMode,
cursor_field: List[str] = None,
stream_slice: Mapping[str, Any] = None,
stream_state: Mapping[str, Any] = None,
) -> Iterable[Mapping[str, Any]]:
yield from self._collect_items(stream_slice)
class CustomerStream(BraintreeStream):
"""
https://developer.paypal.com/braintree/docs/reference/request/customer/search
"""
primary_key = "id"
model = Customer
cursor_field = "created_at"
def get_items(self, start_date: datetime):
return self._gateway.customer.search(braintree.CustomerSearch.created_at >= start_date)
class DiscountStream(BraintreeStream):
"""
https://developer.paypal.com/braintree/docs/reference/response/discount
"""
primary_key = "id"
model = Discount
def get_items(self, start_date: datetime):
return self._gateway.discount.all()
class DisputeStream(BraintreeStream):
"""
https://developer.paypal.com/braintree/docs/reference/request/dispute/search
"""
primary_key = "id"
model = Dispute
cursor_field = "received_date"
def get_items(self, start_date: datetime):
return self._gateway.dispute.search(braintree.DisputeSearch.received_date >= start_date.date()).disputes.items
class TransactionStream(BraintreeStream):
"""
https://developer.paypal.com/braintree/docs/reference/response/transaction
"""
primary_key = "id"
model = Transaction
cursor_field = "created_at"
def get_items(self, start_date: datetime):
return self._gateway.transaction.search(braintree.TransactionSearch.created_at >= start_date)
class MerchantAccountStream(BraintreeStream):
"""
https://developer.paypal.com/braintree/docs/reference/response/merchant-account
"""
primary_key = "id"
model = MerchantAccount
def get_items(self, start_date: datetime):
return self._gateway.merchant_account.all().merchant_accounts
class PlanStream(BraintreeStream):
"""
https://developer.paypal.com/braintree/docs/reference/response/plan
"""
primary_key = "id"
model = Plan
def get_items(self, start_date: datetime):
return self._gateway.plan.all()
class SubscriptionStream(BraintreeStream):
"""
https://developer.paypal.com/braintree/docs/reference/response/subscription
"""
primary_key = "id"
model = Subscription
cursor_field = "created_at"
def get_items(self, start_date: datetime):
return self._gateway.subscription.search(braintree.SubscriptionSearch.created_at >= start_date).items
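# Hedged aside (no Braintree dependency; the names below are illustrative):
# the docstring of _collect_items explains why the backoff decorator wraps the
# whole collection step. Because the SDK yields items lazily, a transient
# error can surface mid-iteration, so the retry scope must cover the full
# materialization rather than a single item.
class _FlakySource:
    def __init__(self):
        self.calls = 0
    def items(self):
        self.calls += 1
        if self.calls < 2:
            raise TimeoutError('transient')
        yield from (1, 2, 3)
_source = _FlakySource()
@backoff.on_exception(backoff.expo, TimeoutError, max_tries=3)
def _collect():
    return list(_source.items())  # materialize inside the retried scope
assert _collect() == [1, 2, 3]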
|
54484bc47ba858de8a007a744797e9dfe6c03573
|
3150bbbd16374f0d39c822d536da12745664a38b
|
/setup.py
|
ed00501f22c991b33f18666e3bfe7844c5d940fd
|
[
"Apache-2.0"
] |
permissive
|
VirusTotal/yara-python
|
9a97ddd62c8dbbe265b634c3473bc77fb6f9cf08
|
956db14f05c2bd17d01b78785de1b70e3e8b5315
|
refs/heads/master
| 2023-08-28T10:52:51.346332
| 2023-08-23T13:41:15
| 2023-08-23T13:41:15
| 42,300,237
| 571
| 188
|
Apache-2.0
| 2023-09-13T14:26:08
| 2015-09-11T09:37:35
|
C
|
UTF-8
|
Python
| false
| false
| 14,575
|
py
|
setup.py
|
#
# Copyright (c) 2007-2022. The YARA Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from setuptools import setup, Command, Extension
from distutils.command.build import build
from distutils.command.build_ext import build_ext
from codecs import open
import distutils.errors
import distutils.ccompiler
import distutils.sysconfig
import contextlib
import os
import sys
import tempfile
import shutil
import subprocess
OPTIONS = [
('dynamic-linking', None, 'link dynamically against libyara'),
('enable-cuckoo', None, 'enable "cuckoo" module'),
('enable-magic', None, 'enable "magic" module'),
('enable-dex', None, 'enable "dex" module'),
('enable-macho', None, 'enable "macho" module'),
('enable-profiling', None, 'enable profiling features'),
('enable-openssl', None, 'enable features that depend on OpenSSL'),
]
BOOLEAN_OPTIONS = [
'dynamic-linking',
'enable-cuckoo',
'enable-magic',
'enable-dex',
'enable-macho',
'enable-profiling',
'enable-openssl',
]
@contextlib.contextmanager
def muted(*streams):
"""A context manager to redirect stdout and/or stderr to /dev/null.
Examples:
with muted(sys.stdout):
...
with muted(sys.stderr):
...
with muted(sys.stdout, sys.stderr):
...
"""
devnull = open(os.devnull, 'w')
try:
old_streams = [os.dup(s.fileno()) for s in streams]
for s in streams:
os.dup2(devnull.fileno(), s.fileno())
yield
finally:
for o,n in zip(old_streams, streams):
os.dup2(o, n.fileno())
devnull.close()
def has_function(function_name, includes=None, include_dirs=None, libraries=None, library_dirs=None):
"""Checks if a given functions exists in the current platform."""
compiler = distutils.ccompiler.new_compiler()
with muted(sys.stdout, sys.stderr):
result = compiler.has_function(
function_name,
includes=includes,
include_dirs=include_dirs,
libraries=libraries,
library_dirs=library_dirs)
if os.path.exists('a.out'):
os.remove('a.out')
return result
def has_header(header_name):
compiler = distutils.ccompiler.new_compiler()
with muted(sys.stdout, sys.stderr):
with tempfile.NamedTemporaryFile(mode='w', prefix=header_name, delete=False, suffix='.c') as f:
f.write("""
#include <{}>
int main() {{ return 0; }}
""".format(header_name))
f.close()
try:
compiler.compile([f.name])
except distutils.errors.CompileError:
return False
return True
class BuildCommand(build):
user_options = build.user_options + OPTIONS
boolean_options = build.boolean_options + BOOLEAN_OPTIONS
def initialize_options(self):
build.initialize_options(self)
self.dynamic_linking = None
self.enable_magic = None
self.enable_cuckoo = None
self.enable_dex = None
self.enable_macho = None
self.enable_profiling = None
self.enable_openssl = None
def finalize_options(self):
build.finalize_options(self)
class BuildExtCommand(build_ext):
user_options = build_ext.user_options + OPTIONS
boolean_options = build_ext.boolean_options + BOOLEAN_OPTIONS
def initialize_options(self):
build_ext.initialize_options(self)
self.dynamic_linking = None
self.enable_magic = None
self.enable_cuckoo = None
self.enable_dex = None
self.enable_macho = None
self.enable_profiling = None
self.enable_openssl = None
def finalize_options(self):
build_ext.finalize_options(self)
# If the build_ext command was invoked by the build command, take the
# values for these options from the build command.
self.set_undefined_options('build',
('dynamic_linking', 'dynamic_linking'),
('enable_magic', 'enable_magic'),
('enable_cuckoo', 'enable_cuckoo'),
('enable_dex', 'enable_dex'),
('enable_macho', 'enable_macho'),
('enable_profiling', 'enable_profiling'),
('enable_openssl', 'enable_openssl'))
        if self.enable_magic and self.dynamic_linking:
            raise distutils.errors.DistutilsOptionError(
                "--enable-magic can't be used with --dynamic-linking")
        if self.enable_cuckoo and self.dynamic_linking:
            raise distutils.errors.DistutilsOptionError(
                "--enable-cuckoo can't be used with --dynamic-linking")
        if self.enable_dex and self.dynamic_linking:
            raise distutils.errors.DistutilsOptionError(
                "--enable-dex can't be used with --dynamic-linking")
        if self.enable_macho and self.dynamic_linking:
            raise distutils.errors.DistutilsOptionError(
                "--enable-macho can't be used with --dynamic-linking")
        if self.enable_openssl and self.dynamic_linking:
            raise distutils.errors.DistutilsOptionError(
                "--enable-openssl can't be used with --dynamic-linking")
def run(self):
"""Execute the build command."""
module = self.distribution.ext_modules[0]
base_dir = os.path.dirname(__file__)
if base_dir:
os.chdir(base_dir)
exclusions = []
for define in self.define or []:
module.define_macros.append(define)
for library in self.libraries or []:
module.libraries.append(library)
building_for_windows = self.plat_name in ('win32','win-amd64')
building_for_osx = 'macosx' in self.plat_name
building_for_linux = 'linux' in self.plat_name
building_for_freebsd = 'freebsd' in self.plat_name
building_for_openbsd = 'openbsd' in self.plat_name # need testing
if building_for_windows:
arch = 'x86' if self.plat_name == 'win32' else 'x64'
openssl_include_dirs = [
os.path.join(base_dir, 'yara\\windows\\vs2015\\packages\\YARA.OpenSSL.{}.1.1.1\\include'.format(arch)),
os.path.join(base_dir, 'yara\\windows\\vs2017\\packages\\YARA.OpenSSL.{}.1.1.1\\include'.format(arch))
]
openssl_library_dirs = [
os.path.join(base_dir, 'yara\\windows\\vs2015\\packages\\YARA.OpenSSL.{}.1.1.1\\lib'.format(arch)),
os.path.join(base_dir, 'yara\\windows\\vs2017\\packages\\YARA.OpenSSL.{}.1.1.1\\lib'.format(arch))
]
openssl_libraries = ['libcrypto']
else:
openssl_include_dirs = []
openssl_library_dirs = []
openssl_libraries = ['crypto']
if building_for_linux:
module.define_macros.append(('_GNU_SOURCE', '1'))
module.define_macros.append(('USE_LINUX_PROC', '1'))
module.extra_compile_args.append('-std=c99')
elif building_for_windows:
module.define_macros.append(('USE_WINDOWS_PROC', '1'))
module.define_macros.append(('_CRT_SECURE_NO_WARNINGS', '1'))
module.libraries.append('kernel32')
module.libraries.append('advapi32')
module.libraries.append('user32')
module.libraries.append('crypt32')
module.libraries.append('ws2_32')
elif building_for_osx:
module.define_macros.append(('_GNU_SOURCE', '1'))
module.define_macros.append(('USE_MACH_PROC', '1'))
module.extra_compile_args.append('-std=c99')
module.include_dirs.append('/usr/local/opt/openssl/include')
module.include_dirs.append('/opt/local/include')
module.library_dirs.append('/opt/local/lib')
module.include_dirs.append('/usr/local/include')
module.library_dirs.append('/usr/local/lib')
module.library_dirs.append('/usr/local/opt/openssl/lib')
elif building_for_freebsd:
module.define_macros.append(('_GNU_SOURCE', '1'))
module.define_macros.append(('USE_FREEBSD_PROC', '1'))
module.include_dirs.append('/opt/local/include')
module.library_dirs.append('/opt/local/lib')
module.include_dirs.append('/usr/local/include')
module.library_dirs.append('/usr/local/lib')
elif building_for_openbsd:
module.define_macros.append(('_GNU_SOURCE', '1'))
module.define_macros.append(('USE_OPENBSD_PROC', '1'))
module.extra_compile_args.append('-std=c99')
module.include_dirs.append('/opt/local/include')
module.library_dirs.append('/opt/local/lib')
module.include_dirs.append('/usr/local/include')
module.library_dirs.append('/usr/local/lib')
else:
module.define_macros.append(('_GNU_SOURCE', '1'))
module.define_macros.append(('USE_NO_PROC', '1'))
module.extra_compile_args.append('-std=c99')
if has_header('stdbool.h'):
module.define_macros.append(('HAVE_STDBOOL_H', '1'))
if has_function('memmem'):
module.define_macros.append(('HAVE_MEMMEM', '1'))
if has_function('strlcpy'):
module.define_macros.append(('HAVE_STRLCPY', '1'))
if has_function('strlcat'):
module.define_macros.append(('HAVE_STRLCAT', '1'))
if self.enable_profiling:
module.define_macros.append(('YR_PROFILING_ENABLED', '1'))
if self.dynamic_linking:
module.libraries.append('yara')
else:
# Is OpenSSL available?
if (has_function('OpenSSL_add_all_algorithms',
includes=['openssl/evp.h'],
include_dirs=module.include_dirs + openssl_include_dirs,
libraries=module.libraries + openssl_libraries,
library_dirs=module.library_dirs + openssl_library_dirs)
# In case OpenSSL is being linked statically
or has_function('OpenSSL_add_all_algorithms',
includes=['openssl/evp.h'],
include_dirs=module.include_dirs + openssl_include_dirs,
libraries=module.libraries + openssl_libraries + ['dl', 'pthread', 'z'],
library_dirs=module.library_dirs + openssl_library_dirs)
or self.enable_openssl):
module.define_macros.append(('HASH_MODULE', '1'))
module.define_macros.append(('HAVE_LIBCRYPTO', '1'))
module.libraries.extend(openssl_libraries)
module.include_dirs.extend(openssl_include_dirs)
module.library_dirs.extend(openssl_library_dirs)
elif building_for_windows:
# OpenSSL is not available, but in Windows we can rely on Wincrypt for
# hashing functions.
module.define_macros.append(('HASH_MODULE', '1'))
module.define_macros.append(('HAVE_WINCRYPT_H', '1'))
# The authenticode parser depends on OpenSSL and must be excluded.
exclusions.append('yara/libyara/modules/pe/authenticode-parser')
else:
# Without OpenSSL there's no hash module nor authenticode parser.
exclusions.append('yara/libyara/modules/hash/hash.c')
exclusions.append('yara/libyara/modules/pe/authenticode-parser')
if self.enable_magic:
module.define_macros.append(('MAGIC_MODULE', '1'))
module.libraries.append('magic')
else:
exclusions.append('yara/libyara/modules/magic/magic.c')
if self.enable_cuckoo:
module.define_macros.append(('CUCKOO_MODULE', '1'))
module.libraries.append('jansson')
else:
exclusions.append('yara/libyara/modules/cuckoo/cuckoo.c')
if self.enable_dex:
module.define_macros.append(('DEX_MODULE', '1'))
else:
exclusions.append('yara/libyara/modules/dex/dex.c')
if self.enable_macho:
module.define_macros.append(('MACHO_MODULE', '1'))
else:
exclusions.append('yara/libyara/modules/macho/macho.c')
# exclude pb_tests module
exclusions.append('yara/libyara/modules/pb_tests/pb_tests.c')
exclusions.append('yara/libyara/modules/pb_tests/pb_tests.pb-c.c')
# Always turn on the DOTNET module.
module.define_macros.append(('DOTNET_MODULE', '1'))
exclusions = [os.path.normpath(x) for x in exclusions]
for directory, _, files in os.walk('yara/libyara/'):
for f in files:
f = os.path.normpath(os.path.join(directory, f))
# Ignore any file that is not a .c file
if not f.endswith('.c'):
continue
# Ignore files that are listed in the exclusion list.
if any(map(lambda e: f.startswith(e), exclusions)):
continue
module.sources.append(f)
build_ext.run(self)
class UpdateCommand(Command):
"""Update libyara source.
This is normally only run by packagers to make a new release.
"""
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
subprocess.check_call(['git', 'stash'], cwd='yara')
subprocess.check_call(['git', 'submodule', 'init'])
subprocess.check_call(['git', 'submodule', 'update'])
subprocess.check_call(['git', 'reset', '--hard'], cwd='yara')
subprocess.check_call(['git', 'clean', '-x', '-f', '-d'], cwd='yara')
subprocess.check_call(['git', 'checkout', 'master'], cwd='yara')
subprocess.check_call(['git', 'pull'], cwd='yara')
subprocess.check_call(['git', 'fetch', '--tags'], cwd='yara')
tag_name = 'tags/v%s' % self.distribution.metadata.version
subprocess.check_call(['git', 'checkout', tag_name], cwd='yara')
subprocess.check_call(['./bootstrap.sh'], cwd='yara')
subprocess.check_call(['./configure'], cwd='yara')
with open('README.rst', 'r', 'utf-8') as f:
readme = f.read()
setup(
name='yara-python',
version='4.4.0',
description='Python interface for YARA',
long_description=readme,
license='Apache 2.0',
author='Victor M. Alvarez',
author_email='plusvic@gmail.com, vmalvarez@virustotal.com',
url='https://github.com/VirusTotal/yara-python',
classifiers=[
'Programming Language :: Python',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Development Status :: 5 - Production/Stable',
],
zip_safe=False,
cmdclass={
'build': BuildCommand,
'build_ext': BuildExtCommand,
'update': UpdateCommand},
ext_modules=[Extension(
name='yara',
include_dirs=['yara/libyara/include', 'yara/libyara/', '.'],
define_macros=[('BUCKETS_128', 1), ('CHECKSUM_1B', 1)],
sources=['yara-python.c'])])
|
7c3a69de7acecfadd3e42c4c79d24401d272d9d6
|
eb9f655206c43c12b497c667ba56a0d358b6bc3a
|
/python/testData/inspections/RenameFunctionShadowingBuiltins_after.py
|
4c318970ef8cc3a090c80e5f3454e08507f806e0
|
[
"Apache-2.0"
] |
permissive
|
JetBrains/intellij-community
|
2ed226e200ecc17c037dcddd4a006de56cd43941
|
05dbd4575d01a213f3f4d69aa4968473f2536142
|
refs/heads/master
| 2023-09-03T17:06:37.560889
| 2023-09-03T11:51:00
| 2023-09-03T12:12:27
| 2,489,216
| 16,288
| 6,635
|
Apache-2.0
| 2023-09-12T07:41:58
| 2011-09-30T13:33:05
| null |
UTF-8
|
Python
| false
| false
| 72
|
py
|
RenameFunctionShadowingBuiltins_after.py
|
def A_NEW_NAME(x):
return x
def f():
return A_NEW_NAME('foo')
|
2b5debd23aaa745599108e892f49907b0bc52d75
|
d110546d747d7e3865ce5742d5fca09f404623c0
|
/tests/pytests/unit/states/test_pip.py
|
1a71be86ac10d852c43716421a36db95aeb07bcc
|
[
"Apache-2.0",
"MIT",
"BSD-2-Clause"
] |
permissive
|
saltstack/salt
|
354fc86a7be1f69514b3dd3b2edb9e6f66844c1d
|
1ef90cbdc7203f97775edb7666db86a41eb9fc15
|
refs/heads/master
| 2023-07-19T20:56:20.210556
| 2023-06-29T23:12:28
| 2023-07-19T11:47:47
| 1,390,248
| 11,026
| 6,296
|
Apache-2.0
| 2023-09-14T20:45:37
| 2011-02-20T20:16:56
|
Python
|
UTF-8
|
Python
| false
| false
| 2,648
|
py
|
test_pip.py
|
"""
:codeauthor: Eric Graham <eric.graham@vantagepnt.com>
"""
import logging
import pytest
import salt.states.pip_state as pip_state
from salt.exceptions import CommandExecutionError
from tests.support.mock import MagicMock, patch
@pytest.fixture
def configure_loader_modules():
return {pip_state: {"__env__": "base", "__opts__": {"test": False}}}
def test_issue_64169(caplog):
pkg_to_install = "nonexistent_package"
exception_message = "Invalid JSON (test_issue_64169)"
mock_pip_list = MagicMock(
side_effect=[
CommandExecutionError(
exception_message
), # pre-cache the pip list (preinstall)
{}, # Checking if the pkg is already installed
{pkg_to_install: "100.10.1"}, # Confirming successful installation
]
)
mock_pip_version = MagicMock(return_value="100.10.1")
mock_pip_install = MagicMock(return_value={"retcode": 0, "stdout": ""})
with patch.dict(
pip_state.__salt__,
{
"pip.list": mock_pip_list,
"pip.version": mock_pip_version,
"pip.install": mock_pip_install,
},
):
with caplog.at_level(logging.WARNING):
# Call pip.installed with a specifically 'broken' pip.list.
# pip.installed should continue, but log the exception from pip.list.
# pip.installed should NOT raise an exception itself.
# noinspection PyBroadException
try:
pip_state.installed(
name=pkg_to_install,
use_wheel=False, # Set False to simplify testing
no_use_wheel=False, # '
no_binary=False, # '
log=None, # Regression will cause this function call to throw an AttributeError
)
except AttributeError as exc:
# Observed behavior in #64169
pytest.fail(
"Regression on #64169: pip_state.installed seems to be throwing an unexpected AttributeException: "
f"{exc}"
)
# Take 64169 further and actually confirm that the exception from pip.list got logged.
assert (
"Pre-caching of PIP packages during states.pip.installed failed by exception "
f"from pip.list: {exception_message}" in caplog.messages
)
# Confirm that the state continued to install the package as expected.
# Only check the 'pkgs' parameter of pip.install
assert mock_pip_install.call_args.kwargs["pkgs"] == pkg_to_install
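# Hedged aside (reduced standalone pattern, not salt code): the caplog usage
# above is pytest's standard way to assert on log output. caplog.at_level()
# captures records so a test can verify a warning was emitted even when the
# function under test swallows the underlying exception.
def _flaky_step():
    logging.getLogger(__name__).warning(
        "pre-caching failed: %s", "Invalid JSON")
def test_flaky_step_logs(caplog):
    with caplog.at_level(logging.WARNING):
        _flaky_step()
    assert "pre-caching failed: Invalid JSON" in caplog.text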
|
e7dfbcf19b4325d375caade56bef5a02863d678c
|
96dcea595e7c16cec07b3f649afd65f3660a0bad
|
/tests/components/random/test_binary_sensor.py
|
3bcf43ae22e42409cfffa9a5e78a0fbd29345d88
|
[
"Apache-2.0"
] |
permissive
|
home-assistant/core
|
3455eac2e9d925c92d30178643b1aaccf3a6484f
|
80caeafcb5b6e2f9da192d0ea6dd1a5b8244b743
|
refs/heads/dev
| 2023-08-31T15:41:06.299469
| 2023-08-31T14:50:53
| 2023-08-31T14:50:53
| 12,888,993
| 35,501
| 20,617
|
Apache-2.0
| 2023-09-14T21:50:15
| 2013-09-17T07:29:48
|
Python
|
UTF-8
|
Python
| false
| false
| 1,286
|
py
|
test_binary_sensor.py
|
"""The test for the Random binary sensor platform."""
from unittest.mock import patch
from homeassistant.core import HomeAssistant
from homeassistant.setup import async_setup_component
async def test_random_binary_sensor_on(hass: HomeAssistant) -> None:
"""Test the Random binary sensor."""
config = {"binary_sensor": {"platform": "random", "name": "test"}}
with patch(
"homeassistant.components.random.binary_sensor.getrandbits",
return_value=1,
):
assert await async_setup_component(
hass,
"binary_sensor",
config,
)
await hass.async_block_till_done()
state = hass.states.get("binary_sensor.test")
assert state.state == "on"
async def test_random_binary_sensor_off(hass: HomeAssistant) -> None:
"""Test the Random binary sensor."""
config = {"binary_sensor": {"platform": "random", "name": "test"}}
with patch(
"homeassistant.components.random.binary_sensor.getrandbits",
return_value=False,
):
assert await async_setup_component(
hass,
"binary_sensor",
config,
)
await hass.async_block_till_done()
state = hass.states.get("binary_sensor.test")
assert state.state == "off"
|
733ad269a840253ba20e007fd1022d21c348dbb4
|
ba9d3192424dd9417a2d079e2df3fbf7536456a1
|
/tests/impl/translators/junos/openconfig_vlan/vlans.py
|
abd36305cca9a836bf4b3dd8f89e51b1538f5262
|
[
"Apache-2.0"
] |
permissive
|
networktocode/yangify
|
45b0b0525f264418e36d1aa71e2b7cccbc395bfc
|
9b431e4be268708aeeaf91360f6b3dfef57f18d8
|
refs/heads/develop
| 2021-11-08T07:22:29.883883
| 2021-09-16T14:20:28
| 2021-09-16T14:20:28
| 185,183,920
| 117
| 29
|
Apache-2.0
| 2021-09-16T14:20:29
| 2019-05-06T11:32:24
|
Python
|
UTF-8
|
Python
| false
| false
| 1,548
|
py
|
vlans.py
|
from typing import Optional
from lxml import etree
from yangify.translator import Translator, TranslatorData, unneeded
class VlanConfig(Translator):
vlan_id = unneeded
def name(self, value: Optional[str]) -> None:
if value:
etree.SubElement(self.yy.result, "name").text = value
else:
etree.SubElement(self.yy.result, "name", delete="delete")
def status(self, value: Optional[str]) -> None:
if value == "ACTIVE":
etree.SubElement(self.yy.result, "disable", delete="delete")
else:
etree.SubElement(self.yy.result, "disable")
class Vlan(Translator):
class Yangify(TranslatorData):
def pre_process_list(self) -> None:
if self.to_remove and not self.replace:
for element in self.to_remove:
iface = etree.SubElement(self.result, "vlan", delete="delete")
etree.SubElement(iface, "vlan-id").text = str(
element.value["vlan-id"]
)
def pre_process(self) -> None:
self.result = etree.SubElement(self.result, "vlan")
etree.SubElement(self.result, "vlan-id").text = str(self.key)
vlan_id = unneeded
config = VlanConfig
class Vlans(Translator):
class Yangify(TranslatorData):
def pre_process(self) -> None:
self.result = etree.SubElement(self.root_result, "vlans")
if self.replace:
self.result.attrib["replace"] = "replace"
vlan = Vlan
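# Hedged aside (illustration only): the delete="delete" attribute used above
# is how Junos configuration XML expresses "remove this node". The two shapes
# the translators emit look like this:
_demo = etree.Element("vlan")
etree.SubElement(_demo, "name").text = "storage"     # set a leaf value
etree.SubElement(_demo, "disable", delete="delete")  # request deletion
# etree.tostring(_demo) -> b'<vlan><name>storage</name><disable delete="delete"/></vlan>'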
|
8ca986cb264d96969c9a6a28624b6d5c6b7b2107
|
fa89ef4a8eb06dc2015d7116637f230b6891eb8d
|
/refinery/units/formats/archive/xtnode.py
|
cf86ba8c55a93c6ab0d44a286ee745914d84ca05
|
[
"BSD-3-Clause"
] |
permissive
|
binref/refinery
|
f61878d9fddf616fee8edf226df22f6a35238940
|
4c7c3717ae45543b9d7bae60a4af4c00993cf719
|
refs/heads/master
| 2023-08-17T17:02:34.357138
| 2023-08-14T08:43:05
| 2023-08-14T08:43:05
| 228,019,736
| 439
| 48
|
NOASSERTION
| 2023-09-11T10:26:02
| 2019-12-14T12:32:06
|
Python
|
UTF-8
|
Python
| false
| false
| 8,588
|
py
|
xtnode.py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from __future__ import annotations
from typing import Iterable, Optional
import re
import json
from pathlib import Path
from refinery.units.formats.archive import Arg, ArchiveUnit, UnpackResult
from refinery.units.encoding.esc import esc
from refinery.lib.structures import EOF, StructReader
from refinery.lib.patterns import formats
from refinery.lib.types import ByteStr, JSON
from refinery.units.pattern.carve_json import JSONCarver
class JSONReader(StructReader):
def read_string(self) -> Optional[str]:
quote = self.u8()
value = bytearray()
if quote not in B'\"\'':
raise RuntimeError('trying to read a string, but no quote character was found')
escaped = False
while True:
char = self.u8()
if escaped:
escaped = False
            elif char in B'\\':  # u8() returns an int, so test byte membership
escaped = True
elif char == quote:
break
value.append(char)
return value | esc | str
def read_json(self) -> Optional[JSON]:
while self.u8() not in b'[{':
pass
self.seekrel(-1)
end = JSONCarver.find_end(self._data, self._cursor)
if end is None:
return None
data = self._data[self._cursor:end]
self._cursor = end
if isinstance(data, memoryview):
data = bytes(data)
return json.loads(data)
def skip_comma(self):
while self.u8() in b'\n\t\r\f\v\x20,':
pass
self.seekrel(-1)
return self
class xtnode(ArchiveUnit):
"""
    Extracts and decompiles files from compiled Node.js applications. Supports both nexe and pkg, two
utilities that are commonly used to generate stand-alone executables.
"""
_NEXE_SENTINEL = B'<nexe~~sentinel>'
_PKG_PAYLOAD_P = B'PAYLOAD_POSITION'
_PKG_PAYLOAD_S = B'PAYLOAD_SIZE'
_PKG_PRELUDE_P = B'PRELUDE_POSITION'
_PKG_PRELUDE_S = B'PRELUDE_SIZE'
def __init__(
self, *paths, entry: Arg.Switch('-u', help='Only extract the entry point.') = False,
list=False, join_path=False, drop_path=False, fuzzy=0, exact=False, regex=False,
path=b'path', date=b'date',
):
super().__init__(*paths, entry=entry,
list=list, join_path=join_path, drop_path=drop_path, fuzzy=fuzzy, exact=exact, regex=regex,
path=path, date=date)
def unpack(self, data: ByteStr) -> Iterable[UnpackResult]:
if self._is_nexe(data):
self.log_info('unpacking as nexe')
yield from self._unpack_nexe(data)
return
if self._is_pkg(data):
self.log_info('unpacking as pkg')
yield from self._unpack_pkg(data)
return
def _unpack_nexe(self, data: ByteStr):
try:
ep = re.compile(
RB"entry\s*=\s*path\.resolve\(path\.dirname\(process\.execPath\),\s*(%s)\)" % formats.string)
ep, = ep.finditer(data)
except Exception:
ep = None
self.log_info('could not identify entry point')
else:
ep = ep.group(1) | esc(quoted=True) | str
self.log_info(F'entry point: {ep}')
view = memoryview(data)
for marker in re.finditer(re.escape(self._NEXE_SENTINEL), data):
end = marker.end() + 16
sizes = data[marker.end():end]
if sizes.startswith(b"')"):
continue
reader = StructReader(sizes)
code_size = int(reader.f64())
blob_size = int(reader.f64())
start = marker.start() - code_size - blob_size
try:
reader = StructReader(view[start:end])
code = reader.read_exactly(code_size)
blob = reader.read_exactly(blob_size)
except EOF:
self.log_debug(F'found marker at 0x{marker.start():X}, but failed to read data')
continue
else:
self.log_debug(F'found marker at 0x{marker.start():X}, data start at {start:X}')
for rsrc in re.finditer(RB'process\.__nexe\s*=', code):
rsrc = JSONReader(code[rsrc.end():])
rsrc = rsrc.read_json()
if len(rsrc) == 1:
_, rsrc = rsrc.popitem()
for path, (offset, length) in rsrc.items():
end = offset + length
if ep and self.args.entry and path != ep:
continue
yield UnpackResult(path, blob[offset:end])
def _unpack_pkg(self, data: ByteStr):
def _extract_coordinates(*v):
for name in v:
pattern = BR'%s\s{0,3}=\s{0,3}(%s)' % (name, formats.string)
value, = re.findall(pattern, data)
yield int((value | esc(quoted=True) | str).strip())
def _extract_data(*v):
try:
offset, length = _extract_coordinates(*v)
except Exception:
return None
return data[offset:offset + length]
payload = _extract_data(self._PKG_PAYLOAD_P, self._PKG_PAYLOAD_S)
if not payload:
return
prelude = _extract_data(self._PKG_PRELUDE_P, self._PKG_PRELUDE_S)
if not prelude:
return
mapping = re.search(BR'sourceMappingURL=common\.js\.map\s*\},\s*\{', prelude)
if not mapping:
return
reader = JSONReader(prelude[mapping.end() - 1:])
files = reader.read_json()
entry = reader.skip_comma().read_string()
links = reader.skip_comma().read_json()
# _unknown1 = reader.skip_comma().read_json()
# _unknown2 = reader.skip_comma().read_terminated_array(B')').strip()
if not files:
return
root = Path()
view = memoryview(payload)
for part in Path(next(iter(files))).parts:
more = root / part
if not all(Path(path).is_relative_to(more) for path in files):
break
root = more
self.log_debug(F'detected root directory {root}')
try:
entry = Path(entry).relative_to(root)
except Exception:
entry = None
self.log_info(F'entry point not relative to root directory: {entry}')
else:
self.log_info(F'entry point is {entry}')
for src, dst in links.items():
src_path = Path(src)
dst_path = Path(dst)
new_files = {}
self.log_info('link src:', lambda: str(src_path.relative_to(root)))
self.log_info('link dst:', lambda: str(dst_path.relative_to(root)))
for p, location in files.items():
path = Path(p)
if not path.is_relative_to(src_path):
continue
new_path = dst_path / path.relative_to(src_path)
new_files[new_path] = location
self.log_debug('synthesizing linked file:', lambda: str(new_path.relative_to(root)))
files.update(new_files)
for p, location in files.items():
path = Path(p).relative_to(root)
if entry and self.args.entry and path != entry:
continue
data = None
for kind, (offset, length) in location.items():
stop = offset + length
if kind == '3': # metadata
continue
if kind == '2': # unknown
continue
if kind in '01':
data = view[offset:stop]
if data is not None:
yield UnpackResult(str(path), data)
@classmethod
def _is_nexe(cls, data: ByteStr) -> bool:
return cls._NEXE_SENTINEL in data
@classmethod
def _is_pkg(cls, data: ByteStr) -> bool:
if cls._PKG_PAYLOAD_P not in data:
return False
if cls._PKG_PAYLOAD_S not in data:
return False
if cls._PKG_PRELUDE_P not in data:
return False
if cls._PKG_PRELUDE_S not in data:
return False
return True
@classmethod
def handles(cls, data: ByteStr) -> Optional[bool]:
return cls._is_nexe(data) or cls._is_pkg(data)
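# A minimal standalone sketch of the nexe footer layout that _unpack_nexe
# handles above: each sentinel is followed by two float64 size fields
# (assumed little-endian, matching StructReader's default), and the code and
# blob regions sit immediately before the sentinel.
import struct

def carve_nexe(data: bytes):
    """Yield (code, blob) pairs for every plausible nexe sentinel in data."""
    sentinel = b'<nexe~~sentinel>'
    pos = data.find(sentinel)
    while pos >= 0:
        sizes = data[pos + len(sentinel):pos + len(sentinel) + 16]
        if len(sizes) == 16 and not sizes.startswith(b"')"):
            try:
                code_size, blob_size = (
                    int(v) for v in struct.unpack('<dd', sizes))
            except (ValueError, OverflowError):  # nan/inf size fields
                pass
            else:
                start = pos - code_size - blob_size
                if start >= 0:
                    code = data[start:start + code_size]
                    yield code, data[start + code_size:pos]
        pos = data.find(sentinel, pos + len(sentinel))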
|
21d122cc17fd93ace25529d683465558fcdd461d
|
fa3f6d4e9169fb95f828013d179d03accdff381b
|
/grr/client/grr_response_client/unprivileged/echo_server.py
|
c671366d4d48f65247e912da85a2eb500c90f3b1
|
[
"Apache-2.0"
] |
permissive
|
google/grr
|
c51a2bd251ed2f7adae538541990a2cc01fdcc8c
|
44c0eb8c938302098ef7efae8cfd6b90bcfbb2d6
|
refs/heads/master
| 2023-09-05T20:02:36.823914
| 2023-07-26T09:34:09
| 2023-07-26T09:34:09
| 14,909,673
| 4,683
| 927
|
Apache-2.0
| 2023-07-26T09:34:10
| 2013-12-04T00:17:53
|
Python
|
UTF-8
|
Python
| false
| false
| 716
|
py
|
echo_server.py
|
#!/usr/bin/env python
"""Simple echo server for testing.
The server receives data and an attachment and replies with:
* data + "x"
* attachment + "x"
"""
from absl import app
from grr_response_client.unprivileged import communication
def Handler(connection: communication.Connection):
while True:
recv_result = connection.Recv()
connection.Send(
communication.Message(recv_result.data + b"x",
recv_result.attachment + b"x"))
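# Example exchange, derived from the handler above: a peer that sends
# communication.Message(b"foo", b"bar") receives back
# communication.Message(b"foox", b"barx"); the loop runs until Recv
# raises (e.g. on disconnect).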
def main(argv):
communication.Main(
communication.Channel.FromSerialized(
pipe_input=int(argv[1]), pipe_output=int(argv[2])),
Handler,
user="",
group="")
if __name__ == "__main__":
app.run(main)
|
40a5807290dbee119b45c14ed1b6a782a204616f
|
99d5e10013778f3822b3dac404db04156ad99acb
|
/test/test_util/test_ema_logging.py
|
81e62e0470ff0510cc14c172068bcc83544d1863
|
[
"BSD-3-Clause"
] |
permissive
|
quaquel/EMAworkbench
|
0e25b9caa86fb3c41376b14ad6f70911aec2a594
|
9d13fb6fc8e8e3fc8cc693102f85966c5876f9ac
|
refs/heads/master
| 2023-08-10T08:23:48.007445
| 2023-07-18T10:54:43
| 2023-07-18T10:54:43
| 5,109,457
| 102
| 86
|
BSD-3-Clause
| 2023-09-12T06:51:17
| 2012-07-19T12:18:19
|
Jupyter Notebook
|
UTF-8
|
Python
| false
| false
| 1,575
|
py
|
test_ema_logging.py
|
"""
Created on Jul 28, 2015
.. codeauthor:: jhkwakkel <j.h.kwakkel (at) tudelft (dot) nl>
"""
import logging
import unittest
from ema_workbench.util import ema_logging
def tearDownModule():
ema_logging._logger = None
ema_logger = logging.getLogger(ema_logging.LOGGER_NAME)
ema_logger.handlers = []
class TestEmaLogging(unittest.TestCase):
def test_get_logger(self):
ema_logging._rootlogger = None
logger = ema_logging.get_rootlogger()
self.assertEqual(logger, logging.getLogger(ema_logging.LOGGER_NAME))
self.assertEqual(len(logger.handlers), 1)
self.assertEqual(type(logger.handlers[0]), logging.NullHandler)
logger = ema_logging.get_rootlogger()
self.assertEqual(logger, logging.getLogger(ema_logging.LOGGER_NAME))
self.assertEqual(len(logger.handlers), 1)
self.assertEqual(type(logger.handlers[0]), logging.NullHandler)
def test_log_to_stderr(self):
ema_logging._rootlogger = None
logger = ema_logging.log_to_stderr(ema_logging.DEBUG)
self.assertEqual(len(logger.handlers), 2)
self.assertEqual(logger.level, ema_logging.DEBUG)
ema_logging._rootlogger = None
logger = ema_logging.log_to_stderr()
self.assertEqual(len(logger.handlers), 2)
self.assertEqual(logger.level, ema_logging.DEFAULT_LEVEL)
logger = ema_logging.log_to_stderr()
self.assertEqual(len(logger.handlers), 2)
self.assertEqual(logger.level, ema_logging.DEFAULT_LEVEL)
if __name__ == "__main__":
unittest.main()
|
fa5c11bcd919f3d9e6b81beb1ba91a157176d7a3
|
576764ad37667f8da2c63aaa1a9f96da211795a6
|
/forte/data/readers/sst2_reader.py
|
97c00890f46b65272dd9654bce70bcc3b3d5eff6
|
[
"Apache-2.0"
] |
permissive
|
asyml/forte
|
96f852601647836dda3bccf3bd7900b9d10e6fcb
|
13e50aebe2afd79a7a8b3c01f0bb2568addea54f
|
refs/heads/master
| 2023-04-09T17:52:31.203644
| 2023-04-06T15:04:49
| 2023-04-06T15:04:49
| 201,518,876
| 233
| 73
|
Apache-2.0
| 2023-04-06T15:04:51
| 2019-08-09T18:12:12
|
Python
|
UTF-8
|
Python
| false
| false
| 9,618
|
py
|
sst2_reader.py
|
# Copyright 2019 The Forte Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
The reader that reads Stanford Sentiment Treebank
https://nlp.stanford.edu/sentiment/treebank.html
into data_pack format
"""
import os
import errno
from typing import Iterator, Dict, List, Tuple
from ft.onto.base_ontology import Sentence, ConstituentNode
from forte.data.data_pack import DataPack
from forte.data.base_reader import PackReader
__all__ = ["SST2Reader"]
class SST2Reader(PackReader):
r""":class:`SST2Reader` is designed to read in the
Stanford Sentiment Treebank 2 dataset from
https://nlp.stanford.edu/sentiment/treebank.html
To use the dataset, please download it from the webpage.
    Provided the sst2_dir_path, the _collect function will look for files:
    "dictionary.txt": a mapping from phrase text to phrase id
    "sentiment_labels.txt": a mapping from phrase id to sentiment score
"datasetSentences.txt": original text for the sentence
"STree.txt": parent pointer list of constituency tree for each sentence
"""
def __init__(self):
super().__init__()
# Transform the text-form phrase to sentiment score.
self.phrase_to_id: Dict = {}
self.id_to_senti: Dict = {}
def _cache_key_function(self, data_pack: DataPack) -> str:
if data_pack.pack_name is None:
raise ValueError("data_pack does not have a sentence id")
return data_pack.pack_name
def _check_file_exist(self, filename: str):
if not os.path.exists(filename):
raise FileNotFoundError(
errno.ENOENT, os.strerror(errno.ENOENT), filename
)
def _collect(
self, *args, **kwargs
) -> Iterator[List[Tuple[str, str, List[int]]]]:
# pylint: disable = unused-argument
r"""Iterator over sst files in the data_source.
The directory should at least have the following files:
"dictionary.txt": a mapping from phrase text to phrase id
"sentiment_labels": a mapping from phrase id to sentiment score
"datasetSentences.txt": original text for the sentence
"STree.txt": parent pointer list of constituency tree
Args:
args: args[0] is the directory to the sst2 files.
args[1] is the number of sentences in a data pack
kwargs:
Returns: data packs obtained from each sentence from the sst2 file.
"""
sent_id: str
sent_text: str
parent_pointer_list: List[int]
sst2_dir_path: str = args[0]
n_samples: int = args[1]
phrase_to_id_path: str = os.path.join(sst2_dir_path, "dictionary.txt")
id_to_senti_path: str = os.path.join(
sst2_dir_path, "sentiment_labels.txt"
)
text_path: str = os.path.join(sst2_dir_path, "datasetSentences.txt")
tree_path: str = os.path.join(sst2_dir_path, "STree.txt")
self._check_file_exist(phrase_to_id_path)
self._check_file_exist(id_to_senti_path)
self._check_file_exist(text_path)
self._check_file_exist(tree_path)
# Read the mapping from phrase to phrase-id.
with open(phrase_to_id_path, "r", encoding="utf8") as file:
for line in file:
phrase, id_ = line.split("|")
self.phrase_to_id[phrase] = int(id_)
# Read the mapping from phrase-id to sentiment score.
with open(id_to_senti_path, "r", encoding="utf8") as file:
for i, line in enumerate(file):
if i == 0:
continue
id_, score = line.split("|")
self.id_to_senti[int(id_)] = float(score)
sent_lines = []
# Read the text and tree structure.
with open(text_path, "r", encoding="utf8") as ftext, open(
tree_path, "r", encoding="utf8"
) as ftree:
ftext.readline() # Skip the headers.
for line_text, line_tree in zip(ftext, ftree):
line_text = line_text.strip()
sent_id, sent_text = line_text.split("\t")
parent_pointer_list = list(map(int, line_tree.split("|")))
sent_lines.append((sent_id, sent_text, parent_pointer_list))
# Yield a batch of sentences.
if len(sent_lines) == n_samples:
yield sent_lines
sent_lines = []
if len(sent_lines) > 0:
yield sent_lines
def _get_span_with_dfs(
self,
span_begin_end: List[List[int]],
children_nodes: List[List[int]],
cur_node: int,
):
r"""Recursively get the span for each node in the tree
Args:
            span_begin_end: stores the span (begin, end) position
children_nodes: the structure of the tree
cur_node: current processing node
Returns: None
"""
if len(children_nodes[cur_node]) == 0:
return
begin = -1
end = -1
for child in children_nodes[cur_node]:
self._get_span_with_dfs(span_begin_end, children_nodes, child)
if begin == -1 or begin > span_begin_end[child][0]:
begin = span_begin_end[child][0]
if end == -1 or end < span_begin_end[child][1]:
end = span_begin_end[child][1]
span_begin_end[cur_node] = [begin, end]
def _parse_parent_pointer_list(
self,
data_pack: DataPack,
sent_bias: int,
sent_text: str,
parent_pointer_list: List[int],
):
r"""Build the ConstituentNode objects from parent pointer list.
Args:
data_pack: the data_pack to add ConstituentNode
sent_bias: the start position of the sentence in the pack
            sent_text: the whitespace-separated sentence text
parent_pointer_list: a format to store the constituency tree
Returns: None
"""
tokens: List[str] = sent_text.split()
n_nodes: int = len(parent_pointer_list) + 1
span_begin_end: List[List[int]] = [[] for _ in range(n_nodes)]
node_list: List[ConstituentNode]
# Get the children node ids for each node, node 0 is the root.
children_nodes: List[List[int]] = [[] for _ in range(n_nodes)]
for i in range(1, n_nodes):
parent = parent_pointer_list[i - 1]
children_nodes[parent].append(i)
# Get the begin/end index of spans for leaf nodes.
for i in range(1, len(tokens) + 1):
span_begin = 0
if i > 1:
# Plus 1 for the whitespace separator
span_begin = span_begin_end[i - 1][1] + 1
span_end = span_begin + len(tokens[i - 1])
span_begin_end[i] = [span_begin, span_end]
# Get the spans recursively and store in "span_begin_end".
self._get_span_with_dfs(span_begin_end, children_nodes, 0)
# Create the constituency Tree.
node_list = [
ConstituentNode(data_pack, begin + sent_bias, end + sent_bias)
for (begin, end) in span_begin_end
]
# Get the sentiment scores.
for i in range(n_nodes):
phrase = node_list[i].text
phrase_id = self.phrase_to_id.get(phrase, -1)
node_list[i].sentiment["pos"] = self.id_to_senti.get(phrase_id, 0.5)
# Link the parent and children nodes.
for i in range(1, n_nodes):
parent = parent_pointer_list[i - 1]
node_list[i].parent_node = node_list[parent]
for i in range(n_nodes):
# Sort the children nodes by span begin position.
children_nodes[i].sort(key=lambda x: node_list[x].begin)
for child in children_nodes[i]:
node_list[i].children_nodes.append(node_list[child])
# Set the is_leaf/is_root flag.
for i in range(n_nodes):
node_list[i].is_leaf = False
node_list[i].is_root = False
for i in range(1, len(tokens) + 1):
node_list[i].is_leaf = True
node_list[0].is_root = True
def _parse_pack(self, sent_lines) -> Iterator[DataPack]:
data_pack: DataPack = DataPack()
sent_bias: int = 0
batch_text: str = "\n".join(
[sent_text for _, sent_text, _ in sent_lines]
)
data_pack.set_text(batch_text)
for i, sent_line in enumerate(sent_lines):
sent_id: str = sent_line[0]
sent_text: str = sent_line[1].strip()
parent_pointer_list: List[int] = sent_line[2]
# Name the data_pack with the first sentence id.
if i == 0:
data_pack.pack_name = sent_id
# Add sentence to data_pack.
Sentence(data_pack, sent_bias, sent_bias + len(sent_text))
self._parse_parent_pointer_list(
data_pack, sent_bias, sent_text, parent_pointer_list
)
sent_bias += len(sent_text) + 1
yield data_pack
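# A self-contained worked example (no forte dependency) of the parent pointer
# list format handled above. For the two-token sentence "not bad" with
# parent_pointer_list == [3, 3, 0], node 0 is the root, nodes 1-2 are the
# leaf tokens, and node 3 is their shared parent:
def spans_from_parent_pointers(tokens, parent_pointer_list):
    n_nodes = len(parent_pointer_list) + 1
    children = [[] for _ in range(n_nodes)]
    for i in range(1, n_nodes):
        children[parent_pointer_list[i - 1]].append(i)
    spans = [[] for _ in range(n_nodes)]
    begin = 0
    for i, token in enumerate(tokens, start=1):
        spans[i] = [begin, begin + len(token)]
        begin += len(token) + 1  # plus one for the whitespace separator
    def visit(node):  # bottom-up: a parent's span covers its children
        for child in children[node]:
            visit(child)
        if children[node]:
            spans[node] = [min(spans[c][0] for c in children[node]),
                           max(spans[c][1] for c in children[node])]
    visit(0)
    return spans
# spans_from_parent_pointers(["not", "bad"], [3, 3, 0])
# -> [[0, 7], [0, 3], [4, 7], [0, 7]]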
|
a8ef7f1aa659cc52218ae12cf586032cdf7c3fbc
|
a2b20597759990445081057d35d113434cfcf970
|
/stubs/typeshed/typeshed/stubs/pywin32/win32com/mapi/emsabtags.pyi
|
d38198bb2755f22133a95de2c9d3669ce777f5ab
|
[
"MIT",
"Apache-2.0"
] |
permissive
|
facebook/pyre-check
|
34059599c02b65605c574f13555229f3b931fd4e
|
fe8ccedc572cc1faa1fd01e9138f65e982875002
|
refs/heads/main
| 2023-09-03T19:10:11.587028
| 2023-09-02T07:40:35
| 2023-09-02T07:40:35
| 110,274,488
| 6,703
| 575
|
MIT
| 2023-09-13T17:02:32
| 2017-11-10T17:31:36
|
OCaml
|
UTF-8
|
Python
| false
| false
| 41
|
pyi
|
emsabtags.pyi
|
from win32comext.mapi.emsabtags import *
|
588ee72047245275a7507e5427ac0deb69c69f3e
|
a7c0cc71e6da4615eca2c3d75117dad5b8dce8d3
|
/CTFd/__init__.py
|
f950fe68736bfd7425815272b4b0553fd8a4370d
|
[
"Apache-2.0"
] |
permissive
|
CTFd/CTFd
|
4b75207aeea3ed8d761cc6269c27a070693ab3ec
|
d8f0b9e602fca109cabe1895e847d39a46ce7429
|
refs/heads/master
| 2023-09-01T19:19:19.767862
| 2023-08-29T18:46:53
| 2023-08-29T18:46:53
| 28,681,142
| 4,593
| 2,273
|
Apache-2.0
| 2023-09-13T18:24:37
| 2015-01-01T05:36:55
|
Python
|
UTF-8
|
Python
| false
| false
| 11,714
|
py
|
__init__.py
|
import datetime
import os
import sys
import weakref
from distutils.version import StrictVersion
import jinja2
from flask import Flask, Request
from flask_babel import Babel
from flask_migrate import upgrade
from jinja2 import FileSystemLoader
from jinja2.sandbox import SandboxedEnvironment
from werkzeug.middleware.proxy_fix import ProxyFix
from werkzeug.utils import safe_join
import CTFd.utils.config
from CTFd import utils
from CTFd.constants.themes import ADMIN_THEME, DEFAULT_THEME
from CTFd.plugins import init_plugins
from CTFd.utils.crypto import sha256
from CTFd.utils.initialization import (
init_cli,
init_events,
init_logs,
init_request_processors,
init_template_filters,
init_template_globals,
)
from CTFd.utils.migrations import create_database, migrations, stamp_latest_revision
from CTFd.utils.sessions import CachingSessionInterface
from CTFd.utils.updates import update_check
from CTFd.utils.user import get_locale
__version__ = "3.6.0"
__channel__ = "oss"
class CTFdRequest(Request):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
"""
Hijack the original Flask request path because it does not account for subdirectory deployments in an intuitive
manner. We append script_root so that the path always points to the full path as seen in the browser.
e.g. /subdirectory/path/route vs /path/route
"""
self.path = self.script_root + self.path
class CTFdFlask(Flask):
def __init__(self, *args, **kwargs):
"""Overriden Jinja constructor setting a custom jinja_environment"""
self.jinja_environment = SandboxedBaseEnvironment
self.session_interface = CachingSessionInterface(key_prefix="session")
self.request_class = CTFdRequest
# Store server start time
self.start_time = datetime.datetime.utcnow()
# Create generally unique run identifier
self.run_id = sha256(str(self.start_time))[0:8]
Flask.__init__(self, *args, **kwargs)
def create_jinja_environment(self):
"""Overridden jinja environment constructor"""
return super(CTFdFlask, self).create_jinja_environment()
class SandboxedBaseEnvironment(SandboxedEnvironment):
"""SandboxEnvironment that mimics the Flask BaseEnvironment"""
def __init__(self, app, **options):
if "loader" not in options:
options["loader"] = app.create_global_jinja_loader()
SandboxedEnvironment.__init__(self, **options)
self.app = app
def _load_template(self, name, globals):
if self.loader is None:
raise TypeError("no loader for this environment specified")
# Add theme to the LRUCache cache key
cache_name = name
if name.startswith("admin/") is False:
theme = str(utils.get_config("ctf_theme"))
cache_name = theme + "/" + name
# Rest of this code roughly copied from Jinja
# https://github.com/pallets/jinja/blob/b08cd4bc64bb980df86ed2876978ae5735572280/src/jinja2/environment.py#L956-L973
cache_key = (weakref.ref(self.loader), cache_name)
if self.cache is not None:
template = self.cache.get(cache_key)
if template is not None and (
not self.auto_reload or template.is_up_to_date
):
# template.globals is a ChainMap, modifying it will only
# affect the template, not the environment globals.
if globals:
template.globals.update(globals)
return template
template = self.loader.load(self, name, self.make_globals(globals))
if self.cache is not None:
self.cache[cache_key] = template
return template
class ThemeLoader(FileSystemLoader):
"""Custom FileSystemLoader that is aware of theme structure and config."""
DEFAULT_THEMES_PATH = os.path.join(os.path.dirname(__file__), "themes")
_ADMIN_THEME_PREFIX = ADMIN_THEME + "/"
def __init__(
self,
searchpath=DEFAULT_THEMES_PATH,
theme_name=None,
encoding="utf-8",
followlinks=False,
):
super(ThemeLoader, self).__init__(searchpath, encoding, followlinks)
self.theme_name = theme_name
def get_source(self, environment, template):
# Refuse to load `admin/*` from a loader not for the admin theme
# Because there is a single template loader, themes can essentially
# provide files for other themes. This could end up causing issues if
# an admin theme references a file that doesn't exist that a malicious
# theme provides.
if template.startswith(self._ADMIN_THEME_PREFIX):
if self.theme_name != ADMIN_THEME:
raise jinja2.TemplateNotFound(template)
template = template[len(self._ADMIN_THEME_PREFIX) :]
theme_name = self.theme_name or str(utils.get_config("ctf_theme"))
template = safe_join(theme_name, "templates", template)
return super(ThemeLoader, self).get_source(environment, template)
def confirm_upgrade():
if sys.stdin.isatty():
print("/*\\ CTFd has updated and must update the database! /*\\")
print("/*\\ Please backup your database before proceeding! /*\\")
print("/*\\ CTFd maintainers are not responsible for any data loss! /*\\")
if input("Run database migrations (Y/N)").lower().strip() == "y": # nosec B322
return True
else:
print("/*\\ Ignored database migrations... /*\\")
return False
else:
return True
def run_upgrade():
upgrade()
utils.set_config("ctf_version", __version__)
def create_app(config="CTFd.config.Config"):
app = CTFdFlask(__name__)
with app.app_context():
app.config.from_object(config)
loaders = []
# We provide a `DictLoader` which may be used to override templates
app.overridden_templates = {}
loaders.append(jinja2.DictLoader(app.overridden_templates))
# A `ThemeLoader` with no `theme_name` will load from the current theme
loaders.append(ThemeLoader())
# If `THEME_FALLBACK` is set and true, we add another loader which will
# load from the `DEFAULT_THEME` - this mirrors the order implemented by
# `config.ctf_theme_candidates()`
if bool(app.config.get("THEME_FALLBACK")):
loaders.append(ThemeLoader(theme_name=DEFAULT_THEME))
# All themes including admin can be accessed by prefixing their name
prefix_loader_dict = {ADMIN_THEME: ThemeLoader(theme_name=ADMIN_THEME)}
for theme_name in CTFd.utils.config.get_themes():
prefix_loader_dict[theme_name] = ThemeLoader(theme_name=theme_name)
loaders.append(jinja2.PrefixLoader(prefix_loader_dict))
# Plugin templates are also accessed via prefix but we just point a
# normal `FileSystemLoader` at the plugin tree rather than validating
# each plugin here (that happens later in `init_plugins()`). We
# deliberately don't add this to `prefix_loader_dict` defined above
# because to do so would break template loading from a theme called
# `prefix` (even though that'd be weird).
plugin_loader = jinja2.FileSystemLoader(
searchpath=os.path.join(app.root_path, "plugins"), followlinks=True
)
loaders.append(jinja2.PrefixLoader({"plugins": plugin_loader}))
# Use a choice loader to find the first match from our list of loaders
app.jinja_loader = jinja2.ChoiceLoader(loaders)
from CTFd.models import ( # noqa: F401
Challenges,
Fails,
Files,
Flags,
Solves,
Tags,
Teams,
Tracking,
db,
)
url = create_database()
# This allows any changes to the SQLALCHEMY_DATABASE_URI to get pushed back in
# This is mostly so we can force MySQL's charset
app.config["SQLALCHEMY_DATABASE_URI"] = str(url)
# Register database
db.init_app(app)
# Register Flask-Migrate
migrations.init_app(app, db)
babel = Babel()
babel.locale_selector_func = get_locale
babel.init_app(app)
# Alembic sqlite support is lacking so we should just create_all anyway
if url.drivername.startswith("sqlite"):
# Enable foreign keys for SQLite. This must be before the
# db.create_all call because tests use the in-memory SQLite
# database (each connection, including db creation, is a new db).
# https://docs.sqlalchemy.org/en/13/dialects/sqlite.html#foreign-key-support
from sqlalchemy import event
from sqlalchemy.engine import Engine
@event.listens_for(Engine, "connect")
def set_sqlite_pragma(dbapi_connection, connection_record):
cursor = dbapi_connection.cursor()
cursor.execute("PRAGMA foreign_keys=ON")
cursor.close()
db.create_all()
stamp_latest_revision()
else:
# This creates tables instead of db.create_all()
# Allows migrations to happen properly
upgrade()
from CTFd.models import ma
ma.init_app(app)
app.db = db
app.VERSION = __version__
app.CHANNEL = __channel__
from CTFd.cache import cache
cache.init_app(app)
app.cache = cache
reverse_proxy = app.config.get("REVERSE_PROXY")
if reverse_proxy:
if type(reverse_proxy) is str and "," in reverse_proxy:
proxyfix_args = [int(i) for i in reverse_proxy.split(",")]
app.wsgi_app = ProxyFix(app.wsgi_app, *proxyfix_args)
else:
app.wsgi_app = ProxyFix(
app.wsgi_app, x_for=1, x_proto=1, x_host=1, x_port=1, x_prefix=1
)
version = utils.get_config("ctf_version")
# Upgrading from an older version of CTFd
if version and (StrictVersion(version) < StrictVersion(__version__)):
if confirm_upgrade():
run_upgrade()
else:
exit()
if not version:
utils.set_config("ctf_version", __version__)
if not utils.get_config("ctf_theme"):
utils.set_config("ctf_theme", "core-beta")
update_check(force=True)
init_request_processors(app)
init_template_filters(app)
init_template_globals(app)
# Importing here allows tests to use sensible names (e.g. api instead of api_bp)
from CTFd.admin import admin
from CTFd.api import api
from CTFd.auth import auth
from CTFd.challenges import challenges
from CTFd.errors import render_error
from CTFd.events import events
from CTFd.scoreboard import scoreboard
from CTFd.teams import teams
from CTFd.users import users
from CTFd.views import views
app.register_blueprint(views)
app.register_blueprint(teams)
app.register_blueprint(users)
app.register_blueprint(challenges)
app.register_blueprint(scoreboard)
app.register_blueprint(auth)
app.register_blueprint(api)
app.register_blueprint(events)
app.register_blueprint(admin)
for code in {403, 404, 500, 502}:
app.register_error_handler(code, render_error)
init_logs(app)
init_events(app)
init_plugins(app)
init_cli(app)
return app
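# Illustration of the REVERSE_PROXY handling above: a string value such as
# "1,0,1,0,1" is split on commas and passed positionally to ProxyFix, i.e.
# ProxyFix(app.wsgi_app, 1, 0, 1, 0, 1), which Werkzeug reads as x_for=1,
# x_proto=0, x_host=1, x_port=0, x_prefix=1. Any other truthy value (e.g.
# True) falls through to the explicit defaults shown above.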
|
c45c2ae2b178ccc05f5645da47c371c23713e31a
|
974d04d2ea27b1bba1c01015a98112d2afb78fe5
|
/test/legacy_test/test_directory_migration.py
|
3230a0ecc666ffba2784a07527f81ee9967f03ab
|
[
"Apache-2.0"
] |
permissive
|
PaddlePaddle/Paddle
|
b3d2583119082c8e4b74331dacc4d39ed4d7cff0
|
22a11a60e0e3d10a3cf610077a3d9942a6f964cb
|
refs/heads/develop
| 2023-08-17T21:27:30.568889
| 2023-08-17T12:38:22
| 2023-08-17T12:38:22
| 65,711,522
| 20,414
| 5,891
|
Apache-2.0
| 2023-09-14T19:20:51
| 2016-08-15T06:59:08
|
C++
|
UTF-8
|
Python
| false
| false
| 8,625
|
py
|
test_directory_migration.py
|
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import subprocess
import sys
import tempfile
import unittest
class TestDirectory(unittest.TestCase):
def setUp(self):
self.temp_dir = tempfile.TemporaryDirectory()
def tearDown(self):
self.temp_dir.cleanup()
def get_import_command(self, module):
paths = module.split('.')
if len(paths) == 1:
return f'import {module}'
package = '.'.join(paths[:-1])
func = paths[-1]
cmd = f'from {package} import {func}'
return cmd
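    # For illustration:
    #   get_import_command('paddle')              -> 'import paddle'
    #   get_import_command('paddle.static.nn.fc') -> 'from paddle.static.nn import fc'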
def test_new_directory(self):
new_directory = [
'paddle.enable_static',
'paddle.disable_static',
'paddle.in_dynamic_mode',
'paddle.to_tensor',
'paddle.grad',
'paddle.no_grad',
'paddle.static.save',
'paddle.static.load',
'paddle.distributed.ParallelEnv',
'paddle.DataParallel',
'paddle.jit',
'paddle.jit.to_static',
'paddle.jit.TranslatedLayer',
'paddle.jit.save',
'paddle.jit.load',
'paddle.optimizer.lr.LRScheduler',
'paddle.optimizer.lr.NoamDecay',
'paddle.optimizer.lr.PiecewiseDecay',
'paddle.optimizer.lr.NaturalExpDecay',
'paddle.optimizer.lr.ExponentialDecay',
'paddle.optimizer.lr.InverseTimeDecay',
'paddle.optimizer.lr.PolynomialDecay',
'paddle.optimizer.lr.CosineAnnealingDecay',
'paddle.optimizer.lr.MultiStepDecay',
'paddle.optimizer.lr.StepDecay',
'paddle.optimizer.lr.LambdaDecay',
'paddle.optimizer.lr.ReduceOnPlateau',
'paddle.optimizer.lr.LinearWarmup',
'paddle.static.Executor',
'paddle.static.global_scope',
'paddle.static.scope_guard',
'paddle.static.append_backward',
'paddle.static.gradients',
'paddle.static.BuildStrategy',
'paddle.static.CompiledProgram',
'paddle.static.ExecutionStrategy',
'paddle.static.default_main_program',
'paddle.static.default_startup_program',
'paddle.static.Program',
'paddle.static.name_scope',
'paddle.static.program_guard',
'paddle.static.Print',
'paddle.static.py_func',
'paddle.static.WeightNormParamAttr',
'paddle.static.nn.fc',
'paddle.static.nn.batch_norm',
'paddle.static.nn.bilinear_tensor_product',
'paddle.static.nn.conv2d',
'paddle.static.nn.conv2d_transpose',
'paddle.static.nn.conv3d',
'paddle.static.nn.conv3d_transpose',
'paddle.static.nn.create_parameter',
'paddle.static.nn.data_norm',
'paddle.static.nn.deform_conv2d',
'paddle.static.nn.group_norm',
'paddle.static.nn.instance_norm',
'paddle.static.nn.layer_norm',
'paddle.static.nn.nce',
'paddle.static.nn.prelu',
'paddle.static.nn.row_conv',
'paddle.static.nn.spectral_norm',
'paddle.static.nn.embedding',
]
import_file = os.path.join(self.temp_dir.name, 'run_import_modules.py')
with open(import_file, "w") as wb:
for module in new_directory:
run_cmd = self.get_import_command(module)
wb.write(f"{run_cmd}\n")
_python = sys.executable
ps_cmd = f"{_python} {import_file}"
ps_proc = subprocess.Popen(
ps_cmd.strip().split(" "),
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
stdout, stderr = ps_proc.communicate()
self.assertFalse(
"Error" in str(stderr),
f"ErrorMessage:\n{bytes.decode(stderr)}",
)
def test_old_directory(self):
old_directory = [
'paddle.enable_imperative',
'paddle.disable_imperative',
'paddle.in_imperative_mode',
'paddle.imperative.to_variable',
'paddle.imperative.enable',
'paddle.imperative.guard',
'paddle.imperative.grad',
'paddle.imperative.no_grad',
'paddle.imperative.save',
'paddle.imperative.load',
'paddle.imperative.ParallelEnv',
'paddle.imperative.prepare_context',
'paddle.imperative.DataParalell',
'paddle.imperative.jit',
'paddle.imperative.TracedLayer',
'paddle.imperative.declarative',
'paddle.imperative.TranslatedLayer',
'paddle.imperative.jit.save',
'paddle.imperative.jit.load',
            'paddle.imperative.NoamDecay',
            'paddle.imperative.PiecewiseDecay',
'paddle.imperative.NaturalExpDecay',
'paddle.imperative.ExponentialDecay',
'paddle.imperative.InverseTimeDecay',
'paddle.imperative.PolynomialDecay',
'paddle.imperative.CosineDecay',
'paddle.Executor',
'paddle.global_scope',
'paddle.scope_guard',
'paddle.append_backward',
'paddle.gradients',
'paddle.BuildStrategy',
'paddle.CompiledProgram',
'paddle.ExecutionStrategy',
'paddle.name_scope',
'paddle.program_guard',
'paddle.Print',
'paddle.py_func',
'paddle.default_main_program',
'paddle.default_startup_program',
'paddle.Program',
'paddle.WeightNormParamAttr',
'paddle.declarative.fc',
'paddle.declarative.batch_norm',
'paddle.declarative.bilinear_tensor_product',
'paddle.declarative.conv2d',
'paddle.declarative.conv2d_transpose',
'paddle.declarative.conv3d',
'paddle.declarative.conv3d_transpose',
'paddle.declarative.create_parameter',
'paddle.declarative.crf_decoding',
'paddle.declarative.data_norm',
'paddle.declarative.deformable_conv',
'paddle.declarative.group_norm',
'paddle.declarative.hsigmoid',
'paddle.declarative.instance_norm',
'paddle.declarative.layer_norm',
'paddle.declarative.multi_box_head',
'paddle.declarative.nce',
'paddle.declarative.prelu',
'paddle.declarative.row_conv',
'paddle.declarative.spectral_norm',
'paddle.declarative.embedding',
]
import_file = os.path.join(
self.temp_dir.name, 'run_old_import_modules.py'
)
with open(import_file, "w") as wb:
cmd_context_count = """
count = 0
err_module = ""
"""
wb.write(cmd_context_count)
for module in old_directory:
run_cmd = self.get_import_command(module)
cmd_context_loop_template = """
try:
{run_cmd}
except:
count += 1
else:
err_module = "{module}"
"""
cmd_context_loop = cmd_context_loop_template.format(
run_cmd=run_cmd, module=module
)
wb.write(cmd_context_loop)
cmd_context_print_template = """
if count != {len_old_directory}:
print("Error: Module " + err_module + " should not be imported")
"""
cmd_context_print = cmd_context_print_template.format(
len_old_directory=str(len(old_directory))
)
wb.write(cmd_context_print)
_python = sys.executable
ps_cmd = f"{_python} {import_file}"
ps_proc = subprocess.Popen(
ps_cmd.strip().split(" "),
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
stdout, stderr = ps_proc.communicate()
self.assertFalse("Error" in str(stdout), bytes.decode(stdout))
if __name__ == '__main__':
unittest.main()
|
e27520f1532b9871115b0ed5f62e3ccc2f33171a
|
da5bcb4a92a802dedf84a74dc2ee4c08cb744656
|
/will/scripts/generate_will_project.py
|
607570f238ec171e2115ce47509eb3a91b7c158e
|
[
"MIT"
] |
permissive
|
skoczen/will
|
21981ba1213a49b650d661feb59d69719918446c
|
27a23ce47e3ec11b94f3355c2d2ee94c1958679c
|
refs/heads/master
| 2023-08-16T00:03:47.974919
| 2021-04-13T10:31:26
| 2021-04-13T10:31:26
| 14,900,247
| 359
| 186
|
MIT
| 2023-01-15T17:34:57
| 2013-12-03T17:08:08
|
Python
|
UTF-8
|
Python
| false
| false
| 7,429
|
py
|
generate_will_project.py
|
#!/usr/bin/env python
import argparse
import os
import stat
import sys
from six.moves import input
from clint.textui import puts
from will.utils import print_head
SERVICE_BACKENDS = ('Slack', 'HipChat', 'Rocket.chat', 'Shell')
PROJECT_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__)))
sys.path.append(PROJECT_ROOT)
sys.path.append(os.getcwd())
parser = argparse.ArgumentParser()
parser.add_argument(
'--config-dist-only',
action='store_true',
help='Only output a config.py.dist.'
)
parser.add_argument('--backends', nargs='+',
choices=SERVICE_BACKENDS,
help='Choose service backends to support.')
args = parser.parse_args()
requirements_txt = "will\n"
class EmptyObj(object):
pass
def cleaned(service_name):
    return service_name.lower().replace(".", '')
def ask_user(question):
response = "?"
while response not in ["y", "n"]:
response = input("%s [y/n] " % question)
if response not in ["y", "n"]:
print("Please enter 'y' or 'n'.")
return response.startswith("y")
def _enable_service(service_name, source):
global requirements_txt
source = source.replace('# "will.backends.io_adapters.%s"' % cleaned(service_name),
'"will.backends.io_adapters.%s"' % cleaned(service_name))
req_path = os.path.join(os.path.join(PROJECT_ROOT, "..", "requirements"), "%s.txt" % cleaned(service_name))
print(req_path)
if os.path.exists(req_path):
with open(req_path, 'r') as f:
requirements_txt = "%s\n# %s\n%s" % (requirements_txt, service_name, f.read())
return source
def __disable_service(service_name, source):
return source.replace('"will.backends.io_adapters.%s"' % cleaned(service_name),
'"# will.backends.io_adapters.%s"' % cleaned(service_name))
def enable_disable_service(service_name, source):
if ask_user(" Do you want to enable %s support?" % (service_name)):
return _enable_service(service_name, source)
else:
return __disable_service(service_name, source)
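# For illustration, a config.py.dist line such as
#     # "will.backends.io_adapters.slack",
# becomes, after _enable_service("Slack", source):
#     "will.backends.io_adapters.slack",
# while __disable_service performs the inverse, commenting the line back out.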
def main():
"""
Creates the following structure:
/plugins
__init__.py
hello.py
/templates
blank.html
.gitignore
run_will.py
requirements.txt
Procfile
README.md
"""
print_head()
puts("Welcome to the will project generator.")
puts("")
if args.config_dist_only:
print("Generating config.py.dist...")
else:
print("\nGenerating will scaffold...")
current_dir = os.getcwd()
plugins_dir = os.path.join(current_dir, "plugins")
templates_dir = os.path.join(current_dir, "templates")
if not args.config_dist_only:
print(" /plugins")
# Set up the directories
if not os.path.exists(plugins_dir):
os.makedirs(plugins_dir)
print(" __init__.py")
# Create the plugins __init__.py
with open(os.path.join(plugins_dir, "__init__.py"), 'w+') as f:
pass
print(" morning.py")
# Create the morning plugin
morning_file_path = os.path.join(plugins_dir, "morning.py")
if not os.path.exists(morning_file_path):
with open(morning_file_path, 'w+') as f:
f.write("""from will.plugin import WillPlugin
from will.decorators import respond_to, periodic, hear, randomly, route, rendered_template, require_settings
class MorningPlugin(WillPlugin):
@respond_to("^good morning")
def good_morning(self, message):
self.reply("oh, g'morning!")
""")
print(" /templates")
if not os.path.exists(templates_dir):
os.makedirs(templates_dir)
print(" blank.html")
# Create the plugins __init__.py
with open(os.path.join(templates_dir, "blank.html"), 'w+') as f:
pass
print(" .gitignore")
# Create .gitignore, or at least add shelf.db
gitignore_path = os.path.join(current_dir, ".gitignore")
if not os.path.exists(gitignore_path):
with open(gitignore_path, 'w+') as f:
f.write("""*.py[cod]
pip-log.txt
shelf.db
""")
else:
append_ignore = False
with open(gitignore_path, "r+") as f:
if "shelf.db" not in f.read():
append_ignore = True
if append_ignore:
with open(gitignore_path, "a") as f:
f.write("\nshelf.db\n")
# Create run_will.py
print(" run_will.py")
run_will_path = os.path.join(current_dir, "run_will.py")
if not os.path.exists(run_will_path):
with open(run_will_path, 'w+') as f:
f.write("""#!/usr/bin/env python
from will.main import WillBot
if __name__ == '__main__':
bot = WillBot()
bot.bootstrap()
""")
# And make it executable
st = os.stat('run_will.py')
os.chmod("run_will.py", st.st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)
# Create config.py
print(" config.py.dist")
config_path = os.path.join(current_dir, "config.py.dist")
if not os.path.exists(config_path) or ask_user("! config.py.dist exists. Overwrite it?"):
with open(os.path.join(PROJECT_ROOT, "config.py.dist"), "r") as source_f:
source = source_f.read()
if args.backends:
for backend in SERVICE_BACKENDS:
if backend in args.backends:
                        source = _enable_service(backend, source)
                    else:
                        source = __disable_service(backend, source)
else:
                # Ask the user via the command line which backends to enable
print("\nWill supports a few different service backends. Let's set up the ones you want:\n")
source = enable_disable_service("Slack", source)
source = enable_disable_service("HipChat", source)
source = enable_disable_service("Rocket.Chat", source)
source = enable_disable_service("Shell", source)
with open(config_path, "w+") as f:
config = source
f.write(config)
if not args.config_dist_only:
print(" requirements.txt")
# Create requirements.txt
requirements_path = os.path.join(current_dir, "requirements.txt")
if not os.path.exists(requirements_path) or ask_user("! requirements.txt exists. Overwrite it?"):
with open(requirements_path, 'w+') as f:
f.write(requirements_txt)
print(" Procfile")
# Create Procfile
requirements_path = os.path.join(current_dir, "Procfile")
if not os.path.exists(requirements_path):
with open(requirements_path, 'w+') as f:
f.write("web: python run_will.py")
print(" README.md")
# Create the readme
readme_path = os.path.join(current_dir, "README.md")
if not os.path.exists(readme_path):
with open(readme_path, 'w+') as f:
f.write("""
This is our bot, a [will](https://github.com/skoczen/will) bot.
""")
print("\nDone.")
print("\n Your will is now ready to go. Run ./run_will.py to get started!")
else:
print("\nCreated a config.py.dist. Open it up to see what's new!\n")
if __name__ == '__main__':
main()
|
558c40adc414e3d9bb4d4dfeebd3cb2fc84229f3
|
57bc404899f914eeef7ba298bf1e99883c864a26
|
/trie/trie.py
|
ad45dd8be0e79d49d21a7b994c83ea6fa18b410d
|
[
"MIT"
] |
permissive
|
priyankchheda/algorithms
|
547f19193273ac6a424fe4ba5e1375cc02ea4f60
|
38a5de72db14ef2664489da9857b598d24c4e276
|
refs/heads/master
| 2023-08-17T17:10:10.044940
| 2022-04-16T13:52:37
| 2022-04-16T13:52:37
| 133,684,565
| 195
| 38
|
MIT
| 2023-08-16T10:26:48
| 2018-05-16T15:10:56
|
C++
|
UTF-8
|
Python
| false
| false
| 3,075
|
py
|
trie.py
|
""" Trie is an efficient information reTrieval data structure. Using Trie,
search complexities can be brought to optimal limit (key length). If we
store keys in binary search tree, a well balanced BST will need time
proportional to M * log N, where M is maximum string length and N is
number of keys in tree. Using Trie, we can search the key in O(M) time.
However, the penalty is the trie's storage requirement.
"""
class Node:
""" Node contains character and it's children trie node """
def __init__(self):
self.children = [None] * 26
self.word_finished = False
class Trie:
""" Trie Data Structure implementation """
def __init__(self):
self.root = Node()
def char_to_index(self, char):
""" converts key character into index
use only 'a' through 'z' lower case
"""
return ord(char) - ord('a')
def insert(self, word):
""" inserts a word in the trie data structure"""
current = self.root
for character in word:
index = self.char_to_index(character)
if not current.children[index]:
current.children[index] = Node()
current = current.children[index]
current.word_finished = True
def search(self, word):
""" returns true if the complete word is present in trie,
else false
"""
current = self.root
for character in word:
index = self.char_to_index(character)
if not current.children[index]:
return False
current = current.children[index]
return current is not None and current.word_finished
def starts_with(self, prefix):
""" returns true if any word present in the trie start with the
given prefix, else false
"""
current = self.root
for character in prefix:
index = self.char_to_index(character)
if not current.children[index]:
return False
current = current.children[index]
return True
def delete(self, word):
""" deletes word by negating word_finished boolean variable """
current = self.root
for character in word:
index = self.char_to_index(character)
if not current.children[index]:
raise Exception("word not present in trie")
current = current.children[index]
if current.word_finished:
current.word_finished = False
def main():
""" operational function """
tree = Trie()
tree.insert("apple")
print("is apple present?", tree.search("apple")) # True
print("is app present?", tree.search("app")) # False
print("starts with app?", tree.starts_with("app")) # True
print("inserting 'app' in trie")
tree.insert("app")
print("is app present?", tree.search("app")) # True
print("removing 'app' from trie")
tree.delete("app")
print("is app present?", tree.search("app")) # False
if __name__ == "__main__":
main()
|
5de91cae801439460225622d501094582f122068
|
fbbe424559f64e9a94116a07eaaa555a01b0a7bb
|
/Skimage_numpy/source/skimage/io/collection.py
|
4f3ee264700cefd29f9a7c9bb323d0d448432258
|
[
"MIT"
] |
permissive
|
ryfeus/lambda-packs
|
6544adb4dec19b8e71d75c24d8ed789b785b0369
|
cabf6e4f1970dc14302f87414f170de19944bac2
|
refs/heads/master
| 2022-12-07T16:18:52.475504
| 2022-11-29T13:35:35
| 2022-11-29T13:35:35
| 71,386,735
| 1,283
| 263
|
MIT
| 2022-11-26T05:02:14
| 2016-10-19T18:22:39
|
Python
|
UTF-8
|
Python
| false
| false
| 13,088
|
py
|
collection.py
|
"""Data structures to hold collections of images, with optional caching."""
from __future__ import with_statement
import os
from glob import glob
import re
from copy import copy
import numpy as np
import six
from PIL import Image
from ..external.tifffile import TiffFile
__all__ = ['MultiImage', 'ImageCollection', 'concatenate_images',
'imread_collection_wrapper']
def concatenate_images(ic):
"""Concatenate all images in the image collection into an array.
Parameters
----------
ic: an iterable of images (including ImageCollection and MultiImage)
The images to be concatenated.
Returns
-------
ar : np.ndarray
An array having one more dimension than the images in `ic`.
See Also
--------
ImageCollection.concatenate, MultiImage.concatenate
Raises
------
ValueError
If images in `ic` don't have identical shapes.
"""
all_images = [img[np.newaxis, ...] for img in ic]
try:
ar = np.concatenate(all_images)
except ValueError:
raise ValueError('Image dimensions must agree.')
return ar
def alphanumeric_key(s):
"""Convert string to list of strings and ints that gives intuitive sorting.
Parameters
----------
s: string
Returns
-------
k: a list of strings and ints
Examples
--------
>>> alphanumeric_key('z23a')
['z', 23, 'a']
>>> filenames = ['f9.10.png', 'e10.png', 'f9.9.png', 'f10.10.png',
... 'f10.9.png']
>>> sorted(filenames)
['e10.png', 'f10.10.png', 'f10.9.png', 'f9.10.png', 'f9.9.png']
>>> sorted(filenames, key=alphanumeric_key)
['e10.png', 'f9.9.png', 'f9.10.png', 'f10.9.png', 'f10.10.png']
"""
k = [int(c) if c.isdigit() else c for c in re.split('([0-9]+)', s)]
return k
class ImageCollection(object):
"""Load and manage a collection of image files.
Note that files are always stored in alphabetical order. Also note that
slicing returns a new ImageCollection, *not* a view into the data.
Parameters
----------
load_pattern : str or list
Pattern glob or filenames to load. The path can be absolute or
relative. Multiple patterns should be separated by os.pathsep,
e.g. '/tmp/work/*.png:/tmp/other/*.jpg'. Also see
implementation notes below.
conserve_memory : bool, optional
        If True, never keep more than one image in memory at any given
        time. Otherwise, images will be cached once they are loaded.
Other parameters
----------------
load_func : callable
``imread`` by default. See notes below.
Attributes
----------
files : list of str
If a glob string is given for `load_pattern`, this attribute
stores the expanded file list. Otherwise, this is simply
equal to `load_pattern`.
Notes
-----
ImageCollection can be modified to load images from an arbitrary
source by specifying a combination of `load_pattern` and
`load_func`. For an ImageCollection ``ic``, ``ic[5]`` uses
``load_func(file_pattern[5])`` to load the image.
Imagine, for example, an ImageCollection that loads every tenth
frame from a video file::
class AVILoader:
video_file = 'myvideo.avi'
def __call__(self, frame):
return video_read(self.video_file, frame)
avi_load = AVILoader()
frames = range(0, 1000, 10) # 0, 10, 20, ...
ic = ImageCollection(frames, load_func=avi_load)
x = ic[5] # calls avi_load(frames[5]) or equivalently avi_load(50)
Another use of ``load_func`` would be to convert all images to ``uint8``::
def imread_convert(f):
return imread(f).astype(np.uint8)
ic = ImageCollection('/tmp/*.png', load_func=imread_convert)
For files with multiple images, the images will be flattened into a list
and added to the list of available images. In this case, ``load_func``
should accept the keyword argument ``img_num``.
Examples
--------
>>> import skimage.io as io
>>> from skimage import data_dir
>>> coll = io.ImageCollection(data_dir + '/chess*.png')
>>> len(coll)
2
>>> coll[0].shape
(200, 200)
>>> ic = io.ImageCollection('/tmp/work/*.png:/tmp/other/*.jpg')
"""
def __init__(self, load_pattern, conserve_memory=True, load_func=None,
**load_func_kwargs):
"""Load and manage a collection of images."""
if isinstance(load_pattern, six.string_types):
load_pattern = load_pattern.replace(os.pathsep, ':')
load_pattern = load_pattern.split(':')
self._files = []
for pattern in load_pattern:
self._files.extend(glob(pattern))
self._files = sorted(self._files, key=alphanumeric_key)
self._numframes = self._find_images()
else:
self._files = load_pattern
self._numframes = len(self._files)
self._frame_index = None
if conserve_memory:
memory_slots = 1
else:
memory_slots = self._numframes
self._conserve_memory = conserve_memory
self._cached = None
if load_func is None:
from ._io import imread
self.load_func = imread
else:
self.load_func = load_func
self.load_func_kwargs = load_func_kwargs
self.data = np.empty(memory_slots, dtype=object)
@property
def files(self):
return self._files
@property
def conserve_memory(self):
return self._conserve_memory
def _find_images(self):
index = []
for fname in self._files:
if fname.lower().endswith(('.tiff', '.tif')):
with open(fname, 'rb') as f:
img = TiffFile(f)
index += [(fname, i) for i in range(len(img.pages))]
else:
try:
im = Image.open(fname)
im.seek(0)
except (IOError, OSError):
                    # PIL failed to open the file; record None as the frame
                    # number so __getitem__ calls load_func without img_num.
                    index.append((fname, None))
continue
i = 0
while True:
try:
im.seek(i)
except EOFError:
break
index.append((fname, i))
i += 1
if hasattr(im, 'fp') and im.fp:
im.fp.close()
self._frame_index = index
return len(index)
def __getitem__(self, n):
"""Return selected image(s) in the collection.
Loading is done on demand.
Parameters
----------
n : int or slice
The image number to be returned, or a slice selecting the images
and ordering to be returned in a new ImageCollection.
Returns
-------
img : ndarray or ImageCollection.
The `n`-th image in the collection, or a new ImageCollection with
the selected images.
"""
if hasattr(n, '__index__'):
n = n.__index__()
if type(n) not in [int, slice]:
raise TypeError('slicing must be with an int or slice object')
if type(n) is int:
n = self._check_imgnum(n)
idx = n % len(self.data)
if ((self.conserve_memory and n != self._cached) or
(self.data[idx] is None)):
kwargs = self.load_func_kwargs
if self._frame_index:
fname, img_num = self._frame_index[n]
if img_num is not None:
self.data[idx] = self.load_func(fname, img_num=img_num,
**kwargs)
else:
self.data[idx] = self.load_func(fname, **kwargs)
else:
self.data[idx] = self.load_func(self.files[n], **kwargs)
self._cached = n
return self.data[idx]
else:
# A slice object was provided, so create a new ImageCollection
# object. Any loaded image data in the original ImageCollection
# will be copied by reference to the new object. Image data
# loaded after this creation is not linked.
fidx = range(self._numframes)[n]
new_ic = copy(self)
if self._frame_index:
new_ic._files = [self._frame_index[i][0] for i in fidx]
new_ic._frame_index = [self._frame_index[i] for i in fidx]
else:
new_ic._files = [self._files[i] for i in fidx]
new_ic._numframes = len(fidx)
if self.conserve_memory:
if self._cached in fidx:
new_ic._cached = fidx.index(self._cached)
new_ic.data = np.copy(self.data)
else:
new_ic.data = np.empty(1, dtype=object)
else:
new_ic.data = self.data[fidx]
return new_ic
def _check_imgnum(self, n):
"""Check that the given image number is valid."""
num = self._numframes
if -num <= n < num:
n = n % num
else:
raise IndexError("There are only %s images in the collection"
% num)
return n
def __iter__(self):
"""Iterate over the images."""
for i in range(len(self)):
yield self[i]
def __len__(self):
"""Number of images in collection."""
return self._numframes
def __str__(self):
return str(self.files)
def reload(self, n=None):
"""Clear the image cache.
Parameters
----------
n : None or int
            Intended to clear the cache for this image only; the current
            implementation ignores `n` and always erases the entire cache.
"""
self.data = np.empty_like(self.data)
def concatenate(self):
"""Concatenate all images in the collection into an array.
Returns
-------
ar : np.ndarray
An array having one more dimension than the images in `self`.
See Also
--------
concatenate_images
Raises
------
ValueError
If images in the `ImageCollection` don't have identical shapes.
"""
return concatenate_images(self)
def imread_collection_wrapper(imread):
def imread_collection(load_pattern, conserve_memory=True):
"""Return an `ImageCollection` from files matching the given pattern.
Note that files are always stored in alphabetical order. Also note that
slicing returns a new ImageCollection, *not* a view into the data.
See `skimage.io.ImageCollection` for details.
Parameters
----------
load_pattern : str or list
Pattern glob or filenames to load. The path can be absolute or
relative. Multiple patterns should be separated by a colon,
e.g. '/tmp/work/*.png:/tmp/other/*.jpg'. Also see
implementation notes below.
conserve_memory : bool, optional
            If True, never keep more than one image in memory at any given
            time. Otherwise, images will be cached once they are loaded.
"""
return ImageCollection(load_pattern, conserve_memory=conserve_memory,
load_func=imread)
return imread_collection
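# A hedged usage sketch of the wrapper above: an I/O plugin's imread can be
# turned into a matching collection reader with the same caching behaviour
# (the import path below is illustrative, not a guaranteed public API):
#
#     from skimage.io._plugins.pil_plugin import imread
#     imread_collection = imread_collection_wrapper(imread)
#     ic = imread_collection('/tmp/work/*.png', conserve_memory=True)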
class MultiImage(ImageCollection):
"""A class containing a single multi-frame image.
Parameters
----------
filename : str
The complete path to the image file.
conserve_memory : bool, optional
Whether to conserve memory by only caching a single frame. Default is
True.
Notes
-----
If ``conserve_memory=True`` the memory footprint can be reduced, however
the performance can be affected because frames have to be read from file
more often.
The last accessed frame is cached, all other frames will have to be read
from file.
The current implementation makes use of ``tifffile`` for Tiff files and
PIL otherwise.
Examples
--------
>>> from skimage import data_dir
>>> img = MultiImage(data_dir + '/multipage.tif') # doctest: +SKIP
>>> len(img) # doctest: +SKIP
2
>>> for frame in img: # doctest: +SKIP
... print(frame.shape) # doctest: +SKIP
(15, 10)
(15, 10)
"""
def __init__(self, filename, conserve_memory=True, dtype=None,
**imread_kwargs):
"""Load a multi-img."""
from ._io import imread
def load_func(fname, **kwargs):
kwargs.setdefault('dtype', dtype)
return imread(fname, **kwargs)
self._filename = filename
super(MultiImage, self).__init__(filename, conserve_memory,
load_func=load_func, **imread_kwargs)
@property
def filename(self):
return self._filename
|
e4711460a7f11cf7d39e3a1a833c1fdf622f55a5
|
db12b990924703cd74748d8585cd9c11fafa6746
|
/h2o-hadoop-common/tests_multinode/python/pyunit_kmeans.py
|
e7e25c6930adeff144c076d1a0463642fed5c793
|
[
"Apache-2.0"
] |
permissive
|
h2oai/h2o-3
|
919019a8f297eec676011a9cfd2cc2d97891ce14
|
d817ab90c8c47f6787604a0b9639b66234158228
|
refs/heads/master
| 2023-08-17T18:50:17.732191
| 2023-08-17T16:44:42
| 2023-08-17T16:44:42
| 17,371,412
| 6,872
| 2,345
|
Apache-2.0
| 2023-09-14T18:05:40
| 2014-03-03T16:08:07
|
Jupyter Notebook
|
UTF-8
|
Python
| false
| false
| 1,453
|
py
|
pyunit_kmeans.py
|
import sys
import os
sys.path.insert(1, os.path.join("..", "..", "..", "h2o-py"))
from tests import pyunit_utils
import h2o
from h2o.estimators.kmeans import H2OKMeansEstimator
# Purpose: This tests k-means on datasets imported from HDFS.
def hdfs_kmeans():
hdfs_name_node = pyunit_utils.hadoop_namenode()
hdfs_iris_file = "/datasets/runit/iris_wheader.csv"
hdfs_covtype_file = "/datasets/runit/covtype.data"
print("Import iris_wheader.csv from HDFS")
url = "hdfs://{0}{1}".format(hdfs_name_node, hdfs_iris_file)
iris_h2o = h2o.import_file(url)
n = iris_h2o.nrow
print("rows: {0}".format(n))
assert n == 150, "Wrong number of rows. Got {0}. Should have got {1}".format(n, 150)
print("Running KMeans on iris")
iris_km = H2OKMeansEstimator(k=3, training_frame=iris_h2o[0:4], max_iterations=10)
iris_km.train()
print(iris_km)
print("Importing covtype.data from HDFS")
url = "hdfs://{0}{1}".format(hdfs_name_node, hdfs_covtype_file)
covtype_h2o = h2o.import_file(url)
n = covtype_h2o.nrow
print("rows: {0}".format(n))
assert n == 581012, "Wrong number of rows. Got {0}. Should have got {1}".format(n, 581012)
print("Running KMeans on covtype")
covtype_km = H2OKMeansEstimator(training_frame=covtype_h2o[0:55], k=8, max_iterations=10)
covtype_km.train()
print(covtype_km)
if __name__ == "__main__":
pyunit_utils.standalone_test(hdfs_kmeans)
else:
hdfs_kmeans()
|
ee8c9939f17995f54aa11b7a428a23e8e0435364
|
14c1ab5e883b05541b71fa5182b64cd78c45a418
|
/pyBN/learning/structure/_tests/test_pc.py
|
8a0ebdb32c48d581c4465bcd02c3c9cc49ce9d09
|
[
"MIT"
] |
permissive
|
ncullen93/pyBN
|
e4e8bedf91655ae1dd525fb167156abcd9d53da2
|
fe5c49d6656df6c516e7269dd7e8afc14522345b
|
refs/heads/master
| 2023-08-18T15:00:24.498986
| 2023-08-16T00:15:12
| 2023-08-16T00:15:12
| 49,839,196
| 142
| 62
| null | 2016-11-17T14:57:28
| 2016-01-17T22:43:32
|
Python
|
UTF-8
|
Python
| false
| false
| 936
|
py
|
test_pc.py
|
"""
********
UnitTest
PC
********
"""
__author__ = """Nicholas Cullen <ncullen.th@dartmouth.edu>"""
import unittest
import os
from os.path import dirname
import numpy as np
from pyBN.structure_learn.path_condition import pc
class PCTestCase(unittest.TestCase):
def setUp(self):
self.dpath = os.path.join(dirname(dirname(dirname(dirname(__file__)))),'data')
path = (os.path.join(self.dpath,'lizards.csv'))
self.data = np.loadtxt(path, dtype='int32',skiprows=1,delimiter=',')
def tearDown(self):
pass
def test_pc1_V(self):
bn = pc(self.data)
self.assertListEqual(bn.V,
[0,1,2])
def test_pc1_E(self):
bn = pc(self.data)
self.assertDictEqual(bn.E,
{0:[1,2],1:[],2:[]})
def test_pc1_F(self):
bn = pc(self.data)
self.assertDictEqual(bn.F,
{0: {'cpt': [], 'parents': [], 'values': [1,2]},
1: {'cpt': [], 'parents': [0], 'values': [1,2]},
2: {'cpt': [], 'parents': [0], 'values': [1,2]}})
|
3d9313728c1862332dd05a646546537d9cccf0cc
|
360328d098a74581d0822fba489dd15e0d4e7ab3
|
/src/richie/plugins/lti_consumer/api.py
|
b3b8b6242e182a8cbe7c19a8657cfce0737686ca
|
[
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
openfun/richie
|
0cef545486267bfb40e75e5fb2ce2a74f85a53ff
|
f2d46fc46b271eb3b4d565039a29c15ba15f027c
|
refs/heads/master
| 2023-08-31T23:51:37.714179
| 2023-08-29T15:25:04
| 2023-08-29T15:48:39
| 111,388,461
| 238
| 96
|
MIT
| 2023-09-13T12:48:53
| 2017-11-20T09:23:40
|
Python
|
UTF-8
|
Python
| false
| false
| 2,752
|
py
|
api.py
|
"""Declare API endpoints for LTI Consumer Plugin"""
from django.core.cache import caches
from django.core.cache.backends.base import InvalidCacheBackendError
from django.http import JsonResponse
from django.utils import translation
from rest_framework import viewsets
from rest_framework.decorators import action
from rest_framework.generics import get_object_or_404
from rest_framework.response import Response
from . import models
class LTIConsumerViewsSet(viewsets.GenericViewSet):
"""Viewset for LTI Consumer Plugin"""
@action(methods=["get"], detail=True, url_path="context")
# pylint: disable=no-self-use,unused-argument,invalid-name
def get_context(self, request, version=None, pk=None):
"""Process context data for the LTI Consumer Plugin.
Parameters:
- pk: the primary key of the LTI Consumer plugin to get context
Returns:
- response (JSON Object):
- url: the LTI resource url
- content_parameters: all generated parameters related to the lti provider
- is_automatic_resizing: boolean to control automatic resizing
"""
language = translation.get_language()
edit = request.toolbar and request.toolbar.edit_mode_active
user_infos = request.GET.dict()
if user_infos.get("user_id") is None:
return Response({"user_id": ["This parameter is required."]}, status=400)
cache_key = (
f"lti_consumer_plugin__pk_{pk}_{user_infos.get('user_id')}_{language}"
)
# Send response from cache only if edition is off
if edit:
cache = None
else:
try:
cache = caches["memory_cache"]
except InvalidCacheBackendError:
cache = None
else:
response = cache.get(cache_key)
if response is not None:
return JsonResponse(response)
plugin = get_object_or_404(models.LTIConsumer, pk=pk)
# If edition is on, check permissions to make sure it is also allowed
# before granting the instructor role
edit = edit and plugin.placeholder.has_change_plugin_permission(
request.user, plugin
)
response = {
"is_automatic_resizing": plugin.get_is_automatic_resizing(),
"content_parameters": plugin.get_content_parameters(
user_infos=user_infos, edit=edit
),
"url": plugin.url,
}
if cache is not None:
# Cache the response for 5 minutes,
# lti oauth credentials are stale after this delay.
cache.set(cache_key, response, 5 * 60)
return JsonResponse(response)
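# Illustrative request/response for the action above (values hypothetical; the
# exact route prefix depends on the project's URL configuration):
#   GET /api/v1.0/plugins/lti-consumer/<pk>/context/?user_id=1234
#   -> {"url": "...", "content_parameters": {...}, "is_automatic_resizing": true}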
|
6d8c6a9062b79895f78535d07e7f241929bf988a
|
3ca67d69abd4e74b7145b340cdda65532f90053b
|
/programmers/난이도별/level01.로또의_최고_순위와_최저_순위/6047198844.py
|
a4493013cd74affc00c61313c9a980f9e8c0f652
|
[] |
no_license
|
DKU-STUDY/Algorithm
|
19549516984b52a1c5cd73e1ed1e58f774d6d30e
|
6f78efdbefd8eedab24e43d74c7dae7f95c2893b
|
refs/heads/master
| 2023-02-18T06:48:39.309641
| 2023-02-09T07:16:14
| 2023-02-09T07:16:14
| 258,455,710
| 175
| 49
| null | 2023-02-09T07:16:16
| 2020-04-24T08:42:27
|
Python
|
UTF-8
|
Python
| false
| false
| 192
|
py
|
6047198844.py
|
def solution(lottos, win_nums):
cnt = 0
for l in lottos:
cnt += l in win_nums
high = min(6,7-cnt-lottos.count(0))
low = min(6,7-cnt)
return [high,low]
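# Worked example (illustrative): lottos=[44,1,0,0,31,25], win_nums=[31,10,45,1,6,19]
# gives 2 sure matches (1 and 31) plus 2 erased zeros, so the best rank is
# min(6, 7-2-2) = 3 and the worst rank is min(6, 7-2) = 5:
# solution([44,1,0,0,31,25], [31,10,45,1,6,19]) == [3, 5]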
|
e4ea5cb715d7933dcd9a1586f6229362abaaa909
|
617f9a5c9ff8e710de7b43c031cdd97a671c68c3
|
/tests/core/test_neuron.py
|
4e1fbf73fa7730019bb417c833c2ce52685c3c5f
|
[
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
BlueBrain/NeuroM
|
e136fc4a36fe1b5d8e590179dc3d8a111d6e5282
|
34d202cfb95d32cd985a423b65349aa1b01957d6
|
refs/heads/master
| 2023-08-17T18:42:43.125992
| 2023-08-08T07:14:38
| 2023-08-08T07:14:38
| 34,906,350
| 106
| 75
|
BSD-3-Clause
| 2023-09-14T10:58:38
| 2015-05-01T14:21:43
|
Python
|
UTF-8
|
Python
| false
| false
| 5,198
|
py
|
test_neuron.py
|
# Copyright (c) 2015, Ecole Polytechnique Federale de Lausanne, Blue Brain Project
# All rights reserved.
#
# This file is part of NeuroM <https://github.com/BlueBrain/NeuroM>
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the names of
# its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from copy import copy, deepcopy
from pathlib import Path
import neurom as nm
import numpy as np
import morphio
from neurom.core.morphology import Morphology, graft_morphology, iter_segments
from numpy.testing import assert_array_equal
SWC_PATH = Path(__file__).parent.parent / 'data/swc/'
def test_simple():
nm.load_morphology(str(SWC_PATH / 'simple.swc'))
def test_load_morphology_pathlib():
nm.load_morphology(SWC_PATH / 'simple.swc')
def test_load_morphology_from_other_morphologies():
filename = SWC_PATH / 'simple.swc'
expected_points = [[ 0., 0., 0., 1.],
[ 0., 5., 0., 1.],
[ 0., 5., 0., 1.],
[-5., 5., 0., 0.],
[ 0., 5., 0., 1.],
[ 6., 5., 0., 0.],
[ 0., 0., 0., 1.],
[ 0., -4., 0., 1.],
[ 0., -4., 0., 1.],
[ 6., -4., 0., 0.],
[ 0., -4., 0., 1.],
[-5., -4., 0., 0.]]
assert_array_equal(nm.load_morphology(nm.load_morphology(filename)).points,
expected_points)
assert_array_equal(nm.load_morphology(Morphology(filename)).points,
expected_points)
assert_array_equal(nm.load_morphology(morphio.Morphology(filename)).points,
expected_points)
def test_for_morphio():
Morphology(morphio.mut.Morphology())
morphio_m = morphio.mut.Morphology()
morphio_m.soma.points = [[0,0,0], [1,1,1], [2,2,2]]
morphio_m.soma.diameters = [1, 1, 1]
neurom_m = Morphology(morphio_m)
assert_array_equal(neurom_m.soma.points,
[[0., 0., 0., 0.5],
[1., 1., 1., 0.5],
[2., 2., 2., 0.5]])
neurom_m.soma.points = [[1, 1, 1, 1],
[2, 2, 2, 2]]
assert_array_equal(neurom_m.soma.points,
[[1, 1, 1, 1],
[2, 2, 2, 2]])
def _check_cloned_morphology(m, m2):
# check if two morphs are identical
# soma
assert isinstance(m2.soma, type(m.soma))
assert m.soma.radius == m2.soma.radius
for v1, v2 in zip(m.soma.iter(), m2.soma.iter()):
assert np.allclose(v1, v2)
# neurites
for v1, v2 in zip(iter_segments(m), iter_segments(m2)):
(v1_start, v1_end), (v2_start, v2_end) = v1, v2
assert np.allclose(v1_start, v2_start)
assert np.allclose(v1_end, v2_end)
# check if the ids are different
# somata
assert m.soma is not m2.soma
# neurites
for neu1, neu2 in zip(m.neurites, m2.neurites):
assert neu1 is not neu2
# check if changes are propagated between morphs
m2.soma.radius = 10.
assert m.soma.radius != m2.soma.radius
def test_copy():
m = nm.load_morphology(SWC_PATH / 'simple.swc')
_check_cloned_morphology(m, copy(m))
def test_deepcopy():
m = nm.load_morphology(SWC_PATH / 'simple.swc')
_check_cloned_morphology(m, deepcopy(m))
def test_graft_morphology():
m = nm.load_morphology(SWC_PATH / 'simple.swc')
basal_dendrite = m.neurites[0]
m2 = graft_morphology(basal_dendrite.root_node)
assert len(m2.neurites) == 1
assert basal_dendrite == m2.neurites[0]
def test_str():
n = nm.load_morphology(SWC_PATH / 'simple.swc')
assert 'Morphology' in str(n)
assert 'Section' in str(n.neurites[0].root_node)
|
6f34696b55975aaffd5c37c590dd1c7ca606420a
|
e65a4dbfbfb0e54e59787ba7741efee12f7687f3
|
/net-p2p/py-ed2k-tools/files/patch-temp__summary.py
|
a18da6eceb9647603ff7c35d56cd70f1a5b76dc6
|
[
"BSD-2-Clause"
] |
permissive
|
freebsd/freebsd-ports
|
86f2e89d43913412c4f6b2be3e255bc0945eac12
|
605a2983f245ac63f5420e023e7dce56898ad801
|
refs/heads/main
| 2023-08-30T21:46:28.720924
| 2023-08-30T19:33:44
| 2023-08-30T19:33:44
| 1,803,961
| 916
| 918
|
NOASSERTION
| 2023-09-08T04:06:26
| 2011-05-26T11:15:35
| null |
UTF-8
|
Python
| false
| false
| 4,245
|
py
|
patch-temp__summary.py
|
--- temp_summary.py.orig 2003-05-06 11:53:14 UTC
+++ temp_summary.py
@@ -1,12 +1,9 @@
#!/usr/bin/python
from ed2k_metutils import *
import os
-import stat
+import unicodedata
-# I'm really surprised there's no easy way to get the terminal
-# width in python... :-/ I can't do external invocations to
-# stty, etc, because they might not be there on windows...
-WIDTH = 80;
+WIDTH = getattr(os.get_terminal_size(), 'columns', 80)
if __name__ == "__main__":
# Here's an example to cut and keep.
@@ -15,26 +12,22 @@ if __name__ == "__main__":
# see how much data I actually got from night to night.
if len( sys.argv ) < 2:
- print "invocation: %s < <x.part.met> [x.part.met ...] | <temp_dir> >" % sys.argv[ 0 ];
- print
- print "This program will show the amount downloaded vs. the total size "
- print "for the .part.met files listed on the command line."
- print
- print "This program assumes an 80 column display. You can tweak this "
- print "by editing the script. Change the 'WIDTH' value."
+ print("invocation: %s < <x.part.met> [x.part.met ...] | <temp_dir> >" % sys.argv[0])
+ print()
+ print("This program will show the amount downloaded vs. the total size")
+ print("for the .part.met files listed on the command line.")
sys.exit( -1 );
total_size = total_down = 0;
- sta = os.stat( sys.argv[ 1 ] )[ 0 ];
- if stat.S_ISDIR( sta ):
- mets = [ "%s%s" % ( sys.argv[ 1 ], x ) for x in os.listdir( sys.argv[ 1 ] ) if x.endswith( ".met" ) ];
+ if os.path.isdir(sys.argv[1]):
+ mets = [ "%s/%s" % (sys.argv[1], x) for x in os.listdir(sys.argv[1]) if x.endswith(".met") ]
else:
mets = sys.argv[ 1 : ];
for met_file in mets:
- fh = open( met_file, "r" );
+ fh = open(met_file, "rb")
data = fh.read();
fh.close();
@@ -43,7 +36,7 @@ if __name__ == "__main__":
# We're interested in the name, the total size, and some kind of... anti-gapping.
size = met_data.FindTags( TAG_HANDLE_FILESIZE )[ 0 ].value;
- name = met_data.FindTags( TAG_HANDLE_FILENAME )[ 0 ].value;
+ name = met_data.FindTags(TAG_HANDLE_FILENAME)[0].value.decode()
# Set the total downloaded to the file size.
down = size;
@@ -71,19 +64,42 @@ if __name__ == "__main__":
bar = "#" * ( WIDTH - 2 );
for gap in gaps:
gap_start, gap_end = gaps[ gap ];
- char_gap_start = gap_start / bytes_per_char;
- char_gap_end = gap_end / bytes_per_char;
+ char_gap_start = int(gap_start / bytes_per_char)
+ char_gap_end = int(gap_end / bytes_per_char)
bar = bar[ : char_gap_start ] + " " * ( char_gap_end - char_gap_start ) + bar[ char_gap_end : ];
+
+    # Account for CJK characters occupying two fixed-width spaces.
+ def char_width(c: str) -> int:
+ if not c.isprintable(): return 0
+ return 2 if unicodedata.category(c) == 'Lo' else 1
+
+ def visible_len(s: str) -> int:
+ return sum(char_width(c) for c in s)
+
+ # Truncate string to specified limit. If truncation happens
+ # on double-width character (like it would have to be cut in
+ # half), append an extra space for nicer alignment.
+ def visible_substr_padded(s: str, l: int) -> str:
+ vislen = 0
+ cut_here = 0
+ padding = ''
+ for c in s:
+ vislen += char_width(c)
+ if vislen <= l: cut_here += 1
+ if vislen == l: break
+ if vislen > l: padding = ' '; break
+ return s[:cut_here] + padding
# Print out our summary. Limit the filenames.
- sizestring = " - %s - %iK of %iK" % ( met_file.split( "/" )[ -1 ], down / 1024, size / 1024 );
+ sizestring = " - %s - %.2fK of %.2fK" % (met_file.split("/")[-1], down / 1024, size / 1024)
max_name_size = WIDTH - len( sizestring );
- if len( name ) < max_name_size:
- name += " " * ( max_name_size - len( name ) );
+ vislen = visible_len(name)
+ if vislen < max_name_size:
+ name += " " * (max_name_size - vislen)
else:
- name = name[ : max_name_size ];
- print "%s%s" % ( name, sizestring );
- print "[%s]" % bar;
- print
+ name = visible_substr_padded(name, max_name_size)
+ print("%s%s" % (name, sizestring))
+ print("[%s]" % bar)
+ print()
del( met_data );
- print "Totals: %sK of %sK" % ( total_down / 1024, total_size / 1024 );
+ print("Totals: %.2fK of %.2fK" % (total_down / 1024, total_size / 1024))
|
548f985582458c5cb2faa18279b31dd315746ed4
|
78e9d3d12e96fd37817e510b7b272d0d08ec4053
|
/scripts/data_generation/create_flow.py
|
5eed2235935d51ce334fc995df6f34eb5796da57
|
[] |
no_license
|
ly015/intrinsic_flow
|
15e3926ccc269d3f37d37ceb74d1a3b2ff41823c
|
94ea8f0b6c2e9d6380a29055eaa5b0068e894a25
|
refs/heads/master
| 2022-01-31T05:58:35.091723
| 2022-01-17T03:58:21
| 2022-01-17T03:58:21
| 178,152,311
| 158
| 26
| null | 2022-01-17T03:58:22
| 2019-03-28T07:45:22
|
Python
|
UTF-8
|
Python
| false
| false
| 9,482
|
py
|
create_flow.py
|
from __future__ import division, print_function
import util.io as io
import numpy as np
import tqdm
from misc import flow_util
import imageio
import cv2
# Load definition of SMPL_faces
SMPL_Faces = np.load('scripts/3d/smpl_faces.npy')
def calc_correspondence_from_smpl_parallel():
from multiprocessing import Process
num_pair = -1
##############################
# configs for dfm
##############################
# pair_split_fn = 'datasets/DF_Pose/Label/pair_split_dfm.json'
# hmr_pred_dir1 = 'datasets/DF_Pose/3d/hmr_dfm_v2/pred/'
# hmr_pred_dir2 = 'datasets/DF_Pose/3d/hmr_dfm_v2/pred/'
# output_dir = 'datasets/DF_Pose/3d/hmr_dfm_v2/corr/'
# num_threads = 20
# bidirectional = True
##############################
# configs for market1501
##############################
pair_split_fn = 'datasets/market1501/Label/pair_split.json'
hmr_pred_dir1 = 'datasets/market1501/3d/hmr/pred/'
hmr_pred_dir2 = 'datasets/market1501/3d/hmr/pred/'
output_dir = 'datasets/market1501/3d/hmr/corr/'
num_threads = 20
bidirectional = True
#############################
# configs for dfm_aug
#############################
#pair_split_fn = 'datasets/DF_Pose/Label/pair_split_dfm_aug.json'
#hmr_pred_dir1 = 'datasets/DF_Pose/3d/hmr_dfm_v2/pred/'
#hmr_pred_dir2 = 'datasets/DF_Pose/3d/hmr_dfm_aug_v2/pred/'
#output_dir = 'datasets/DF_Pose/3d/hmr_dfm_aug_v2/corr/'
#num_threads = 20
#bidirectional = False
io.mkdir_if_missing(output_dir)
# load pair ids
pairs = io.load_json(pair_split_fn)
pair_list = pairs['test'] + pairs['train']
if num_pair > 0:
pair_list = pair_list[:num_pair]
def _unit_func(idx, pair_list, bidirectional_corr):
for id_1, id_2 in tqdm.tqdm(pair_list, position=idx):
pred_1 = io.load_data(hmr_pred_dir1 + id_1 + '.pkl')
pred_2 = io.load_data(hmr_pred_dir2 + id_2 + '.pkl')
corr_2to1, vis_mask_2 = calc_correspondence_from_smpl_internal(pred_2, pred_1)
flow_util.write_corr(output_dir + '%s_%s.corr'%(id_2, id_1), corr_2to1, vis_mask_2)
if bidirectional_corr:
corr_1to2, vis_mask_1 = calc_correspondence_from_smpl_internal(pred_1, pred_2)
flow_util.write_corr(output_dir + '%s_%s.corr'%(id_1, id_2), corr_1to2, vis_mask_1)
p_list = []
for i_p in range(num_threads):
pair_list_i = pair_list[i_p::num_threads]
p = Process(target=_unit_func, args=(i_p, pair_list_i, bidirectional))
p.start()
p_list.append(p)
for p in p_list:
p.join()
def calc_correspondence_from_smpl():
'''
    Compute pixel-wise correspondence between an image pair using the SMPL model (http://smpl.is.tue.mpg.de/).
    The SMPL fit result is predicted by HMR (https://github.com/akanazawa/hmr), with the following format:
pred = {
'id': sid,
'theta': theta,
'proc_param': proc_param,
'verts2d': verts2d,
'verts_z': verts_z,
'visibility': visibility, # a map with same size of img, each pixel is its corresponding SMPL face index (or 4294967295 if it's corresponding to no face)
}
See '/data2/ynli/human3d/hmr/run_hmr.py' for more details
'''
# num_pair = 64
# pair_split_fn = 'datasets/DF_Pose/Label/pair_split.json'
# hmr_pred_dir = 'temp/3d_hmr/hmr_df_openpose/pred/'
# output_dir = 'temp/3d_hmr/corr/'
num_pair = -1
pair_split_fn = 'datasets/DF_Pose/Label/pair_split_dfm.json'
hmr_pred_dir = 'datasets/DF_Pose/3d/hmr_dfm/pred/'
output_dir = 'datasets/DF_Pose/3d/hmr_dfm/corr/'
io.mkdir_if_missing(output_dir)
# load pair ids
pairs = io.load_json(pair_split_fn)
pair_list = pairs['test'] + pairs['train']
if num_pair > 0:
pair_list = pair_list[:num_pair]
for id_1, id_2 in tqdm.tqdm(pair_list):
pred_1 = io.load_data(hmr_pred_dir + id_1 + '.pkl')
pred_2 = io.load_data(hmr_pred_dir + id_2 + '.pkl')
corr_2to1, vis_mask_2 = calc_correspondence_from_smpl_internal(pred_2, pred_1)
flow_util.write_corr(output_dir + '%s_%s.corr'%(id_2, id_1), corr_2to1, vis_mask_2)
corr_1to2, vis_mask_1 = calc_correspondence_from_smpl_internal(pred_1, pred_2)
flow_util.write_corr(output_dir + '%s_%s.corr'%(id_1, id_2), corr_1to2, vis_mask_1)
def calc_correspondence_from_smpl_internal(pred_1, pred_2, faces=SMPL_Faces):
'''
Compute for each pixel (x,y) in img_1 the corresponding pixel (u,v) in img_2.
Input:
pred_1: HMR prediction for image 1. see calc_correspondence_from_smpl.
pred_2: HMR prediction for image 2
faces: list, each element is a triangle face represented by 3 vertex indices (i_a, i_b, i_c)
Output:
corr_map: (img_size, img_size, 2). corr_map[y,x] = (u,v)
corr_mask: (img_size, img_size). The value is one of:
0: human pixel with correspondence in img_1
            1: human pixel without correspondence in img_1
2: background pixel
'''
invisible = 4294967295
    # information from the predictions
verts_1 = pred_1['verts2d']
verts_2 = pred_2['verts2d']
vis_1 = pred_1['visibility']
h, w = vis_1.shape[:2]
# common visible face indices
visible_face_1 = np.unique(pred_1['visibility'])
visible_face_2 = np.unique(pred_2['visibility'])
visible_face_1 = visible_face_1[visible_face_1 != invisible]
common_face = np.intersect1d(visible_face_1, visible_face_2, assume_unique=True)
# corr_map and corr_mask
corr_map = np.zeros((h, w, 2), dtype=np.float32)
corr_mask = np.ones((h, w), dtype=np.uint8)
corr_mask[vis_1==invisible] = 2
xx, yy = np.meshgrid(range(w), range(h), indexing='xy')
for face_id in visible_face_1:
vis_mask = (vis_1==face_id)
pts_1 = np.stack([xx[vis_mask], yy[vis_mask]]).T #(N, 2)
# barycentric coordinate transformation
vert_ids = faces[face_id] #[i_a, i_b, i_c]
tri_1 = verts_1[vert_ids] #(3, 2)
tri_2 = verts_2[vert_ids]
pts_bc = get_barycentric_coords(pts_1, tri_1) #(N, 3)
pts_2 = pts_bc.dot(tri_2) #(N, 2)
corr_map[vis_mask] = pts_2
if face_id in common_face:
corr_mask[vis_mask] = 0
return corr_map, corr_mask
def get_barycentric_coords(pts, triangle):
    '''
    Compute the barycentric coordinates of a set of points with respect to a given triangle.
    Input:
        pts: (N, 2), point coordinates in the original space
        triangle: (3, 2), triangle vertices
    Output:
        pts_bc: (N, 3), barycentric coordinates
    '''
a, b, c = triangle
v0 = b-a
v1 = c-a
v2 = pts - a
d00 = v0.dot(v0) # scalar
d01 = v0.dot(v1) # scalar
d11 = v1.dot(v1) # scalar
d20 = v2.dot(v0) # (N,)
d21 = v2.dot(v1) # (N,)
denom = d00*d11 - d01*d01
v = (d11*d20 - d01*d21) / denom
w = (d00*d21 - d01*d20) / denom
u = 1. - v - w
return np.stack([u,v,w]).T
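# Quick numerical check for get_barycentric_coords (illustrative, not part of
# the original script): the centroid of a triangle has coordinates (1/3, 1/3, 1/3).
# >>> tri = np.array([[0., 0.], [1., 0.], [0., 1.]])
# >>> get_barycentric_coords(np.array([[1./3, 1./3]]), tri).round(3)
# array([[0.333, 0.333, 0.333]])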
def warp_visible_region():
'''
Warp visible regions from source image to target image.
'''
num_pair = 64
img_size = (256, 256)
# load pair ids
pairs = io.load_json('datasets/DF_Pose/Label/pair_split.json')
pair_list = pairs['test'] + pairs['train']
pair_list = pair_list[:num_pair]
# set paths
img_dir = 'datasets/DF_Pose/Img/img_df/'
smpl_img_dir = 'temp/3d_hmr/hmr_df_openpose/vis/'
# directory where correspondence map and visible mask are stored
corr_dir = 'temp/3d_hmr/corr/'
# output dir
output_dir = 'temp/3d_hmr/output/hmr_vis/'
io.mkdir_if_missing(output_dir)
# a helper function
def _crop_sub_image(img, row, col, sub_size=img_size):
w, h = sub_size
return img[h*(row-1):h*row, w*(col-1):w*col]
colors = {
'green': np.array([0, 255, 0], dtype=np.uint8),
'red': np.array([255, 0, 0], dtype=np.uint8)
}
for idx, (id_1, id_2) in enumerate(tqdm.tqdm(pair_list)):
img_1 = imageio.imread(img_dir + id_1 + '.jpg')
img_2 = imageio.imread(img_dir + id_2 + '.jpg')
smpl_img_1 = imageio.imread(smpl_img_dir + id_1 + '.jpg')
smpl_img_2 = imageio.imread(smpl_img_dir + id_2 + '.jpg')
corr_2to1, mask_2 = flow_util.read_corr(corr_dir+'%s_%s.corr'%(id_2, id_1)) # flow from img_2 to img_1
vis_mask = (mask_2==0).astype(np.uint8)[..., np.newaxis]
        invis_mask = (mask_2==1).astype(np.uint8)[..., np.newaxis] # body region in img_2 with no correspondence in img_1
        img_warp = cv2.remap(img_1, corr_2to1, None, cv2.INTER_LINEAR, borderMode=cv2.BORDER_REPLICATE)
img_warp = img_warp*vis_mask + np.ones(img_warp.shape, dtype=np.uint8)*(1-vis_mask)
img_rend_1 = _crop_sub_image(smpl_img_1, 1, 3)
img_rend_2 = _crop_sub_image(smpl_img_2, 1, 3)
img_verts_1 = _crop_sub_image(smpl_img_1, 3, 2)
img_verts_2 = _crop_sub_image(smpl_img_2, 3, 2)
img_vis = vis_mask*colors['green'] + invis_mask*colors['red']
img_out = np.hstack([img_1, img_rend_1, img_verts_1, img_2, img_rend_2, img_verts_2, img_vis, img_warp])
imageio.imwrite(output_dir + '%d_%s_%s.jpg'%(idx, id_1, id_2), img_out)
if __name__ == '__main__':
# calc_correspondence_from_smpl()
calc_correspondence_from_smpl_parallel()
# warp_visible_region()
|
db227d638971bd8cc263e5af2c8dff6b9c29e600
|
05643b9b4d20db912c3dbfbc191cadea3143016c
|
/instrumentation/opentelemetry-instrumentation-logging/src/opentelemetry/instrumentation/logging/constants.py
|
b18f93364f31b23f4727bcd799697c495225be53
|
[
"Apache-2.0"
] |
permissive
|
open-telemetry/opentelemetry-python-contrib
|
35566cd088aa0b23ca977109fcd435ee480784b9
|
0871dd455c0adfa125a2f258a0b55c47a5da5227
|
refs/heads/main
| 2023-08-26T07:30:40.212226
| 2023-08-21T16:42:12
| 2023-08-21T16:42:12
| 220,524,743
| 476
| 401
|
Apache-2.0
| 2023-09-14T21:36:33
| 2019-11-08T18:23:43
|
Python
|
UTF-8
|
Python
| false
| false
| 5,411
|
py
|
constants.py
|
# Copyright The OpenTelemetry Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
DEFAULT_LOGGING_FORMAT = "%(asctime)s %(levelname)s [%(name)s] [%(filename)s:%(lineno)d] [trace_id=%(otelTraceID)s span_id=%(otelSpanID)s resource.service.name=%(otelServiceName)s trace_sampled=%(otelTraceSampled)s] - %(message)s"
_MODULE_DOC = """
The OpenTelemetry ``logging`` integration automatically injects tracing context into log statements.
The integration registers a custom log record factory with the standard library logging module that automatically injects
tracing context into log record objects. Optionally, the integration can also call ``logging.basicConfig()`` to set a logging
format with placeholders for span ID, trace ID and service name.
The following keys are injected into log record objects by the factory:
- ``otelSpanID``
- ``otelTraceID``
- ``otelServiceName``
- ``otelTraceSampled``
The integration uses the following logging format by default:
.. code-block::
{default_logging_format}
Enable trace context injection
------------------------------
The integration is opt-in and must be enabled explicitly by setting the environment variable ``OTEL_PYTHON_LOG_CORRELATION`` to ``true``.
The integration always registers the custom factory that injects the tracing context into the log record objects. Setting
``OTEL_PYTHON_LOG_CORRELATION`` to ``true`` calls ``logging.basicConfig()`` to set a logging format that actually makes
use of the injected variables.
Environment variables
---------------------
.. envvar:: OTEL_PYTHON_LOG_CORRELATION
This env var must be set to ``true`` in order to enable trace context injection into logs by calling ``logging.basicConfig()`` and
setting a logging format that makes use of the injected tracing variables.
Alternatively, ``set_logging_format`` argument can be set to ``True`` when initializing the ``LoggingInstrumentor`` class to achieve the
same effect.
.. code-block::
LoggingInstrumentor(set_logging_format=True)
The default value is ``false``.
.. envvar:: OTEL_PYTHON_LOG_FORMAT
This env var can be used to instruct the instrumentation to use a custom logging format.
Alternatively, a custom logging format can be passed to the ``LoggingInstrumentor`` as the ``logging_format`` argument. For example:
.. code-block::
LoggingInstrumentor(logging_format='%(msg)s [span_id=%(span_id)s]')
The default value is:
.. code-block::
{default_logging_format}
.. envvar:: OTEL_PYTHON_LOG_LEVEL
This env var can be used to set a custom logging level.
Alternatively, log level can be passed to the ``LoggingInstrumentor`` during initialization. For example:
.. code-block::
LoggingInstrumentor(log_level=logging.DEBUG)
The default value is ``info``.
Options are:
- ``info``
- ``error``
- ``debug``
- ``warning``
Manually calling logging.basicConfig
------------------------------------
``logging.basicConfig()`` can be called to set a global logging level and format. Only the first ever call has any effect on the global logger.
Any subsequent calls have no effect and do not override a previously configured global logger. This integration calls ``logging.basicConfig()`` for you
when ``OTEL_PYTHON_LOG_CORRELATION`` is set to ``true``. It uses the format and level specified by ``OTEL_PYTHON_LOG_FORMAT`` and ``OTEL_PYTHON_LOG_LEVEL``
environment variables respectively.
If your code or some other library/framework you are using calls logging.basicConfig before this integration is enabled, then this integration's logging
format will not be used and log statements will not contain tracing context. For this reason, you'll need to make sure this integration is enabled as early
as possible in the service lifecycle or that your framework is configured to use a logging format with placeholders for tracing context. This can be achieved by
adding the following placeholders to your logging format:
.. code-block::
%(otelSpanID)s %(otelTraceID)s %(otelServiceName)s %(otelTraceSampled)s
API
-----
.. code-block:: python
from opentelemetry.instrumentation.logging import LoggingInstrumentor
LoggingInstrumentor().instrument(set_logging_format=True)
Note
-----
If you do not set ``OTEL_PYTHON_LOG_CORRELATION`` to ``true`` but instead set the logging format manually or through your framework, you must ensure that this
integration is enabled before you set the logging format. This is important because unless the integration is enabled, the tracing context variables
are not injected into the log record objects. This means any attempted log statements made after setting the logging format and before enabling this integration
will result in KeyError exceptions. Such exceptions are automatically swallowed by the logging module and do not result in crashes but you may still lose out
on important log messages.
""".format(
default_logging_format=DEFAULT_LOGGING_FORMAT
)
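# A minimal enabling sketch (assumed usage pieced together from the docs
# above, not part of this module): instrument before anything calls
# logging.basicConfig() so the otel* placeholders are populated.
#
#   from opentelemetry.instrumentation.logging import LoggingInstrumentor
#   import logging
#   LoggingInstrumentor().instrument(set_logging_format=True)
#   logging.getLogger(__name__).info("hello")  # record carries otelTraceID etc.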
|
fab44f69f556280346df672490335a4f7692f46f
|
d1f15554df2d5c0f74ddbcba6e870359841f682b
|
/wagtail/snippets/tests/test_usage.py
|
7f4f8322b8c346de5de8434a25e51d710acbba72
|
[
"BSD-3-Clause",
"LicenseRef-scancode-proprietary-license"
] |
permissive
|
wagtail/wagtail
|
bd405f89b86e0c625fef0685fd6bfba41cf5cbfc
|
06a7bc6124bf62675c09fbe0a4ed9bbac183e025
|
refs/heads/main
| 2023-09-04T06:22:51.601208
| 2023-09-01T15:22:00
| 2023-09-01T15:22:00
| 16,479,108
| 12,974
| 3,580
|
BSD-3-Clause
| 2023-09-14T10:45:04
| 2014-02-03T12:41:59
|
Python
|
UTF-8
|
Python
| false
| false
| 8,018
|
py
|
test_usage.py
|
from io import StringIO
from django.contrib.admin.utils import quote
from django.contrib.auth.models import Permission
from django.core import management
from django.test import TestCase
from django.urls import reverse
from wagtail.models import Page, ReferenceIndex
from wagtail.test.testapp.models import (
Advert,
DraftStateModel,
EventPage,
GenericSnippetPage,
)
from wagtail.test.utils import WagtailTestUtils
class TestUsageCount(TestCase):
fixtures = ["test.json"]
@classmethod
def setUpTestData(cls):
super().setUpTestData()
output = StringIO()
management.call_command("rebuild_references_index", stdout=output)
def test_snippet_usage_count(self):
advert = Advert.objects.get(pk=1)
self.assertEqual(ReferenceIndex.get_grouped_references_to(advert).count(), 2)
class TestUsedBy(TestCase):
fixtures = ["test.json"]
@classmethod
def setUpTestData(cls):
super().setUpTestData()
output = StringIO()
management.call_command("rebuild_references_index", stdout=output)
def test_snippet_used_by(self):
advert = Advert.objects.get(pk=1)
usage = ReferenceIndex.get_grouped_references_to(advert)
self.assertIsInstance(usage[0], tuple)
self.assertIsInstance(usage[0][0], Page)
self.assertIsInstance(usage[0][1], list)
self.assertIsInstance(usage[0][1][0], ReferenceIndex)
class TestSnippetUsageView(WagtailTestUtils, TestCase):
fixtures = ["test.json"]
def setUp(self):
self.user = self.login()
def test_use_latest_draft_as_title(self):
snippet = DraftStateModel.objects.create(text="Draft-enabled Foo, Published")
snippet.save_revision().publish()
snippet.text = "Draft-enabled Bar, In Draft"
snippet.save_revision()
response = self.client.get(
reverse(
"wagtailsnippets_tests_draftstatemodel:usage",
args=[quote(snippet.pk)],
)
)
# Should use the latest draft title in the header subtitle
self.assertContains(
response,
'<span class="w-header__subtitle">Draft-enabled Bar, In Draft</span>',
)
def test_usage(self):
# resave so that usage count gets updated
page = Page.objects.get(pk=2)
page.save()
gfk_page = GenericSnippetPage(
title="Foobar Title",
snippet_content_object=Advert.objects.get(pk=1),
)
page.add_child(instance=gfk_page)
response = self.client.get(
reverse(
"wagtailsnippets_tests_advert:usage",
args=["1"],
)
)
self.assertContains(response, "Welcome to the Wagtail test site!")
self.assertContains(response, "Foobar Title")
self.assertContains(response, "<td>Generic snippet page</td>", html=True)
self.assertContains(response, "Snippet content object")
self.assertContains(response, "<th>Field</th>", html=True)
self.assertNotContains(response, "<th>If you confirm deletion</th>", html=True)
self.assertContains(response, "Snippet content object")
def test_usage_without_edit_permission_on_snippet(self):
# Create a user with basic admin backend access
user = self.create_user(
username="basicadmin", email="basicadmin@example.com", password="password"
)
admin_permission = Permission.objects.get(
content_type__app_label="wagtailadmin", codename="access_admin"
)
user.user_permissions.add(admin_permission)
self.login(username="basicadmin", password="password")
response = self.client.get(
reverse(
"wagtailsnippets_tests_advert:usage",
args=["1"],
)
)
self.assertEqual(response.status_code, 302)
self.assertRedirects(response, reverse("wagtailadmin_home"))
def test_usage_without_edit_permission_on_page(self):
# resave so that usage count gets updated
page = Page.objects.get(pk=2)
page.save()
# Create a user with edit access to snippets but not pages
user = self.create_user(
username="basicadmin", email="basicadmin@example.com", password="password"
)
admin_permission = Permission.objects.get(
content_type__app_label="wagtailadmin", codename="access_admin"
)
advert_permission = Permission.objects.get(
content_type__app_label="tests", codename="change_advert"
)
user.user_permissions.add(admin_permission)
user.user_permissions.add(advert_permission)
self.login(username="basicadmin", password="password")
response = self.client.get(
reverse(
"wagtailsnippets_tests_advert:usage",
args=["1"],
)
)
self.assertEqual(response.status_code, 200)
self.assertNotContains(response, "Welcome to the Wagtail test site!")
self.assertContains(response, "(Private page)")
self.assertContains(response, "<td>Page</td>", html=True)
self.assertContains(response, "<th>Field</th>", html=True)
self.assertNotContains(response, "<th>If you confirm deletion</th>", html=True)
self.assertContains(response, "<li>Advert</li>", html=True)
def test_usage_with_describe_on_delete_cascade(self):
# resave so that usage count gets updated
page = Page.objects.get(pk=2)
page.save()
response = self.client.get(
reverse("wagtailsnippets_tests_advert:usage", args=["1"])
+ "?describe_on_delete=1"
)
self.assertContains(response, "Welcome to the Wagtail test site!")
self.assertContains(response, "<td>Page</td>", html=True)
self.assertNotContains(response, "<th>Field</th>", html=True)
self.assertContains(response, "<th>If you confirm deletion</th>", html=True)
self.assertContains(response, "Advert")
self.assertContains(response, ": the advert placement will also be deleted")
def test_usage_with_describe_on_delete_set_null(self):
# resave so that usage count gets updated
page = EventPage.objects.first()
page.save()
self.assertEqual(page.feed_image.get_usage().count(), 1)
response = self.client.get(
reverse("wagtailimages:image_usage", args=[page.feed_image.id])
+ "?describe_on_delete=1"
)
self.assertContains(response, page.title)
self.assertContains(response, "<td>Event page</td>", html=True)
self.assertNotContains(response, "<th>Field</th>", html=True)
self.assertContains(response, "<th>If you confirm deletion</th>", html=True)
self.assertContains(response, "Feed image")
self.assertContains(response, ": will unset the reference")
def test_usage_with_describe_on_delete_gfk(self):
advert = Advert.objects.get(pk=1)
gfk_page = GenericSnippetPage(
title="Foobar Title",
snippet_content_object=advert,
)
Page.objects.get(pk=1).add_child(instance=gfk_page)
self.assertEqual(ReferenceIndex.get_grouped_references_to(advert).count(), 1)
response = self.client.get(
reverse("wagtailsnippets_tests_advert:usage", args=["1"])
+ "?describe_on_delete=1"
)
self.assertNotContains(response, "Welcome to the Wagtail test site!")
self.assertContains(response, "Foobar Title")
self.assertContains(response, "<td>Generic snippet page</td>", html=True)
self.assertNotContains(response, "<th>Field</th>", html=True)
self.assertContains(response, "<th>If you confirm deletion</th>", html=True)
self.assertContains(response, "Snippet content object")
self.assertContains(response, ": will unset the reference")
|
926fb0e2d80701bb10f57ac28098fb8eba0cd091
|
450201e3dac529d165a0bf96c0cd31b644d53323
|
/mistral/tests/unit/policies/test_workflows.py
|
2bfa094fc0e334c9a16fc598c94c3c812e40a22c
|
[
"Apache-2.0"
] |
permissive
|
openstack/mistral
|
c840b971c89a054f4953831480abc4d266df307e
|
7baff017d0cf01d19c44055ad201ca59131b9f94
|
refs/heads/master
| 2023-08-20T18:42:20.116390
| 2023-07-05T15:39:49
| 2023-07-05T15:39:49
| 13,968,255
| 214
| 117
|
Apache-2.0
| 2021-01-28T06:06:45
| 2013-10-29T20:46:49
|
Python
|
UTF-8
|
Python
| false
| false
| 9,299
|
py
|
test_workflows.py
|
# Copyright 2016 NEC Corporation. All rights reserved.
# Copyright 2018 OVH SAS. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
from unittest import mock
from mistral.db.v2 import api as db_api
from mistral.db.v2.sqlalchemy import models
from mistral.tests.unit.api import base
from mistral.tests.unit.mstrlfixtures import policy_fixtures
MOCK_DELETE = mock.MagicMock(return_value=None)
WF_DEFINITION = """
---
version: '2.0'
flow:
type: direct
input:
- param1
tasks:
task1:
action: std.echo output="Hi"
"""
WF_DB = models.WorkflowDefinition(
id='123e4567-e89b-12d3-a456-426655440000',
name='flow',
definition=WF_DEFINITION,
created_at=datetime.datetime(1970, 1, 1),
updated_at=datetime.datetime(1970, 1, 1),
spec={'input': ['param1']}
)
MOCK_WF = mock.MagicMock(return_value=WF_DB)
class TestWorkflowPolicy(base.APITest):
"""Test workflow related policies
Policies to test:
- workflows:create
- workflows:delete
- workflows:get
- workflows:list
- workflows:list:all_projects
- workflows:publicize (on POST & PUT)
- workflows:update
"""
def setUp(self):
self.policy = self.useFixture(policy_fixtures.PolicyFixture())
super(TestWorkflowPolicy, self).setUp()
@mock.patch.object(db_api, "create_workflow_definition")
def test_workflow_create_not_allowed(self, mock_obj):
self.policy.change_policy_definition(
{"workflows:create": "role:FAKE"}
)
resp = self.app.post(
'/v2/workflows',
WF_DEFINITION,
headers={'Content-Type': 'text/plain'},
expect_errors=True
)
self.assertEqual(403, resp.status_int)
@mock.patch.object(db_api, "create_workflow_definition")
def test_workflow_create_allowed(self, mock_obj):
spec_mock = mock_obj.return_value.get.return_value
spec_mock.get.return_value = {}
self.policy.change_policy_definition(
{"workflows:create": "role:FAKE or rule:admin_or_owner"}
)
resp = self.app.post(
'/v2/workflows',
WF_DEFINITION,
headers={'Content-Type': 'text/plain'},
expect_errors=True
)
self.assertEqual(201, resp.status_int)
@mock.patch.object(db_api, "create_workflow_definition")
def test_workflow_create_public_not_allowed(self, mock_obj):
self.policy.change_policy_definition({
"workflows:create": "role:FAKE or rule:admin_or_owner",
"workflows:publicize": "role:FAKE"
})
resp = self.app.post(
'/v2/workflows?scope=public',
WF_DEFINITION,
headers={'Content-Type': 'text/plain'},
expect_errors=True
)
self.assertEqual(403, resp.status_int)
@mock.patch.object(db_api, "create_workflow_definition")
def test_workflow_create_public_allowed(self, mock_obj):
spec_mock = mock_obj.return_value.get.return_value
spec_mock.get.return_value = {}
self.policy.change_policy_definition({
"workflows:create": "role:FAKE or rule:admin_or_owner",
"workflows:publicize": "role:FAKE or rule:admin_or_owner"
})
resp = self.app.post(
'/v2/workflows?scope=public',
WF_DEFINITION,
headers={'Content-Type': 'text/plain'},
expect_errors=True
)
self.assertEqual(201, resp.status_int)
@mock.patch.object(db_api, "delete_workflow_definition", MOCK_DELETE)
@mock.patch.object(db_api, "get_workflow_definition", MOCK_WF)
def test_workflow_delete_not_allowed(self):
self.policy.change_policy_definition(
{"workflows:delete": "role:FAKE"}
)
resp = self.app.delete(
'/v2/workflows/123',
expect_errors=True
)
self.assertEqual(403, resp.status_int)
@mock.patch.object(db_api, "delete_workflow_definition", MOCK_DELETE)
@mock.patch.object(db_api, "get_workflow_definition", MOCK_WF)
def test_workflow_delete_allowed(self):
self.policy.change_policy_definition(
{"workflows:delete": "role:FAKE or rule:admin_or_owner"}
)
resp = self.app.delete(
'/v2/workflows/123',
expect_errors=True
)
self.assertEqual(204, resp.status_int)
@mock.patch.object(db_api, "get_workflow_definition", MOCK_WF)
def test_workflow_get_not_allowed(self):
self.policy.change_policy_definition(
{"workflows:get": "role:FAKE"}
)
resp = self.app.get(
'/v2/workflows/123',
expect_errors=True
)
self.assertEqual(403, resp.status_int)
@mock.patch.object(db_api, "get_workflow_definition", MOCK_WF)
def test_workflow_get_allowed(self):
self.policy.change_policy_definition(
{"workflows:get": "role:FAKE or rule:admin_or_owner"}
)
resp = self.app.get(
'/v2/workflows/123',
expect_errors=True
)
self.assertEqual(200, resp.status_int)
def test_workflow_list_not_allowed(self):
self.policy.change_policy_definition(
{"workflows:list": "role:FAKE"}
)
resp = self.app.get(
'/v2/workflows',
expect_errors=True
)
self.assertEqual(403, resp.status_int)
def test_workflow_list_allowed(self):
self.policy.change_policy_definition(
{"workflows:list": "role:FAKE or rule:admin_or_owner"}
)
resp = self.app.get(
'/v2/workflows',
expect_errors=True
)
self.assertEqual(200, resp.status_int)
def test_workflow_list_all_not_allowed(self):
self.policy.change_policy_definition({
"workflows:list": "role:FAKE or rule:admin_or_owner",
"workflows:list:all_projects": "role:FAKE"
})
resp = self.app.get(
'/v2/workflows?all_projects=1',
expect_errors=True
)
self.assertEqual(403, resp.status_int)
def test_workflow_list_all_allowed(self):
self.policy.change_policy_definition({
"workflows:list": "role:FAKE or rule:admin_or_owner",
"workflows:list:all_projects": "role:FAKE or rule:admin_or_owner"
})
resp = self.app.get(
'/v2/workflows?all_projects=1',
expect_errors=True
)
self.assertEqual(200, resp.status_int)
@mock.patch.object(db_api, "update_workflow_definition")
def test_workflow_update_not_allowed(self, mock_obj):
self.policy.change_policy_definition(
{"workflows:update": "role:FAKE"}
)
resp = self.app.put(
'/v2/workflows',
WF_DEFINITION,
headers={'Content-Type': 'text/plain'},
expect_errors=True
)
self.assertEqual(403, resp.status_int)
@mock.patch.object(db_api, "update_workflow_definition")
def test_workflow_update_allowed(self, mock_obj):
spec_mock = mock_obj.return_value.get.return_value
spec_mock.get.return_value = {}
self.policy.change_policy_definition(
{"workflows:update": "role:FAKE or rule:admin_or_owner"}
)
resp = self.app.put(
'/v2/workflows',
WF_DEFINITION,
headers={'Content-Type': 'text/plain'},
expect_errors=True
)
self.assertEqual(200, resp.status_int)
@mock.patch.object(db_api, "update_workflow_definition")
def test_workflow_update_public_not_allowed(self, mock_obj):
self.policy.change_policy_definition({
"workflows:update": "role:FAKE or rule:admin_or_owner",
"workflows:publicize": "role:FAKE"
})
resp = self.app.put(
'/v2/workflows?scope=public',
WF_DEFINITION,
headers={'Content-Type': 'text/plain'},
expect_errors=True
)
self.assertEqual(403, resp.status_int)
@mock.patch.object(db_api, "update_workflow_definition")
def test_workflow_update_public_allowed(self, mock_obj):
spec_mock = mock_obj.return_value.get.return_value
spec_mock.get.return_value = {}
self.policy.change_policy_definition({
"workflows:update": "role:FAKE or rule:admin_or_owner",
"workflows:publicize": "role:FAKE or rule:admin_or_owner"
})
resp = self.app.put(
'/v2/workflows?scope=public',
WF_DEFINITION,
headers={'Content-Type': 'text/plain'},
expect_errors=True
)
self.assertEqual(200, resp.status_int)
|
4026c64fa491083476b0201e586713d7b0879994
|
67e5436d39a2aab5bfd2b9c5cff23ca934a85182
|
/scripts/tflite_profile.py
|
30e7fa54a8f3189d00ed073ab392ca5ee4f4f68f
|
[
"MIT"
] |
permissive
|
autorope/donkeycar
|
d4991aa69d8b1334c6331640e532d8d796b2ac25
|
9f91ad1aaff054522b24c2c1e727d1a111e266f4
|
refs/heads/main
| 2023-08-17T20:25:19.085591
| 2023-07-05T19:35:50
| 2023-07-05T19:35:50
| 76,095,264
| 1,861
| 921
|
MIT
| 2023-08-01T23:06:30
| 2016-12-10T06:35:09
|
Python
|
UTF-8
|
Python
| false
| false
| 1,174
|
py
|
tflite_profile.py
|
'''
Usage:
    tflite_profile.py --model="mymodel.tflite"
Note:
may require tensorflow > 1.11 or
pip install tf-nightly
'''
import os
from docopt import docopt
import tensorflow as tf
import numpy as np
from donkeycar.utils import FPSTimer
args = docopt(__doc__)
in_model = os.path.expanduser(args['--model'])
# Load TFLite model and allocate tensors.
interpreter = tf.lite.Interpreter(model_path=in_model)
interpreter.allocate_tensors()
# Get input and output tensors.
input_details = interpreter.get_input_details()
output_details = interpreter.get_output_details()
# Test model on random input data.
input_shape = input_details[0]['shape']
input_data = np.array(np.random.random_sample(input_shape), dtype=np.float32)
interpreter.set_tensor(input_details[0]['index'], input_data)
interpreter.invoke()
#sample output
for tensor in output_details:
output_data = interpreter.get_tensor(tensor['index'])
print(output_data)
#run in a loop to test performance.
print("test performance: hit CTRL+C to break")
timer = FPSTimer()
while True:
interpreter.set_tensor(input_details[0]['index'], input_data)
interpreter.invoke()
timer.on_frame()
|
230d8fd34884487d441e5812efee81417c2deb90
|
9d5ae8add5868d56af20f38f0bc1841d5ed3b4c2
|
/powerapi/database/file_db.py
|
f8b9f0d3f4336d3b5118c4b7bf6464d3604b7197
|
[
"BSD-3-Clause",
"Python-2.0",
"Apache-2.0"
] |
permissive
|
powerapi-ng/powerapi
|
5c6e30a48716f0c06449c489820991ed3ca2167d
|
be3f1852ad38894c2bc487bbb3a30508ed8d6b50
|
refs/heads/master
| 2023-08-16T12:14:16.940876
| 2023-08-16T11:25:51
| 2023-08-16T11:25:51
| 175,017,297
| 143
| 29
|
BSD-3-Clause
| 2023-09-12T12:00:45
| 2019-03-11T14:27:02
|
Python
|
UTF-8
|
Python
| false
| false
| 4,627
|
py
|
file_db.py
|
# Copyright (c) 2021, INRIA
# Copyright (c) 2021, University of Lille
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from datetime import datetime
import logging
from typing import List, Type
import os
import json
from powerapi.database.base_db import BaseDB, DBError, IterDB
from powerapi.report import Report
class FileBadDBError(DBError):
"""
    Error raised when the file cannot be found
"""
def __init__(self, filename):
DBError.__init__(self, "File error : " + filename + " not found")
class FileIterDB(IterDB):
"""
FileIterDB class
Class for iterating in a file
"""
    def __init__(self, db, report_type, stream_mode, filename):
        """Initialize the iterator over the given file."""
        IterDB.__init__(self, db, report_type, stream_mode)
        self.previousJson = ""
        self.filename = filename
def __iter__(self):
"""
        Create the iterator used to read the data
"""
return self
def __next__(self) -> Report:
"""
        Return the next report.
:raise: StopIteration in stream mode when no report was found.
"""
file_object = open(self.filename, "r")
json_str = file_object.read()
file_object.close()
if json_str is None:
raise StopIteration()
if json_str == self.previousJson:
logging.error("Error : Report did not change since last read")
raise StopIteration()
self.previousJson = json_str
return self.report_type.from_json(json.loads(json_str))
class FileDB(BaseDB):
"""
    FileDB class inherited from BaseDB
    Handles a file-based database for reading and writing.
"""
def __init__(self, report_type: Type[Report], filename: str):
"""
:param report_type: Type of the report handled by this database
:param filename: Name of the file containing the report
"""
BaseDB.__init__(self, report_type)
self.filename = filename
def connect(self):
"""
Override from BaseDB.
        It checks that the file exists
"""
if not os.path.exists(self.filename):
raise FileBadDBError(self.filename)
def iter(self, stream_mode: bool) -> FileIterDB:
"""
        Create the iterator used to read the data
"""
return FileIterDB(self, self.report_type, stream_mode, self.filename)
def save(self, report: Report):
"""
Override from BaseDB
:param report: Report to save
"""
file_object = open(self.filename, "w")
line = {
"sensor": report.sensor,
"target": report.target,
"timestamp": int(datetime.timestamp(report.timestamp) * 1000),
"power": report.power,
}
final_dict = {"PowerReport": [line]}
        file_object.truncate(0)
        # Write valid JSON so FileIterDB can parse it back with json.loads.
        file_object.write(json.dumps(final_dict))
file_object.close()
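        # With json.dumps above, the file now holds a single JSON document,
        # e.g. (illustrative values):
        # {"PowerReport": [{"sensor": "rapl", "target": "all",
        #                   "timestamp": 1612170000000, "power": 42.0}]}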
def save_many(self, reports: List[Report]):
"""
        Save a batch of reports.
:param reports: Batch of data.
"""
for report in reports:
self.save(report)
|
09bd8308549b6e923ef117d8904118673391d7d0
|
08cdf212eebebdff17e888522b0c6bc837fd0b3a
|
/probe/modules/antivirus/eicar/eicar.py
|
34860e910d9302c3cedaa495b6d2a7b9b3664ef7
|
[
"Apache-2.0"
] |
permissive
|
quarkslab/irma
|
1f5b32c17195f709d3bb9ff7f7199aad4c76dfd3
|
4e3e2c0fa82e352a1a7a7fd02381a4d84bed9f09
|
refs/heads/master
| 2023-03-03T15:37:51.480982
| 2022-10-19T19:30:27
| 2022-10-19T19:30:27
| 26,581,177
| 267
| 81
|
Apache-2.0
| 2023-03-01T23:09:39
| 2014-11-13T09:47:20
|
JavaScript
|
UTF-8
|
Python
| false
| false
| 2,022
|
py
|
eicar.py
|
#
# Copyright (c) 2013-2018 Quarkslab.
# This file is part of IRMA project.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License in the top-level directory
# of this distribution and at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# No part of the project, including this file, may be copied,
# modified, propagated, or distributed except according to the
# terms contained in the LICENSE file.
import hashlib
from modules.antivirus.base import Antivirus, AntivirusUnix
class Eicar(AntivirusUnix):
name = "Eicar Antivirus (Linux)"
# ==================================
# Constructor and destructor stuff
# ==================================
def __init__(self, *args, **kwargs):
        # call super class constructor
super().__init__(*args, **kwargs)
# scan tool variables
# WARNING: Don't forget to increment `virus_database_version` value
# when adding a new hash.
self.md5list = [
# eicar.com, eicar.com.txt
"44d88612fea8a8f36de82e1278abb02f",
# eicar_com.zip
"6ce6f415d8475545be5ba114f208b0ff",
# eicarcom2.zip
"e4968ef99266df7c9a1f0637d2389dab",
]
# ==========================================
    # Antivirus methods (need to be overridden)
# ==========================================
def get_version(self):
"""return the version of the antivirus"""
return "1.0.0"
def get_virus_database_version(self):
"""Return the Virus Database version"""
return "1"
def scan(self, paths, env=None):
md5 = hashlib.md5(paths.read_bytes()).hexdigest()
if md5 in self.md5list:
self.scan_results[paths] = "EICAR-STANDARD-ANTIVIRUS-TEST-FILE!"
return Antivirus.ScanResult.INFECTED
self.scan_results[paths] = ""
return Antivirus.ScanResult.CLEAN
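# Sanity check (illustrative): the canonical 68-byte EICAR test string hashes
# to the first entry in md5list above.
# >>> import hashlib
# >>> eicar = rb'X5O!P%@AP[4\PZX54(P^)7CC)7}$EICAR-STANDARD-ANTIVIRUS-TEST-FILE!$H+H*'
# >>> hashlib.md5(eicar).hexdigest()
# '44d88612fea8a8f36de82e1278abb02f'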
|
0264b599f84dde155aeb3e8a0ef08afa8ddbcfa4
|
23d34d346cb5861b768a444bd8841157a558278b
|
/src/tracer/evaluate.py
|
b8cc2bbcce826b46ef9c288e0e5b2588471db663
|
[
"MIT"
] |
permissive
|
IAIK/AEPIC
|
de804451eb9a0f15f0a45956c9af52fbf8a41ab6
|
0b2d42e94cc39118b724b59d25bd2944a35fc391
|
refs/heads/main
| 2023-07-20T07:08:11.758790
| 2022-08-09T19:32:30
| 2022-08-09T19:32:30
| 501,202,090
| 108
| 13
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,638
|
py
|
evaluate.py
|
import click
leakable_regs = ["rdi","r8","r9","r10","r11","r12","r13","r14","xmm0","xmm1","xmm6","xmm7","xmm8","xmm9"]
instruction_history = []
leakage_result = set()
def split_key(key,key_len):
key_parts = set()
#the full key or split
key_parts.add(key)
    # 8-byte and 16-byte blocks (block sizes are given in hex characters)
block_sizes = [16,32]
#special case for XMM:
for i in range(0,len(key),8):
key_parts.add(key[i:i+8] + "0" * (24))
for bs in block_sizes:
for i in range(0,len(key),bs):
key_parts.add(key[i:i+bs])
#pad until register size
return key_parts
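# Illustrative result for a hypothetical 16-hex-character key: split_key
# returns the full key plus each 8-hex-char half zero-padded to XMM width:
# split_key("0011223344556677", 16) ==
#   {"0011223344556677", "00112233" + "0"*24, "44556677" + "0"*24}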
def little_endian_hex_str(hex_str):
res = ""
for x in range(len(hex_str),-2, -2):
res += hex_str[x:x+2]
return res
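# Illustrative: little_endian_hex_str("aabbccdd") -> "ddccbbaa"
# (byte-wise reversal of the hex string).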
def parse_line(line_no,trace):
global instruction_history
current_instruction_and_reg_vals = {}
try:
current_instruction_and_reg_vals["instruction"] = int(trace[line_no].replace("\"","").split(" ")[0].split("\t")[0],16)
except:
current_instruction_and_reg_vals["instruction"] = trace[line_no]
line_no += 1
while "RDI=" not in trace[line_no]:
line_no += 1
    for val in range(line_no,line_no+len(leakable_regs)):
        l = trace[val].split("=")
        # Map the (stripped) register value to the list of registers holding it.
        reg_value = l[1].strip().replace("0x","")
        if reg_value in current_instruction_and_reg_vals:
            current_instruction_and_reg_vals[reg_value].append(l[0])
        else:
            current_instruction_and_reg_vals[reg_value] = [l[0]]
line_no += 1
#skip also last line
line_no += 1
instruction_history.append(current_instruction_and_reg_vals)
return line_no
# Track back through the instruction history until the executing code page
# changes; returns the number of instructions since that change, or -1.
def track_back(line_no,k_idx):
value = instruction_history[line_no]["instruction"]
if(type(value) is int):
page_number = (value & 0x7F000) >> 12
else:
#print("double check output in trace no address given at " + value)
return -1
#prevent duplicated leakage of same key
for vals in leakage_result:
split = vals.split(",")
if(value == 0x00007ffff744b282):
print(hex(page_number))
print(hex((int(split[0],16) & 0x7F000)>>12))
print("found")
if (page_number == (int(split[0],16) & 0x7F000)>>12):
if k_idx == split[-2]:
return -1
instructions_till_page_change = 1
start_offset = line_no - 1
while start_offset >= 0:
if(type(instruction_history[start_offset]["instruction"]) is not int):
start_offset -= 1
instructions_till_page_change += 1
continue
if page_number != ((instruction_history[start_offset]["instruction"] & 0X7F000) >> 12):
break
start_offset -= 1
instructions_till_page_change += 1
return instructions_till_page_change
def report_findings(key_splits,trace):
global instruction_history
global leakage_result
#skip first lines in trace until -----
for i in range(len(trace)):
if "---" in trace[i]:
break
    # always skip the separator line itself
    ctr = i + 1
insn_counter = 0
while(ctr + len(leakable_regs) + 2 <= len(trace)):
ctr = parse_line(ctr,trace)
# check if one of the registers contains parts of the secret
for k_idx,k_split in enumerate(key_splits):
if k_split in instruction_history[insn_counter]:
leak_regs = "".join(instruction_history[insn_counter][k_split])
diff = track_back(insn_counter,f'key_{k_idx}')
if diff != -1:
leakage_result.add(f"{hex(instruction_history[insn_counter]['instruction'] & 0xFFFFF)},{diff},{leak_regs.lower()},key_{k_idx},{k_split}")
#print(instruction_history[insn_counter])
insn_counter += 1
leakage_result = sorted(leakage_result)
for l in leakage_result:
print(l)
@click.command()
@click.argument('key_file',type=click.Path(exists=True))
@click.argument('trace_file', type=click.Path(exists=True))
@click.option('-endianess',default='little')
def main(trace_file,key_file,endianess):
    with open(key_file,"r") as f:
        key = f.readline().strip()  # strip the trailing newline
with open(trace_file,"r") as f:
trace = f.readlines()
if(endianess == 'little'):
key = little_endian_hex_str(key)
    # strip any "0x" prefix before splitting the key into parts; sort the set
    # so the key_<n> indices are stable across runs
    key_splits = sorted(split_key(key.replace("0x",""),len(key)))
    print("Key splits:")
    for knr,ks in enumerate(key_splits):
        print(f"{knr}:{ks}")
report_findings(key_splits,trace)
if __name__ == "__main__":
main()
|
52422d45734a3acc138d9167bffe9b2479921947
|
d6aae799e18e907fb413b715200c7832252a87e5
|
/language-modeling/BERT-finetuning/external/processors.py
|
1f0058b94562004a270cfa9b592ca67138cef729
|
[
"BSD-3-Clause",
"MIT",
"LicenseRef-scancode-proprietary-license",
"Apache-2.0",
"CC-BY-NC-4.0",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
sony/nnabla-examples
|
0d0bbd5df3028996e790bcf07248fdb0932697d1
|
41f71faa6efff7774a76bbd5af3198322a90a6ab
|
refs/heads/master
| 2023-09-04T03:45:54.023899
| 2023-08-22T03:31:21
| 2023-08-22T03:31:21
| 109,625,584
| 308
| 108
|
Apache-2.0
| 2023-08-22T03:31:23
| 2017-11-05T23:30:40
|
Python
|
UTF-8
|
Python
| false
| false
| 21,318
|
py
|
processors.py
|
# coding=utf-8
# Copyright 2018 The Google AI Language Team Authors and The HuggingFace Inc. team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import os
import csv
import sys
import copy
import json
logger = logging.getLogger(__name__)
def glue_convert_examples_to_features(
examples,
tokenizer,
max_length=512,
task=None,
label_list=None,
output_mode=None,
pad_token=0,
pad_token_segment_id=0,
mask_padding_with_zero=True,
):
if task is not None:
processor = glue_processors[task]()
if label_list is None:
label_list = processor.get_labels()
logger.info("Using label list %s for task %s" % (label_list, task))
if output_mode is None:
output_mode = glue_output_modes[task]
logger.info("Using output mode %s for task %s" %
(output_mode, task))
label_map = {label: i for i, label in enumerate(label_list)}
features = []
for (ex_index, example) in enumerate(examples):
if ex_index % 10000 == 0:
logger.info("Writing example %d/%d" % (ex_index, len(examples)))
inputs = tokenizer.encode_plus(
example.text_a, example.text_b, add_special_tokens=True, max_length=max_length,)
input_ids, token_type_ids = inputs["input_ids"], inputs["token_type_ids"]
# The mask has 1 for real tokens and 0 for padding tokens. Only real
# tokens are attended to.
attention_mask = [1 if mask_padding_with_zero else 0] * len(input_ids)
# Zero-pad up to the sequence length.
padding_length = max_length - len(input_ids)
input_ids = input_ids + ([pad_token] * padding_length)
attention_mask = attention_mask + \
([0 if mask_padding_with_zero else 1] * padding_length)
token_type_ids = token_type_ids + \
([pad_token_segment_id] * padding_length)
assert len(input_ids) == max_length, "Error with input length {} vs {}".format(
len(input_ids), max_length)
assert len(attention_mask) == max_length, "Error with input length {} vs {}".format(
len(attention_mask), max_length
)
assert len(token_type_ids) == max_length, "Error with input length {} vs {}".format(
len(token_type_ids), max_length
)
if output_mode == "classification":
label = label_map[example.label]
elif output_mode == "regression":
label = float(example.label)
else:
raise KeyError(output_mode)
if ex_index < 5:
logger.info("*** Example ***")
logger.info("guid: %s" % (example.guid))
logger.info("input_ids: %s" %
" ".join([str(x) for x in input_ids]))
logger.info("attention_mask: %s" %
" ".join([str(x) for x in attention_mask]))
logger.info("token_type_ids: %s" %
" ".join([str(x) for x in token_type_ids]))
logger.info("label: %s (id = %d)" % (example.label, label))
features.append(
InputFeatures(
input_ids=input_ids, attention_mask=attention_mask, token_type_ids=token_type_ids, label=label
)
)
return features
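# A minimal usage sketch (assumes a HuggingFace-style tokenizer exposing
# encode_plus, e.g. transformers.BertTokenizer; the data path is a placeholder):
#
#   from transformers import BertTokenizer
#   tokenizer = BertTokenizer.from_pretrained("bert-base-uncased")
#   examples = MrpcProcessor().get_dev_examples("/path/to/glue/MRPC")
#   features = glue_convert_examples_to_features(
#       examples, tokenizer, max_length=128, task="mrpc")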
class InputExample(object):
"""
A single training/test example for simple sequence classification.
Args:
guid: Unique id for the example.
text_a: string. The untokenized text of the first sequence. For single
sequence tasks, only this sequence must be specified.
text_b: (Optional) string. The untokenized text of the second sequence.
Only must be specified for sequence pair tasks.
label: (Optional) string. The label of the example. This should be
specified for train and dev examples, but not for test examples.
"""
def __init__(self, guid, text_a, text_b=None, label=None):
self.guid = guid
self.text_a = text_a
self.text_b = text_b
self.label = label
def __repr__(self):
return str(self.to_json_string())
def to_dict(self):
"""Serializes this instance to a Python dictionary."""
output = copy.deepcopy(self.__dict__)
return output
def to_json_string(self):
"""Serializes this instance to a JSON string."""
return json.dumps(self.to_dict(), indent=2, sort_keys=True) + "\n"
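# Illustrative construction (example values invented):
#   ex = InputExample(guid="train-1", text_a="The cat sat.",
#                     text_b="A cat was sitting.", label="1")
#   print(ex)  # pretty-printed JSON via to_json_string()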
class InputFeatures(object):
"""
A single set of features of data.
Args:
input_ids: Indices of input sequence tokens in the vocabulary.
attention_mask: Mask to avoid performing attention on padding token indices.
Mask values selected in ``[0, 1]``:
Usually ``1`` for tokens that are NOT MASKED, ``0`` for MASKED (padded) tokens.
token_type_ids: Segment token indices to indicate first and second portions of the inputs.
label: Label corresponding to the input
"""
def __init__(self, input_ids, attention_mask, token_type_ids, label):
self.input_ids = input_ids
self.attention_mask = attention_mask
self.token_type_ids = token_type_ids
self.label = label
def __repr__(self):
return str(self.to_json_string())
def to_dict(self):
"""Serializes this instance to a Python dictionary."""
output = copy.deepcopy(self.__dict__)
return output
def to_json_string(self):
"""Serializes this instance to a JSON string."""
return json.dumps(self.to_dict(), indent=2, sort_keys=True) + "\n"
class DataProcessor(object):
"""Base class for data converters for sequence classification data sets."""
def get_example_from_tensor_dict(self, tensor_dict):
"""Gets an example from a dict with tensorflow tensors
Args:
tensor_dict: Keys and values should match the corresponding Glue
tensorflow_dataset examples.
"""
raise NotImplementedError()
def get_train_examples(self, data_dir):
"""Gets a collection of `InputExample`s for the train set."""
raise NotImplementedError()
def get_dev_examples(self, data_dir):
"""Gets a collection of `InputExample`s for the dev set."""
raise NotImplementedError()
def get_labels(self):
"""Gets the list of labels for this data set."""
raise NotImplementedError()
def tfds_map(self, example):
"""Some tensorflow_datasets datasets are not formatted the same way the GLUE datasets are.
This method converts examples to the correct format."""
if len(self.get_labels()) > 1:
example.label = self.get_labels()[int(example.label)]
return example
@classmethod
def _read_tsv(cls, input_file, quotechar=None):
"""Reads a tab separated value file."""
with open(input_file, "r", encoding="utf-8-sig") as f:
reader = csv.reader(f, delimiter="\t", quotechar=quotechar)
lines = []
for line in reader:
if sys.version_info[0] == 2:
line = list(unicode(cell, 'utf-8') for cell in line)
lines.append(line)
return lines
class MrpcProcessor(DataProcessor):
"""Processor for the MRPC data set (GLUE version)."""
def get_example_from_tensor_dict(self, tensor_dict):
"""See base class."""
return InputExample(
tensor_dict["idx"].numpy(),
tensor_dict["sentence1"].numpy().decode("utf-8"),
tensor_dict["sentence2"].numpy().decode("utf-8"),
str(tensor_dict["label"].numpy()),
)
def get_train_examples(self, data_dir):
"""See base class."""
logger.info("LOOKING AT {}".format(
os.path.join(data_dir, "train.tsv")))
return self._create_examples(self._read_tsv(os.path.join(data_dir, "train.tsv")), "train")
def get_dev_examples(self, data_dir):
"""See base class."""
return self._create_examples(self._read_tsv(os.path.join(data_dir, "dev.tsv")), "dev")
def get_labels(self):
"""See base class."""
return ["0", "1"]
def _create_examples(self, lines, set_type):
"""Creates examples for the training and dev sets."""
examples = []
for (i, line) in enumerate(lines):
if i == 0:
continue
guid = "%s-%s" % (set_type, i)
text_a = line[3]
text_b = line[4]
label = line[0]
examples.append(InputExample(
guid=guid, text_a=text_a, text_b=text_b, label=label))
return examples
class MnliProcessor(DataProcessor):
"""Processor for the MultiNLI data set (GLUE version)."""
def get_example_from_tensor_dict(self, tensor_dict):
"""See base class."""
return InputExample(
tensor_dict["idx"].numpy(),
tensor_dict["premise"].numpy().decode("utf-8"),
tensor_dict["hypothesis"].numpy().decode("utf-8"),
str(tensor_dict["label"].numpy()),
)
def get_train_examples(self, data_dir):
"""See base class."""
return self._create_examples(self._read_tsv(os.path.join(data_dir, "train.tsv")), "train")
def get_dev_examples(self, data_dir):
"""See base class."""
return self._create_examples(self._read_tsv(os.path.join(data_dir, "dev_matched.tsv")), "dev_matched")
def get_test_examples(self, data_dir):
"""See base class."""
return self._create_examples(self._read_tsv(os.path.join(data_dir, "test_matched.tsv")), "test_matched")
def get_labels(self):
"""See base class."""
return ["contradiction", "entailment", "neutral"]
def _create_examples(self, lines, set_type):
"""Creates examples for the training and dev sets."""
examples = []
for (i, line) in enumerate(lines):
if i == 0:
continue
guid = "%s-%s" % (set_type, line[0])
text_a = line[8]
text_b = line[9]
label = line[-1]
examples.append(InputExample(
guid=guid, text_a=text_a, text_b=text_b, label=label))
return examples
class MnliMismatchedProcessor(MnliProcessor):
"""Processor for the MultiNLI Mismatched data set (GLUE version)."""
def get_dev_examples(self, data_dir):
"""See base class."""
return self._create_examples(self._read_tsv(os.path.join(data_dir, "dev_mismatched.tsv")), "dev_matched")
class ColaProcessor(DataProcessor):
"""Processor for the CoLA data set (GLUE version)."""
def get_example_from_tensor_dict(self, tensor_dict):
"""See base class."""
return InputExample(
tensor_dict["idx"].numpy(),
tensor_dict["sentence"].numpy().decode("utf-8"),
None,
str(tensor_dict["label"].numpy()),
)
def get_train_examples(self, data_dir):
"""See base class."""
return self._create_examples(self._read_tsv(os.path.join(data_dir, "train.tsv")), "train")
def get_dev_examples(self, data_dir):
"""See base class."""
return self._create_examples(self._read_tsv(os.path.join(data_dir, "dev.tsv")), "dev")
def get_labels(self):
"""See base class."""
return ["0", "1"]
def _create_examples(self, lines, set_type):
"""Creates examples for the training and dev sets."""
examples = []
for (i, line) in enumerate(lines):
guid = "%s-%s" % (set_type, i)
text_a = line[3]
label = line[1]
examples.append(InputExample(
guid=guid, text_a=text_a, text_b=None, label=label))
return examples
class Sst2Processor(DataProcessor):
"""Processor for the SST-2 data set (GLUE version)."""
def get_example_from_tensor_dict(self, tensor_dict):
"""See base class."""
return InputExample(
tensor_dict["idx"].numpy(),
tensor_dict["sentence"].numpy().decode("utf-8"),
None,
str(tensor_dict["label"].numpy()),
)
def get_train_examples(self, data_dir):
"""See base class."""
return self._create_examples(self._read_tsv(os.path.join(data_dir, "train.tsv")), "train")
def get_dev_examples(self, data_dir):
"""See base class."""
return self._create_examples(self._read_tsv(os.path.join(data_dir, "dev.tsv")), "dev")
def get_labels(self):
"""See base class."""
return ["0", "1"]
def _create_examples(self, lines, set_type):
"""Creates examples for the training and dev sets."""
examples = []
for (i, line) in enumerate(lines):
if i == 0:
continue
guid = "%s-%s" % (set_type, i)
text_a = line[0]
label = line[1]
examples.append(InputExample(
guid=guid, text_a=text_a, text_b=None, label=label))
return examples
class StsbProcessor(DataProcessor):
"""Processor for the STS-B data set (GLUE version)."""
def get_example_from_tensor_dict(self, tensor_dict):
"""See base class."""
return InputExample(
tensor_dict["idx"].numpy(),
tensor_dict["sentence1"].numpy().decode("utf-8"),
tensor_dict["sentence2"].numpy().decode("utf-8"),
str(tensor_dict["label"].numpy()),
)
def get_train_examples(self, data_dir):
"""See base class."""
return self._create_examples(self._read_tsv(os.path.join(data_dir, "train.tsv")), "train")
def get_dev_examples(self, data_dir):
"""See base class."""
return self._create_examples(self._read_tsv(os.path.join(data_dir, "dev.tsv")), "dev")
def get_labels(self):
"""See base class."""
return [None]
def _create_examples(self, lines, set_type):
"""Creates examples for the training and dev sets."""
examples = []
for (i, line) in enumerate(lines):
if i == 0:
continue
guid = "%s-%s" % (set_type, line[0])
text_a = line[7]
text_b = line[8]
label = line[-1]
examples.append(InputExample(
guid=guid, text_a=text_a, text_b=text_b, label=label))
return examples
class QqpProcessor(DataProcessor):
"""Processor for the QQP data set (GLUE version)."""
def get_example_from_tensor_dict(self, tensor_dict):
"""See base class."""
return InputExample(
tensor_dict["idx"].numpy(),
tensor_dict["question1"].numpy().decode("utf-8"),
tensor_dict["question2"].numpy().decode("utf-8"),
str(tensor_dict["label"].numpy()),
)
def get_train_examples(self, data_dir):
"""See base class."""
return self._create_examples(self._read_tsv(os.path.join(data_dir, "train.tsv")), "train")
def get_dev_examples(self, data_dir):
"""See base class."""
return self._create_examples(self._read_tsv(os.path.join(data_dir, "dev.tsv")), "dev")
def get_labels(self):
"""See base class."""
return ["0", "1"]
def _create_examples(self, lines, set_type):
"""Creates examples for the training and dev sets."""
examples = []
for (i, line) in enumerate(lines):
if i == 0:
continue
guid = "%s-%s" % (set_type, line[0])
try:
text_a = line[3]
text_b = line[4]
label = line[5]
except IndexError:
continue
examples.append(InputExample(
guid=guid, text_a=text_a, text_b=text_b, label=label))
return examples
class QnliProcessor(DataProcessor):
"""Processor for the QNLI data set (GLUE version)."""
def get_example_from_tensor_dict(self, tensor_dict):
"""See base class."""
return InputExample(
tensor_dict["idx"].numpy(),
tensor_dict["question"].numpy().decode("utf-8"),
tensor_dict["sentence"].numpy().decode("utf-8"),
str(tensor_dict["label"].numpy()),
)
def get_train_examples(self, data_dir):
"""See base class."""
return self._create_examples(self._read_tsv(os.path.join(data_dir, "train.tsv")), "train")
def get_dev_examples(self, data_dir):
"""See base class."""
return self._create_examples(self._read_tsv(os.path.join(data_dir, "dev.tsv")), "dev_matched")
def get_labels(self):
"""See base class."""
return ["entailment", "not_entailment"]
def _create_examples(self, lines, set_type):
"""Creates examples for the training and dev sets."""
examples = []
for (i, line) in enumerate(lines):
if i == 0:
continue
guid = "%s-%s" % (set_type, line[0])
text_a = line[1]
text_b = line[2]
label = line[-1]
examples.append(InputExample(
guid=guid, text_a=text_a, text_b=text_b, label=label))
return examples
class RteProcessor(DataProcessor):
"""Processor for the RTE data set (GLUE version)."""
def get_example_from_tensor_dict(self, tensor_dict):
"""See base class."""
return InputExample(
tensor_dict["idx"].numpy(),
tensor_dict["sentence1"].numpy().decode("utf-8"),
tensor_dict["sentence2"].numpy().decode("utf-8"),
str(tensor_dict["label"].numpy()),
)
def get_train_examples(self, data_dir):
"""See base class."""
return self._create_examples(self._read_tsv(os.path.join(data_dir, "train.tsv")), "train")
def get_dev_examples(self, data_dir):
"""See base class."""
return self._create_examples(self._read_tsv(os.path.join(data_dir, "dev.tsv")), "dev")
def get_labels(self):
"""See base class."""
return ["entailment", "not_entailment"]
def _create_examples(self, lines, set_type):
"""Creates examples for the training and dev sets."""
examples = []
for (i, line) in enumerate(lines):
if i == 0:
continue
guid = "%s-%s" % (set_type, line[0])
text_a = line[1]
text_b = line[2]
label = line[-1]
examples.append(InputExample(
guid=guid, text_a=text_a, text_b=text_b, label=label))
return examples
class WnliProcessor(DataProcessor):
"""Processor for the WNLI data set (GLUE version)."""
def get_example_from_tensor_dict(self, tensor_dict):
"""See base class."""
return InputExample(
tensor_dict["idx"].numpy(),
tensor_dict["sentence1"].numpy().decode("utf-8"),
tensor_dict["sentence2"].numpy().decode("utf-8"),
str(tensor_dict["label"].numpy()),
)
def get_train_examples(self, data_dir):
"""See base class."""
return self._create_examples(self._read_tsv(os.path.join(data_dir, "train.tsv")), "train")
def get_dev_examples(self, data_dir):
"""See base class."""
return self._create_examples(self._read_tsv(os.path.join(data_dir, "dev.tsv")), "dev")
def get_labels(self):
"""See base class."""
return ["0", "1"]
def _create_examples(self, lines, set_type):
"""Creates examples for the training and dev sets."""
examples = []
for (i, line) in enumerate(lines):
if i == 0:
continue
guid = "%s-%s" % (set_type, line[0])
text_a = line[1]
text_b = line[2]
label = line[-1]
examples.append(InputExample(
guid=guid, text_a=text_a, text_b=text_b, label=label))
return examples
glue_tasks_num_labels = {
"cola": 2,
"mnli": 3,
"mrpc": 2,
"sst-2": 2,
"sts-b": 1,
"qqp": 2,
"qnli": 2,
"rte": 2,
"wnli": 2,
}
glue_processors = {
"cola": ColaProcessor,
"mnli": MnliProcessor,
"mnli-mm": MnliMismatchedProcessor,
"mrpc": MrpcProcessor,
"sst-2": Sst2Processor,
"sts-b": StsbProcessor,
"qqp": QqpProcessor,
"qnli": QnliProcessor,
"rte": RteProcessor,
"wnli": WnliProcessor,
}
glue_output_modes = {
"cola": "classification",
"mnli": "classification",
"mnli-mm": "classification",
"mrpc": "classification",
"sst-2": "classification",
"sts-b": "regression",
"qqp": "classification",
"qnli": "classification",
"rte": "classification",
"wnli": "classification",
}
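# Example lookup (illustrative): resolving the processor and output mode for a task.
#   task = "sst-2"
#   processor = glue_processors[task]()       # -> Sst2Processor instance
#   labels = processor.get_labels()           # -> ["0", "1"]
#   output_mode = glue_output_modes[task]     # -> "classification"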
|
42cb1b202b5c2888e97b612f7faf10cb292051f4
|
a5a99f646e371b45974a6fb6ccc06b0a674818f2
|
/L1Trigger/RPCTrigger/python/RPCConeConfig_cff.py
|
922cb0d49ed728ae5829bd1c3876e71674d60b12
|
[
"Apache-2.0"
] |
permissive
|
cms-sw/cmssw
|
4ecd2c1105d59c66d385551230542c6615b9ab58
|
19c178740257eb48367778593da55dcad08b7a4f
|
refs/heads/master
| 2023-08-23T21:57:42.491143
| 2023-08-22T20:22:40
| 2023-08-22T20:22:40
| 10,969,551
| 1,006
| 3,696
|
Apache-2.0
| 2023-09-14T19:14:28
| 2013-06-26T14:09:07
|
C++
|
UTF-8
|
Python
| false
| false
| 322
|
py
|
RPCConeConfig_cff.py
|
import FWCore.ParameterSet.Config as cms
RPCConeBuilder = cms.ESProducer("RPCConeBuilder",
towerBeg = cms.int32(0),
towerEnd = cms.int32(16)
)
rpcconesrc = cms.ESSource("EmptyESSource",
recordName = cms.string('L1RPCConeBuilderRcd'),
iovIsRunNotTime = cms.bool(True),
firstValid = cms.vuint32(1)
)
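# Note (context, not part of the original config): the EmptyESSource declares an
# IOV for the L1RPCConeBuilderRcd record so the EventSetup can invoke the
# RPCConeBuilder ESProducer above; it supplies no payload of its own.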
|
5cebe0e6c8442c2c3d8490013aa25782ff2be2a9
|
42a7b596cd3d8700631c3b83f6fc16e536b35ec1
|
/ext_tests.py
|
1fd8d8fd344114136db9f73e05bb5a96d74c4b03
|
[
"BSD-3-Clause"
] |
permissive
|
stefankoegl/python-json-patch
|
1c7f9353b44c83f069101ff6e5cc1099225c4155
|
73c36f2c4776c008cd4e750f5240e06dfdc918fc
|
refs/heads/master
| 2023-07-06T00:06:52.183696
| 2023-06-28T04:01:39
| 2023-06-28T04:01:39
| 1,915,437
| 309
| 84
|
BSD-3-Clause
| 2023-06-28T04:01:40
| 2011-06-18T12:03:47
|
Python
|
UTF-8
|
Python
| false
| false
| 4,221
|
py
|
ext_tests.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# python-json-patch - An implementation of the JSON Patch format
# https://github.com/stefankoegl/python-json-patch
#
# Copyright (c) 2011 Stefan Kögl <stefan@skoegl.net>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. The name of the author may not be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
# NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
""" Script to run external tests, eg from
https://github.com/json-patch/json-patch-tests """
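# Usage sketch (file names are placeholders for json-patch-tests JSON files):
#   python ext_tests.py tests.json spec_tests.json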
import doctest
import unittest
import jsonpatch
import sys
class TestCaseTemplate(unittest.TestCase):
""" A generic test case for running external tests """
def _test(self, test):
        if 'doc' not in test or 'patch' not in test:
# incomplete
return
if test.get('disabled', False):
# test is disabled
return
if 'error' in test:
self.assertRaises(
(jsonpatch.JsonPatchException, jsonpatch.JsonPointerException),
jsonpatch.apply_patch, test['doc'], test['patch']
)
else:
try:
res = jsonpatch.apply_patch(test['doc'], test['patch'])
except jsonpatch.JsonPatchException as jpe:
raise Exception(test.get('comment', '')) from jpe
# if there is no 'expected' we only verify that applying the patch
# does not raise an exception
if 'expected' in test:
self.assertEqual(res, test['expected'], test.get('comment', ''))
def make_test_case(tests):
class MyTestCase(TestCaseTemplate):
pass
for n, test in enumerate(tests):
add_test_method(MyTestCase, 'test_%d' % n, test)
return MyTestCase
def add_test_method(cls, name, test):
setattr(cls, name, lambda self: self._test(test))
modules = ['jsonpatch']
coverage_modules = []
def get_suite(filenames):
suite = unittest.TestSuite()
for testfile in filenames:
with open(testfile) as f:
# we use the (potentially) patched version of json.load here
tests = jsonpatch.json.load(f)
cls = make_test_case(tests)
suite.addTest(unittest.makeSuite(cls))
return suite
suite = get_suite(sys.argv[1:])
for module in modules:
m = __import__(module, fromlist=[module])
coverage_modules.append(m)
suite.addTest(doctest.DocTestSuite(m))
runner = unittest.TextTestRunner(verbosity=1)
try:
import coverage
cov = coverage.Coverage()
except ImportError:
cov = None
if cov is not None:
cov.erase()
cov.start()
result = runner.run(suite)
if not result.wasSuccessful():
sys.exit(1)
if cov is not None:
cov.stop()
cov.report(coverage_modules)
cov.erase()
if cov is None:
sys.stderr.write("""
No coverage reporting done (Python module "coverage" is missing)
Please install the python-coverage package to get coverage reporting.
""")
sys.stderr.flush()
|
7b44e5e3be04ad47ebe75916a39ed132597278f5
|
1991ea64acf35d1c3b5b50f542066a5885c08e36
|
/config.py
|
b12e3c1d15951318c5ac86725320553d597e05c2
|
[
"MIT"
] |
permissive
|
hooram/ownphotos
|
5ac13607c64e4ae09ce1f5a5860b57bd2ce5d924
|
180de5746468ec64b725723e03657dc21317f3a9
|
refs/heads/dev
| 2022-12-11T02:37:45.594234
| 2021-01-11T15:32:23
| 2021-01-11T15:32:23
| 97,455,875
| 2,756
| 269
|
MIT
| 2023-01-23T15:29:26
| 2017-07-17T08:56:54
|
Jupyter Notebook
|
UTF-8
|
Python
| false
| false
| 898
|
py
|
config.py
|
# add paths of the directories where your photos live.
# it will not look for photos recursively, so you might want to add subdirectories as well.
import os
image_dirs = [
# '/home/hooram/with_exif/'
'/home/hooram/Nextcloud/InstantUpload',
'/home/hooram/Nextcloud/Camera Uploads',
# '/home/hooram/Nextcloud/Photos/May 26th',
# '/home/hooram/Nextcloud/kakaotalk',
# '/mnt/ext/facebook_hooram/photos',
# '/mnt/ext/pictures/DCIM',
# '/mnt/ext/pictures/unorganized',
# '/mnt/ext/pictures/Pictures',
# '/mnt/ext/pictures/Android Photo Backup',
# '/mnt/ext/pictures/Camera Uploads',
# '/mnt/ext/pictures/hiking with anton and andrea',
# '/mnt/ext/pictures/Aperture Library.aplibrary/Masters/',
# '/mnt/ext/pictures/Aperture Library.aplibrary/Masters',
# '/'
]
mapzen_api_key = 'take_care_of_me'
mapbox_api_key = os.environ['MAPBOX_API_KEY']
|
a19a072651756f3ff1455ad64106d06488893e3a
|
f9d564f1aa83eca45872dab7fbaa26dd48210d08
|
/huaweicloud-sdk-dgc/huaweicloudsdkdgc/v1/model/event.py
|
132904ae0b5a597b39000d9988010bc815d63a76
|
[
"Apache-2.0"
] |
permissive
|
huaweicloud/huaweicloud-sdk-python-v3
|
cde6d849ce5b1de05ac5ebfd6153f27803837d84
|
f69344c1dadb79067746ddf9bfde4bddc18d5ecf
|
refs/heads/master
| 2023-09-01T19:29:43.013318
| 2023-08-31T08:28:59
| 2023-08-31T08:28:59
| 262,207,814
| 103
| 44
|
NOASSERTION
| 2023-06-22T14:50:48
| 2020-05-08T02:28:43
|
Python
|
UTF-8
|
Python
| false
| false
| 6,046
|
py
|
event.py
|
# coding: utf-8
import six
from huaweicloudsdkcore.utils.http_utils import sanitize_for_serialization
class Event:
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
sensitive_list = []
openapi_types = {
'event_type': 'str',
'channel': 'str',
'fail_policy': 'str',
'concurrent': 'int',
'read_policy': 'str'
}
attribute_map = {
'event_type': 'eventType',
'channel': 'channel',
'fail_policy': 'failPolicy',
'concurrent': 'concurrent',
'read_policy': 'readPolicy'
}
def __init__(self, event_type=None, channel=None, fail_policy=None, concurrent=None, read_policy=None):
"""Event
The model defined in huaweicloud sdk
        :param event_type: Event type
        :type event_type: str
        :param channel: DIS channel name
        :type channel: str
        :param fail_policy: Policy for handling execution failures
        :type fail_policy: str
        :param concurrent: Scheduling concurrency
        :type concurrent: int
        :param read_policy: Read policy
        :type read_policy: str
"""
self._event_type = None
self._channel = None
self._fail_policy = None
self._concurrent = None
self._read_policy = None
self.discriminator = None
if event_type is not None:
self.event_type = event_type
if channel is not None:
self.channel = channel
if fail_policy is not None:
self.fail_policy = fail_policy
if concurrent is not None:
self.concurrent = concurrent
if read_policy is not None:
self.read_policy = read_policy
@property
def event_type(self):
"""Gets the event_type of this Event.
        Event type
:return: The event_type of this Event.
:rtype: str
"""
return self._event_type
@event_type.setter
def event_type(self, event_type):
"""Sets the event_type of this Event.
        Event type
:param event_type: The event_type of this Event.
:type event_type: str
"""
self._event_type = event_type
@property
def channel(self):
"""Gets the channel of this Event.
        DIS channel name
:return: The channel of this Event.
:rtype: str
"""
return self._channel
@channel.setter
def channel(self, channel):
"""Sets the channel of this Event.
        DIS channel name
:param channel: The channel of this Event.
:type channel: str
"""
self._channel = channel
@property
def fail_policy(self):
"""Gets the fail_policy of this Event.
        Policy for handling execution failures
:return: The fail_policy of this Event.
:rtype: str
"""
return self._fail_policy
@fail_policy.setter
def fail_policy(self, fail_policy):
"""Sets the fail_policy of this Event.
        Policy for handling execution failures
:param fail_policy: The fail_policy of this Event.
:type fail_policy: str
"""
self._fail_policy = fail_policy
@property
def concurrent(self):
"""Gets the concurrent of this Event.
        Scheduling concurrency
:return: The concurrent of this Event.
:rtype: int
"""
return self._concurrent
@concurrent.setter
def concurrent(self, concurrent):
"""Sets the concurrent of this Event.
        Scheduling concurrency
:param concurrent: The concurrent of this Event.
:type concurrent: int
"""
self._concurrent = concurrent
@property
def read_policy(self):
"""Gets the read_policy of this Event.
        Read policy
:return: The read_policy of this Event.
:rtype: str
"""
return self._read_policy
@read_policy.setter
def read_policy(self, read_policy):
"""Sets the read_policy of this Event.
        Read policy
:param read_policy: The read_policy of this Event.
:type read_policy: str
"""
self._read_policy = read_policy
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
if attr in self.sensitive_list:
result[attr] = "****"
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
import simplejson as json
if six.PY2:
import sys
reload(sys)
sys.setdefaultencoding("utf-8")
return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)
def __repr__(self):
"""For `print`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, Event):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
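# Illustrative usage (field values invented; valid values are defined by the
# DGC service API):
#   e = Event(event_type="KAFKA", channel="my-channel", concurrent=2)
#   print(e.to_dict())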
|
f4dc5f564d0cd76958d5e45334d928e9efd31426
|
aeef2494b283012ed619870c4275e7d015f4017a
|
/sdk/python/pulumi_gcp/networkmanagement/_inputs.py
|
b558d34c8b13d001a5c8ae1a9a67f58d907eff3f
|
[
"MPL-2.0",
"BSD-3-Clause",
"Apache-2.0"
] |
permissive
|
pulumi/pulumi-gcp
|
d4fd3f80c3df5290edaf33eb5eafe34e6699d0ff
|
7deea0a50a4ee5ab7bd722a83eca01707e298f85
|
refs/heads/master
| 2023-08-31T07:12:45.921522
| 2023-08-31T06:16:27
| 2023-08-31T06:16:27
| 97,485,806
| 160
| 63
|
Apache-2.0
| 2023-09-14T19:49:36
| 2017-07-17T14:28:37
|
Java
|
UTF-8
|
Python
| false
| false
| 9,967
|
py
|
_inputs.py
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import copy
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = [
'ConnectivityTestDestinationArgs',
'ConnectivityTestSourceArgs',
]
@pulumi.input_type
class ConnectivityTestDestinationArgs:
def __init__(__self__, *,
instance: Optional[pulumi.Input[str]] = None,
ip_address: Optional[pulumi.Input[str]] = None,
network: Optional[pulumi.Input[str]] = None,
port: Optional[pulumi.Input[int]] = None,
project_id: Optional[pulumi.Input[str]] = None):
"""
:param pulumi.Input[str] instance: A Compute Engine instance URI.
:param pulumi.Input[str] ip_address: The IP address of the endpoint, which can be an external or
internal IP. An IPv6 address is only allowed when the test's
destination is a global load balancer VIP.
:param pulumi.Input[str] network: A Compute Engine network URI.
:param pulumi.Input[int] port: The IP protocol port of the endpoint. Only applicable when
protocol is TCP or UDP.
:param pulumi.Input[str] project_id: Project ID where the endpoint is located. The Project ID can be
derived from the URI if you provide a VM instance or network URI.
The following are two cases where you must provide the project ID:
1. Only the IP address is specified, and the IP address is within
a GCP project. 2. When you are using Shared VPC and the IP address
that you provide is from the service project. In this case, the
network that the IP address resides in is defined in the host
project.
- - -
"""
if instance is not None:
pulumi.set(__self__, "instance", instance)
if ip_address is not None:
pulumi.set(__self__, "ip_address", ip_address)
if network is not None:
pulumi.set(__self__, "network", network)
if port is not None:
pulumi.set(__self__, "port", port)
if project_id is not None:
pulumi.set(__self__, "project_id", project_id)
@property
@pulumi.getter
def instance(self) -> Optional[pulumi.Input[str]]:
"""
A Compute Engine instance URI.
"""
return pulumi.get(self, "instance")
@instance.setter
def instance(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "instance", value)
@property
@pulumi.getter(name="ipAddress")
def ip_address(self) -> Optional[pulumi.Input[str]]:
"""
The IP address of the endpoint, which can be an external or
internal IP. An IPv6 address is only allowed when the test's
destination is a global load balancer VIP.
"""
return pulumi.get(self, "ip_address")
@ip_address.setter
def ip_address(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "ip_address", value)
@property
@pulumi.getter
def network(self) -> Optional[pulumi.Input[str]]:
"""
A Compute Engine network URI.
"""
return pulumi.get(self, "network")
@network.setter
def network(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "network", value)
@property
@pulumi.getter
def port(self) -> Optional[pulumi.Input[int]]:
"""
The IP protocol port of the endpoint. Only applicable when
protocol is TCP or UDP.
"""
return pulumi.get(self, "port")
@port.setter
def port(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "port", value)
@property
@pulumi.getter(name="projectId")
def project_id(self) -> Optional[pulumi.Input[str]]:
"""
Project ID where the endpoint is located. The Project ID can be
derived from the URI if you provide a VM instance or network URI.
The following are two cases where you must provide the project ID:
1. Only the IP address is specified, and the IP address is within
a GCP project. 2. When you are using Shared VPC and the IP address
that you provide is from the service project. In this case, the
network that the IP address resides in is defined in the host
project.
- - -
"""
return pulumi.get(self, "project_id")
@project_id.setter
def project_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "project_id", value)
@pulumi.input_type
class ConnectivityTestSourceArgs:
def __init__(__self__, *,
instance: Optional[pulumi.Input[str]] = None,
ip_address: Optional[pulumi.Input[str]] = None,
network: Optional[pulumi.Input[str]] = None,
network_type: Optional[pulumi.Input[str]] = None,
port: Optional[pulumi.Input[int]] = None,
project_id: Optional[pulumi.Input[str]] = None):
"""
:param pulumi.Input[str] instance: A Compute Engine instance URI.
:param pulumi.Input[str] ip_address: The IP address of the endpoint, which can be an external or
internal IP. An IPv6 address is only allowed when the test's
destination is a global load balancer VIP.
:param pulumi.Input[str] network: A Compute Engine network URI.
:param pulumi.Input[str] network_type: Type of the network where the endpoint is located.
Possible values are: `GCP_NETWORK`, `NON_GCP_NETWORK`.
:param pulumi.Input[int] port: The IP protocol port of the endpoint. Only applicable when
protocol is TCP or UDP.
:param pulumi.Input[str] project_id: Project ID where the endpoint is located. The Project ID can be
derived from the URI if you provide a VM instance or network URI.
The following are two cases where you must provide the project ID:
1. Only the IP address is specified, and the IP address is
within a GCP project.
2. When you are using Shared VPC and the IP address
that you provide is from the service project. In this case,
the network that the IP address resides in is defined in the
host project.
"""
if instance is not None:
pulumi.set(__self__, "instance", instance)
if ip_address is not None:
pulumi.set(__self__, "ip_address", ip_address)
if network is not None:
pulumi.set(__self__, "network", network)
if network_type is not None:
pulumi.set(__self__, "network_type", network_type)
if port is not None:
pulumi.set(__self__, "port", port)
if project_id is not None:
pulumi.set(__self__, "project_id", project_id)
@property
@pulumi.getter
def instance(self) -> Optional[pulumi.Input[str]]:
"""
A Compute Engine instance URI.
"""
return pulumi.get(self, "instance")
@instance.setter
def instance(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "instance", value)
@property
@pulumi.getter(name="ipAddress")
def ip_address(self) -> Optional[pulumi.Input[str]]:
"""
The IP address of the endpoint, which can be an external or
internal IP. An IPv6 address is only allowed when the test's
destination is a global load balancer VIP.
"""
return pulumi.get(self, "ip_address")
@ip_address.setter
def ip_address(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "ip_address", value)
@property
@pulumi.getter
def network(self) -> Optional[pulumi.Input[str]]:
"""
A Compute Engine network URI.
"""
return pulumi.get(self, "network")
@network.setter
def network(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "network", value)
@property
@pulumi.getter(name="networkType")
def network_type(self) -> Optional[pulumi.Input[str]]:
"""
Type of the network where the endpoint is located.
Possible values are: `GCP_NETWORK`, `NON_GCP_NETWORK`.
"""
return pulumi.get(self, "network_type")
@network_type.setter
def network_type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "network_type", value)
@property
@pulumi.getter
def port(self) -> Optional[pulumi.Input[int]]:
"""
The IP protocol port of the endpoint. Only applicable when
protocol is TCP or UDP.
"""
return pulumi.get(self, "port")
@port.setter
def port(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "port", value)
@property
@pulumi.getter(name="projectId")
def project_id(self) -> Optional[pulumi.Input[str]]:
"""
Project ID where the endpoint is located. The Project ID can be
derived from the URI if you provide a VM instance or network URI.
The following are two cases where you must provide the project ID:
1. Only the IP address is specified, and the IP address is
within a GCP project.
2. When you are using Shared VPC and the IP address
that you provide is from the service project. In this case,
the network that the IP address resides in is defined in the
host project.
"""
return pulumi.get(self, "project_id")
@project_id.setter
def project_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "project_id", value)
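# Illustrative construction (resource values invented):
#   dest = ConnectivityTestDestinationArgs(ip_address="10.0.0.5", port=443,
#                                          project_id="my-project")
#   src = ConnectivityTestSourceArgs(ip_address="10.0.0.2",
#                                    network_type="GCP_NETWORK")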
|
60ef030ecd42c442c453172656c351899eb193c6
|
99199db3f78a344e72b281c71c690518ae07375a
|
/octavia/db/models.py
|
5cda8b5b312cd4595225cbcb622bc1f399b030bf
|
[
"Apache-2.0"
] |
permissive
|
openstack/octavia
|
3faf2afe2ade5bd3978bb3a0558d2eeefc648ba2
|
0426285a41464a5015494584f109eed35a0d44db
|
refs/heads/master
| 2023-09-01T20:12:48.272344
| 2023-08-31T17:24:04
| 2023-08-31T17:24:04
| 21,018,188
| 147
| 180
|
Apache-2.0
| 2021-03-30T12:34:30
| 2014-06-19T22:47:19
|
Python
|
UTF-8
|
Python
| false
| false
| 36,160
|
py
|
models.py
|
# Copyright 2014 Rackspace
# Copyright 2016 Blue Box, an IBM Company
# Copyright 2017 Walmart Stores Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_db.sqlalchemy import models
import sqlalchemy as sa
from sqlalchemy.ext import orderinglist
from sqlalchemy import orm
from sqlalchemy.orm import Mapped
from sqlalchemy.orm import validates
from sqlalchemy.sql import func
from sqlalchemy_utils import ScalarListType
from octavia.api.v2.types import amphora
from octavia.api.v2.types import availability_zone_profile
from octavia.api.v2.types import availability_zones
from octavia.api.v2.types import flavor_profile
from octavia.api.v2.types import flavors
from octavia.api.v2.types import health_monitor
from octavia.api.v2.types import l7policy
from octavia.api.v2.types import l7rule
from octavia.api.v2.types import listener
from octavia.api.v2.types import load_balancer
from octavia.api.v2.types import member
from octavia.api.v2.types import pool
from octavia.api.v2.types import quotas
from octavia.common import constants
from octavia.common import data_models
from octavia.db import base_models
from octavia.i18n import _
class ProvisioningStatus(base_models.BASE, base_models.LookupTableMixin):
__tablename__ = "provisioning_status"
class OperatingStatus(base_models.BASE, base_models.LookupTableMixin):
__tablename__ = "operating_status"
class Protocol(base_models.BASE, base_models.LookupTableMixin):
__tablename__ = "protocol"
class Algorithm(base_models.BASE, base_models.LookupTableMixin):
__tablename__ = "algorithm"
class AmphoraRoles(base_models.BASE, base_models.LookupTableMixin):
__tablename__ = "amphora_roles"
class LBTopology(base_models.BASE, base_models.LookupTableMixin):
__tablename__ = "lb_topology"
class SessionPersistenceType(base_models.BASE, base_models.LookupTableMixin):
__tablename__ = "session_persistence_type"
class HealthMonitorType(base_models.BASE, base_models.LookupTableMixin):
__tablename__ = "health_monitor_type"
class VRRPAuthMethod(base_models.BASE, base_models.LookupTableMixin):
__tablename__ = "vrrp_auth_method"
class L7RuleType(base_models.BASE, base_models.LookupTableMixin):
__tablename__ = "l7rule_type"
class L7RuleCompareType(base_models.BASE, base_models.LookupTableMixin):
__tablename__ = "l7rule_compare_type"
class L7PolicyAction(base_models.BASE, base_models.LookupTableMixin):
__tablename__ = "l7policy_action"
class AmphoraBuildSlots(base_models.BASE):
__tablename__ = "amphora_build_slots"
id = sa.Column(sa.Integer(), primary_key=True)
slots_used = sa.Column(sa.Integer())
class AmphoraBuildRequest(base_models.BASE):
__tablename__ = "amphora_build_request"
amphora_id = sa.Column(sa.String(36), nullable=True, primary_key=True)
priority = sa.Column(sa.Integer())
created_time = sa.Column(sa.DateTime, default=func.now(), nullable=False)
status = sa.Column(sa.String(16), default='WAITING', nullable=False)
class SessionPersistence(base_models.BASE):
__data_model__ = data_models.SessionPersistence
__tablename__ = "session_persistence"
pool_id = sa.Column(
sa.String(36),
sa.ForeignKey("pool.id", name="fk_session_persistence_pool_id"),
nullable=False,
primary_key=True)
type = sa.Column(
sa.String(36),
sa.ForeignKey(
"session_persistence_type.name",
name="fk_session_persistence_session_persistence_type_name"),
nullable=False)
cookie_name = sa.Column(sa.String(255), nullable=True)
persistence_timeout = sa.Column(sa.Integer(), nullable=True)
persistence_granularity = sa.Column(sa.String(64), nullable=True)
pool = orm.relationship("Pool", uselist=False,
back_populates="session_persistence")
class ListenerStatistics(base_models.BASE):
__data_model__ = data_models.ListenerStatistics
__tablename__ = "listener_statistics"
listener_id = sa.Column(
sa.String(36),
primary_key=True,
nullable=False)
amphora_id = sa.Column(
sa.String(36),
primary_key=True,
nullable=False)
bytes_in = sa.Column(sa.BigInteger, nullable=False)
bytes_out = sa.Column(sa.BigInteger, nullable=False)
active_connections = sa.Column(sa.Integer, nullable=False)
total_connections = sa.Column(sa.BigInteger, nullable=False)
request_errors = sa.Column(sa.BigInteger, nullable=False)
@validates('bytes_in', 'bytes_out',
'active_connections', 'total_connections',
'request_errors')
def validate_non_negative_int(self, key, value):
if value < 0:
data = {'key': key, 'value': value}
raise ValueError(_('The %(key)s field can not have '
'negative value. '
'Current value is %(value)d.') % data)
return value
def __iadd__(self, other):
if isinstance(other, (ListenerStatistics,
data_models.ListenerStatistics)):
self.bytes_in += other.bytes_in
self.bytes_out += other.bytes_out
self.request_errors += other.request_errors
self.total_connections += other.total_connections
else:
raise TypeError( # noqa: O342
"unsupported operand type(s) for +=: '{0}' and '{1}'".format(
type(self), type(other)))
return self
class Member(base_models.BASE, base_models.IdMixin, base_models.ProjectMixin,
models.TimestampMixin, base_models.NameMixin,
base_models.TagMixin):
__data_model__ = data_models.Member
__tablename__ = "member"
__v2_wsme__ = member.MemberResponse
__table_args__ = (
sa.UniqueConstraint('pool_id', 'ip_address', 'protocol_port',
name='uq_member_pool_id_address_protocol_port'),
)
pool_id = sa.Column(
sa.String(36),
sa.ForeignKey("pool.id", name="fk_member_pool_id"),
nullable=False)
subnet_id = sa.Column(sa.String(36), nullable=True)
ip_address = sa.Column('ip_address', sa.String(64), nullable=False)
protocol_port = sa.Column(sa.Integer, nullable=False)
weight = sa.Column(sa.Integer, nullable=True)
backup = sa.Column(sa.Boolean(), nullable=False)
monitor_address = sa.Column(sa.String(64), nullable=True)
monitor_port = sa.Column(sa.Integer, nullable=True)
provisioning_status = sa.Column(
sa.String(16),
sa.ForeignKey("provisioning_status.name",
name="fk_member_provisioning_status_name"),
nullable=False)
operating_status = sa.Column(
sa.String(16),
sa.ForeignKey("operating_status.name",
name="fk_member_operating_status_name"),
nullable=False)
enabled = sa.Column(sa.Boolean(), nullable=False)
pool = orm.relationship("Pool", back_populates="members")
_tags = orm.relationship(
'Tags',
single_parent=True,
lazy='subquery',
cascade='all,delete-orphan',
primaryjoin='and_(foreign(Tags.resource_id)==Member.id)',
overlaps='_tags'
)
def __str__(self):
return (f"Member(id={self.id!r}, name={self.name!r}, "
f"project_id={self.project_id!r}, "
f"provisioning_status={self.provisioning_status!r}, "
f"ip_address={self.ip_address!r}, "
f"protocol_port={self.protocol_port!r}, "
f"operating_status={self.operating_status!r}, "
f"weight={self.weight!r})")
class HealthMonitor(base_models.BASE, base_models.IdMixin,
base_models.ProjectMixin, models.TimestampMixin,
base_models.NameMixin, base_models.TagMixin):
__data_model__ = data_models.HealthMonitor
__tablename__ = "health_monitor"
__v2_wsme__ = health_monitor.HealthMonitorResponse
__table_args__ = (
sa.UniqueConstraint('pool_id',
name='uq_health_monitor_pool'),
)
type = sa.Column(
sa.String(36),
sa.ForeignKey("health_monitor_type.name",
name="fk_health_monitor_health_monitor_type_name"),
nullable=False)
pool_id = sa.Column(
sa.String(36),
sa.ForeignKey("pool.id", name="fk_health_monitor_pool_id"),
nullable=False)
delay = sa.Column(sa.Integer, nullable=False)
timeout = sa.Column(sa.Integer, nullable=False)
fall_threshold = sa.Column(sa.Integer, nullable=False)
rise_threshold = sa.Column(sa.Integer, nullable=False)
http_method = sa.Column(sa.String(16), nullable=True)
url_path = sa.Column(sa.String(2048), nullable=True)
expected_codes = sa.Column(sa.String(64), nullable=True)
enabled = sa.Column(sa.Boolean, nullable=False)
pool = orm.relationship("Pool", uselist=False,
back_populates="health_monitor")
provisioning_status = sa.Column(
sa.String(16),
sa.ForeignKey("provisioning_status.name",
name="fk_health_monitor_provisioning_status_name"),
nullable=False)
operating_status = sa.Column(
sa.String(16),
sa.ForeignKey("operating_status.name",
name="fk_health_monitor_operating_status_name"),
nullable=False)
_tags = orm.relationship(
'Tags',
single_parent=True,
lazy='subquery',
cascade='all,delete-orphan',
primaryjoin='and_(foreign(Tags.resource_id)==HealthMonitor.id)',
overlaps='_tags'
)
http_version = sa.Column(sa.Float, nullable=True)
domain_name = sa.Column(sa.String(255), nullable=True)
def __str__(self):
return (f"HealthMonitor(id={self.id!r}, name={self.name!r}, "
f"project_id={self.project_id!r}, type={self.type!r}, "
f"enabled={self.enabled!r})")
class Pool(base_models.BASE, base_models.IdMixin, base_models.ProjectMixin,
models.TimestampMixin, base_models.NameMixin, base_models.TagMixin):
__data_model__ = data_models.Pool
__tablename__ = "pool"
__v2_wsme__ = pool.PoolResponse
description = sa.Column(sa.String(255), nullable=True)
protocol = sa.Column(
sa.String(16),
sa.ForeignKey("protocol.name", name="fk_pool_protocol_name"),
nullable=False)
lb_algorithm = sa.Column(
sa.String(255),
sa.ForeignKey("algorithm.name", name="fk_pool_algorithm_name"),
nullable=False)
provisioning_status = sa.Column(
sa.String(16),
sa.ForeignKey("provisioning_status.name",
name="fk_pool_provisioning_status_name"),
nullable=False)
operating_status = sa.Column(
sa.String(16),
sa.ForeignKey("operating_status.name",
name="fk_pool_operating_status_name"),
nullable=False)
enabled = sa.Column(sa.Boolean, nullable=False)
load_balancer_id = sa.Column(
sa.String(36),
sa.ForeignKey("load_balancer.id", name="fk_pool_load_balancer_id"),
nullable=True)
health_monitor = orm.relationship("HealthMonitor", uselist=False,
cascade="delete", back_populates="pool")
load_balancer = orm.relationship("LoadBalancer", uselist=False,
back_populates="pools")
members = orm.relationship("Member", uselist=True, cascade="delete",
back_populates="pool")
session_persistence = orm.relationship(
"SessionPersistence", uselist=False, cascade="delete",
back_populates="pool")
_default_listeners = orm.relationship("Listener", uselist=True,
back_populates="default_pool",
cascade_backrefs=False)
l7policies = orm.relationship("L7Policy", uselist=True,
back_populates="redirect_pool")
_tags = orm.relationship(
'Tags',
single_parent=True,
lazy='subquery',
cascade='all,delete-orphan',
primaryjoin='and_(foreign(Tags.resource_id)==Pool.id)',
overlaps='_tags'
)
tls_certificate_id = sa.Column(sa.String(255), nullable=True)
ca_tls_certificate_id = sa.Column(sa.String(255), nullable=True)
crl_container_id = sa.Column(sa.String(255), nullable=True)
tls_enabled = sa.Column(sa.Boolean, default=False, nullable=False)
tls_ciphers = sa.Column(sa.String(2048), nullable=True)
tls_versions = sa.Column(ScalarListType(), nullable=True)
alpn_protocols = sa.Column(ScalarListType(), nullable=True)
# This property should be a unique list of any listeners that reference
# this pool as its default_pool and any listeners referenced by enabled
# L7Policies with at least one l7rule which also reference this pool. The
# intent is that pool.listeners should be a unique list of listeners
# *actually* using the pool.
@property
def listeners(self):
_listeners = self._default_listeners[:]
_l_ids = [li.id for li in _listeners]
l7_listeners = [p.listener for p in self.l7policies
if len(p.l7rules) > 0 and p.enabled is True]
for li in l7_listeners:
if li.id not in _l_ids:
_listeners.append(li)
_l_ids.append(li.id)
return _listeners
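    # Illustrative consequence (hypothetical objects): a listener that uses this
    # pool both as its default_pool and through an enabled L7Policy with rules
    # appears in pool.listeners exactly once.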
def __str__(self):
return (f"Pool(id={self.id!r}, name={self.name!r}, "
f"project_id={self.project_id!r}, "
f"provisioning_status={self.provisioning_status!r}, "
f"protocol={self.protocol!r}, "
f"lb_algorithm={self.lb_algorithm!r}, "
f"enabled={self.enabled!r})")
class LoadBalancer(base_models.BASE, base_models.IdMixin,
base_models.ProjectMixin, models.TimestampMixin,
base_models.NameMixin, base_models.TagMixin):
__data_model__ = data_models.LoadBalancer
__tablename__ = "load_balancer"
__v2_wsme__ = load_balancer.LoadBalancerResponse
description = sa.Column(sa.String(255), nullable=True)
provisioning_status = sa.Column(
sa.String(16),
sa.ForeignKey("provisioning_status.name",
name="fk_load_balancer_provisioning_status_name"),
nullable=False)
operating_status = sa.Column(
sa.String(16),
sa.ForeignKey("operating_status.name",
name="fk_load_balancer_operating_status_name"),
nullable=False)
topology = sa.Column(
sa.String(36),
sa.ForeignKey("lb_topology.name", name="fk_lb_topology_name"),
nullable=True)
enabled = sa.Column(sa.Boolean, nullable=False)
amphorae = orm.relationship("Amphora", uselist=True,
back_populates="load_balancer")
server_group_id = sa.Column(sa.String(36), nullable=True)
provider = sa.Column(sa.String(64), nullable=True)
vip = orm.relationship('Vip', cascade='delete', uselist=False,
backref=orm.backref('load_balancer', uselist=False))
additional_vips = orm.relationship(
'AdditionalVip', cascade='delete', uselist=True,
backref=orm.backref('load_balancer', uselist=False))
pools = orm.relationship('Pool', cascade='delete', uselist=True,
back_populates="load_balancer")
listeners = orm.relationship('Listener', cascade='delete', uselist=True,
back_populates='load_balancer')
_tags = orm.relationship(
'Tags',
single_parent=True,
lazy='subquery',
cascade='all,delete-orphan',
primaryjoin='and_(foreign(Tags.resource_id)==LoadBalancer.id)',
overlaps='_tags'
)
flavor_id = sa.Column(
sa.String(36),
sa.ForeignKey("flavor.id", name="fk_lb_flavor_id"), nullable=True)
availability_zone = sa.Column(
sa.String(255),
sa.ForeignKey("availability_zone.name",
name="fk_load_balancer_availability_zone_name"),
nullable=True)
flavor: Mapped["Flavor"] = orm.relationship("Flavor")
def __str__(self):
return (f"LoadBalancer(id={self.id!r}, name={self.name!r}, "
f"project_id={self.project_id!r}, vip={self.vip!r}, "
f"provisioning_status={self.provisioning_status!r}, "
f"operating_status={self.operating_status!r}, "
f"provider={self.provider!r})")
class VRRPGroup(base_models.BASE):
__data_model__ = data_models.VRRPGroup
__tablename__ = "vrrp_group"
load_balancer_id = sa.Column(
sa.String(36),
sa.ForeignKey("load_balancer.id",
name="fk_vrrp_group_load_balancer_id"),
nullable=False, primary_key=True)
vrrp_group_name = sa.Column(sa.String(36), nullable=True)
vrrp_auth_type = sa.Column(sa.String(16), sa.ForeignKey(
"vrrp_auth_method.name",
name="fk_load_balancer_vrrp_auth_method_name"))
vrrp_auth_pass = sa.Column(sa.String(36), nullable=True)
advert_int = sa.Column(sa.Integer(), nullable=True)
load_balancer = orm.relationship("LoadBalancer", uselist=False,
backref=orm.backref("vrrp_group",
uselist=False,
cascade="delete"))
class Vip(base_models.BASE):
__data_model__ = data_models.Vip
__tablename__ = "vip"
load_balancer_id = sa.Column(
sa.String(36),
sa.ForeignKey("load_balancer.id",
name="fk_vip_load_balancer_id"),
nullable=False, primary_key=True)
ip_address = sa.Column(sa.String(64), nullable=True)
port_id = sa.Column(sa.String(36), nullable=True)
subnet_id = sa.Column(sa.String(36), nullable=True)
network_id = sa.Column(sa.String(36), nullable=True)
qos_policy_id = sa.Column(sa.String(36), nullable=True)
octavia_owned = sa.Column(sa.Boolean(), nullable=True)
class AdditionalVip(base_models.BASE):
__data_model__ = data_models.AdditionalVip
__tablename__ = "additional_vip"
__table_args__ = (
sa.PrimaryKeyConstraint('load_balancer_id', 'subnet_id',
name='pk_add_vip_load_balancer_subnet'),
)
load_balancer_id = sa.Column(
sa.String(36),
sa.ForeignKey("load_balancer.id",
name="fk_add_vip_load_balancer_id"),
nullable=False, index=True)
ip_address = sa.Column(sa.String(64), nullable=True)
port_id = sa.Column(sa.String(36), nullable=True)
subnet_id = sa.Column(sa.String(36), nullable=True)
network_id = sa.Column(sa.String(36), nullable=True)
class Listener(base_models.BASE, base_models.IdMixin,
base_models.ProjectMixin, models.TimestampMixin,
base_models.NameMixin, base_models.TagMixin):
__data_model__ = data_models.Listener
__tablename__ = "listener"
__v2_wsme__ = listener.ListenerResponse
__table_args__ = (
sa.UniqueConstraint(
'load_balancer_id', 'protocol', 'protocol_port',
name='uq_listener_load_balancer_id_protocol_port'),
)
description = sa.Column(sa.String(255), nullable=True)
protocol = sa.Column(
sa.String(16),
sa.ForeignKey("protocol.name", name="fk_listener_protocol_name"),
nullable=False)
protocol_port = sa.Column(sa.Integer(), nullable=False)
connection_limit = sa.Column(sa.Integer, nullable=True)
load_balancer_id = sa.Column(
sa.String(36),
sa.ForeignKey("load_balancer.id", name="fk_listener_load_balancer_id"),
nullable=True)
tls_certificate_id = sa.Column(sa.String(255), nullable=True)
default_pool_id = sa.Column(
sa.String(36),
sa.ForeignKey("pool.id", name="fk_listener_pool_id"),
nullable=True)
provisioning_status = sa.Column(
sa.String(16),
sa.ForeignKey("provisioning_status.name",
name="fk_listener_provisioning_status_name"),
nullable=False)
operating_status = sa.Column(
sa.String(16),
sa.ForeignKey("operating_status.name",
name="fk_listener_operating_status_name"),
nullable=False)
enabled = sa.Column(sa.Boolean(), nullable=False)
load_balancer = orm.relationship("LoadBalancer", uselist=False,
back_populates="listeners")
default_pool = orm.relationship("Pool", uselist=False,
back_populates="_default_listeners",
cascade_backrefs=False)
sni_containers = orm.relationship(
'SNI', cascade='all,delete-orphan',
uselist=True, backref=orm.backref('listener', uselist=False))
l7policies = orm.relationship(
'L7Policy', uselist=True, order_by='L7Policy.position',
collection_class=orderinglist.ordering_list('position', count_from=1),
cascade='delete', back_populates='listener')
peer_port = sa.Column(sa.Integer(), nullable=True)
insert_headers = sa.Column(sa.PickleType())
timeout_client_data = sa.Column(sa.Integer, nullable=True)
timeout_member_connect = sa.Column(sa.Integer, nullable=True)
timeout_member_data = sa.Column(sa.Integer, nullable=True)
timeout_tcp_inspect = sa.Column(sa.Integer, nullable=True)
client_ca_tls_certificate_id = sa.Column(sa.String(255), nullable=True)
client_authentication = sa.Column(
sa.String(10),
sa.ForeignKey("client_authentication_mode.name",
name="fk_listener_client_authentication_mode_name"),
nullable=False, default=constants.CLIENT_AUTH_NONE)
client_crl_container_id = sa.Column(sa.String(255), nullable=True)
tls_ciphers = sa.Column(sa.String(2048), nullable=True)
tls_versions = sa.Column(ScalarListType(), nullable=True)
alpn_protocols = sa.Column(ScalarListType(), nullable=True)
hsts_max_age = sa.Column(sa.Integer, nullable=True)
hsts_include_subdomains = sa.Column(sa.Boolean, nullable=True)
hsts_preload = sa.Column(sa.Boolean, nullable=True)
_tags = orm.relationship(
'Tags',
single_parent=True,
lazy='subquery',
cascade='all,delete-orphan',
primaryjoin='and_(foreign(Tags.resource_id)==Listener.id)',
overlaps='_tags'
)
    # This property should be a unique list of the default_pool plus any
    # pools referenced by enabled L7Policies (each having at least one rule)
    # on this listener. The intent is that listener.pools is the unique list
    # of pools this listener is *actually* using.
@property
def pools(self):
_pools = []
_p_ids = []
if self.default_pool:
_pools.append(self.default_pool)
_p_ids.append(self.default_pool.id)
l7_pools = [p.redirect_pool for p in self.l7policies
if p.redirect_pool is not None and len(p.l7rules) > 0 and
p.enabled is True]
for p in l7_pools:
if p.id not in _p_ids:
_pools.append(p)
_p_ids.append(p.id)
return _pools
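    # Illustrative sketch (not part of the model): if default_pool is P1 and
    # two enabled l7policies (each with at least one rule) redirect to P1 and
    # P2 respectively, `listener.pools` evaluates to [P1, P2] -- P1 appears
    # only once even though it is referenced twice.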
allowed_cidrs = orm.relationship(
'ListenerCidr', cascade='all,delete-orphan',
uselist=True, backref=orm.backref('listener', uselist=False))
def __str__(self):
return (f"Listener(id={self.id!r}, "
f"default_pool={self.default_pool!r}, name={self.name!r}, "
f"project_id={self.project_id!r}, protocol={self.protocol!r}, "
f"protocol_port={self.protocol_port!r}, "
f"enabled={self.enabled!r})")
class SNI(base_models.BASE):
__data_model__ = data_models.SNI
__tablename__ = "sni"
__table_args__ = (
sa.PrimaryKeyConstraint('listener_id', 'tls_container_id'),
)
listener_id = sa.Column(
sa.String(36),
sa.ForeignKey("listener.id", name="fk_sni_listener_id"),
nullable=False)
tls_container_id = sa.Column(sa.String(128), nullable=False)
position = sa.Column(sa.Integer(), nullable=True)
class Amphora(base_models.BASE, base_models.IdMixin, models.TimestampMixin):
__data_model__ = data_models.Amphora
__tablename__ = "amphora"
__v2_wsme__ = amphora.AmphoraResponse
load_balancer_id = sa.Column(
sa.String(36), sa.ForeignKey("load_balancer.id",
name="fk_amphora_load_balancer_id"),
nullable=True)
compute_id = sa.Column(sa.String(36), nullable=True)
lb_network_ip = sa.Column(sa.String(64), nullable=True)
vrrp_ip = sa.Column(sa.String(64), nullable=True)
ha_ip = sa.Column(sa.String(64), nullable=True)
vrrp_port_id = sa.Column(sa.String(36), nullable=True)
ha_port_id = sa.Column(sa.String(36), nullable=True)
cert_expiration = sa.Column(sa.DateTime(timezone=True), default=None,
nullable=True)
cert_busy = sa.Column(sa.Boolean(), default=False, nullable=False)
role = sa.Column(
sa.String(36),
sa.ForeignKey("amphora_roles.name", name="fk_amphora_roles_name"),
nullable=True)
status = sa.Column(
sa.String(36),
sa.ForeignKey("provisioning_status.name",
name="fk_container_provisioning_status_name"))
vrrp_interface = sa.Column(sa.String(16), nullable=True)
vrrp_id = sa.Column(sa.Integer(), nullable=True)
vrrp_priority = sa.Column(sa.Integer(), nullable=True)
cached_zone = sa.Column(sa.String(255), nullable=True)
image_id = sa.Column(sa.String(36), nullable=True)
load_balancer = orm.relationship("LoadBalancer", uselist=False,
back_populates='amphorae')
compute_flavor = sa.Column(sa.String(255), nullable=True)
def __str__(self):
return (f"Amphora(id={self.id!r}, load_balancer_id="
f"{self.load_balancer_id!r}, status={self.status!r}, "
f"role={self.role!r}, lb_network_ip={self.lb_network_ip!r}, "
f"vrrp_ip={self.vrrp_ip!r})")
class AmphoraHealth(base_models.BASE):
__data_model__ = data_models.AmphoraHealth
__tablename__ = "amphora_health"
amphora_id = sa.Column(
sa.String(36), nullable=False, primary_key=True)
last_update = sa.Column(sa.DateTime, default=func.now(),
nullable=False)
busy = sa.Column(sa.Boolean(), default=False, nullable=False)
class L7Rule(base_models.BASE, base_models.IdMixin, base_models.ProjectMixin,
models.TimestampMixin, base_models.TagMixin):
__data_model__ = data_models.L7Rule
__tablename__ = "l7rule"
__v2_wsme__ = l7rule.L7RuleResponse
l7policy_id = sa.Column(
sa.String(36),
sa.ForeignKey("l7policy.id", name="fk_l7rule_l7policy_id"),
nullable=False)
type = sa.Column(
sa.String(36),
sa.ForeignKey(
"l7rule_type.name",
name="fk_l7rule_l7rule_type_name"),
nullable=False)
compare_type = sa.Column(
sa.String(36),
sa.ForeignKey(
"l7rule_compare_type.name",
name="fk_l7rule_l7rule_compare_type_name"),
nullable=False)
key = sa.Column(sa.String(255), nullable=True)
value = sa.Column(sa.String(255), nullable=False)
invert = sa.Column(sa.Boolean(), default=False, nullable=False)
enabled = sa.Column(sa.Boolean(), nullable=False)
l7policy = orm.relationship("L7Policy", uselist=False,
back_populates="l7rules")
provisioning_status = sa.Column(
sa.String(16),
sa.ForeignKey("provisioning_status.name",
name="fk_l7rule_provisioning_status_name"),
nullable=False)
operating_status = sa.Column(
sa.String(16),
sa.ForeignKey("operating_status.name",
name="fk_l7rule_operating_status_name"),
nullable=False)
_tags = orm.relationship(
'Tags',
single_parent=True,
lazy='subquery',
cascade='all,delete-orphan',
primaryjoin='and_(foreign(Tags.resource_id)==L7Rule.id)',
overlaps='_tags'
)
def __str__(self):
return (f"L7Rule(id={self.id!r}, project_id={self.project_id!r}, "
f"provisioning_status={self.provisioning_status!r}, "
f"type={self.type!r}, key={self.key!r}, value={self.value!r}, "
f"invert={self.invert!r}, enabled={self.enabled!r})")
class L7Policy(base_models.BASE, base_models.IdMixin, base_models.ProjectMixin,
models.TimestampMixin, base_models.NameMixin,
base_models.TagMixin):
__data_model__ = data_models.L7Policy
__tablename__ = "l7policy"
__v2_wsme__ = l7policy.L7PolicyResponse
description = sa.Column(sa.String(255), nullable=True)
listener_id = sa.Column(
sa.String(36),
sa.ForeignKey("listener.id", name="fk_l7policy_listener_id"),
nullable=False)
action = sa.Column(
sa.String(36),
sa.ForeignKey(
"l7policy_action.name",
name="fk_l7policy_l7policy_action_name"),
nullable=False)
redirect_pool_id = sa.Column(
sa.String(36),
sa.ForeignKey("pool.id", name="fk_l7policy_pool_id"),
nullable=True)
redirect_url = sa.Column(
sa.String(255),
nullable=True)
redirect_prefix = sa.Column(
sa.String(255),
nullable=True)
redirect_http_code = sa.Column(sa.Integer, nullable=True)
position = sa.Column(sa.Integer, nullable=False)
enabled = sa.Column(sa.Boolean(), nullable=False)
listener = orm.relationship("Listener", uselist=False,
back_populates="l7policies")
redirect_pool = orm.relationship("Pool", uselist=False,
back_populates="l7policies")
l7rules = orm.relationship("L7Rule", uselist=True, cascade="delete",
back_populates="l7policy")
provisioning_status = sa.Column(
sa.String(16),
sa.ForeignKey("provisioning_status.name",
name="fk_l7policy_provisioning_status_name"),
nullable=False)
operating_status = sa.Column(
sa.String(16),
sa.ForeignKey("operating_status.name",
name="fk_l7policy_operating_status_name"),
nullable=False)
_tags = orm.relationship(
'Tags',
single_parent=True,
lazy='subquery',
cascade='all,delete-orphan',
primaryjoin='and_(foreign(Tags.resource_id)==L7Policy.id)',
overlaps='_tags'
)
def __str__(self):
return (f"L7Policy(id={self.id!r}, name={self.name!r}, "
f"project_id={self.project_id!r}, "
f"provisioning_status={self.provisioning_status!r}, "
f"action={self.action!r}, position={self.position!r}, "
f"enabled={self.enabled!r})")
class Quotas(base_models.BASE):
__data_model__ = data_models.Quotas
__tablename__ = "quotas"
__v2_wsme__ = quotas.QuotaAllBase
project_id = sa.Column(sa.String(36), primary_key=True)
health_monitor = sa.Column(sa.Integer(), nullable=True)
listener = sa.Column(sa.Integer(), nullable=True)
load_balancer = sa.Column(sa.Integer(), nullable=True)
member = sa.Column(sa.Integer(), nullable=True)
pool = sa.Column(sa.Integer(), nullable=True)
l7policy = sa.Column(sa.Integer(), nullable=True)
l7rule = sa.Column(sa.Integer(), nullable=True)
in_use_health_monitor = sa.Column(sa.Integer(), nullable=True)
in_use_listener = sa.Column(sa.Integer(), nullable=True)
in_use_load_balancer = sa.Column(sa.Integer(), nullable=True)
in_use_member = sa.Column(sa.Integer(), nullable=True)
in_use_pool = sa.Column(sa.Integer(), nullable=True)
in_use_l7policy = sa.Column(sa.Integer(), nullable=True)
in_use_l7rule = sa.Column(sa.Integer(), nullable=True)
def __str__(self):
return (f"Quotas(project_id={self.project_id!r}, "
f"load_balancer={self.load_balancer!r}, "
f"listener={self.listener!r}, pool={self.pool!r}, "
f"health_monitor={self.health_monitor!r}, "
f"member={self.member!r}, l7policy={self.l7policy!r}, "
f"l7rule={self.l7rule!r})")
class FlavorProfile(base_models.BASE, base_models.IdMixin,
base_models.NameMixin):
__data_model__ = data_models.FlavorProfile
__tablename__ = "flavor_profile"
__v2_wsme__ = flavor_profile.FlavorProfileResponse
provider_name = sa.Column(sa.String(255), nullable=False)
flavor_data = sa.Column(sa.String(4096), nullable=False)
class Flavor(base_models.BASE,
base_models.IdMixin,
base_models.NameMixin):
__data_model__ = data_models.Flavor
__tablename__ = "flavor"
__v2_wsme__ = flavors.FlavorResponse
__table_args__ = (
sa.UniqueConstraint('name',
name='uq_flavor_name'),
)
description = sa.Column(sa.String(255), nullable=True)
enabled = sa.Column(sa.Boolean(), nullable=False)
flavor_profile_id = sa.Column(
sa.String(36),
sa.ForeignKey("flavor_profile.id",
name="fk_flavor_flavor_profile_id"),
nullable=False)
flavor_profile: Mapped["FlavorProfile"] = orm.relationship("FlavorProfile")
class AvailabilityZoneProfile(base_models.BASE, base_models.IdMixin,
base_models.NameMixin):
__data_model__ = data_models.AvailabilityZoneProfile
__tablename__ = "availability_zone_profile"
__v2_wsme__ = availability_zone_profile.AvailabilityZoneProfileResponse
provider_name = sa.Column(sa.String(255), nullable=False)
availability_zone_data = sa.Column(sa.String(4096), nullable=False)
class AvailabilityZone(base_models.BASE,
base_models.NameMixin):
__data_model__ = data_models.AvailabilityZone
__tablename__ = "availability_zone"
__v2_wsme__ = availability_zones.AvailabilityZoneResponse
__table_args__ = (
sa.PrimaryKeyConstraint('name'),
)
description = sa.Column(sa.String(255), nullable=True)
enabled = sa.Column(sa.Boolean(), nullable=False)
availability_zone_profile_id = sa.Column(
sa.String(36),
sa.ForeignKey("availability_zone_profile.id",
name="fk_az_az_profile_id"),
nullable=False)
availability_zone_profile: Mapped["AvailabilityZoneProfile"] = (
orm.relationship("AvailabilityZoneProfile"))
class ClientAuthenticationMode(base_models.BASE):
__tablename__ = "client_authentication_mode"
name = sa.Column(sa.String(10), primary_key=True, nullable=False)
class ListenerCidr(base_models.BASE):
__data_model__ = data_models.ListenerCidr
__tablename__ = "listener_cidr"
__table_args__ = (
sa.PrimaryKeyConstraint('listener_id', 'cidr'),
)
listener_id = sa.Column(
sa.String(36),
sa.ForeignKey("listener.id", name="fk_listener_cidr_listener_id"),
nullable=False)
cidr = sa.Column(sa.String(64), nullable=False)
---
blob_id: 13427a7a83d31177a02b61cf47dc1afdb5e2cba7
directory_id: 0032d988541e85c47b5034c20ecf88220dde5a95
path: /openbook_posts/migrations/0067_merge_20191202_1731.py
content_id: a92e948e492991591f7763db378feed9ac4d2b58
detected_licenses: ["MIT", "LicenseRef-scancode-unknown-license-reference"]
license_type: permissive
repo_name: OkunaOrg/okuna-api
snapshot_id: eabd37fef9d2be59b590ed8d72bee084ac377997
revision_id: f87d8e80d2f182c01dbce68155ded0078ee707e4
branch_name: refs/heads/master
visit_date: 2022-02-04T21:31:10.577601
revision_date: 2021-12-28T18:20:39
committer_date: 2021-12-28T18:20:39
github_id: 151052951
star_events_count: 185
fork_events_count: 92
gha_license_id: MIT
gha_event_created_at: 2022-01-13T01:00:40
gha_created_at: 2018-10-01T07:44:46
gha_language: Python
src_encoding: UTF-8
language: Python
is_vendor: false
is_generated: false
length_bytes: 281
extension: py
filename: 0067_merge_20191202_1731.py
content:
# Generated by Django 2.2.5 on 2019-12-02 16:31
from django.db import migrations
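# This is a merge migration: it carries no operations of its own and exists
# only to join the two divergent migration branches listed in `dependencies`
# back into a single history.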
class Migration(migrations.Migration):
dependencies = [
('openbook_posts', '0066_auto_20191129_1630'),
('openbook_posts', '0066_trendingpost'),
]
operations = [
]
---
blob_id: 046a88ea42cccdff1c17544d26610304167da6ba
directory_id: 6f36df6219f8e50374068bb4b3e1a5387c7a2f34
path: /fipy/solvers/trilinos/preconditioners/multilevelSolverSmootherPreconditioner.py
content_id: 956e7b1b665a0992190c3e7229cf930270ec1e6a
detected_licenses: ["NIST-PD"]
license_type: permissive
repo_name: usnistgov/fipy
snapshot_id: 0a3db715fea452ae710eea3999d9cd42dfe76fe7
revision_id: fdc17193bc293da7511be9021e6d4766757e1966
branch_name: refs/heads/master
visit_date: 2023-08-31T21:59:36.611448
revision_date: 2023-06-27T16:28:58
committer_date: 2023-06-27T16:28:58
github_id: 23316495
star_events_count: 444
fork_events_count: 171
gha_license_id: NOASSERTION
gha_event_created_at: 2023-09-06T19:21:19
gha_created_at: 2014-08-25T14:27:58
gha_language: Python
src_encoding: UTF-8
language: Python
is_vendor: false
is_generated: false
length_bytes: 1157
extension: py
filename: multilevelSolverSmootherPreconditioner.py
content:
from __future__ import unicode_literals
__docformat__ = 'restructuredtext'
from PyTrilinos import ML
from fipy.solvers.trilinos.preconditioners.preconditioner import Preconditioner
__all__ = ["MultilevelSolverSmootherPreconditioner"]
from future.utils import text_to_native_str
__all__ = [text_to_native_str(n) for n in __all__]
class MultilevelSolverSmootherPreconditioner(Preconditioner):
"""
Multilevel preconditioner for Trilinos solvers using Aztec solvers
as smoothers.
"""
def __init__(self, levels=10):
"""
Initialize the multilevel preconditioner
- `levels`: Maximum number of levels
"""
self.levels = levels
def _applyToSolver(self, solver, matrix):
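        # Skip preconditioning for (near-)diagonal matrices: with no more
        # nonzeros than rows, there is nothing for a multilevel hierarchy
        # to coarsen.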
if matrix.NumGlobalNonzeros() <= matrix.NumGlobalRows():
return
self.Prec = ML.MultiLevelPreconditioner(matrix, False)
        self.Prec.SetParameterList({
            text_to_native_str("output"): 0,
            text_to_native_str("smoother: type"): text_to_native_str("Aztec"),
            text_to_native_str("smoother: Aztec as solver"): True,
        })
self.Prec.ComputePreconditioner()
solver.SetPrecOperator(self.Prec)
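# Minimal usage sketch (illustrative only; ``solver`` and ``matrix`` are
# hypothetical stand-ins for the AztecOO-style solver object and Epetra
# matrix that FiPy normally supplies):
#
#     precon = MultilevelSolverSmootherPreconditioner(levels=5)
#     precon._applyToSolver(solver, matrix)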
---
blob_id: cf60109a3b7d4831d054e1ed12d6c0a8e10ecadb
directory_id: d110546d747d7e3865ce5742d5fca09f404623c0
path: /tests/unit/states/test_x509.py
content_id: a1a5fa8b0669142cadb2d5081780cf6a8109a3e2
detected_licenses: ["Apache-2.0", "MIT", "BSD-2-Clause"]
license_type: permissive
repo_name: saltstack/salt
snapshot_id: 354fc86a7be1f69514b3dd3b2edb9e6f66844c1d
revision_id: 1ef90cbdc7203f97775edb7666db86a41eb9fc15
branch_name: refs/heads/master
visit_date: 2023-07-19T20:56:20.210556
revision_date: 2023-06-29T23:12:28
committer_date: 2023-07-19T11:47:47
github_id: 1390248
star_events_count: 11026
fork_events_count: 6296
gha_license_id: Apache-2.0
gha_event_created_at: 2023-09-14T20:45:37
gha_created_at: 2011-02-20T20:16:56
gha_language: Python
src_encoding: UTF-8
language: Python
is_vendor: false
is_generated: false
length_bytes: 5708
extension: py
filename: test_x509.py
content:
import os
import tempfile
import pytest
import salt.utils.files
from salt.modules import x509 as x509_mod
from salt.states import x509
from tests.support.helpers import dedent
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.mock import MagicMock
from tests.support.unit import TestCase
try:
import M2Crypto # pylint: disable=unused-import
HAS_M2CRYPTO = True
except ImportError:
HAS_M2CRYPTO = False
class X509TestCase(TestCase, LoaderModuleMockMixin):
def setup_loader_modules(self):
return {x509: {"__opts__": {"fips_mode": False}}}
def test_certificate_info_matches(self):
cert_info = {"MD5 Finger Print": ""}
required_info = {"MD5 Finger Print": ""}
ret = x509._certificate_info_matches(cert_info, required_info)
assert ret == (True, [])
class X509FipsTestCase(TestCase, LoaderModuleMockMixin):
def setup_loader_modules(self):
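        # LoaderModuleMockMixin injects this mapping into the x509 module's
        # __opts__/__salt__/__states__ dunders for the duration of each test.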
self.file_managed_mock = MagicMock()
self.file_managed_mock.return_value = {"changes": True}
return {
x509: {
"__opts__": {"fips_mode": True},
"__salt__": {
"x509.get_pem_entry": x509_mod.get_pem_entry,
"x509.get_private_key_size": x509_mod.get_private_key_size,
},
"__states__": {"file.managed": self.file_managed_mock},
}
}
    @pytest.mark.skipif(
        not HAS_M2CRYPTO, reason="M2Crypto is unavailable"
    )
    def test_private_key_fips_mode(self):
        """
        Ensure that, in FIPS mode, private_key_managed re-writes only the
        private key even when the existing file also contains a certificate.
        """
test_key = dedent(
"""
-----BEGIN PRIVATE KEY-----
MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQDx7UUt0cPi5G51
FmRBhAZtZb5x6P0PFn7GwnLmSvLNhCsOcD/vq/yBUU62pknzmOjM5pgWTACZj66O
GOFmWBg06v8+sqUbaF9PZ/CxQD5MogmQhYNgfyuopHWWgLXMub2hlP+15qGohkzg
Tr/mXp2ohVAb6ihjqb7XV9MiZaLNVX+XWauM8SlhqXMiJyDUopEGbg2pLsHhIMcX
1twLlyDja+uDbCMZ4jDNB+wsWxTaPRH8KizfEabB1Cl+fdyD10pSAYcodOAnlkW+
G/DX2hwb/ZAM9B1SXTfZ3gzaIIbqXBEHcZQNXxHL7szBTVcOmfx/RPfOeRncytb9
Mit7RIBxAgMBAAECggEAD4Pi+uRIBsYVm2a7OURpURzEUPPbPtt3d/HCgqht1+ZR
CJUEVK+X+wcm4Cnb9kZpL7LeMBfhtfdz/2LzGagurT4g7nlwg0h3TFVjJ0ryc+G0
cVNOsKKXPzKE5AkPH7kNw04V9Cl9Vpx+U6hZQEHzJHqgP5oNyw540cCtJriT700b
fG1q3PYKWSkDwTiUnJTnVLybFIKQC6urxTeT2UWeiBadfDY7DjI4USfrQsqCfGMO
uWPpOOJk5RIvw5r0Of2xvxV76xCgzVTkgtWjBRMTEkfeYx3019xKlQtAKoGbZd1T
tF8DH0cDlnri4nG7YT8yYvx/LWVDg12E6IZij1X60QKBgQD7062JuQGEmTd99a7o
5TcgWYqDrmE9AEgJZjN+gnEPcsxc50HJaTQgrkV0oKrS8CMbStIymbzMKWifOj7o
gvQBVecydq1AaXePt3gRe8vBFiP4cHjFcSegs9FDvdfJR36iHOBIgEp4DWvV1vgs
+z82LT6Qy5kxUQvnlQ4dEaGdrQKBgQD175f0H4enRJ3BoWTrqt2mTAwtJcPsKmGD
9YfFB3H4+O2rEKP4FpBO5PFXZ0dqm54hDtxqyC/lSXorFCUjVUBero1ECGt6Gnn2
TSnhgk0VMxvhnc0GReIt4K9WrXGd0CMUDwIhFHj8kbb1X1yqt2hwyw7b10xFVStl
sGv8CQB+VQKBgAF9q1VZZwzl61Ivli2CzeS/IvbMnX7C9ao4lK13EDxLLbKPG/CZ
UtmurnKWUOyWx15t/viVuGxtAlWO/rhZriAj5g6CbVwoQ7DyIR/ZX8dw3h2mbNCe
buGgruh7wz9J0RIcoadMOySiz7SgZS++/QzRD8HDstB77loco8zAQfixAoGBALDO
FbTocfKbjrpkmBQg24YxR9OxQb/n3AEtI/VO2+38r4h6xxaUyhwd1S9bzWjkBXOI
poeR8XTqNQ0BR422PTeUT3SohPPcUu/yG3jG3zmta47wjjPDS85lqEgtGvA0cPN7
srErcatJ6nlOnGUSw9/K65y6lFeH2lIZ2hfwNM2dAoGBAMVCc7i3AIhLp6UrGzjP
0ioCHCakpxfl8s1VQp55lhHlP6Y4RfqT72Zq7ScteTrisIAQyI9ot0gsuct2miQM
nyDdyKGki/MPduGTzzWlBA7GZEHnxbAILH8kWJ7eE/Nh7zdF1CRts8utEO9L9S+0
lVz1j/xGOseQk4cVos681Wpw
-----END PRIVATE KEY-----"""
)
test_cert = dedent(
"""
-----BEGIN CERTIFICATE-----
MIIDazCCAlOgAwIBAgIUAfATs1aodKw11Varh55msmU0LoowDQYJKoZIhvcNAQEL
BQAwRTELMAkGA1UEBhMCQVUxEzARBgNVBAgMClNvbWUtU3RhdGUxITAfBgNVBAoM
GEludGVybmV0IFdpZGdpdHMgUHR5IEx0ZDAeFw0yMTAzMjMwMTM4MzdaFw0yMjAz
MjMwMTM4MzdaMEUxCzAJBgNVBAYTAkFVMRMwEQYDVQQIDApTb21lLVN0YXRlMSEw
HwYDVQQKDBhJbnRlcm5ldCBXaWRnaXRzIFB0eSBMdGQwggEiMA0GCSqGSIb3DQEB
AQUAA4IBDwAwggEKAoIBAQDx7UUt0cPi5G51FmRBhAZtZb5x6P0PFn7GwnLmSvLN
hCsOcD/vq/yBUU62pknzmOjM5pgWTACZj66OGOFmWBg06v8+sqUbaF9PZ/CxQD5M
ogmQhYNgfyuopHWWgLXMub2hlP+15qGohkzgTr/mXp2ohVAb6ihjqb7XV9MiZaLN
VX+XWauM8SlhqXMiJyDUopEGbg2pLsHhIMcX1twLlyDja+uDbCMZ4jDNB+wsWxTa
PRH8KizfEabB1Cl+fdyD10pSAYcodOAnlkW+G/DX2hwb/ZAM9B1SXTfZ3gzaIIbq
XBEHcZQNXxHL7szBTVcOmfx/RPfOeRncytb9Mit7RIBxAgMBAAGjUzBRMB0GA1Ud
DgQWBBT0qx4KLhozvuWAI9peT/utYV9FITAfBgNVHSMEGDAWgBT0qx4KLhozvuWA
I9peT/utYV9FITAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3DQEBCwUAA4IBAQDx
tWvUyGfEwJJg1ViBa10nVhg5sEc6KfqcPzc2GvatIGJlAbc3b1AYu6677X04SQNA
dYRA2jcZcKudy6eolPJow6SDpkt66IqciZYdbQE5h9elnwpZxmXlJTQTB9cEwyIk
2em5DKpdIwa9rRDlbAjAVJb3015MtpKRu2gsQ7gl5X2U3K+DFsWtBPf+0xiJqUiq
rd7tiHF/zylubSyH/LVONJZ6+/oT/qzJfxfpvygtQWcu4b2zzME/FPenMA8W6Rau
ZYycQfpMVc7KwqF5/wfjnkmfxoFKnkD7WQ3qFCJ/xULk/Yn1hrvNeIr+khX3qKQi
Y3BMA5m+J+PZrNy7EQSa
-----END CERTIFICATE-----
"""
)
        fp, name = tempfile.mkstemp()
        os.close(fp)  # close the raw descriptor; the file is reopened below
with salt.utils.files.fopen(name, "w") as fd:
fd.write(test_key)
fd.write(test_cert)
ret = x509.private_key_managed(name)
self.file_managed_mock.assert_called_once()
assert (
self.file_managed_mock.call_args.kwargs["contents"].strip()
== test_key.strip()
)
def test_certificate_info_matches(self):
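        # In FIPS mode MD5 digests are unavailable, so an MD5 fingerprint
        # requirement can never be satisfied and is reported as a mismatch.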
cert_info = {"MD5 Finger Print": ""}
required_info = {"MD5 Finger Print": ""}
ret = x509._certificate_info_matches(cert_info, required_info)
assert ret == (False, ["MD5 Finger Print"])
---
blob_id: ed4ab12cb0eae8ad4d5a7b7400e6206514bc388f
directory_id: dc66c0cf24c5f741b6288f3d73e6436752432dad
path: /Backend/item/models.py
content_id: 92a0ee2dcb712a456e5cf1f7ed4c22b9525cdcf4
detected_licenses: ["MIT"]
license_type: permissive
repo_name: Linzecong/LPOJ
snapshot_id: bdcf79f5e751419c0cff14c818512d5509fd849f
revision_id: 2f7ce194f1d510d8d006c2a35fdaa272f20ef1f3
branch_name: refs/heads/master
visit_date: 2023-01-20T15:42:12.865669
revision_date: 2022-01-05T15:05:55
committer_date: 2022-01-05T15:05:55
github_id: 164289923
star_events_count: 236
fork_events_count: 79
gha_license_id: MIT
gha_event_created_at: 2023-01-14T00:55:14
gha_created_at: 2019-01-06T08:41:36
gha_language: Vue
src_encoding: UTF-8
language: Python
is_vendor: false
is_generated: false
length_bytes: 698
extension: py
filename: models.py
content:
# -*- coding: utf-8 -*-
from django.db import models
from user.models import User
import datetime
class Item(models.Model):
user = models.ForeignKey(User, on_delete=models.CASCADE)
    title = models.TextField(default="待补题", null=False)  # "待补题" = "problem to upsolve"
    detail = models.TextField(null=True, blank=True)
    createtime = models.DateTimeField(null=False, default=datetime.datetime.now)
    deadtime = models.DateTimeField(null=True, default=datetime.datetime.now)
    status = models.IntegerField(default=1)  # 1 = new, 0 = completed
    tag = models.IntegerField(default=1)  # 1 = sticky note, 2 = to-do item, 3 = problem to upsolve
objects = models.Manager()
def __str__(self):
return self.title
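# Minimal usage sketch (illustrative only; assumes an existing `User` row):
#
#     user = User.objects.first()
#     Item.objects.create(user=user, title="Upsolve problem 1234", tag=3)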
---
blob_id: cde567a9c081b5761ee7567b5a20b72c27ed5280
directory_id: 952dc66c61966f099756cdb6c2d13b40352f63cc
path: /zerver/tests/test_subs.py
content_id: b6afee0d87d9cbd39c28b6200b79b72edde350a6
detected_licenses: ["Apache-2.0", "LicenseRef-scancode-free-unknown"]
license_type: permissive
repo_name: zulip/zulip
snapshot_id: 5ae6aad35fd9f72996c0a2a9cdd674400966ebf6
revision_id: 965a25d91b6ee2db54038f5df855215fa25146b0
branch_name: refs/heads/main
visit_date: 2023-08-28T23:43:00.971110
revision_date: 2023-08-28T16:47:09
committer_date: 2023-08-28T19:33:02
github_id: 43160685
star_events_count: 20239
fork_events_count: 8996
gha_license_id: Apache-2.0
gha_event_created_at: 2023-09-14T20:57:47
gha_created_at: 2015-09-25T16:37:25
gha_language: Python
src_encoding: UTF-8
language: Python
is_vendor: false
is_generated: false
length_bytes: 277583
extension: py
filename: test_subs.py
content:
import hashlib
import random
from datetime import timedelta
from io import StringIO
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Sequence, Set, Union
from unittest import mock
import orjson
from django.conf import settings
from django.core.exceptions import ValidationError
from django.http import HttpResponse
from django.utils.timezone import now as timezone_now
from zerver.actions.bots import do_change_bot_owner
from zerver.actions.create_realm import do_create_realm
from zerver.actions.default_streams import (
do_add_default_stream,
do_add_streams_to_default_stream_group,
do_change_default_stream_group_description,
do_change_default_stream_group_name,
do_create_default_stream_group,
do_remove_default_stream,
do_remove_default_stream_group,
do_remove_streams_from_default_stream_group,
lookup_default_stream_groups,
)
from zerver.actions.realm_settings import do_change_realm_plan_type, do_set_realm_property
from zerver.actions.streams import (
bulk_add_subscriptions,
bulk_remove_subscriptions,
deactivated_streams_by_old_name,
do_change_stream_group_based_setting,
do_change_stream_post_policy,
do_deactivate_stream,
do_reactivate_stream,
)
from zerver.actions.user_groups import add_subgroups_to_user_group, check_add_user_group
from zerver.actions.users import do_change_user_role, do_deactivate_user
from zerver.lib.default_streams import (
get_default_stream_ids_for_realm,
get_default_streams_for_realm_as_dicts,
)
from zerver.lib.exceptions import JsonableError
from zerver.lib.message import UnreadStreamInfo, aggregate_unread_data, get_raw_unread_data
from zerver.lib.response import json_success
from zerver.lib.stream_color import STREAM_ASSIGNMENT_COLORS, pick_colors
from zerver.lib.stream_subscription import (
get_active_subscriptions_for_stream_id,
num_subscribers_for_stream_id,
subscriber_ids_with_stream_history_access,
)
from zerver.lib.stream_traffic import (
get_average_weekly_stream_traffic,
round_to_2_significant_digits,
)
from zerver.lib.streams import (
StreamDict,
access_stream_by_id,
access_stream_by_name,
can_access_stream_history,
can_access_stream_user_ids,
create_stream_if_needed,
create_streams_if_needed,
do_get_streams,
ensure_stream,
filter_stream_authorization,
list_to_streams,
)
from zerver.lib.subscription_info import (
bulk_get_subscriber_user_ids,
gather_subscriptions,
gather_subscriptions_helper,
validate_user_access_to_subscribers_helper,
)
from zerver.lib.test_classes import ZulipTestCase, get_topic_messages
from zerver.lib.test_helpers import (
HostRequestMock,
cache_tries_captured,
get_subscription,
most_recent_message,
most_recent_usermessage,
queries_captured,
reset_email_visibility_to_everyone_in_zulip_realm,
)
from zerver.lib.types import (
APIStreamDict,
APISubscriptionDict,
NeverSubscribedStreamDict,
SubscriptionInfo,
)
from zerver.models import (
Attachment,
DefaultStream,
DefaultStreamGroup,
Message,
Realm,
RealmAuditLog,
Recipient,
Stream,
Subscription,
UserGroup,
UserMessage,
UserProfile,
active_non_guest_user_ids,
get_default_stream_groups,
get_realm,
get_stream,
get_user,
get_user_profile_by_id_in_realm,
validate_attachment_request,
validate_attachment_request_for_spectator_access,
)
from zerver.views.streams import compose_views
if TYPE_CHECKING:
from django.test.client import _MonkeyPatchedWSGIResponse as TestHttpResponse
class TestMiscStuff(ZulipTestCase):
def test_test_helper(self) -> None:
cordelia = self.example_user("cordelia")
s = self.subscribed_stream_name_list(cordelia)
self.assertIn("* Verona", s)
self.assertNotIn("* Denmark", s)
def test_pick_colors(self) -> None:
used_colors: Set[str] = set()
color_map: Dict[int, str] = {}
recipient_ids = list(range(30))
user_color_map = pick_colors(used_colors, color_map, recipient_ids)
self.assertEqual(
user_color_map,
{
0: "#76ce90",
1: "#fae589",
2: "#a6c7e5",
3: "#e79ab5",
4: "#bfd56f",
5: "#f4ae55",
6: "#b0a5fd",
7: "#addfe5",
8: "#f5ce6e",
9: "#c2726a",
10: "#94c849",
11: "#bd86e5",
12: "#ee7e4a",
13: "#a6dcbf",
14: "#95a5fd",
15: "#53a063",
16: "#9987e1",
17: "#e4523d",
18: "#c2c2c2",
19: "#4f8de4",
20: "#c6a8ad",
21: "#e7cc4d",
22: "#c8bebf",
23: "#a47462",
# start repeating
24: "#76ce90",
25: "#fae589",
26: "#a6c7e5",
27: "#e79ab5",
28: "#bfd56f",
29: "#f4ae55",
},
)
color_map = {98: "color98", 99: "color99"}
used_colors = set(STREAM_ASSIGNMENT_COLORS) - {"#c6a8ad", "#9987e1"}
recipient_ids = [99, 98, 1, 2, 3, 4]
user_color_map = pick_colors(used_colors, color_map, recipient_ids)
self.assertEqual(
user_color_map,
{98: "color98", 99: "color99", 1: "#9987e1", 2: "#c6a8ad", 3: "#e79ab5", 4: "#bfd56f"},
)
"""
If we are assigning colors to a user with 24+ streams, we have to start
re-using old colors. Our algorithm basically uses recipient_id % 24, so
the following code reflects the worse case scenario that our new
streams have recipient ids spaced out by exact multiples of 24. We
don't try to work around this edge case, since users who really depend
on the stream colors can always just assign themselves custom colors
for the streams that they really want to stand out.
Even if recipient_ids were completely random, the odds of collisions
are low, but it's often the case that bulk-adds are done for streams
that either were or are being created at roughly the same time, so the
recipient_ids tend to have even fewer collisions.
"""
used_colors = set(STREAM_ASSIGNMENT_COLORS)
color_map = {}
recipient_ids = [2, 26, 50, 74]
user_color_map = pick_colors(used_colors, color_map, recipient_ids)
self.assertEqual(
user_color_map,
{2: "#a6c7e5", 26: "#a6c7e5", 50: "#a6c7e5", 74: "#a6c7e5"},
)
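        # Worked example of the modular reuse described above: with a
        # 24-color palette, 2 % 24 == 26 % 24 == 50 % 24 == 74 % 24 == 2,
        # so all four recipients land on the same palette entry ("#a6c7e5").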
def test_empty_results(self) -> None:
# These are essentially just tests to ensure line
# coverage for codepaths that won't ever really be
# called in practice.
user_profile = self.example_user("cordelia")
result = bulk_get_subscriber_user_ids(
stream_dicts=[],
user_profile=user_profile,
subscribed_stream_ids=set(),
)
self.assertEqual(result, {})
streams = do_get_streams(
user_profile=user_profile,
include_public=False,
include_subscribed=False,
include_all_active=False,
include_default=False,
)
self.assertEqual(streams, [])
def test_api_fields(self) -> None:
"""Verify that all the fields from `Stream.API_FIELDS` and `Subscription.API_FIELDS` present
in `APIStreamDict` and `APISubscriptionDict`, respectively.
"""
expected_fields = set(Stream.API_FIELDS) | {"stream_id"}
expected_fields -= {"id", "can_remove_subscribers_group_id"}
expected_fields |= {"can_remove_subscribers_group"}
stream_dict_fields = set(APIStreamDict.__annotations__.keys())
computed_fields = {"is_announcement_only", "is_default", "stream_weekly_traffic"}
self.assertEqual(stream_dict_fields - computed_fields, expected_fields)
expected_fields = set(Subscription.API_FIELDS)
subscription_dict_fields = set(APISubscriptionDict.__annotations__.keys())
computed_fields = {"in_home_view", "email_address", "stream_weekly_traffic", "subscribers"}
# `APISubscriptionDict` is a subclass of `APIStreamDict`, therefore having all the
# fields in addition to the computed fields and `Subscription.API_FIELDS` that
# need to be excluded here.
self.assertEqual(
subscription_dict_fields - computed_fields - stream_dict_fields,
expected_fields,
)
class TestCreateStreams(ZulipTestCase):
def test_creating_streams(self) -> None:
stream_names = ["new1", "new2", "new3"]
stream_descriptions = ["des1", "des2", "des3"]
realm = get_realm("zulip")
# Test stream creation events.
with self.capture_send_event_calls(expected_num_events=1) as events:
ensure_stream(realm, "Public stream", invite_only=False, acting_user=None)
self.assertEqual(events[0]["event"]["type"], "stream")
self.assertEqual(events[0]["event"]["op"], "create")
# Send public stream creation event to all active users.
self.assertEqual(events[0]["users"], active_non_guest_user_ids(realm.id))
self.assertEqual(events[0]["event"]["streams"][0]["name"], "Public stream")
self.assertEqual(events[0]["event"]["streams"][0]["stream_weekly_traffic"], None)
with self.capture_send_event_calls(expected_num_events=1) as events:
ensure_stream(realm, "Private stream", invite_only=True, acting_user=None)
self.assertEqual(events[0]["event"]["type"], "stream")
self.assertEqual(events[0]["event"]["op"], "create")
# Send private stream creation event to only realm admins.
self.assert_length(events[0]["users"], 2)
self.assertTrue(self.example_user("iago").id in events[0]["users"])
self.assertTrue(self.example_user("desdemona").id in events[0]["users"])
self.assertEqual(events[0]["event"]["streams"][0]["name"], "Private stream")
self.assertEqual(events[0]["event"]["streams"][0]["stream_weekly_traffic"], None)
moderators_system_group = UserGroup.objects.get(
name="role:moderators", realm=realm, is_system_group=True
)
new_streams, existing_streams = create_streams_if_needed(
realm,
[
{
"name": stream_name,
"description": stream_description,
"invite_only": True,
"stream_post_policy": Stream.STREAM_POST_POLICY_ADMINS,
"message_retention_days": -1,
"can_remove_subscribers_group": moderators_system_group,
}
for (stream_name, stream_description) in zip(stream_names, stream_descriptions)
],
)
self.assert_length(new_streams, 3)
self.assert_length(existing_streams, 0)
actual_stream_names = {stream.name for stream in new_streams}
self.assertEqual(actual_stream_names, set(stream_names))
actual_stream_descriptions = {stream.description for stream in new_streams}
self.assertEqual(actual_stream_descriptions, set(stream_descriptions))
for stream in new_streams:
self.assertTrue(stream.invite_only)
self.assertTrue(stream.stream_post_policy == Stream.STREAM_POST_POLICY_ADMINS)
self.assertTrue(stream.message_retention_days == -1)
self.assertEqual(stream.can_remove_subscribers_group.id, moderators_system_group.id)
new_streams, existing_streams = create_streams_if_needed(
realm,
[
{"name": stream_name, "description": stream_description, "invite_only": True}
for (stream_name, stream_description) in zip(stream_names, stream_descriptions)
],
)
self.assert_length(new_streams, 0)
self.assert_length(existing_streams, 3)
actual_stream_names = {stream.name for stream in existing_streams}
self.assertEqual(actual_stream_names, set(stream_names))
actual_stream_descriptions = {stream.description for stream in existing_streams}
self.assertEqual(actual_stream_descriptions, set(stream_descriptions))
for stream in existing_streams:
self.assertTrue(stream.invite_only)
def test_create_api_multiline_description(self) -> None:
user = self.example_user("hamlet")
realm = user.realm
self.login_user(user)
post_data = {
"subscriptions": orjson.dumps(
[{"name": "new_stream", "description": "multi\nline\ndescription"}]
).decode(),
"invite_only": orjson.dumps(False).decode(),
}
result = self.api_post(user, "/api/v1/users/me/subscriptions", post_data, subdomain="zulip")
self.assert_json_success(result)
stream = get_stream("new_stream", realm)
self.assertEqual(stream.description, "multi line description")
def test_history_public_to_subscribers_on_stream_creation(self) -> None:
realm = get_realm("zulip")
stream_dicts: List[StreamDict] = [
{
"name": "publicstream",
"description": "Public stream with public history",
},
{"name": "webpublicstream", "description": "Web-public stream", "is_web_public": True},
{
"name": "privatestream",
"description": "Private stream with non-public history",
"invite_only": True,
},
{
"name": "privatewithhistory",
"description": "Private stream with public history",
"invite_only": True,
"history_public_to_subscribers": True,
},
{
"name": "publictrywithouthistory",
"description": "Public stream without public history (disallowed)",
"invite_only": False,
"history_public_to_subscribers": False,
},
]
created, existing = create_streams_if_needed(realm, stream_dicts)
self.assert_length(created, 5)
self.assert_length(existing, 0)
for stream in created:
if stream.name == "publicstream":
self.assertTrue(stream.history_public_to_subscribers)
if stream.name == "webpublicstream":
self.assertTrue(stream.history_public_to_subscribers)
if stream.name == "privatestream":
self.assertFalse(stream.history_public_to_subscribers)
if stream.name == "privatewithhistory":
self.assertTrue(stream.history_public_to_subscribers)
if stream.name == "publictrywithouthistory":
self.assertTrue(stream.history_public_to_subscribers)
def test_add_stream_as_default_on_stream_creation(self) -> None:
user_profile = self.example_user("hamlet")
self.login_user(user_profile)
realm = user_profile.realm
post_data = {
"subscriptions": orjson.dumps(
[{"name": "default_stream", "description": "This stream is default for new users"}]
).decode(),
"is_default_stream": orjson.dumps(True).decode(),
}
result = self.api_post(
user_profile, "/api/v1/users/me/subscriptions", post_data, subdomain="zulip"
)
self.assert_json_error(result, "Insufficient permission")
do_change_user_role(user_profile, UserProfile.ROLE_REALM_ADMINISTRATOR, acting_user=None)
result = self.api_post(
user_profile, "/api/v1/users/me/subscriptions", post_data, subdomain="zulip"
)
self.assert_json_success(result)
default_stream = get_stream("default_stream", realm)
self.assertTrue(default_stream.id in get_default_stream_ids_for_realm(realm.id))
post_data = {
"subscriptions": orjson.dumps(
[
{
"name": "private_default_stream",
"description": "This stream is private and default for new users",
}
]
).decode(),
"invite_only": orjson.dumps(True).decode(),
"is_default_stream": orjson.dumps(True).decode(),
}
result = self.api_post(
user_profile, "/api/v1/users/me/subscriptions", post_data, subdomain="zulip"
)
self.assert_json_error(result, "A default stream cannot be private.")
def test_history_public_to_subscribers_zephyr_realm(self) -> None:
realm = get_realm("zephyr")
stream, created = create_stream_if_needed(realm, "private_stream", invite_only=True)
self.assertTrue(created)
self.assertTrue(stream.invite_only)
self.assertFalse(stream.history_public_to_subscribers)
stream, created = create_stream_if_needed(realm, "public_stream", invite_only=False)
self.assertTrue(created)
self.assertFalse(stream.invite_only)
self.assertFalse(stream.history_public_to_subscribers)
def test_auto_mark_stream_created_message_as_read_for_stream_creator(self) -> None:
# This test relies on email == delivery_email for
# convenience.
reset_email_visibility_to_everyone_in_zulip_realm()
realm = Realm.objects.get(name="Zulip Dev")
iago = self.example_user("iago")
hamlet = self.example_user("hamlet")
cordelia = self.example_user("cordelia")
aaron = self.example_user("aaron")
# Establish a stream for notifications.
announce_stream = ensure_stream(
realm, "announce", False, "announcements here.", acting_user=None
)
realm.notifications_stream_id = announce_stream.id
realm.save(update_fields=["notifications_stream_id"])
self.subscribe(iago, announce_stream.name)
self.subscribe(hamlet, announce_stream.name)
self.login_user(iago)
initial_message_count = Message.objects.count()
initial_usermessage_count = UserMessage.objects.count()
data = {
"subscriptions": '[{"name":"brand new stream","description":""}]',
"history_public_to_subscribers": "true",
"invite_only": "false",
"announce": "true",
"principals": orjson.dumps([iago.id, aaron.id, cordelia.id, hamlet.id]).decode(),
"stream_post_policy": "1",
}
response = self.client_post("/json/users/me/subscriptions", data)
final_message_count = Message.objects.count()
final_usermessage_count = UserMessage.objects.count()
expected_response = {
"result": "success",
"msg": "",
"subscribed": {
"AARON@zulip.com": ["brand new stream"],
"cordelia@zulip.com": ["brand new stream"],
"hamlet@zulip.com": ["brand new stream"],
"iago@zulip.com": ["brand new stream"],
},
"already_subscribed": {},
}
self.assertEqual(response.status_code, 200)
self.assertEqual(orjson.loads(response.content), expected_response)
# 2 messages should be created, one in announce and one in the new stream itself.
self.assertEqual(final_message_count - initial_message_count, 2)
        # 4 UserMessages: one for each subscriber of the new stream, plus one
        # for each user subscribed to the notifications stream (here, Iago
        # and Hamlet).
announce_stream_subs = Subscription.objects.filter(recipient=announce_stream.recipient)
self.assertEqual(
final_usermessage_count - initial_usermessage_count, 4 + announce_stream_subs.count()
)
def get_unread_stream_data(user: UserProfile) -> List[UnreadStreamInfo]:
raw_unread_data = get_raw_unread_data(user)
aggregated_data = aggregate_unread_data(raw_unread_data)
return aggregated_data["streams"]
stream_id = Stream.objects.get(name="brand new stream").id
iago_unread_messages = get_unread_stream_data(iago)
hamlet_unread_messages = get_unread_stream_data(hamlet)
# The stream creation messages should be unread for Hamlet
self.assert_length(hamlet_unread_messages, 2)
# According to the code in zerver/views/streams/add_subscriptions_backend
# the notification stream message is sent first, then the new stream's message.
self.assertEqual(hamlet_unread_messages[1]["stream_id"], stream_id)
# But it should be marked as read for Iago, the stream creator.
self.assert_length(iago_unread_messages, 0)
def test_can_remove_subscribers_group_on_stream_creation(self) -> None:
user = self.example_user("hamlet")
realm = user.realm
self.login_user(user)
moderators_system_group = UserGroup.objects.get(
name="role:moderators", realm=realm, is_system_group=True
)
admins_system_group = UserGroup.objects.get(
name="role:administrators", realm=realm, is_system_group=True
)
post_data = {
"subscriptions": orjson.dumps(
[{"name": "new_stream1", "description": "First new stream"}]
).decode(),
"can_remove_subscribers_group": orjson.dumps(moderators_system_group.id).decode(),
}
result = self.api_post(user, "/api/v1/users/me/subscriptions", post_data, subdomain="zulip")
self.assert_json_success(result)
stream = get_stream("new_stream1", realm)
self.assertEqual(stream.can_remove_subscribers_group.id, moderators_system_group.id)
post_data = {
"subscriptions": orjson.dumps(
[{"name": "new_stream2", "description": "Second new stream"}]
).decode(),
}
result = self.api_post(user, "/api/v1/users/me/subscriptions", post_data, subdomain="zulip")
self.assert_json_success(result)
stream = get_stream("new_stream2", realm)
self.assertEqual(stream.can_remove_subscribers_group.id, admins_system_group.id)
hamletcharacters_group = UserGroup.objects.get(name="hamletcharacters", realm=realm)
post_data = {
"subscriptions": orjson.dumps(
[{"name": "new_stream3", "description": "Third new stream"}]
).decode(),
"can_remove_subscribers_group": orjson.dumps(hamletcharacters_group.id).decode(),
}
result = self.api_post(user, "/api/v1/users/me/subscriptions", post_data, subdomain="zulip")
self.assert_json_error(
result, "'can_remove_subscribers_group' must be a system user group."
)
internet_group = UserGroup.objects.get(
name="role:internet", is_system_group=True, realm=realm
)
post_data = {
"subscriptions": orjson.dumps(
[{"name": "new_stream3", "description": "Third new stream"}]
).decode(),
"can_remove_subscribers_group": orjson.dumps(internet_group.id).decode(),
}
result = self.api_post(user, "/api/v1/users/me/subscriptions", post_data, subdomain="zulip")
self.assert_json_error(
result,
"'can_remove_subscribers_group' setting cannot be set to 'role:internet' group.",
)
owners_group = UserGroup.objects.get(name="role:owners", is_system_group=True, realm=realm)
post_data = {
"subscriptions": orjson.dumps(
[{"name": "new_stream3", "description": "Third new stream"}]
).decode(),
"can_remove_subscribers_group": orjson.dumps(owners_group.id).decode(),
}
result = self.api_post(user, "/api/v1/users/me/subscriptions", post_data, subdomain="zulip")
self.assert_json_error(
result,
"'can_remove_subscribers_group' setting cannot be set to 'role:owners' group.",
)
nobody_group = UserGroup.objects.get(name="role:nobody", is_system_group=True, realm=realm)
post_data = {
"subscriptions": orjson.dumps(
[{"name": "new_stream3", "description": "Third new stream"}]
).decode(),
"can_remove_subscribers_group": orjson.dumps(nobody_group.id).decode(),
}
result = self.api_post(user, "/api/v1/users/me/subscriptions", post_data, subdomain="zulip")
self.assert_json_error(
result,
"'can_remove_subscribers_group' setting cannot be set to 'role:nobody' group.",
)
class RecipientTest(ZulipTestCase):
def test_recipient(self) -> None:
realm = get_realm("zulip")
stream = get_stream("Verona", realm)
recipient = Recipient.objects.get(
type_id=stream.id,
type=Recipient.STREAM,
)
self.assertEqual(repr(recipient), f"<Recipient: Verona ({stream.id}, {Recipient.STREAM})>")
class StreamAdminTest(ZulipTestCase):
def test_make_stream_public(self) -> None:
user_profile = self.example_user("hamlet")
self.login_user(user_profile)
self.make_stream("private_stream_1", invite_only=True)
self.make_stream("private_stream_2", invite_only=True)
do_change_user_role(user_profile, UserProfile.ROLE_REALM_ADMINISTRATOR, acting_user=None)
params = {
"is_private": orjson.dumps(False).decode(),
}
stream_id = get_stream("private_stream_1", user_profile.realm).id
result = self.client_patch(f"/json/streams/{stream_id}", params)
self.assert_json_error(result, "Invalid stream ID")
stream = self.subscribe(user_profile, "private_stream_1")
self.assertFalse(stream.is_in_zephyr_realm)
do_change_user_role(user_profile, UserProfile.ROLE_REALM_ADMINISTRATOR, acting_user=None)
params = {
"is_private": orjson.dumps(False).decode(),
}
result = self.client_patch(f"/json/streams/{stream_id}", params)
self.assert_json_success(result)
realm = user_profile.realm
stream = get_stream("private_stream_1", realm)
self.assertFalse(stream.invite_only)
self.assertTrue(stream.history_public_to_subscribers)
messages = get_topic_messages(user_profile, stream, "stream events")
self.assert_length(messages, 1)
expected_notification = (
f"@_**King Hamlet|{user_profile.id}** changed the [access permissions](/help/stream-permissions) "
"for this stream from **Private, protected history** to **Public**."
)
self.assertEqual(messages[0].content, expected_notification)
history_public_to_subscribers_log = RealmAuditLog.objects.filter(
event_type=RealmAuditLog.STREAM_PROPERTY_CHANGED,
modified_stream=stream,
).last()
assert history_public_to_subscribers_log is not None
expected_extra_data = {
RealmAuditLog.OLD_VALUE: False,
RealmAuditLog.NEW_VALUE: True,
"property": "history_public_to_subscribers",
}
self.assertEqual(history_public_to_subscribers_log.extra_data, expected_extra_data)
invite_only_log = RealmAuditLog.objects.filter(
event_type=RealmAuditLog.STREAM_PROPERTY_CHANGED,
modified_stream=stream,
).order_by("-id")[1]
assert invite_only_log is not None
expected_extra_data = {
RealmAuditLog.OLD_VALUE: True,
RealmAuditLog.NEW_VALUE: False,
"property": "invite_only",
}
self.assertEqual(invite_only_log.extra_data, expected_extra_data)
do_change_user_role(user_profile, UserProfile.ROLE_MEMBER, acting_user=None)
params = {
"is_private": orjson.dumps(False).decode(),
}
stream = self.subscribe(user_profile, "private_stream_2")
result = self.client_patch(f"/json/streams/{stream.id}", params)
self.assertTrue(stream.invite_only)
self.assert_json_error(result, "Must be an organization administrator")
def test_make_stream_private(self) -> None:
user_profile = self.example_user("hamlet")
self.login_user(user_profile)
realm = user_profile.realm
self.make_stream("public_stream_1", realm=realm)
self.make_stream("public_stream_2")
do_change_user_role(user_profile, UserProfile.ROLE_REALM_ADMINISTRATOR, acting_user=None)
params = {
"is_private": orjson.dumps(True).decode(),
}
stream_id = self.subscribe(user_profile, "public_stream_1").id
result = self.client_patch(f"/json/streams/{stream_id}", params)
self.assert_json_success(result)
stream = get_stream("public_stream_1", realm)
self.assertTrue(stream.invite_only)
self.assertFalse(stream.history_public_to_subscribers)
messages = get_topic_messages(user_profile, stream, "stream events")
self.assert_length(messages, 1)
expected_notification = (
f"@_**King Hamlet|{user_profile.id}** changed the [access permissions](/help/stream-permissions) "
"for this stream from **Public** to **Private, protected history**."
)
self.assertEqual(messages[0].content, expected_notification)
history_public_to_subscribers_log = RealmAuditLog.objects.filter(
event_type=RealmAuditLog.STREAM_PROPERTY_CHANGED,
modified_stream=stream,
).last()
assert history_public_to_subscribers_log is not None
expected_extra_data = {
RealmAuditLog.OLD_VALUE: True,
RealmAuditLog.NEW_VALUE: False,
"property": "history_public_to_subscribers",
}
self.assertEqual(history_public_to_subscribers_log.extra_data, expected_extra_data)
invite_only_log = RealmAuditLog.objects.filter(
event_type=RealmAuditLog.STREAM_PROPERTY_CHANGED,
modified_stream=stream,
).order_by("-id")[1]
assert invite_only_log is not None
expected_extra_data = {
RealmAuditLog.OLD_VALUE: False,
RealmAuditLog.NEW_VALUE: True,
"property": "invite_only",
}
self.assertEqual(invite_only_log.extra_data, expected_extra_data)
default_stream = self.make_stream("default_stream", realm=realm)
do_add_default_stream(default_stream)
params = {
"is_private": orjson.dumps(True).decode(),
}
result = self.client_patch(f"/json/streams/{default_stream.id}", params)
self.assert_json_error(result, "A default stream cannot be private.")
self.assertFalse(default_stream.invite_only)
do_change_user_role(user_profile, UserProfile.ROLE_MEMBER, acting_user=None)
params = {
"is_private": orjson.dumps(True).decode(),
}
stream = self.subscribe(user_profile, "public_stream_2")
result = self.client_patch(f"/json/streams/{stream.id}", params)
self.assertFalse(stream.invite_only)
self.assert_json_error(result, "Must be an organization administrator")
def test_create_web_public_stream(self) -> None:
user_profile = self.example_user("hamlet")
owner = self.example_user("desdemona")
stream_names = ["new1", "new2", "new3"]
stream_descriptions = ["des1", "des2", "des3"]
streams_raw: List[StreamDict] = [
{"name": stream_name, "description": stream_description, "is_web_public": True}
for (stream_name, stream_description) in zip(stream_names, stream_descriptions)
]
self.assertFalse(user_profile.can_create_web_public_streams())
self.assertTrue(owner.can_create_web_public_streams())
# As per create_web_public_stream_policy, only owners can create web-public streams by default.
with self.assertRaisesRegex(JsonableError, "Insufficient permission"):
list_to_streams(
streams_raw,
user_profile,
autocreate=True,
)
with self.settings(WEB_PUBLIC_STREAMS_ENABLED=False):
self.assertFalse(user_profile.can_create_web_public_streams())
self.assertFalse(owner.can_create_web_public_streams())
with self.assertRaisesRegex(JsonableError, "Web-public streams are not enabled."):
list_to_streams(
streams_raw,
owner,
autocreate=True,
)
existing_streams, new_streams = list_to_streams(
streams_raw,
owner,
autocreate=True,
)
self.assert_length(new_streams, 3)
self.assert_length(existing_streams, 0)
actual_stream_names = {stream.name for stream in new_streams}
self.assertEqual(actual_stream_names, set(stream_names))
actual_stream_descriptions = {stream.description for stream in new_streams}
self.assertEqual(actual_stream_descriptions, set(stream_descriptions))
for stream in new_streams:
self.assertTrue(stream.is_web_public)
def test_make_stream_public_zephyr_mirror(self) -> None:
user_profile = self.mit_user("starnine")
self.login_user(user_profile)
realm = user_profile.realm
self.make_stream("target_stream", realm=realm, invite_only=True)
self.subscribe(user_profile, "target_stream")
do_change_user_role(user_profile, UserProfile.ROLE_REALM_ADMINISTRATOR, acting_user=None)
params = {
"is_private": orjson.dumps(False).decode(),
}
stream_id = get_stream("target_stream", realm).id
result = self.client_patch(f"/json/streams/{stream_id}", params, subdomain="zephyr")
self.assert_json_success(result)
stream = get_stream("target_stream", realm)
self.assertFalse(stream.invite_only)
self.assertFalse(stream.history_public_to_subscribers)
messages = get_topic_messages(user_profile, stream, "stream events")
self.assert_length(messages, 1)
expected_notification = (
f"@_**{user_profile.full_name}|{user_profile.id}** changed the [access permissions](/help/stream-permissions) "
"for this stream from **Private, protected history** to **Public, protected history**."
)
self.assertEqual(messages[0].content, expected_notification)
realm_audit_log = RealmAuditLog.objects.filter(
event_type=RealmAuditLog.STREAM_PROPERTY_CHANGED,
modified_stream=stream,
).last()
assert realm_audit_log is not None
expected_extra_data = {
RealmAuditLog.OLD_VALUE: True,
RealmAuditLog.NEW_VALUE: False,
"property": "invite_only",
}
self.assertEqual(realm_audit_log.extra_data, expected_extra_data)
def test_make_stream_private_with_public_history(self) -> None:
# Convert a public stream to a private stream with shared history
user_profile = self.example_user("hamlet")
self.login_user(user_profile)
realm = user_profile.realm
self.make_stream("public_history_stream", realm=realm)
do_change_user_role(user_profile, UserProfile.ROLE_REALM_ADMINISTRATOR, acting_user=None)
params = {
"is_private": orjson.dumps(True).decode(),
"history_public_to_subscribers": orjson.dumps(True).decode(),
}
stream_id = self.subscribe(user_profile, "public_history_stream").id
result = self.client_patch(f"/json/streams/{stream_id}", params)
self.assert_json_success(result)
stream = get_stream("public_history_stream", realm)
self.assertTrue(stream.invite_only)
self.assertTrue(stream.history_public_to_subscribers)
messages = get_topic_messages(user_profile, stream, "stream events")
self.assert_length(messages, 1)
expected_notification = (
f"@_**King Hamlet|{user_profile.id}** changed the [access permissions](/help/stream-permissions) "
"for this stream from **Public** to **Private, shared history**."
)
self.assertEqual(messages[0].content, expected_notification)
realm_audit_log = RealmAuditLog.objects.filter(
event_type=RealmAuditLog.STREAM_PROPERTY_CHANGED,
modified_stream=stream,
).last()
assert realm_audit_log is not None
expected_extra_data = {
RealmAuditLog.OLD_VALUE: False,
RealmAuditLog.NEW_VALUE: True,
"property": "invite_only",
}
self.assertEqual(realm_audit_log.extra_data, expected_extra_data)
# Convert a private stream with protected history to a private stream
# with shared history.
self.make_stream(
"private_protected_stream",
realm=realm,
invite_only=True,
history_public_to_subscribers=False,
)
params = {
"is_private": orjson.dumps(True).decode(),
"history_public_to_subscribers": orjson.dumps(True).decode(),
}
stream_id = self.subscribe(user_profile, "private_protected_stream").id
result = self.client_patch(f"/json/streams/{stream_id}", params)
self.assert_json_success(result)
stream = get_stream("private_protected_stream", realm)
self.assertTrue(stream.invite_only)
self.assertTrue(stream.history_public_to_subscribers)
messages = get_topic_messages(user_profile, stream, "stream events")
self.assert_length(messages, 1)
expected_notification = (
f"@_**King Hamlet|{user_profile.id}** changed the [access permissions](/help/stream-permissions) "
"for this stream from **Private, protected history** to **Private, shared history**."
)
self.assertEqual(messages[0].content, expected_notification)
realm_audit_log = RealmAuditLog.objects.filter(
event_type=RealmAuditLog.STREAM_PROPERTY_CHANGED,
modified_stream=stream,
).last()
assert realm_audit_log is not None
expected_extra_data = {
RealmAuditLog.OLD_VALUE: False,
RealmAuditLog.NEW_VALUE: True,
"property": "history_public_to_subscribers",
}
self.assertEqual(realm_audit_log.extra_data, expected_extra_data)
def test_make_stream_web_public(self) -> None:
user_profile = self.example_user("hamlet")
self.login_user(user_profile)
realm = user_profile.realm
self.make_stream("test_stream", realm=realm)
stream_id = self.subscribe(user_profile, "test_stream").id
params = {
"is_web_public": orjson.dumps(True).decode(),
"history_public_to_subscribers": orjson.dumps(True).decode(),
}
result = self.client_patch(f"/json/streams/{stream_id}", params)
self.assert_json_error(result, "Must be an organization administrator")
do_set_realm_property(
realm, "create_web_public_stream_policy", Realm.POLICY_OWNERS_ONLY, acting_user=None
)
do_change_user_role(user_profile, UserProfile.ROLE_REALM_ADMINISTRATOR, acting_user=None)
result = self.client_patch(f"/json/streams/{stream_id}", params)
self.assert_json_error(result, "Insufficient permission")
do_set_realm_property(
realm, "create_web_public_stream_policy", Realm.POLICY_NOBODY, acting_user=None
)
do_change_user_role(user_profile, UserProfile.ROLE_REALM_OWNER, acting_user=None)
result = self.client_patch(f"/json/streams/{stream_id}", params)
self.assert_json_error(result, "Insufficient permission")
do_set_realm_property(
realm, "create_web_public_stream_policy", Realm.POLICY_OWNERS_ONLY, acting_user=None
)
do_change_user_role(user_profile, UserProfile.ROLE_REALM_OWNER, acting_user=None)
with self.settings(WEB_PUBLIC_STREAMS_ENABLED=False):
result = self.client_patch(f"/json/streams/{stream_id}", params)
self.assert_json_error(result, "Web-public streams are not enabled.")
bad_params = {
"is_web_public": orjson.dumps(True).decode(),
"is_private": orjson.dumps(True).decode(),
"history_public_to_subscribers": orjson.dumps(True).decode(),
}
result = self.client_patch(f"/json/streams/{stream_id}", bad_params)
self.assert_json_error(result, "Invalid parameters")
bad_params = {
"is_web_public": orjson.dumps(True).decode(),
"is_private": orjson.dumps(False).decode(),
"history_public_to_subscribers": orjson.dumps(False).decode(),
}
result = self.client_patch(f"/json/streams/{stream_id}", bad_params)
self.assert_json_error(result, "Invalid parameters")
stream = get_stream("test_stream", realm)
self.assertFalse(stream.is_web_public)
result = self.client_patch(f"/json/streams/{stream_id}", params)
self.assert_json_success(result)
stream = get_stream("test_stream", realm)
self.assertTrue(stream.is_web_public)
self.assertFalse(stream.invite_only)
self.assertTrue(stream.history_public_to_subscribers)
messages = get_topic_messages(user_profile, stream, "stream events")
self.assert_length(messages, 1)
expected_notification = (
f"@_**King Hamlet|{user_profile.id}** changed the [access permissions](/help/stream-permissions) "
"for this stream from **Public** to **Web-public**."
)
self.assertEqual(messages[0].content, expected_notification)
realm_audit_log = RealmAuditLog.objects.filter(
event_type=RealmAuditLog.STREAM_PROPERTY_CHANGED,
modified_stream=stream,
).last()
assert realm_audit_log is not None
expected_extra_data = {
RealmAuditLog.OLD_VALUE: False,
RealmAuditLog.NEW_VALUE: True,
"property": "is_web_public",
}
self.assertEqual(realm_audit_log.extra_data, expected_extra_data)
def test_change_history_access_for_private_streams(self) -> None:
user_profile = self.example_user("iago")
self.login_user(user_profile)
realm = user_profile.realm
self.make_stream("private_stream", realm=realm, invite_only=True)
stream_id = self.subscribe(user_profile, "private_stream").id
params = {
"history_public_to_subscribers": orjson.dumps(True).decode(),
}
result = self.client_patch(f"/json/streams/{stream_id}", params)
self.assert_json_success(result)
stream = get_stream("private_stream", realm)
self.assertTrue(stream.invite_only)
self.assertTrue(stream.history_public_to_subscribers)
messages = get_topic_messages(user_profile, stream, "stream events")
self.assert_length(messages, 1)
expected_notification = (
f"@_**Iago|{user_profile.id}** changed the [access permissions](/help/stream-permissions) "
"for this stream from **Private, protected history** to **Private, shared history**."
)
self.assertEqual(messages[0].content, expected_notification)
realm_audit_log = RealmAuditLog.objects.filter(
event_type=RealmAuditLog.STREAM_PROPERTY_CHANGED,
modified_stream=stream,
).last()
assert realm_audit_log is not None
expected_extra_data = {
RealmAuditLog.OLD_VALUE: False,
RealmAuditLog.NEW_VALUE: True,
"property": "history_public_to_subscribers",
}
self.assertEqual(realm_audit_log.extra_data, expected_extra_data)
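        # Now flip the stream back to protected history.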
params = {
"history_public_to_subscribers": orjson.dumps(False).decode(),
}
result = self.client_patch(f"/json/streams/{stream_id}", params)
self.assert_json_success(result)
stream = get_stream("private_stream", realm)
self.assertTrue(stream.invite_only)
self.assertFalse(stream.history_public_to_subscribers)
messages = get_topic_messages(user_profile, stream, "stream events")
self.assert_length(messages, 2)
expected_notification = (
f"@_**Iago|{user_profile.id}** changed the [access permissions](/help/stream-permissions) "
"for this stream from **Private, shared history** to **Private, protected history**."
)
self.assertEqual(messages[1].content, expected_notification)
realm_audit_log = RealmAuditLog.objects.filter(
event_type=RealmAuditLog.STREAM_PROPERTY_CHANGED,
modified_stream=stream,
).last()
assert realm_audit_log is not None
expected_extra_data = {
RealmAuditLog.OLD_VALUE: True,
RealmAuditLog.NEW_VALUE: False,
"property": "history_public_to_subscribers",
}
self.assertEqual(realm_audit_log.extra_data, expected_extra_data)
def test_add_and_remove_stream_as_default(self) -> None:
user_profile = self.example_user("hamlet")
self.login_user(user_profile)
realm = user_profile.realm
stream = self.make_stream("stream", realm=realm)
stream_id = self.subscribe(user_profile, "stream").id
params = {
"is_default_stream": orjson.dumps(True).decode(),
}
result = self.client_patch(f"/json/streams/{stream_id}", params)
self.assert_json_error(result, "Must be an organization administrator")
        self.assertNotIn(stream_id, get_default_stream_ids_for_realm(realm.id))
do_change_user_role(user_profile, UserProfile.ROLE_REALM_ADMINISTRATOR, acting_user=None)
result = self.client_patch(f"/json/streams/{stream_id}", params)
self.assert_json_success(result)
        self.assertIn(stream_id, get_default_stream_ids_for_realm(realm.id))
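        # A default stream cannot be made private while it remains a default.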
params = {
"is_private": orjson.dumps(True).decode(),
}
result = self.client_patch(f"/json/streams/{stream_id}", params)
self.assert_json_error(result, "A default stream cannot be private.")
stream.refresh_from_db()
self.assertFalse(stream.invite_only)
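        # Removing default status and making the stream private in a single
        # request is allowed.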
params = {
"is_private": orjson.dumps(True).decode(),
"is_default_stream": orjson.dumps(False).decode(),
}
result = self.client_patch(f"/json/streams/{stream_id}", params)
self.assert_json_success(result)
stream.refresh_from_db()
self.assertTrue(stream.invite_only)
        self.assertNotIn(stream_id, get_default_stream_ids_for_realm(realm.id))
stream_2 = self.make_stream("stream_2", realm=realm)
stream_2_id = self.subscribe(user_profile, "stream_2").id
bad_params = {
"is_default_stream": orjson.dumps(True).decode(),
"is_private": orjson.dumps(True).decode(),
}
result = self.client_patch(f"/json/streams/{stream_2_id}", bad_params)
self.assert_json_error(result, "A default stream cannot be private.")
        stream_2.refresh_from_db()
self.assertFalse(stream_2.invite_only)
        self.assertNotIn(stream_2_id, get_default_stream_ids_for_realm(realm.id))
private_stream = self.make_stream("private_stream", realm=realm, invite_only=True)
private_stream_id = self.subscribe(user_profile, "private_stream").id
params = {
"is_default_stream": orjson.dumps(True).decode(),
}
result = self.client_patch(f"/json/streams/{private_stream_id}", params)
self.assert_json_error(result, "A default stream cannot be private.")
        self.assertNotIn(private_stream_id, get_default_stream_ids_for_realm(realm.id))
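        # Making the stream public and a default in a single request is
        # allowed.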
params = {
"is_private": orjson.dumps(False).decode(),
"is_default_stream": orjson.dumps(True).decode(),
}
result = self.client_patch(f"/json/streams/{private_stream_id}", params)
self.assert_json_success(result)
private_stream.refresh_from_db()
self.assertFalse(private_stream.invite_only)
        self.assertIn(private_stream_id, get_default_stream_ids_for_realm(realm.id))
    def test_stream_permission_changes_updates_attachments(self) -> None:
self.login("desdemona")
fp = StringIO("zulip!")
fp.name = "zulip.txt"
result = self.client_post("/json/user_uploads", {"file": fp})
url = self.assert_json_success(result)["uri"]
owner = self.example_user("desdemona")
realm = owner.realm
stream = self.make_stream("test_stream", realm=realm)
self.subscribe(owner, "test_stream")
body = f"First message ...[zulip.txt](http://{realm.host}" + url + ")"
msg_id = self.send_stream_message(owner, "test_stream", body, "test")
attachment = Attachment.objects.get(messages__id=msg_id)
self.assertFalse(stream.is_web_public)
self.assertFalse(attachment.is_web_public)
self.assertFalse(stream.invite_only)
self.assertTrue(attachment.is_realm_public)
params = {
"is_private": orjson.dumps(True).decode(),
"history_public_to_subscribers": orjson.dumps(True).decode(),
}
result = self.client_patch(f"/json/streams/{stream.id}", params)
self.assert_json_success(result)
attachment.refresh_from_db()
stream.refresh_from_db()
self.assertFalse(stream.is_web_public)
self.assertFalse(attachment.is_web_public)
self.assertTrue(stream.invite_only)
self.assertIsNone(attachment.is_realm_public)
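        # is_realm_public behaves as a lazily-recomputed cache: permission
        # changes reset it to None, and it is recomputed on the next access
        # check.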
cordelia = self.example_user("cordelia")
self.assertFalse(validate_attachment_request(cordelia, attachment.path_id))
self.assertTrue(validate_attachment_request(owner, attachment.path_id))
attachment.refresh_from_db()
self.assertFalse(attachment.is_realm_public)
self.assertFalse(validate_attachment_request_for_spectator_access(realm, attachment))
params = {
"is_private": orjson.dumps(False).decode(),
"is_web_public": orjson.dumps(True).decode(),
"history_public_to_subscribers": orjson.dumps(True).decode(),
}
result = self.client_patch(f"/json/streams/{stream.id}", params)
self.assert_json_success(result)
attachment.refresh_from_db()
stream.refresh_from_db()
self.assertFalse(stream.invite_only)
self.assertTrue(stream.is_web_public)
self.assertIsNone(attachment.is_realm_public)
self.assertIsNone(attachment.is_web_public)
self.assertTrue(validate_attachment_request_for_spectator_access(realm, attachment))
attachment.refresh_from_db()
self.assertTrue(attachment.is_web_public)
self.assertIsNone(attachment.is_realm_public)
self.assertTrue(validate_attachment_request(cordelia, attachment.path_id))
attachment.refresh_from_db()
self.assertTrue(attachment.is_realm_public)
params = {
"is_private": orjson.dumps(False).decode(),
"is_web_public": orjson.dumps(False).decode(),
"history_public_to_subscribers": orjson.dumps(True).decode(),
}
result = self.client_patch(f"/json/streams/{stream.id}", params)
self.assert_json_success(result)
attachment.refresh_from_db()
stream.refresh_from_db()
self.assertIsNone(attachment.is_web_public)
self.assertFalse(stream.invite_only)
self.assertTrue(attachment.is_realm_public)
self.assertFalse(validate_attachment_request_for_spectator_access(realm, attachment))
attachment.refresh_from_db()
stream.refresh_from_db()
self.assertFalse(attachment.is_web_public)
# Verify moving a message to another public stream doesn't reset cache.
new_stream = self.make_stream("new_stream", realm=realm)
self.subscribe(owner, "new_stream")
result = self.client_patch(
"/json/messages/" + str(msg_id),
{
"stream_id": new_stream.id,
"propagate_mode": "change_all",
},
)
self.assert_json_success(result)
attachment.refresh_from_db()
self.assertFalse(attachment.is_web_public)
self.assertTrue(attachment.is_realm_public)
# Verify moving a message to a private stream
private_stream = self.make_stream("private_stream", realm=realm, invite_only=True)
self.subscribe(owner, "private_stream")
result = self.client_patch(
"/json/messages/" + str(msg_id),
{
"stream_id": private_stream.id,
"propagate_mode": "change_all",
},
)
self.assert_json_success(result)
attachment.refresh_from_db()
self.assertFalse(attachment.is_web_public)
self.assertIsNone(attachment.is_realm_public)
self.assertFalse(validate_attachment_request(cordelia, attachment.path_id))
self.assertTrue(validate_attachment_request(owner, attachment.path_id))
attachment.refresh_from_db()
self.assertFalse(attachment.is_realm_public)
# Verify moving a message to a web-public stream
web_public_stream = self.make_stream("web_public_stream", realm=realm, is_web_public=True)
result = self.client_patch(
"/json/messages/" + str(msg_id),
{
"stream_id": web_public_stream.id,
"propagate_mode": "change_all",
},
)
self.assert_json_success(result)
attachment.refresh_from_db()
self.assertIsNone(attachment.is_web_public)
self.assertIsNone(attachment.is_realm_public)
self.assertTrue(validate_attachment_request_for_spectator_access(realm, attachment))
attachment.refresh_from_db()
self.assertTrue(attachment.is_web_public)
def test_try_make_stream_public_with_private_history(self) -> None:
# We only support public streams with private history if
# is_zephyr_mirror_realm, and don't allow changing stream
# permissions in such realms. So changing the
# history_public_to_subscribers property of a public stream is
        # not possible in Zulip today.
user_profile = self.example_user("hamlet")
self.login_user(user_profile)
realm = user_profile.realm
self.make_stream("public_stream", realm=realm)
do_change_user_role(user_profile, UserProfile.ROLE_REALM_ADMINISTRATOR, acting_user=None)
params = {
"is_private": orjson.dumps(False).decode(),
"history_public_to_subscribers": orjson.dumps(False).decode(),
}
stream_id = self.subscribe(user_profile, "public_stream").id
result = self.client_patch(f"/json/streams/{stream_id}", params)
self.assert_json_error(result, "Invalid parameters")
params = {
"history_public_to_subscribers": orjson.dumps(False).decode(),
}
result = self.client_patch(f"/json/streams/{stream_id}", params)
self.assert_json_error(result, "Invalid parameters")
web_public_stream = self.make_stream("web_public_stream", realm=realm, is_web_public=True)
result = self.client_patch(f"/json/streams/{web_public_stream.id}", params)
self.assert_json_error(result, "Invalid parameters")
def test_subscriber_ids_with_stream_history_access(self) -> None:
hamlet = self.example_user("hamlet")
polonius = self.example_user("polonius")
stream1 = self.make_stream(
"history_private_stream", invite_only=True, history_public_to_subscribers=False
)
self.subscribe(hamlet, stream1.name)
self.subscribe(polonius, stream1.name)
self.assertEqual(set(), subscriber_ids_with_stream_history_access(stream1))
stream2 = self.make_stream(
"history_public_web_private_stream",
invite_only=True,
is_web_public=False,
history_public_to_subscribers=True,
)
self.subscribe(hamlet, stream2.name)
self.subscribe(polonius, stream2.name)
self.assertEqual(
{hamlet.id, polonius.id}, subscriber_ids_with_stream_history_access(stream2)
)
stream3 = self.make_stream(
"history_public_web_public_stream",
is_web_public=True,
history_public_to_subscribers=True,
)
self.subscribe(hamlet, stream3.name)
self.subscribe(polonius, stream3.name)
self.assertEqual(
{hamlet.id, polonius.id}, subscriber_ids_with_stream_history_access(stream3)
)
stream4 = self.make_stream(
"regular_public_stream",
)
self.subscribe(hamlet, stream4.name)
self.subscribe(polonius, stream4.name)
self.assertEqual(
{hamlet.id, polonius.id}, subscriber_ids_with_stream_history_access(stream4)
)
def test_deactivate_stream_backend(self) -> None:
user_profile = self.example_user("hamlet")
self.login_user(user_profile)
stream = self.make_stream("new_stream_1")
self.subscribe(user_profile, stream.name)
do_change_user_role(user_profile, UserProfile.ROLE_REALM_ADMINISTRATOR, acting_user=None)
result = self.client_delete(f"/json/streams/{stream.id}")
self.assert_json_success(result)
subscription_exists = (
get_active_subscriptions_for_stream_id(stream.id, include_deactivated_users=True)
.filter(
user_profile=user_profile,
)
.exists()
)
self.assertFalse(subscription_exists)
def test_deactivate_stream_removes_default_stream(self) -> None:
stream = self.make_stream("new_stream")
do_add_default_stream(stream)
self.assertEqual(1, DefaultStream.objects.filter(stream_id=stream.id).count())
do_deactivate_stream(stream, acting_user=None)
self.assertEqual(0, DefaultStream.objects.filter(stream_id=stream.id).count())
def test_deactivate_stream_removes_stream_from_default_stream_groups(self) -> None:
realm = get_realm("zulip")
streams_to_keep = [
ensure_stream(realm, stream_name, acting_user=None)
for stream_name in ["stream1", "stream2"]
]
streams_to_remove = [ensure_stream(realm, "stream3", acting_user=None)]
all_streams = streams_to_keep + streams_to_remove
def get_streams(group: DefaultStreamGroup) -> List[Stream]:
return list(group.streams.all().order_by("name"))
group_name = "group1"
description = "This is group1"
do_create_default_stream_group(realm, group_name, description, all_streams)
default_stream_groups = get_default_stream_groups(realm)
self.assertEqual(get_streams(default_stream_groups[0]), all_streams)
do_deactivate_stream(streams_to_remove[0], acting_user=None)
self.assertEqual(get_streams(default_stream_groups[0]), streams_to_keep)
def test_deactivate_stream_marks_messages_as_read(self) -> None:
hamlet = self.example_user("hamlet")
cordelia = self.example_user("cordelia")
stream = self.make_stream("new_stream")
self.subscribe(hamlet, stream.name)
self.subscribe(cordelia, stream.name)
self.subscribe(hamlet, "Denmark")
self.subscribe(cordelia, "Denmark")
self.send_stream_message(hamlet, stream.name)
new_stream_usermessage = most_recent_usermessage(cordelia)
# We send a message to a different stream too, to verify that the
# deactivation of new_stream won't corrupt read state of UserMessage elsewhere.
self.send_stream_message(hamlet, "Denmark")
denmark_usermessage = most_recent_usermessage(cordelia)
self.assertFalse(new_stream_usermessage.flags.read)
self.assertFalse(denmark_usermessage.flags.read)
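        # The read-flag updates are deferred to on-commit callbacks, so
        # execute them for the deactivation to take effect.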
with self.captureOnCommitCallbacks(execute=True):
do_deactivate_stream(stream, acting_user=None)
new_stream_usermessage.refresh_from_db()
denmark_usermessage.refresh_from_db()
self.assertTrue(new_stream_usermessage.flags.read)
self.assertFalse(denmark_usermessage.flags.read)
def test_deactivated_streams_by_old_name(self) -> None:
realm = get_realm("zulip")
stream = self.make_stream("new_stream")
do_deactivate_stream(stream, acting_user=None)
self.assertEqual(set(deactivated_streams_by_old_name(realm, "new_stream")), {stream})
second_stream = self.make_stream("new_stream")
do_deactivate_stream(second_stream, acting_user=None)
self.assertEqual(
set(deactivated_streams_by_old_name(realm, "new_stream")), {stream, second_stream}
)
self.make_stream("!DEACTIVATED:old_style") # This is left active
old_style = self.make_stream("old_style")
do_deactivate_stream(old_style, acting_user=None)
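        # Simulate the legacy naming scheme, where each deactivation
        # prepended another "!" to the "!DEACTIVATED:" marker.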
old_style.name = "!!DEACTIVATED:old_style"
old_style.save()
self.assertEqual(set(deactivated_streams_by_old_name(realm, "old_style")), {old_style})
def test_reactivate_stream_active_stream(self) -> None:
stream = self.make_stream("new_stream")
with self.assertRaisesRegex(JsonableError, "Stream is not currently deactivated"):
do_reactivate_stream(stream, new_name="new_stream", acting_user=None)
def test_reactivate_stream_existing_name(self) -> None:
stream = self.make_stream("new_stream")
self.make_stream("existing")
do_deactivate_stream(stream, acting_user=None)
with self.assertRaisesRegex(JsonableError, "Stream named existing already exists"):
do_reactivate_stream(stream, new_name="existing", acting_user=None)
def test_reactivate_stream(self) -> None:
desdemona = self.example_user("desdemona")
iago = self.example_user("iago")
hamlet = self.example_user("hamlet")
cordelia = self.example_user("cordelia")
stream = self.make_stream("new_stream", is_web_public=True)
self.subscribe(hamlet, stream.name)
self.subscribe(cordelia, stream.name)
do_deactivate_stream(stream, acting_user=None)
with self.capture_send_event_calls(expected_num_events=4) as events:
do_reactivate_stream(stream, new_name="new_stream", acting_user=None)
# Tell all admins and owners that the stream exists
self.assertEqual(events[0]["event"]["op"], "create")
self.assertEqual(events[0]["event"]["streams"][0]["name"], "new_stream")
self.assertEqual(events[0]["event"]["streams"][0]["stream_id"], stream.id)
self.assertEqual(set(events[0]["users"]), {iago.id, desdemona.id})
# Tell the owners that they're subscribed to it
self.assertEqual(events[1]["event"]["op"], "add")
self.assertEqual(events[1]["event"]["subscriptions"][0]["name"], "new_stream")
self.assertEqual(events[1]["event"]["subscriptions"][0]["stream_id"], stream.id)
self.assertEqual(events[1]["users"], [desdemona.id])
# Send a message there logging the reactivation
self.assertEqual(events[2]["event"]["type"], "message")
# iago (as an admin) gets to know that desdemona (the owner) is now subscribed.
self.assertEqual(
events[3],
{
"event": {
"op": "peer_add",
"stream_ids": [stream.id],
"type": "subscription",
"user_ids": [desdemona.id],
},
"users": [iago.id],
},
)
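        # A reactivated stream always comes back as private with shared
        # history, even if it was web-public before deactivation.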
stream = Stream.objects.get(id=stream.id)
self.assertFalse(stream.deactivated)
self.assertTrue(stream.invite_only)
self.assertFalse(stream.is_web_public)
self.assertTrue(stream.history_public_to_subscribers)
self.assertEqual(
[desdemona.id],
[
sub.user_profile_id
for sub in get_active_subscriptions_for_stream_id(
stream.id, include_deactivated_users=True
)
],
)
def test_vacate_private_stream_removes_default_stream(self) -> None:
stream = self.make_stream("new_stream", invite_only=True)
self.subscribe(self.example_user("hamlet"), stream.name)
do_add_default_stream(stream)
self.assertEqual(1, DefaultStream.objects.filter(stream_id=stream.id).count())
self.unsubscribe(self.example_user("hamlet"), stream.name)
self.assertEqual(0, DefaultStream.objects.filter(stream_id=stream.id).count())
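        # When its last subscriber leaves, a private stream is deactivated
        # automatically.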
# Fetch stream again from database.
stream = Stream.objects.get(id=stream.id)
self.assertTrue(stream.deactivated)
def test_deactivate_stream_backend_requires_existing_stream(self) -> None:
user_profile = self.example_user("hamlet")
self.login_user(user_profile)
self.make_stream("new_stream")
do_change_user_role(user_profile, UserProfile.ROLE_REALM_ADMINISTRATOR, acting_user=None)
result = self.client_delete("/json/streams/999999999")
self.assert_json_error(result, "Invalid stream ID")
def test_deactivate_stream_backend_requires_admin(self) -> None:
user_profile = self.example_user("hamlet")
self.login_user(user_profile)
stream = self.subscribe(user_profile, "new_stream")
result = self.client_delete(f"/json/streams/{stream.id}")
self.assert_json_error(result, "Must be an organization administrator")
def test_private_stream_live_updates(self) -> None:
user_profile = self.example_user("hamlet")
self.login_user(user_profile)
do_change_user_role(user_profile, UserProfile.ROLE_REALM_ADMINISTRATOR, acting_user=None)
self.make_stream("private_stream", invite_only=True)
self.subscribe(user_profile, "private_stream")
self.subscribe(self.example_user("cordelia"), "private_stream")
with self.capture_send_event_calls(expected_num_events=2) as events:
stream_id = get_stream("private_stream", user_profile.realm).id
result = self.client_patch(
f"/json/streams/{stream_id}",
{"description": "Test description"},
)
self.assert_json_success(result)
cordelia = self.example_user("cordelia")
prospero = self.example_user("prospero")
notified_user_ids = set(events[0]["users"])
self.assertIn(user_profile.id, notified_user_ids)
self.assertIn(cordelia.id, notified_user_ids)
self.assertNotIn(prospero.id, notified_user_ids)
# Three events should be sent: a name event, an email address event and a notification event
with self.capture_send_event_calls(expected_num_events=3) as events:
stream_id = get_stream("private_stream", user_profile.realm).id
result = self.client_patch(f"/json/streams/{stream_id}", {"new_name": "whatever"})
self.assert_json_success(result)
notified_user_ids = set(events[0]["users"])
self.assertIn(user_profile.id, notified_user_ids)
self.assertIn(cordelia.id, notified_user_ids)
self.assertNotIn(prospero.id, notified_user_ids)
notified_with_bot_users = events[-1]["users"]
        notified_with_bot_user_ids = [
            notified_with_bot_users[0]["id"],
            notified_with_bot_users[1]["id"],
        ]
self.assertIn(user_profile.id, notified_with_bot_user_ids)
self.assertIn(cordelia.id, notified_with_bot_user_ids)
self.assertNotIn(prospero.id, notified_with_bot_user_ids)
def test_rename_stream(self) -> None:
user_profile = self.example_user("hamlet")
self.login_user(user_profile)
realm = user_profile.realm
stream = self.subscribe(user_profile, "stream_name1")
do_change_user_role(user_profile, UserProfile.ROLE_REALM_ADMINISTRATOR, acting_user=None)
result = self.client_patch(f"/json/streams/{stream.id}", {"new_name": "stream_name1"})
self.assert_json_error(result, "Stream already has that name!")
result = self.client_patch(f"/json/streams/{stream.id}", {"new_name": "Denmark"})
self.assert_json_error(result, "Stream name 'Denmark' is already taken.")
result = self.client_patch(f"/json/streams/{stream.id}", {"new_name": "denmark "})
self.assert_json_error(result, "Stream name 'denmark' is already taken.")
# Do a rename that is case-only--this should succeed.
result = self.client_patch(f"/json/streams/{stream.id}", {"new_name": "sTREAm_name1"})
self.assert_json_success(result)
# Three events should be sent: stream_email update, stream_name update and notification message.
with self.capture_send_event_calls(expected_num_events=3) as events:
stream_id = get_stream("stream_name1", user_profile.realm).id
result = self.client_patch(f"/json/streams/{stream_id}", {"new_name": "stream_name2"})
self.assert_json_success(result)
event = events[1]["event"]
self.assertEqual(
event,
dict(
op="update",
type="stream",
property="name",
value="stream_name2",
stream_id=stream_id,
name="sTREAm_name1",
),
)
notified_user_ids = set(events[1]["users"])
self.assertRaises(Stream.DoesNotExist, get_stream, "stream_name1", realm)
stream_name2_exists = get_stream("stream_name2", realm)
self.assertTrue(stream_name2_exists)
self.assertEqual(notified_user_ids, set(active_non_guest_user_ids(realm.id)))
self.assertIn(user_profile.id, notified_user_ids)
self.assertIn(self.example_user("prospero").id, notified_user_ids)
self.assertNotIn(self.example_user("polonius").id, notified_user_ids)
# Test case to handle Unicode stream name change
        # NOTE: Encoding is needed when a Unicode string is passed as an argument.
with self.capture_send_event_calls(expected_num_events=3) as events:
stream_id = stream_name2_exists.id
result = self.client_patch(f"/json/streams/{stream_id}", {"new_name": "नया नाम"})
self.assert_json_success(result)
        # While querying, the system can handle Unicode strings.
stream_name_uni_exists = get_stream("नया नाम", realm)
self.assertTrue(stream_name_uni_exists)
# Test case to handle changing of Unicode stream name to newer name
# NOTE: Unicode string being part of URL is handled cleanly
# by client_patch call, encoding of URL is not needed.
with self.capture_send_event_calls(expected_num_events=3) as events:
stream_id = stream_name_uni_exists.id
result = self.client_patch(
f"/json/streams/{stream_id}",
{"new_name": "नाम में क्या रक्खा हे"},
)
self.assert_json_success(result)
        # While querying, the system can handle Unicode strings.
self.assertRaises(Stream.DoesNotExist, get_stream, "नया नाम", realm)
stream_name_new_uni_exists = get_stream("नाम में क्या रक्खा हे", realm)
self.assertTrue(stream_name_new_uni_exists)
# Test case to change name from one language to other.
with self.capture_send_event_calls(expected_num_events=3) as events:
stream_id = stream_name_new_uni_exists.id
result = self.client_patch(f"/json/streams/{stream_id}", {"new_name": "français"})
self.assert_json_success(result)
stream_name_fr_exists = get_stream("français", realm)
self.assertTrue(stream_name_fr_exists)
# Test case to change name to mixed language name.
with self.capture_send_event_calls(expected_num_events=3) as events:
stream_id = stream_name_fr_exists.id
result = self.client_patch(f"/json/streams/{stream_id}", {"new_name": "français name"})
self.assert_json_success(result)
stream_name_mixed_exists = get_stream("français name", realm)
self.assertTrue(stream_name_mixed_exists)
# Test case for notified users in private streams.
stream_private = self.make_stream(
"stream_private_name1", realm=user_profile.realm, invite_only=True
)
self.subscribe(self.example_user("cordelia"), "stream_private_name1")
with self.capture_send_event_calls(expected_num_events=3) as events:
stream_id = get_stream("stream_private_name1", realm).id
result = self.client_patch(
f"/json/streams/{stream_id}",
{"new_name": "stream_private_name2"},
)
self.assert_json_success(result)
notified_user_ids = set(events[1]["users"])
self.assertEqual(notified_user_ids, can_access_stream_user_ids(stream_private))
self.assertIn(self.example_user("cordelia").id, notified_user_ids)
# An important corner case is that all organization admins are notified.
self.assertIn(self.example_user("iago").id, notified_user_ids)
# The current user, Hamlet was made an admin and thus should be notified too.
self.assertIn(user_profile.id, notified_user_ids)
self.assertNotIn(self.example_user("prospero").id, notified_user_ids)
def test_rename_stream_requires_admin(self) -> None:
user_profile = self.example_user("hamlet")
self.login_user(user_profile)
self.make_stream("stream_name1")
self.subscribe(user_profile, "stream_name1")
stream_id = get_stream("stream_name1", user_profile.realm).id
result = self.client_patch(f"/json/streams/{stream_id}", {"new_name": "stream_name2"})
self.assert_json_error(result, "Must be an organization administrator")
def test_notify_on_stream_rename(self) -> None:
user_profile = self.example_user("hamlet")
self.login_user(user_profile)
self.make_stream("stream_name1")
stream = self.subscribe(user_profile, "stream_name1")
do_change_user_role(user_profile, UserProfile.ROLE_REALM_ADMINISTRATOR, acting_user=None)
result = self.client_patch(f"/json/streams/{stream.id}", {"new_name": "stream_name2"})
self.assert_json_success(result)
# Inspect the notification message sent
message = self.get_last_message()
actual_stream = Stream.objects.get(id=message.recipient.type_id)
message_content = f"@_**King Hamlet|{user_profile.id}** renamed stream **stream_name1** to **stream_name2**."
self.assertEqual(actual_stream.name, "stream_name2")
self.assertEqual(actual_stream.realm_id, user_profile.realm_id)
self.assertEqual(message.recipient.type, Recipient.STREAM)
self.assertEqual(message.content, message_content)
self.assertEqual(message.sender.email, "notification-bot@zulip.com")
self.assertEqual(message.sender.realm, get_realm(settings.SYSTEM_BOT_REALM))
def test_realm_admin_can_update_unsub_private_stream(self) -> None:
iago = self.example_user("iago")
hamlet = self.example_user("hamlet")
self.login_user(iago)
result = self.common_subscribe_to_streams(
iago,
["private_stream"],
dict(principals=orjson.dumps([hamlet.id]).decode()),
invite_only=True,
)
self.assert_json_success(result)
stream_id = get_stream("private_stream", iago.realm).id
result = self.client_patch(f"/json/streams/{stream_id}", {"new_name": "new_private_stream"})
self.assert_json_success(result)
result = self.client_patch(
f"/json/streams/{stream_id}",
{"description": "new description"},
)
self.assert_json_success(result)
# But cannot change stream type.
result = self.client_patch(
f"/json/streams/{stream_id}",
{
"is_private": orjson.dumps(True).decode(),
},
)
self.assert_json_error(result, "Invalid stream ID")
def test_non_admin_cannot_access_unsub_private_stream(self) -> None:
iago = self.example_user("iago")
hamlet = self.example_user("hamlet")
self.login_user(hamlet)
result = self.common_subscribe_to_streams(
hamlet,
["private_stream_1"],
dict(principals=orjson.dumps([iago.id]).decode()),
invite_only=True,
)
self.assert_json_success(result)
stream_id = get_stream("private_stream_1", hamlet.realm).id
result = self.client_patch(f"/json/streams/{stream_id}", {"new_name": "private_stream_2"})
self.assert_json_error(result, "Invalid stream ID")
result = self.client_patch(
f"/json/streams/{stream_id}",
{"description": "new description"},
)
self.assert_json_error(result, "Invalid stream ID")
result = self.client_patch(
f"/json/streams/{stream_id}",
{
"is_private": orjson.dumps(True).decode(),
},
)
self.assert_json_error(result, "Invalid stream ID")
result = self.client_delete(f"/json/streams/{stream_id}")
self.assert_json_error(result, "Invalid stream ID")
def test_change_stream_description(self) -> None:
user_profile = self.example_user("iago")
self.login_user(user_profile)
realm = user_profile.realm
self.subscribe(user_profile, "stream_name1")
with self.capture_send_event_calls(expected_num_events=2) as events:
stream_id = get_stream("stream_name1", realm).id
result = self.client_patch(
f"/json/streams/{stream_id}",
{"description": "Test description"},
)
self.assert_json_success(result)
event = events[0]["event"]
self.assertEqual(
event,
dict(
op="update",
type="stream",
property="description",
value="Test description",
rendered_description="<p>Test description</p>",
stream_id=stream_id,
name="stream_name1",
),
)
notified_user_ids = set(events[0]["users"])
stream = get_stream("stream_name1", realm)
self.assertEqual(notified_user_ids, set(active_non_guest_user_ids(realm.id)))
self.assertIn(user_profile.id, notified_user_ids)
self.assertIn(self.example_user("prospero").id, notified_user_ids)
self.assertNotIn(self.example_user("polonius").id, notified_user_ids)
self.assertEqual("Test description", stream.description)
result = self.client_patch(f"/json/streams/{stream_id}", {"description": "a" * 1025})
self.assert_json_error(
result,
f"description is too long (limit: {Stream.MAX_DESCRIPTION_LENGTH} characters)",
)
result = self.client_patch(
f"/json/streams/{stream_id}",
{"description": ""},
)
self.assert_json_success(result)
stream = get_stream("stream_name1", realm)
self.assertEqual(stream.description, "")
messages = get_topic_messages(user_profile, stream, "stream events")
expected_notification = (
f"@_**{user_profile.full_name}|{user_profile.id}** changed the description for this stream.\n\n"
"* **Old description:**\n"
"```` quote\n"
"Test description\n"
"````\n"
"* **New description:**\n"
"```` quote\n"
"*No description.*\n"
"````"
)
self.assertEqual(messages[-1].content, expected_notification)
result = self.client_patch(
f"/json/streams/{stream_id}",
{"description": "Test description"},
)
self.assert_json_success(result)
stream = get_stream("stream_name1", realm)
self.assertEqual(stream.description, "Test description")
messages = get_topic_messages(user_profile, stream, "stream events")
expected_notification = (
f"@_**{user_profile.full_name}|{user_profile.id}** changed the description for this stream.\n\n"
"* **Old description:**\n"
"```` quote\n"
"*No description.*\n"
"````\n"
"* **New description:**\n"
"```` quote\n"
"Test description\n"
"````"
)
self.assertEqual(messages[-1].content, expected_notification)
result = self.client_patch(
f"/json/streams/{stream_id}",
{"description": "a\nmulti\nline\ndescription"},
)
self.assert_json_success(result)
stream = get_stream("stream_name1", realm)
self.assertEqual(stream.description, "a multi line description")
messages = get_topic_messages(user_profile, stream, "stream events")
expected_notification = (
f"@_**{user_profile.full_name}|{user_profile.id}** changed the description for this stream.\n\n"
"* **Old description:**\n"
"```` quote\n"
"Test description\n"
"````\n"
"* **New description:**\n"
"```` quote\n"
"a multi line description\n"
"````"
)
self.assertEqual(messages[-1].content, expected_notification)
realm_audit_log = RealmAuditLog.objects.filter(
event_type=RealmAuditLog.STREAM_PROPERTY_CHANGED,
modified_stream=stream,
).last()
assert realm_audit_log is not None
expected_extra_data = {
RealmAuditLog.OLD_VALUE: "Test description",
RealmAuditLog.NEW_VALUE: "a multi line description",
"property": "description",
}
self.assertEqual(realm_audit_log.extra_data, expected_extra_data)
# Verify that we don't render inline URL previews in this code path.
with self.settings(INLINE_URL_EMBED_PREVIEW=True):
result = self.client_patch(
f"/json/streams/{stream_id}",
{"description": "See https://zulip.com/team/"},
)
self.assert_json_success(result)
stream = get_stream("stream_name1", realm)
self.assertEqual(
stream.rendered_description,
'<p>See <a href="https://zulip.com/team/">https://zulip.com/team/</a></p>',
)
def test_change_stream_description_requires_admin(self) -> None:
user_profile = self.example_user("hamlet")
self.login_user(user_profile)
self.subscribe(user_profile, "stream_name1")
do_change_user_role(user_profile, UserProfile.ROLE_MEMBER, acting_user=None)
stream_id = get_stream("stream_name1", user_profile.realm).id
result = self.client_patch(
f"/json/streams/{stream_id}", {"description": "Test description"}
)
self.assert_json_error(result, "Must be an organization administrator")
def test_change_to_stream_post_policy_admins(self) -> None:
user_profile = self.example_user("hamlet")
self.login_user(user_profile)
self.subscribe(user_profile, "stream_name1")
do_change_user_role(user_profile, UserProfile.ROLE_REALM_ADMINISTRATOR, acting_user=None)
stream_id = get_stream("stream_name1", user_profile.realm).id
result = self.client_patch(
f"/json/streams/{stream_id}", {"is_announcement_only": orjson.dumps(True).decode()}
)
self.assert_json_success(result)
stream = get_stream("stream_name1", user_profile.realm)
self.assertEqual(stream.stream_post_policy, Stream.STREAM_POST_POLICY_ADMINS)
messages = get_topic_messages(user_profile, stream, "stream events")
expected_notification = (
f"@_**{user_profile.full_name}|{user_profile.id}** changed the "
"[posting permissions](/help/stream-sending-policy) for this stream:\n\n"
"* **Old permissions**: All stream members can post.\n"
"* **New permissions**: Only organization administrators can post."
)
self.assertEqual(messages[-1].content, expected_notification)
realm_audit_log = RealmAuditLog.objects.filter(
event_type=RealmAuditLog.STREAM_PROPERTY_CHANGED,
modified_stream=stream,
).last()
assert realm_audit_log is not None
expected_extra_data = {
RealmAuditLog.OLD_VALUE: Stream.STREAM_POST_POLICY_EVERYONE,
RealmAuditLog.NEW_VALUE: Stream.STREAM_POST_POLICY_ADMINS,
"property": "stream_post_policy",
}
self.assertEqual(realm_audit_log.extra_data, expected_extra_data)
def test_change_stream_post_policy_requires_admin(self) -> None:
user_profile = self.example_user("hamlet")
self.login_user(user_profile)
stream = self.subscribe(user_profile, "stream_name1")
do_change_user_role(user_profile, UserProfile.ROLE_MEMBER, acting_user=None)
do_set_realm_property(user_profile.realm, "waiting_period_threshold", 10, acting_user=None)
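        # Regardless of tenure (new or full member), a non-admin cannot
        # change the stream post policy.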
def test_non_admin(how_old: int, is_new: bool, policy: int) -> None:
user_profile.date_joined = timezone_now() - timedelta(days=how_old)
user_profile.save()
self.assertEqual(user_profile.is_provisional_member, is_new)
stream_id = get_stream("stream_name1", user_profile.realm).id
result = self.client_patch(
f"/json/streams/{stream_id}", {"stream_post_policy": orjson.dumps(policy).decode()}
)
self.assert_json_error(result, "Must be an organization administrator")
policies = [
Stream.STREAM_POST_POLICY_ADMINS,
Stream.STREAM_POST_POLICY_MODERATORS,
Stream.STREAM_POST_POLICY_RESTRICT_NEW_MEMBERS,
]
for policy in policies:
test_non_admin(how_old=15, is_new=False, policy=policy)
test_non_admin(how_old=5, is_new=True, policy=policy)
do_change_user_role(user_profile, UserProfile.ROLE_REALM_ADMINISTRATOR, acting_user=None)
for policy in policies:
stream = get_stream("stream_name1", user_profile.realm)
old_post_policy = stream.stream_post_policy
result = self.client_patch(
f"/json/streams/{stream.id}", {"stream_post_policy": orjson.dumps(policy).decode()}
)
self.assert_json_success(result)
stream = get_stream("stream_name1", user_profile.realm)
self.assertEqual(stream.stream_post_policy, policy)
messages = get_topic_messages(user_profile, stream, "stream events")
expected_notification = (
f"@_**{user_profile.full_name}|{user_profile.id}** changed the "
"[posting permissions](/help/stream-sending-policy) for this stream:\n\n"
f"* **Old permissions**: {Stream.POST_POLICIES[old_post_policy]}.\n"
f"* **New permissions**: {Stream.POST_POLICIES[policy]}."
)
self.assertEqual(messages[-1].content, expected_notification)
realm_audit_log = RealmAuditLog.objects.filter(
event_type=RealmAuditLog.STREAM_PROPERTY_CHANGED,
modified_stream=stream,
).last()
assert realm_audit_log is not None
expected_extra_data = {
RealmAuditLog.OLD_VALUE: old_post_policy,
RealmAuditLog.NEW_VALUE: policy,
"property": "stream_post_policy",
}
self.assertEqual(realm_audit_log.extra_data, expected_extra_data)
def test_change_stream_message_retention_days_notifications(self) -> None:
user_profile = self.example_user("desdemona")
self.login_user(user_profile)
realm = user_profile.realm
do_change_realm_plan_type(realm, Realm.PLAN_TYPE_SELF_HOSTED, acting_user=None)
stream = self.subscribe(user_profile, "stream_name1")
# Go from realm default (forever) to 2 days
result = self.client_patch(
f"/json/streams/{stream.id}", {"message_retention_days": orjson.dumps(2).decode()}
)
self.assert_json_success(result)
messages = get_topic_messages(user_profile, stream, "stream events")
self.assert_length(messages, 1)
expected_notification = (
f"@_**Desdemona|{user_profile.id}** has changed the [message retention period](/help/message-retention-policy) for this stream:\n"
"* **Old retention period**: Forever\n"
"* **New retention period**: 2 days\n\n"
"Messages in this stream will now be automatically deleted 2 days after they are sent."
)
self.assertEqual(messages[0].content, expected_notification)
realm_audit_log = RealmAuditLog.objects.filter(
event_type=RealmAuditLog.STREAM_MESSAGE_RETENTION_DAYS_CHANGED
).last()
assert realm_audit_log is not None
expected_extra_data = {RealmAuditLog.OLD_VALUE: None, RealmAuditLog.NEW_VALUE: 2}
self.assertEqual(realm_audit_log.extra_data, expected_extra_data)
# Go from 2 days to 8 days
result = self.client_patch(
f"/json/streams/{stream.id}", {"message_retention_days": orjson.dumps(8).decode()}
)
self.assert_json_success(result)
messages = get_topic_messages(user_profile, stream, "stream events")
self.assert_length(messages, 2)
expected_notification = (
f"@_**Desdemona|{user_profile.id}** has changed the [message retention period](/help/message-retention-policy) for this stream:\n"
"* **Old retention period**: 2 days\n"
"* **New retention period**: 8 days\n\n"
"Messages in this stream will now be automatically deleted 8 days after they are sent."
)
self.assertEqual(messages[1].content, expected_notification)
realm_audit_log = RealmAuditLog.objects.filter(
event_type=RealmAuditLog.STREAM_MESSAGE_RETENTION_DAYS_CHANGED
).last()
assert realm_audit_log is not None
expected_extra_data = {RealmAuditLog.OLD_VALUE: 2, RealmAuditLog.NEW_VALUE: 8}
self.assertEqual(realm_audit_log.extra_data, expected_extra_data)
# Go from 8 days to realm default (None on stream, forever/-1 on realm)
result = self.client_patch(
f"/json/streams/{stream.id}",
{"message_retention_days": orjson.dumps("realm_default").decode()},
)
self.assert_json_success(result)
messages = get_topic_messages(user_profile, stream, "stream events")
self.assert_length(messages, 3)
expected_notification = (
f"@_**Desdemona|{user_profile.id}** has changed the [message retention period](/help/message-retention-policy) for this stream:\n"
"* **Old retention period**: 8 days\n"
"* **New retention period**: Forever\n\n"
"Messages in this stream will now be retained forever."
)
self.assertEqual(messages[2].content, expected_notification)
realm_audit_log = RealmAuditLog.objects.filter(
event_type=RealmAuditLog.STREAM_MESSAGE_RETENTION_DAYS_CHANGED
).last()
assert realm_audit_log is not None
expected_extra_data = {
RealmAuditLog.OLD_VALUE: 8,
RealmAuditLog.NEW_VALUE: None,
}
self.assertEqual(realm_audit_log.extra_data, expected_extra_data)
def test_change_stream_message_retention_days(self) -> None:
user_profile = self.example_user("desdemona")
self.login_user(user_profile)
realm = user_profile.realm
do_change_realm_plan_type(realm, Realm.PLAN_TYPE_LIMITED, acting_user=None)
stream = self.subscribe(user_profile, "stream_name1")
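        # Setting a custom retention policy is gated on the realm's plan
        # type.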
result = self.client_patch(
f"/json/streams/{stream.id}", {"message_retention_days": orjson.dumps(2).decode()}
)
self.assert_json_error(result, "Available on Zulip Cloud Standard. Upgrade to access.")
do_change_realm_plan_type(realm, Realm.PLAN_TYPE_SELF_HOSTED, acting_user=None)
with self.capture_send_event_calls(expected_num_events=2) as events:
result = self.client_patch(
f"/json/streams/{stream.id}", {"message_retention_days": orjson.dumps(2).decode()}
)
self.assert_json_success(result)
event = events[0]["event"]
self.assertEqual(
event,
dict(
op="update",
type="stream",
property="message_retention_days",
value=2,
stream_id=stream.id,
name="stream_name1",
),
)
notified_user_ids = set(events[0]["users"])
stream = get_stream("stream_name1", realm)
self.assertEqual(notified_user_ids, set(active_non_guest_user_ids(realm.id)))
self.assertIn(user_profile.id, notified_user_ids)
self.assertIn(self.example_user("prospero").id, notified_user_ids)
self.assertNotIn(self.example_user("polonius").id, notified_user_ids)
self.assertEqual(stream.message_retention_days, 2)
with self.capture_send_event_calls(expected_num_events=2) as events:
result = self.client_patch(
f"/json/streams/{stream.id}",
{"message_retention_days": orjson.dumps("unlimited").decode()},
)
self.assert_json_success(result)
event = events[0]["event"]
self.assertEqual(
event,
dict(
op="update",
type="stream",
property="message_retention_days",
value=-1,
stream_id=stream.id,
name="stream_name1",
),
)
self.assert_json_success(result)
stream = get_stream("stream_name1", realm)
self.assertEqual(stream.message_retention_days, -1)
with self.capture_send_event_calls(expected_num_events=2) as events:
result = self.client_patch(
f"/json/streams/{stream.id}",
{"message_retention_days": orjson.dumps("realm_default").decode()},
)
self.assert_json_success(result)
event = events[0]["event"]
self.assertEqual(
event,
dict(
op="update",
type="stream",
property="message_retention_days",
value=None,
stream_id=stream.id,
name="stream_name1",
),
)
stream = get_stream("stream_name1", realm)
self.assertEqual(stream.message_retention_days, None)
result = self.client_patch(
f"/json/streams/{stream.id}",
{"message_retention_days": orjson.dumps("invalid").decode()},
)
self.assert_json_error(result, "Bad value for 'message_retention_days': invalid")
result = self.client_patch(
f"/json/streams/{stream.id}", {"message_retention_days": orjson.dumps(-1).decode()}
)
self.assert_json_error(result, "Bad value for 'message_retention_days': -1")
result = self.client_patch(
f"/json/streams/{stream.id}", {"message_retention_days": orjson.dumps(0).decode()}
)
self.assert_json_error(result, "Bad value for 'message_retention_days': 0")
def test_change_stream_message_retention_days_requires_realm_owner(self) -> None:
user_profile = self.example_user("iago")
self.login_user(user_profile)
realm = user_profile.realm
stream = self.subscribe(user_profile, "stream_name1")
result = self.client_patch(
f"/json/streams/{stream.id}", {"message_retention_days": orjson.dumps(2).decode()}
)
self.assert_json_error(result, "Must be an organization owner")
do_change_user_role(user_profile, UserProfile.ROLE_REALM_OWNER, acting_user=None)
result = self.client_patch(
f"/json/streams/{stream.id}", {"message_retention_days": orjson.dumps(2).decode()}
)
self.assert_json_success(result)
stream = get_stream("stream_name1", realm)
self.assertEqual(stream.message_retention_days, 2)
def test_change_stream_can_remove_subscribers_group(self) -> None:
user_profile = self.example_user("iago")
realm = user_profile.realm
stream = self.subscribe(user_profile, "stream_name1")
moderators_system_group = UserGroup.objects.get(
name="role:moderators", realm=realm, is_system_group=True
)
self.login("shiva")
result = self.client_patch(
f"/json/streams/{stream.id}",
{"can_remove_subscribers_group": orjson.dumps(moderators_system_group.id).decode()},
)
self.assert_json_error(result, "Must be an organization administrator")
self.login("iago")
result = self.client_patch(
f"/json/streams/{stream.id}",
{"can_remove_subscribers_group": orjson.dumps(moderators_system_group.id).decode()},
)
self.assert_json_success(result)
stream = get_stream("stream_name1", realm)
self.assertEqual(stream.can_remove_subscribers_group.id, moderators_system_group.id)
# This setting can only be set to system groups.
hamletcharacters_group = UserGroup.objects.get(name="hamletcharacters", realm=realm)
result = self.client_patch(
f"/json/streams/{stream.id}",
{"can_remove_subscribers_group": orjson.dumps(hamletcharacters_group.id).decode()},
)
self.assert_json_error(
result, "'can_remove_subscribers_group' must be a system user group."
)
internet_group = UserGroup.objects.get(
name="role:internet", is_system_group=True, realm=realm
)
result = self.client_patch(
f"/json/streams/{stream.id}",
{"can_remove_subscribers_group": orjson.dumps(internet_group.id).decode()},
)
self.assert_json_error(
result,
"'can_remove_subscribers_group' setting cannot be set to 'role:internet' group.",
)
owners_group = UserGroup.objects.get(name="role:owners", is_system_group=True, realm=realm)
result = self.client_patch(
f"/json/streams/{stream.id}",
{"can_remove_subscribers_group": orjson.dumps(owners_group.id).decode()},
)
self.assert_json_error(
result,
"'can_remove_subscribers_group' setting cannot be set to 'role:owners' group.",
)
nobody_group = UserGroup.objects.get(name="role:nobody", is_system_group=True, realm=realm)
result = self.client_patch(
f"/json/streams/{stream.id}",
{"can_remove_subscribers_group": orjson.dumps(nobody_group.id).decode()},
)
self.assert_json_error(
result,
"'can_remove_subscribers_group' setting cannot be set to 'role:nobody' group.",
)
# For private streams, even admins must be subscribed to the stream to change
# can_remove_subscribers_group setting.
stream = self.make_stream("stream_name2", invite_only=True)
result = self.client_patch(
f"/json/streams/{stream.id}",
{"can_remove_subscribers_group": orjson.dumps(moderators_system_group.id).decode()},
)
self.assert_json_error(result, "Invalid stream ID")
self.subscribe(user_profile, "stream_name2")
result = self.client_patch(
f"/json/streams/{stream.id}",
{"can_remove_subscribers_group": orjson.dumps(moderators_system_group.id).decode()},
)
self.assert_json_success(result)
stream = get_stream("stream_name2", realm)
self.assertEqual(stream.can_remove_subscribers_group.id, moderators_system_group.id)
def test_stream_message_retention_days_on_stream_creation(self) -> None:
"""
        Only organization owners can create streams with
        message_retention_days set to a value other than None.
"""
admin = self.example_user("iago")
streams_raw: List[StreamDict] = [
{
"name": "new_stream",
"message_retention_days": 10,
"is_web_public": False,
}
]
with self.assertRaisesRegex(JsonableError, "Must be an organization owner"):
list_to_streams(streams_raw, admin, autocreate=True)
streams_raw = [
{
"name": "new_stream",
"message_retention_days": -1,
"is_web_public": False,
}
]
with self.assertRaisesRegex(JsonableError, "Must be an organization owner"):
list_to_streams(streams_raw, admin, autocreate=True)
streams_raw = [
{
"name": "new_stream",
"message_retention_days": None,
"is_web_public": False,
}
]
result = list_to_streams(streams_raw, admin, autocreate=True)
self.assert_length(result[0], 0)
self.assert_length(result[1], 1)
self.assertEqual(result[1][0].name, "new_stream")
self.assertEqual(result[1][0].message_retention_days, None)
owner = self.example_user("desdemona")
realm = owner.realm
streams_raw = [
{
"name": "new_stream1",
"message_retention_days": 10,
"is_web_public": False,
},
{
"name": "new_stream2",
"message_retention_days": -1,
"is_web_public": False,
},
{
"name": "new_stream3",
"is_web_public": False,
},
]
do_change_realm_plan_type(realm, Realm.PLAN_TYPE_LIMITED, acting_user=admin)
with self.assertRaisesRegex(
JsonableError, "Available on Zulip Cloud Standard. Upgrade to access."
):
list_to_streams(streams_raw, owner, autocreate=True)
do_change_realm_plan_type(realm, Realm.PLAN_TYPE_SELF_HOSTED, acting_user=admin)
result = list_to_streams(streams_raw, owner, autocreate=True)
self.assert_length(result[0], 0)
self.assert_length(result[1], 3)
self.assertEqual(result[1][0].name, "new_stream1")
self.assertEqual(result[1][0].message_retention_days, 10)
self.assertEqual(result[1][1].name, "new_stream2")
self.assertEqual(result[1][1].message_retention_days, -1)
self.assertEqual(result[1][2].name, "new_stream3")
self.assertEqual(result[1][2].message_retention_days, None)
def set_up_stream_for_archiving(
self, stream_name: str, invite_only: bool = False, subscribed: bool = True
) -> Stream:
"""
Create a stream for archiving by an administrator.
"""
user_profile = self.example_user("hamlet")
self.login_user(user_profile)
stream = self.make_stream(stream_name, invite_only=invite_only)
# For testing archiving streams you aren't on.
if subscribed:
self.subscribe(user_profile, stream_name)
do_change_user_role(user_profile, UserProfile.ROLE_REALM_ADMINISTRATOR, acting_user=None)
return stream
def archive_stream(self, stream: Stream) -> None:
"""
Archive the stream and assess the result.
"""
active_name = stream.name
realm = stream.realm
stream_id = stream.id
# Simulate that a stream by the same name has already been
# deactivated, just to exercise our renaming logic:
        # Since we do not know the ID of this simulated stream, we prepend
        # the name with an arbitrary hashed stream ID.
ensure_stream(realm, "DB32B77!DEACTIVATED:" + active_name, acting_user=None)
with self.capture_send_event_calls(expected_num_events=1) as events:
result = self.client_delete("/json/streams/" + str(stream_id))
self.assert_json_success(result)
# We no longer send subscription events for stream deactivations.
sub_events = [e for e in events if e["event"]["type"] == "subscription"]
self.assertEqual(sub_events, [])
stream_events = [e for e in events if e["event"]["type"] == "stream"]
self.assert_length(stream_events, 1)
event = stream_events[0]["event"]
self.assertEqual(event["op"], "delete")
self.assertEqual(event["streams"][0]["stream_id"], stream.id)
with self.assertRaises(Stream.DoesNotExist):
Stream.objects.get(realm=get_realm("zulip"), name=active_name)
        # A deleted stream is renamed, deactivated, made invite-only, and
        # loses all its subscribers.
hashed_stream_id = hashlib.sha512(str(stream_id).encode()).hexdigest()[0:7]
deactivated_stream_name = hashed_stream_id + "!DEACTIVATED:" + active_name
deactivated_stream = get_stream(deactivated_stream_name, realm)
self.assertTrue(deactivated_stream.deactivated)
self.assertTrue(deactivated_stream.invite_only)
self.assertEqual(deactivated_stream.name, deactivated_stream_name)
subscribers = self.users_subscribed_to_stream(deactivated_stream_name, realm)
self.assertEqual(subscribers, [])
# It doesn't show up in the list of public streams anymore.
result = self.client_get("/json/streams", {"include_subscribed": "false"})
public_streams = [s["name"] for s in self.assert_json_success(result)["streams"]]
self.assertNotIn(active_name, public_streams)
self.assertNotIn(deactivated_stream_name, public_streams)
# Even if you could guess the new name, you can't subscribe to it.
result = self.client_post(
"/json/users/me/subscriptions",
{"subscriptions": orjson.dumps([{"name": deactivated_stream_name}]).decode()},
)
self.assert_json_error(result, f"Unable to access stream ({deactivated_stream_name}).")
        # You cannot re-archive a stream that is already deactivated.
with self.capture_send_event_calls(expected_num_events=0) as events:
result = self.client_delete("/json/streams/" + str(stream_id))
self.assert_json_error(result, "Stream is already deactivated")
def test_you_must_be_realm_admin(self) -> None:
"""
        You must be an administrator on the stream's realm to delete it.
"""
user_profile = self.example_user("hamlet")
self.login_user(user_profile)
other_realm = do_create_realm(string_id="other", name="other")
stream = self.make_stream("other_realm_stream", realm=other_realm)
result = self.client_delete("/json/streams/" + str(stream.id))
self.assert_json_error(result, "Invalid stream ID")
# Even becoming a realm admin doesn't help us for an out-of-realm
# stream.
do_change_user_role(user_profile, UserProfile.ROLE_REALM_ADMINISTRATOR, acting_user=None)
result = self.client_delete("/json/streams/" + str(stream.id))
self.assert_json_error(result, "Invalid stream ID")
def test_delete_public_stream(self) -> None:
"""
When an administrator deletes a public stream, that stream is not
visible to users at all anymore.
"""
stream = self.set_up_stream_for_archiving("newstream")
self.archive_stream(stream)
def test_delete_private_stream(self) -> None:
"""
Administrators can delete private streams they are on.
"""
stream = self.set_up_stream_for_archiving("newstream", invite_only=True)
self.archive_stream(stream)
def test_archive_stream_youre_not_on(self) -> None:
"""
        Administrators can delete public streams they aren't on.
"""
pub_stream = self.set_up_stream_for_archiving("pubstream", subscribed=False)
self.archive_stream(pub_stream)
def test_archive_invite_only_stream_youre_not_on(self) -> None:
"""
        Administrators can delete invite-only streams they aren't on.
"""
priv_stream = self.set_up_stream_for_archiving(
"privstream", subscribed=False, invite_only=True
)
self.archive_stream(priv_stream)
def attempt_unsubscribe_of_principal(
self,
target_users: List[UserProfile],
query_count: int,
cache_count: Optional[int] = None,
is_realm_admin: bool = False,
is_subbed: bool = True,
invite_only: bool = False,
target_users_subbed: bool = True,
using_legacy_emails: bool = False,
other_sub_users: Sequence[UserProfile] = [],
) -> "TestHttpResponse":
# Set up the main user, who is in most cases an admin.
if is_realm_admin:
user_profile = self.example_user("iago")
else:
user_profile = self.example_user("hamlet")
self.login_user(user_profile)
# Set up the stream.
stream_name = "hümbüǵ"
self.make_stream(stream_name, invite_only=invite_only)
# Set up the principal to be unsubscribed.
principals = [user.email if using_legacy_emails else user.id for user in target_users]
# Subscribe the admin and/or principal as specified in the flags.
if is_subbed:
self.subscribe(user_profile, stream_name)
if target_users_subbed:
for user in target_users:
self.subscribe(user, stream_name)
for user in other_sub_users:
self.subscribe(user, stream_name)
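        # Verify database query and cache-hit counts to catch performance
        # regressions.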
with self.assert_database_query_count(query_count):
with cache_tries_captured() as cache_tries:
with self.captureOnCommitCallbacks(execute=True):
result = self.client_delete(
"/json/users/me/subscriptions",
{
"subscriptions": orjson.dumps([stream_name]).decode(),
"principals": orjson.dumps(principals).decode(),
},
)
if cache_count is not None:
self.assert_length(cache_tries, cache_count)
# If the removal succeeded, assert all target users are no longer subscribed.
        if result.status_code != 400:
subbed_users = self.users_subscribed_to_stream(stream_name, user_profile.realm)
for user in target_users:
self.assertNotIn(user, subbed_users)
return result
def test_cant_remove_other_users_from_stream(self) -> None:
"""
If you're not an admin, you can't remove other people from streams except your own bots.
"""
result = self.attempt_unsubscribe_of_principal(
query_count=8,
target_users=[self.example_user("cordelia")],
is_realm_admin=False,
is_subbed=True,
invite_only=False,
target_users_subbed=True,
)
self.assert_json_error(result, "Insufficient permission")
def test_realm_admin_remove_others_from_public_stream(self) -> None:
"""
If you're a realm admin, you can remove people from public streams, even
those you aren't on.
"""
result = self.attempt_unsubscribe_of_principal(
query_count=16,
target_users=[self.example_user("cordelia")],
is_realm_admin=True,
is_subbed=True,
invite_only=False,
target_users_subbed=True,
)
json = self.assert_json_success(result)
self.assert_length(json["removed"], 1)
self.assert_length(json["not_removed"], 0)
def test_realm_admin_remove_multiple_users_from_stream(self) -> None:
"""
If you're a realm admin, you can remove multiple users from a stream.
TODO: We have too many queries for this situation--each additional
user leads to 4 more queries.
Fortunately, some of the extra work here is in
do_mark_stream_messages_as_read, which gets deferred
using a queue.
"""
target_users = [
self.example_user(name)
for name in ["cordelia", "prospero", "iago", "hamlet", "outgoing_webhook_bot"]
]
result = self.attempt_unsubscribe_of_principal(
query_count=27,
cache_count=8,
target_users=target_users,
is_realm_admin=True,
is_subbed=True,
invite_only=False,
target_users_subbed=True,
)
json = self.assert_json_success(result)
self.assert_length(json["removed"], 5)
self.assert_length(json["not_removed"], 0)
def test_realm_admin_remove_others_from_subbed_private_stream(self) -> None:
"""
If you're a realm admin, you can remove other people from private streams you
are on.
"""
result = self.attempt_unsubscribe_of_principal(
query_count=17,
target_users=[self.example_user("cordelia")],
is_realm_admin=True,
is_subbed=True,
invite_only=True,
target_users_subbed=True,
)
json = self.assert_json_success(result)
self.assert_length(json["removed"], 1)
self.assert_length(json["not_removed"], 0)
def test_realm_admin_remove_others_from_unsubbed_private_stream(self) -> None:
"""
If you're a realm admin, you can remove people from private
streams you aren't on.
"""
result = self.attempt_unsubscribe_of_principal(
query_count=17,
target_users=[self.example_user("cordelia")],
is_realm_admin=True,
is_subbed=False,
invite_only=True,
target_users_subbed=True,
other_sub_users=[self.example_user("othello")],
)
json = self.assert_json_success(result)
self.assert_length(json["removed"], 1)
self.assert_length(json["not_removed"], 0)
def test_cant_remove_others_from_stream_legacy_emails(self) -> None:
result = self.attempt_unsubscribe_of_principal(
query_count=8,
is_realm_admin=False,
is_subbed=True,
invite_only=False,
target_users=[self.example_user("cordelia")],
target_users_subbed=True,
using_legacy_emails=True,
)
self.assert_json_error(result, "Insufficient permission")
def test_admin_remove_others_from_stream_legacy_emails(self) -> None:
result = self.attempt_unsubscribe_of_principal(
query_count=16,
target_users=[self.example_user("cordelia")],
is_realm_admin=True,
is_subbed=True,
invite_only=False,
target_users_subbed=True,
using_legacy_emails=True,
)
json = self.assert_json_success(result)
self.assert_length(json["removed"], 1)
self.assert_length(json["not_removed"], 0)
def test_admin_remove_multiple_users_from_stream_legacy_emails(self) -> None:
result = self.attempt_unsubscribe_of_principal(
query_count=19,
target_users=[self.example_user("cordelia"), self.example_user("prospero")],
is_realm_admin=True,
is_subbed=True,
invite_only=False,
target_users_subbed=True,
using_legacy_emails=True,
)
json = self.assert_json_success(result)
self.assert_length(json["removed"], 2)
self.assert_length(json["not_removed"], 0)
def test_remove_already_not_subbed(self) -> None:
"""
Trying to unsubscribe someone who already isn't subscribed to a stream
fails gracefully.
"""
result = self.attempt_unsubscribe_of_principal(
query_count=11,
target_users=[self.example_user("cordelia")],
is_realm_admin=True,
is_subbed=False,
invite_only=False,
target_users_subbed=False,
)
json = self.assert_json_success(result)
self.assert_length(json["removed"], 0)
self.assert_length(json["not_removed"], 1)
def test_bot_owner_can_remove_bot_from_stream(self) -> None:
user_profile = self.example_user("hamlet")
webhook_bot = self.example_user("webhook_bot")
do_change_bot_owner(webhook_bot, bot_owner=user_profile, acting_user=user_profile)
result = self.attempt_unsubscribe_of_principal(
query_count=13,
target_users=[webhook_bot],
is_realm_admin=False,
is_subbed=True,
invite_only=False,
target_users_subbed=True,
)
self.assert_json_success(result)
def test_non_bot_owner_cannot_remove_bot_from_stream(self) -> None:
other_user = self.example_user("cordelia")
webhook_bot = self.example_user("webhook_bot")
do_change_bot_owner(webhook_bot, bot_owner=other_user, acting_user=other_user)
result = self.attempt_unsubscribe_of_principal(
query_count=8,
target_users=[webhook_bot],
is_realm_admin=False,
is_subbed=True,
invite_only=False,
target_users_subbed=True,
)
self.assert_json_error(result, "Insufficient permission")
def test_can_remove_subscribers_group(self) -> None:
realm = get_realm("zulip")
leadership_group = check_add_user_group(
realm,
"leadership",
[self.example_user("iago"), self.example_user("shiva")],
acting_user=None,
)
managers_group = check_add_user_group(
realm, "managers", [self.example_user("hamlet")], acting_user=None
)
add_subgroups_to_user_group(managers_group, [leadership_group], acting_user=None)
cordelia = self.example_user("cordelia")
stream = self.make_stream("public_stream")
def check_unsubscribing_user(
user: UserProfile, can_remove_subscribers_group: UserGroup, expect_fail: bool = False
) -> None:
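    # Set can_remove_subscribers_group on the stream, then check whether
    # `user` is allowed to unsubscribe Cordelia under that setting.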
self.login_user(user)
self.subscribe(cordelia, stream.name)
do_change_stream_group_based_setting(
stream,
"can_remove_subscribers_group",
can_remove_subscribers_group,
acting_user=None,
)
result = self.client_delete(
"/json/users/me/subscriptions",
{
"subscriptions": orjson.dumps([stream.name]).decode(),
"principals": orjson.dumps([cordelia.id]).decode(),
},
)
if expect_fail:
self.assert_json_error(result, "Insufficient permission")
return
json = self.assert_json_success(result)
self.assert_length(json["removed"], 1)
self.assert_length(json["not_removed"], 0)
check_unsubscribing_user(self.example_user("hamlet"), leadership_group, expect_fail=True)
check_unsubscribing_user(self.example_user("desdemona"), leadership_group, expect_fail=True)
check_unsubscribing_user(self.example_user("iago"), leadership_group)
check_unsubscribing_user(self.example_user("othello"), managers_group, expect_fail=True)
check_unsubscribing_user(self.example_user("shiva"), managers_group)
check_unsubscribing_user(self.example_user("hamlet"), managers_group)
stream = self.make_stream("private_stream", invite_only=True)
self.subscribe(self.example_user("hamlet"), stream.name)
# Non-admins are not allowed to unsubscribe others from private streams that they
# are not subscribed to even if they are member of the allowed group.
check_unsubscribing_user(self.example_user("shiva"), leadership_group, expect_fail=True)
check_unsubscribing_user(self.example_user("iago"), leadership_group)
self.subscribe(self.example_user("shiva"), stream.name)
check_unsubscribing_user(self.example_user("shiva"), leadership_group)
def test_remove_invalid_user(self) -> None:
"""
Trying to unsubscribe an invalid user from a stream fails gracefully.
"""
admin = self.example_user("iago")
self.login_user(admin)
self.assertTrue(admin.is_realm_admin)
stream_name = "hümbüǵ"
self.make_stream(stream_name)
result = self.client_delete(
"/json/users/me/subscriptions",
{
"subscriptions": orjson.dumps([stream_name]).decode(),
"principals": orjson.dumps([99]).decode(),
},
)
self.assert_json_error(
result, "User not authorized to execute queries on behalf of '99'", status_code=403
)
class DefaultStreamTest(ZulipTestCase):
def get_default_stream_names(self, realm: Realm) -> Set[str]:
streams = get_default_streams_for_realm_as_dicts(realm.id)
return {s["name"] for s in streams}
def test_query_count(self) -> None:
DefaultStream.objects.all().delete()
realm = get_realm("zulip")
new_stream_ids = set()
for i in range(5):
stream = ensure_stream(realm, f"stream {i}", acting_user=None)
new_stream_ids.add(stream.id)
do_add_default_stream(stream)
with queries_captured() as queries:
default_streams = get_default_streams_for_realm_as_dicts(realm.id)
self.assert_length(queries, 1)
self.assert_length(default_streams, 5)
self.assertEqual({dct["stream_id"] for dct in default_streams}, new_stream_ids)
# Make sure our query isn't some bloated select_related query.
self.assertLess(len(queries[0].sql), 800)
with queries_captured() as queries:
default_stream_ids = get_default_stream_ids_for_realm(realm.id)
self.assert_length(queries, 1)
self.assertEqual(default_stream_ids, new_stream_ids)
def test_add_and_remove_default_stream(self) -> None:
realm = get_realm("zulip")
stream = ensure_stream(realm, "Added stream", acting_user=None)
orig_stream_names = self.get_default_stream_names(realm)
do_add_default_stream(stream)
new_stream_names = self.get_default_stream_names(realm)
added_stream_names = new_stream_names - orig_stream_names
self.assertEqual(added_stream_names, {"Added stream"})
# idempotency--2nd call to add_default_stream should be a noop
do_add_default_stream(stream)
self.assertEqual(self.get_default_stream_names(realm), new_stream_names)
# start removing
do_remove_default_stream(stream)
self.assertEqual(self.get_default_stream_names(realm), orig_stream_names)
# idempotency--2nd call to remove_default_stream should be a noop
do_remove_default_stream(stream)
self.assertEqual(self.get_default_stream_names(realm), orig_stream_names)
def test_api_calls(self) -> None:
user_profile = self.example_user("hamlet")
do_change_user_role(user_profile, UserProfile.ROLE_REALM_ADMINISTRATOR, acting_user=None)
self.login_user(user_profile)
DefaultStream.objects.filter(realm=user_profile.realm).delete()
stream_name = "stream ADDED via api"
stream = ensure_stream(user_profile.realm, stream_name, acting_user=None)
result = self.client_post("/json/default_streams", dict(stream_id=stream.id))
self.assert_json_success(result)
self.assertTrue(stream_name in self.get_default_stream_names(user_profile.realm))
# Look for it in the /json/streams response.
self.subscribe(user_profile, stream_name)
payload = dict(
include_public="true",
include_default="true",
)
result = self.client_get("/json/streams", payload)
streams = self.assert_json_success(result)["streams"]
default_streams = {stream["name"] for stream in streams if stream["is_default"]}
self.assertEqual(default_streams, {stream_name})
other_streams = {stream["name"] for stream in streams if not stream["is_default"]}
self.assertGreater(len(other_streams), 0)
# and remove it
result = self.client_delete("/json/default_streams", dict(stream_id=stream.id))
self.assert_json_success(result)
self.assertFalse(stream_name in self.get_default_stream_names(user_profile.realm))
# Test admin can't access unsubscribed private stream for adding.
stream_name = "private_stream"
stream = self.make_stream(stream_name, invite_only=True)
self.subscribe(self.example_user("iago"), stream_name)
result = self.client_post("/json/default_streams", dict(stream_id=stream.id))
self.assert_json_error(result, "Invalid stream ID")
# Test admin can't add subscribed private stream also.
self.subscribe(user_profile, stream_name)
result = self.client_post("/json/default_streams", dict(stream_id=stream.id))
self.assert_json_error(result, "Private streams cannot be made default.")
def test_guest_user_access_to_streams(self) -> None:
user_profile = self.example_user("polonius")
self.login_user(user_profile)
self.assertEqual(user_profile.role, UserProfile.ROLE_GUEST)
# Get all the streams that Polonius has access to (subscribed + web-public streams)
result = self.client_get("/json/streams", {"include_web_public": "true"})
streams = self.assert_json_success(result)["streams"]
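# gather_subscriptions_helper should partition exactly the same set of
# streams that are accessible to the guest.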
sub_info = gather_subscriptions_helper(user_profile)
subscribed = sub_info.subscriptions
unsubscribed = sub_info.unsubscribed
never_subscribed = sub_info.never_subscribed
self.assert_length(streams, len(subscribed) + len(unsubscribed) + len(never_subscribed))
stream_names = [stream["name"] for stream in streams]
expected_stream_names = [stream["name"] for stream in subscribed + unsubscribed]
expected_stream_names += [stream["name"] for stream in never_subscribed]
self.assertEqual(set(stream_names), set(expected_stream_names))
class DefaultStreamGroupTest(ZulipTestCase):
def test_create_update_and_remove_default_stream_group(self) -> None:
realm = get_realm("zulip")
# Test creating new default stream group
default_stream_groups = get_default_stream_groups(realm)
self.assert_length(default_stream_groups, 0)
streams = [
ensure_stream(realm, stream_name, acting_user=None)
for stream_name in ["stream1", "stream2", "stream3"]
]
def get_streams(group: DefaultStreamGroup) -> List[Stream]:
return list(group.streams.all().order_by("name"))
group_name = "group1"
description = "This is group1"
do_create_default_stream_group(realm, group_name, description, streams)
default_stream_groups = get_default_stream_groups(realm)
self.assert_length(default_stream_groups, 1)
self.assertEqual(default_stream_groups[0].name, group_name)
self.assertEqual(default_stream_groups[0].description, description)
self.assertEqual(get_streams(default_stream_groups[0]), streams)
# Test adding streams to existing default stream group
group = lookup_default_stream_groups(["group1"], realm)[0]
new_stream_names = [
"stream4",
"stream5",
"stream6",
"stream7",
"stream8",
"stream9",
]
new_streams = [
ensure_stream(realm, new_stream_name, acting_user=None)
for new_stream_name in new_stream_names
]
streams += new_streams
do_add_streams_to_default_stream_group(realm, group, new_streams)
default_stream_groups = get_default_stream_groups(realm)
self.assert_length(default_stream_groups, 1)
self.assertEqual(default_stream_groups[0].name, group_name)
self.assertEqual(get_streams(default_stream_groups[0]), streams)
# Test removing streams from existing default stream group
with self.assert_database_query_count(5):
do_remove_streams_from_default_stream_group(realm, group, new_streams)
remaining_streams = streams[0:3]
default_stream_groups = get_default_stream_groups(realm)
self.assert_length(default_stream_groups, 1)
self.assertEqual(default_stream_groups[0].name, group_name)
self.assertEqual(get_streams(default_stream_groups[0]), remaining_streams)
# Test changing default stream group description
new_description = "group1 new description"
do_change_default_stream_group_description(realm, group, new_description)
default_stream_groups = get_default_stream_groups(realm)
self.assertEqual(default_stream_groups[0].description, new_description)
self.assert_length(default_stream_groups, 1)
# Test changing default stream group name
new_group_name = "new group1"
do_change_default_stream_group_name(realm, group, new_group_name)
default_stream_groups = get_default_stream_groups(realm)
self.assert_length(default_stream_groups, 1)
self.assertEqual(default_stream_groups[0].name, new_group_name)
self.assertEqual(get_streams(default_stream_groups[0]), remaining_streams)
# Test removing default stream group
do_remove_default_stream_group(realm, group)
default_stream_groups = get_default_stream_groups(realm)
self.assert_length(default_stream_groups, 0)
# Test creating a default stream group which contains a default stream
do_add_default_stream(remaining_streams[0])
with self.assertRaisesRegex(
JsonableError, "'stream1' is a default stream and cannot be added to 'new group1'"
):
do_create_default_stream_group(
realm, new_group_name, "This is group1", remaining_streams
)
def test_api_calls(self) -> None:
self.login("hamlet")
user_profile = self.example_user("hamlet")
realm = user_profile.realm
do_change_user_role(user_profile, UserProfile.ROLE_REALM_ADMINISTRATOR, acting_user=None)
# Test creating new default stream group
stream_names = ["stream1", "stream2", "stream3"]
group_name = "group1"
description = "This is group1"
default_stream_groups = get_default_stream_groups(realm)
self.assert_length(default_stream_groups, 0)
streams = [
ensure_stream(realm, stream_name, acting_user=None) for stream_name in stream_names
]
result = self.client_post(
"/json/default_stream_groups/create",
{
"group_name": group_name,
"description": description,
"stream_names": orjson.dumps(stream_names).decode(),
},
)
self.assert_json_success(result)
default_stream_groups = get_default_stream_groups(realm)
self.assert_length(default_stream_groups, 1)
self.assertEqual(default_stream_groups[0].name, group_name)
self.assertEqual(default_stream_groups[0].description, description)
self.assertEqual(list(default_stream_groups[0].streams.all().order_by("id")), streams)
# Try adding the same streams to the group.
result = self.client_post(
"/json/default_stream_groups/create",
{
"group_name": group_name,
"description": description,
"stream_names": orjson.dumps(stream_names).decode(),
},
)
self.assert_json_error(result, "Default stream group 'group1' already exists")
# Test adding streams to existing default stream group
group_id = default_stream_groups[0].id
new_stream_names = ["stream4", "stream5"]
new_streams = [
ensure_stream(realm, new_stream_name, acting_user=None)
for new_stream_name in new_stream_names
]
streams += new_streams
result = self.client_patch(
f"/json/default_stream_groups/{group_id}/streams",
{"stream_names": orjson.dumps(new_stream_names).decode()},
)
self.assert_json_error(result, "Missing 'op' argument")
result = self.client_patch(
f"/json/default_stream_groups/{group_id}/streams",
{"op": "invalid", "stream_names": orjson.dumps(new_stream_names).decode()},
)
self.assert_json_error(result, 'Invalid value for "op". Specify one of "add" or "remove".')
result = self.client_patch(
"/json/default_stream_groups/12345/streams",
{"op": "add", "stream_names": orjson.dumps(new_stream_names).decode()},
)
self.assert_json_error(result, "Default stream group with id '12345' does not exist.")
result = self.client_patch(f"/json/default_stream_groups/{group_id}/streams", {"op": "add"})
self.assert_json_error(result, "Missing 'stream_names' argument")
do_add_default_stream(new_streams[0])
result = self.client_patch(
f"/json/default_stream_groups/{group_id}/streams",
{"op": "add", "stream_names": orjson.dumps(new_stream_names).decode()},
)
self.assert_json_error(
result, "'stream4' is a default stream and cannot be added to 'group1'"
)
do_remove_default_stream(new_streams[0])
result = self.client_patch(
f"/json/default_stream_groups/{group_id}/streams",
{"op": "add", "stream_names": orjson.dumps(new_stream_names).decode()},
)
self.assert_json_success(result)
default_stream_groups = get_default_stream_groups(realm)
self.assert_length(default_stream_groups, 1)
self.assertEqual(default_stream_groups[0].name, group_name)
self.assertEqual(list(default_stream_groups[0].streams.all().order_by("name")), streams)
result = self.client_patch(
f"/json/default_stream_groups/{group_id}/streams",
{"op": "add", "stream_names": orjson.dumps(new_stream_names).decode()},
)
self.assert_json_error(
result, "Stream 'stream4' is already present in default stream group 'group1'"
)
# Test removing streams from default stream group
result = self.client_patch(
"/json/default_stream_groups/12345/streams",
{"op": "remove", "stream_names": orjson.dumps(new_stream_names).decode()},
)
self.assert_json_error(result, "Default stream group with id '12345' does not exist.")
result = self.client_patch(
f"/json/default_stream_groups/{group_id}/streams",
{"op": "remove", "stream_names": orjson.dumps(["random stream name"]).decode()},
)
self.assert_json_error(result, "Invalid stream name 'random stream name'")
streams.remove(new_streams[0])
result = self.client_patch(
f"/json/default_stream_groups/{group_id}/streams",
{"op": "remove", "stream_names": orjson.dumps([new_stream_names[0]]).decode()},
)
self.assert_json_success(result)
default_stream_groups = get_default_stream_groups(realm)
self.assert_length(default_stream_groups, 1)
self.assertEqual(default_stream_groups[0].name, group_name)
self.assertEqual(list(default_stream_groups[0].streams.all().order_by("name")), streams)
result = self.client_patch(
f"/json/default_stream_groups/{group_id}/streams",
{"op": "remove", "stream_names": orjson.dumps(new_stream_names).decode()},
)
self.assert_json_error(
result, "Stream 'stream4' is not present in default stream group 'group1'"
)
# Test changing description of default stream group
new_description = "new group1 description"
result = self.client_patch(f"/json/default_stream_groups/{group_id}")
self.assert_json_error(result, 'You must pass "new_description" or "new_group_name".')
result = self.client_patch(
"/json/default_stream_groups/12345",
{"new_description": new_description},
)
self.assert_json_error(result, "Default stream group with id '12345' does not exist.")
result = self.client_patch(
f"/json/default_stream_groups/{group_id}",
{"new_description": new_description},
)
self.assert_json_success(result)
default_stream_groups = get_default_stream_groups(realm)
self.assert_length(default_stream_groups, 1)
self.assertEqual(default_stream_groups[0].name, group_name)
self.assertEqual(default_stream_groups[0].description, new_description)
# Test changing name of default stream group
new_group_name = "new group1"
do_create_default_stream_group(realm, "group2", "", [])
result = self.client_patch(
f"/json/default_stream_groups/{group_id}",
{"new_group_name": "group2"},
)
self.assert_json_error(result, "Default stream group 'group2' already exists")
new_group = lookup_default_stream_groups(["group2"], realm)[0]
do_remove_default_stream_group(realm, new_group)
result = self.client_patch(
f"/json/default_stream_groups/{group_id}",
{"new_group_name": group_name},
)
self.assert_json_error(result, "This default stream group is already named 'group1'")
result = self.client_patch(
f"/json/default_stream_groups/{group_id}",
{"new_group_name": new_group_name},
)
self.assert_json_success(result)
default_stream_groups = get_default_stream_groups(realm)
self.assert_length(default_stream_groups, 1)
self.assertEqual(default_stream_groups[0].name, new_group_name)
self.assertEqual(default_stream_groups[0].description, new_description)
# Test deleting a default stream group
result = self.client_delete(f"/json/default_stream_groups/{group_id}")
self.assert_json_success(result)
default_stream_groups = get_default_stream_groups(realm)
self.assert_length(default_stream_groups, 0)
result = self.client_delete(f"/json/default_stream_groups/{group_id}")
self.assert_json_error(result, f"Default stream group with id '{group_id}' does not exist.")
def test_invalid_default_stream_group_name(self) -> None:
self.login("iago")
user_profile = self.example_user("iago")
realm = user_profile.realm
stream_names = ["stream1", "stream2", "stream3"]
description = "This is group1"
for stream_name in stream_names:
ensure_stream(realm, stream_name, acting_user=None)
result = self.client_post(
"/json/default_stream_groups/create",
{
"group_name": "",
"description": description,
"stream_names": orjson.dumps(stream_names).decode(),
},
)
self.assert_json_error(result, "Invalid default stream group name ''")
result = self.client_post(
"/json/default_stream_groups/create",
{
"group_name": "x" * 100,
"description": description,
"stream_names": orjson.dumps(stream_names).decode(),
},
)
self.assert_json_error(
result,
"Default stream group name too long (limit: {} characters)".format(
DefaultStreamGroup.MAX_NAME_LENGTH
),
)
result = self.client_post(
"/json/default_stream_groups/create",
{
"group_name": "abc\000",
"description": description,
"stream_names": orjson.dumps(stream_names).decode(),
},
)
self.assert_json_error(
result, "Default stream group name 'abc\000' contains NULL (0x00) characters."
)
# Also test that lookup_default_stream_groups raises an
# error if we pass it a bad name. This function is used
# during registration, but it's a bit heavy to do a full
# test of that.
with self.assertRaisesRegex(JsonableError, "Invalid default stream group invalid-name"):
lookup_default_stream_groups(["invalid-name"], realm)
class SubscriptionPropertiesTest(ZulipTestCase):
def test_set_stream_color(self) -> None:
"""
A POST request to /api/v1/users/me/subscriptions/properties with stream_id and
color data sets the stream color, and for that stream only. Also, make sure that
any invalid hex color codes are bounced.
"""
test_user = self.example_user("hamlet")
self.login_user(test_user)
old_subs, _ = gather_subscriptions(test_user)
sub = old_subs[0]
stream_id = sub["stream_id"]
new_color = "#ffffff" # TODO: ensure that this is different from old_color
result = self.api_post(
test_user,
"/api/v1/users/me/subscriptions/properties",
{
"subscription_data": orjson.dumps(
[{"property": "color", "stream_id": stream_id, "value": "#ffffff"}]
).decode()
},
)
self.assert_json_success(result)
new_subs = gather_subscriptions(test_user)[0]
found_sub = None
for sub in new_subs:
if sub["stream_id"] == stream_id:
found_sub = sub
break
assert found_sub is not None
self.assertEqual(found_sub["color"], new_color)
new_subs.remove(found_sub)
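# Remove the recolored stream from both lists; every other
# subscription should be unchanged.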
for sub in old_subs:
if sub["stream_id"] == stream_id:
found_sub = sub
break
old_subs.remove(found_sub)
self.assertEqual(old_subs, new_subs)
invalid_color = "3ffrff"
result = self.api_post(
test_user,
"/api/v1/users/me/subscriptions/properties",
{
"subscription_data": orjson.dumps(
[{"property": "color", "stream_id": stream_id, "value": invalid_color}]
).decode()
},
)
self.assert_json_error(result, "color is not a valid hex color code")
def test_set_color_missing_stream_id(self) -> None:
"""
Updating the color property requires a `stream_id` key.
"""
test_user = self.example_user("hamlet")
self.login_user(test_user)
result = self.api_post(
test_user,
"/api/v1/users/me/subscriptions/properties",
{
"subscription_data": orjson.dumps(
[{"property": "color", "value": "#ffffff"}]
).decode()
},
)
self.assert_json_error(result, "stream_id key is missing from subscription_data[0]")
def test_set_color_unsubscribed_stream_id(self) -> None:
"""
Updating the color property requires a subscribed stream.
"""
test_user = self.example_user("hamlet")
self.login_user(test_user)
sub_info = gather_subscriptions_helper(test_user)
not_subbed = sub_info.never_subscribed
result = self.api_post(
test_user,
"/api/v1/users/me/subscriptions/properties",
{
"subscription_data": orjson.dumps(
[
{
"property": "color",
"stream_id": not_subbed[0]["stream_id"],
"value": "#ffffff",
}
]
).decode()
},
)
self.assert_json_error(
result, "Not subscribed to stream id {}".format(not_subbed[0]["stream_id"])
)
def test_set_color_missing_color(self) -> None:
"""
Updating the color property requires a color.
"""
test_user = self.example_user("hamlet")
self.login_user(test_user)
subs = gather_subscriptions(test_user)[0]
result = self.api_post(
test_user,
"/api/v1/users/me/subscriptions/properties",
{
"subscription_data": orjson.dumps(
[{"property": "color", "stream_id": subs[0]["stream_id"]}]
).decode()
},
)
self.assert_json_error(result, "value key is missing from subscription_data[0]")
def test_set_stream_wildcard_mentions_notify(self) -> None:
"""
A POST request to /api/v1/users/me/subscriptions/properties with wildcard_mentions_notify
sets the property.
"""
test_user = self.example_user("hamlet")
self.login_user(test_user)
subs = gather_subscriptions(test_user)[0]
sub = subs[0]
result = self.api_post(
test_user,
"/api/v1/users/me/subscriptions/properties",
{
"subscription_data": orjson.dumps(
[
{
"property": "wildcard_mentions_notify",
"stream_id": sub["stream_id"],
"value": True,
}
]
).decode()
},
)
self.assert_json_success(result)
updated_sub = get_subscription(sub["name"], test_user)
self.assertIsNotNone(updated_sub)
self.assertEqual(updated_sub.wildcard_mentions_notify, True)
def test_set_pin_to_top(self) -> None:
"""
A POST request to /api/v1/users/me/subscriptions/properties with stream_id and
pin_to_top data pins the stream.
"""
user = self.example_user("hamlet")
self.login_user(user)
old_subs, _ = gather_subscriptions(user)
sub = old_subs[0]
stream_id = sub["stream_id"]
new_pin_to_top = not sub["pin_to_top"]
result = self.api_post(
user,
"/api/v1/users/me/subscriptions/properties",
{
"subscription_data": orjson.dumps(
[{"property": "pin_to_top", "stream_id": stream_id, "value": new_pin_to_top}]
).decode()
},
)
self.assert_json_success(result)
updated_sub = get_subscription(sub["name"], user)
self.assertIsNotNone(updated_sub)
self.assertEqual(updated_sub.pin_to_top, new_pin_to_top)
def test_change_is_muted(self) -> None:
test_user = self.example_user("hamlet")
self.login_user(test_user)
subs = gather_subscriptions(test_user)[0]
sub = Subscription.objects.get(
recipient__type=Recipient.STREAM,
recipient__type_id=subs[0]["stream_id"],
user_profile=test_user,
)
self.assertEqual(sub.is_muted, False)
property_name = "is_muted"
with self.capture_send_event_calls(expected_num_events=2) as events:
result = self.api_post(
test_user,
"/api/v1/users/me/subscriptions/properties",
{
"subscription_data": orjson.dumps(
[
{
"property": property_name,
"value": True,
"stream_id": subs[0]["stream_id"],
}
]
).decode()
},
)
self.assert_json_success(result)
self.assertEqual(events[0]["event"]["property"], "in_home_view")
self.assertEqual(events[0]["event"]["value"], False)
self.assertEqual(events[1]["event"]["property"], "is_muted")
self.assertEqual(events[1]["event"]["value"], True)
sub = Subscription.objects.get(
recipient__type=Recipient.STREAM,
recipient__type_id=subs[0]["stream_id"],
user_profile=test_user,
)
self.assertEqual(sub.is_muted, True)
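# in_home_view is the legacy, inverted alias for is_muted; setting it
# to True unmutes the stream.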
legacy_property_name = "in_home_view"
with self.capture_send_event_calls(expected_num_events=2) as events:
result = self.api_post(
test_user,
"/api/v1/users/me/subscriptions/properties",
{
"subscription_data": orjson.dumps(
[
{
"property": legacy_property_name,
"value": True,
"stream_id": subs[0]["stream_id"],
}
]
).decode()
},
)
self.assert_json_success(result)
self.assertEqual(events[0]["event"]["property"], "in_home_view")
self.assertEqual(events[0]["event"]["value"], True)
self.assertEqual(events[1]["event"]["property"], "is_muted")
self.assertEqual(events[1]["event"]["value"], False)
sub = Subscription.objects.get(
recipient__type=Recipient.STREAM,
recipient__type_id=subs[0]["stream_id"],
user_profile=test_user,
)
self.assertEqual(sub.is_muted, False)
with self.capture_send_event_calls(expected_num_events=2) as events:
result = self.api_post(
test_user,
"/api/v1/users/me/subscriptions/properties",
{
"subscription_data": orjson.dumps(
[
{
"property": legacy_property_name,
"value": False,
"stream_id": subs[0]["stream_id"],
}
]
).decode()
},
)
self.assert_json_success(result)
self.assertEqual(events[0]["event"]["property"], "in_home_view")
self.assertEqual(events[0]["event"]["value"], False)
self.assertEqual(events[1]["event"]["property"], "is_muted")
self.assertEqual(events[1]["event"]["value"], True)
sub = Subscription.objects.get(
recipient__type=Recipient.STREAM,
recipient__type_id=subs[0]["stream_id"],
user_profile=test_user,
)
self.assertEqual(sub.is_muted, True)
def test_set_subscription_property_incorrect(self) -> None:
"""
Trying to set a property incorrectly returns a JSON error.
"""
test_user = self.example_user("hamlet")
self.login_user(test_user)
subs = gather_subscriptions(test_user)[0]
property_name = "is_muted"
result = self.api_post(
test_user,
"/api/v1/users/me/subscriptions/properties",
{
"subscription_data": orjson.dumps(
[{"property": property_name, "value": "bad", "stream_id": subs[0]["stream_id"]}]
).decode()
},
)
self.assert_json_error(result, f"{property_name} is not a boolean")
property_name = "in_home_view"
result = self.api_post(
test_user,
"/api/v1/users/me/subscriptions/properties",
{
"subscription_data": orjson.dumps(
[{"property": property_name, "value": "bad", "stream_id": subs[0]["stream_id"]}]
).decode()
},
)
self.assert_json_error(result, f"{property_name} is not a boolean")
property_name = "desktop_notifications"
result = self.api_post(
test_user,
"/api/v1/users/me/subscriptions/properties",
{
"subscription_data": orjson.dumps(
[{"property": property_name, "value": "bad", "stream_id": subs[0]["stream_id"]}]
).decode()
},
)
self.assert_json_error(result, f"{property_name} is not a boolean")
property_name = "audible_notifications"
result = self.api_post(
test_user,
"/api/v1/users/me/subscriptions/properties",
{
"subscription_data": orjson.dumps(
[{"property": property_name, "value": "bad", "stream_id": subs[0]["stream_id"]}]
).decode()
},
)
self.assert_json_error(result, f"{property_name} is not a boolean")
property_name = "push_notifications"
result = self.api_post(
test_user,
"/api/v1/users/me/subscriptions/properties",
{
"subscription_data": orjson.dumps(
[{"property": property_name, "value": "bad", "stream_id": subs[0]["stream_id"]}]
).decode()
},
)
self.assert_json_error(result, f"{property_name} is not a boolean")
property_name = "email_notifications"
result = self.api_post(
test_user,
"/api/v1/users/me/subscriptions/properties",
{
"subscription_data": orjson.dumps(
[{"property": property_name, "value": "bad", "stream_id": subs[0]["stream_id"]}]
).decode()
},
)
self.assert_json_error(result, f"{property_name} is not a boolean")
property_name = "wildcard_mentions_notify"
result = self.api_post(
test_user,
"/api/v1/users/me/subscriptions/properties",
{
"subscription_data": orjson.dumps(
[{"property": property_name, "value": "bad", "stream_id": subs[0]["stream_id"]}]
).decode()
},
)
self.assert_json_error(result, f"{property_name} is not a boolean")
property_name = "color"
result = self.api_post(
test_user,
"/api/v1/users/me/subscriptions/properties",
{
"subscription_data": orjson.dumps(
[{"property": property_name, "value": False, "stream_id": subs[0]["stream_id"]}]
).decode()
},
)
self.assert_json_error(result, f"{property_name} is not a string")
def test_json_subscription_property_invalid_stream(self) -> None:
test_user = self.example_user("hamlet")
self.login_user(test_user)
stream_id = 1000
result = self.api_post(
test_user,
"/api/v1/users/me/subscriptions/properties",
{
"subscription_data": orjson.dumps(
[{"property": "is_muted", "stream_id": stream_id, "value": False}]
).decode()
},
)
self.assert_json_error(result, "Invalid stream ID")
def test_set_invalid_property(self) -> None:
"""
Trying to set an invalid property returns a JSON error.
"""
test_user = self.example_user("hamlet")
self.login_user(test_user)
subs = gather_subscriptions(test_user)[0]
result = self.api_post(
test_user,
"/api/v1/users/me/subscriptions/properties",
{
"subscription_data": orjson.dumps(
[{"property": "bad", "value": "bad", "stream_id": subs[0]["stream_id"]}]
).decode()
},
)
self.assert_json_error(result, "Unknown subscription property: bad")
def test_ignored_parameters_in_subscriptions_properties_endpoint(self) -> None:
"""
Sending an invalid parameter with a valid parameter returns
an `ignored_parameters_unsupported` array.
"""
test_user = self.example_user("hamlet")
self.login_user(test_user)
subs = gather_subscriptions(test_user)[0]
sub = subs[0]
result = self.api_post(
test_user,
"/api/v1/users/me/subscriptions/properties",
{
"subscription_data": orjson.dumps(
[
{
"property": "wildcard_mentions_notify",
"stream_id": sub["stream_id"],
"value": True,
}
]
).decode(),
"invalid_parameter": orjson.dumps(
[{"property": "pin_to_top", "stream_id": sub["stream_id"], "value": False}]
).decode(),
},
)
self.assert_json_success(result, ignored_parameters=["invalid_parameter"])
class SubscriptionRestApiTest(ZulipTestCase):
def test_basic_add_delete(self) -> None:
user = self.example_user("hamlet")
self.login_user(user)
# add
request = {
"add": orjson.dumps([{"name": "my_test_stream_1"}]).decode(),
}
result = self.api_patch(user, "/api/v1/users/me/subscriptions", request)
self.assert_json_success(result)
streams = self.get_streams(user)
self.assertTrue("my_test_stream_1" in streams)
# now delete the same stream
request = {
"delete": orjson.dumps(["my_test_stream_1"]).decode(),
}
result = self.api_patch(user, "/api/v1/users/me/subscriptions", request)
self.assert_json_success(result)
streams = self.get_streams(user)
self.assertTrue("my_test_stream_1" not in streams)
def test_add_with_color(self) -> None:
user = self.example_user("hamlet")
self.login_user(user)
# add with a color specified
request = {
"add": orjson.dumps([{"name": "my_test_stream_2", "color": "#afafaf"}]).decode(),
}
result = self.api_patch(user, "/api/v1/users/me/subscriptions", request)
self.assert_json_success(result)
# incorrect color format
request = {
"subscriptions": orjson.dumps(
[{"name": "my_test_stream_3", "color": "#0g0g0g"}]
).decode(),
}
result = self.api_post(user, "/api/v1/users/me/subscriptions", request)
self.assert_json_error(result, 'subscriptions[0]["color"] is not a valid hex color code')
def test_api_valid_property(self) -> None:
"""
Setting a valid property via valid JSON returns a success message.
"""
user = self.example_user("hamlet")
self.login_user(user)
subs = gather_subscriptions(user)[0]
result = self.api_patch(
user,
"/api/v1/users/me/subscriptions/{}".format(subs[0]["stream_id"]),
{"property": "color", "value": "#c2c2c2"},
)
self.assert_json_success(result)
def test_api_invalid_property(self) -> None:
"""
Trying to set an invalid property returns a JSON error.
"""
user = self.example_user("hamlet")
self.login_user(user)
subs = gather_subscriptions(user)[0]
result = self.api_patch(
user,
"/api/v1/users/me/subscriptions/{}".format(subs[0]["stream_id"]),
{"property": "invalid", "value": "somevalue"},
)
self.assert_json_error(result, "Unknown subscription property: invalid")
def test_api_invalid_stream_id(self) -> None:
"""
Trying to set an invalid stream id returns a JSON error.
"""
user = self.example_user("hamlet")
self.login_user(user)
result = self.api_patch(
user,
"/api/v1/users/me/subscriptions/121",
{"property": "is_muted", "value": "somevalue"},
)
self.assert_json_error(result, "Invalid stream ID")
def test_bad_add_parameters(self) -> None:
user = self.example_user("hamlet")
self.login_user(user)
def check_for_error(val: Any, expected_message: str) -> None:
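    # Send a malformed "add" payload and check the validation error message.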
request = {
"add": orjson.dumps(val).decode(),
}
result = self.api_patch(user, "/api/v1/users/me/subscriptions", request)
self.assert_json_error(result, expected_message)
check_for_error(["foo"], "add[0] is not a dict")
check_for_error([{"bogus": "foo"}], "name key is missing from add[0]")
check_for_error([{"name": {}}], 'add[0]["name"] is not a string')
def test_bad_principals(self) -> None:
user = self.example_user("hamlet")
self.login_user(user)
request = {
"add": orjson.dumps([{"name": "my_new_stream"}]).decode(),
"principals": orjson.dumps([{}]).decode(),
}
result = self.api_patch(user, "/api/v1/users/me/subscriptions", request)
self.assert_json_error(result, "principals is not an allowed_type")
def test_bad_delete_parameters(self) -> None:
user = self.example_user("hamlet")
self.login_user(user)
request = {
"delete": orjson.dumps([{"name": "my_test_stream_1"}]).decode(),
}
result = self.api_patch(user, "/api/v1/users/me/subscriptions", request)
self.assert_json_error(result, "delete[0] is not a string")
def test_add_or_delete_not_specified(self) -> None:
user = self.example_user("hamlet")
self.login_user(user)
result = self.api_patch(user, "/api/v1/users/me/subscriptions", {})
self.assert_json_error(result, 'Nothing to do. Specify at least one of "add" or "delete".')
def test_patch_enforces_valid_stream_name_check(self) -> None:
"""
The only way to force an error is with an empty string.
"""
user = self.example_user("hamlet")
self.login_user(user)
invalid_stream_name = ""
request = {
"delete": orjson.dumps([invalid_stream_name]).decode(),
}
result = self.api_patch(user, "/api/v1/users/me/subscriptions", request)
self.assert_json_error(result, "Stream name can't be empty!")
def test_stream_name_too_long(self) -> None:
user = self.example_user("hamlet")
self.login_user(user)
long_stream_name = "a" * 61
request = {
"delete": orjson.dumps([long_stream_name]).decode(),
}
result = self.api_patch(user, "/api/v1/users/me/subscriptions", request)
self.assert_json_error(result, "Stream name too long (limit: 60 characters).")
def test_stream_name_contains_null(self) -> None:
user = self.example_user("hamlet")
self.login_user(user)
stream_name = "abc\000"
request = {
"delete": orjson.dumps([stream_name]).decode(),
}
result = self.api_patch(user, "/api/v1/users/me/subscriptions", request)
self.assert_json_error(result, "Invalid character in stream name, at position 4!")
def test_compose_views_rollback(self) -> None:
"""
The compose_views() function is used under the hood by
update_subscriptions_backend. It's a pretty simple method in terms of
control flow, but it uses a Django rollback, which may make it brittle
code when we upgrade Django. We test the function's rollback logic
here with a simple scenario to avoid false positives related to
subscription complications.
"""
user_profile = self.example_user("hamlet")
user_profile.full_name = "Hamlet"
user_profile.save()
request = HostRequestMock(user_profile=user_profile)
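# thunk1 writes to the database and succeeds; thunk2 then raises, so
# compose_views should roll back thunk1's write.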
def thunk1() -> HttpResponse:
user_profile.full_name = "Should not be committed"
user_profile.save()
return json_success(request)
def thunk2() -> HttpResponse:
raise JsonableError("random failure")
with self.assertRaises(JsonableError):
compose_views([thunk1, thunk2])
user_profile = self.example_user("hamlet")
self.assertEqual(user_profile.full_name, "Hamlet")
class SubscriptionAPITest(ZulipTestCase):
def setUp(self) -> None:
"""
All tests will be logged in as hamlet. Also save various useful values
as attributes that tests can access.
"""
super().setUp()
self.user_profile = self.example_user("hamlet")
self.test_email = self.user_profile.email
self.test_user = self.user_profile
self.login_user(self.user_profile)
self.test_realm = self.user_profile.realm
self.streams = self.get_streams(self.user_profile)
def make_random_stream_names(self, existing_stream_names: List[str]) -> List[str]:
"""
Helper function to make up random stream names. It takes
existing_stream_names and randomly appends a digit to the end of each,
but avoids names of streams already in the realm.
"""
all_stream_names = [stream.name for stream in Stream.objects.filter(realm=self.test_realm)]
return [
random_stream
for stream in existing_stream_names
if (random_stream := stream + str(random.randint(0, 9))) not in all_stream_names
]
def test_invalid_stream_name(self) -> None:
"""
Creating a stream whose name contains invalid Unicode characters from
the 'Cc' and 'Cn' categories should fail.
"""
user = self.example_user("hamlet")
self.login_user(user)
# For Cc category
post_data_cc = {
"subscriptions": orjson.dumps(
[{"name": "new\n\rstream", "description": "this is description"}]
).decode(),
"invite_only": orjson.dumps(False).decode(),
}
result = self.api_post(
user, "/api/v1/users/me/subscriptions", post_data_cc, subdomain="zulip"
)
self.assert_json_error(result, "Invalid character in stream name, at position 4!")
# For Cn category
post_data_cn = {
"subscriptions": orjson.dumps(
[{"name": "new\uFFFEstream", "description": "this is description"}]
).decode(),
"invite_only": orjson.dumps(False).decode(),
}
result = self.api_post(
user, "/api/v1/users/me/subscriptions", post_data_cn, subdomain="zulip"
)
self.assert_json_error(result, "Invalid character in stream name, at position 4!")
def test_invalid_stream_rename(self) -> None:
"""
Renaming a stream to a name containing invalid characters should fail.
"""
user_profile = self.example_user("hamlet")
self.login_user(user_profile)
stream = self.subscribe(user_profile, "stream_name1")
do_change_user_role(user_profile, UserProfile.ROLE_REALM_ADMINISTRATOR, acting_user=None)
# Check for empty name
result = self.client_patch(f"/json/streams/{stream.id}", {"new_name": ""})
self.assert_json_error(result, "Stream name can't be empty!")
# Check for long name
result = self.client_patch(f"/json/streams/{stream.id}", {"new_name": "a" * 61})
self.assert_json_error(result, "Stream name too long (limit: 60 characters).")
# Check for Cc characters
result = self.client_patch(f"/json/streams/{stream.id}", {"new_name": "test\n\rname"})
self.assert_json_error(result, "Invalid character in stream name, at position 5!")
# Check for Cn characters
result = self.client_patch(f"/json/streams/{stream.id}", {"new_name": "test\uFFFEame"})
self.assert_json_error(result, "Invalid character in stream name, at position 5!")
def test_successful_subscriptions_list(self) -> None:
"""
Calling /api/v1/users/me/subscriptions should successfully return your subscriptions.
"""
result = self.api_get(self.test_user, "/api/v1/users/me/subscriptions")
json = self.assert_json_success(result)
self.assertIn("subscriptions", json)
for stream in json["subscriptions"]:
self.assertIsInstance(stream["name"], str)
self.assertIsInstance(stream["color"], str)
self.assertIsInstance(stream["invite_only"], bool)
# check that the stream name corresponds to an actual
# stream; will throw Stream.DoesNotExist if it doesn't
get_stream(stream["name"], self.test_realm)
list_streams = [stream["name"] for stream in json["subscriptions"]]
# also check that this matches the list of your subscriptions
self.assertEqual(sorted(list_streams), sorted(self.streams))
def test_successful_subscriptions_list_subscribers(self) -> None:
"""
Calling /api/v1/users/me/subscriptions with include_subscribers should
successfully return your subscriptions.
"""
result = self.api_get(
self.test_user,
"/api/v1/users/me/subscriptions",
{"include_subscribers": "true"},
)
json = self.assert_json_success(result)
self.assertIn("subscriptions", json)
for stream in json["subscriptions"]:
self.assertIsInstance(stream["name"], str)
self.assertIsInstance(stream["color"], str)
self.assertIsInstance(stream["invite_only"], bool)
# check that the stream name corresponds to an actual
# stream; will throw Stream.DoesNotExist if it doesn't
get_stream(stream["name"], self.test_realm)
list_streams = [stream["name"] for stream in json["subscriptions"]]
# also check that this matches the list of your subscriptions
self.assertEqual(sorted(list_streams), sorted(self.streams))
def helper_check_subs_before_and_after_add(
self,
subscriptions: List[str],
other_params: Dict[str, Any],
subscribed: List[str],
already_subscribed: List[str],
email: str,
new_subs: List[str],
realm: Realm,
invite_only: bool = False,
) -> None:
"""
Check result of adding subscriptions.
You can add subscriptions for yourself or possibly many
principals, which is why e-mails map to subscriptions in the
result.
The result json is of the form
{"msg": "",
"result": "success",
"already_subscribed": {self.example_email("iago"): ["Venice", "Verona"]},
"subscribed": {self.example_email("iago"): ["Venice8"]}}
"""
result = self.common_subscribe_to_streams(
self.test_user, subscriptions, other_params, invite_only=invite_only
)
json = self.assert_json_success(result)
self.assertEqual(sorted(subscribed), sorted(json["subscribed"][email]))
self.assertEqual(sorted(already_subscribed), sorted(json["already_subscribed"][email]))
user = get_user(email, realm)
new_streams = self.get_streams(user)
self.assertEqual(sorted(new_streams), sorted(new_subs))
def test_successful_subscriptions_add(self) -> None:
"""
Calling POST /json/users/me/subscriptions should successfully add
streams, and should determine which are new subscriptions vs
which were already subscribed. We add 2 new streams to the
list of subscriptions and confirm the right number of events
are generated.
"""
self.assertNotEqual(len(self.streams), 0) # necessary for full test coverage
add_streams = ["Verona2", "Denmark5"]
self.assertNotEqual(len(add_streams), 0) # necessary for full test coverage
# Three events should be sent for each stream: stream creation, subscription add, and message notification.
with self.capture_send_event_calls(expected_num_events=6):
self.helper_check_subs_before_and_after_add(
self.streams + add_streams,
{},
add_streams,
self.streams,
self.test_email,
self.streams + add_streams,
self.test_realm,
)
def test_successful_subscriptions_add_with_announce(self) -> None:
"""
Calling POST /json/users/me/subscriptions should successfully add
streams, and should determine which are new subscriptions vs
which were already subscribed. We add 2 new streams to the
list of subscriptions and confirm the right number of events
are generated.
"""
self.assertNotEqual(len(self.streams), 0)
add_streams = ["Verona2", "Denmark5"]
self.assertNotEqual(len(add_streams), 0)
other_params = {
"announce": "true",
}
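# Point the realm's notifications stream at an existing stream so the
# "announce" flag has somewhere to send its message.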
notifications_stream = get_stream(self.streams[0], self.test_realm)
self.test_realm.notifications_stream_id = notifications_stream.id
self.test_realm.save()
with self.capture_send_event_calls(expected_num_events=7) as events:
self.helper_check_subs_before_and_after_add(
self.streams + add_streams,
other_params,
add_streams,
self.streams,
self.test_email,
self.streams + add_streams,
self.test_realm,
)
expected_stream_ids = {get_stream(stream, self.test_realm).id for stream in add_streams}
(peer_add_event,) = (event for event in events if event["event"].get("op") == "peer_add")
self.assertEqual(set(peer_add_event["event"]["stream_ids"]), expected_stream_ids)
self.assertEqual(set(peer_add_event["event"]["user_ids"]), {self.test_user.id})
def test_successful_subscriptions_notifies_pm(self) -> None:
"""
Calling POST /json/users/me/subscriptions should notify when a new stream is created.
"""
invitee = self.example_user("iago")
current_stream = self.get_streams(invitee)[0]
invite_streams = self.make_random_stream_names([current_stream])[:1]
self.common_subscribe_to_streams(
invitee,
invite_streams,
extra_post_data={
"announce": "true",
"principals": orjson.dumps([self.user_profile.id]).decode(),
},
)
def test_successful_subscriptions_notifies_stream(self) -> None:
"""
Calling POST /json/users/me/subscriptions should notify when a new stream is created.
"""
invitee = self.example_user("iago")
invitee_full_name = "Iago"
current_stream = self.get_streams(invitee)[0]
invite_streams = self.make_random_stream_names([current_stream])[:1]
notifications_stream = get_stream(current_stream, self.test_realm)
self.test_realm.notifications_stream_id = notifications_stream.id
self.test_realm.save()
self.common_subscribe_to_streams(
invitee,
invite_streams,
extra_post_data=dict(
announce="true",
principals=orjson.dumps([self.user_profile.id]).decode(),
),
)
target_stream = get_stream(invite_streams[0], self.test_realm)
msg = self.get_second_to_last_message()
self.assertEqual(msg.recipient.type, Recipient.STREAM)
self.assertEqual(msg.recipient.type_id, notifications_stream.id)
self.assertEqual(msg.sender_id, self.notification_bot(self.test_realm).id)
expected_msg = (
f"@_**{invitee_full_name}|{invitee.id}** created a new stream #**{invite_streams[0]}**."
)
self.assertEqual(msg.content, expected_msg)
msg = self.get_last_message()
self.assertEqual(msg.recipient.type, Recipient.STREAM)
self.assertEqual(msg.recipient.type_id, target_stream.id)
self.assertEqual(msg.sender_id, self.notification_bot(self.test_realm).id)
expected_msg = (
f"**Public** stream created by @_**{invitee_full_name}|{invitee.id}**. **Description:**\n"
"```` quote\n*No description.*\n````"
)
self.assertEqual(msg.content, expected_msg)
def test_successful_cross_realm_notification(self) -> None:
"""
Calling POST /json/users/me/subscriptions in a new realm should send a
notification containing a proper link to the new stream.
"""
realm = do_create_realm("testrealm", "Test Realm")
notifications_stream = Stream.objects.get(name="general", realm=realm)
realm.notifications_stream = notifications_stream
realm.save()
invite_streams = ["cross_stream"]
user = self.example_user("AARON")
user.realm = realm
user.save()
self.common_subscribe_to_streams(
user,
invite_streams,
extra_post_data=dict(
announce="true",
),
subdomain="testrealm",
)
msg = self.get_second_to_last_message()
self.assertEqual(msg.recipient.type, Recipient.STREAM)
self.assertEqual(msg.recipient.type_id, notifications_stream.id)
self.assertEqual(msg.sender_id, self.notification_bot(realm).id)
stream_id = Stream.objects.latest("id").id
expected_rendered_msg = f'<p><span class="user-mention silent" data-user-id="{user.id}">{user.full_name}</span> created a new stream <a class="stream" data-stream-id="{stream_id}" href="/#narrow/stream/{stream_id}-{invite_streams[0]}">#{invite_streams[0]}</a>.</p>'
self.assertEqual(msg.rendered_content, expected_rendered_msg)
def test_successful_subscriptions_notifies_with_escaping(self) -> None:
"""
Calling POST /json/users/me/subscriptions should notify when a new stream is created.
"""
invitee_full_name = "Iago"
invitee = self.example_user("iago")
current_stream = self.get_streams(invitee)[0]
notifications_stream = get_stream(current_stream, self.test_realm)
self.test_realm.notifications_stream_id = notifications_stream.id
self.test_realm.save()
invite_streams = ["strange ) \\ test"]
self.common_subscribe_to_streams(
invitee,
invite_streams,
extra_post_data={
"announce": "true",
"principals": orjson.dumps([self.user_profile.id]).decode(),
},
)
msg = self.get_second_to_last_message()
self.assertEqual(msg.sender_id, self.notification_bot(notifications_stream.realm).id)
expected_msg = (
f"@_**{invitee_full_name}|{invitee.id}** created a new stream #**{invite_streams[0]}**."
)
self.assertEqual(msg.content, expected_msg)
def test_non_ascii_stream_subscription(self) -> None:
"""
Subscribing to a stream name with non-ASCII characters succeeds.
"""
self.helper_check_subs_before_and_after_add(
[*self.streams, "hümbüǵ"],
{},
["hümbüǵ"],
self.streams,
self.test_email,
[*self.streams, "hümbüǵ"],
self.test_realm,
)
def test_subscriptions_add_too_long(self) -> None:
"""
Calling POST /json/users/me/subscriptions on a stream whose name is >60
characters should return a JSON error.
"""
# character limit is 60 characters
long_stream_name = "a" * 61
result = self.common_subscribe_to_streams(
self.test_user, [long_stream_name], allow_fail=True
)
self.assert_json_error(result, "Stream name too long (limit: 60 characters).")
def test_subscriptions_add_stream_with_null(self) -> None:
"""
Calling POST /json/users/me/subscriptions on a stream whose name contains
null characters should return a JSON error.
"""
stream_name = "abc\000"
result = self.common_subscribe_to_streams(self.test_user, [stream_name], allow_fail=True)
self.assert_json_error(result, "Invalid character in stream name, at position 4!")
def _test_user_settings_for_creating_streams(
self,
stream_policy: str,
*,
invite_only: bool,
is_web_public: bool,
) -> None:
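    # Walk the stream-creation policy ladder: for each policy, the role
    # just below the threshold should be denied and the role at the
    # threshold should be allowed.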
user_profile = self.example_user("cordelia")
realm = user_profile.realm
do_set_realm_property(realm, stream_policy, Realm.POLICY_ADMINS_ONLY, acting_user=None)
do_change_user_role(user_profile, UserProfile.ROLE_MODERATOR, acting_user=None)
result = self.common_subscribe_to_streams(
user_profile,
["new_stream1"],
invite_only=invite_only,
is_web_public=is_web_public,
allow_fail=True,
)
self.assert_json_error(result, "Insufficient permission")
do_change_user_role(user_profile, UserProfile.ROLE_REALM_ADMINISTRATOR, acting_user=None)
self.common_subscribe_to_streams(user_profile, ["new_stream1"], invite_only=invite_only)
do_set_realm_property(realm, stream_policy, Realm.POLICY_MODERATORS_ONLY, acting_user=None)
do_change_user_role(user_profile, UserProfile.ROLE_MEMBER, acting_user=None)
# Make sure that we are checking the permission with a full member,
# as full member is the user just below moderator in the role hierarchy.
self.assertFalse(user_profile.is_provisional_member)
result = self.common_subscribe_to_streams(
user_profile,
["new_stream2"],
allow_fail=True,
invite_only=invite_only,
is_web_public=is_web_public,
)
self.assert_json_error(result, "Insufficient permission")
do_change_user_role(user_profile, UserProfile.ROLE_MODERATOR, acting_user=None)
self.common_subscribe_to_streams(user_profile, ["new_stream2"], invite_only=invite_only)
do_set_realm_property(realm, stream_policy, Realm.POLICY_MEMBERS_ONLY, acting_user=None)
do_change_user_role(user_profile, UserProfile.ROLE_GUEST, acting_user=None)
result = self.common_subscribe_to_streams(
user_profile,
["new_stream3"],
invite_only=invite_only,
is_web_public=is_web_public,
allow_fail=True,
)
self.assert_json_error(result, "Not allowed for guest users")
do_change_user_role(user_profile, UserProfile.ROLE_MEMBER, acting_user=None)
self.common_subscribe_to_streams(
self.test_user,
["new_stream4"],
invite_only=invite_only,
is_web_public=is_web_public,
)
do_set_realm_property(
realm, stream_policy, Realm.POLICY_FULL_MEMBERS_ONLY, acting_user=None
)
do_set_realm_property(realm, "waiting_period_threshold", 100000, acting_user=None)
result = self.common_subscribe_to_streams(
user_profile,
["new_stream5"],
invite_only=invite_only,
is_web_public=is_web_public,
allow_fail=True,
)
self.assert_json_error(result, "Insufficient permission")
do_set_realm_property(realm, "waiting_period_threshold", 0, acting_user=None)
self.common_subscribe_to_streams(user_profile, ["new_stream3"], invite_only=invite_only)
def test_user_settings_for_creating_private_streams(self) -> None:
self._test_user_settings_for_creating_streams(
"create_private_stream_policy",
invite_only=True,
is_web_public=False,
)
def test_user_settings_for_creating_public_streams(self) -> None:
self._test_user_settings_for_creating_streams(
"create_public_stream_policy",
invite_only=False,
is_web_public=False,
)
def test_user_settings_for_creating_web_public_streams(self) -> None:
self._test_user_settings_for_creating_streams(
"create_web_public_stream_policy", invite_only=False, is_web_public=True
)
def _test_can_create_streams(self, stream_policy: str, invite_only: bool) -> None:
if invite_only:
def validation_func(user_profile: UserProfile) -> bool:
user_profile.refresh_from_db()
return user_profile.can_create_private_streams()
else:
def validation_func(user_profile: UserProfile) -> bool:
user_profile.refresh_from_db()
return user_profile.can_create_public_streams()
self.check_has_permission_policies(stream_policy, validation_func)
def test_can_create_private_streams(self) -> None:
self._test_can_create_streams("create_private_stream_policy", invite_only=True)
def test_can_create_public_streams(self) -> None:
self._test_can_create_streams("create_public_stream_policy", invite_only=False)
def test_can_create_web_public_streams(self) -> None:
def validation_func(user_profile: UserProfile) -> bool:
user_profile.refresh_from_db()
return user_profile.can_create_web_public_streams()
self.check_has_permission_policies("create_web_public_stream_policy", validation_func)
def test_user_settings_for_subscribing_other_users(self) -> None:
"""
You can't subscribe other people to streams if you are a guest or your account is not old
enough.
"""
user_profile = self.example_user("cordelia")
invitee_user_id = user_profile.id
realm = user_profile.realm
do_set_realm_property(
realm, "create_public_stream_policy", Realm.POLICY_MEMBERS_ONLY, acting_user=None
)
do_set_realm_property(
realm, "invite_to_stream_policy", Realm.POLICY_ADMINS_ONLY, acting_user=None
)
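        # As with stream creation above, this walks the invite_to_stream_policy
        # ladder: admins-only, moderators-only, members-only, full-members-only.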
do_change_user_role(self.test_user, UserProfile.ROLE_MODERATOR, acting_user=None)
result = self.common_subscribe_to_streams(
self.test_user,
["stream1"],
{"principals": orjson.dumps([invitee_user_id]).decode()},
allow_fail=True,
)
self.assert_json_error(result, "Insufficient permission")
do_change_user_role(self.test_user, UserProfile.ROLE_REALM_ADMINISTRATOR, acting_user=None)
self.common_subscribe_to_streams(
self.test_user, ["stream1"], {"principals": orjson.dumps([invitee_user_id]).decode()}
)
do_set_realm_property(
realm, "invite_to_stream_policy", Realm.POLICY_MODERATORS_ONLY, acting_user=None
)
do_change_user_role(self.test_user, UserProfile.ROLE_MEMBER, acting_user=None)
# Make sure that we are checking the permission with a full member,
# as full member is the user just below moderator in the role hierarchy.
self.assertFalse(self.test_user.is_provisional_member)
result = self.common_subscribe_to_streams(
self.test_user,
["stream2"],
{"principals": orjson.dumps([invitee_user_id]).decode()},
allow_fail=True,
)
self.assert_json_error(result, "Insufficient permission")
do_change_user_role(self.test_user, UserProfile.ROLE_MODERATOR, acting_user=None)
self.common_subscribe_to_streams(
self.test_user, ["stream2"], {"principals": orjson.dumps([invitee_user_id]).decode()}
)
self.unsubscribe(user_profile, "stream2")
do_set_realm_property(
realm, "invite_to_stream_policy", Realm.POLICY_MEMBERS_ONLY, acting_user=None
)
do_change_user_role(self.test_user, UserProfile.ROLE_GUEST, acting_user=None)
result = self.common_subscribe_to_streams(
self.test_user,
["stream2"],
{"principals": orjson.dumps([invitee_user_id]).decode()},
allow_fail=True,
)
self.assert_json_error(result, "Not allowed for guest users")
do_change_user_role(self.test_user, UserProfile.ROLE_MEMBER, acting_user=None)
self.common_subscribe_to_streams(
self.test_user,
["stream2"],
{"principals": orjson.dumps([self.test_user.id, invitee_user_id]).decode()},
)
self.unsubscribe(user_profile, "stream2")
do_set_realm_property(
realm,
"invite_to_stream_policy",
Realm.POLICY_FULL_MEMBERS_ONLY,
acting_user=None,
)
do_set_realm_property(realm, "waiting_period_threshold", 100000, acting_user=None)
result = self.common_subscribe_to_streams(
self.test_user,
["stream2"],
{"principals": orjson.dumps([invitee_user_id]).decode()},
allow_fail=True,
)
self.assert_json_error(result, "Insufficient permission")
do_set_realm_property(realm, "waiting_period_threshold", 0, acting_user=None)
self.common_subscribe_to_streams(
self.test_user, ["stream2"], {"principals": orjson.dumps([invitee_user_id]).decode()}
)
def test_can_subscribe_other_users(self) -> None:
"""
You can't subscribe other people to streams if you are a guest or your account is not old
enough.
"""
def validation_func(user_profile: UserProfile) -> bool:
user_profile.refresh_from_db()
return user_profile.can_subscribe_other_users()
self.check_has_permission_policies("invite_to_stream_policy", validation_func)
def test_subscriptions_add_invalid_stream(self) -> None:
"""
Calling POST /json/users/me/subscriptions on a stream whose name is invalid (as
defined by valid_stream_name in zerver/views.py) should return a JSON
error.
"""
# currently, the only invalid name is the empty string
invalid_stream_name = ""
result = self.common_subscribe_to_streams(
self.test_user, [invalid_stream_name], allow_fail=True
)
self.assert_json_error(result, "Stream name can't be empty!")
def assert_adding_subscriptions_for_principal(
self,
invitee_data: Union[str, int],
invitee_realm: Realm,
streams: List[str],
policy_name: str,
invite_only: bool = False,
) -> None:
"""
Calling POST /json/users/me/subscriptions on behalf of another principal (for
whom you have permission to add subscriptions) should successfully add
those subscriptions and send a message to the subscribee notifying
them.
"""
if isinstance(invitee_data, str):
other_profile = get_user(invitee_data, invitee_realm)
else:
other_profile = get_user_profile_by_id_in_realm(invitee_data, invitee_realm)
current_streams = self.get_streams(other_profile)
self.assertIsInstance(other_profile, UserProfile)
self.assertNotEqual(len(current_streams), 0) # necessary for full test coverage
self.assertNotEqual(len(streams), 0) # necessary for full test coverage
streams_to_sub = streams[:1] # just add one, to make the message easier to check
streams_to_sub.extend(current_streams)
self.helper_check_subs_before_and_after_add(
streams_to_sub,
{"principals": orjson.dumps([invitee_data]).decode()},
streams[:1],
current_streams,
other_profile.email,
streams_to_sub,
invitee_realm,
invite_only=invite_only,
)
# verify that a welcome message was sent to the stream
msg = self.get_last_message()
self.assertEqual(msg.recipient.type, msg.recipient.STREAM)
self.assertEqual(msg.topic_name(), "stream events")
self.assertEqual(msg.sender.email, settings.NOTIFICATION_BOT)
self.assertIn(
f"**{policy_name}** stream created by @_**{self.test_user.full_name}|{self.test_user.id}**. **Description:**\n"
"```` quote",
msg.content,
)
def test_multi_user_subscription(self) -> None:
user1 = self.example_user("cordelia")
user2 = self.example_user("iago")
realm = get_realm("zulip")
streams_to_sub = ["multi_user_stream"]
with self.capture_send_event_calls(expected_num_events=5) as events:
with self.assert_database_query_count(37):
self.common_subscribe_to_streams(
self.test_user,
streams_to_sub,
dict(principals=orjson.dumps([user1.id, user2.id]).decode()),
)
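        # Besides the message/stream events, we expect subscription "add"
        # events for the new subscribers and "peer_add" events for everyone
        # else; only the subscription events are inspected below.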
for ev in [x for x in events if x["event"]["type"] not in ("message", "stream")]:
if ev["event"]["op"] == "add":
self.assertEqual(
set(ev["event"]["subscriptions"][0]["subscribers"]),
{user1.id, user2.id},
)
else:
# Check "peer_add" events for streams users were
# never subscribed to, in order for the neversubscribed
# structure to stay up-to-date.
self.assertEqual(ev["event"]["op"], "peer_add")
stream = get_stream("multi_user_stream", realm)
self.assertEqual(num_subscribers_for_stream_id(stream.id), 2)
# Now add ourselves
with self.capture_send_event_calls(expected_num_events=2) as events:
with self.assert_database_query_count(13):
self.common_subscribe_to_streams(
self.test_user,
streams_to_sub,
dict(principals=orjson.dumps([self.test_user.id]).decode()),
)
add_event, add_peer_event = events
self.assertEqual(add_event["event"]["type"], "subscription")
self.assertEqual(add_event["event"]["op"], "add")
self.assertEqual(add_event["users"], [get_user(self.test_email, self.test_realm).id])
self.assertEqual(
set(add_event["event"]["subscriptions"][0]["subscribers"]),
{user1.id, user2.id, self.test_user.id},
)
self.assertNotIn(self.example_user("polonius").id, add_peer_event["users"])
self.assert_length(add_peer_event["users"], 11)
self.assertEqual(add_peer_event["event"]["type"], "subscription")
self.assertEqual(add_peer_event["event"]["op"], "peer_add")
self.assertEqual(add_peer_event["event"]["user_ids"], [self.user_profile.id])
stream = get_stream("multi_user_stream", realm)
self.assertEqual(num_subscribers_for_stream_id(stream.id), 3)
# Finally, add othello.
events = []
user_profile = self.example_user("othello")
email3 = user_profile.email
user3 = user_profile
realm3 = user_profile.realm
stream = get_stream("multi_user_stream", realm)
with self.capture_send_event_calls(expected_num_events=2) as events:
bulk_add_subscriptions(realm, [stream], [user_profile], acting_user=None)
add_event, add_peer_event = events
self.assertEqual(add_event["event"]["type"], "subscription")
self.assertEqual(add_event["event"]["op"], "add")
self.assertEqual(add_event["users"], [get_user(email3, realm3).id])
self.assertEqual(
set(add_event["event"]["subscriptions"][0]["subscribers"]),
{user1.id, user2.id, user3.id, self.test_user.id},
)
# We don't send a peer_add event to othello
self.assertNotIn(user_profile.id, add_peer_event["users"])
self.assertNotIn(self.example_user("polonius").id, add_peer_event["users"])
self.assert_length(add_peer_event["users"], 11)
self.assertEqual(add_peer_event["event"]["type"], "subscription")
self.assertEqual(add_peer_event["event"]["op"], "peer_add")
self.assertEqual(add_peer_event["event"]["user_ids"], [user_profile.id])
def test_private_stream_subscription(self) -> None:
realm = get_realm("zulip")
# Create a private stream with Hamlet subscribed
stream_name = "private"
stream = ensure_stream(realm, stream_name, invite_only=True, acting_user=None)
existing_user_profile = self.example_user("hamlet")
bulk_add_subscriptions(realm, [stream], [existing_user_profile], acting_user=None)
# Now subscribe Cordelia to the stream, capturing events
user_profile = self.example_user("cordelia")
with self.capture_send_event_calls(expected_num_events=3) as events:
bulk_add_subscriptions(realm, [stream], [user_profile], acting_user=None)
create_event, add_event, add_peer_event = events
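        # Cordelia could not see this invite-only stream before, so she gets a
        # stream "create" event along with the subscription events.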
self.assertEqual(create_event["event"]["type"], "stream")
self.assertEqual(create_event["event"]["op"], "create")
self.assertEqual(create_event["users"], [user_profile.id])
self.assertEqual(create_event["event"]["streams"][0]["name"], stream_name)
self.assertEqual(add_event["event"]["type"], "subscription")
self.assertEqual(add_event["event"]["op"], "add")
self.assertEqual(add_event["users"], [user_profile.id])
self.assertEqual(
set(add_event["event"]["subscriptions"][0]["subscribers"]),
{user_profile.id, existing_user_profile.id},
)
        # We don't send a peer_add event to the newly subscribed user herself,
        # but we do send peer_add events to all realm admins.
self.assertNotIn(user_profile.id, add_peer_event["users"])
self.assert_length(add_peer_event["users"], 3)
self.assertEqual(add_peer_event["event"]["type"], "subscription")
self.assertEqual(add_peer_event["event"]["op"], "peer_add")
self.assertEqual(add_peer_event["event"]["user_ids"], [user_profile.id])
# Do not send stream creation event to realm admin users
# even if realm admin is subscribed to stream cause realm admin already get
# private stream creation event on stream creation.
new_stream = ensure_stream(realm, "private stream", invite_only=True, acting_user=None)
with self.capture_send_event_calls(expected_num_events=2) as events:
bulk_add_subscriptions(
realm, [new_stream], [self.example_user("iago")], acting_user=None
)
# Note that since iago is an admin, he won't get a stream/create
# event here.
self.assert_length(events, 2)
add_event, add_peer_event = events
self.assertEqual(add_event["event"]["type"], "subscription")
self.assertEqual(add_event["event"]["op"], "add")
self.assertEqual(add_event["users"], [self.example_user("iago").id])
self.assert_length(add_peer_event["users"], 1)
self.assertEqual(add_peer_event["event"]["type"], "subscription")
self.assertEqual(add_peer_event["event"]["op"], "peer_add")
self.assertEqual(add_peer_event["event"]["user_ids"], [self.example_user("iago").id])
def test_subscribe_to_stream_post_policy_admins_stream(self) -> None:
"""
Members can subscribe to streams where only admins can post
"""
member = self.example_user("AARON")
stream = self.make_stream("stream1")
do_change_stream_post_policy(stream, Stream.STREAM_POST_POLICY_ADMINS, acting_user=member)
result = self.common_subscribe_to_streams(member, ["stream1"])
json = self.assert_json_success(result)
self.assertEqual(json["subscribed"], {member.email: ["stream1"]})
self.assertEqual(json["already_subscribed"], {})
def test_subscribe_to_stream_post_policy_restrict_new_members_stream(self) -> None:
"""
        New members can subscribe to streams where they cannot post
"""
new_member_email = self.nonreg_email("test")
self.register(new_member_email, "test")
new_member = self.nonreg_user("test")
do_set_realm_property(new_member.realm, "waiting_period_threshold", 10, acting_user=None)
self.assertTrue(new_member.is_provisional_member)
stream = self.make_stream("stream1")
do_change_stream_post_policy(
stream, Stream.STREAM_POST_POLICY_RESTRICT_NEW_MEMBERS, acting_user=new_member
)
result = self.common_subscribe_to_streams(new_member, ["stream1"])
json = self.assert_json_success(result)
self.assertEqual(json["subscribed"], {new_member.email: ["stream1"]})
self.assertEqual(json["already_subscribed"], {})
def test_subscribe_to_stream_post_policy_moderators_stream(self) -> None:
"""
Members can subscribe to streams where only admins and moderators can post
"""
member = self.example_user("AARON")
stream = self.make_stream("stream1")
# Make sure that we are testing this with full member which is just below the moderator
# in the role hierarchy.
self.assertFalse(member.is_provisional_member)
do_change_stream_post_policy(
stream, Stream.STREAM_POST_POLICY_MODERATORS, acting_user=member
)
result = self.common_subscribe_to_streams(member, ["stream1"])
json = self.assert_json_success(result)
self.assertEqual(json["subscribed"], {member.email: ["stream1"]})
self.assertEqual(json["already_subscribed"], {})
def test_guest_user_subscribe(self) -> None:
"""Guest users cannot subscribe themselves to anything"""
guest_user = self.example_user("polonius")
result = self.common_subscribe_to_streams(guest_user, ["Denmark"], allow_fail=True)
self.assert_json_error(result, "Not allowed for guest users")
# Verify the internal checks also block guest users.
stream = get_stream("Denmark", guest_user.realm)
self.assertEqual(filter_stream_authorization(guest_user, [stream]), ([], [stream]))
stream = self.make_stream("private_stream", invite_only=True)
result = self.common_subscribe_to_streams(guest_user, ["private_stream"], allow_fail=True)
self.assert_json_error(result, "Not allowed for guest users")
self.assertEqual(filter_stream_authorization(guest_user, [stream]), ([], [stream]))
web_public_stream = self.make_stream("web_public_stream", is_web_public=True)
public_stream = self.make_stream("public_stream", invite_only=False)
private_stream = self.make_stream("private_stream2", invite_only=True)
# This test should be added as soon as the subscription endpoint allows
# guest users to subscribe to web-public streams. Although they are already
# authorized, the decorator in "add_subscriptions_backend" still needs to be
# deleted.
#
# result = self.common_subscribe_to_streams(guest_user, ['web_public_stream'],
# is_web_public=True, allow_fail=True)
# self.assert_json_success(result)
streams_to_sub = [web_public_stream, public_stream, private_stream]
self.assertEqual(
filter_stream_authorization(guest_user, streams_to_sub),
([web_public_stream], [public_stream, private_stream]),
)
# Guest can be subscribed by other users.
normal_user = self.example_user("aaron")
with self.capture_send_event_calls(expected_num_events=6) as events:
self.common_subscribe_to_streams(
self.example_user("hamlet"),
["Denmark"],
dict(principals=orjson.dumps([guest_user.id, normal_user.id]).decode()),
)
# Verify that stream creation event is sent to guest user only.
stream_create_events = [
event
for event in events
if event["event"]["type"] == "stream" and event["event"]["op"] == "create"
]
self.assert_length(stream_create_events, 1)
self.assertEqual(stream_create_events[0]["users"], [guest_user.id])
# Verify that subscription add event is sent to both the users.
subscription_add_events = [
event
for event in events
if event["event"]["type"] == "subscription" and event["event"]["op"] == "add"
]
self.assert_length(subscription_add_events, 2)
self.assertEqual(subscription_add_events[0]["users"], [guest_user.id])
self.assertEqual(subscription_add_events[1]["users"], [normal_user.id])
def test_users_getting_add_peer_event(self) -> None:
"""
        Check that the users receiving the add_peer_event are correct
"""
streams_to_sub = ["multi_user_stream"]
othello = self.example_user("othello")
cordelia = self.example_user("cordelia")
iago = self.example_user("iago")
orig_user_ids_to_subscribe = [self.test_user.id, othello.id]
self.common_subscribe_to_streams(
self.test_user,
streams_to_sub,
dict(principals=orjson.dumps(orig_user_ids_to_subscribe).decode()),
)
new_user_ids_to_subscribe = [iago.id, cordelia.id]
with self.capture_send_event_calls(expected_num_events=5) as events:
self.common_subscribe_to_streams(
self.test_user,
streams_to_sub,
dict(principals=orjson.dumps(new_user_ids_to_subscribe).decode()),
)
add_peer_events = [event for event in events if event["event"].get("op") == "peer_add"]
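        # Exactly one peer_add event should have been sent; the unpacking
        # below fails if that is not the case.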
(add_peer_event,) = add_peer_events
self.assertEqual(add_peer_event["event"]["type"], "subscription")
self.assertEqual(add_peer_event["event"]["op"], "peer_add")
event_sent_to_ids = add_peer_event["users"]
for user_id in new_user_ids_to_subscribe:
            # Make sure the newly subscribed users are not in the
            # peer_add event recipient list.
self.assertNotIn(user_id, event_sent_to_ids)
for old_user in orig_user_ids_to_subscribe:
            # Check that the previously subscribed users are in the peer_add event recipient list.
self.assertIn(old_user, event_sent_to_ids)
def test_users_getting_remove_peer_event(self) -> None:
"""
        Check that the users receiving peer_remove events are correct
"""
user1 = self.example_user("othello")
user2 = self.example_user("cordelia")
user3 = self.example_user("hamlet")
user4 = self.example_user("iago")
user5 = self.example_user("AARON")
guest = self.example_user("polonius")
realm = user1.realm
stream1 = self.make_stream("stream1")
stream2 = self.make_stream("stream2")
stream3 = self.make_stream("stream3")
private = self.make_stream("private_stream", invite_only=True)
self.subscribe(user1, "stream1")
self.subscribe(user2, "stream1")
self.subscribe(user3, "stream1")
self.subscribe(user2, "stream2")
self.subscribe(user2, "stream3")
self.subscribe(user1, "private_stream")
self.subscribe(user2, "private_stream")
self.subscribe(user3, "private_stream")
# Sends 3 peer-remove events and 2 unsubscribe events.
with self.assert_database_query_count(16):
with self.assert_memcached_count(3):
with self.capture_send_event_calls(expected_num_events=5) as events:
bulk_remove_subscriptions(
realm,
[user1, user2],
[stream1, stream2, stream3, private],
acting_user=None,
)
peer_events = [e for e in events if e["event"].get("op") == "peer_remove"]
# We only care about a subset of users when we inspect
# peer_remove events.
our_user_ids = {
user1.id,
user2.id,
user3.id,
user4.id,
user5.id,
guest.id,
}
notifications = []
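        # Collect (stream_names, removed_user_ids, notified_user_ids) tuples
        # so we can assert on all peer_remove notifications at once.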
for event in peer_events:
stream_ids = event["event"]["stream_ids"]
stream_names = sorted(Stream.objects.get(id=stream_id).name for stream_id in stream_ids)
removed_user_ids = set(event["event"]["user_ids"])
notified_user_ids = set(event["users"]) & our_user_ids
notifications.append((",".join(stream_names), removed_user_ids, notified_user_ids))
notifications.sort(key=lambda tup: tup[0])
self.assertEqual(
notifications,
[
("private_stream", {user1.id, user2.id}, {user3.id, user4.id}),
("stream1", {user1.id, user2.id}, {user3.id, user4.id, user5.id}),
("stream2,stream3", {user2.id}, {user1.id, user3.id, user4.id, user5.id}),
],
)
def test_bulk_subscribe_MIT(self) -> None:
mit_user = self.mit_user("starnine")
num_streams = 15
realm = get_realm("zephyr")
stream_names = [f"stream_{i}" for i in range(num_streams)]
streams = [self.make_stream(stream_name, realm=realm) for stream_name in stream_names]
for stream in streams:
stream.is_in_zephyr_realm = True
stream.save()
# Verify that peer_event events are never sent in Zephyr
# realm. This does generate stream creation events from
# send_stream_creation_events_for_previously_inaccessible_streams.
with self.capture_send_event_calls(expected_num_events=num_streams + 1) as events:
with self.assert_database_query_count(num_streams + 11):
self.common_subscribe_to_streams(
mit_user,
stream_names,
dict(principals=orjson.dumps([mit_user.id]).decode()),
subdomain="zephyr",
)
# num_streams stream creation events:
self.assertEqual(
{(event["event"]["type"], event["event"]["op"]) for event in events[0:num_streams]},
{("stream", "create")},
)
# Followed by one subscription event:
self.assertEqual(events[num_streams]["event"]["type"], "subscription")
with self.capture_send_event_calls(expected_num_events=1):
bulk_remove_subscriptions(
realm,
users=[mit_user],
streams=streams,
acting_user=None,
)
def test_subscribe_others_to_public_stream_in_zephyr_realm(self) -> None:
"""
Users cannot be subscribed to public streams by other users in zephyr realm.
"""
starnine = self.mit_user("starnine")
espuser = self.mit_user("espuser")
realm = get_realm("zephyr")
stream = self.make_stream("stream_1", realm=realm)
stream.is_in_zephyr_realm = True
stream.save()
result = self.common_subscribe_to_streams(
starnine,
["stream_1"],
dict(principals=orjson.dumps([starnine.id, espuser.id]).decode()),
subdomain="zephyr",
allow_fail=True,
)
self.assert_json_error(
result,
"You can only invite other Zephyr mirroring users to private streams.",
status_code=400,
)
def test_bulk_subscribe_many(self) -> None:
# Create a whole bunch of streams
streams = [f"stream_{i}" for i in range(30)]
for stream_name in streams:
self.make_stream(stream_name)
desdemona = self.example_user("desdemona")
test_users = [
desdemona,
self.example_user("cordelia"),
self.example_user("hamlet"),
self.example_user("othello"),
self.example_user("iago"),
self.example_user("prospero"),
]
# Subscribe out test users to some streams, including
# some that we may soon subscribe them to.
for stream_name in ["Verona", "Denmark", *streams[:10]]:
for user in test_users:
self.subscribe(user, stream_name)
# Now unsubscribe users from the first few streams,
# so they have to reactivate.
for stream_name in streams[:5]:
for user in test_users:
self.unsubscribe(user, stream_name)
test_user_ids = [user.id for user in test_users]
# The only known O(N) behavior here is that we call
# principal_to_user_profile for each of our users, but it
# should be cached.
with self.assert_database_query_count(21):
with self.assert_memcached_count(3):
with mock.patch("zerver.views.streams.send_messages_for_new_subscribers"):
self.common_subscribe_to_streams(
desdemona,
streams,
dict(principals=orjson.dumps(test_user_ids).decode()),
)
def test_subscriptions_add_for_principal(self) -> None:
"""
You can subscribe other people to streams.
"""
invitee = self.example_user("iago")
current_streams = self.get_streams(invitee)
invite_streams = self.make_random_stream_names(current_streams)
self.assert_adding_subscriptions_for_principal(
invitee.id, invitee.realm, invite_streams, policy_name="Public"
)
def test_subscriptions_add_for_principal_legacy_emails(self) -> None:
invitee = self.example_user("iago")
current_streams = self.get_streams(invitee)
invite_streams = self.make_random_stream_names(current_streams)
self.assert_adding_subscriptions_for_principal(
invitee.email, invitee.realm, invite_streams, policy_name="Public"
)
def test_subscriptions_add_for_principal_deactivated(self) -> None:
"""
You can't subscribe deactivated people to streams.
"""
target_profile = self.example_user("cordelia")
post_data = dict(
principals=orjson.dumps([target_profile.id]).decode(),
)
self.common_subscribe_to_streams(self.test_user, "Verona", post_data)
do_deactivate_user(target_profile, acting_user=None)
        result = self.common_subscribe_to_streams(
            self.test_user, ["Denmark"], post_data, allow_fail=True
        )
self.assert_json_error(
result,
f"User not authorized to execute queries on behalf of '{target_profile.id}'",
status_code=403,
)
def test_subscriptions_add_for_principal_invite_only(self) -> None:
"""
You can subscribe other people to invite only streams.
"""
invitee = self.example_user("iago")
current_streams = self.get_streams(invitee)
invite_streams = self.make_random_stream_names(current_streams)
self.assert_adding_subscriptions_for_principal(
invitee.id,
invitee.realm,
invite_streams,
invite_only=True,
policy_name="Private, protected history",
)
def test_non_ascii_subscription_for_principal(self) -> None:
"""
        You can subscribe other people to streams even if the stream names
        contain non-ASCII characters.
"""
iago = self.example_user("iago")
self.assert_adding_subscriptions_for_principal(
iago.id, get_realm("zulip"), ["hümbüǵ"], policy_name="Public"
)
def test_subscription_add_invalid_principal_legacy_emails(self) -> None:
"""
Calling subscribe on behalf of a principal that does not exist
should return a JSON error.
"""
invalid_principal = "rosencrantz-and-guildenstern@zulip.com"
invalid_principal_realm = get_realm("zulip")
# verify that invalid_principal actually doesn't exist
with self.assertRaises(UserProfile.DoesNotExist):
get_user(invalid_principal, invalid_principal_realm)
result = self.common_subscribe_to_streams(
self.test_user,
self.streams,
{"principals": orjson.dumps([invalid_principal]).decode()},
allow_fail=True,
)
self.assert_json_error(
result,
f"User not authorized to execute queries on behalf of '{invalid_principal}'",
status_code=403,
)
def test_subscription_add_invalid_principal(self) -> None:
invalid_principal = 999
invalid_principal_realm = get_realm("zulip")
with self.assertRaises(UserProfile.DoesNotExist):
get_user_profile_by_id_in_realm(invalid_principal, invalid_principal_realm)
result = self.common_subscribe_to_streams(
self.test_user,
self.streams,
{"principals": orjson.dumps([invalid_principal]).decode()},
allow_fail=True,
)
self.assert_json_error(
result,
f"User not authorized to execute queries on behalf of '{invalid_principal}'",
status_code=403,
)
def test_subscription_add_principal_other_realm(self) -> None:
"""
Calling subscribe on behalf of a principal in another realm
should return a JSON error.
"""
profile = self.mit_user("starnine")
principal = profile.id
        # verify that the principal exists (so the error is due to the cross-realm request)
self.assertIsInstance(profile, UserProfile)
result = self.common_subscribe_to_streams(
self.test_user,
self.streams,
{"principals": orjson.dumps([principal]).decode()},
allow_fail=True,
)
self.assert_json_error(
result,
f"User not authorized to execute queries on behalf of '{principal}'",
status_code=403,
)
def helper_check_subs_before_and_after_remove(
self,
subscriptions: List[str],
json_dict: Dict[str, Any],
email: str,
new_subs: List[str],
realm: Realm,
) -> None:
"""
Check result of removing subscriptions.
Unlike adding subscriptions, you can only remove subscriptions
for yourself, so the result format is different.
{"msg": "",
"removed": ["Denmark", "Scotland", "Verona"],
"not_removed": ["Rome"], "result": "success"}
"""
result = self.client_delete(
"/json/users/me/subscriptions", {"subscriptions": orjson.dumps(subscriptions).decode()}
)
json = self.assert_json_success(result)
for key, val in json_dict.items():
# we don't care about the order of the items
self.assertEqual(sorted(val), sorted(json[key]))
user = get_user(email, realm)
new_streams = self.get_streams(user)
self.assertEqual(sorted(new_streams), sorted(new_subs))
def test_successful_subscriptions_remove(self) -> None:
"""
Calling DELETE /json/users/me/subscriptions should successfully remove streams,
and should determine which were removed vs which weren't subscribed to.
We cannot randomly generate stream names because the remove code
verifies whether streams exist.
"""
self.assertGreaterEqual(len(self.streams), 2)
streams_to_remove = self.streams[1:]
not_subbed = [
stream.name
for stream in Stream.objects.filter(realm=get_realm("zulip"))
if stream.name not in self.streams
]
random.shuffle(not_subbed)
self.assertNotEqual(len(not_subbed), 0) # necessary for full test coverage
try_to_remove = not_subbed[:3] # attempt to remove up to 3 streams not already subbed to
streams_to_remove.extend(try_to_remove)
self.helper_check_subs_before_and_after_remove(
streams_to_remove,
{"removed": self.streams[1:], "not_removed": try_to_remove},
self.test_email,
[self.streams[0]],
self.test_realm,
)
def test_subscriptions_remove_fake_stream(self) -> None:
"""
Calling DELETE /json/users/me/subscriptions on a stream that doesn't exist
should return a JSON error.
"""
random_streams = self.make_random_stream_names(self.streams)
self.assertNotEqual(len(random_streams), 0) # necessary for full test coverage
# pick only one fake stream, to make checking the error message easy
streams_to_remove = random_streams[:1]
result = self.client_delete(
"/json/users/me/subscriptions",
{"subscriptions": orjson.dumps(streams_to_remove).decode()},
)
self.assert_json_error(result, f"Stream(s) ({random_streams[0]}) do not exist")
def helper_subscriptions_exists(
self, stream: str, expect_success: bool, subscribed: bool
) -> None:
"""
Call /json/subscriptions/exists on a stream and expect a certain result.
"""
result = self.client_post("/json/subscriptions/exists", {"stream": stream})
if expect_success:
json = self.assert_json_success(result)
else:
self.assertEqual(result.status_code, 404)
json = result.json()
if subscribed:
self.assertIn("subscribed", json)
self.assertEqual(json["subscribed"], subscribed)
def test_successful_subscriptions_exists_subbed(self) -> None:
"""
        Calling /json/subscriptions/exists on a stream to which you are subbed
should return that it exists and that you are subbed.
"""
self.assertNotEqual(len(self.streams), 0) # necessary for full test coverage
self.helper_subscriptions_exists(self.streams[0], True, True)
def test_successful_subscriptions_exists_not_subbed(self) -> None:
"""
        Calling /json/subscriptions/exists on a stream to which you are not
subbed should return that it exists and that you are not subbed.
"""
all_stream_names = [stream.name for stream in Stream.objects.filter(realm=self.test_realm)]
streams_not_subbed = list(set(all_stream_names) - set(self.streams))
self.assertNotEqual(len(streams_not_subbed), 0) # necessary for full test coverage
self.helper_subscriptions_exists(streams_not_subbed[0], True, False)
def test_subscriptions_does_not_exist(self) -> None:
"""
        Calling /json/subscriptions/exists on a stream that doesn't exist should
return that it doesn't exist.
"""
random_streams = self.make_random_stream_names(self.streams)
self.assertNotEqual(len(random_streams), 0) # necessary for full test coverage
self.helper_subscriptions_exists(random_streams[0], False, False)
def test_subscriptions_exist_invalid_name(self) -> None:
"""
        Calling /json/subscriptions/exists on a stream whose name is invalid (as
defined by valid_stream_name in zerver/views.py) should return a JSON
error.
"""
# currently, the only invalid stream name is the empty string
invalid_stream_name = ""
result = self.client_post("/json/subscriptions/exists", {"stream": invalid_stream_name})
self.assert_json_error(result, "Stream name can't be empty!")
def test_existing_subscriptions_autosubscription(self) -> None:
"""
        Call /json/subscriptions/exists on an existing stream and autosubscribe to it.
"""
stream_name = "new_public_stream"
cordelia = self.example_user("cordelia")
self.common_subscribe_to_streams(cordelia, [stream_name], invite_only=False)
result = self.client_post(
"/json/subscriptions/exists", {"stream": stream_name, "autosubscribe": "false"}
)
response_dict = self.assert_json_success(result)
self.assertIn("subscribed", response_dict)
self.assertFalse(response_dict["subscribed"])
result = self.client_post(
"/json/subscriptions/exists", {"stream": stream_name, "autosubscribe": "true"}
)
response_dict = self.assert_json_success(result)
self.assertIn("subscribed", response_dict)
        self.assertTrue(response_dict["subscribed"])
def test_existing_subscriptions_autosubscription_private_stream(self) -> None:
"""Call /json/subscriptions/exist on an existing private stream with
autosubscribe should fail.
"""
stream_name = "Saxony"
cordelia = self.example_user("cordelia")
self.common_subscribe_to_streams(cordelia, [stream_name], invite_only=True)
stream = get_stream(stream_name, self.test_realm)
result = self.client_post(
"/json/subscriptions/exists", {"stream": stream_name, "autosubscribe": "true"}
)
# We can't see invite-only streams here
self.assert_json_error(result, "Invalid stream name 'Saxony'", status_code=404)
# Importantly, we are not now subscribed
self.assertEqual(num_subscribers_for_stream_id(stream.id), 1)
# A user who is subscribed still sees the stream exists
self.login("cordelia")
result = self.client_post(
"/json/subscriptions/exists", {"stream": stream_name, "autosubscribe": "false"}
)
response_dict = self.assert_json_success(result)
self.assertIn("subscribed", response_dict)
        self.assertTrue(response_dict["subscribed"])
def get_subscription(self, user_profile: UserProfile, stream_name: str) -> Subscription:
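        """Return the Subscription row tying user_profile to the named stream."""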
stream = get_stream(stream_name, self.test_realm)
return Subscription.objects.get(
user_profile=user_profile,
recipient__type=Recipient.STREAM,
recipient__type_id=stream.id,
)
def test_subscriptions_add_notification_default_none(self) -> None:
"""
When creating a subscription, the desktop, push, and audible notification
settings for that stream are none. A value of None means to use the values
inherited from the global notification settings.
"""
user_profile = self.example_user("iago")
invitee_user_id = user_profile.id
invitee_realm = user_profile.realm
user_profile.enable_stream_desktop_notifications = True
user_profile.enable_stream_push_notifications = True
user_profile.enable_stream_audible_notifications = True
user_profile.enable_stream_email_notifications = True
user_profile.save()
current_stream = self.get_streams(user_profile)[0]
invite_streams = self.make_random_stream_names([current_stream])
self.assert_adding_subscriptions_for_principal(
invitee_user_id, invitee_realm, invite_streams, policy_name="Public"
)
subscription = self.get_subscription(user_profile, invite_streams[0])
with mock.patch("zerver.models.Recipient.__repr__", return_value="recip"):
self.assertEqual(
repr(subscription),
"<Subscription: "
f"<UserProfile: {user_profile.email} {user_profile.realm!r}> -> recip>",
)
self.assertIsNone(subscription.desktop_notifications)
self.assertIsNone(subscription.push_notifications)
self.assertIsNone(subscription.audible_notifications)
self.assertIsNone(subscription.email_notifications)
def test_mark_messages_as_unread_on_unsubscribe(self) -> None:
realm = get_realm("zulip")
user = self.example_user("iago")
random_user = self.example_user("hamlet")
stream1 = ensure_stream(realm, "stream1", invite_only=False, acting_user=None)
stream2 = ensure_stream(realm, "stream2", invite_only=False, acting_user=None)
private = ensure_stream(realm, "private_stream", invite_only=True, acting_user=None)
self.subscribe(user, "stream1")
self.subscribe(user, "stream2")
self.subscribe(user, "private_stream")
self.subscribe(random_user, "stream1")
self.subscribe(random_user, "stream2")
self.subscribe(random_user, "private_stream")
self.send_stream_message(random_user, "stream1", "test", "test")
self.send_stream_message(random_user, "stream2", "test", "test")
self.send_stream_message(random_user, "private_stream", "test", "test")
def get_unread_stream_data() -> List[UnreadStreamInfo]:
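            """Return the aggregated per-stream unread info for the user."""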
raw_unread_data = get_raw_unread_data(user)
aggregated_data = aggregate_unread_data(raw_unread_data)
return aggregated_data["streams"]
result = get_unread_stream_data()
self.assert_length(result, 3)
self.assertEqual(result[0]["stream_id"], stream1.id)
self.assertEqual(result[1]["stream_id"], stream2.id)
self.assertEqual(result[2]["stream_id"], private.id)
with self.captureOnCommitCallbacks(execute=True):
            # Unsubscribing should mark all the messages in stream2 and private_stream as read
self.unsubscribe(user, "stream2")
self.unsubscribe(user, "private_stream")
self.subscribe(user, "stream2")
self.subscribe(user, "private_stream")
result = get_unread_stream_data()
self.assert_length(result, 1)
self.assertEqual(result[0]["stream_id"], stream1.id)
def test_gather_subscriptions_excludes_deactivated_streams(self) -> None:
"""
Check that gather_subscriptions_helper does not include deactivated streams in its
results.
"""
realm = get_realm("zulip")
admin_user = self.example_user("iago")
non_admin_user = self.example_user("cordelia")
self.login_user(admin_user)
for stream_name in ["stream1", "stream2", "stream3"]:
self.make_stream(stream_name, realm=realm, invite_only=False)
self.subscribe(admin_user, stream_name)
self.subscribe(non_admin_user, stream_name)
self.subscribe(self.example_user("othello"), stream_name)
def archive_stream(stream_name: str) -> None:
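            """Archive (deactivate) the stream via the API and assert success."""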
stream_id = get_stream(stream_name, realm).id
result = self.client_delete(f"/json/streams/{stream_id}")
self.assert_json_success(result)
# Deleted/deactivated stream should not be returned in the helper results
admin_before_delete = gather_subscriptions_helper(admin_user)
non_admin_before_delete = gather_subscriptions_helper(non_admin_user)
# Delete our stream
archive_stream("stream1")
# Get subs after delete
admin_after_delete = gather_subscriptions_helper(admin_user)
non_admin_after_delete = gather_subscriptions_helper(non_admin_user)
# Compare results - should be 1 stream less
        self.assertEqual(
            len(admin_before_delete.subscriptions),
            len(admin_after_delete.subscriptions) + 1,
            "Expected exactly 1 fewer stream from gather_subscriptions_helper",
        )
        self.assertEqual(
            len(non_admin_before_delete.subscriptions),
            len(non_admin_after_delete.subscriptions) + 1,
            "Expected exactly 1 fewer stream from gather_subscriptions_helper",
        )
def test_validate_user_access_to_subscribers_helper(self) -> None:
"""
Ensure the validate_user_access_to_subscribers_helper is properly raising
ValidationError on missing user, user not-in-realm.
"""
user_profile = self.example_user("othello")
realm_name = "no_othello_allowed"
realm = do_create_realm(realm_name, "Everyone but Othello is allowed")
stream_dict = {
"name": "publicstream",
"description": "Public stream with public history",
"realm_id": realm.id,
}
        # For this test to work, othello can't be in the no_othello_allowed realm.
self.assertNotEqual(
user_profile.realm.id, realm.id, "Expected othello user to not be in this realm."
)
# This should result in missing user
with self.assertRaises(ValidationError):
validate_user_access_to_subscribers_helper(None, stream_dict, lambda user_profile: True)
# This should result in user not in realm
with self.assertRaises(ValidationError):
validate_user_access_to_subscribers_helper(
user_profile, stream_dict, lambda user_profile: True
)
def test_subscriptions_query_count(self) -> None:
"""
Test database query count when creating stream with api/v1/users/me/subscriptions.
"""
user1 = self.example_user("cordelia")
user2 = self.example_user("iago")
new_streams = [
"query_count_stream_1",
"query_count_stream_2",
"query_count_stream_3",
]
# Test creating a public stream when realm does not have a notification stream.
with self.assert_database_query_count(37):
self.common_subscribe_to_streams(
self.test_user,
[new_streams[0]],
dict(principals=orjson.dumps([user1.id, user2.id]).decode()),
)
# Test creating private stream.
with self.assert_database_query_count(36):
self.common_subscribe_to_streams(
self.test_user,
[new_streams[1]],
dict(principals=orjson.dumps([user1.id, user2.id]).decode()),
invite_only=True,
)
# Test creating a public stream with announce when realm has a notification stream.
notifications_stream = get_stream(self.streams[0], self.test_realm)
self.test_realm.notifications_stream_id = notifications_stream.id
self.test_realm.save()
with self.assert_database_query_count(45):
self.common_subscribe_to_streams(
self.test_user,
[new_streams[2]],
dict(
announce="true",
principals=orjson.dumps([user1.id, user2.id]).decode(),
),
)
class GetStreamsTest(ZulipTestCase):
def test_streams_api_for_bot_owners(self) -> None:
hamlet = self.example_user("hamlet")
test_bot = self.create_test_bot("foo", hamlet)
assert test_bot is not None
realm = get_realm("zulip")
self.login_user(hamlet)
# Check it correctly lists the bot owner's subs with
# include_owner_subscribed=true
filters = dict(
include_owner_subscribed="true",
include_public="false",
include_subscribed="false",
)
result = self.api_get(test_bot, "/api/v1/streams", filters)
owner_subs = self.api_get(hamlet, "/api/v1/users/me/subscriptions")
json = self.assert_json_success(result)
self.assertIn("streams", json)
self.assertIsInstance(json["streams"], list)
self.assert_json_success(owner_subs)
owner_subs_json = orjson.loads(owner_subs.content)
self.assertEqual(
sorted(s["name"] for s in json["streams"]),
sorted(s["name"] for s in owner_subs_json["subscriptions"]),
)
# Check it correctly lists the bot owner's subs and the
# bot's subs
self.subscribe(test_bot, "Scotland")
filters = dict(
include_owner_subscribed="true",
include_public="false",
include_subscribed="true",
)
result = self.api_get(test_bot, "/api/v1/streams", filters)
json = self.assert_json_success(result)
self.assertIn("streams", json)
self.assertIsInstance(json["streams"], list)
actual = sorted(s["name"] for s in json["streams"])
expected = [s["name"] for s in owner_subs_json["subscriptions"]]
expected.append("Scotland")
expected.sort()
self.assertEqual(actual, expected)
# Check it correctly lists the bot owner's subs + all public streams
self.make_stream("private_stream", realm=realm, invite_only=True)
self.subscribe(test_bot, "private_stream")
result = self.api_get(
test_bot,
"/api/v1/streams",
{
"include_owner_subscribed": "true",
"include_public": "true",
"include_subscribed": "false",
},
)
json = self.assert_json_success(result)
self.assertIn("streams", json)
self.assertIsInstance(json["streams"], list)
actual = sorted(s["name"] for s in json["streams"])
expected = [s["name"] for s in owner_subs_json["subscriptions"]]
expected.extend(["Rome", "Venice", "Scotland"])
expected.sort()
self.assertEqual(actual, expected)
# Check it correctly lists the bot owner's subs + all public streams +
# the bot's subs
result = self.api_get(
test_bot,
"/api/v1/streams",
{
"include_owner_subscribed": "true",
"include_public": "true",
"include_subscribed": "true",
},
)
json = self.assert_json_success(result)
self.assertIn("streams", json)
self.assertIsInstance(json["streams"], list)
actual = sorted(s["name"] for s in json["streams"])
expected = [s["name"] for s in owner_subs_json["subscriptions"]]
expected.extend(["Rome", "Venice", "Scotland", "private_stream"])
expected.sort()
self.assertEqual(actual, expected)
def test_all_active_streams_api(self) -> None:
url = "/api/v1/streams"
data = {"include_all_active": "true"}
# Check non-superuser can't use include_all_active
normal_user = self.example_user("cordelia")
result = self.api_get(normal_user, url, data)
self.assertEqual(result.status_code, 400)
# Realm admin users can see all active streams.
admin_user = self.example_user("iago")
self.assertTrue(admin_user.is_realm_admin)
result = self.api_get(admin_user, url, data)
json = self.assert_json_success(result)
self.assertIn("streams", json)
self.assertIsInstance(json["streams"], list)
stream_names = {s["name"] for s in json["streams"]}
self.assertEqual(
stream_names,
{"Venice", "Denmark", "Scotland", "Verona", "Rome", "core team"},
)
def test_public_streams_api(self) -> None:
"""
Ensure that the query we use to get public streams successfully returns
a list of streams
"""
user = self.example_user("hamlet")
realm = get_realm("zulip")
self.login_user(user)
# Check it correctly lists the user's subs with include_public=false
result = self.api_get(user, "/api/v1/streams", {"include_public": "false"})
result2 = self.api_get(user, "/api/v1/users/me/subscriptions")
json = self.assert_json_success(result)
self.assertIn("streams", json)
self.assertIsInstance(json["streams"], list)
self.assert_json_success(result2)
json2 = orjson.loads(result2.content)
self.assertEqual(
sorted(s["name"] for s in json["streams"]),
sorted(s["name"] for s in json2["subscriptions"]),
)
# Check it correctly lists all public streams with include_subscribed=false
filters = dict(include_public="true", include_subscribed="false")
result = self.api_get(user, "/api/v1/streams", filters)
json = self.assert_json_success(result)
all_streams = [
stream.name for stream in Stream.objects.filter(realm=realm, invite_only=False)
]
self.assertEqual(sorted(s["name"] for s in json["streams"]), sorted(all_streams))
def test_get_single_stream_api(self) -> None:
self.login("hamlet")
realm = get_realm("zulip")
denmark_stream = get_stream("Denmark", realm)
result = self.client_get(f"/json/streams/{denmark_stream.id}")
json = self.assert_json_success(result)
self.assertEqual(json["stream"]["name"], "Denmark")
self.assertEqual(json["stream"]["stream_id"], denmark_stream.id)
result = self.client_get("/json/streams/9999")
self.assert_json_error(result, "Invalid stream ID")
private_stream = self.make_stream("private_stream", invite_only=True)
self.subscribe(self.example_user("cordelia"), "private_stream")
# Non-admins cannot access unsubscribed private streams.
result = self.client_get(f"/json/streams/{private_stream.id}")
self.assert_json_error(result, "Invalid stream ID")
self.login("iago")
result = self.client_get(f"/json/streams/{private_stream.id}")
json = self.assert_json_success(result)
self.assertEqual(json["stream"]["name"], "private_stream")
self.assertEqual(json["stream"]["stream_id"], private_stream.id)
self.login("cordelia")
result = self.client_get(f"/json/streams/{private_stream.id}")
json = self.assert_json_success(result)
self.assertEqual(json["stream"]["name"], "private_stream")
self.assertEqual(json["stream"]["stream_id"], private_stream.id)
class StreamIdTest(ZulipTestCase):
def test_get_stream_id(self) -> None:
user = self.example_user("hamlet")
self.login_user(user)
stream = gather_subscriptions(user)[0][0]
result = self.client_get("/json/get_stream_id", {"stream": stream["name"]})
response_dict = self.assert_json_success(result)
self.assertEqual(response_dict["stream_id"], stream["stream_id"])
def test_get_stream_id_wrong_name(self) -> None:
user = self.example_user("hamlet")
self.login_user(user)
result = self.client_get("/json/get_stream_id", {"stream": "wrongname"})
self.assert_json_error(result, "Invalid stream name 'wrongname'")
class InviteOnlyStreamTest(ZulipTestCase):
def test_must_be_subbed_to_send(self) -> None:
"""
If you try to send a message to an invite-only stream to which
you aren't subscribed, you'll get a 400.
"""
user = self.example_user("hamlet")
self.login_user(user)
# Create Saxony as an invite-only stream.
self.assert_json_success(
self.common_subscribe_to_streams(user, ["Saxony"], invite_only=True)
)
cordelia = self.example_user("cordelia")
with self.assertRaises(JsonableError):
self.send_stream_message(cordelia, "Saxony")
def test_list_respects_invite_only_bit(self) -> None:
"""
Make sure that /api/v1/users/me/subscriptions properly returns
the invite-only bit for streams that are invite-only
"""
user = self.example_user("hamlet")
self.login_user(user)
self.common_subscribe_to_streams(user, ["Saxony"], invite_only=True)
self.common_subscribe_to_streams(user, ["Normandy"], invite_only=False)
result = self.api_get(user, "/api/v1/users/me/subscriptions")
response_dict = self.assert_json_success(result)
self.assertIn("subscriptions", response_dict)
for sub in response_dict["subscriptions"]:
if sub["name"] == "Normandy":
self.assertEqual(
sub["invite_only"], False, "Normandy was mistakenly marked private"
)
if sub["name"] == "Saxony":
self.assertEqual(sub["invite_only"], True, "Saxony was not properly marked private")
def test_inviteonly(self) -> None:
# Creating an invite-only stream is allowed
hamlet = self.example_user("hamlet")
othello = self.example_user("othello")
stream_name = "Saxony"
result = self.common_subscribe_to_streams(hamlet, [stream_name], invite_only=True)
json = self.assert_json_success(result)
self.assertEqual(json["subscribed"], {hamlet.email: [stream_name]})
self.assertEqual(json["already_subscribed"], {})
# Subscribing oneself to an invite-only stream is not allowed
self.login_user(othello)
result = self.common_subscribe_to_streams(othello, [stream_name], allow_fail=True)
self.assert_json_error(result, "Unable to access stream (Saxony).")
# authorization_errors_fatal=False works
self.login_user(othello)
result = self.common_subscribe_to_streams(
othello,
[stream_name],
extra_post_data={"authorization_errors_fatal": orjson.dumps(False).decode()},
)
json = self.assert_json_success(result)
self.assertEqual(json["unauthorized"], [stream_name])
self.assertEqual(json["subscribed"], {})
self.assertEqual(json["already_subscribed"], {})
# Inviting another user to an invite-only stream is allowed
self.login_user(hamlet)
result = self.common_subscribe_to_streams(
hamlet,
[stream_name],
extra_post_data={"principals": orjson.dumps([othello.id]).decode()},
)
json = self.assert_json_success(result)
self.assertEqual(json["subscribed"], {othello.email: [stream_name]})
self.assertEqual(json["already_subscribed"], {})
# Make sure both users are subscribed to this stream
stream_id = get_stream(stream_name, hamlet.realm).id
result = self.api_get(hamlet, f"/api/v1/streams/{stream_id}/members")
json = self.assert_json_success(result)
        self.assertIn(othello.id, json["subscribers"])
        self.assertIn(hamlet.id, json["subscribers"])
class GetSubscribersTest(ZulipTestCase):
def setUp(self) -> None:
super().setUp()
self.user_profile = self.example_user("hamlet")
self.login_user(self.user_profile)
def verify_sub_fields(self, sub_data: SubscriptionInfo) -> None:
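        """
        Check that each returned subscription dict exposes exactly the
        expected API fields: the Stream/Subscription API_FIELDS plus the
        computed fields below for subscribed and unsubscribed streams, and a
        smaller set (without the Subscription fields) for never-subscribed
        streams.
        """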
other_fields = {
"email_address",
"is_announcement_only",
"in_home_view",
"stream_id",
"stream_weekly_traffic",
"subscribers",
}
expected_fields = set(Stream.API_FIELDS) | set(Subscription.API_FIELDS) | other_fields
expected_fields -= {"id", "can_remove_subscribers_group_id"}
expected_fields |= {"can_remove_subscribers_group"}
for lst in [sub_data.subscriptions, sub_data.unsubscribed]:
for sub in lst:
self.assertEqual(set(sub), expected_fields)
other_fields = {
"is_announcement_only",
"stream_id",
"stream_weekly_traffic",
"subscribers",
}
expected_fields = set(Stream.API_FIELDS) | other_fields
expected_fields -= {"id", "can_remove_subscribers_group_id"}
expected_fields |= {"can_remove_subscribers_group"}
for never_sub in sub_data.never_subscribed:
self.assertEqual(set(never_sub), expected_fields)
def assert_user_got_subscription_notification(
self, user: UserProfile, expected_msg: str
) -> None:
# verify that the user was sent a message informing them about the subscription
realm = user.realm
msg = most_recent_message(user)
self.assertEqual(msg.recipient.type, msg.recipient.PERSONAL)
self.assertEqual(msg.sender_id, self.notification_bot(realm).id)
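        # Compare ignoring whitespace, since the expected message is typically
        # built from an indented triple-quoted string.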
def non_ws(s: str) -> str:
return s.replace("\n", "").replace(" ", "")
self.assertEqual(non_ws(msg.content), non_ws(expected_msg))
def check_well_formed_result(
self, result: Dict[str, Any], stream_name: str, realm: Realm
) -> None:
"""
A successful call to get_subscribers returns the list of subscribers in
the form:
{"msg": "",
"result": "success",
"subscribers": [hamlet_user.id, prospero_user.id]}
"""
self.assertIn("subscribers", result)
self.assertIsInstance(result["subscribers"], list)
true_subscribers = [
user_profile.id for user_profile in self.users_subscribed_to_stream(stream_name, realm)
]
self.assertEqual(sorted(result["subscribers"]), sorted(true_subscribers))
def make_subscriber_request(
self, stream_id: int, user: Optional[UserProfile] = None
) -> "TestHttpResponse":
if user is None:
user = self.user_profile
return self.api_get(user, f"/api/v1/streams/{stream_id}/members")
def make_successful_subscriber_request(self, stream_name: str) -> None:
stream_id = get_stream(stream_name, self.user_profile.realm).id
result = self.make_subscriber_request(stream_id)
response_dict = self.assert_json_success(result)
self.check_well_formed_result(response_dict, stream_name, self.user_profile.realm)
def test_subscriber(self) -> None:
"""
get_subscribers returns the list of subscribers.
"""
stream_name = gather_subscriptions(self.user_profile)[0][0]["name"]
self.make_successful_subscriber_request(stream_name)
def test_gather_subscriptions(self) -> None:
"""
        gather_subscriptions returns correct results with only 4 queries
(We also use this test to verify subscription notifications to
folks who get subscribed to streams.)
"""
hamlet = self.example_user("hamlet")
cordelia = self.example_user("cordelia")
othello = self.example_user("othello")
polonius = self.example_user("polonius")
streams = [f"stream_{i}" for i in range(10)]
for stream_name in streams:
self.make_stream(stream_name)
users_to_subscribe = [
self.user_profile.id,
othello.id,
cordelia.id,
polonius.id,
]
with self.assert_database_query_count(47):
self.common_subscribe_to_streams(
self.user_profile,
streams,
dict(principals=orjson.dumps(users_to_subscribe).decode()),
)
msg = f"""
@**King Hamlet|{hamlet.id}** subscribed you to the following streams:
* #**stream_0**
* #**stream_1**
* #**stream_2**
* #**stream_3**
* #**stream_4**
* #**stream_5**
* #**stream_6**
* #**stream_7**
* #**stream_8**
* #**stream_9**
"""
for user in [cordelia, othello, polonius]:
self.assert_user_got_subscription_notification(user, msg)
# Subscribe ourself first.
self.common_subscribe_to_streams(
self.user_profile,
["stream_invite_only_1"],
dict(principals=orjson.dumps([self.user_profile.id]).decode()),
invite_only=True,
)
# Now add in other users, and this should trigger messages
# to notify the user.
self.common_subscribe_to_streams(
self.user_profile,
["stream_invite_only_1"],
dict(principals=orjson.dumps(users_to_subscribe).decode()),
invite_only=True,
)
msg = f"""
@**King Hamlet|{hamlet.id}** subscribed you to the stream #**stream_invite_only_1**.
"""
for user in [cordelia, othello, polonius]:
self.assert_user_got_subscription_notification(user, msg)
with self.assert_database_query_count(4):
subscribed_streams, _ = gather_subscriptions(
self.user_profile, include_subscribers=True
)
self.assertGreaterEqual(len(subscribed_streams), 11)
for sub in subscribed_streams:
if not sub["name"].startswith("stream_"):
continue
self.assert_length(sub["subscribers"], len(users_to_subscribe))
def test_never_subscribed_streams(self) -> None:
"""
        Check that never_subscribed streams are fetched correctly and do not
        include invite-only streams; for guest users, only web-public streams
        are included.
"""
realm = get_realm("zulip")
users_to_subscribe = [
self.example_user("othello").id,
self.example_user("cordelia").id,
]
public_streams = [
"test_stream_public_1",
"test_stream_public_2",
"test_stream_public_3",
"test_stream_public_4",
"test_stream_public_5",
]
private_streams = [
"test_stream_invite_only_1",
"test_stream_invite_only_2",
]
web_public_streams = [
"test_stream_web_public_1",
"test_stream_web_public_2",
]
def create_public_streams() -> None:
for stream_name in public_streams:
self.make_stream(stream_name, realm=realm)
self.common_subscribe_to_streams(
self.user_profile,
public_streams,
dict(principals=orjson.dumps(users_to_subscribe).decode()),
)
create_public_streams()
def create_web_public_streams() -> None:
for stream_name in web_public_streams:
self.make_stream(stream_name, realm=realm, is_web_public=True)
ret = self.common_subscribe_to_streams(
self.user_profile,
web_public_streams,
dict(principals=orjson.dumps(users_to_subscribe).decode()),
)
self.assert_json_success(ret)
create_web_public_streams()
def create_private_streams() -> None:
self.common_subscribe_to_streams(
self.user_profile,
private_streams,
dict(principals=orjson.dumps(users_to_subscribe).decode()),
invite_only=True,
)
create_private_streams()
def get_never_subscribed() -> List[NeverSubscribedStreamDict]:
with self.assert_database_query_count(4):
sub_data = gather_subscriptions_helper(self.user_profile)
self.verify_sub_fields(sub_data)
never_subscribed = sub_data.never_subscribed
# Ignore old streams.
never_subscribed = [dct for dct in never_subscribed if dct["name"].startswith("test_")]
return never_subscribed
never_subscribed = get_never_subscribed()
        # Invite-only streams should not appear in never_subscribed streams.
self.assert_length(never_subscribed, len(public_streams) + len(web_public_streams))
for stream_dict in never_subscribed:
name = stream_dict["name"]
self.assertFalse("invite_only" in name)
self.assert_length(stream_dict["subscribers"], len(users_to_subscribe))
# Send private stream subscribers to all realm admins.
def test_admin_case() -> None:
self.user_profile.role = UserProfile.ROLE_REALM_ADMINISTRATOR
# Test realm admins can get never subscribed private stream's subscribers.
never_subscribed = get_never_subscribed()
self.assertEqual(
len(never_subscribed),
len(public_streams) + len(private_streams) + len(web_public_streams),
)
for stream_dict in never_subscribed:
self.assert_length(stream_dict["subscribers"], len(users_to_subscribe))
test_admin_case()
def test_guest_user_case() -> None:
self.user_profile.role = UserProfile.ROLE_GUEST
helper_result = gather_subscriptions_helper(self.user_profile)
self.verify_sub_fields(helper_result)
sub = helper_result.subscriptions
unsub = helper_result.unsubscribed
never_sub = helper_result.never_subscribed
# It's +1 because of the stream Rome.
self.assert_length(never_sub, len(web_public_streams) + 1)
sub_ids = [stream["stream_id"] for stream in sub]
unsub_ids = [stream["stream_id"] for stream in unsub]
for stream_dict in never_sub:
self.assertTrue(stream_dict["is_web_public"])
self.assertTrue(stream_dict["stream_id"] not in sub_ids)
self.assertTrue(stream_dict["stream_id"] not in unsub_ids)
# The Rome stream has is_web_public=True, with default
# subscribers not set up by this test, so we do the
# following check only for the streams we created.
if stream_dict["name"] in web_public_streams:
self.assert_length(stream_dict["subscribers"], len(users_to_subscribe))
test_guest_user_case()
def test_gather_subscribed_streams_for_guest_user(self) -> None:
guest_user = self.example_user("polonius")
stream_name_sub = "public_stream_1"
self.make_stream(stream_name_sub, realm=get_realm("zulip"))
self.subscribe(guest_user, stream_name_sub)
stream_name_unsub = "public_stream_2"
self.make_stream(stream_name_unsub, realm=get_realm("zulip"))
self.subscribe(guest_user, stream_name_unsub)
self.unsubscribe(guest_user, stream_name_unsub)
stream_name_never_sub = "public_stream_3"
self.make_stream(stream_name_never_sub, realm=get_realm("zulip"))
normal_user = self.example_user("aaron")
self.subscribe(normal_user, stream_name_sub)
self.subscribe(normal_user, stream_name_unsub)
        self.subscribe(normal_user, stream_name_never_sub)
helper_result = gather_subscriptions_helper(guest_user)
self.verify_sub_fields(helper_result)
subs = helper_result.subscriptions
neversubs = helper_result.never_subscribed
# Guest users get info about subscribed public stream's subscribers
expected_stream_exists = False
for sub in subs:
if sub["name"] == stream_name_sub:
expected_stream_exists = True
self.assert_length(sub["subscribers"], 2)
self.assertTrue(expected_stream_exists)
        # Guest users only get data about never-subscribed streams if they're
        # web-public.
for stream in neversubs:
self.assertTrue(stream["is_web_public"])
        # Guest users only get data about never-subscribed web-public streams.
self.assert_length(neversubs, 1)
def test_api_fields_present(self) -> None:
user = self.example_user("cordelia")
sub_data = gather_subscriptions_helper(user)
subscribed = sub_data.subscriptions
self.assertGreaterEqual(len(subscribed), 1)
self.verify_sub_fields(sub_data)
def test_previously_subscribed_private_streams(self) -> None:
admin_user = self.example_user("iago")
non_admin_user = self.example_user("cordelia")
guest_user = self.example_user("polonius")
stream_name = "private_stream"
self.make_stream(stream_name, realm=get_realm("zulip"), invite_only=True)
self.subscribe(admin_user, stream_name)
self.subscribe(non_admin_user, stream_name)
self.subscribe(guest_user, stream_name)
self.subscribe(self.example_user("othello"), stream_name)
self.unsubscribe(admin_user, stream_name)
self.unsubscribe(non_admin_user, stream_name)
self.unsubscribe(guest_user, stream_name)
# Test admin user gets previously subscribed private stream's subscribers.
sub_data = gather_subscriptions_helper(admin_user)
self.verify_sub_fields(sub_data)
unsubscribed_streams = sub_data.unsubscribed
self.assert_length(unsubscribed_streams, 1)
self.assert_length(unsubscribed_streams[0]["subscribers"], 1)
# Test non-admin users cannot get previously subscribed private stream's subscribers.
sub_data = gather_subscriptions_helper(non_admin_user)
self.verify_sub_fields(sub_data)
unsubscribed_streams = sub_data.unsubscribed
self.assert_length(unsubscribed_streams, 1)
self.assertEqual(unsubscribed_streams[0]["subscribers"], [])
sub_data = gather_subscriptions_helper(guest_user)
self.verify_sub_fields(sub_data)
unsubscribed_streams = sub_data.unsubscribed
self.assert_length(unsubscribed_streams, 1)
self.assertEqual(unsubscribed_streams[0]["subscribers"], [])
def test_gather_subscriptions_mit(self) -> None:
"""
gather_subscriptions returns correct results with only 3 queries
"""
# Subscribe only ourself because invites are disabled on mit.edu
mit_user_profile = self.mit_user("starnine")
user_id = mit_user_profile.id
users_to_subscribe = [user_id, self.mit_user("espuser").id]
        for _ in users_to_subscribe:
stream = self.subscribe(mit_user_profile, "mit_stream")
self.assertTrue(stream.is_in_zephyr_realm)
self.common_subscribe_to_streams(
mit_user_profile,
["mit_invite_only"],
dict(principals=orjson.dumps(users_to_subscribe).decode()),
invite_only=True,
subdomain="zephyr",
)
with self.assert_database_query_count(3):
subscribed_streams, _ = gather_subscriptions(mit_user_profile, include_subscribers=True)
self.assertGreaterEqual(len(subscribed_streams), 2)
for sub in subscribed_streams:
if not sub["name"].startswith("mit_"):
raise AssertionError("Unexpected stream!")
if sub["name"] == "mit_invite_only":
self.assert_length(sub["subscribers"], len(users_to_subscribe))
else:
self.assert_length(sub["subscribers"], 0)
self.assertIsNone(sub["stream_weekly_traffic"])
        # Create a web-public stream to test never_subscribed data.
self.make_stream("mit_stream_2", realm=mit_user_profile.realm, is_web_public=True)
self.make_stream("mit_stream_3", realm=mit_user_profile.realm)
sub_info = gather_subscriptions_helper(mit_user_profile, include_subscribers=True)
never_subscribed_streams = sub_info.never_subscribed
# Users in zephyr mirror realm can only access web-public never subscribed streams.
self.assert_length(never_subscribed_streams, 1)
self.assertEqual(never_subscribed_streams[0]["name"], "mit_stream_2")
self.assertTrue(never_subscribed_streams[0]["is_web_public"])
self.assertIsNone(never_subscribed_streams[0]["stream_weekly_traffic"])
def test_nonsubscriber(self) -> None:
"""
Even a non-subscriber to a public stream can query a stream's membership
with get_subscribers.
"""
# Create a stream for which Hamlet is the only subscriber.
stream_name = "Saxony"
self.common_subscribe_to_streams(self.user_profile, [stream_name])
other_user = self.example_user("othello")
# Fetch the subscriber list as a non-member.
self.login_user(other_user)
self.make_successful_subscriber_request(stream_name)
def test_subscriber_private_stream(self) -> None:
"""
A subscriber to a private stream can query that stream's membership.
"""
stream_name = "Saxony"
self.common_subscribe_to_streams(self.user_profile, [stream_name], invite_only=True)
self.make_successful_subscriber_request(stream_name)
stream_id = get_stream(stream_name, self.user_profile.realm).id
# Verify another user can't get the data.
self.login("cordelia")
result = self.client_get(f"/json/streams/{stream_id}/members")
self.assert_json_error(result, "Invalid stream ID")
# But an organization administrator can
self.login("iago")
result = self.client_get(f"/json/streams/{stream_id}/members")
self.assert_json_success(result)
def test_json_get_subscribers_stream_not_exist(self) -> None:
"""
        json_get_subscribers returns an error for a stream ID that doesn't exist.
"""
stream_id = 99999999
result = self.client_get(f"/json/streams/{stream_id}/members")
self.assert_json_error(result, "Invalid stream ID")
def test_json_get_subscribers(self) -> None:
"""
json_get_subscribers in zerver/views/streams.py
also returns the list of subscribers for a stream, when requested.
"""
stream_name = gather_subscriptions(self.user_profile)[0][0]["name"]
stream_id = get_stream(stream_name, self.user_profile.realm).id
expected_subscribers = gather_subscriptions(self.user_profile, include_subscribers=True)[0][
0
]["subscribers"]
result = self.client_get(f"/json/streams/{stream_id}/members")
result_dict = self.assert_json_success(result)
self.assertIn("subscribers", result_dict)
self.assertIsInstance(result_dict["subscribers"], list)
subscribers: List[int] = []
for subscriber in result_dict["subscribers"]:
self.assertIsInstance(subscriber, int)
subscribers.append(subscriber)
self.assertEqual(set(subscribers), set(expected_subscribers))
def test_json_get_subscribers_for_guest_user(self) -> None:
"""
Guest users should have access to subscribers of web-public streams, even
if they aren't subscribed or have never subscribed to that stream.
"""
guest_user = self.example_user("polonius")
never_subscribed = gather_subscriptions_helper(guest_user, True).never_subscribed
# A guest user can only see never subscribed streams that are web-public.
# For Polonius, the only web-public stream that he is not subscribed at
# this point is Rome.
self.assert_length(never_subscribed, 1)
web_public_stream_id = never_subscribed[0]["stream_id"]
result = self.client_get(f"/json/streams/{web_public_stream_id}/members")
result_dict = self.assert_json_success(result)
self.assertIn("subscribers", result_dict)
self.assertIsInstance(result_dict["subscribers"], list)
self.assertGreater(len(result_dict["subscribers"]), 0)
def test_nonsubscriber_private_stream(self) -> None:
"""
        A non-subscriber, non-realm-admin user can't query a private stream's
        membership, but an unsubscribed realm admin can.
"""
# Create a private stream for which Hamlet is the only subscriber.
stream_name = "NewStream"
self.common_subscribe_to_streams(self.user_profile, [stream_name], invite_only=True)
user_profile = self.example_user("othello")
# Try to fetch the subscriber list as a non-member & non-realm-admin-user.
stream_id = get_stream(stream_name, user_profile.realm).id
result = self.make_subscriber_request(stream_id, user=user_profile)
self.assert_json_error(result, "Invalid stream ID")
# Try to fetch the subscriber list as a non-member & realm-admin-user.
self.login("iago")
self.make_successful_subscriber_request(stream_name)
class AccessStreamTest(ZulipTestCase):
def test_access_stream(self) -> None:
"""
A comprehensive security test for the access_stream_by_* API functions.
"""
# Create a private stream for which Hamlet is the only subscriber.
hamlet = self.example_user("hamlet")
stream_name = "new_private_stream"
self.login_user(hamlet)
self.common_subscribe_to_streams(hamlet, [stream_name], invite_only=True)
stream = get_stream(stream_name, hamlet.realm)
othello = self.example_user("othello")
# Nobody can access a stream that doesn't exist
with self.assertRaisesRegex(JsonableError, "Invalid stream ID"):
access_stream_by_id(hamlet, 501232)
with self.assertRaisesRegex(JsonableError, "Invalid stream name 'invalid stream'"):
access_stream_by_name(hamlet, "invalid stream")
# Hamlet can access the private stream
(stream_ret, sub_ret) = access_stream_by_id(hamlet, stream.id)
self.assertEqual(stream.id, stream_ret.id)
assert sub_ret is not None
self.assertEqual(sub_ret.recipient.type_id, stream.id)
(stream_ret2, sub_ret2) = access_stream_by_name(hamlet, stream.name)
self.assertEqual(stream_ret.id, stream_ret2.id)
self.assertEqual(sub_ret, sub_ret2)
# Othello cannot access the private stream
with self.assertRaisesRegex(JsonableError, "Invalid stream ID"):
access_stream_by_id(othello, stream.id)
with self.assertRaisesRegex(JsonableError, "Invalid stream name 'new_private_stream'"):
access_stream_by_name(othello, stream.name)
# Both Othello and Hamlet can access a public stream that only
# Hamlet is subscribed to in this realm
public_stream_name = "public_stream"
self.common_subscribe_to_streams(hamlet, [public_stream_name], invite_only=False)
public_stream = get_stream(public_stream_name, hamlet.realm)
access_stream_by_id(othello, public_stream.id)
access_stream_by_name(othello, public_stream.name)
access_stream_by_id(hamlet, public_stream.id)
access_stream_by_name(hamlet, public_stream.name)
# Nobody can access a public stream in another realm
mit_realm = get_realm("zephyr")
mit_stream = ensure_stream(mit_realm, "mit_stream", invite_only=False, acting_user=None)
sipbtest = self.mit_user("sipbtest")
with self.assertRaisesRegex(JsonableError, "Invalid stream ID"):
access_stream_by_id(hamlet, mit_stream.id)
with self.assertRaisesRegex(JsonableError, "Invalid stream name 'mit_stream'"):
access_stream_by_name(hamlet, mit_stream.name)
with self.assertRaisesRegex(JsonableError, "Invalid stream ID"):
access_stream_by_id(sipbtest, stream.id)
with self.assertRaisesRegex(JsonableError, "Invalid stream name 'new_private_stream'"):
access_stream_by_name(sipbtest, stream.name)
# MIT realm users cannot access even public streams in their realm
with self.assertRaisesRegex(JsonableError, "Invalid stream ID"):
access_stream_by_id(sipbtest, mit_stream.id)
with self.assertRaisesRegex(JsonableError, "Invalid stream name 'mit_stream'"):
access_stream_by_name(sipbtest, mit_stream.name)
# But they can access streams they are subscribed to
self.common_subscribe_to_streams(sipbtest, [mit_stream.name], subdomain="zephyr")
access_stream_by_id(sipbtest, mit_stream.id)
access_stream_by_name(sipbtest, mit_stream.name)
def test_stream_access_by_guest(self) -> None:
guest_user_profile = self.example_user("polonius")
self.login_user(guest_user_profile)
stream_name = "public_stream_1"
stream = self.make_stream(stream_name, guest_user_profile.realm, invite_only=False)
        # Guest users don't have access to unsubscribed public streams
with self.assertRaisesRegex(JsonableError, "Invalid stream ID"):
access_stream_by_id(guest_user_profile, stream.id)
        # Guest users have access to subscribed public streams
self.subscribe(guest_user_profile, stream_name)
(stream_ret, sub_ret) = access_stream_by_id(guest_user_profile, stream.id)
assert sub_ret is not None
self.assertEqual(stream.id, stream_ret.id)
self.assertEqual(sub_ret.recipient.type_id, stream.id)
stream_name = "private_stream_1"
stream = self.make_stream(stream_name, guest_user_profile.realm, invite_only=True)
# Obviously, a guest user doesn't have access to unsubscribed private streams either
with self.assertRaisesRegex(JsonableError, "Invalid stream ID"):
access_stream_by_id(guest_user_profile, stream.id)
        # Guest users have access to subscribed private streams
self.subscribe(guest_user_profile, stream_name)
(stream_ret, sub_ret) = access_stream_by_id(guest_user_profile, stream.id)
assert sub_ret is not None
self.assertEqual(stream.id, stream_ret.id)
self.assertEqual(sub_ret.recipient.type_id, stream.id)
stream_name = "web_public_stream"
stream = self.make_stream(stream_name, guest_user_profile.realm, is_web_public=True)
# Guest users have access to web-public streams even if they aren't subscribed.
(stream_ret, sub_ret) = access_stream_by_id(guest_user_profile, stream.id)
self.assertTrue(can_access_stream_history(guest_user_profile, stream))
assert sub_ret is None
self.assertEqual(stream.id, stream_ret.id)
class StreamTrafficTest(ZulipTestCase):
def test_average_weekly_stream_traffic_calculation(self) -> None:
# No traffic data for the stream
self.assertEqual(
get_average_weekly_stream_traffic(42, timezone_now() - timedelta(days=300), {1: 4003}),
0,
)
        # Using high numbers here makes it more likely to catch small errors in
        # the denominators of the calculations. That said, we don't want to go
        # over 100, since then the two-significant-digits rounding gets applied.
# old stream
self.assertEqual(
get_average_weekly_stream_traffic(
42, timezone_now() - timedelta(days=300), {42: 98 * 4 + 3}
),
98,
)
# stream between 7 and 27 days old
self.assertEqual(
get_average_weekly_stream_traffic(
42, timezone_now() - timedelta(days=10), {42: (98 * 10 + 9) // 7}
),
98,
)
# stream less than 7 days old
self.assertEqual(
get_average_weekly_stream_traffic(42, timezone_now() - timedelta(days=5), {42: 100}),
None,
)
# average traffic between 0 and 1
self.assertEqual(
get_average_weekly_stream_traffic(42, timezone_now() - timedelta(days=300), {42: 1}), 1
)
def test_round_to_2_significant_digits(self) -> None:
self.assertEqual(120, round_to_2_significant_digits(116))
class NoRecipientIDsTest(ZulipTestCase):
def test_no_recipient_ids(self) -> None:
user_profile = self.example_user("cordelia")
Subscription.objects.filter(
user_profile=user_profile, recipient__type=Recipient.STREAM
).delete()
subs = gather_subscriptions_helper(user_profile).subscriptions
# Checks that gather_subscriptions_helper will not return anything
# since there will not be any recipients, without crashing.
#
# This covers a rare corner case.
self.assert_length(subs, 0)
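# --- Editor's sketch (not part of the original test suite) ------------------
# The assert_database_query_count checks above pin down exactly how many SQL
# queries a code path issues. Django ships the same idea as
# TestCase.assertNumQueries; a minimal, self-contained example (the model
# choice below is illustrative only):
from django.contrib.auth.models import User
from django.test import TestCase as DjangoTestCase


class QueryCountSketch(DjangoTestCase):
    def test_bounded_queries(self) -> None:
        # Fails the test if the block issues any number of queries other
        # than the one declared here.
        with self.assertNumQueries(1):
            list(User.objects.only("id")[:5])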
|
e28522236c69fbdbfcd8e9e78978548144833f48
|
568fa58296378fa129ab3349adf010daa44ed45b
|
/third_party/incubator-tvm/topi/python/topi/x86/bitserial_conv2d.py
|
97d0dc0eefaa6c0a5bd0a427fbd116af29ad332a
|
[
"Apache-2.0",
"BSD-3-Clause",
"NCSA",
"X11-distribute-modifications-variant",
"Zlib",
"MIT",
"LicenseRef-scancode-unknown-license-reference",
"Unlicense",
"LLVM-exception",
"BSD-2-Clause"
] |
permissive
|
mindspore-ai/akg
|
37f471badc66de6a831f1f45ad84344f34d23ef2
|
99f33858d6972741748cbfc9ab0bf9600428fef7
|
refs/heads/master
| 2023-07-25T23:03:17.672665
| 2023-07-11T07:33:57
| 2023-07-11T07:33:57
| 274,077,856
| 319
| 36
|
Apache-2.0
| 2021-12-30T13:43:08
| 2020-06-22T08:09:05
|
Python
|
UTF-8
|
Python
| false
| false
| 8,851
|
py
|
bitserial_conv2d.py
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=invalid-name,unused-variable
"""Bitserial conv2d schedule on x86"""
import tvm
from tvm import autotvm
from topi.util import get_const_int
from .. import generic, tag
@autotvm.register_topi_schedule(generic.nn.schedule_bitserial_conv2d_nchw, ['cpu'], 'direct')
@autotvm.register_topi_schedule(generic.nn.schedule_bitserial_conv2d_nhwc, ['cpu'], 'direct')
def schedule_bitserial_conv2d(cfg, outs):
"""CPU schedule for bitserial convolutions NCHW and NHWC"""
s = tvm.create_schedule([x.op for x in outs])
scheduled_ops = []
def traverse(op):
"""Traverse operators from computation graph"""
output = op.output(0)
# inline all one-to-one-mapping operators except the last stage (output)
if tag.is_broadcast(op.tag) or 'elemwise' in op.tag:
if op not in s.outputs:
s[op].compute_inline()
for tensor in op.input_tensors:
if tensor.op.input_tensors and (tensor.op not in scheduled_ops):
if isinstance(tensor.op, tvm.tensor.ComputeOp):
traverse(tensor.op)
elif 'spatial_bitserial_conv_nchw' in op.tag or 'spatial_bitserial_conv_nhwc' in op.tag:
conv_out = op.input_tensors[0]
kernel_vec = conv_out.op.input_tensors[1]
kernel_q = kernel_vec.op.input_tensors[0]
data_vec = conv_out.op.input_tensors[0]
data_q = data_vec.op.input_tensors[0]
data = data_q.op.input_tensors[0]
data_pad = None
if isinstance(data_q.op, tvm.tensor.ComputeOp) and "pad" in data_q.op.tag:
data_pad = data_q
data_q = data
data = data_q.op.input_tensors[0]
if "QuantizeInput" in data.op.name:
# Need to go up 1 further, from the combine in bitpack
data = data.op.input_tensors[0]
if 'spatial_bitserial_conv_nchw' in op.tag:
_schedule_bitserial_conv2d_nchw(cfg, s, data_q, data_pad, data_vec,
kernel_q, kernel_vec,
conv_out, output, outs[0])
elif 'spatial_bitserial_conv_nhwc' in op.tag:
_schedule_bitserial_conv2d_nhwc(cfg, s, data_q, data_pad, data_vec,
kernel_q, kernel_vec,
conv_out, output, outs[0])
scheduled_ops.append(op)
traverse(outs[0].op)
return s
def _schedule_bitserial_conv2d_nchw(cfg, s, data_q, data_pad, data_vec,
kernel_q, kernel_vec,
conv_out, output, last):
IB, _, CI, IH, IW = data_q.shape
KB, CO, _, KH, KW = kernel_q.shape
_, _, OH, OW = output.shape
# Infer padding and stride
if data_pad is None:
padding = (0, 0)
TH, TW = IH, IW
else:
_, _, _, TH, TW = data_pad.shape
hpad = get_const_int((TH - IH) // 2)
wpad = get_const_int((TW - IW) // 2)
padding = (hpad, wpad)
hstride = get_const_int((TH - KH) // (OH - 1))
wstride = get_const_int((TW - KW) // (OW - 1))
stride = (hstride, wstride)
VC = cfg["tile_co"].size[-1]
VH = cfg["tile_oh"].size[-1]
VW = cfg["tile_ow"].size[-1]
    ##### Schedule data padding and bitpacking
if data_pad is not None:
s[data_pad].compute_inline()
_, _, h, _, _, _, _ = s[data_vec].op.axis
cfg.define_split("tile_ah", cfg.axis(h), num_outputs=2, max_factor=32)
oh, ih = cfg["tile_ah"].apply(s, data_vec, h)
if cfg["tile_ah"].size[1] == 1:
oaxis = oh
paxis = oh
else:
oaxis = oh
paxis = ih
s[data_vec].parallel(paxis)
s[data_vec].pragma(oaxis, "parallel_launch_point")
s[data_vec].pragma(paxis, "parallel_stride_pattern")
s[data_vec].pragma(oaxis, "parallel_barrier_when_finish")
    ##### Schedule kernel bitpacking
co, _, _, _, _, _ = s[kernel_vec].op.axis
cfg.define_split("tile_bco", cfg.axis(co), num_outputs=2, max_factor=32)
oco, ico = cfg["tile_bco"].apply(s, kernel_vec, co)
if cfg["tile_bco"].size[1] == 1:
oaxis = oco
paxis = oco
else:
oaxis = oco
paxis = ico
s[kernel_vec].parallel(paxis)
s[kernel_vec].pragma(oaxis, "parallel_launch_point")
s[kernel_vec].pragma(paxis, "parallel_stride_pattern")
s[kernel_vec].pragma(oaxis, "parallel_barrier_when_finish")
##### Schedule Convolution
n, co, oh, ow, vh, vw, vc = s[conv_out].op.axis
ci, dh, dw, ib, kb = s[conv_out].op.reduce_axis
# s[conv_out].reorder(n, oh, ow, co, vh, vw, dh, dw, ci, vc, b1, b2)
cfg["reorder_0"].apply(s, conv_out, [n, co, oh, ow, vc, vh, vw, dh, dw, kb, ib, ci])
cfg["ann_reduce"].apply(s, conv_out, [kb, ib, dh, dw],
axis_lens=[get_const_int(kb.dom.extent),
get_const_int(ib.dom.extent),
get_const_int(dh.dom.extent),
get_const_int(dw.dom.extent)],
max_unroll=16,
cfg=cfg)
s[conv_out].vectorize(vc)
    # Schedule output
n, co, h, w = s[last].op.axis
co, vc = s[last].split(co, VC)
oh, ow, vh, vw = s[last].tile(h, w, VH, VW)
s[last].reorder(n, co, oh, ow, vh, vw, vc)
if last != output:
s[output].compute_inline()
s[conv_out].compute_at(s[last], ow)
oco, ico = cfg["tile_oh"].apply(s, last, co)
if cfg["tile_oh"].size[1] == 1:
oaxis = oco
paxis = oco
    else:
        # `co` was already split by cfg["tile_oh"].apply() above; reuse its axes.
        oaxis = oco
        paxis = ico
    s[last].parallel(paxis)
return s
def _schedule_bitserial_conv2d_nhwc(cfg, s, data_q, data_pad, data_vec,
kernel_q, kernel_vec,
conv_out, output, last):
# no stride and padding info here
_, IH, IW, CI, IB = data_q.shape
KH, KW, _, CO, KB = kernel_q.shape
_, OH, OW, _ = output.shape
VC = cfg["tile_co"].size[-1]
VH = cfg["tile_oh"].size[-1]
VW = cfg["tile_ow"].size[-1]
##### Schedule data padding and packing
if data_pad is not None:
s[data_pad].compute_inline()
_, h, _, _, _, _, _ = s[data_vec].op.axis
cfg.define_split("tile_ah", cfg.axis(h), num_outputs=2, max_factor=32)
oh, ih = cfg["tile_ah"].apply(s, data_vec, h)
s[data_vec].parallel(oh)
##### Schedule kernel packing
co, _, _, _, _, _ = s[kernel_vec].op.axis
cfg.define_split("tile_bco", cfg.axis(co), num_outputs=2, max_factor=32)
oco, ico = cfg["tile_bco"].apply(s, kernel_vec, co)
s[kernel_vec].parallel(oco)
##### Schedule Convolution
n, oh, ow, co, vh, vw, vc = s[conv_out].op.axis
dh, dw, ci, b1, b2 = s[conv_out].op.reduce_axis
# s[conv_out].reorder(n, oh, ow, co, vh, vw, dh, dw, ci, vc, b1, b2)
cfg["reorder_0"].apply(s, conv_out, [n, oh, ow, co, vh, vw, dh, dw, ci, vc, b1, b2])
cfg["ann_reduce"].apply(s, conv_out, [b1, b2, dh, dw],
axis_lens=[get_const_int(b1.dom.extent),
get_const_int(b2.dom.extent),
get_const_int(dh.dom.extent),
get_const_int(dw.dom.extent)],
max_unroll=16,
cfg=cfg)
s[conv_out].unroll(b1)
s[conv_out].unroll(b2)
s[conv_out].vectorize(vc)
    # Schedule output
n, h, w, co = s[last].op.axis
co, vc = s[last].split(co, VC)
oh, ow, vh, vw = s[last].tile(h, w, VH, VW)
s[last].reorder(n, oh, ow, co, vh, vw, vc)
s[last].vectorize(vc)
if last != output:
s[output].compute_inline()
s[conv_out].compute_at(s[last], ow)
oho, iho = cfg["tile_oh"].apply(s, last, oh) # reuse parameter
s[last].parallel(oho)
return s
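# --- Editor's sketch (not part of the original schedule) --------------------
# _schedule_bitserial_conv2d_nchw infers padding and stride purely from
# shapes: with padded height TH, kernel KH and output OH, a standard conv
# satisfies OH = (TH - KH) // hstride + 1, so hstride = (TH - KH) // (OH - 1).
# A quick numeric check with plain integers (the values are illustrative only):
def _check_shape_inference(IH=56, KH=3, hpad=1, hstride=2):
    TH = IH + 2 * hpad                  # padded input height
    OH = (TH - KH) // hstride + 1       # standard convolution output size
    assert (TH - IH) // 2 == hpad       # padding recovered from shapes
    assert (TH - KH) // (OH - 1) == hstride  # stride recovered from shapes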
|
75c6880ce8c7242ed239379d200263f1552a62c6
|
bbfc9f05efefe29b6ce9832bb3506efb900c1c93
|
/tencentcloud/iir/v20200417/models.py
|
9f87875225dcdf373d17f78754c2e08efae9357f
|
[
"Apache-2.0"
] |
permissive
|
TencentCloud/tencentcloud-sdk-python
|
a2fab235926b0a27e9cfdf55e085a8bb15b3f506
|
6baf00a5a56ba58b6a1123423e0a1422d17a0201
|
refs/heads/master
| 2023-09-04T10:52:28.060438
| 2023-09-01T03:09:16
| 2023-09-01T03:09:16
| 130,147,399
| 594
| 300
|
Apache-2.0
| 2023-09-06T07:03:24
| 2018-04-19T02:23:12
|
Python
|
UTF-8
|
Python
| false
| false
| 12,048
|
py
|
models.py
|
# -*- coding: utf8 -*-
# Copyright (c) 2017-2021 THL A29 Limited, a Tencent company. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import warnings
from tencentcloud.common.abstract_model import AbstractModel
class Location(AbstractModel):
    """Rectangular bounding box (four vertex coordinates) of the detected subject in the image.
"""
def __init__(self):
r"""
        :param _XMin: X coordinate of the top-left corner of the bounding box
        :type XMin: int
        :param _YMin: Y coordinate of the top-left corner of the bounding box
        :type YMin: int
        :param _XMax: X coordinate of the bottom-right corner of the bounding box
        :type XMax: int
        :param _YMax: Y coordinate of the bottom-right corner of the bounding box
        :type YMax: int
"""
self._XMin = None
self._YMin = None
self._XMax = None
self._YMax = None
@property
def XMin(self):
return self._XMin
@XMin.setter
def XMin(self, XMin):
self._XMin = XMin
@property
def YMin(self):
return self._YMin
@YMin.setter
def YMin(self, YMin):
self._YMin = YMin
@property
def XMax(self):
return self._XMax
@XMax.setter
def XMax(self, XMax):
self._XMax = XMax
@property
def YMax(self):
return self._YMax
@YMax.setter
def YMax(self, YMax):
self._YMax = YMax
def _deserialize(self, params):
self._XMin = params.get("XMin")
self._YMin = params.get("YMin")
self._XMax = params.get("XMax")
self._YMax = params.get("YMax")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            property_name = name[1:]
            if property_name in member_set:
                member_set.remove(property_name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class ProductInfo(AbstractModel):
    """Detailed information about the product recognized in the image.
    When multiple objects are detected in the image, the most salient one is recognized.
"""
def __init__(self):
r"""
        :param _FindSKU: 1 means an identical product was found; the following fields describe it.
0 means no identical product was found; the specific product info (reference price, name, brand, etc.) is empty and only the category is provided.
Whether an identical product was found is judged from the Score value: the higher the score, the more likely the product is identical.
        :type FindSKU: int
        :param _Location: Coordinates of this product in the image, expressed as the four vertex coordinates of a rectangle.
        :type Location: :class:`tencentcloud.iir.v20200417.models.Location`
        :param _Name: Product name
        :type Name: str
        :param _Brand: Product brand
        :type Brand: str
        :param _Price: Reference price, aggregated from multiple sources; for reference only.
        :type Price: str
        :param _ProductCategory: Product category of the recognition result.
Includes: shoes, books/audio-video, bags, beauty/personal care, apparel, consumer electronics, toys/instruments, food/beverages, jewelry, home/decor, medicine, alcohol, plants/gardening, other products, non-products, etc.
When the category is "non-product", all product fields except Location, Score and this field are empty.
        :type ProductCategory: str
        :param _Score: Similarity between the main object in the input image and the returned result. The higher the score, the more likely the result is the same product as the input image.
        :type Score: float
        :param _Image: URL of the matched product's image
        :type Image: str
"""
self._FindSKU = None
self._Location = None
self._Name = None
self._Brand = None
self._Price = None
self._ProductCategory = None
self._Score = None
self._Image = None
@property
def FindSKU(self):
return self._FindSKU
@FindSKU.setter
def FindSKU(self, FindSKU):
self._FindSKU = FindSKU
@property
def Location(self):
return self._Location
@Location.setter
def Location(self, Location):
self._Location = Location
@property
def Name(self):
return self._Name
@Name.setter
def Name(self, Name):
self._Name = Name
@property
def Brand(self):
return self._Brand
@Brand.setter
def Brand(self, Brand):
self._Brand = Brand
@property
def Price(self):
return self._Price
@Price.setter
def Price(self, Price):
self._Price = Price
@property
def ProductCategory(self):
return self._ProductCategory
@ProductCategory.setter
def ProductCategory(self, ProductCategory):
self._ProductCategory = ProductCategory
@property
def Score(self):
return self._Score
@Score.setter
def Score(self, Score):
self._Score = Score
@property
def Image(self):
return self._Image
@Image.setter
def Image(self, Image):
self._Image = Image
def _deserialize(self, params):
self._FindSKU = params.get("FindSKU")
if params.get("Location") is not None:
self._Location = Location()
self._Location._deserialize(params.get("Location"))
self._Name = params.get("Name")
self._Brand = params.get("Brand")
self._Price = params.get("Price")
self._ProductCategory = params.get("ProductCategory")
self._Score = params.get("Score")
self._Image = params.get("Image")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            property_name = name[1:]
            if property_name in member_set:
                member_set.remove(property_name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class RecognizeProductRequest(AbstractModel):
    """RecognizeProduct request structure.
"""
def __init__(self):
r"""
        :param _ImageUrl: Image restrictions: the beta currently supports only jpg/jpeg, up to 1 MB, with a resolution between 250 thousand and 1 million pixels.
Compressing the image beforehand is recommended to speed up processing.
        :type ImageUrl: str
        :param _ImageBase64: Base64-encoded image content. Up to 1 MB, with a resolution between 250 thousand and 1 million pixels.
When both are provided, the ImageUrl field takes precedence over this one.
**Note: the image must be base64-encoded, with the encoding header removed.**
        :type ImageBase64: str
"""
self._ImageUrl = None
self._ImageBase64 = None
@property
def ImageUrl(self):
return self._ImageUrl
@ImageUrl.setter
def ImageUrl(self, ImageUrl):
self._ImageUrl = ImageUrl
@property
def ImageBase64(self):
return self._ImageBase64
@ImageBase64.setter
def ImageBase64(self, ImageBase64):
self._ImageBase64 = ImageBase64
def _deserialize(self, params):
self._ImageUrl = params.get("ImageUrl")
self._ImageBase64 = params.get("ImageBase64")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            property_name = name[1:]
            if property_name in member_set:
                member_set.remove(property_name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class RecognizeProductResponse(AbstractModel):
    """RecognizeProduct response structure.
"""
def __init__(self):
r"""
        :param _RegionDetected: Detected product positions and category predictions in the image.
When the image contains multiple products, multiple sets of coordinates are returned, sorted by __saliency__ (combining area, centrality, and detection confidence).
At most __3 sets__ of detection results are returned.
        :type RegionDetected: list of RegionDetected
        :param _ProductInfo: Detailed information about the product recognized in the image.
When multiple objects are detected in the image, the most salient one is recognized.
        :type ProductInfo: :class:`tencentcloud.iir.v20200417.models.ProductInfo`
        :param _RequestId: Unique request ID, returned with every request. Provide the RequestId of the failing request when troubleshooting.
        :type RequestId: str
"""
self._RegionDetected = None
self._ProductInfo = None
self._RequestId = None
@property
def RegionDetected(self):
return self._RegionDetected
@RegionDetected.setter
def RegionDetected(self, RegionDetected):
self._RegionDetected = RegionDetected
@property
def ProductInfo(self):
return self._ProductInfo
@ProductInfo.setter
def ProductInfo(self, ProductInfo):
self._ProductInfo = ProductInfo
@property
def RequestId(self):
return self._RequestId
@RequestId.setter
def RequestId(self, RequestId):
self._RequestId = RequestId
def _deserialize(self, params):
if params.get("RegionDetected") is not None:
self._RegionDetected = []
for item in params.get("RegionDetected"):
obj = RegionDetected()
obj._deserialize(item)
self._RegionDetected.append(obj)
if params.get("ProductInfo") is not None:
self._ProductInfo = ProductInfo()
self._ProductInfo._deserialize(params.get("ProductInfo"))
self._RequestId = params.get("RequestId")
class RegionDetected(AbstractModel):
    """Detected product positions and category predictions in the image.
    When the image contains multiple products, multiple sets of coordinates are returned, sorted by __saliency__ (combining area, centrality, and detection confidence).
    At most __3 sets__ of detection results are returned.
"""
def __init__(self):
r"""
        :param _Category: Predicted product category.
Includes: shoes, books/audio-video, bags, beauty/personal care, apparel, consumer electronics, toys/instruments, food/beverages, jewelry, home/decor, medicine, alcohol, plants/gardening, other products, non-products, etc.
        :type Category: str
        :param _CategoryScore: Confidence of the category prediction
        :type CategoryScore: float
        :param _Location: Coordinates of the detected subject in the image, expressed as the four vertex coordinates of a rectangle
        :type Location: :class:`tencentcloud.iir.v20200417.models.Location`
"""
self._Category = None
self._CategoryScore = None
self._Location = None
@property
def Category(self):
return self._Category
@Category.setter
def Category(self, Category):
self._Category = Category
@property
def CategoryScore(self):
return self._CategoryScore
@CategoryScore.setter
def CategoryScore(self, CategoryScore):
self._CategoryScore = CategoryScore
@property
def Location(self):
return self._Location
@Location.setter
def Location(self, Location):
self._Location = Location
def _deserialize(self, params):
self._Category = params.get("Category")
self._CategoryScore = params.get("CategoryScore")
if params.get("Location") is not None:
self._Location = Location()
self._Location._deserialize(params.get("Location"))
        member_set = set(params.keys())
        for name, value in vars(self).items():
            property_name = name[1:]
            if property_name in member_set:
                member_set.remove(property_name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
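# --- Editor's sketch (not part of the SDK) ----------------------------------
# All models above share the same pattern: private attributes, property
# accessors, and a _deserialize() that warns about unknown keys. Example
# (the field values are illustrative only):
if __name__ == "__main__":
    loc = Location()
    # "Extra" is not a model field, so this call emits a UserWarning about it;
    # the recognized fields become available through the properties.
    loc._deserialize({"XMin": 1, "YMin": 2, "XMax": 30, "YMax": 40, "Extra": 0})
    assert (loc.XMin, loc.YMin, loc.XMax, loc.YMax) == (1, 2, 30, 40)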
|
f7404e1805f3a203c997d0f418d079af2c1043b8
|
119b447b0289d828360be7c45288b70a651a7e12
|
/malgan/detector.py
|
8b278d1bb2d3ce301675e7d12ccec311966d85a2
|
[
"MIT"
] |
permissive
|
CyberForce/Pesidious
|
446bbeb1b7a16443666b3257419931efb4e8ecbb
|
c36647d1b3ba86a9a4e6e1a0bda2a371d8875781
|
refs/heads/master
| 2022-12-18T11:09:20.253787
| 2020-10-01T19:47:12
| 2020-10-01T19:47:12
| 295,115,767
| 119
| 36
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,003
|
py
|
detector.py
|
# -*- coding: utf-8 -*-
from enum import Enum
from typing import Union
import numpy as np
import sklearn
from sklearn.tree import DecisionTreeClassifier
from sklearn.ensemble import RandomForestClassifier
from sklearn.linear_model import LogisticRegression
from sklearn.neural_network import MLPClassifier
from sklearn.svm import SVC
import torch
TorchOrNumpy = Union[np.ndarray, torch.Tensor]
# noinspection PyPep8Naming
class BlackBoxDetector:
r"""
    Black-box detector that mimics an antivirus/anti-malware program, classifying
    a specific program as either malware or benign.
"""
class Type(Enum):
r""" Learner algorithm to be used by the black-box detector """
DecisionTree = DecisionTreeClassifier()
LogisticRegression = LogisticRegression(solver='lbfgs', max_iter=int(1e6))
MultiLayerPerceptron = MLPClassifier()
RandomForest = RandomForestClassifier(n_estimators=100)
SVM = SVC(gamma="auto")
@staticmethod
def names():
r""" Builds the list of all enum names """
return [c.name for c in BlackBoxDetector.Type]
@staticmethod
def get_from_name(name):
r"""
Gets the enum item from the specified name
:param name: Name of the enum object
:return: Enum item associated with the specified name
"""
for c in BlackBoxDetector.Type:
if c.name == name:
return c
            raise ValueError('Unknown enum "%s" for class "%s"' % (name, BlackBoxDetector.Type.__name__))
def __init__(self, learner_type: 'BlackBoxDetector.Type'):
self.type = learner_type
# noinspection PyCallingNonCallable
self._model = sklearn.clone(self.type.value)
self.training = True
def fit(self, X: TorchOrNumpy, y: TorchOrNumpy):
r"""
        Fits the learner.  Supports both NumPy arrays and PyTorch tensors
        as input.
:param X: Examples upon which to train
:param y: Labels for the examples
"""
if isinstance(X, torch.Tensor):
X = X.numpy()
if isinstance(y, torch.Tensor):
y = y.numpy()
self._model.fit(X, y)
self.training = False
    def predict(self, X: TorchOrNumpy) -> torch.Tensor:
r"""
Predict the labels for \p X
        :param X: Set of examples for which labels should be predicted
:return: Predicted value for \p X
"""
if self.training:
raise ValueError("Detector does not appear to be trained but trying to predict")
        if isinstance(X, torch.Tensor):
            # Move off the GPU (if any) before converting to NumPy.
            X = X.cpu().numpy()
y = torch.from_numpy(self._model.predict(X)).float()
return y.cuda() if torch.cuda.is_available() else y
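# --- Editor's sketch (not part of the original module) ----------------------
# Minimal usage of BlackBoxDetector: pick a learner by name, fit on random
# features/labels, and predict. The data below is synthetic, for illustration.
if __name__ == "__main__":
    rng = np.random.RandomState(0)
    X = torch.from_numpy(rng.rand(32, 8).astype(np.float32))
    y = torch.from_numpy(rng.randint(0, 2, size=32))
    detector = BlackBoxDetector(BlackBoxDetector.Type.get_from_name("RandomForest"))
    detector.fit(X, y)
    preds = detector.predict(X)  # float tensor of 0/1 labels
    assert preds.shape[0] == 32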
|
a1979fe6fc265f85ceda4d0b16f0c7eb37a907e2
|
40ca168bbb9c865a13c83ef479838981c5b7a1c0
|
/packages/hagrid/hagrid/rand_sec.py
|
8f7735820b3c4dec168d50d30dfdef5e5506be91
|
[
"Apache-2.0",
"Python-2.0"
] |
permissive
|
OpenMined/PySyft
|
6907171bc35062d04c1b6320097c3bcafb65ae68
|
1833278212d89e66853f28a7ca365261550bbe4f
|
refs/heads/dev
| 2023-09-05T05:50:48.773703
| 2023-09-05T04:00:44
| 2023-09-05T04:00:44
| 97,641,933
| 9,473
| 2,530
|
Apache-2.0
| 2023-09-14T12:50:53
| 2017-07-18T20:41:16
|
Python
|
UTF-8
|
Python
| false
| false
| 2,819
|
py
|
rand_sec.py
|
# stdlib
from os import urandom
import string
import sys
from typing import List
from typing import Set
def generate_sec_random_password(
length: int,
special_chars: bool = True,
digits: bool = True,
lower_case: bool = True,
upper_case: bool = True,
) -> str:
"""Generates a random password of the given length.
Args:
length (int): length of the password
special_chars (bool, optional): Include at least one specials char in the password. Defaults to True.
digits (bool, optional): Include at least one digit in the password. Defaults to True.
lower_case (bool, optional): Include at least one lower case character in the password. Defaults to True.
        upper_case (bool, optional): Include at least one upper case character in the password. Defaults to True.
    Raises:
        ValueError: If the password length is too short.
Returns:
str: randomly generated password
"""
if not isinstance(length, int) or length < 10:
raise ValueError(
"Password should have a positive safe length of at least 10 characters!"
)
choices: str = ""
required_tokens: List[str] = []
if special_chars:
special_characters = "!@#$%^&*()_+"
choices += special_characters
required_tokens.append(
special_characters[
int.from_bytes(urandom(1), sys.byteorder) % len(special_characters)
]
)
if lower_case:
choices += string.ascii_lowercase
required_tokens.append(
string.ascii_lowercase[
int.from_bytes(urandom(1), sys.byteorder) % len(string.ascii_lowercase)
]
)
if upper_case:
choices += string.ascii_uppercase
required_tokens.append(
string.ascii_uppercase[
int.from_bytes(urandom(1), sys.byteorder) % len(string.ascii_uppercase)
]
)
if digits:
choices += string.digits
required_tokens.append(
string.digits[
int.from_bytes(urandom(1), sys.byteorder) % len(string.digits)
]
)
# Python 3 (urandom returns bytes)
password = [choices[c % len(choices)] for c in urandom(length)]
# Pick some random indexes
random_indexes: Set[int] = set()
while len(random_indexes) < len(required_tokens):
random_indexes.add(int.from_bytes(urandom(1), sys.byteorder) % len(password))
# Replace the random indexes with the required tokens
for i, idx in enumerate(random_indexes):
password[idx] = required_tokens[i]
return "".join(password)
if __name__ == "__main__":
pwd_length = 48
# generate_sec_random_password(pwd_length)
print(generate_sec_random_password(pwd_length, special_chars=False))
|
4da3f8084216312a09d1c62cd78f2f1d8a01e50a
|
f9d564f1aa83eca45872dab7fbaa26dd48210d08
|
/huaweicloud-sdk-lakeformation/huaweicloudsdklakeformation/v1/model/lake_formation_policy.py
|
cfd2f2151ecd91d7667a72da3b4ea35f28f79a38
|
[
"Apache-2.0"
] |
permissive
|
huaweicloud/huaweicloud-sdk-python-v3
|
cde6d849ce5b1de05ac5ebfd6153f27803837d84
|
f69344c1dadb79067746ddf9bfde4bddc18d5ecf
|
refs/heads/master
| 2023-09-01T19:29:43.013318
| 2023-08-31T08:28:59
| 2023-08-31T08:28:59
| 262,207,814
| 103
| 44
|
NOASSERTION
| 2023-06-22T14:50:48
| 2020-05-08T02:28:43
|
Python
|
UTF-8
|
Python
| false
| false
| 14,168
|
py
|
lake_formation_policy.py
|
# coding: utf-8
import six
from huaweicloudsdkcore.utils.http_utils import sanitize_for_serialization
class LakeFormationPolicy:
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
sensitive_list = []
openapi_types = {
'project_id': 'str',
'instance_id': 'str',
'principal_type': 'str',
'principal_source': 'str',
'principal_name': 'str',
'resource': 'ResourceInfo',
'resource_name': 'str',
'permissions': 'list[str]',
'grant_able_permissions': 'list[str]',
'created_time': 'int',
'condition': 'str',
'obligation': 'str',
'authorization_paths': 'list[str]'
}
attribute_map = {
'project_id': 'project_id',
'instance_id': 'instance_id',
'principal_type': 'principal_type',
'principal_source': 'principal_source',
'principal_name': 'principal_name',
'resource': 'resource',
'resource_name': 'resource_name',
'permissions': 'permissions',
'grant_able_permissions': 'grant_able_permissions',
'created_time': 'created_time',
'condition': 'condition',
'obligation': 'obligation',
'authorization_paths': 'authorization_paths'
}
def __init__(self, project_id=None, instance_id=None, principal_type=None, principal_source=None, principal_name=None, resource=None, resource_name=None, permissions=None, grant_able_permissions=None, created_time=None, condition=None, obligation=None, authorization_paths=None):
"""LakeFormationPolicy
The model defined in huaweicloud sdk
        :param project_id: Project ID
        :type project_id: str
        :param instance_id: Instance ID
        :type instance_id: str
        :param principal_type: Principal type: USER, GROUP, ROLE, SHARE, OTHER
        :type principal_type: str
        :param principal_source: Principal source: IAM (cloud), SAML (federation), LDAP (permission policy), LOCAL, OTHER
        :type principal_source: str
        :param principal_name: Principal name
        :type principal_name: str
        :param resource:
        :type resource: :class:`huaweicloudsdklakeformation.v1.ResourceInfo`
        :param resource_name: Must use dot-separated format
        :type resource_name: str
        :param permissions: Permission list
        :type permissions: list[str]
        :param grant_able_permissions: List of permissions that can be granted onward
        :type grant_able_permissions: list[str]
        :param created_time: Creation time
        :type created_time: int
        :param condition: Condition information
        :type condition: str
        :param obligation: Obligation; currently includes data filter and data mask
        :type obligation: str
        :param authorization_paths: List of authorization paths
        :type authorization_paths: list[str]
"""
self._project_id = None
self._instance_id = None
self._principal_type = None
self._principal_source = None
self._principal_name = None
self._resource = None
self._resource_name = None
self._permissions = None
self._grant_able_permissions = None
self._created_time = None
self._condition = None
self._obligation = None
self._authorization_paths = None
self.discriminator = None
self.project_id = project_id
if instance_id is not None:
self.instance_id = instance_id
self.principal_type = principal_type
self.principal_source = principal_source
self.principal_name = principal_name
if resource is not None:
self.resource = resource
self.resource_name = resource_name
self.permissions = permissions
if grant_able_permissions is not None:
self.grant_able_permissions = grant_able_permissions
self.created_time = created_time
if condition is not None:
self.condition = condition
if obligation is not None:
self.obligation = obligation
if authorization_paths is not None:
self.authorization_paths = authorization_paths
@property
def project_id(self):
"""Gets the project_id of this LakeFormationPolicy.
        Project ID
:return: The project_id of this LakeFormationPolicy.
:rtype: str
"""
return self._project_id
@project_id.setter
def project_id(self, project_id):
"""Sets the project_id of this LakeFormationPolicy.
        Project ID
:param project_id: The project_id of this LakeFormationPolicy.
:type project_id: str
"""
self._project_id = project_id
@property
def instance_id(self):
"""Gets the instance_id of this LakeFormationPolicy.
        Instance ID
:return: The instance_id of this LakeFormationPolicy.
:rtype: str
"""
return self._instance_id
@instance_id.setter
def instance_id(self, instance_id):
"""Sets the instance_id of this LakeFormationPolicy.
        Instance ID
:param instance_id: The instance_id of this LakeFormationPolicy.
:type instance_id: str
"""
self._instance_id = instance_id
@property
def principal_type(self):
"""Gets the principal_type of this LakeFormationPolicy.
        Principal type: USER, GROUP, ROLE, SHARE, OTHER
:return: The principal_type of this LakeFormationPolicy.
:rtype: str
"""
return self._principal_type
@principal_type.setter
def principal_type(self, principal_type):
"""Sets the principal_type of this LakeFormationPolicy.
        Principal type: USER, GROUP, ROLE, SHARE, OTHER
:param principal_type: The principal_type of this LakeFormationPolicy.
:type principal_type: str
"""
self._principal_type = principal_type
@property
def principal_source(self):
"""Gets the principal_source of this LakeFormationPolicy.
        Principal source: IAM (cloud), SAML (federation), LDAP (permission policy), LOCAL, OTHER
:return: The principal_source of this LakeFormationPolicy.
:rtype: str
"""
return self._principal_source
@principal_source.setter
def principal_source(self, principal_source):
"""Sets the principal_source of this LakeFormationPolicy.
        Principal source: IAM (cloud), SAML (federation), LDAP (permission policy), LOCAL, OTHER
:param principal_source: The principal_source of this LakeFormationPolicy.
:type principal_source: str
"""
self._principal_source = principal_source
@property
def principal_name(self):
"""Gets the principal_name of this LakeFormationPolicy.
        Principal name
:return: The principal_name of this LakeFormationPolicy.
:rtype: str
"""
return self._principal_name
@principal_name.setter
def principal_name(self, principal_name):
"""Sets the principal_name of this LakeFormationPolicy.
        Principal name
:param principal_name: The principal_name of this LakeFormationPolicy.
:type principal_name: str
"""
self._principal_name = principal_name
@property
def resource(self):
"""Gets the resource of this LakeFormationPolicy.
:return: The resource of this LakeFormationPolicy.
:rtype: :class:`huaweicloudsdklakeformation.v1.ResourceInfo`
"""
return self._resource
@resource.setter
def resource(self, resource):
"""Sets the resource of this LakeFormationPolicy.
:param resource: The resource of this LakeFormationPolicy.
:type resource: :class:`huaweicloudsdklakeformation.v1.ResourceInfo`
"""
self._resource = resource
@property
def resource_name(self):
"""Gets the resource_name of this LakeFormationPolicy.
        Must use dot-separated format
:return: The resource_name of this LakeFormationPolicy.
:rtype: str
"""
return self._resource_name
@resource_name.setter
def resource_name(self, resource_name):
"""Sets the resource_name of this LakeFormationPolicy.
        Must use dot-separated format
:param resource_name: The resource_name of this LakeFormationPolicy.
:type resource_name: str
"""
self._resource_name = resource_name
@property
def permissions(self):
"""Gets the permissions of this LakeFormationPolicy.
        Permission list
:return: The permissions of this LakeFormationPolicy.
:rtype: list[str]
"""
return self._permissions
@permissions.setter
def permissions(self, permissions):
"""Sets the permissions of this LakeFormationPolicy.
        Permission list
:param permissions: The permissions of this LakeFormationPolicy.
:type permissions: list[str]
"""
self._permissions = permissions
@property
def grant_able_permissions(self):
"""Gets the grant_able_permissions of this LakeFormationPolicy.
        List of permissions that can be granted onward
:return: The grant_able_permissions of this LakeFormationPolicy.
:rtype: list[str]
"""
return self._grant_able_permissions
@grant_able_permissions.setter
def grant_able_permissions(self, grant_able_permissions):
"""Sets the grant_able_permissions of this LakeFormationPolicy.
        List of permissions that can be granted onward
:param grant_able_permissions: The grant_able_permissions of this LakeFormationPolicy.
:type grant_able_permissions: list[str]
"""
self._grant_able_permissions = grant_able_permissions
@property
def created_time(self):
"""Gets the created_time of this LakeFormationPolicy.
        Creation time
:return: The created_time of this LakeFormationPolicy.
:rtype: int
"""
return self._created_time
@created_time.setter
def created_time(self, created_time):
"""Sets the created_time of this LakeFormationPolicy.
        Creation time
:param created_time: The created_time of this LakeFormationPolicy.
:type created_time: int
"""
self._created_time = created_time
@property
def condition(self):
"""Gets the condition of this LakeFormationPolicy.
        Condition information
:return: The condition of this LakeFormationPolicy.
:rtype: str
"""
return self._condition
@condition.setter
def condition(self, condition):
"""Sets the condition of this LakeFormationPolicy.
        Condition information
:param condition: The condition of this LakeFormationPolicy.
:type condition: str
"""
self._condition = condition
@property
def obligation(self):
"""Gets the obligation of this LakeFormationPolicy.
        Obligation; currently includes data filter and data mask
:return: The obligation of this LakeFormationPolicy.
:rtype: str
"""
return self._obligation
@obligation.setter
def obligation(self, obligation):
"""Sets the obligation of this LakeFormationPolicy.
        Obligation; currently includes data filter and data mask
:param obligation: The obligation of this LakeFormationPolicy.
:type obligation: str
"""
self._obligation = obligation
@property
def authorization_paths(self):
"""Gets the authorization_paths of this LakeFormationPolicy.
        List of authorization paths
:return: The authorization_paths of this LakeFormationPolicy.
:rtype: list[str]
"""
return self._authorization_paths
@authorization_paths.setter
def authorization_paths(self, authorization_paths):
"""Sets the authorization_paths of this LakeFormationPolicy.
        List of authorization paths
:param authorization_paths: The authorization_paths of this LakeFormationPolicy.
:type authorization_paths: list[str]
"""
self._authorization_paths = authorization_paths
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
if attr in self.sensitive_list:
result[attr] = "****"
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
import simplejson as json
if six.PY2:
import sys
reload(sys)
sys.setdefaultencoding("utf-8")
return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)
def __repr__(self):
"""For `print`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, LakeFormationPolicy):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
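# --- Editor's sketch (not part of the SDK) -----------------------------------
# Example round trip: construct a policy and serialize it with to_dict().
# All field values below are illustrative only.
if __name__ == "__main__":
    policy = LakeFormationPolicy(
        project_id="example-project",
        principal_type="USER",
        principal_source="IAM",
        principal_name="alice",
        resource_name="catalog.database.table",
        permissions=["SELECT"],
        created_time=0,
    )
    assert policy.to_dict()["permissions"] == ["SELECT"]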
|
b695bd0e3bee9e4fcebfd122305e2f7d34c4b58a
|
b86bfae60767eed63cf935609d934b0a5c98918e
|
/modules/imdb.py
|
2b11d85b830d17d0807012594ef0089cf2bf70ca
|
[
"EFL-2.0"
] |
permissive
|
myano/jenni
|
cabf44b54968e1320313cdad90a1a2d9f7c25569
|
d2e9f86b4d0826f43806bf6baf134147500027db
|
refs/heads/master
| 2021-08-16T10:48:20.006984
| 2020-07-23T13:22:19
| 2020-07-23T13:22:19
| 422,097
| 158
| 118
|
NOASSERTION
| 2023-01-07T14:35:48
| 2009-12-12T01:00:31
|
Python
|
UTF-8
|
Python
| false
| false
| 3,029
|
py
|
imdb.py
|
# -*- coding: utf8 -*-
'''
imdb.py - jenni Movie Information Module
Copyright 2014-2015, yano, yanovich.net
Copyright 2012, Elad Alfassa, <elad@fedoraproject.org>
Licensed under the Eiffel Forum License 2.
This module relies on omdbapi.com
More info:
* jenni: https://github.com/myano/jenni/
* Phenny: http://inamidst.com/phenny/
'''
from modules import proxy
import json
import re
import urllib2
API_BASE_URL = 'http://www.omdbapi.com/'
def prep_title(txt):
txt = txt.replace(' ', '+')
txt = (txt).encode('utf-8')
txt = urllib2.quote(txt)
return txt
def movie(jenni, input):
'''.imdb movie/show title -- displays information about a production'''
if not input.group(2):
return jenni.say('Please enter a movie or TV show title. '
'Year is optional.')
word = input.group(2).rstrip()
matchObj = re.match(r'([\w\s]*)\s?,\s?(\d{4})', word, re.M | re.I)
if matchObj:
title = matchObj.group(1)
year = matchObj.group(2)
title = prep_title(title)
uri = API_BASE_URL + '?t=%s&y=%s&plot=short&r=json' % (title, year)
else:
title = word
title = prep_title(title)
uri = API_BASE_URL + '?t=%s&plot=short&r=json' % (title)
try:
page = proxy.get(uri)
except:
return jenni.say('[IMDB] Connection to API did not succeed.')
try:
data = json.loads(page)
except:
return jenni.say("[IMDB] Couldn't make sense of information from API")
message = '[IMDB] '
if data['Response'] == 'False':
if 'Error' in data:
message += data['Error']
else:
message += 'Got an error from imdbapi'
else:
pre_plot_output = u'Title: {0} | Released: {1} | Rated: {2} '
pre_plot_output += '| Rating: {3} | Metascore: {4} | Genre: {5} '
pre_plot_output += '| Runtime: {6} | Plot: '
genre = data['Genre']
runtime = data['Runtime']
pre_plot = pre_plot_output.format(data['Title'], data['Released'],
data['Rated'], data['imdbRating'],
data['Metascore'], genre,
runtime)
after_plot_output = ' | IMDB Link: http://imdb.com/title/{0}'
after_plot = after_plot_output.format(data['imdbID'])
truncation = '[...]'
## 510 - (16 + 8 + 63)
## max_chars (minus \r\n) - (max_nick_length + max_ident_length
        ## + max_vhost_length_on_freenode)
max_len_of_plot = 423 - (len(pre_plot) + len(after_plot) + len(truncation))
new_plot = data['Plot']
if len(data['Plot']) > max_len_of_plot:
new_plot = data['Plot'][:max_len_of_plot] + truncation
message = pre_plot + new_plot + after_plot
jenni.say(message)
movie.commands = ['imdb', 'movie', 'movies', 'show', 'tv', 'television']
movie.example = '.imdb Movie Title, 2015'
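# --- Editor's note (not part of the original module) --------------------------
# Sanity check of the length budget used above: 510 usable bytes per IRC line,
# minus worst-case nick/ident/vhost overhead (16 + 8 + 63 = 87), leaves 423.
assert 510 - (16 + 8 + 63) == 423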
if __name__ == '__main__':
print __doc__.strip()
|
56244b510ab37cf655a7226644b7208e8d84f051
|
c5f7019c52cd91a3d9505943b9d866539f2fb0bc
|
/synapse/lib/stormlib/basex.py
|
10aeb11fc98ad81d173ade3204a3a384343642a1
|
[
"Apache-2.0"
] |
permissive
|
vertexproject/synapse
|
ce31699fcb10cb2c870d448915f4d4524247e2d0
|
1808dff78921b4bfdb451a12ee5d03427a5295b9
|
refs/heads/master
| 2023-09-03T23:48:26.584015
| 2023-08-31T20:34:35
| 2023-08-31T20:34:35
| 37,228,107
| 307
| 63
|
Apache-2.0
| 2023-09-14T21:53:32
| 2015-06-10T23:29:41
|
Python
|
UTF-8
|
Python
| false
| false
| 2,552
|
py
|
basex.py
|
import synapse.exc as s_exc
import synapse.common as s_common
import synapse.lib.stormtypes as s_stormtypes
@s_stormtypes.registry.registerLib
class BaseXLib(s_stormtypes.Lib):
'''
A Storm library which implements helpers for encoding and decoding strings using an arbitrary charset.
'''
_storm_locals = (
{'name': 'encode', 'desc': 'Encode bytes into a baseX string.',
'type': {'type': 'function', '_funcname': 'encode',
'args': (
{'name': 'byts', 'type': 'bytes', 'desc': 'The bytes to be encoded into a string.'},
{'name': 'charset', 'type': 'str', 'desc': 'The charset used to encode the bytes.'},
),
'returns': {'type': 'str', 'desc': 'The encoded string.', }
}},
{'name': 'decode', 'desc': 'Decode a baseX string into bytes.',
'type': {'type': 'function', '_funcname': 'decode',
'args': (
                      {'name': 'text', 'type': 'str', 'desc': 'The baseX string to be decoded into bytes.'},
{'name': 'charset', 'type': 'str', 'desc': 'The charset used to decode the string.'},
),
'returns': {'type': 'bytes', 'desc': 'The decoded bytes.', }
}},
)
_storm_lib_path = ('basex',)
def getObjLocals(self):
return {
'encode': self.encode,
'decode': self.decode,
}
async def encode(self, byts, charset):
if not isinstance(byts, bytes):
raise s_exc.BadArg(mesg='$lib.basex.encode() requires a bytes argument.')
charset = await s_stormtypes.tostr(charset)
retn = []
base = len(charset)
num = int.from_bytes(byts, 'big')
if num == 0:
return charset[0]
while num:
retn.append(charset[int(num % base)])
num = num // base
return ''.join(retn[::-1])
async def decode(self, text, charset):
text = await s_stormtypes.tostr(text)
charset = await s_stormtypes.tostr(charset)
alpha2num = {c: o for (o, c) in enumerate(charset)}
retn = 0
base = len(charset)
for c in text:
v = alpha2num.get(c)
if v is None:
mesg = f'$lib.basex.decode() string contains value not in charset: {c}'
raise s_exc.BadArg(mesg=mesg)
retn = (retn * base) + v
size = (retn.bit_length() + 7) // 8
return retn.to_bytes(size, 'big')
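# A minimal usage sketch (an assumption for illustration -- run inside a Storm
# runtime; $lib.hex is assumed available here for building the input bytes):
#
#     $byts = $lib.hex.decode(deadbeef)
#     $text = $lib.basex.encode($byts, "0123456789abcdef")    // -> deadbeef
#     $orig = $lib.basex.decode($text, "0123456789abcdef")
#
# Note: decode() rebuilds the value as one big integer, so any leading zero
# bytes in the original input are not preserved on the round trip.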
|
1154400557e4cc20546ff80e5abecbce2f9525e4
|
77fee94c58cd5b6305eef2f13d74b488db428c59
|
/litex/soc/cores/uart.py
|
723337897f04426b9d0d0d70c92263ca852facec
|
[
"BSD-2-Clause",
"BSD-3-Clause"
] |
permissive
|
enjoy-digital/litex
|
de5919d649c1b884c47a5e0364c2a9a584ebd614
|
405296b7fd99764af21fffd94afa5075c22affa8
|
refs/heads/master
| 2023-08-31T23:52:33.895792
| 2023-08-31T17:34:55
| 2023-08-31T17:36:21
| 45,734,719
| 2,351
| 524
|
NOASSERTION
| 2023-09-14T21:26:26
| 2015-11-07T12:02:12
|
C
|
UTF-8
|
Python
| false
| false
| 17,651
|
py
|
uart.py
|
#
# This file is part of LiteX.
#
# Copyright (c) 2014 Yann Sionneau <ys@m-labs.hk>
# Copyright (c) 2015-2020 Florent Kermarrec <florent@enjoy-digital.fr>
# Copyright (c) 2015 Sebastien Bourdeauducq <sb@m-labs.hk>
# Copyright (c) 2018 Tim 'mithro' Ansell <me@mith.ro>
# SPDX-License-Identifier: BSD-2-Clause
from math import log2
from migen import *
from migen.genlib.record import Record
from migen.genlib.cdc import MultiReg
from litex.gen import *
from litex.gen.genlib.misc import WaitTimer
from litex.soc.interconnect.csr import *
from litex.soc.interconnect.csr_eventmanager import *
from litex.soc.interconnect import wishbone
from litex.soc.interconnect import stream
# Common -------------------------------------------------------------------------------------------
def UARTPads():
return Record([("tx", 1), ("rx", 1)])
class UARTInterface:
def __init__(self):
self.sink = stream.Endpoint([("data", 8)])
self.source = stream.Endpoint([("data", 8)])
# RS232 PHY ----------------------------------------------------------------------------------------
RS232_IDLE = 1
RS232_START = 0
RS232_STOP = 1
class RS232PHYInterface(UARTInterface): pass
class RS232ClkPhaseAccum(LiteXModule):
def __init__(self, tuning_word, mode="tx"):
assert mode in ["tx", "rx"]
self.enable = Signal()
self.tick = Signal()
# # #
phase = Signal(32, reset_less=True)
self.sync += Cat(phase, self.tick).eq(tuning_word if mode == "tx" else 2**31)
self.sync += If(self.enable, Cat(phase, self.tick).eq(phase + tuning_word))
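        # A worked sanity check for the accumulator math (illustrative numbers,
        # not from this file): the tick rate is ~clk_freq * tuning_word / 2**32,
        # so for a 100 MHz clock at 115200 baud,
        # tuning_word = int((115200/100e6) * 2**32) ~= 4947802, which ticks back
        # at ~115200 Hz. In "rx" mode the idle preload of 2**31 makes the first
        # tick land half a bit period after enable, so the RX FSM samples near
        # the middle of each bit.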
class RS232PHYTX(LiteXModule):
def __init__(self, pads, tuning_word):
self.sink = sink = stream.Endpoint([("data", 8)])
# # #
pads.tx.reset = 1
data = Signal(8, reset_less=True)
count = Signal(4, reset_less=True)
# Clock Phase Accumulator.
clk_phase_accum = RS232ClkPhaseAccum(tuning_word, mode="tx")
self.submodules += clk_phase_accum
# FSM
self.fsm = fsm = FSM(reset_state="IDLE")
fsm.act("IDLE",
# Reset Count and set TX to Idle.
NextValue(count, 0),
NextValue(pads.tx, RS232_IDLE),
# Wait for TX data to transmit.
If(sink.valid,
NextValue(pads.tx, RS232_START),
NextValue(data, sink.data),
NextState("RUN")
)
)
fsm.act("RUN",
# Enable Clock Phase Accumulator.
clk_phase_accum.enable.eq(1),
# On Clock Phase Accumulator tick:
If(clk_phase_accum.tick,
# Set TX data.
NextValue(pads.tx, data),
# Increment Count.
NextValue(count, count + 1),
# Shift TX data.
NextValue(data, Cat(data[1:], RS232_STOP)),
                # When 10 bits (start + 8 data + stop) have been transmitted...
If(count == (10 - 1),
# Ack sink and return to Idle.
sink.ready.eq(1),
NextState("IDLE")
)
)
)
class RS232PHYRX(LiteXModule):
def __init__(self, pads, tuning_word):
self.source = source = stream.Endpoint([("data", 8)])
# # #
data = Signal(8, reset_less=True)
count = Signal(4, reset_less=True)
# Clock Phase Accumulator.
clk_phase_accum = RS232ClkPhaseAccum(tuning_word, mode="rx")
self.submodules += clk_phase_accum
# Resynchronize pads.rx and generate delayed version.
rx = Signal()
rx_d = Signal()
self.specials += MultiReg(pads.rx, rx)
self.sync += rx_d.eq(rx)
# FSM
self.fsm = fsm = FSM(reset_state="IDLE")
fsm.act("IDLE",
# Reset Count.
NextValue(count, 0),
# Wait for RX Start bit.
If((rx == RS232_START) & (rx_d == RS232_IDLE),
NextState("RUN")
)
)
fsm.act("RUN",
# Enable Clock Phase Accumulator.
clk_phase_accum.enable.eq(1),
# On Clock Phase Accumulator tick:
If(clk_phase_accum.tick,
# Increment Count.
NextValue(count, count + 1),
# Shift RX data.
NextValue(data, Cat(data[1:], rx)),
                # When 10 bits (start + 8 data + stop) have been received...
If(count == (10 - 1),
# Produce data (but only when RX Stop bit is seen).
source.valid.eq(rx == RS232_STOP),
source.data.eq(data),
NextState("IDLE")
)
)
)
class RS232PHY(LiteXModule):
def __init__(self, pads, clk_freq, baudrate=115200, with_dynamic_baudrate=False):
tuning_word = int((baudrate/clk_freq)*2**32)
if with_dynamic_baudrate:
self._tuning_word = CSRStorage(32, reset=tuning_word)
tuning_word = self._tuning_word.storage
self.tx = RS232PHYTX(pads, tuning_word)
self.rx = RS232PHYRX(pads, tuning_word)
self.sink, self.source = self.tx.sink, self.rx.source
class RS232PHYMultiplexer(LiteXModule):
def __init__(self, phys, phy):
self.sel = Signal(max=len(phys))
# # #
cases = {}
for n in range(len(phys)):
# don't stall uarts when not selected
self.comb += phys[n].sink.ready.eq(1)
# connect core to phy
cases[n] = [
phy.source.connect(phys[n].source),
phys[n].sink.connect(phy.sink)
]
self.comb += Case(self.sel, cases)
class RS232PHYModel(LiteXModule):
def __init__(self, pads):
self.sink = stream.Endpoint([("data", 8)])
self.source = stream.Endpoint([("data", 8)])
self.comb += [
pads.source_valid.eq(self.sink.valid),
pads.source_data.eq(self.sink.data),
self.sink.ready.eq(pads.source_ready),
self.source.valid.eq(pads.sink_valid),
self.source.data.eq(pads.sink_data),
pads.sink_ready.eq(self.source.ready)
]
# UART ---------------------------------------------------------------------------------------------
def _get_uart_fifo(depth, sink_cd="sys", source_cd="sys"):
if sink_cd != source_cd:
fifo = stream.AsyncFIFO([("data", 8)], depth)
return ClockDomainsRenamer({"write": sink_cd, "read": source_cd})(fifo)
else:
return stream.SyncFIFO([("data", 8)], depth, buffered=True)
def UARTPHY(pads, clk_freq, baudrate):
# FT245 Asynchronous FIFO mode (baudrate ignored)
if hasattr(pads, "rd_n") and hasattr(pads, "wr_n"):
from litex.soc.cores.usb_fifo import FT245PHYAsynchronous
return FT245PHYAsynchronous(pads, clk_freq)
# RS232
else:
return RS232PHY(pads, clk_freq, baudrate)
class UART(LiteXModule, UARTInterface):
def __init__(self, phy=None,
tx_fifo_depth = 16,
rx_fifo_depth = 16,
rx_fifo_rx_we = False,
phy_cd = "sys"):
self._rxtx = CSR(8) # RX/TX Data.
self._txfull = CSRStatus(description="TX FIFO Full.")
self._rxempty = CSRStatus(description="RX FIFO Empty.")
self.ev = EventManager()
self.ev.tx = EventSourceProcess(edge="rising")
self.ev.rx = EventSourceProcess(edge="rising")
self.ev.finalize()
self._txempty = CSRStatus(description="TX FIFO Empty.")
self._rxfull = CSRStatus(description="RX FIFO Full.")
# # #
UARTInterface.__init__(self)
# PHY
# ---
if phy is not None:
self.comb += phy.source.connect(self.sink)
self.comb += self.source.connect(phy.sink)
# TX
# --
self.tx_fifo = tx_fifo = _get_uart_fifo(tx_fifo_depth, source_cd=phy_cd)
self.comb += [
# CSR --> FIFO.
tx_fifo.sink.valid.eq(self._rxtx.re),
tx_fifo.sink.data.eq(self._rxtx.r),
# FIFO --> Source.
tx_fifo.source.connect(self.source),
# CSR Status.
self._txfull.status.eq(~tx_fifo.sink.ready),
self._txempty.status.eq(~tx_fifo.source.valid),
# IRQ (When FIFO becomes non-full).
self.ev.tx.trigger.eq(tx_fifo.sink.ready)
]
# RX
# --
self.rx_fifo = rx_fifo = _get_uart_fifo(rx_fifo_depth, sink_cd=phy_cd)
self.comb += [
# Sink --> FIFO.
self.sink.connect(rx_fifo.sink),
# FIFO --> CSR.
self._rxtx.w.eq(rx_fifo.source.data),
rx_fifo.source.ready.eq(self.ev.rx.clear | (rx_fifo_rx_we & self._rxtx.we)),
# Status.
self._rxempty.status.eq(~rx_fifo.source.valid),
self._rxfull.status.eq(~rx_fifo.sink.ready),
# IRQ (When FIFO becomes non-empty).
self.ev.rx.trigger.eq(rx_fifo.source.valid)
]
def add_auto_tx_flush(self, sys_clk_freq, timeout=1e-2, interval=2):
        # Add an automatic TX flush when ready stays inactive for `timeout`; this prevents
        # stalling the UART (and thus the CPU) when the PHY is not operational at startup.
flush_ep = stream.Endpoint([("data", 8)])
flush_count = Signal(int(log2(interval)))
# Insert Flush Endpoint between TX FIFO and Source.
self.comb += self.tx_fifo.source.connect(flush_ep)
self.comb += flush_ep.connect(self.source)
# Flush TX FIFO when Source.ready is inactive for timeout (with interval cycles between
# each ready).
self.timer = timer = WaitTimer(timeout*sys_clk_freq)
self.comb += timer.wait.eq(~self.source.ready)
self.sync += flush_count.eq(flush_count + 1)
self.comb += If(timer.done, flush_ep.ready.eq(flush_count == 0))
#self.sync += If(flush_ep.valid & flush_ep.ready, Display("%c", flush_ep.data))
# UART Bone ----------------------------------------------------------------------------------------
CMD_WRITE_BURST_INCR = 0x01
CMD_READ_BURST_INCR = 0x02
CMD_WRITE_BURST_FIXED = 0x03
CMD_READ_BURST_FIXED = 0x04
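# A hedged host-side framing sketch, derived from the Stream2Wishbone FSM below
# (assumes the default 32-bit data/address widths; `port` is a hypothetical
# pyserial-style handle, not part of this file; `addr` is the Wishbone word
# address):
#
#     def uartbone_read32(port, addr, length=1):
#         # cmd byte, burst length, then the address bytes MSB-first.
#         port.write(bytes([CMD_READ_BURST_INCR, length]) + addr.to_bytes(4, "big"))
#         return port.read(4 * length)  # data words come back MSB-first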
class Stream2Wishbone(LiteXModule):
def __init__(self, phy=None, clk_freq=None, data_width=32, address_width=32):
self.sink = sink = stream.Endpoint([("data", 8)]) if phy is None else phy.source
self.source = source = stream.Endpoint([("data", 8)]) if phy is None else phy.sink
self.wishbone = wishbone.Interface(data_width=data_width, adr_width=address_width)
# # #
assert data_width in [8, 16, 32]
assert address_width in [8, 16, 32]
cmd = Signal(8, reset_less=True)
incr = Signal()
length = Signal(8, reset_less=True)
address = Signal(address_width, reset_less=True)
data = Signal(data_width, reset_less=True)
data_bytes_count = Signal(int(log2(data_width//8)), reset_less=True)
addr_bytes_count = Signal(int(log2(address_width//8)), reset_less=True)
words_count = Signal(8, reset_less=True)
data_bytes_count_done = (data_bytes_count == (data_width//8 - 1))
addr_bytes_count_done = (addr_bytes_count == (address_width//8 - 1))
words_count_done = (words_count == (length - 1))
self.fsm = fsm = ResetInserter()(FSM(reset_state="RECEIVE-CMD"))
self.timer = timer = WaitTimer(100e-3*clk_freq)
self.comb += timer.wait.eq(~fsm.ongoing("RECEIVE-CMD"))
self.comb += fsm.reset.eq(timer.done)
fsm.act("RECEIVE-CMD",
sink.ready.eq(1),
NextValue(data_bytes_count, 0),
NextValue(addr_bytes_count, 0),
NextValue(words_count, 0),
If(sink.valid,
NextValue(cmd, sink.data),
NextState("RECEIVE-LENGTH")
)
)
fsm.act("RECEIVE-LENGTH",
sink.ready.eq(1),
If(sink.valid,
NextValue(length, sink.data),
NextState("RECEIVE-ADDRESS")
)
)
fsm.act("RECEIVE-ADDRESS",
sink.ready.eq(1),
If(sink.valid,
NextValue(address, Cat(sink.data, address)),
NextValue(addr_bytes_count, addr_bytes_count + 1),
If(addr_bytes_count_done,
If((cmd == CMD_WRITE_BURST_INCR) | (cmd == CMD_WRITE_BURST_FIXED),
NextValue(incr, cmd == CMD_WRITE_BURST_INCR),
NextState("RECEIVE-DATA")
).Elif((cmd == CMD_READ_BURST_INCR) | (cmd == CMD_READ_BURST_FIXED),
NextValue(incr, cmd == CMD_READ_BURST_INCR),
NextState("READ-DATA")
).Else(
NextState("RECEIVE-CMD")
)
)
)
)
fsm.act("RECEIVE-DATA",
sink.ready.eq(1),
If(sink.valid,
NextValue(data, Cat(sink.data, data)),
NextValue(data_bytes_count, data_bytes_count + 1),
If(data_bytes_count_done,
NextState("WRITE-DATA")
)
)
)
self.comb += [
self.wishbone.adr.eq(address),
self.wishbone.dat_w.eq(data),
self.wishbone.sel.eq(2**(data_width//8) - 1)
]
fsm.act("WRITE-DATA",
sink.ready.eq(0),
self.wishbone.stb.eq(1),
self.wishbone.we.eq(1),
self.wishbone.cyc.eq(1),
If(self.wishbone.ack,
NextValue(words_count, words_count + 1),
NextValue(address, address + incr),
If(words_count_done,
NextState("RECEIVE-CMD")
).Else(
NextState("RECEIVE-DATA")
)
)
)
fsm.act("READ-DATA",
sink.ready.eq(0),
self.wishbone.stb.eq(1),
self.wishbone.we.eq(0),
self.wishbone.cyc.eq(1),
If(self.wishbone.ack,
NextValue(data, self.wishbone.dat_r),
NextState("SEND-DATA")
)
)
cases = {}
for i, n in enumerate(reversed(range(data_width//8))):
cases[i] = source.data.eq(data[8*n:])
self.comb += Case(data_bytes_count, cases)
fsm.act("SEND-DATA",
sink.ready.eq(0),
source.valid.eq(1),
If(source.ready,
NextValue(data_bytes_count, data_bytes_count + 1),
If(data_bytes_count_done,
NextValue(words_count, words_count + 1),
NextValue(address, address + incr),
If(words_count_done,
NextState("RECEIVE-CMD")
).Else(
NextState("READ-DATA")
)
)
)
)
self.comb += source.last.eq(data_bytes_count_done & words_count_done)
if hasattr(source, "length"):
self.comb += source.length.eq((data_width//8)*length)
class UARTBone(Stream2Wishbone):
def __init__(self, phy, clk_freq, cd="sys"):
if cd == "sys":
self.phy = phy
Stream2Wishbone.__init__(self, self.phy, clk_freq=clk_freq)
else:
self.phy = ClockDomainsRenamer(cd)(phy)
self.tx_cdc = stream.ClockDomainCrossing([("data", 8)], cd_from="sys", cd_to=cd)
self.rx_cdc = stream.ClockDomainCrossing([("data", 8)], cd_from=cd, cd_to="sys")
self.comb += self.phy.source.connect(self.rx_cdc.sink)
self.comb += self.tx_cdc.source.connect(self.phy.sink)
Stream2Wishbone.__init__(self, clk_freq=clk_freq)
self.comb += self.rx_cdc.source.connect(self.sink)
self.comb += self.source.connect(self.tx_cdc.sink)
class UARTWishboneBridge(UARTBone):
def __init__(self, pads, clk_freq, baudrate=115200, cd="sys"):
self.phy = RS232PHY(pads, clk_freq, baudrate)
UARTBone.__init__(self, self.phy, clk_freq, cd)
# UART Multiplexer ---------------------------------------------------------------------------------
class UARTMultiplexer(LiteXModule):
def __init__(self, uarts, uart):
self.sel = Signal(max=len(uarts))
# # #
cases = {}
for n in range(len(uarts)):
cases[n] = [
uart.tx.eq(uarts[n].tx),
uarts[n].rx.eq(uart.rx)
]
self.comb += Case(self.sel, cases)
# UART Crossover -----------------------------------------------------------------------------------
class UARTCrossover(UART):
"""
    UART crossover through Wishbone bridge.
Creates a fully compatible UART that can be used by the CPU as a regular UART and adds a second
UART, cross-connected to the main one to allow terminal emulation over a Wishbone bridge.
"""
def __init__(self, **kwargs):
        assert kwargs.get("phy", None) is None
UART.__init__(self, **kwargs)
self.xover = UART(tx_fifo_depth=1, rx_fifo_depth=16, rx_fifo_rx_we=True)
self.comb += [
self.source.connect(self.xover.sink),
self.xover.source.connect(self.sink)
]
|
95dd42a194a62af18efa40aebc7e89ddb6cf31e8
|
aff73e17a2cebca97d53443821357811645d843e
|
/tests/test_plaintext_parser.py
|
adab1f5fa71c43e18a5a06d7ae7661de79afda3c
|
[
"Apache-2.0"
] |
permissive
|
miso-belica/sumy
|
8184c41018302b0a0a3ff80797768f4b7a0e65e4
|
5fdfae543b01359a3cd82b1edb7d5f6c1c89c782
|
refs/heads/main
| 2023-08-19T00:46:09.453121
| 2023-08-11T07:20:07
| 2023-08-11T07:20:07
| 8,313,091
| 3,352
| 599
|
Apache-2.0
| 2023-08-11T07:16:24
| 2013-02-20T12:56:48
|
Python
|
UTF-8
|
Python
| false
| false
| 1,934
|
py
|
test_plaintext_parser.py
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
from sumy.nlp.tokenizers import Tokenizer
from sumy.parsers.plaintext import PlaintextParser
def test_parse_plaintext():
parser = PlaintextParser.from_string("""
Ako sa máš? Ja dobre! A ty? No
mohlo to byť aj lepšie!!! Ale pohodička.
TOTO JE AKOŽE NADPIS
A toto je text pod ním, ktorý je textový.
A tak ďalej...
""", Tokenizer("czech"))
document = parser.document
assert len(document.paragraphs) == 2
assert len(document.paragraphs[0].headings) == 0
assert len(document.paragraphs[0].sentences) == 5
assert len(document.paragraphs[1].headings) == 1
assert len(document.paragraphs[1].sentences) == 2
def test_parse_plaintext_long():
parser = PlaintextParser.from_string("""
Ako sa máš? Ja dobre! A ty? No
mohlo to byť aj lepšie!!! Ale pohodička.
TOTO JE AKOŽE NADPIS
A toto je text pod ním, ktorý je textový.
A tak ďalej...
VEĽKOLEPÉ PREKVAPENIE
Tretí odstavec v tomto texte je úplne o ničom. Ale má
vety a to je hlavné. Takže sa majte na pozore ;-)
A tak ďalej...
A tak este dalej!
""", Tokenizer("czech"))
document = parser.document
assert len(document.paragraphs) == 5
assert len(document.paragraphs[0].headings) == 0
assert len(document.paragraphs[0].sentences) == 5
assert len(document.paragraphs[1].headings) == 1
assert len(document.paragraphs[1].sentences) == 2
assert len(document.paragraphs[2].headings) == 1
assert len(document.paragraphs[2].sentences) == 3
assert len(document.paragraphs[3].headings) == 0
assert len(document.paragraphs[3].sentences) == 1
assert len(document.paragraphs[4].headings) == 0
assert len(document.paragraphs[4].sentences) == 1
|
e513d008d320cdbeabee9552cdaf1f1f0bfec74e
|
85373d45a83e4096affafa4f4e5b400787413e57
|
/test/programytest/services/rest/newsapi/test_service.py
|
0b9015666a625acbac553da21428323e3481a0f7
|
[
"MIT"
] |
permissive
|
keiffster/program-y
|
a02bb9d8278835547cc875f4f9cd668d5b1f44da
|
fc7b0a3afa4fa6ed683e0c817a9aa89f9543bb20
|
refs/heads/master
| 2023-08-23T13:55:39.255535
| 2022-12-13T09:51:57
| 2022-12-13T09:51:57
| 74,462,571
| 379
| 173
|
NOASSERTION
| 2023-05-23T00:51:21
| 2016-11-22T10:43:41
|
Python
|
UTF-8
|
Python
| false
| false
| 4,595
|
py
|
test_service.py
|
import unittest
from unittest.mock import patch
from unittest.mock import Mock
import os
from programy.services.rest.newsapi.service import NewsAPIService
from programy.services.config import ServiceConfiguration
from programy.services.config import ServiceRESTConfiguration
from programytest.services.testclient import ServiceTestClient
from programytest.services.testcase import ServiceTestCase
from programytest.externals import integration_tests_active, integration_tests_disabled
from programytest.services.rest.newsapi.responses import everything_success_response
from programytest.services.rest.newsapi.responses import headlines_success_response
from programytest.services.rest.newsapi.responses import sources_success_response
class NewsAPIServiceTestClient(ServiceTestClient):
def __init__(self):
ServiceTestClient.__init__(self, debug=True)
def load_storage(self):
super(NewsAPIServiceTestClient, self).load_storage()
self.add_license_keys_store(self.get_license_key_file())
class NewsAPIServiceTests(ServiceTestCase):
def test_init(self):
service = NewsAPIService(ServiceConfiguration.from_data("rest", "newsapi", "news"))
self.assertIsNotNone(service)
def patch_requests_everything_success_response(self, url, headers, timeout):
mock_response = Mock()
mock_response.status_code = 200
mock_response.json.return_value = everything_success_response
return mock_response
def patch_requests_headlines_success_response(self, url, headers, timeout):
mock_response = Mock()
mock_response.status_code = 200
mock_response.json.return_value = headlines_success_response
return mock_response
def patch_requests_sources_success_response(self, url, headers, timeout):
mock_response = Mock()
mock_response.status_code = 200
mock_response.json.return_value = sources_success_response
return mock_response
def _do_everything(self):
service = NewsAPIService(ServiceConfiguration.from_data("rest", "newsapi", "news"))
self.assertIsNotNone(service)
client = NewsAPIServiceTestClient()
service.initialise(client)
response = service.get_everything("chatbot")
self.assertResponse(response, 'get_everything', "newsapi", "news")
@unittest.skipIf(integration_tests_active() is False, integration_tests_disabled)
def test_everything_integration(self):
self._do_everything()
@patch("programy.services.rest.base.RESTService._requests_get", patch_requests_everything_success_response)
def test_everything_unit(self):
self._do_everything()
def _do_headlines(self):
service = NewsAPIService(ServiceConfiguration.from_data("rest", "newsapi", "news"))
self.assertIsNotNone(service)
client = NewsAPIServiceTestClient()
service.initialise(client)
response = service.get_headlines("uk")
self.assertResponse(response, 'get_headlines', "newsapi", "news")
@unittest.skipIf(integration_tests_active() is False, integration_tests_disabled)
def test_headlines_integration(self):
self._do_headlines()
@patch("programy.services.rest.base.RESTService._requests_get", patch_requests_headlines_success_response)
def test_headlines_unit(self):
self._do_headlines()
def _do_sources(self):
service = NewsAPIService(ServiceConfiguration.from_data("rest", "newsapi", "news"))
self.assertIsNotNone(service)
client = NewsAPIServiceTestClient()
service.initialise(client)
response = service.get_sources()
self.assertResponse(response, 'get_sources', "newsapi", "news")
@unittest.skipIf(integration_tests_active() is False, integration_tests_disabled)
def test_sources_integration(self):
self._do_sources()
@patch("programy.services.rest.base.RESTService._requests_get", patch_requests_sources_success_response)
def test_sources_unit(self):
self._do_sources()
@patch("programy.services.rest.base.RESTService._requests_get", patch_requests_everything_success_response)
def test_handler_load_everything(self):
client = NewsAPIServiceTestClient()
conf_file = NewsAPIService.get_default_conf_file()
response = self._do_handler_load(client, conf_file, "newsapi", "NEWSAPI EVERYTHING CHATBOTS")
self.assertIsNotNone(response)
self.assertTrue(response.startswith("<ul><li>How artificial intelligence and machine learning produced robots we can talk to</li>"))
|
ec4a0a3703753c314ccd4c935efeb816ed076eaf
|
a41e1498e3c080f47abd8e8e57157548df3ebbf1
|
/pandas/tests/frame/test_logical_ops.py
|
2cc3b67e7ac029d3f42256f700db7e75894c5e1a
|
[
"BSD-3-Clause"
] |
permissive
|
pandas-dev/pandas
|
e7e639454a298bebc272622e66faa9829ea393bb
|
c7325d7e7e77ecb4a4e57b48bc25265277c75712
|
refs/heads/main
| 2023-09-01T12:42:07.927176
| 2023-09-01T11:14:10
| 2023-09-01T11:14:10
| 858,127
| 36,166
| 18,728
|
BSD-3-Clause
| 2023-09-14T21:18:41
| 2010-08-24T01:37:33
|
Python
|
UTF-8
|
Python
| false
| false
| 7,042
|
py
|
test_logical_ops.py
|
import operator
import re
import numpy as np
import pytest
from pandas import (
CategoricalIndex,
DataFrame,
Interval,
Series,
isnull,
)
import pandas._testing as tm
class TestDataFrameLogicalOperators:
# &, |, ^
@pytest.mark.parametrize(
"left, right, op, expected",
[
(
[True, False, np.nan],
[True, False, True],
operator.and_,
[True, False, False],
),
(
[True, False, True],
[True, False, np.nan],
operator.and_,
[True, False, False],
),
(
[True, False, np.nan],
[True, False, True],
operator.or_,
[True, False, False],
),
(
[True, False, True],
[True, False, np.nan],
operator.or_,
[True, False, True],
),
],
)
def test_logical_operators_nans(self, left, right, op, expected, frame_or_series):
# GH#13896
result = op(frame_or_series(left), frame_or_series(right))
expected = frame_or_series(expected)
tm.assert_equal(result, expected)
def test_logical_ops_empty_frame(self):
# GH#5808
# empty frames, non-mixed dtype
df = DataFrame(index=[1])
result = df & df
tm.assert_frame_equal(result, df)
result = df | df
tm.assert_frame_equal(result, df)
df2 = DataFrame(index=[1, 2])
result = df & df2
tm.assert_frame_equal(result, df2)
dfa = DataFrame(index=[1], columns=["A"])
result = dfa & dfa
expected = DataFrame(False, index=[1], columns=["A"])
tm.assert_frame_equal(result, expected)
def test_logical_ops_bool_frame(self):
# GH#5808
df1a_bool = DataFrame(True, index=[1], columns=["A"])
result = df1a_bool & df1a_bool
tm.assert_frame_equal(result, df1a_bool)
result = df1a_bool | df1a_bool
tm.assert_frame_equal(result, df1a_bool)
def test_logical_ops_int_frame(self):
# GH#5808
df1a_int = DataFrame(1, index=[1], columns=["A"])
df1a_bool = DataFrame(True, index=[1], columns=["A"])
result = df1a_int | df1a_bool
tm.assert_frame_equal(result, df1a_bool)
# Check that this matches Series behavior
res_ser = df1a_int["A"] | df1a_bool["A"]
tm.assert_series_equal(res_ser, df1a_bool["A"])
def test_logical_ops_invalid(self):
# GH#5808
df1 = DataFrame(1.0, index=[1], columns=["A"])
df2 = DataFrame(True, index=[1], columns=["A"])
msg = re.escape("unsupported operand type(s) for |: 'float' and 'bool'")
with pytest.raises(TypeError, match=msg):
df1 | df2
df1 = DataFrame("foo", index=[1], columns=["A"])
df2 = DataFrame(True, index=[1], columns=["A"])
msg = re.escape("unsupported operand type(s) for |: 'str' and 'bool'")
with pytest.raises(TypeError, match=msg):
df1 | df2
def test_logical_operators(self):
def _check_bin_op(op):
result = op(df1, df2)
expected = DataFrame(
op(df1.values, df2.values), index=df1.index, columns=df1.columns
)
assert result.values.dtype == np.bool_
tm.assert_frame_equal(result, expected)
def _check_unary_op(op):
result = op(df1)
expected = DataFrame(op(df1.values), index=df1.index, columns=df1.columns)
assert result.values.dtype == np.bool_
tm.assert_frame_equal(result, expected)
df1 = {
"a": {"a": True, "b": False, "c": False, "d": True, "e": True},
"b": {"a": False, "b": True, "c": False, "d": False, "e": False},
"c": {"a": False, "b": False, "c": True, "d": False, "e": False},
"d": {"a": True, "b": False, "c": False, "d": True, "e": True},
"e": {"a": True, "b": False, "c": False, "d": True, "e": True},
}
df2 = {
"a": {"a": True, "b": False, "c": True, "d": False, "e": False},
"b": {"a": False, "b": True, "c": False, "d": False, "e": False},
"c": {"a": True, "b": False, "c": True, "d": False, "e": False},
"d": {"a": False, "b": False, "c": False, "d": True, "e": False},
"e": {"a": False, "b": False, "c": False, "d": False, "e": True},
}
df1 = DataFrame(df1)
df2 = DataFrame(df2)
_check_bin_op(operator.and_)
_check_bin_op(operator.or_)
_check_bin_op(operator.xor)
_check_unary_op(operator.inv) # TODO: belongs elsewhere
def test_logical_with_nas(self):
d = DataFrame({"a": [np.nan, False], "b": [True, True]})
# GH4947
# bool comparisons should return bool
result = d["a"] | d["b"]
expected = Series([False, True])
tm.assert_series_equal(result, expected)
# GH4604, automatic casting here
result = d["a"].fillna(False) | d["b"]
expected = Series([True, True])
tm.assert_series_equal(result, expected)
msg = "The 'downcast' keyword in fillna is deprecated"
with tm.assert_produces_warning(FutureWarning, match=msg):
result = d["a"].fillna(False, downcast=False) | d["b"]
expected = Series([True, True])
tm.assert_series_equal(result, expected)
def test_logical_ops_categorical_columns(self):
# GH#38367
intervals = [Interval(1, 2), Interval(3, 4)]
data = DataFrame(
[[1, np.nan], [2, np.nan]],
columns=CategoricalIndex(
intervals, categories=intervals + [Interval(5, 6)]
),
)
mask = DataFrame(
[[False, False], [False, False]], columns=data.columns, dtype=bool
)
result = mask | isnull(data)
expected = DataFrame(
[[False, True], [False, True]],
columns=CategoricalIndex(
intervals, categories=intervals + [Interval(5, 6)]
),
)
tm.assert_frame_equal(result, expected)
def test_int_dtype_different_index_not_bool(self):
# GH 52500
df1 = DataFrame([1, 2, 3], index=[10, 11, 23], columns=["a"])
df2 = DataFrame([10, 20, 30], index=[11, 10, 23], columns=["a"])
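        # Alignment sanity check: the frames align on index before XOR, so
        # index 10 -> 1 ^ 20 = 21, index 11 -> 2 ^ 10 = 8, index 23 -> 3 ^ 30 = 29.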
result = np.bitwise_xor(df1, df2)
expected = DataFrame([21, 8, 29], index=[10, 11, 23], columns=["a"])
tm.assert_frame_equal(result, expected)
result = df1 ^ df2
tm.assert_frame_equal(result, expected)
def test_different_dtypes_different_index_raises(self):
# GH 52538
df1 = DataFrame([1, 2], index=["a", "b"])
df2 = DataFrame([3, 4], index=["b", "c"])
with pytest.raises(TypeError, match="unsupported operand type"):
df1 & df2
|
009cbbcc20d45d39f73dd57a331ff7a53d827700
|
60ba5cc2f817471dd0ff84a15996b46b1dbfa6ba
|
/park/envs/spark_sim/__init__.py
|
3533c53493507317545059aa04c34de0a434ec2b
|
[
"MIT"
] |
permissive
|
park-project/park
|
dd15d27e5859fe421c878a90627716623892b6f9
|
08f8f7f0dea14e011af2d5ce2a72410084eb8713
|
refs/heads/master
| 2023-06-28T11:38:30.359938
| 2022-04-07T14:42:21
| 2022-04-07T14:42:21
| 184,142,889
| 216
| 55
|
MIT
| 2023-06-14T16:10:38
| 2019-04-29T20:55:25
|
Python
|
UTF-8
|
Python
| false
| false
| 49
|
py
|
__init__.py
|
from park.envs.spark_sim.spark import SparkSimEnv
|
93bdc2d4fb3ddc35f8c6fbded579b2dc4806bbf3
|
13c0929950a663964c925625abc65421cf1b3c2f
|
/tests/test_api.py
|
26e940d0fa068af1fdcb02e4243765bcbfe312e4
|
[
"MIT"
] |
permissive
|
allisson/python-simple-rest-client
|
ccb0c801d6d7ff15c830a250f31b69aae152d4de
|
5a8fb5d8aea8e9436d74ed74e6c35164c8a3b6b3
|
refs/heads/master
| 2022-06-20T05:27:03.101557
| 2022-05-28T22:55:50
| 2022-05-28T22:55:50
| 85,091,859
| 174
| 55
|
MIT
| 2022-05-28T22:55:51
| 2017-03-15T15:50:24
|
Python
|
UTF-8
|
Python
| false
| false
| 4,379
|
py
|
test_api.py
|
import pytest
from simple_rest_client.api import API
from simple_rest_client.resource import Resource
def test_api_headers():
api = API(api_root_url="http://localhost:0/api/")
assert api.headers == {}
json_api = API(api_root_url="http://localhost:0/api/", headers={"Content-Type": "application/json"})
assert json_api.headers == {"Content-Type": "application/json"}
@pytest.mark.parametrize("ssl_verify,expected_ssl_verify", [(None, True), (True, True), (False, False)])
def test_api_ssl_verify(ssl_verify, expected_ssl_verify, api, reqres_resource):
api = API(api_root_url="http://localhost:0/api/", json_encode_body=True, ssl_verify=ssl_verify)
api.add_resource(resource_name="users")
assert api.ssl_verify == expected_ssl_verify
def test_api_add_resource(api, reqres_resource):
api.add_resource(resource_name="users")
assert isinstance(api.users, Resource)
attrs = (
"actions",
"api_root_url",
"append_slash",
"headers",
"json_encode_body",
"resource_name",
"ssl_verify",
"timeout",
)
for attr in attrs:
assert getattr(api.users, attr) == getattr(reqres_resource, attr)
assert "users" in api._resources
@pytest.mark.parametrize(
"resource_name,resource_valid_name",
[("users", "users"), ("my-users", "my_users"), ("my users", "my_users"), ("影師嗎", "ying_shi_ma")],
)
def test_api_resource_valid_name(resource_name, resource_valid_name, api):
api.add_resource(resource_name=resource_name)
resource = getattr(api, resource_valid_name)
assert isinstance(resource, Resource)
assert resource_name in api._resources
assert resource.get_action_full_url("list") == f"{api.api_root_url}{resource_name}"
def test_api_add_resource_with_other_resource_class(api, reqres_resource):
class AnotherResource(Resource):
def extra_action(self):
return True
api.add_resource(resource_name="users", resource_class=AnotherResource)
assert api.users.extra_action()
def test_api_get_resource_list(api):
api.add_resource(resource_name="users")
api.add_resource(resource_name="login")
resource_list = api.get_resource_list()
assert "users" in resource_list
assert "login" in resource_list
@pytest.mark.parametrize(
"url,method,status,action,args,kwargs",
[
("/api/users", "GET", 200, "list", None, {}),
("/api/users", "POST", 201, "create", None, {"body": {"success": True}}),
("/api/users/2", "GET", 200, "retrieve", 2, {"body": {"success": True}}),
("/api/users/2", "PUT", 200, "update", 2, {"body": {"success": True}}),
("/api/users/2", "PATCH", 200, "partial_update", 2, {"body": {"success": True}}),
("/api/users/2", "DELETE", 204, "destroy", 2, {"body": {"success": True}}),
],
)
def test_reqres_api_users_actions(httpserver, url, method, status, action, args, kwargs, reqres_api):
httpserver.expect_request(url, method=method).respond_with_json({"success": True}, status=status)
response = getattr(reqres_api.users, action)(args, **kwargs)
assert response.status_code == status
assert response.method == method
assert url in response.url
if method != "DELETE":
assert response.body == {"success": True}
@pytest.mark.asyncio
@pytest.mark.parametrize(
"url,method,status,action,args,kwargs",
[
("/api/users", "GET", 200, "list", None, {}),
("/api/users", "POST", 201, "create", None, {"body": {"success": True}}),
("/api/users/2", "GET", 200, "retrieve", 2, {"body": {"success": True}}),
("/api/users/2", "PUT", 200, "update", 2, {"body": {"success": True}}),
("/api/users/2", "PATCH", 200, "partial_update", 2, {"body": {"success": True}}),
("/api/users/2", "DELETE", 204, "destroy", 2, {"body": {"success": True}}),
],
)
async def test_reqres_async_api_users_actions(
httpserver, url, method, status, action, args, kwargs, reqres_async_api
):
httpserver.expect_request(url, method=method).respond_with_json({"success": True}, status=status)
response = await getattr(reqres_async_api.users, action)(args, **kwargs)
assert response.status_code == status
assert response.method == method
assert url in response.url
if method != "DELETE":
assert response.body == {"success": True}
|
d7e686c3ee83cfc1e3dfbb2438b3465ec238bbbe
|
ec8d9e1595ccc252a57d1769382bb98d604e40a9
|
/pyrolite/__init__.py
|
29a6453712f1497ac2c7b66f31fe6368ab6f0885
|
[
"BSD-3-Clause",
"MIT"
] |
permissive
|
morganjwilliams/pyrolite
|
2a8e738e14099f72282a80acb7139d71eec71efc
|
ac7cd52029909738a41143b89be880e0da419266
|
refs/heads/main
| 2023-08-31T09:20:33.282184
| 2023-07-21T06:45:24
| 2023-07-21T06:45:24
| 137,172,322
| 113
| 37
|
NOASSERTION
| 2023-08-29T10:49:17
| 2018-06-13T06:31:12
|
Python
|
UTF-8
|
Python
| false
| false
| 1,314
|
py
|
__init__.py
|
"""
pyrolite: A set of tools for getting the most from your geochemical data.
"""
from ._version import get_versions
__version__ = get_versions()["version"]
del get_versions
import importlib
import pkgutil
import matplotlib.style
from .util.log import Handle
from .util.plot.style import _export_mplstyle # this import adds the style used below
logger = Handle(__name__)
def load_extensions(base="pyrolite_", replace=["util"]):
"""
Automatically load any extensions associated with pyrolite
to be importable from :mod:`pyrolite.extensions`.
Parameters
----------
base : :class:`str`
Module base string pattern for recognising extensions.
replace : :class:`list`
List of strings to replace from extension modules to shorten call signatures.
"""
from . import extensions
modules = {
name.replace(base, ""): importlib.import_module(name)
for finder, name, ispkg in pkgutil.iter_modules()
if name.startswith(base)
}
for n, m in modules.items():
for r in replace:
n = n.replace(r, "")
setattr(extensions, n, m)
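    # For example (hypothetical name): an installed package "pyrolite_utilx"
    # would become importable as pyrolite.extensions.x after this runs.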
# the "pyrolite" style was registered by the _export_mplstyle import above
matplotlib.style.use("pyrolite")
from . import _version
__version__ = _version.get_versions()['version']
|
42b1ce38bdf5cb05fb0d382612156129d4355817
|
c8a0facd2ab0ae172b9745113c5a3372edb0034d
|
/tests/test_integration/test_examples/test_percolate.py
|
30fcf972ba53ade36e4998f5a9f172e7d2176758
|
[
"Apache-2.0"
] |
permissive
|
elastic/elasticsearch-dsl-py
|
1ba2390f8123ace1d53146fb42665d4cfdf6bf6f
|
56832d816ac01c08aba89d84f35b3ca404f5cd7e
|
refs/heads/main
| 2023-08-31T20:57:26.709632
| 2023-08-28T16:35:28
| 2023-08-28T16:35:28
| 17,446,726
| 3,702
| 951
|
Apache-2.0
| 2023-09-08T05:34:51
| 2014-03-05T16:19:39
|
Python
|
UTF-8
|
Python
| false
| false
| 1,159
|
py
|
test_percolate.py
|
# Licensed to Elasticsearch B.V. under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch B.V. licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from .percolate import BlogPost, setup
def test_post_gets_tagged_automatically(write_client):
setup()
bp = BlogPost(_id=47, content="nothing about snakes here!")
bp_py = BlogPost(_id=42, content="something about Python here!")
bp.save()
bp_py.save()
assert [] == bp.tags
assert {"programming", "development", "python"} == set(bp_py.tags)
|
7156d7e505849dc82026d6e23df77ad64f0c62b9
|
2f24871ed5abc8b0ebd862a99dde4339bafbfad9
|
/moveit_py/moveit/servo_client/devices/ps4_dualshock.pyi
|
bab5505ddcc2acaa73e5339e22ead4e86ed717f6
|
[
"BSD-3-Clause"
] |
permissive
|
ros-planning/moveit2
|
b3c01ac1197a302d4ee834f9addfde754a2b07e9
|
47d92ef973680aea1badd6cc6080598f040a89f4
|
refs/heads/main
| 2023-09-04T08:32:28.333482
| 2023-08-24T15:44:29
| 2023-08-24T15:44:29
| 170,893,393
| 680
| 445
|
BSD-3-Clause
| 2023-09-14T17:23:51
| 2019-02-15T16:17:11
|
C++
|
UTF-8
|
Python
| false
| false
| 1,607
|
pyi
|
ps4_dualshock.pyi
|
from rclpy.impl.rcutils_logger import RCUtilsLogger
from moveit.servo_client.teleop import TeleopDevice as TeleopDevice
from multiprocessing import Process as Process
from sensor_msgs.msg import Joy as Joy
from std_srvs.srv import Trigger as Trigger
class DualShockAxes:
LEFT_STICK_X: int
LEFT_STICK_Y: int
LEFT_TRIGGER: int
RIGHT_STICK_X: int
RIGHT_STICK_Y: int
RIGHT_TRIGGER: int
D_PAD_X: int
D_PAD_Y: int
def __init__(
self,
LEFT_STICK_X,
LEFT_STICK_Y,
LEFT_TRIGGER,
RIGHT_STICK_X,
RIGHT_STICK_Y,
RIGHT_TRIGGER,
D_PAD_X,
D_PAD_Y,
) -> None: ...
class DualShockButtons:
X: int
O: int
TRIANGLE: int
SQUARE: int
L1: int
R1: int
L2: int
R2: int
SHARE: int
OPTIONS: int
HOME: int
LEFT_STICK_TRIGGER: int
RIGHT_STICK_TRIGGER: int
def __init__(
self,
X,
O,
TRIANGLE,
SQUARE,
L1,
R1,
L2,
R2,
SHARE,
OPTIONS,
HOME,
LEFT_STICK_TRIGGER,
RIGHT_STICK_TRIGGER,
) -> None: ...
class PS4DualShock:
Axes: DualShockAxes
Buttons: DualShockButtons
def __init__(self, Axes, Buttons) -> None: ...
class PS4DualShockTeleop(TeleopDevice):
logger: RCUtilsLogger
def __init__(
self,
ee_frame_name: str,
node_name: str = ...,
device_name: str = ...,
device_config: PS4DualShock = ...,
) -> None: ...
def publish_command(self, data) -> None: ...
def record() -> None: ...
|
538377d04d10adde72c52be909a342d849b77e62
|
057a475216e9beed41983481aafcaf109bbf58da
|
/tests/queries/0_stateless/02481_async_insert_dedup.python
|
0e80a21bf4648509b5f181c50c57bd57fa8ce697
|
[
"Apache-2.0",
"BSL-1.0"
] |
permissive
|
ClickHouse/ClickHouse
|
fece5204263a5b4d693854b6039699265f1bb27f
|
6649328db809d51a694c358571539bc5820464be
|
refs/heads/master
| 2023-08-31T18:48:36.615225
| 2023-08-31T17:51:24
| 2023-08-31T17:51:24
| 60,246,359
| 23,878
| 5,449
|
Apache-2.0
| 2023-09-14T20:10:52
| 2016-06-02T08:28:18
|
C++
|
UTF-8
|
Python
| false
| false
| 5,202
|
python
|
02481_async_insert_dedup.python
|
#!/usr/bin/env python3
import os
import sys
import random
import queue
import time
from threading import Thread
CURDIR = os.path.dirname(os.path.realpath(__file__))
sys.path.insert(0, os.path.join(CURDIR, "helpers"))
from pure_http_client import ClickHouseClient
table_engine = sys.argv[1]
client = ClickHouseClient()
# test table without partition
client.query("DROP TABLE IF EXISTS t_async_insert_dedup_no_part SYNC")
create_query = f"""
CREATE TABLE t_async_insert_dedup_no_part (
KeyID UInt32
) Engine = {table_engine}('/clickhouse/tables/{{shard}}/{{database}}/t_async_insert_dedup', '{{replica}}')
ORDER BY (KeyID)
"""
client.query(create_query)
client.query(
"insert into t_async_insert_dedup_no_part values (1), (2), (3), (4), (5)",
settings={
"async_insert": 1,
"wait_for_async_insert": 1,
"insert_keeper_fault_injection_probability": 0,
},
)
result = client.query("select count(*) from t_async_insert_dedup_no_part")
print(result, flush=True)
client.query("DROP TABLE IF EXISTS t_async_insert_dedup_no_part SYNC")
# generate data and push to queue
def generate_data(q, total_number, use_token):
old_data = []
max_chunk_size = 30
partitions = ["2022-11-11 10:10:10", "2022-12-12 10:10:10"]
last_number = 0
while True:
dup_simulate = random.randint(0, 3)
# insert old data randomly. 25% of them are dup.
if dup_simulate == 0:
last_idx = len(old_data) - 1
if last_idx < 0:
continue
idx = last_idx - random.randint(0, 50)
if idx < 0:
idx = 0
q.put(old_data[idx])
else:
# insert new data.
chunk_size = random.randint(1, max_chunk_size)
insert_stmt = "insert into t_async_insert_dedup values "
start = last_number + 1
end = start + chunk_size
if end > total_number:
end = total_number
token = ""
for i in range(start, end + 1):
partition = partitions[random.randint(0, 1)]
insert_stmt += "('{}', {}),".format(partition, i)
if use_token:
token = str(i)
insert_stmt = insert_stmt[:-1]
q.put((insert_stmt, token))
old_data.append((insert_stmt, token))
last_number = end
if end >= total_number:
break
# wait all the tasks is done.
q.join()
def fetch_and_insert_data(q, client):
while True:
insert = q.get()
client.query(
insert[0],
settings={
"async_insert": 1,
"async_insert_deduplicate": 1,
"wait_for_async_insert": 0,
"async_insert_busy_timeout_ms": 1500,
"insert_keeper_fault_injection_probability": 0,
"insert_deduplication_token": insert[1],
},
)
q.task_done()
sleep_time = random.randint(50, 500)
time.sleep(sleep_time / 1000.0)
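# Because every statement is sent with async_insert_deduplicate=1 (and, when
# enabled, a per-statement insert_deduplication_token), the re-sent inserts
# above should be eliminated server-side, so the final row count is expected
# to equal total_number exactly.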
# main process
client.query("DROP TABLE IF EXISTS t_async_insert_dedup SYNC")
create_query = f"""
CREATE TABLE t_async_insert_dedup (
EventDate DateTime,
KeyID UInt32
) Engine = {table_engine}('/clickhouse/tables/{{shard}}/{{database}}/t_async_insert_dedup', '{{replica}}')
PARTITION BY toYYYYMM(EventDate)
ORDER BY (KeyID, EventDate) SETTINGS use_async_block_ids_cache = 1
"""
client.query(create_query)
q = queue.Queue(100)
total_number = 10000
use_token = False
if len(sys.argv) > 2 and sys.argv[2] == "token":
use_token = True
gen = Thread(target=generate_data, args=[q, total_number, use_token])
gen.start()
for i in range(3):
insert = Thread(target=fetch_and_insert_data, args=[q, client])
insert.start()
gen.join()
retry = 0
while True:
time.sleep(5)
result = client.query("select KeyID from t_async_insert_dedup order by KeyID")
result = result.split()
err = False
errMsg = ""
if len(result) != total_number:
err = True
errMsg = f"the size of result is {len(result)}. we expect {total_number}."
else:
for i in range(total_number):
expect = str(i + 1)
real = result[i]
if expect != real:
err = True
errMsg = f"error, real value {real} is not equal to expect value {expect} for {i}-th elements"
break
# retry several times to get stable results.
if err and retry >= 5:
print(errMsg, flush=True)
elif err:
retry += 1
continue
else:
print(len(result), flush=True)
break
result = client.query(
"SELECT value FROM system.metrics where metric = 'AsyncInsertCacheSize'"
)
result = int(result.split()[0])
if result <= 0:
raise Exception(f"AsyncInsertCacheSize should > 0, but got {result}")
result = client.query(
"SELECT value FROM system.events where event = 'AsyncInsertCacheHits'"
)
result = int(result.split()[0])
if result <= 0:
raise Exception(f"AsyncInsertCacheHits should > 0, but got {result}")
client.query("DROP TABLE IF EXISTS t_async_insert_dedup SYNC")
os._exit(os.EX_OK)
|
39a5d5af2a9c726e3982e68d4d7f501f49d6ce36
|
c5952aa93ddede4cbe8e027f8ac507ffdbca6124
|
/CA1Dimension.pyde
|
ce14419d790b4f78454fd72a4fb3953116e66fdf
|
[] |
no_license
|
hackingmath/Math-Adventures
|
1cea476ee5aea807367670aefc062bc2530218ba
|
327751c5db4526e858ab240b29b9cc55bb742586
|
refs/heads/master
| 2023-07-06T00:25:25.391354
| 2023-06-23T15:57:18
| 2023-06-23T15:57:18
| 125,400,295
| 114
| 51
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,678
|
pyde
|
CA1Dimension.pyde
|
width_of_grid = 151
# size of one cell in pixels (the 151-cell grid is wider than the 600 px
# canvas; draw() shifts it left with translate() so it stays roughly centred)
sz = 600//101 + 1
class Cell:
def __init__(self,c,on):
self.c = c
self.on = on
self.ruleList = [0,0,0,1,1,1,1,0] #Rule 30
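        # Rule lookup: ruleList[k] holds the new state for neighborhood
        # (left, me, right) read as a 3-bit number n = 4*left + 2*me + right,
        # indexed as k = 7 - n. So index 0 <-> 111, index 7 <-> 000, and
        # [0,0,0,1,1,1,1,0] is exactly Rule 30 (binary 00011110).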
def checkNeighbors(self):
global cellList
if self.c == 0: #if the cell is in column 0
left = 0
else:
left = cellList[self.c-1].on
me = self.on
if self.c == len(cellList)-1:
right = 0
else:
right = cellList[self.c+1].on
return self.ruleList[7 - (4*left+2*me+right)]
def update(self):
if self.on == 1:
fill(0)
else:
fill(255)
rect(sz*self.c,0,sz,sz)
def createCellList():
'''Creates a big list of off cells with
one on Cell in the center'''
global cellList, level
cellList=[]#empty list for cells
#populate the initial cell list
for i in range(width_of_grid):
cellList.append(Cell(i,0)) #add off Cells or zeroes
#center cell is set to on
cellList[width_of_grid//2].on = 1
level = 0
return cellList
def generateNewCellList():
global cellList
newList = []
for cell in cellList:
newList.append(Cell(cell.c,cell.checkNeighbors()))
return newList
def setup():
global cellList
size(600,600)
cellList = createCellList()
noStroke()
def draw():
global cellList,level
pushMatrix()
translate(-150,level*sz)
#draw each cell in the list:
for cell in cellList:
cell.update()
popMatrix()
level += 1
cellList = generateNewCellList()
|
43bc38c5f8ea9a0f9b0f9da59fb7fd19444a4185
|
d6aae799e18e907fb413b715200c7832252a87e5
|
/responsible_ai/kernel_shap/calculate.py
|
41dffdb730e3b62a91d806d9ba8453714d824658
|
[
"BSD-3-Clause",
"MIT",
"LicenseRef-scancode-proprietary-license",
"Apache-2.0",
"CC-BY-NC-4.0",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
sony/nnabla-examples
|
0d0bbd5df3028996e790bcf07248fdb0932697d1
|
41f71faa6efff7774a76bbd5af3198322a90a6ab
|
refs/heads/master
| 2023-09-04T03:45:54.023899
| 2023-08-22T03:31:21
| 2023-08-22T03:31:21
| 109,625,584
| 308
| 108
|
Apache-2.0
| 2023-08-22T03:31:23
| 2017-11-05T23:30:40
|
Python
|
UTF-8
|
Python
| false
| false
| 10,119
|
py
|
calculate.py
|
# Copyright 2021 Sony Group Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
import nnabla as nn
from scipy.special import binom
import copy
import itertools
class Ridge:
def __init__(self, alpha=0, fit_intercept=True):
self.alpha = alpha
self.fit_intercept = fit_intercept
def fit(self, X: np.ndarray, y: np.ndarray, weights=None):
y = y.reshape(-1, 1)
if X.shape[0] != y.shape[0]:
raise Exception(
f"Number of X and y rows don't match ({X.shape[0]} != {y.shape[0]})")
if self.fit_intercept:
X = np.concatenate([np.ones((X.shape[0], 1)), X], 1)
if weights is None:
tmp = X
else:
tmp = np.transpose(np.transpose(X) * np.transpose(weights))
etmp_dot = np.dot(np.transpose(tmp), X)
if self.alpha != 0:
etmp_dot = etmp_dot + self.alpha * np.eye(etmp_dot.shape[0])
try:
tmp2 = np.linalg.inv(etmp_dot)
except np.linalg.LinAlgError:
tmp2 = np.linalg.pinv(etmp_dot)
self.w = np.dot(tmp2, np.dot(np.transpose(tmp), y))
def predict(self, X: np.ndarray):
if self.fit_intercept:
X = np.concatenate([np.ones((X.shape[0], 1)), X], 1)
return np.dot(X, self.w)
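# The closed form above is weighted ridge regression,
#     w = (X^T W X + alpha * I)^(-1) X^T W y,
# with an all-ones column prepended when fit_intercept is True. A hedged sanity
# check on hypothetical data (not from this file):
#
#     X = np.array([[0.0], [1.0], [2.0]]); y = np.array([1.0, 3.0, 5.0])
#     m = Ridge(alpha=0); m.fit(X, y)   # recovers intercept 1.0, slope 2.0
#     m.predict(np.array([[3.0]]))      # -> [[7.0]]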
class KernelSHAP:
def __init__(self, data, model, X, alpha, nsamples='auto'):
self.data = data
self.model = model
self.X = X
self.alpha = alpha
self.nsamples = nsamples
def shap_values(self):
out = self.model(self.data)
out.forward()
self.train_samples = self.data.d.shape[0]
self.num_features = self.data.d.shape[1]
self.num_classes = out.d.shape[1]
self.weights = np.ones(self.train_samples)
self.weights /= np.sum(self.weights)
self.nsamples_run = 0
groups = [np.array([i]) for i in range(self.num_features)]
self.fnull = np.sum((out.d.T * self.weights).T, 0)
expected_value = self.fnull
explanations = []
for i in range(len(self.X.d)):
instance = self.X[i:i + 1, :]
explanations.append(self.explain(instance, groups))
s = explanations[0].shape
outs = [np.zeros((self.X.d.shape[0], s[0])) for j in range(s[1])]
for i in range(self.X.d.shape[0]):
for j in range(s[1]):
outs[j][i] = explanations[i][:, j]
if len(self.X.d) == 1:
outs = [sp[0] for sp in outs]
return outs, expected_value
def explain(self, instance, groups):
self.nsamples_added = 0
varying_inds = self.varying_groups(instance.d, groups)
varying_feature_groups = [groups[i] for i in varying_inds]
M = len(varying_feature_groups)
varying_feature_groups = np.array(varying_feature_groups)
varying_feature_groups = varying_feature_groups.flatten()
model_out = self.model(instance)
model_out.forward()
fx = model_out.d[0]
fx = np.array(fx)
if M == 0:
phi = np.zeros((self.num_features, self.num_classes))
elif M == 1:
phi = np.zeros((self.num_features, self.num_classes))
diff = fx - self.fnull
for d in range(self.num_classes):
phi[varying_inds[0], d] = diff[d]
else:
if self.nsamples == "auto":
self.nsamples = 2 * M + 2**11
max_samples = 2 ** 30
if M <= 30:
max_samples = 2 ** M - 2
if self.nsamples > max_samples:
self.nsamples = max_samples
self.synth_data = np.tile(self.data.d, (self.nsamples, 1))
self.mask_matrix = np.zeros((self.nsamples, M))
self.kernel_weights = np.zeros(self.nsamples)
self.y = np.zeros(
(self.nsamples * self.train_samples, self.num_classes))
self.ey = np.zeros((self.nsamples, self.num_classes))
            num_subset_sizes = int(np.ceil((M - 1) / 2.0))
            num_paired_subset_sizes = int(np.floor((M - 1) / 2.0))
weight_vector = np.array([(M - 1.0) / (i * (M - i))
for i in range(1, num_subset_sizes + 1)])
weight_vector[:num_paired_subset_sizes] *= 2
weight_vector /= np.sum(weight_vector)
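            # These are the Shapley kernel weights: per subset of size s,
            # pi(s) is proportional to (M - 1) / (C(M, s) * s * (M - s)).
            # Sizes with a distinct complement are doubled because each mask
            # and its complement are enumerated together below.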
num_full_subsets = 0
num_samples_left = self.nsamples
group_inds = np.arange(M, dtype='int64')
mask = np.zeros(M)
remaining_weight_vector = copy.copy(weight_vector)
for subset_size in range(1, num_subset_sizes + 1):
nsubsets = binom(M, subset_size)
if subset_size <= num_paired_subset_sizes:
nsubsets *= 2
if num_samples_left * remaining_weight_vector[subset_size - 1] / nsubsets >= 1.0 - 1e-8:
num_full_subsets += 1
num_samples_left -= nsubsets
if remaining_weight_vector[subset_size - 1] < 1.0:
remaining_weight_vector /= (1 -
remaining_weight_vector[subset_size - 1])
w = weight_vector[subset_size - 1] / binom(M, subset_size)
if subset_size <= num_paired_subset_sizes:
w /= 2.0
for inds in itertools.combinations(group_inds, subset_size):
mask[:] = 0.0
mask[np.array(inds, dtype='int64')] = 1.0
self.addsample(varying_feature_groups,
instance.d, mask, w)
if subset_size <= num_paired_subset_sizes:
mask[:] = np.abs(mask - 1)
self.addsample(varying_feature_groups,
instance.d, mask, w)
else:
break
self.run()
phi = np.zeros((self.num_features, self.num_classes))
for d in range(self.num_classes):
vphi = self.solve(d, M, fx)
phi[varying_inds, d] = vphi
return phi
def varying_groups(self, x, groups):
varying = np.zeros(self.num_features)
for i in range(0, self.num_features):
inds = groups[i]
x_group = x[0, inds]
num_mismatches = np.sum(np.frompyfunc(
self.not_equal, 2, 1)(x_group, self.data.d[:, inds]))
varying[i] = num_mismatches > 0
varying_indices = np.nonzero(varying)[0]
return varying_indices
def not_equal(self, i, j):
if isinstance(i, str) or isinstance(j, str):
return 0 if i == j else 1
return 0 if np.isclose(i, j, equal_nan=True) else 1
def addsample(self, varying_feature_groups, x, m, w):
offset = self.nsamples_added * self.train_samples
mask = m == 1.0
groups = varying_feature_groups[mask]
if len(groups.shape) == 2:
for group in groups:
self.synth_data[offset:offset +
self.train_samples, group] = x[0, group]
else:
evaluation_data = x[0, groups]
self.synth_data[offset:offset +
self.train_samples, groups] = evaluation_data
self.mask_matrix[self.nsamples_added, :] = m
self.kernel_weights[self.nsamples_added] = w
self.nsamples_added += 1
def run(self):
num_to_run = self.nsamples_added * self.train_samples - \
self.nsamples_run * self.train_samples
data_run = self.synth_data[self.nsamples_run *
self.train_samples:self.nsamples_added*self.train_samples, :]
data_run_nn = nn.Variable(data_run.shape)
data_run_nn.d = data_run
modelout = self.model(data_run_nn)
modelout.forward()
modelout = modelout.d
self.y[self.nsamples_run * self.train_samples:self.nsamples_added *
self.train_samples, :] = np.reshape(modelout, (num_to_run, self.num_classes))
for i in range(self.nsamples_run, self.nsamples_added):
ey_val = np.zeros(self.num_classes)
for j in range(0, self.train_samples):
ey_val += self.y[i * self.train_samples +
j, :] * self.weights[j]
self.ey[i, :] = ey_val
def solve(self, dim, M, fx):
ey_adj = self.ey[:, dim] - self.fnull[dim]
nonzero_inds = np.arange(M)
if len(nonzero_inds) == 0:
return np.zeros(M)
ey_adj2 = ey_adj - self.mask_matrix[:, nonzero_inds[-1]] * (
fx[dim] - self.fnull[dim])
etmp = np.transpose(np.transpose(
self.mask_matrix[:, nonzero_inds[:-1]]) - self.mask_matrix[:, nonzero_inds[-1]])
model = Ridge(alpha=self.alpha, fit_intercept=False)
model.fit(etmp, ey_adj2, self.kernel_weights)
w = model.w.reshape(-1)
phi = np.zeros(M)
phi[nonzero_inds[:-1]] = w
phi[nonzero_inds[-1]] = (fx[dim] - self.fnull[dim]) - sum(w)
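        # This enforces local accuracy: sum(phi) == fx[dim] - fnull[dim] exactly,
        # which is why the weighted regression above has only M - 1 free coefficients.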
for i in range(M):
if np.abs(phi[i]) < 1e-10:
phi[i] = 0
return phi
def calculate_shap(self):
if len(self.X.d.shape) == 1:
self.X = self.X.reshape((1, len(self.X.d)))
values, expected_values = self.shap_values()
return values, expected_values
|
235311ca403239c9d30d59a0388521a8988edc68
|
fa1ad2e2ac7e376fc7cb3b3a6e1bb88eed3e80be
|
/govern/data-quality/soda-core/soda/snowflake/soda/data_sources/snowflake_data_source.py
|
1fbd41b95cae1733303ee6110e8ae32f25045c41
|
[
"Apache-2.0",
"BSD-3-Clause",
"MIT",
"LicenseRef-scancode-free-unknown"
] |
permissive
|
alldatacenter/alldata
|
7bc7713c9f1d56ad6b8e59ea03206d1073b7e047
|
8d5f9a2d49ab8f9e85ccf058cb02c2fda287afc6
|
refs/heads/master
| 2023-08-05T07:32:25.442740
| 2023-08-03T13:17:24
| 2023-08-03T13:17:24
| 213,321,771
| 774
| 250
|
Apache-2.0
| 2023-09-06T17:35:32
| 2019-10-07T07:36:18
| null |
UTF-8
|
Python
| false
| false
| 8,505
|
py
|
snowflake_data_source.py
|
from __future__ import annotations
import logging
import re
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import serialization
from snowflake import connector
from snowflake.connector.network import DEFAULT_SOCKET_CONNECT_TIMEOUT
from soda.common.logs import Logs
from soda.execution.data_source import DataSource
from soda.execution.data_type import DataType
logger = logging.getLogger(__name__)
class SnowflakeDataSource(DataSource):
TYPE = "snowflake"
SCHEMA_CHECK_TYPES_MAPPING: dict = {
"TEXT": ["character varying", "varchar", "string"],
"NUMBER": ["integer", "int"],
"FLOAT": ["decimal"],
"TIMESTAMP_NTZ": ["timestamp"],
"TIMESTAMP_TZ": ["timestamptz"],
}
SQL_TYPE_FOR_CREATE_TABLE_MAP: dict = {
DataType.TEXT: "TEXT",
DataType.INTEGER: "INT",
DataType.DECIMAL: "FLOAT",
DataType.DATE: "DATE",
DataType.TIME: "TIME",
DataType.TIMESTAMP: "TIMESTAMP_NTZ",
DataType.TIMESTAMP_TZ: "TIMESTAMP_TZ",
DataType.BOOLEAN: "BOOLEAN",
}
SQL_TYPE_FOR_SCHEMA_CHECK_MAP = {
DataType.TEXT: "TEXT",
DataType.INTEGER: "NUMBER",
DataType.DECIMAL: "FLOAT",
DataType.DATE: "DATE",
DataType.TIME: "TIME",
DataType.TIMESTAMP: "TIMESTAMP_NTZ",
DataType.TIMESTAMP_TZ: "TIMESTAMP_TZ",
DataType.BOOLEAN: "BOOLEAN",
}
NUMERIC_TYPES_FOR_PROFILING = [
"FLOAT",
"NUMBER",
"INT",
"DECIMAL",
"NUMERIC",
"INTEGER",
"BIGINT",
"SMALLINT",
"TINYINT",
"FLOAT4",
"FLOAT8",
"REAL",
]
TEXT_TYPES_FOR_PROFILING = [
"TEXT",
"VARCHAR",
"CHAR",
"CHARACTER",
"NCHAR",
"STRING",
"NVARCHAR",
"NVARCHAR2",
"CHAR VARYING",
"NCHAR VARYING",
]
def __init__(self, logs: Logs, data_source_name: str, data_source_properties: dict):
super().__init__(logs, data_source_name, data_source_properties)
self.user = data_source_properties.get("username")
self.password = data_source_properties.get("password")
self.token = data_source_properties.get("token")
self.account = data_source_properties.get("account")
self.data_source = data_source_properties.get("data_source")
self.warehouse = data_source_properties.get("warehouse")
self.login_timeout = data_source_properties.get("connection_timeout", DEFAULT_SOCKET_CONNECT_TIMEOUT)
self.role = data_source_properties.get("role")
self.client_store_temporary_credential = data_source_properties.get("client_store_temporary_credential", False)
self.session_parameters = data_source_properties.get("session_params")
self.passcode_in_password = data_source_properties.get("passcode_in_password", False)
self.private_key_passphrase = data_source_properties.get("private_key_passphrase")
self.private_key = data_source_properties.get("private_key")
self.private_key_path = data_source_properties.get("private_key_path")
self.client_prefetch_threads = data_source_properties.get("client_prefetch_threads", 4)
self.client_session_keep_alive = data_source_properties.get("client_session_keep_alive", False)
self.authenticator = data_source_properties.get("authenticator", "snowflake")
self.session_params = data_source_properties.get("session_parameters")
def connect(self):
self.connection = connector.connect(
user=self.user,
password=self.password,
token=self.token,
account=self.account,
data_source=self.data_source,
database=self.database,
schema=self.schema,
warehouse=self.warehouse,
login_timeout=self.login_timeout,
role=self.role,
client_session_keep_alive=self.client_session_keep_alive,
client_store_temporary_credential=self.client_store_temporary_credential,
session_parameters=self.session_parameters,
passcode_in_password=self.passcode_in_password,
private_key=self.__get_private_key(),
client_prefetch_threads=self.client_prefetch_threads,
authenticator=self.authenticator,
application="Soda Core",
)
def __get_private_key(self):
if not (self.private_key_path or self.private_key):
return None
if self.private_key_passphrase:
encoded_passphrase = self.private_key_passphrase.encode()
else:
encoded_passphrase = None
pk_bytes = None
if self.private_key:
pk_bytes = self.private_key.encode()
elif self.private_key_path:
with open(self.private_key_path, "rb") as pk:
pk_bytes = pk.read()
p_key = serialization.load_pem_private_key(pk_bytes, password=encoded_passphrase, backend=default_backend())
return p_key.private_bytes(
encoding=serialization.Encoding.DER,
format=serialization.PrivateFormat.PKCS8,
encryption_algorithm=serialization.NoEncryption(),
)
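    # Illustrative sketch (not part of the original module): a PEM key that
    # __get_private_key() accepts could be generated with the same
    # cryptography package, e.g. for tests; names here are assumptions:
    #
    #   from cryptography.hazmat.primitives.asymmetric import rsa
    #   key = rsa.generate_private_key(public_exponent=65537, key_size=2048)
    #   pem_bytes = key.private_bytes(
    #       encoding=serialization.Encoding.PEM,
    #       format=serialization.PrivateFormat.PKCS8,
    #       encryption_algorithm=serialization.NoEncryption(),
    #   )  # write pem_bytes to the file referenced by private_key_path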
def escape_regex(self, value: str):
return re.sub(r"(\\.)", r"\\\1", value)
def regex_replace_flags(self) -> str:
return ""
def expr_regexp_like(self, expr: str, pattern: str):
return f"REGEXP_LIKE(COLLATE({expr}, ''), '{pattern}')"
def cast_text_to_number(self, column_name, validity_format: str):
"""Cast string to number
- first regex replace removes extra chars, keeps: "digits + - . ,"
- second regex changes "," to "."
- Nullif makes sure that if regexes return empty string then Null is returned instead
"""
regex = self.escape_regex(r"'[^-0-9\.\,]'")
return f"CAST(NULLIF(REGEXP_REPLACE(REGEXP_REPLACE(COLLATE({column_name}, ''), {regex}, ''{self.regex_replace_flags()}), ',', '.'{self.regex_replace_flags()}), '') AS {self.SQL_TYPE_FOR_CREATE_TABLE_MAP[DataType.DECIMAL]})"
def get_metric_sql_aggregation_expression(self, metric_name: str, metric_args: list[object] | None, expr: str):
# TODO add all of these snowflake specific statistical aggregate functions: https://docs.snowflake.com/en/sql-reference/functions-aggregation.html
if metric_name in [
"stddev",
"stddev_pop",
"stddev_samp",
"variance",
"var_pop",
"var_samp",
]:
return f"{metric_name.upper()}({expr})"
if metric_name in ["percentile", "percentile_disc"]:
            # TODO ensure a proper error is raised if metric_args[1] is not a valid number
            percentile_fraction = metric_args[1] if metric_args else None
return f"PERCENTILE_DISC({percentile_fraction}) WITHIN GROUP (ORDER BY {expr})"
return super().get_metric_sql_aggregation_expression(metric_name, metric_args, expr)
def sql_get_table_names_with_count(
self, include_tables: list[str] | None = None, exclude_tables: list[str] | None = None
) -> str:
table_filter_expression = self.sql_table_include_exclude_filter(
"table_name", "table_schema", include_tables, exclude_tables
)
where_clause = f"AND {table_filter_expression}" if table_filter_expression else ""
sql = f"""
SELECT table_name, row_count
FROM information_schema.tables
WHERE table_schema != 'INFORMATION_SCHEMA'
{where_clause}
"""
return sql
def _create_table_prefix(self):
return ".".join([p for p in [self.database, self.schema] if p is not None])
def default_casify_sql_function(self) -> str:
return "upper"
def default_casify_system_name(self, identifier: str) -> str:
return identifier.upper()
def default_casify_table_name(self, identifier: str) -> str:
return identifier.upper()
def default_casify_column_name(self, identifier: str) -> str:
return identifier.upper()
def default_casify_type_name(self, identifier: str) -> str:
return identifier.upper()
def safe_connection_data(self):
return [
self.type,
self.account,
]
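# --- Illustrative usage sketch (not part of the original module) ---
# The property names mirror what __init__ reads above; all values are
# placeholders, and `logs` is assumed to be a soda.common.logs.Logs instance.
#
#   properties = {
#       "username": "SODA_USER",
#       "password": "***",
#       "account": "my_account.eu-west-1",
#       "warehouse": "COMPUTE_WH",
#       "role": "ANALYST",
#   }
#   ds = SnowflakeDataSource(logs, "snowflake_ds", properties)
#   ds.connect()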
---
blob_id: 9502085edbd9bcf1034225cb160a80f475064009
directory_id: a3e2d421f94a8adf2c41ff1d093b5a06de1448d6
path: /server/pypi/packages/lz4/test.py
content_id: a1cd2b5d691473e7c2bb3190c341e0ba7abff624
detected_licenses: ["MIT"]
license_type: permissive
repo_name: chaquo/chaquopy
snapshot_id: 09ef057015a756ce9b862732477b2549562720b4
revision_id: e09bbe6ca5efd859d484b01e30131ccc944aa2b6
branch_name: refs/heads/master
visit_date: 2023-08-31T22:09:22.230601
revision_date: 2023-08-31T13:07:57
committer_date: 2023-08-31T13:07:57
github_id: 95140462
star_events_count: 607
fork_events_count: 121
gha_license_id: MIT
gha_event_created_at: 2023-09-13T19:17:29
gha_created_at: 2017-06-22T17:33:02
gha_language: Python
src_encoding: UTF-8
language: Python
is_vendor: false
is_generated: false
length_bytes: 388
extension: py
filename: test.py
content:
import unittest
class TestLz4(unittest.TestCase):
def test_basic(self):
import os
import lz4.frame
        # Repeating a single random 1 KiB block makes the input highly compressible.
        input_data = 20 * 128 * os.urandom(1024)
compressed = lz4.frame.compress(input_data)
self.assertLess(len(compressed), len(input_data))
decompressed = lz4.frame.decompress(compressed)
self.assertEqual(decompressed, input_data)
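# --- Illustrative sketch (not part of the original test) ---
# lz4.frame also exposes an incremental compressor; a minimal round trip,
# assuming only the same lz4 package, could look like:
#
#   comp = lz4.frame.LZ4FrameCompressor()
#   frame = comp.begin() + comp.compress(b"chunk one") + comp.flush()
#   assert lz4.frame.decompress(frame) == b"chunk one"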
---
blob_id: a951508eb69768a89420b054e4f838d1073e20b3
directory_id: 29dfa1deefc72493d1b1eecf1a8df62e24599a77
path: /tests/encryption/test_lib.py
content_id: a101b0601f87b0ad0fb3aa607054375413600969
detected_licenses: ["Apache-2.0"]
license_type: permissive
repo_name: log2timeline/dfvfs
snapshot_id: fd301eaf721a9945641a44ff722aec963158a6b3
revision_id: 28756d910e951a22c5f0b2bcf5184f055a19d544
branch_name: refs/heads/main
visit_date: 2023-08-07T22:45:45.432668
revision_date: 2023-07-30T12:17:56
committer_date: 2023-07-30T12:17:56
github_id: 23820144
star_events_count: 197
fork_events_count: 65
gha_license_id: Apache-2.0
gha_event_created_at: 2023-07-30T12:17:58
gha_created_at: 2014-09-09T05:06:44
gha_language: Python
src_encoding: UTF-8
language: Python
is_vendor: false
is_generated: false
length_bytes: 218
extension: py
filename: test_lib.py
content:
# -*- coding: utf-8 -*-
"""Shared test cases."""
from tests import test_lib as shared_test_lib
class DecrypterTestCase(shared_test_lib.BaseTestCase):
"""The unit test case for decrypter object implementations."""
---
blob_id: 99d069678aa05cc40cf8760a1be09588982da098
directory_id: a3d6556180e74af7b555f8d47d3fea55b94bcbda
path: /ppapi/native_client/tests/nacl_browser/inbrowser_test_runner/nacl.scons
content_id: 0641c89f580ad3aac359ff04cefb5807712f9f33
detected_licenses: ["LicenseRef-scancode-khronos", "BSD-3-Clause"]
license_type: permissive
repo_name: chromium/chromium
snapshot_id: aaa9eda10115b50b0616d2f1aed5ef35d1d779d6
revision_id: a401d6cf4f7bf0e2d2e964c512ebb923c3d8832c
branch_name: refs/heads/main
visit_date: 2023-08-24T00:35:12.585945
revision_date: 2023-08-23T22:01:11
committer_date: 2023-08-23T22:01:11
github_id: 120360765
star_events_count: 17408
fork_events_count: 7102
gha_license_id: BSD-3-Clause
gha_event_created_at: 2023-09-10T23:44:27
gha_created_at: 2018-02-05T20:55:32
gha_language: null
src_encoding: UTF-8
language: Python
is_vendor: false
is_generated: false
length_bytes: 2309
extension: scons
filename: nacl.scons
content:
# -*- python -*-
# Copyright 2012 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import json
import posixpath
Import('env')
env.ComponentLibrary('testrunner_browser', ['test_runner_ppapi.c'])
dest_copy = env.Replicate('$STAGING_DIR', 'test_runner.html')
env.Replicate('$STAGING_DIR',
'${SCONSTRUCT_DIR}/tools/browser_tester/browserdata/nacltest.js')
def AddTest(env, test_name, exe_list, parallel=False):
test_files = [env.File('${STAGING_DIR}/%s${PROGSUFFIX}'
% env.ProgramNameForNmf(exe_name))
for exe_name in exe_list]
def WriteManifestList(target, source, env):
nmf_names = ['%s.nmf' % exe_name for exe_name in exe_list]
data = ('// This file is automatically generated\n'
'var G_NMF_TEST_LIST = %s;\n' % json.dumps(nmf_names))
    with open(target[0].abspath, 'w') as fh:
      fh.write(data)
nmf_list_js = env.Command(['%s_nmf_test_list.js' % test_name], [],
WriteManifestList)[0]
# Scons does not track the dependency of nmf_list_js on exe_list, so
# we must always recreate nmf_list_js when it is used.
env.AlwaysBuild(nmf_list_js)
node = env.PPAPIBrowserTester(
'%s.out' % test_name, url='test_runner.html',
nmf_names=exe_list,
files=[env.File('test_runner.html')] + test_files,
map_files=[('nmf_test_list.js', nmf_list_js)],
test_args=[('parallel', int(parallel))])
# Disabled on Valgrind because of multiple nexes.
# TODO(eugenis): enable when Valgrind learns to autodetect the nexe name
env.AddNodeToTestSuite(node, ['chrome_browser_tests'], test_name,
disable_irt_suffix=True,
is_broken=env.PPAPIBrowserTesterIsBroken() or
env.Bit('running_on_valgrind') or
# inbrowser_test_runner_parallel is flaky on 32 bit windows
# (maybe http://code.google.com/p/chromium/issues/detail?id=120355 ?)
parallel)
if not env.Bit('tests_use_irt'):
Return()
exe_list = env['TESTS_TO_RUN_INBROWSER']
AddTest(env, 'run_inbrowser_test_runner', exe_list, parallel=False)
AddTest(env, 'run_inbrowser_test_runner_parallel', exe_list, parallel=True)
---
blob_id: fbc0b172436f413733eab16826d38d240a64ae10
directory_id: 32c7914231074fba10220e4a79b0a1f41a1e2eae
path: /snntoolbox/datasets/aedat/ImportAedat.py
content_id: dc41a9ccb77cde52a7f252ce5f201f11d836796f
detected_licenses: ["MIT"]
license_type: permissive
repo_name: NeuromorphicProcessorProject/snn_toolbox
snapshot_id: ea4ee4ddfe8a1436a5a297eb562384e302ff91db
revision_id: 0255f753efa32f69593ac6bc25a95d5b09f8f1cb
branch_name: refs/heads/master
visit_date: 2023-01-21T06:54:23.166129
revision_date: 2023-01-13T10:01:01
committer_date: 2023-01-13T10:01:01
github_id: 64289132
star_events_count: 351
fork_events_count: 122
gha_license_id: MIT
gha_event_created_at: 2022-08-12T22:00:26
gha_created_at: 2016-07-27T07:58:19
gha_language: Python
src_encoding: UTF-8
language: Python
is_vendor: false
is_generated: false
length_bytes: 601
extension: py
filename: ImportAedat.py
content:
# -*- coding: utf-8 -*-
"""
Import aedat file.
"""
from snntoolbox.datasets.aedat.ImportAedatHeaders import \
import_aedat_headers
from snntoolbox.datasets.aedat.ImportAedatDataVersion1or2 import \
import_aedat_dataversion1or2
def import_aedat(args):
"""
Parameters
----------
args :
Returns
-------
"""
output = {'info': args}
with open(output['info']['filePathAndName'], 'rb') as \
output['info']['fileHandle']:
output['info'] = import_aedat_headers(output['info'])
return import_aedat_dataversion1or2(output['info'])
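# --- Illustrative usage sketch (not part of the original module) ---
# The path is hypothetical; 'filePathAndName' is the only key import_aedat
# reads itself before handing off to the header and data parsers.
#
#   events = import_aedat({'filePathAndName': '/data/recording.aedat'})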
---
blob_id: 397246135a65077722ac6de6f71a322e4a52b84a
directory_id: b4565ee9e5b2da09b845577a632c7bbec3118d79
path: /rosbridge_library/src/rosbridge_library/internal/publishers.py
content_id: e4f674b70ddb511760494a884cb7a0d4a9092ecc
detected_licenses: ["BSD-3-Clause"]
license_type: permissive
repo_name: RobotWebTools/rosbridge_suite
snapshot_id: 05ec59f9788651ca2f382bd8951c58ce30dd3ac4
revision_id: 7bbe58ba95b2bdf4003a2ee7c1bb76691ba6d405
branch_name: refs/heads/ros2
visit_date: 2023-08-31T11:28:16.618382
revision_date: 2023-04-17T22:23:59
committer_date: 2023-04-17T22:23:59
github_id: 6251725
star_events_count: 761
fork_events_count: 487
gha_license_id: BSD-3-Clause
gha_event_created_at: 2023-09-14T01:10:13
gha_created_at: 2012-10-16T21:08:31
gha_language: Python
src_encoding: UTF-8
language: Python
is_vendor: false
is_generated: false
length_bytes: 12881
extension: py
filename: publishers.py
content:
# Software License Agreement (BSD License)
#
# Copyright (c) 2012, Willow Garage, Inc.
# Copyright (c) 2014, Creativa 77 SRL
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Willow Garage, Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from threading import Timer
from rclpy.duration import Duration
from rclpy.qos import DurabilityPolicy, QoSProfile
from rosbridge_library.internal import message_conversion, ros_loader
from rosbridge_library.internal.message_conversion import msg_class_type_repr
from rosbridge_library.internal.topics import (
TopicNotEstablishedException,
TypeConflictException,
)
class MultiPublisher:
"""Keeps track of the clients that are using a particular publisher.
Provides an API to publish messages and register clients that are using
this publisher"""
def __init__(self, topic, node_handle, msg_type=None, latched_client_id=None, queue_size=100):
"""Register a publisher on the specified topic.
Keyword arguments:
topic -- the name of the topic to register the publisher to
node_handle -- Handle to a rclpy node to create the publisher.
        msg_type -- (optional) the type to register the publisher as. If not
        provided, an attempt will be made to infer the topic type
        latched_client_id -- (optional) if a client requested this publisher to
        be latched, the client_id of that client
        queue_size -- (optional) the depth of the publisher's QoS profile
Throws:
TopicNotEstablishedException -- if no msg_type was specified by the
caller and the topic is not yet established, so a topic type cannot
be inferred
TypeConflictException -- if the msg_type was specified by the
caller and the topic is established, and the established type is
different to the user-specified msg_type
"""
# First check to see if the topic is already established
topics_names_and_types = dict(node_handle.get_topic_names_and_types())
topic_type = topics_names_and_types.get(topic)
# If it's not established and no type was specified, exception
if msg_type is None and topic_type is None:
raise TopicNotEstablishedException(topic)
# topic_type is a list of types or None at this point; only one type is supported.
if topic_type is not None:
if len(topic_type) > 1:
node_handle.get_logger().warning(f"More than one topic type detected: {topic_type}")
topic_type = topic_type[0]
# Use the established topic type if none was specified
if msg_type is None:
msg_type = topic_type
# Load the message class, propagating any exceptions from bad msg types
msg_class = ros_loader.get_message_class(msg_type)
# Make sure the specified msg type and established msg type are same
msg_type_string = msg_class_type_repr(msg_class)
if topic_type is not None and topic_type != msg_type_string:
raise TypeConflictException(topic, topic_type, msg_type_string)
# Create the publisher and associated member variables
self.clients = {}
self.latched_client_id = latched_client_id
self.topic = topic
self.node_handle = node_handle
self.msg_class = msg_class
# Adding a lifespan solves the problem of late-joining subscribers
# without the need of a custom message publisher implementation.
publisher_qos = QoSProfile(
depth=queue_size,
durability=DurabilityPolicy.TRANSIENT_LOCAL,
)
# For latched clients, no lifespan has to be specified (i.e. latch forever).
# Otherwise we want to keep the messages for a second to prevent late-joining subscribers from
# missing messages.
if latched_client_id is None:
publisher_qos.lifespan = Duration(seconds=1)
else:
publisher_qos.depth = 1
self.publisher = node_handle.create_publisher(msg_class, topic, qos_profile=publisher_qos)
def unregister(self):
"""Unregisters the publisher and clears the clients"""
self.node_handle.destroy_publisher(self.publisher)
self.clients.clear()
def verify_type(self, msg_type):
"""Verify that the publisher publishes messages of the specified type.
Keyword arguments:
msg_type -- the type to check this publisher against
Throws:
Exception -- if ros_loader cannot load the specified msg type
TypeConflictException -- if the msg_type is different than the type of
this publisher
"""
        if ros_loader.get_message_class(msg_type) is not self.msg_class:
            raise TypeConflictException(self.topic, msg_class_type_repr(self.msg_class), msg_type)
def publish(self, msg):
"""Publish a message using this publisher.
Keyword arguments:
msg -- the dict (json) message to publish
Throws:
Exception -- propagates exceptions from message conversion if the
provided msg does not properly conform to the message type of this
publisher
"""
# Create a message instance
inst = self.msg_class()
# Populate the instance, propagating any exceptions that may be thrown
message_conversion.populate_instance(msg, inst)
# Publish the message
self.publisher.publish(inst)
def register_client(self, client_id):
"""Register the specified client as a client of this publisher.
Keyword arguments:
client_id -- the ID of the client using the publisher
"""
self.clients[client_id] = True
def unregister_client(self, client_id):
"""Unregister the specified client from this publisher.
If the specified client_id is not a client of this publisher, nothing
happens.
Keyword arguments:
client_id -- the ID of the client to remove
"""
if client_id in self.clients:
del self.clients[client_id]
def has_clients(self):
"""Return true if there are clients to this publisher."""
return len(self.clients) != 0
class PublisherManager:
    """The PublisherManager keeps track of ROS publishers.

    It maintains a MultiPublisher instance for each registered topic.
    When unregistering a client, if there are no more clients for a publisher,
    the underlying ROS publisher is destroyed.
    """
def __init__(self):
self._publishers = {}
self.unregister_timers = {}
self.unregister_timeout = 10.0
def register(self, client_id, topic, node_handle, msg_type=None, latch=False, queue_size=100):
"""Register a publisher on the specified topic.
Publishers are shared between clients, so a single MultiPublisher
instance is created per topic, even if multiple clients register.
Keyword arguments:
client_id -- the ID of the client making this request
topic -- the name of the topic to publish on
node_handle -- Handle to a rclpy node to create the publisher.
msg_type -- (optional) the type to publish
latch -- (optional) whether to make this publisher latched
queue_size -- (optional) rospy publisher queue_size to use
Throws:
Exception -- exceptions are propagated from the MultiPublisher if
there is a problem loading the specified msg class or establishing
the publisher
"""
latched_client_id = client_id if latch else None
if topic not in self._publishers:
self._publishers[topic] = MultiPublisher(
topic,
node_handle,
msg_type=msg_type,
latched_client_id=latched_client_id,
queue_size=queue_size,
)
        elif latch and self._publishers[topic].latched_client_id != client_id:
            node_handle.get_logger().warn(
                f"Client ID {client_id} attempted to register topic [{topic}] as "
                "latched, but this topic was previously registered."
            )
            node_handle.get_logger().warn(
                "Only a single registered latched publisher is supported at this time."
            )
        elif not latch and self._publishers[topic].latched_client_id:
            node_handle.get_logger().warn(
                f"New non-latched publisher registration for topic [{topic}], "
                "which is already registered as latched."
            )
            node_handle.get_logger().warn(
                "Only a single registered latched publisher is supported at this time."
            )
if msg_type is not None:
self._publishers[topic].verify_type(msg_type)
self._publishers[topic].register_client(client_id)
def unregister(self, client_id, topic):
"""Unregister a client from the publisher for the given topic.
Will wait some time before actually unregistering, it is done in
_unregister_impl
        If there are no clients remaining for that publisher, then the
        underlying ROS publisher is destroyed.
Keyword arguments:
client_id -- the ID of the client making this request
topic -- the topic to unregister the publisher for
"""
if topic not in self._publishers:
return
self._publishers[topic].unregister_client(client_id)
if topic in self.unregister_timers:
self.unregister_timers[topic].cancel()
del self.unregister_timers[topic]
self.unregister_timers[topic] = Timer(
self.unregister_timeout, self._unregister_impl, [topic]
)
self.unregister_timers[topic].start()
def _unregister_impl(self, topic):
if not self._publishers[topic].has_clients():
self._publishers[topic].unregister()
del self._publishers[topic]
del self.unregister_timers[topic]
def unregister_all(self, client_id):
"""Unregisters a client from all publishers that they are registered
to.
Keyword arguments:
client_id -- the ID of the client making this request"""
        # Copy the keys: unregister() schedules deletions on a timer thread,
        # so guard against the dict changing size during iteration.
        for topic in list(self._publishers.keys()):
self.unregister(client_id, topic)
def publish(self, client_id, topic, msg, node_handle, latch=False, queue_size=100):
"""Publish a message on the given topic.
Tries to create a publisher on the topic if one does not already exist.
Keyword arguments:
client_id -- the ID of the client making this request
topic -- the topic to publish the message on
msg -- a JSON-like dict of fields and values
node_handle -- Handle to a rclpy node to create the publisher.
latch -- (optional) whether to make this publisher latched
queue_size -- (optional) rospy publisher queue_size to use
Throws:
Exception -- a variety of exceptions are propagated. They can be
thrown if there is a problem setting up or getting the publisher,
or if the provided msg does not map to the msg class of the publisher.
"""
self.register(client_id, topic, node_handle, latch=latch, queue_size=queue_size)
self._publishers[topic].publish(msg)
manager = PublisherManager()
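# --- Illustrative usage sketch (not part of the original module) ---
# Assumes an initialized ROS 2 environment; the node and topic names are
# hypothetical.
#
#   import rclpy
#   rclpy.init()
#   node = rclpy.create_node("bridge_example")
#   manager.register("client-1", "/chatter", node, msg_type="std_msgs/String")
#   manager.publish("client-1", "/chatter", {"data": "hello"}, node)
#   manager.unregister_all("client-1")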
---
blob_id: d6bba6eb034f5f733154768dc5148eb2fca5b091
directory_id: 091a6200be74bf6577c86f623665bcc24e16b02b
path: /PyPortal_Hackster/code.py
content_id: e2d2d730dfaf90e553461da9e7bd87e66d1dc88c
detected_licenses: ["MIT"]
license_type: permissive
repo_name: adafruit/Adafruit_Learning_System_Guides
snapshot_id: b5f7bce40a16da64e7a79d4b39de032f2cca41d4
revision_id: 5eaa7a15a437c533b89f359a25983e24bb6b5438
branch_name: refs/heads/main
visit_date: 2023-09-05T18:31:41.621956
revision_date: 2023-09-05T15:36:09
committer_date: 2023-09-05T15:36:09
github_id: 105065494
star_events_count: 937
fork_events_count: 937
gha_license_id: MIT
gha_event_created_at: 2023-09-12T18:48:53
gha_created_at: 2017-09-27T20:22:44
gha_language: C
src_encoding: UTF-8
language: Python
is_vendor: false
is_generated: false
length_bytes: 1848
extension: py
filename: code.py
content:
# SPDX-FileCopyrightText: 2019 Limor Fried for Adafruit Industries
#
# SPDX-License-Identifier: MIT
import time
import board
from adafruit_pyportal import PyPortal
# Get wifi details and more from a secrets.py file
try:
from secrets import secrets
except ImportError:
print("WiFi secrets are kept in secrets.py, please add them there!")
raise
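# A minimal secrets.py sketch (placeholder values): besides the usual Wi-Fi
# credentials PyPortal expects, this script reads the two Hackster keys used
# when building DATA_SOURCE below.
#
#   secrets = {
#       'ssid': 'my-network',
#       'password': 'my-password',
#       'hackster_clientid': 'YOUR_CLIENT_ID',
#       'hackster_secret': 'YOUR_CLIENT_SECRET',
#   }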
PROJECT_NAME = "3c92f0"
# Set up where we'll be fetching data from
DATA_SOURCE = "https://api.hackster.io/v2/projects/"+PROJECT_NAME+"?"
DATA_SOURCE += "client_id="+secrets['hackster_clientid']
DATA_SOURCE += "&client_secret="+secrets['hackster_secret']
VIEWS_LOCATION = ['stats', 'views']
LIKES_LOCATION = ['stats', 'respects']
CAPTION = "http://www.hackster.com/project/"+PROJECT_NAME
# the current working directory (where this file is)
cwd = ("/"+__file__).rsplit('/', 1)[0]
pyportal = PyPortal(url=DATA_SOURCE, json_path=(LIKES_LOCATION, VIEWS_LOCATION),
status_neopixel=board.NEOPIXEL,
default_bg=cwd+"/hackster_background.bmp",
text_font=cwd+"/fonts/Arial-Bold-24.bdf",
text_position=((80, 75), (80, 145)),
text_color=(0x0000FF, 0x000000),
caption_text=CAPTION,
caption_font=cwd+"/fonts/Arial-12.bdf",
caption_position=(20, 200),
caption_color=0x000000)
# track the last value so we can play a sound when it updates
last_likes = 0
while True:
try:
likes, views = pyportal.fetch()
print("Views", views, "Likes", likes)
if last_likes < likes: # ooh it went up!
print("New respect!")
pyportal.play_file(cwd+"/coin.wav")
last_likes = likes
except RuntimeError as e:
print("Some error occured, retrying! -", e)
time.sleep(60)